
Commit 894bc73

[Flight] Patch Promise cycles and toString on Server Functions (#35345)
Server Functions can be stringified (sometimes implicitly) when passed as data. This adds a `toString` override to hide the source code in that case, just in case someone puts sensitive information in there. Note that this still preserves the `name` field, but that is also available on the export and in practice is likely minified anyway. There's nothing else on these references we'd consider unsafe unless you explicitly expose expandos as part of the `"use server"` export.

This also adds a safety check to ensure you don't encode cyclic Promises. This isn't a parser bug per se: Promises have a built-in safety mechanism that prevents them from looping infinitely. However, since we use custom Thenables, every time a native Promise awaits one, another Promise wrapper is created around the Thenable, which foils the ECMAScript Promise cycle detection and can lead to an infinite loop.

This also ensures that embedded `ReadableStream` and `AsyncIterable` streams are properly closed if the source stream closes early, both on the Server and the Client. That case doesn't cause an infinite loop, but closing them makes sure resource cleanup can proceed properly.

We're also adding more explicit, clear errors for invalid payloads since we no longer need to obfuscate the original issue.
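To make the stringification concern concrete, here is a minimal, hypothetical sketch of a client-side stand-in for a Server Function that keeps its `name` but hides any source text when stringified. `createServerFunctionStub` and `invoke` are illustrative names, not Flight APIs:

// A hypothetical stand-in for a Server Function reference; nothing here is
// the actual Flight implementation.
function createServerFunctionStub(name, invoke) {
  const stub = (...args) => invoke(args);
  Object.defineProperty(stub, 'name', {value: name});
  // Implicit stringification (template literals, string concatenation)
  // now reveals only the name, never the bound implementation.
  stub.toString = () => `function ${name}() { /* Server Function */ }`;
  return stub;
}

const saveDraft = createServerFunctionStub('saveDraft', (args) =>
  Promise.resolve(`received ${JSON.stringify(args)}`),
);
console.log(`${saveDraft}`); // function saveDraft() { /* Server Function */ }
saveDraft('hello').then(console.log); // received ["hello"]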
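The Promise-cycle issue is easiest to see in isolation. Below is a small self-contained sketch, unrelated to React's actual Chunk thenables: the engine's cycle detection compares the resolution value against the promise itself, so it catches a promise resolved with itself but never fires for a custom thenable, which here has to be stopped by a manual counter:

// Native promises detect direct self-resolution and reject with a TypeError.
const p = new Promise((resolve) => setTimeout(() => resolve(p)));
p.catch((err) => console.log('native:', err.message));
// -> "Chaining cycle detected for promise #<Promise>" (V8 wording)

// A custom thenable that keeps resolving with itself. The identity check that
// caught the native case never fires, because the thenable is not the promise
// being resolved; each adoption just schedules another adoption. Without an
// explicit guard this would spin in the microtask queue forever; the counter
// exists only so the demo terminates.
let adoptions = 0;
const cyclicThenable = {
  then(resolve, reject) {
    if (++adoptions > 5) {
      reject(new Error(`still cycling after ${adoptions} adoptions`));
      return;
    }
    resolve(cyclicThenable); // re-adopted instead of being detected as a cycle
  },
};
Promise.resolve(cyclicThenable).catch((err) =>
  console.log('thenable:', err.message),
);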
1 parent d3eb566 commit 894bc73

File tree: 14 files changed (+441, −240 lines changed)

packages/react-client/src/ReactFlightClient.js

Lines changed: 148 additions & 123 deletions
@@ -894,6 +894,7 @@ function resolveModuleChunk<T>(
   const resolvedChunk: ResolvedModuleChunk<T> = (chunk: any);
   resolvedChunk.status = RESOLVED_MODULE;
   resolvedChunk.value = value;
+  resolvedChunk.reason = null;
   if (__DEV__) {
     const debugInfo = getModuleDebugInfo(value);
     if (debugInfo !== null) {
@@ -1114,6 +1115,8 @@ export function reportGlobalError(
     // because we won't be getting any new data to resolve it.
     if (chunk.status === PENDING) {
       triggerErrorOnChunk(response, chunk, error);
+    } else if (chunk.status === INITIALIZED && chunk.reason !== null) {
+      chunk.reason.error(error);
     }
   });
   if (__DEV__) {
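The added branch above forwards a connection-level error into chunks that have already resolved to a stream, via whatever controller-like object is stored in `chunk.reason`. A simplified, hypothetical model of that pattern (`reportTransportError` and the chunk shapes are invented for this sketch, not React's actual types):

function reportTransportError(chunks, error) {
  for (const chunk of chunks) {
    if (chunk.status === 'pending') {
      // Nothing resolved yet: reject whoever is waiting on this chunk.
      chunk.reject(error);
    } else if (chunk.status === 'initialized' && chunk.controller !== null) {
      // The chunk already resolved to a stream; error the stored controller so
      // the embedded ReadableStream or async iterable shuts down as well.
      chunk.controller.error(error);
    }
  }
}

const demoChunks = [
  {status: 'pending', reject: (e) => console.log('rejected:', e.message)},
  {
    status: 'initialized',
    controller: {error: (e) => console.log('stream errored:', e.message)},
  },
];
reportTransportError(demoChunks, new Error('Connection closed.'));
// rejected: Connection closed.
// stream errored: Connection closed.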
@@ -1462,15 +1465,95 @@ function fulfillReference(
 ): void {
   const {handler, parentObject, key, map, path} = reference;

-  for (let i = 1; i < path.length; i++) {
+  try {
+    for (let i = 1; i < path.length; i++) {
+      while (
+        typeof value === 'object' &&
+        value !== null &&
+        value.$$typeof === REACT_LAZY_TYPE
+      ) {
+        // We never expect to see a Lazy node on this path because we encode those as
+        // separate models. This must mean that we have inserted an extra lazy node
+        // e.g. to replace a blocked element. We must instead look for it inside.
+        const referencedChunk: SomeChunk<any> = value._payload;
+        if (referencedChunk === handler.chunk) {
+          // This is a reference to the thing we're currently blocking. We can peak
+          // inside of it to get the value.
+          value = handler.value;
+          continue;
+        } else {
+          switch (referencedChunk.status) {
+            case RESOLVED_MODEL:
+              initializeModelChunk(referencedChunk);
+              break;
+            case RESOLVED_MODULE:
+              initializeModuleChunk(referencedChunk);
+              break;
+          }
+          switch (referencedChunk.status) {
+            case INITIALIZED: {
+              value = referencedChunk.value;
+              continue;
+            }
+            case BLOCKED: {
+              // It is possible that we're blocked on our own chunk if it's a cycle.
+              // Before adding the listener to the inner chunk, let's check if it would
+              // result in a cycle.
+              const cyclicHandler = resolveBlockedCycle(
+                referencedChunk,
+                reference,
+              );
+              if (cyclicHandler !== null) {
+                // This reference points back to this chunk. We can resolve the cycle by
+                // using the value from that handler.
+                value = cyclicHandler.value;
+                continue;
+              }
+              // Fallthrough
+            }
+            case PENDING: {
+              // If we're not yet initialized we need to skip what we've already drilled
+              // through and then wait for the next value to become available.
+              path.splice(0, i - 1);
+              // Add "listener" to our new chunk dependency.
+              if (referencedChunk.value === null) {
+                referencedChunk.value = [reference];
+              } else {
+                referencedChunk.value.push(reference);
+              }
+              if (referencedChunk.reason === null) {
+                referencedChunk.reason = [reference];
+              } else {
+                referencedChunk.reason.push(reference);
+              }
+              return;
+            }
+            case HALTED: {
+              // Do nothing. We couldn't fulfill.
+              // TODO: Mark downstreams as halted too.
+              return;
+            }
+            default: {
+              rejectReference(
+                response,
+                reference.handler,
+                referencedChunk.reason,
+              );
+              return;
+            }
+          }
+        }
+      }
+      value = value[path[i]];
+    }
+
     while (
       typeof value === 'object' &&
       value !== null &&
       value.$$typeof === REACT_LAZY_TYPE
     ) {
-      // We never expect to see a Lazy node on this path because we encode those as
-      // separate models. This must mean that we have inserted an extra lazy node
-      // e.g. to replace a blocked element. We must instead look for it inside.
+      // If what we're referencing is a Lazy it must be because we inserted one as a virtual node
+      // while it was blocked by other data. If it's no longer blocked, we can unwrap it.
       const referencedChunk: SomeChunk<any> = value._payload;
       if (referencedChunk === handler.chunk) {
         // This is a reference to the thing we're currently blocking. We can peak
@@ -1491,132 +1574,57 @@ function fulfillReference(
             value = referencedChunk.value;
             continue;
           }
-          case BLOCKED: {
-            // It is possible that we're blocked on our own chunk if it's a cycle.
-            // Before adding the listener to the inner chunk, let's check if it would
-            // result in a cycle.
-            const cyclicHandler = resolveBlockedCycle(
-              referencedChunk,
-              reference,
-            );
-            if (cyclicHandler !== null) {
-              // This reference points back to this chunk. We can resolve the cycle by
-              // using the value from that handler.
-              value = cyclicHandler.value;
-              continue;
-            }
-            // Fallthrough
-          }
-          case PENDING: {
-            // If we're not yet initialized we need to skip what we've already drilled
-            // through and then wait for the next value to become available.
-            path.splice(0, i - 1);
-            // Add "listener" to our new chunk dependency.
-            if (referencedChunk.value === null) {
-              referencedChunk.value = [reference];
-            } else {
-              referencedChunk.value.push(reference);
-            }
-            if (referencedChunk.reason === null) {
-              referencedChunk.reason = [reference];
-            } else {
-              referencedChunk.reason.push(reference);
-            }
-            return;
-          }
-          case HALTED: {
-            // Do nothing. We couldn't fulfill.
-            // TODO: Mark downstreams as halted too.
-            return;
-          }
-          default: {
-            rejectReference(
-              response,
-              reference.handler,
-              referencedChunk.reason,
-            );
-            return;
-          }
         }
       }
+      break;
     }
-    value = value[path[i]];
-  }

-  while (
-    typeof value === 'object' &&
-    value !== null &&
-    value.$$typeof === REACT_LAZY_TYPE
-  ) {
-    // If what we're referencing is a Lazy it must be because we inserted one as a virtual node
-    // while it was blocked by other data. If it's no longer blocked, we can unwrap it.
-    const referencedChunk: SomeChunk<any> = value._payload;
-    if (referencedChunk === handler.chunk) {
-      // This is a reference to the thing we're currently blocking. We can peak
-      // inside of it to get the value.
-      value = handler.value;
-      continue;
-    } else {
-      switch (referencedChunk.status) {
-        case RESOLVED_MODEL:
-          initializeModelChunk(referencedChunk);
+    const mappedValue = map(response, value, parentObject, key);
+    parentObject[key] = mappedValue;
+
+    // If this is the root object for a model reference, where `handler.value`
+    // is a stale `null`, the resolved value can be used directly.
+    if (key === '' && handler.value === null) {
+      handler.value = mappedValue;
+    }
+
+    // If the parent object is an unparsed React element tuple, we also need to
+    // update the props and owner of the parsed element object (i.e.
+    // handler.value).
+    if (
+      parentObject[0] === REACT_ELEMENT_TYPE &&
+      typeof handler.value === 'object' &&
+      handler.value !== null &&
+      handler.value.$$typeof === REACT_ELEMENT_TYPE
+    ) {
+      const element: any = handler.value;
+      switch (key) {
+        case '3':
+          transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
+          element.props = mappedValue;
+          break;
+        case '4':
+          // This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
+          if (__DEV__) {
+            element._owner = mappedValue;
+          }
           break;
-        case RESOLVED_MODULE:
-          initializeModuleChunk(referencedChunk);
+        case '5':
+          // This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
+          if (__DEV__) {
+            element._debugStack = mappedValue;
+          }
+          break;
+        default:
+          transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
           break;
       }
-      switch (referencedChunk.status) {
-        case INITIALIZED: {
-          value = referencedChunk.value;
-          continue;
-        }
-      }
-    }
-    break;
-  }
-
-  const mappedValue = map(response, value, parentObject, key);
-  parentObject[key] = mappedValue;
-
-  // If this is the root object for a model reference, where `handler.value`
-  // is a stale `null`, the resolved value can be used directly.
-  if (key === '' && handler.value === null) {
-    handler.value = mappedValue;
-  }
-
-  // If the parent object is an unparsed React element tuple, we also need to
-  // update the props and owner of the parsed element object (i.e.
-  // handler.value).
-  if (
-    parentObject[0] === REACT_ELEMENT_TYPE &&
-    typeof handler.value === 'object' &&
-    handler.value !== null &&
-    handler.value.$$typeof === REACT_ELEMENT_TYPE
-  ) {
-    const element: any = handler.value;
-    switch (key) {
-      case '3':
-        transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
-        element.props = mappedValue;
-        break;
-      case '4':
-        // This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
-        if (__DEV__) {
-          element._owner = mappedValue;
-        }
-        break;
-      case '5':
-        // This path doesn't call transferReferencedDebugInfo because this reference is to a debug chunk.
-        if (__DEV__) {
-          element._debugStack = mappedValue;
-        }
-        break;
-      default:
-        transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
-        break;
+    } else if (__DEV__ && !reference.isDebug) {
+      transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
     }
-  } else if (__DEV__ && !reference.isDebug) {
-    transferReferencedDebugInfo(handler.chunk, fulfilledChunk);
+  } catch (error) {
+    rejectReference(response, reference.handler, error);
+    return;
   }

   handler.deps--;
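Outside of React's chunk machinery, the situation `resolveBlockedCycle` guards against is the classic deserialization cycle: a reference can point back at the object that is currently being filled in. A generic sketch of the usual fix, reusing the partially built value rather than waiting on it (`resolveWithCycles` and the `'$ref:'` encoding are invented for this sketch):

function resolveWithCycles(models, rootId) {
  const inProgress = new Map(); // id -> partially built value

  function resolve(id) {
    if (inProgress.has(id)) {
      // The reference points back at an object we are currently filling in:
      // reuse the partially built value instead of waiting on it forever.
      return inProgress.get(id);
    }
    const model = models.get(id);
    const result = {};
    inProgress.set(id, result);
    for (const [key, value] of Object.entries(model)) {
      result[key] =
        typeof value === 'string' && value.startsWith('$ref:')
          ? resolve(value.slice(5))
          : value;
    }
    return result;
  }

  return resolve(rootId);
}

// Example: the root model references itself.
const models = new Map([['root', {name: 'a', self: '$ref:root'}]]);
const root = resolveWithCycles(models, 'root');
console.log(root.self === root); // true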
@@ -1882,6 +1890,7 @@ function loadServerReference<A: Iterable<any>, T>(
     const initializedChunk: InitializedChunk<T> = (chunk: any);
     initializedChunk.status = INITIALIZED;
     initializedChunk.value = handler.value;
+    initializedChunk.reason = null;
     if (resolveListeners !== null) {
       wakeChunk(response, resolveListeners, handler.value, initializedChunk);
     } else {
@@ -2359,7 +2368,7 @@ function parseModelString(
        // Symbol
        return Symbol.for(value.slice(2));
      }
-     case 'F': {
+     case 'h': {
        // Server Reference
        const ref = value.slice(2);
        return getOutlinedModel(
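For context, `parseModelString` dispatches on a tag character embedded in specially prefixed strings; the `value.slice(2)` above implies a two-character prefix ahead of the payload. A stripped-down sketch of that kind of dispatch, where only the `'h'` tag is taken from the hunk above and everything else (including the `'$'` framing) is an assumption made for illustration:

function parseTaggedString(value, resolveServerReference) {
  if (value[0] !== '$') {
    return value; // plain string, no tag
  }
  switch (value[1]) {
    case 'h': {
      // Server Reference: the payload after the two-character prefix is the id.
      const ref = value.slice(2);
      return resolveServerReference(ref);
    }
    default:
      throw new Error('Unknown or unsupported tag: ' + value[1]);
  }
}

console.log(parseTaggedString('plain text', () => null)); // plain text
console.log(parseTaggedString('$h42', (id) => ({serverFunctionId: id})));
// { serverFunctionId: '42' }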
@@ -3138,6 +3147,7 @@ function startReadableStream<T>(
   streamState: StreamState,
 ): void {
   let controller: ReadableStreamController = (null: any);
+  let closed = false;
   const stream = new ReadableStream({
     type: type,
     start(c) {
@@ -3195,6 +3205,10 @@ function startReadableStream<T>(
       }
     },
     close(json: UninitializedModel): void {
+      if (closed) {
+        return;
+      }
+      closed = true;
       if (previousBlockedChunk === null) {
         controller.close();
       } else {
@@ -3205,6 +3219,10 @@ function startReadableStream<T>(
       }
     },
     error(error: mixed): void {
+      if (closed) {
+        return;
+      }
+      closed = true;
       if (previousBlockedChunk === null) {
         // $FlowFixMe[incompatible-call]
         controller.error(error);
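The `closed` flag added in the two hunks above exists because a `ReadableStream` controller may only be closed or errored once; a late signal arriving from the wire after the stream has already finished would otherwise throw. A standalone sketch of the same guard (`createGuardedStream` is an illustrative name, not a Flight API; requires a runtime with web streams such as browsers or Node 18+):

function createGuardedStream() {
  let controller;
  let closed = false;
  const stream = new ReadableStream({
    start(c) {
      controller = c;
    },
  });
  return {
    stream,
    enqueue(chunk) {
      if (!closed) controller.enqueue(chunk);
    },
    close() {
      if (closed) return; // a second close would throw a TypeError
      closed = true;
      controller.close();
    },
    error(reason) {
      if (closed) return; // an error after close would also throw
      closed = true;
      controller.error(reason);
    },
  };
}

const s = createGuardedStream();
s.enqueue('row 1');
s.close();
s.close(); // ignored
s.error(new Error('late transport error')); // ignored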
@@ -3265,6 +3283,7 @@ function startAsyncIterable<T>(
         (chunk: any);
       initializedChunk.status = INITIALIZED;
       initializedChunk.value = {done: false, value: value};
+      initializedChunk.reason = null;
       if (resolveListeners !== null) {
         wakeChunkIfInitialized(
           response,
@@ -3294,6 +3313,9 @@ function startAsyncIterable<T>(
       nextWriteIndex++;
     },
     close(value: UninitializedModel): void {
+      if (closed) {
+        return;
+      }
       closed = true;
       if (nextWriteIndex === buffer.length) {
         buffer[nextWriteIndex] = createResolvedIteratorResultChunk(
@@ -3321,6 +3343,9 @@ function startAsyncIterable<T>(
       }
     },
     error(error: Error): void {
+      if (closed) {
+        return;
+      }
       closed = true;
       if (nextWriteIndex === buffer.length) {
         buffer[nextWriteIndex] =
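The same idempotency applies to the buffered async iterable: once `close` or `error` has run, later signals from the source must be ignored. A self-contained sketch of a small async queue with that guard (not Flight's actual buffering code; single consumer assumed, and all names are illustrative):

function createGuardedAsyncQueue() {
  const buffer = [];
  let closed = false;
  let wake = null;
  const signal = (entry) => {
    buffer.push(entry);
    if (wake !== null) {
      wake();
      wake = null;
    }
  };
  return {
    push(value) {
      if (!closed) signal({value});
    },
    close() {
      if (closed) return; // late close from the wire: ignore
      closed = true;
      signal({done: true});
    },
    error(err) {
      if (closed) return; // late error after close: ignore
      closed = true;
      signal({error: err});
    },
    async *[Symbol.asyncIterator]() {
      let i = 0;
      while (true) {
        if (i < buffer.length) {
          const entry = buffer[i++];
          if (entry.error) throw entry.error;
          if (entry.done) return;
          yield entry.value;
        } else {
          await new Promise((resolve) => (wake = resolve));
        }
      }
    },
  };
}

const queue = createGuardedAsyncQueue();
queue.push(1);
queue.push(2);
queue.close();
queue.error(new Error('source died late')); // ignored
(async () => {
  for await (const v of queue) console.log(v); // 1, 2
})();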
