@@ -2,6 +2,7 @@ import { describe, expect, test } from "vitest";
 
 import { patchCode } from "@opennextjs/aws/build/patch/astCodePatcher.js";
 import { rule } from "@opennextjs/aws/build/patch/patches/patchFetchCacheWaitUntil.js";
+import { computePatchDiff } from "./util.js";
 
 describe("patchFetchCacheSetMissingWaitUntil", () => {
   test("on minified code", () => {
@@ -139,115 +140,39 @@ describe("patchFetchCacheSetMissingWaitUntil", () => {
   }
 `;
 
-    expect(patchCode(code, rule)).toMatchInlineSnapshot(`
-      "if (
-        res.status === 200 &&
-        incrementalCache &&
-        cacheKey &&
-        (isCacheableRevalidate ||
-          useCacheOrRequestStore?.serverComponentsHmrCache)
-      ) {
-        const normalizedRevalidate =
-          finalRevalidate >= INFINITE_CACHE
-            ? CACHE_ONE_YEAR
-            : finalRevalidate
-        const externalRevalidate =
-          finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
-
-        if (workUnitStore && workUnitStore.type === 'prerender') {
-          // We are prerendering at build time or revalidate time with dynamicIO so we need to
-          // buffer the response so we can guarantee it can be read in a microtask
-          const bodyBuffer = await res.arrayBuffer()
-
-          const fetchedData = {
-            headers: Object.fromEntries(res.headers.entries()),
-            body: Buffer.from(bodyBuffer).toString('base64'),
-            status: res.status,
-            url: res.url,
-          }
-
-          // We can skip checking the serverComponentsHmrCache because we aren't in
-          // dev mode.
-
-          await incrementalCache.set(
-            cacheKey,
-            {
-              kind: CachedRouteKind.FETCH,
-              data: fetchedData,
-              revalidate: normalizedRevalidate,
-            },
-            {
-              fetchCache: true,
-              revalidate: externalRevalidate,
-              fetchUrl,
-              fetchIdx,
-              tags,
-            }
-          )
-          await handleUnlock()
-
-          // We return a new Response to the caller.
-          return new Response(bodyBuffer, {
-            headers: res.headers,
-            status: res.status,
-            statusText: res.statusText,
-          })
-        } else {
-          // We're cloning the response using this utility because there
-          // exists a bug in the undici library around response cloning.
-          // See the following pull request for more details:
-          // https://github.com/vercel/next.js/pull/73274
-
-          const [cloned1, cloned2] = cloneResponse(res)
-
-          // We are dynamically rendering including dev mode. We want to return
-          // the response to the caller as soon as possible because it might stream
-          // over a very long time.
-          globalThis.__openNextAls?.getStore()?.pendingPromiseRunner.add(cloned1
-            .arrayBuffer()
-            .then(async (arrayBuffer) => {
-              const bodyBuffer = Buffer.from(arrayBuffer)
-
-              const fetchedData = {
-                headers: Object.fromEntries(cloned1.headers.entries()),
-                body: bodyBuffer.toString('base64'),
-                status: cloned1.status,
-                url: cloned1.url,
-              }
-
-              useCacheOrRequestStore?.serverComponentsHmrCache?.set(
-                cacheKey,
-                fetchedData
-              )
-
-              if (isCacheableRevalidate) {
-                await incrementalCache.set(
-                  cacheKey,
-                  {
-                    kind: CachedRouteKind.FETCH,
-                    data: fetchedData,
-                    revalidate: normalizedRevalidate,
-                  },
-                  {
-                    fetchCache: true,
-                    revalidate: externalRevalidate,
-                    fetchUrl,
-                    fetchIdx,
-                    tags,
-                  }
-                )
-              }
-            })
-            .catch((error) =>
-              console.warn(\`Failed to set fetch cache\`, input, error)
-            )
-            .finally(handleUnlock))
-
-
-          return cloned2
-        }
-      }
-      "
+    expect(
+      computePatchDiff("patch-fetch.js", code, rule),
+    ).toMatchInlineSnapshot(`
+      "Index: patch-fetch.js
+      ===================================================================
+      --- patch-fetch.js
+      +++ patch-fetch.js
+      @@ -60,9 +60,9 @@
+       
+           // We are dynamically rendering including dev mode. We want to return
+           // the response to the caller as soon as possible because it might stream
+           // over a very long time.
+      -    cloned1
+      +    globalThis.__openNextAls?.getStore()?.pendingPromiseRunner.add(cloned1
+             .arrayBuffer()
+             .then(async (arrayBuffer) => {
+               const bodyBuffer = Buffer.from(arrayBuffer)
+       
+      @@ -98,10 +98,11 @@
+             })
+             .catch((error) =>
+               console.warn(\`Failed to set fetch cache\`, input, error)
+             )
+      -      .finally(handleUnlock)
+      +      .finally(handleUnlock))
+       
+      +
+           return cloned2
+         }
+       }
+       
+      \\ No newline at end of file
+      "
     `);
   });
 
@@ -353,107 +278,38 @@ describe("patchFetchCacheSetMissingWaitUntil", () => {
   }
 }`;
 
-    expect(patchCode(code, rule)).toMatchInlineSnapshot(`
-      "if (
-        res.status === 200 &&
-        incrementalCache &&
-        cacheKey &&
-        (isCacheableRevalidate || requestStore?.serverComponentsHmrCache)
-      ) {
-        const normalizedRevalidate =
-          finalRevalidate >= INFINITE_CACHE
-            ? CACHE_ONE_YEAR
-            : finalRevalidate
-        const externalRevalidate =
-          finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
-
-        if (workUnitStore && workUnitStore.type === 'prerender') {
-          // We are prerendering at build time or revalidate time with dynamicIO so we need to
-          // buffer the response so we can guarantee it can be read in a microtask
-          const bodyBuffer = await res.arrayBuffer()
-
-          const fetchedData = {
-            headers: Object.fromEntries(res.headers.entries()),
-            body: Buffer.from(bodyBuffer).toString('base64'),
-            status: res.status,
-            url: res.url,
-          }
-
-          // We can skip checking the serverComponentsHmrCache because we aren't in
-          // dev mode.
-
-          await incrementalCache.set(
-            cacheKey,
-            {
-              kind: CachedRouteKind.FETCH,
-              data: fetchedData,
-              revalidate: normalizedRevalidate,
-            },
-            {
-              fetchCache: true,
-              revalidate: externalRevalidate,
-              fetchUrl,
-              fetchIdx,
-              tags,
-            }
-          )
-          await handleUnlock()
-
-          // We we return a new Response to the caller.
-          return new Response(bodyBuffer, {
-            headers: res.headers,
-            status: res.status,
-            statusText: res.statusText,
-          })
-        } else {
-          // We are dynamically rendering including dev mode. We want to return
-          // the response to the caller as soon as possible because it might stream
-          // over a very long time.
-          globalThis.__openNextAls?.getStore()?.pendingPromiseRunner.add(res
-            .clone()
-            .arrayBuffer()
-            .then(async (arrayBuffer) => {
-              const bodyBuffer = Buffer.from(arrayBuffer)
-
-              const fetchedData = {
-                headers: Object.fromEntries(res.headers.entries()),
-                body: bodyBuffer.toString('base64'),
-                status: res.status,
-                url: res.url,
-              }
-
-              requestStore?.serverComponentsHmrCache?.set(
-                cacheKey,
-                fetchedData
-              )
-
-              if (isCacheableRevalidate) {
-                await incrementalCache.set(
-                  cacheKey,
-                  {
-                    kind: CachedRouteKind.FETCH,
-                    data: fetchedData,
-                    revalidate: normalizedRevalidate,
-                  },
-                  {
-                    fetchCache: true,
-                    revalidate: externalRevalidate,
-                    fetchUrl,
-                    fetchIdx,
-                    tags,
-                  }
-                )
-              }
-            })
-            .catch((error) =>
-              console.warn(\`Failed to set fetch cache\`, input, error)
-            )
-            .finally(handleUnlock))
-
-
-          return res
-        }
-      }"
+    expect(
+      computePatchDiff("patch-fetch.js", code, rule),
+    ).toMatchInlineSnapshot(`
+      "Index: patch-fetch.js
+      ===================================================================
+      --- patch-fetch.js
+      +++ patch-fetch.js
+      @@ -52,9 +52,9 @@
+         } else {
+           // We are dynamically rendering including dev mode. We want to return
+           // the response to the caller as soon as possible because it might stream
+           // over a very long time.
+      -    res
+      +    globalThis.__openNextAls?.getStore()?.pendingPromiseRunner.add(res
+             .clone()
+             .arrayBuffer()
+             .then(async (arrayBuffer) => {
+               const bodyBuffer = Buffer.from(arrayBuffer)
+      @@ -91,9 +91,10 @@
+             })
+             .catch((error) =>
+               console.warn(\`Failed to set fetch cache\`, input, error)
+             )
+      -      .finally(handleUnlock)
+      +      .finally(handleUnlock))
+       
+      +
+           return res
+         }
+       }
+      \\ No newline at end of file
+      "
     `);
   });
 });