@@ -9,13 +9,34 @@ import { recordWarning } from '../handlers/tracer.cjs'
 // lru-cache types don't like using `null` for values, so we use a symbol to represent it and do conversion
 // so it doesn't leak outside
 const NullValue = Symbol.for('null-value')
-type BlobLRUCache = LRUCache<string, BlobType | typeof NullValue | Promise<BlobType | null>>
+type DataWithEtag = { data: BlobType; etag: string }
+
+const isDataWithEtag = (value: unknown): value is DataWithEtag => {
+  return typeof value === 'object' && value !== null && 'data' in value && 'etag' in value
+}
+
+type BlobLRUCache = LRUCache<
+  string,
+  BlobType | typeof NullValue | Promise<BlobType | null> | DataWithEtag
+>
 
 const IN_MEMORY_CACHE_MAX_SIZE = Symbol.for('nf-in-memory-cache-max-size')
 const IN_MEMORY_LRU_CACHE = Symbol.for('nf-in-memory-lru-cache')
 const extendedGlobalThis = globalThis as typeof globalThis & {
   [IN_MEMORY_CACHE_MAX_SIZE]?: number
-  [IN_MEMORY_LRU_CACHE]?: BlobLRUCache | null
+  [IN_MEMORY_LRU_CACHE]?: {
+    /**
+     * Entries are scoped to request IDs.
+     */
+    perRequest: BlobLRUCache
+    /**
+     * Global cache shared between requests. Its values are never reused directly; they back
+     * conditional blob gets with etags, and the per-request cache is always tried first for a
+     * given blob key. Map values are weak references so this map does not strongly reference
+     * blobs, allowing GC to be driven by per-request LRU cache evictions alone.
+     */
+    global: Map<string, WeakRef<DataWithEtag>>
+  } | null
 }
 
 const DEFAULT_FALLBACK_MAX_SIZE = 50 * 1024 * 1024 // 50MB, same as default Next.js config
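The hunk above splits the single LRU cache into a per-request cache plus a global map of `WeakRef`s. A minimal sketch of that weak-reference pattern in isolation (simplified names, not the module's exports): the map alone never keeps a blob alive, so once the per-request LRU evicts the only strong reference, `deref()` can return `undefined` and the stale entry can be cleaned up.

```ts
// Illustrative sketch only; `DataWithEtag` is simplified and `globalCache` is not a real export.
type DataWithEtag = { data: unknown; etag: string }

const globalCache = new Map<string, WeakRef<DataWithEtag>>()

const writeGlobal = (key: string, value: DataWithEtag) => {
  // Only a weak reference is stored, so this map by itself never prevents garbage collection.
  globalCache.set(key, new WeakRef(value))
}

const readGlobal = (key: string): DataWithEtag | undefined => {
  const ref = globalCache.get(key)
  if (!ref) return undefined

  const value = ref.deref()
  if (!value) {
    // The blob was garbage collected after the strong reference was evicted elsewhere;
    // drop the stale entry so the map does not grow unbounded.
    globalCache.delete(key)
    return undefined
  }
  return value
}
```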
@@ -31,40 +52,46 @@ const isPositiveNumber = (value: unknown): value is PositiveNumber => {
 }
 
 const BASE_BLOB_SIZE = 25 as PositiveNumber
+const BASE_BLOB_WITH_ETAG_SIZE = (BASE_BLOB_SIZE + 34) as PositiveNumber
 
 const estimateBlobKnownTypeSize = (
-  valueToStore: BlobType | null | Promise<unknown>,
+  valueToStore: BlobType | null | Promise<unknown> | DataWithEtag,
 ): number | undefined => {
   // very approximate size calculation to avoid expensive exact size calculation
   // inspired by https://github.com/vercel/next.js/blob/ed10f7ed0246fcc763194197eb9beebcbd063162/packages/next/src/server/lib/incremental-cache/file-system-cache.ts#L60-L79
-  if (valueToStore === null || isPromise(valueToStore) || isTagManifest(valueToStore)) {
+  if (valueToStore === null || isPromise(valueToStore)) {
     return BASE_BLOB_SIZE
   }
-  if (isHtmlBlob(valueToStore)) {
-    return BASE_BLOB_SIZE + valueToStore.html.length
+
+  const { data, baseSize } = isDataWithEtag(valueToStore)
+    ? { data: valueToStore.data, baseSize: BASE_BLOB_WITH_ETAG_SIZE }
+    : { data: valueToStore, baseSize: BASE_BLOB_SIZE }
+
+  if (isTagManifest(data)) {
+    return baseSize
+  }
+
+  if (isHtmlBlob(data)) {
+    return baseSize + data.html.length
   }
 
-  if (valueToStore.value?.kind === 'FETCH') {
-    return BASE_BLOB_SIZE + valueToStore.value.data.body.length
+  if (data.value?.kind === 'FETCH') {
+    return baseSize + data.value.data.body.length
   }
-  if (valueToStore.value?.kind === 'APP_PAGE') {
-    return (
-      BASE_BLOB_SIZE + valueToStore.value.html.length + (valueToStore.value.rscData?.length ?? 0)
-    )
+  if (data.value?.kind === 'APP_PAGE') {
+    return baseSize + data.value.html.length + (data.value.rscData?.length ?? 0)
   }
-  if (valueToStore.value?.kind === 'PAGE' || valueToStore.value?.kind === 'PAGES') {
-    return (
-      BASE_BLOB_SIZE +
-      valueToStore.value.html.length +
-      JSON.stringify(valueToStore.value.pageData).length
-    )
+  if (data.value?.kind === 'PAGE' || data.value?.kind === 'PAGES') {
+    return baseSize + data.value.html.length + JSON.stringify(data.value.pageData).length
   }
-  if (valueToStore.value?.kind === 'ROUTE' || valueToStore.value?.kind === 'APP_ROUTE') {
-    return BASE_BLOB_SIZE + valueToStore.value.body.length
+  if (data.value?.kind === 'ROUTE' || data.value?.kind === 'APP_ROUTE') {
+    return baseSize + data.value.body.length
   }
 }
 
-const estimateBlobSize = (valueToStore: BlobType | null | Promise<unknown>): PositiveNumber => {
+const estimateBlobSize = (
+  valueToStore: BlobType | null | Promise<unknown> | DataWithEtag,
+): PositiveNumber => {
   let estimatedKnownTypeSize: number | undefined
   let estimateBlobKnownTypeSizeError: unknown
   try {
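The size estimate stays deliberately coarse: a fixed base constant plus the lengths of the strings that dominate each entry kind, with `DataWithEtag` wrappers adding only a constant 34 for the etag. A rough worked example for the `APP_PAGE` branch, using made-up lengths:

```ts
// Numbers below are illustrative, not taken from the PR.
const BASE_BLOB_SIZE = 25
const BASE_BLOB_WITH_ETAG_SIZE = BASE_BLOB_SIZE + 34 // 59

const htmlLength = 6_000 // length of value.html
const rscDataLength = 2_000 // length of value.rscData (0 if absent)

// Plain APP_PAGE blob:         25 + 6000 + 2000 = 8025
const plainEstimate = BASE_BLOB_SIZE + htmlLength + rscDataLength

// Same blob wrapped with etag: 59 + 6000 + 2000 = 8059
const withEtagEstimate = BASE_BLOB_WITH_ETAG_SIZE + htmlLength + rscDataLength
```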
@@ -98,23 +125,45 @@ function getInMemoryLRUCache() {
         ? extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE]
         : DEFAULT_FALLBACK_MAX_SIZE
 
-    extendedGlobalThis[IN_MEMORY_LRU_CACHE] =
-      maxSize === 0
-        ? null // if user sets 0 in their config, we should honor that and not use in-memory cache
-        : new LRUCache<string, BlobType | typeof NullValue | Promise<BlobType | null>>({
-            max: 1000,
-            maxSize,
-            sizeCalculation: (valueToStore) => {
-              return estimateBlobSize(valueToStore === NullValue ? null : valueToStore)
-            },
-          })
+    if (maxSize === 0) {
+      extendedGlobalThis[IN_MEMORY_LRU_CACHE] = null
+    } else {
+      const global = new Map<string, WeakRef<DataWithEtag>>()
+
+      const perRequest = new LRUCache<
+        string,
+        BlobType | typeof NullValue | Promise<BlobType | null> | DataWithEtag
+      >({
+        max: 1000,
+        maxSize,
+        sizeCalculation: (valueToStore, key) => {
+          return estimateBlobSize(valueToStore === NullValue ? null : valueToStore)
+        },
+      })
+
+      extendedGlobalThis[IN_MEMORY_LRU_CACHE] = {
+        perRequest,
+        global,
+      }
+    }
   }
   return extendedGlobalThis[IN_MEMORY_LRU_CACHE]
 }
 
+export function clearInMemoryLRUCacheForTesting() {
+  extendedGlobalThis[IN_MEMORY_LRU_CACHE] = undefined
+}
+
 interface RequestScopedInMemoryCache {
-  get(key: string): BlobType | null | Promise<BlobType | null> | undefined
-  set(key: string, value: BlobType | null | Promise<BlobType | null>): void
+  get(key: string):
+    | { conditional: false; currentRequestValue: BlobType | null | Promise<BlobType | null> }
+    | {
+        conditional: true
+        globalValue: BlobType
+        etag: string
+      }
+    | undefined
+  set(key: string, value: BlobType | null | Promise<BlobType | null> | DataWithEtag): void
 }
 
 export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
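The construction above still honors `maxSize === 0` as "no in-memory cache", and the new `clearInMemoryLRUCacheForTesting` export lets tests force the lazily built cache to be rebuilt. A hedged sketch of how a test might combine the two; the `beforeEach` hook and the `declare` stubs stand in for whatever test runner and import path are actually used:

```ts
// Sketch of a test setup (assumptions: `beforeEach` comes from the test runner,
// `clearInMemoryLRUCacheForTesting` is the export added above).
declare function beforeEach(fn: () => void): void
declare function clearInMemoryLRUCacheForTesting(): void

// Matches the symbol registered above via Symbol.for().
const IN_MEMORY_CACHE_MAX_SIZE = Symbol.for('nf-in-memory-cache-max-size')

beforeEach(() => {
  // Drop whatever cache a previous test built so the next access runs the setup again...
  clearInMemoryLRUCacheForTesting()
  // ...and disable in-memory caching for this test: maxSize === 0 maps the cache to null.
  ;(globalThis as Record<symbol, unknown>)[IN_MEMORY_CACHE_MAX_SIZE] = 0
})
```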
@@ -125,8 +174,35 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
     get(key) {
       if (!requestContext) return
       try {
-        const value = inMemoryLRUCache?.get(`${requestContext.requestID}:${key}`)
-        return value === NullValue ? null : value
+        const currentRequestValue = inMemoryLRUCache?.perRequest.get(
+          `${requestContext.requestID}:${key}`,
+        )
+        if (currentRequestValue) {
+          return {
+            conditional: false,
+            currentRequestValue:
+              currentRequestValue === NullValue
+                ? null
+                : isDataWithEtag(currentRequestValue)
+                  ? currentRequestValue.data
+                  : currentRequestValue,
+          }
+        }
+
+        const globalEntry = inMemoryLRUCache?.global.get(key)
+        if (globalEntry) {
+          const dereferencedGlobalEntry = globalEntry.deref()
+          if (dereferencedGlobalEntry) {
+            return {
+              conditional: true,
+              globalValue: dereferencedGlobalEntry.data,
+              etag: dereferencedGlobalEntry.etag,
+            }
+          }
+
+          // value has been GC'ed, so we can clean up the map entry as it no longer points to an existing value
+          inMemoryLRUCache?.global.delete(key)
+        }
       } catch (error) {
         // using in-memory store is perf optimization not requirement
         // trying to use optimization should NOT cause crashes
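A hypothetical consumer of the new `get()` contract, to show how the two union branches are meant to be used: a non-conditional hit is reused as-is, while a conditional hit should trigger an etag revalidation against the blob store. `fetchBlob` and `fetchBlobIfChanged` are placeholder helpers, not functions from this PR; `BlobType` and `getRequestScopedInMemoryCache` are assumed to be the module's own exports.

```ts
// Placeholder helpers standing in for blob-store reads; not part of this module.
declare function fetchBlob(key: string): Promise<BlobType | null>
declare function fetchBlobIfChanged(key: string, etag: string): Promise<BlobType | null>

async function readBlob(key: string): Promise<BlobType | null> {
  const cache = getRequestScopedInMemoryCache()
  const cached = cache.get(key)

  if (cached && !cached.conditional) {
    // This request already resolved the key; reuse it without touching the blob store.
    return cached.currentRequestValue
  }

  if (cached?.conditional) {
    // Another request left a weakly-held copy plus its etag: ask the store only whether
    // the content changed, and fall back to the cached data when it has not.
    const fresh = await fetchBlobIfChanged(key, cached.etag)
    return fresh ?? cached.globalValue
  }

  return fetchBlob(key)
}
```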
@@ -137,7 +213,10 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
     set(key, value) {
       if (!requestContext) return
       try {
-        inMemoryLRUCache?.set(`${requestContext?.requestID}:${key}`, value ?? NullValue)
+        if (isDataWithEtag(value)) {
+          inMemoryLRUCache?.global.set(key, new WeakRef(value))
+        }
+        inMemoryLRUCache?.perRequest.set(`${requestContext.requestID}:${key}`, value ?? NullValue)
       } catch (error) {
         // using in-memory store is perf optimization not requirement
         // trying to use optimization should NOT cause crashes
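On the write side, passing a `DataWithEtag` value makes `set()` populate both stores: the per-request LRU keyed by request ID (strong reference) and the global map keyed by the bare blob key (weak reference). A hedged sketch of what a caller might store; `blob` and `etagFromStore` are stand-ins for whatever the blob store returned, and the fetch itself is omitted.

```ts
// Stand-ins for values obtained from the blob store; how they are fetched is outside this sketch.
declare const blob: BlobType
declare const etagFromStore: string

const cache = getRequestScopedInMemoryCache()

// Plain values (or null) only land in the per-request LRU, as before.
cache.set('plain-key', blob)

// Values wrapped with an etag additionally go into the global weak map,
// where later requests can pick them up for conditional gets.
cache.set('etag-key', { data: blob, etag: etagFromStore })
```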