@@ -9,13 +9,34 @@ import { recordWarning } from '../handlers/tracer.cjs'
 // lru-cache types don't like using `null` for values, so we use a symbol to represent it and do conversion
 // so it doesn't leak outside
 const NullValue = Symbol.for('null-value')
-type BlobLRUCache = LRUCache<string, BlobType | typeof NullValue | Promise<BlobType | null>>
+type DataWithEtag = { data: BlobType; etag: string }
+
+const isDataWithEtag = (value: unknown): value is DataWithEtag => {
+  return typeof value === 'object' && value !== null && 'data' in value && 'etag' in value
+}
+
+type BlobLRUCache = LRUCache<
+  string,
+  BlobType | typeof NullValue | Promise<BlobType | null> | DataWithEtag
+>
 
 const IN_MEMORY_CACHE_MAX_SIZE = Symbol.for('nf-in-memory-cache-max-size')
 const IN_MEMORY_LRU_CACHE = Symbol.for('nf-in-memory-lru-cache')
 const extendedGlobalThis = globalThis as typeof globalThis & {
   [IN_MEMORY_CACHE_MAX_SIZE]?: number
-  [IN_MEMORY_LRU_CACHE]?: BlobLRUCache | null
+  [IN_MEMORY_LRU_CACHE]?: {
+    /**
+     * entries are scoped to request IDs
+     */
+    perRequest: BlobLRUCache
+    /**
+     * Global cache shared between requests. It does not allow immediate re-use of values;
+     * instead it is used for conditional blob gets with etags, after the given blob key has
+     * first been tried in the per-request cache. Map values are weak references to avoid this
+     * map strongly referencing blobs, so GC is driven by per-request LRU cache evictions alone.
+     */
+    global: Map<string, WeakRef<DataWithEtag>>
+  } | null
 }
 
 const DEFAULT_FALLBACK_MAX_SIZE = 50 * 1024 * 1024 // 50MB, same as default Next.js config
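The shared `global` map above deliberately holds only WeakRefs. A minimal standalone sketch of that idea (not code from this change; all names below are made up): the per-request store keeps the only strong reference, so once it drops an entry the blob becomes collectable and `deref()` on the shared side may start returning `undefined`.

// Sketch only: a strong per-request store plus a WeakRef-based shared store.
type Entry = { data: string; etag: string }

const perRequestStore = new Map<string, Entry>() // stand-in for the per-request LRUCache
const sharedStore = new Map<string, WeakRef<Entry>>() // shared between requests

const write = (requestID: string, key: string, entry: Entry): void => {
  perRequestStore.set(`${requestID}:${key}`, entry) // strong reference, scoped to one request
  sharedStore.set(key, new WeakRef(entry)) // weak reference, shared across requests
}

const readShared = (key: string): Entry | undefined => {
  const ref = sharedStore.get(key)
  const entry = ref?.deref()
  if (ref && !entry) {
    sharedStore.delete(key) // the entry was GC'ed, drop the stale WeakRef
  }
  return entry
}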
@@ -31,40 +52,46 @@ const isPositiveNumber = (value: unknown): value is PositiveNumber => {
 }
 
 const BASE_BLOB_SIZE = 25 as PositiveNumber
+const BASE_BLOB_WITH_ETAG_SIZE = (BASE_BLOB_SIZE + 34) as PositiveNumber
 
 const estimateBlobKnownTypeSize = (
-  valueToStore: BlobType | null | Promise<unknown>,
+  valueToStore: BlobType | null | Promise<unknown> | DataWithEtag,
 ): number | undefined => {
   // very approximate size calculation to avoid expensive exact size calculation
   // inspired by https://github.com/vercel/next.js/blob/ed10f7ed0246fcc763194197eb9beebcbd063162/packages/next/src/server/lib/incremental-cache/file-system-cache.ts#L60-L79
-  if (valueToStore === null || isPromise(valueToStore) || isTagManifest(valueToStore)) {
+  if (valueToStore === null || isPromise(valueToStore)) {
     return BASE_BLOB_SIZE
   }
-  if (isHtmlBlob(valueToStore)) {
-    return BASE_BLOB_SIZE + valueToStore.html.length
+
+  const { data, baseSize } = isDataWithEtag(valueToStore)
+    ? { data: valueToStore.data, baseSize: BASE_BLOB_WITH_ETAG_SIZE }
+    : { data: valueToStore, baseSize: BASE_BLOB_SIZE }
+
+  if (isTagManifest(data)) {
+    return baseSize
+  }
+
+  if (isHtmlBlob(data)) {
+    return baseSize + data.html.length
   }
 
-  if (valueToStore.value?.kind === 'FETCH') {
-    return BASE_BLOB_SIZE + valueToStore.value.data.body.length
+  if (data.value?.kind === 'FETCH') {
+    return baseSize + data.value.data.body.length
   }
-  if (valueToStore.value?.kind === 'APP_PAGE') {
-    return (
-      BASE_BLOB_SIZE + valueToStore.value.html.length + (valueToStore.value.rscData?.length ?? 0)
-    )
+  if (data.value?.kind === 'APP_PAGE') {
+    return baseSize + data.value.html.length + (data.value.rscData?.length ?? 0)
   }
-  if (valueToStore.value?.kind === 'PAGE' || valueToStore.value?.kind === 'PAGES') {
-    return (
-      BASE_BLOB_SIZE +
-      valueToStore.value.html.length +
-      JSON.stringify(valueToStore.value.pageData).length
-    )
+  if (data.value?.kind === 'PAGE' || data.value?.kind === 'PAGES') {
+    return baseSize + data.value.html.length + JSON.stringify(data.value.pageData).length
   }
-  if (valueToStore.value?.kind === 'ROUTE' || valueToStore.value?.kind === 'APP_ROUTE') {
-    return BASE_BLOB_SIZE + valueToStore.value.body.length
+  if (data.value?.kind === 'ROUTE' || data.value?.kind === 'APP_ROUTE') {
+    return baseSize + data.value.body.length
   }
 }
 
-const estimateBlobSize = (valueToStore: BlobType | null | Promise<unknown>): PositiveNumber => {
+const estimateBlobSize = (
+  valueToStore: BlobType | null | Promise<unknown> | DataWithEtag,
+): PositiveNumber => {
   let estimatedKnownTypeSize: number | undefined
   let estimateBlobKnownTypeSizeError: unknown
   try {
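For a rough, hypothetical feel of the heuristic above (numbers made up): a FETCH entry with a 1,000-character body is estimated at 25 + 1000 = 1025 when stored bare, and at (25 + 34) + 1000 = 1059 when wrapped in DataWithEtag, the extra 34 presumably approximating the etag string and wrapper overhead.

// Illustration only (numbers made up), mirroring the constants above.
const BASE_BLOB_SIZE = 25
const BASE_BLOB_WITH_ETAG_SIZE = BASE_BLOB_SIZE + 34

const fetchBodyLength = 1_000
console.log(BASE_BLOB_SIZE + fetchBodyLength) // 1025 for a bare FETCH blob
console.log(BASE_BLOB_WITH_ETAG_SIZE + fetchBodyLength) // 1059 for the same blob wrapped with an etag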
@@ -98,23 +125,41 @@ function getInMemoryLRUCache() {
     ? extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE]
     : DEFAULT_FALLBACK_MAX_SIZE
 
-  extendedGlobalThis[IN_MEMORY_LRU_CACHE] =
-    maxSize === 0
-      ? null // if user sets 0 in their config, we should honor that and not use in-memory cache
-      : new LRUCache<string, BlobType | typeof NullValue | Promise<BlobType | null>>({
-          max: 1000,
-          maxSize,
-          sizeCalculation: (valueToStore) => {
-            return estimateBlobSize(valueToStore === NullValue ? null : valueToStore)
-          },
-        })
+  if (maxSize === 0) {
+    extendedGlobalThis[IN_MEMORY_LRU_CACHE] = null
+  } else {
+    const global = new Map<string, WeakRef<DataWithEtag>>()
+
+    const perRequest = new LRUCache<
+      string,
+      BlobType | typeof NullValue | Promise<BlobType | null> | DataWithEtag
+    >({
+      max: 1000,
+      maxSize,
+      sizeCalculation: (valueToStore) => {
+        return estimateBlobSize(valueToStore === NullValue ? null : valueToStore)
+      },
+    })
+
+    extendedGlobalThis[IN_MEMORY_LRU_CACHE] = {
+      perRequest,
+      global,
+    }
+  }
   }
   return extendedGlobalThis[IN_MEMORY_LRU_CACHE]
 }
 
 interface RequestScopedInMemoryCache {
-  get(key: string): BlobType | null | Promise<BlobType | null> | undefined
-  set(key: string, value: BlobType | null | Promise<BlobType | null>): void
+  get(key: string):
+    | { conditional: false; currentRequestValue: BlobType | null | Promise<BlobType | null> }
+    | {
+        conditional: true
+        globalValue: BlobType
+        etag: string
+      }
+    | undefined
+  set(key: string, value: BlobType | null | Promise<BlobType | null> | DataWithEtag): void
 }
 
 export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
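The get() return type above is a discriminated union on `conditional`. A hedged sketch of how a caller could consume it; the two blob-store helpers are hypothetical stand-ins and not part of this change, while getRequestScopedInMemoryCache and BlobType are assumed to be in scope from this module.

// Hypothetical consumer; conditionalBlobGet and unconditionalBlobGet are made-up stubs.
declare function conditionalBlobGet(key: string, etag: string): Promise<BlobType | 'not-modified'>
declare function unconditionalBlobGet(key: string): Promise<BlobType | null>

const readBlob = async (key: string): Promise<BlobType | null> => {
  const cached = getRequestScopedInMemoryCache().get(key)

  if (cached && !cached.conditional) {
    // hit within the current request: reuse directly (may still be an in-flight promise)
    return await cached.currentRequestValue
  }

  if (cached?.conditional) {
    // hit from another request: revalidate with the stored etag before reusing the value
    const result = await conditionalBlobGet(key, cached.etag)
    return result === 'not-modified' ? cached.globalValue : result
  }

  return await unconditionalBlobGet(key)
}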
@@ -125,8 +170,35 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
     get(key) {
       if (!requestContext) return
       try {
-        const value = inMemoryLRUCache?.get(`${requestContext.requestID}:${key}`)
-        return value === NullValue ? null : value
+        const currentRequestValue = inMemoryLRUCache?.perRequest.get(
+          `${requestContext.requestID}:${key}`,
+        )
+        if (currentRequestValue) {
+          return {
+            conditional: false,
+            currentRequestValue:
+              currentRequestValue === NullValue
+                ? null
+                : isDataWithEtag(currentRequestValue)
+                  ? currentRequestValue.data
+                  : currentRequestValue,
+          }
+        }
+
+        const globalEntry = inMemoryLRUCache?.global.get(key)
+        if (globalEntry) {
+          const dereferencedGlobalEntry = globalEntry.deref()
+          if (dereferencedGlobalEntry) {
+            return {
+              conditional: true,
+              globalValue: dereferencedGlobalEntry.data,
+              etag: dereferencedGlobalEntry.etag,
+            }
+          }
+
+          // value has been GC'ed, so we can clean up the map entry as it no longer points to an existing value
+          inMemoryLRUCache?.global.delete(key)
+        }
       } catch (error) {
         // using in-memory store is perf optimization not requirement
         // trying to use optimization should NOT cause crashes
@@ -137,7 +209,10 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => {
137209 set ( key , value ) {
138210 if ( ! requestContext ) return
139211 try {
140- inMemoryLRUCache ?. set ( `${ requestContext ?. requestID } :${ key } ` , value ?? NullValue )
212+ if ( isDataWithEtag ( value ) ) {
213+ inMemoryLRUCache ?. global . set ( key , new WeakRef ( value ) )
214+ }
215+ inMemoryLRUCache ?. perRequest . set ( `${ requestContext . requestID } :${ key } ` , value ?? NullValue )
141216 } catch ( error ) {
142217 // using in-memory store is perf optimization not requirement
143218 // trying to use optimization should NOT cause crashes
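Putting set() and get() together, a brief hypothetical sequence (the key, etag and blob value are made up; getRequestScopedInMemoryCache and BlobType are assumed to be in scope from this module).

// Hypothetical flow across two requests; someBlob stands in for a real BlobType value.
declare const someBlob: BlobType

// Request A: setting a value with an etag populates both the per-request LRU entry
// and the shared WeakRef map.
const cacheA = getRequestScopedInMemoryCache()
cacheA.set('blob-key', { data: someBlob, etag: '"abc123"' })
cacheA.get('blob-key') // -> { conditional: false, currentRequestValue: someBlob }

// Request B (different request ID, so no per-request entry exists): only the shared
// WeakRef map can answer, and the caller must revalidate with the etag before reuse.
const cacheB = getRequestScopedInMemoryCache()
cacheB.get('blob-key') // -> { conditional: true, globalValue: someBlob, etag: '"abc123"' }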