Initial commit
This commit is contained in:
8
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.d.ts
generated
vendored
Normal file
8
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export type CacheLife = {
|
||||
stale?: number;
|
||||
revalidate?: number;
|
||||
expire?: number;
|
||||
};
|
||||
type CacheLifeProfiles = 'default' | 'seconds' | 'minutes' | 'hours' | 'days' | 'weeks' | 'max' | (string & {});
|
||||
export declare function cacheLife(profile: CacheLifeProfiles | CacheLife): void;
|
||||
export {};
|
||||
156
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.js
generated
vendored
Normal file
156
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.js
generated
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "cacheLife", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return cacheLife;
|
||||
}
|
||||
});
|
||||
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
|
||||
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
|
||||
function validateCacheLife(profile) {
|
||||
if (profile.stale !== undefined) {
|
||||
if (profile.stale === false) {
|
||||
throw Object.defineProperty(new Error('Pass `Infinity` instead of `false` if you want to cache on the client forever ' + 'without checking with the server.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E407",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else if (typeof profile.stale !== 'number') {
|
||||
throw Object.defineProperty(new Error('The stale option must be a number of seconds.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E308",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (profile.revalidate !== undefined) {
|
||||
if (profile.revalidate === false) {
|
||||
throw Object.defineProperty(new Error('Pass `Infinity` instead of `false` if you do not want to revalidate by time.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E104",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else if (typeof profile.revalidate !== 'number') {
|
||||
throw Object.defineProperty(new Error('The revalidate option must be a number of seconds.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E233",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (profile.expire !== undefined) {
|
||||
if (profile.expire === false) {
|
||||
throw Object.defineProperty(new Error('Pass `Infinity` instead of `false` if you want to cache on the server forever ' + 'without checking with the origin.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E658",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else if (typeof profile.expire !== 'number') {
|
||||
throw Object.defineProperty(new Error('The expire option must be a number of seconds.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E3",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (profile.revalidate !== undefined && profile.expire !== undefined) {
|
||||
if (profile.revalidate > profile.expire) {
|
||||
throw Object.defineProperty(new Error('If providing both the revalidate and expire options, ' + 'the expire option must be greater than the revalidate option. ' + 'The expire option indicates how many seconds from the start ' + 'until it can no longer be used.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E656",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (profile.stale !== undefined && profile.expire !== undefined) {
|
||||
if (profile.stale > profile.expire) {
|
||||
throw Object.defineProperty(new Error('If providing both the stale and expire options, ' + 'the expire option must be greater than the stale option. ' + 'The expire option indicates how many seconds from the start ' + 'until it can no longer be used.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E655",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
function cacheLife(profile) {
|
||||
if (!process.env.__NEXT_USE_CACHE) {
|
||||
throw Object.defineProperty(new Error('cacheLife() is only available with the experimental.useCache config.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E627",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
|
||||
if (!workUnitStore || workUnitStore.type !== 'cache') {
|
||||
throw Object.defineProperty(new Error('cacheLife() can only be called inside a "use cache" function.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E250",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (typeof profile === 'string') {
|
||||
const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
|
||||
if (!workStore) {
|
||||
throw Object.defineProperty(new Error('cacheLife() can only be called during App Router rendering at the moment.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E94",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (!workStore.cacheLifeProfiles) {
|
||||
throw Object.defineProperty(new Error('cacheLifeProfiles should always be provided. This is a bug in Next.js.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E294",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// TODO: This should be globally available and not require an AsyncLocalStorage.
|
||||
const configuredProfile = workStore.cacheLifeProfiles[profile];
|
||||
if (configuredProfile === undefined) {
|
||||
if (workStore.cacheLifeProfiles[profile.trim()]) {
|
||||
throw Object.defineProperty(new Error(`Unknown cacheLife profile "${profile}" is not configured in next.config.js\n` + `Did you mean "${profile.trim()}" without the spaces?`), "__NEXT_ERROR_CODE", {
|
||||
value: "E16",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
throw Object.defineProperty(new Error(`Unknown cacheLife profile "${profile}" is not configured in next.config.js\n` + 'module.exports = {\n' + ' experimental: {\n' + ' cacheLife: {\n' + ` "${profile}": ...\n` + ' }\n' + ' }\n' + '}'), "__NEXT_ERROR_CODE", {
|
||||
value: "E137",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
profile = configuredProfile;
|
||||
} else if (typeof profile !== 'object' || profile === null || Array.isArray(profile)) {
|
||||
throw Object.defineProperty(new Error('Invalid cacheLife() option. Either pass a profile name or object.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E110",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
validateCacheLife(profile);
|
||||
}
|
||||
if (profile.revalidate !== undefined) {
|
||||
// Track the explicit revalidate time.
|
||||
if (workUnitStore.explicitRevalidate === undefined || workUnitStore.explicitRevalidate > profile.revalidate) {
|
||||
workUnitStore.explicitRevalidate = profile.revalidate;
|
||||
}
|
||||
}
|
||||
if (profile.expire !== undefined) {
|
||||
// Track the explicit expire time.
|
||||
if (workUnitStore.explicitExpire === undefined || workUnitStore.explicitExpire > profile.expire) {
|
||||
workUnitStore.explicitExpire = profile.expire;
|
||||
}
|
||||
}
|
||||
if (profile.stale !== undefined) {
|
||||
// Track the explicit stale time.
|
||||
if (workUnitStore.explicitStale === undefined || workUnitStore.explicitStale > profile.stale) {
|
||||
workUnitStore.explicitStale = profile.stale;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=cache-life.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-life.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.d.ts
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare function cacheTag(...tags: string[]): void;
|
||||
37
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.js
generated
vendored
Normal file
37
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "cacheTag", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return cacheTag;
|
||||
}
|
||||
});
|
||||
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
|
||||
const _patchfetch = require("../lib/patch-fetch");
|
||||
function cacheTag(...tags) {
|
||||
if (!process.env.__NEXT_USE_CACHE) {
|
||||
throw Object.defineProperty(new Error('cacheTag() is only available with the experimental.useCache config.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E628",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
|
||||
if (!workUnitStore || workUnitStore.type !== 'cache') {
|
||||
throw Object.defineProperty(new Error('cacheTag() can only be called inside a "use cache" function.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E177",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const validTags = (0, _patchfetch.validateTags)(tags, 'cacheTag()');
|
||||
if (!workUnitStore.tags) {
|
||||
workUnitStore.tags = validTags;
|
||||
} else {
|
||||
workUnitStore.tags.push(...validTags);
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=cache-tag.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/cache-tag.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/use-cache/cache-tag.ts"],"sourcesContent":["import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external'\nimport { validateTags } from '../lib/patch-fetch'\n\nexport function cacheTag(...tags: string[]): void {\n if (!process.env.__NEXT_USE_CACHE) {\n throw new Error(\n 'cacheTag() is only available with the experimental.useCache config.'\n )\n }\n\n const workUnitStore = workUnitAsyncStorage.getStore()\n if (!workUnitStore || workUnitStore.type !== 'cache') {\n throw new Error(\n 'cacheTag() can only be called inside a \"use cache\" function.'\n )\n }\n\n const validTags = validateTags(tags, 'cacheTag()')\n\n if (!workUnitStore.tags) {\n workUnitStore.tags = validTags\n } else {\n workUnitStore.tags.push(...validTags)\n }\n}\n"],"names":["cacheTag","tags","process","env","__NEXT_USE_CACHE","Error","workUnitStore","workUnitAsyncStorage","getStore","type","validTags","validateTags","push"],"mappings":";;;;+BAGgBA;;;eAAAA;;;8CAHqB;4BACR;AAEtB,SAASA,SAAS,GAAGC,IAAc;IACxC,IAAI,CAACC,QAAQC,GAAG,CAACC,gBAAgB,EAAE;QACjC,MAAM,qBAEL,CAFK,IAAIC,MACR,wEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAMC,gBAAgBC,kDAAoB,CAACC,QAAQ;IACnD,IAAI,CAACF,iBAAiBA,cAAcG,IAAI,KAAK,SAAS;QACpD,MAAM,qBAEL,CAFK,IAAIJ,MACR,iEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAMK,YAAYC,IAAAA,wBAAY,EAACV,MAAM;IAErC,IAAI,CAACK,cAAcL,IAAI,EAAE;QACvBK,cAAcL,IAAI,GAAGS;IACvB,OAAO;QACLJ,cAAcL,IAAI,CAACW,IAAI,IAAIF;IAC7B;AACF"}
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/constants.d.ts
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/constants.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare const DYNAMIC_EXPIRE = 300;
|
||||
14
frontend/webapp/node_modules/next/dist/server/use-cache/constants.js
generated
vendored
Normal file
14
frontend/webapp/node_modules/next/dist/server/use-cache/constants.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
// If the expire time is less than .
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "DYNAMIC_EXPIRE", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return DYNAMIC_EXPIRE;
|
||||
}
|
||||
});
|
||||
const DYNAMIC_EXPIRE = 300;
|
||||
|
||||
//# sourceMappingURL=constants.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/constants.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/constants.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/use-cache/constants.ts"],"sourcesContent":["// If the expire time is less than .\nexport const DYNAMIC_EXPIRE = 300\n"],"names":["DYNAMIC_EXPIRE"],"mappings":"AAAA,oCAAoC;;;;;+BACvBA;;;eAAAA;;;AAAN,MAAMA,iBAAiB"}
|
||||
32
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.d.ts
generated
vendored
Normal file
32
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
import type { CacheHandlerCompat } from '../lib/cache-handlers/types';
|
||||
/**
|
||||
* Initialize the cache handlers.
|
||||
* @returns `true` if the cache handlers were initialized, `false` if they were already initialized.
|
||||
*/
|
||||
export declare function initializeCacheHandlers(): boolean;
|
||||
/**
|
||||
* Get a cache handler by kind.
|
||||
* @param kind - The kind of cache handler to get.
|
||||
* @returns The cache handler, or `undefined` if it does not exist.
|
||||
* @throws If the cache handlers are not initialized.
|
||||
*/
|
||||
export declare function getCacheHandler(kind: string): CacheHandlerCompat | undefined;
|
||||
/**
|
||||
* Get a set iterator over the cache handlers.
|
||||
* @returns An iterator over the cache handlers, or `undefined` if they are not
|
||||
* initialized.
|
||||
*/
|
||||
export declare function getCacheHandlers(): SetIterator<CacheHandlerCompat> | undefined;
|
||||
/**
|
||||
* Get a map iterator over the cache handlers (keyed by kind).
|
||||
* @returns An iterator over the cache handler entries, or `undefined` if they
|
||||
* are not initialized.
|
||||
* @throws If the cache handlers are not initialized.
|
||||
*/
|
||||
export declare function getCacheHandlerEntries(): MapIterator<[string, CacheHandlerCompat]> | undefined;
|
||||
/**
|
||||
* Set a cache handler by kind.
|
||||
* @param kind - The kind of cache handler to set.
|
||||
* @param cacheHandler - The cache handler to set.
|
||||
*/
|
||||
export declare function setCacheHandler(kind: string, cacheHandler: CacheHandlerCompat): void;
|
||||
125
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.js
generated
vendored
Normal file
125
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
getCacheHandler: null,
|
||||
getCacheHandlerEntries: null,
|
||||
getCacheHandlers: null,
|
||||
initializeCacheHandlers: null,
|
||||
setCacheHandler: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
getCacheHandler: function() {
|
||||
return getCacheHandler;
|
||||
},
|
||||
getCacheHandlerEntries: function() {
|
||||
return getCacheHandlerEntries;
|
||||
},
|
||||
getCacheHandlers: function() {
|
||||
return getCacheHandlers;
|
||||
},
|
||||
initializeCacheHandlers: function() {
|
||||
return initializeCacheHandlers;
|
||||
},
|
||||
setCacheHandler: function() {
|
||||
return setCacheHandler;
|
||||
}
|
||||
});
|
||||
const _default = /*#__PURE__*/ _interop_require_default(require("../lib/cache-handlers/default"));
|
||||
function _interop_require_default(obj) {
|
||||
return obj && obj.__esModule ? obj : {
|
||||
default: obj
|
||||
};
|
||||
}
|
||||
const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE ? (message, ...args)=>{
|
||||
console.log(`use-cache: ${message}`, ...args);
|
||||
} : undefined;
|
||||
const handlersSymbol = Symbol.for('@next/cache-handlers');
|
||||
const handlersMapSymbol = Symbol.for('@next/cache-handlers-map');
|
||||
const handlersSetSymbol = Symbol.for('@next/cache-handlers-set');
|
||||
/**
|
||||
* The reference to the cache handlers. We store the cache handlers on the
|
||||
* global object so that we can access the same instance across different
|
||||
* boundaries (such as different copies of the same module).
|
||||
*/ const reference = globalThis;
|
||||
function initializeCacheHandlers() {
|
||||
// If the cache handlers have already been initialized, don't do it again.
|
||||
if (reference[handlersMapSymbol]) {
|
||||
debug == null ? void 0 : debug('cache handlers already initialized');
|
||||
return false;
|
||||
}
|
||||
debug == null ? void 0 : debug('initializing cache handlers');
|
||||
reference[handlersMapSymbol] = new Map();
|
||||
// Initialize the cache from the symbol contents first.
|
||||
if (reference[handlersSymbol]) {
|
||||
let fallback;
|
||||
if (reference[handlersSymbol].DefaultCache) {
|
||||
debug == null ? void 0 : debug('setting "default" cache handler from symbol');
|
||||
fallback = reference[handlersSymbol].DefaultCache;
|
||||
} else {
|
||||
debug == null ? void 0 : debug('setting "default" cache handler from default');
|
||||
fallback = _default.default;
|
||||
}
|
||||
reference[handlersMapSymbol].set('default', fallback);
|
||||
if (reference[handlersSymbol].RemoteCache) {
|
||||
debug == null ? void 0 : debug('setting "remote" cache handler from symbol');
|
||||
reference[handlersMapSymbol].set('remote', reference[handlersSymbol].RemoteCache);
|
||||
} else {
|
||||
debug == null ? void 0 : debug('setting "remote" cache handler from default');
|
||||
reference[handlersMapSymbol].set('remote', fallback);
|
||||
}
|
||||
} else {
|
||||
debug == null ? void 0 : debug('setting "default" cache handler from default');
|
||||
reference[handlersMapSymbol].set('default', _default.default);
|
||||
debug == null ? void 0 : debug('setting "remote" cache handler from default');
|
||||
reference[handlersMapSymbol].set('remote', _default.default);
|
||||
}
|
||||
// Create a set of the cache handlers.
|
||||
reference[handlersSetSymbol] = new Set(reference[handlersMapSymbol].values());
|
||||
return true;
|
||||
}
|
||||
function getCacheHandler(kind) {
|
||||
// This should never be called before initializeCacheHandlers.
|
||||
if (!reference[handlersMapSymbol]) {
|
||||
throw Object.defineProperty(new Error('Cache handlers not initialized'), "__NEXT_ERROR_CODE", {
|
||||
value: "E649",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return reference[handlersMapSymbol].get(kind);
|
||||
}
|
||||
function getCacheHandlers() {
|
||||
if (!reference[handlersSetSymbol]) {
|
||||
return undefined;
|
||||
}
|
||||
return reference[handlersSetSymbol].values();
|
||||
}
|
||||
function getCacheHandlerEntries() {
|
||||
if (!reference[handlersMapSymbol]) {
|
||||
return undefined;
|
||||
}
|
||||
return reference[handlersMapSymbol].entries();
|
||||
}
|
||||
function setCacheHandler(kind, cacheHandler) {
|
||||
// This should never be called before initializeCacheHandlers.
|
||||
if (!reference[handlersMapSymbol] || !reference[handlersSetSymbol]) {
|
||||
throw Object.defineProperty(new Error('Cache handlers not initialized'), "__NEXT_ERROR_CODE", {
|
||||
value: "E649",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
debug == null ? void 0 : debug('setting cache handler for "%s"', kind);
|
||||
reference[handlersMapSymbol].set(kind, cacheHandler);
|
||||
reference[handlersSetSymbol].add(cacheHandler);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=handlers.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/handlers.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.d.ts
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
declare const USE_CACHE_TIMEOUT_ERROR_CODE = "USE_CACHE_TIMEOUT";
|
||||
export declare class UseCacheTimeoutError extends Error {
|
||||
digest: typeof USE_CACHE_TIMEOUT_ERROR_CODE;
|
||||
constructor();
|
||||
}
|
||||
export declare function isUseCacheTimeoutError(err: unknown): err is UseCacheTimeoutError;
|
||||
export {};
|
||||
36
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.js
generated
vendored
Normal file
36
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
UseCacheTimeoutError: null,
|
||||
isUseCacheTimeoutError: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
UseCacheTimeoutError: function() {
|
||||
return UseCacheTimeoutError;
|
||||
},
|
||||
isUseCacheTimeoutError: function() {
|
||||
return isUseCacheTimeoutError;
|
||||
}
|
||||
});
|
||||
const USE_CACHE_TIMEOUT_ERROR_CODE = 'USE_CACHE_TIMEOUT';
|
||||
class UseCacheTimeoutError extends Error {
|
||||
constructor(){
|
||||
super('Filling a cache during prerender timed out, likely because request-specific arguments such as params, searchParams, cookies() or dynamic data were used inside "use cache".'), this.digest = USE_CACHE_TIMEOUT_ERROR_CODE;
|
||||
}
|
||||
}
|
||||
function isUseCacheTimeoutError(err) {
|
||||
if (typeof err !== 'object' || err === null || !('digest' in err) || typeof err.digest !== 'string') {
|
||||
return false;
|
||||
}
|
||||
return err.digest === USE_CACHE_TIMEOUT_ERROR_CODE;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=use-cache-errors.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-errors.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/use-cache/use-cache-errors.ts"],"sourcesContent":["const USE_CACHE_TIMEOUT_ERROR_CODE = 'USE_CACHE_TIMEOUT'\n\nexport class UseCacheTimeoutError extends Error {\n digest: typeof USE_CACHE_TIMEOUT_ERROR_CODE = USE_CACHE_TIMEOUT_ERROR_CODE\n\n constructor() {\n super(\n 'Filling a cache during prerender timed out, likely because request-specific arguments such as params, searchParams, cookies() or dynamic data were used inside \"use cache\".'\n )\n }\n}\n\nexport function isUseCacheTimeoutError(\n err: unknown\n): err is UseCacheTimeoutError {\n if (\n typeof err !== 'object' ||\n err === null ||\n !('digest' in err) ||\n typeof err.digest !== 'string'\n ) {\n return false\n }\n\n return err.digest === USE_CACHE_TIMEOUT_ERROR_CODE\n}\n"],"names":["UseCacheTimeoutError","isUseCacheTimeoutError","USE_CACHE_TIMEOUT_ERROR_CODE","Error","constructor","digest","err"],"mappings":";;;;;;;;;;;;;;;IAEaA,oBAAoB;eAApBA;;IAUGC,sBAAsB;eAAtBA;;;AAZhB,MAAMC,+BAA+B;AAE9B,MAAMF,6BAA6BG;IAGxCC,aAAc;QACZ,KAAK,CACH,qLAJJC,SAA8CH;IAM9C;AACF;AAEO,SAASD,uBACdK,GAAY;IAEZ,IACE,OAAOA,QAAQ,YACfA,QAAQ,QACR,CAAE,CAAA,YAAYA,GAAE,KAChB,OAAOA,IAAID,MAAM,KAAK,UACtB;QACA,OAAO;IACT;IAEA,OAAOC,IAAID,MAAM,KAAKH;AACxB"}
|
||||
8
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.d.ts
generated
vendored
Normal file
8
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { type SearchParams } from '../request/search-params';
|
||||
import type { Params } from '../request/params';
|
||||
export interface UseCachePageComponentProps {
|
||||
params: Promise<Params>;
|
||||
searchParams: Promise<SearchParams>;
|
||||
$$isPageComponent: true;
|
||||
}
|
||||
export declare function cache(kind: string, id: string, boundArgsLength: number, originalFn: (...args: unknown[]) => Promise<unknown>): (...args: any[]) => Promise<unknown>;
|
||||
669
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.js
generated
vendored
Normal file
669
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.js
generated
vendored
Normal file
@@ -0,0 +1,669 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "cache", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return cache;
|
||||
}
|
||||
});
|
||||
const _serveredge = require("react-server-dom-webpack/server.edge");
|
||||
const _clientedge = require("react-server-dom-webpack/client.edge");
|
||||
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
|
||||
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
|
||||
const _cleanasyncsnapshotexternal = require("../app-render/clean-async-snapshot.external");
|
||||
const _dynamicrenderingutils = require("../dynamic-rendering-utils");
|
||||
const _encryptionutils = require("../app-render/encryption-utils");
|
||||
const _encryption = require("../app-render/encryption");
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _createerrorhandler = require("../app-render/create-error-handler");
|
||||
const _constants = require("./constants");
|
||||
const _handlers = require("./handlers");
|
||||
const _usecacheerrors = require("./use-cache-errors");
|
||||
const _dynamicrendering = require("../app-render/dynamic-rendering");
|
||||
const _searchparams = require("../request/search-params");
|
||||
const _react = /*#__PURE__*/ _interop_require_default(require("react"));
|
||||
const _lazyresult = require("../lib/lazy-result");
|
||||
function _interop_require_default(obj) {
|
||||
return obj && obj.__esModule ? obj : {
|
||||
default: obj
|
||||
};
|
||||
}
|
||||
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
|
||||
const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE ? console.debug.bind(console, 'use-cache:') : undefined;
|
||||
function generateCacheEntry(workStore, outerWorkUnitStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
|
||||
// We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
|
||||
// generation cannot read anything from the context we're currently executing which
|
||||
// might include request specific things like cookies() inside a React.cache().
|
||||
// Note: It is important that we await at least once before this because it lets us
|
||||
// pop out of any stack specific contexts as well - aka "Sync" Local Storage.
|
||||
return (0, _cleanasyncsnapshotexternal.runInCleanSnapshot)(generateCacheEntryWithRestoredWorkStore, workStore, outerWorkUnitStore, clientReferenceManifest, encodedArguments, fn, timeoutError);
|
||||
}
|
||||
function generateCacheEntryWithRestoredWorkStore(workStore, outerWorkUnitStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
|
||||
// Since we cleared the AsyncLocalStorage we need to restore the workStore.
|
||||
// Note: We explicitly don't restore the RequestStore nor the PrerenderStore.
|
||||
// We don't want any request specific information leaking an we don't want to create a
|
||||
// bloated fake request mock for every cache call. So any feature that currently lives
|
||||
// in RequestStore but should be available to Caches need to move to WorkStore.
|
||||
// PrerenderStore is not needed inside the cache scope because the outer most one will
|
||||
// be the one to report its result to the outer Prerender.
|
||||
return _workasyncstorageexternal.workAsyncStorage.run(workStore, generateCacheEntryWithCacheContext, workStore, outerWorkUnitStore, clientReferenceManifest, encodedArguments, fn, timeoutError);
|
||||
}
|
||||
function generateCacheEntryWithCacheContext(workStore, outerWorkUnitStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
|
||||
if (!workStore.cacheLifeProfiles) {
|
||||
throw Object.defineProperty(new Error('cacheLifeProfiles should always be provided. This is a bug in Next.js.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E294",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const defaultCacheLife = workStore.cacheLifeProfiles['default'];
|
||||
if (!defaultCacheLife || defaultCacheLife.revalidate == null || defaultCacheLife.expire == null || defaultCacheLife.stale == null) {
|
||||
throw Object.defineProperty(new Error('A default cacheLife profile must always be provided. This is a bug in Next.js.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E520",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const useCacheOrRequestStore = (outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type) === 'request' || (outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type) === 'cache' ? outerWorkUnitStore : undefined;
|
||||
// Initialize the Store for this Cache entry.
|
||||
const cacheStore = {
|
||||
type: 'cache',
|
||||
phase: 'render',
|
||||
implicitTags: outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.implicitTags,
|
||||
revalidate: defaultCacheLife.revalidate,
|
||||
expire: defaultCacheLife.expire,
|
||||
stale: defaultCacheLife.stale,
|
||||
explicitRevalidate: undefined,
|
||||
explicitExpire: undefined,
|
||||
explicitStale: undefined,
|
||||
tags: null,
|
||||
hmrRefreshHash: outerWorkUnitStore && (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workStore, outerWorkUnitStore),
|
||||
isHmrRefresh: (useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.isHmrRefresh) ?? false,
|
||||
serverComponentsHmrCache: useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.serverComponentsHmrCache,
|
||||
forceRevalidate: shouldForceRevalidate(workStore, outerWorkUnitStore),
|
||||
draftMode: outerWorkUnitStore && (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, outerWorkUnitStore)
|
||||
};
|
||||
return _workunitasyncstorageexternal.workUnitAsyncStorage.run(cacheStore, generateCacheEntryImpl, workStore, outerWorkUnitStore, cacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError);
|
||||
}
|
||||
function propagateCacheLifeAndTags(workUnitStore, entry) {
|
||||
if (workUnitStore && (workUnitStore.type === 'cache' || workUnitStore.type === 'prerender' || workUnitStore.type === 'prerender-ppr' || workUnitStore.type === 'prerender-legacy')) {
|
||||
// Propagate tags and revalidate upwards
|
||||
const outerTags = workUnitStore.tags ?? (workUnitStore.tags = []);
|
||||
const entryTags = entry.tags;
|
||||
for(let i = 0; i < entryTags.length; i++){
|
||||
const tag = entryTags[i];
|
||||
if (!outerTags.includes(tag)) {
|
||||
outerTags.push(tag);
|
||||
}
|
||||
}
|
||||
if (workUnitStore.stale > entry.stale) {
|
||||
workUnitStore.stale = entry.stale;
|
||||
}
|
||||
if (workUnitStore.revalidate > entry.revalidate) {
|
||||
workUnitStore.revalidate = entry.revalidate;
|
||||
}
|
||||
if (workUnitStore.expire > entry.expire) {
|
||||
workUnitStore.expire = entry.expire;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
 * Drains `savedStream` (one half of the teed RSC render of a "use cache"
 * function) to completion, then packages the buffered chunks together with
 * the lifetimes and tags accumulated in `innerCacheStore` into a cache entry.
 *
 * Side effects: propagates tags/lifetimes to `outerWorkUnitStore`, ends a
 * pending prerender cacheSignal read, and clears the fill-timeout `timer`
 * (set in generateCacheEntryImpl).
 *
 * Returns the cache entry whose `value` is a replay stream of the buffer.
 */
async function collectResult(savedStream, workStore, outerWorkUnitStore, innerCacheStore, startTime, errors, timer) {
    // We create a buffered stream that collects all chunks until the end to
    // ensure that RSC has finished rendering and therefore we have collected
    // all tags. In the future the RSC API might allow for the equivalent of
    // the allReady Promise that exists on SSR streams.
    //
    // If something errored or rejected anywhere in the render, we close
    // the stream as errored. This lets a CacheHandler choose to save the
    // partial result up until that point for future hits for a while to avoid
    // unnecessary retries or not to retry. We use the end of the stream for
    // this to avoid another complicated side-channel. A receiver has to consider
    // that the stream might also error for other reasons anyway such as losing
    // connection.
    const buffer = [];
    const reader = savedStream.getReader();
    // Read the render to completion; only then are all tags/lifetimes known.
    for(let entry; !(entry = await reader.read()).done;){
        buffer.push(entry.value);
    }
    let idx = 0;
    // Replay stream over the buffered chunks. Errors are surfaced only after
    // all buffered chunks have been delivered (see comment above).
    const bufferStream = new ReadableStream({
        pull (controller) {
            if (workStore.invalidUsageError) {
                controller.error(workStore.invalidUsageError);
            } else if (idx < buffer.length) {
                controller.enqueue(buffer[idx++]);
            } else if (errors.length > 0) {
                // TODO: Should we use AggregateError here?
                controller.error(errors[0]);
            } else {
                controller.close();
            }
        }
    });
    const collectedTags = innerCacheStore.tags;
    // If cacheLife() was used to set an explicit revalidate time we use that.
    // Otherwise, we use the lowest of all inner fetch()/unstable_cache() or nested "use cache".
    // If they're lower than our default.
    const collectedRevalidate = innerCacheStore.explicitRevalidate !== undefined ? innerCacheStore.explicitRevalidate : innerCacheStore.revalidate;
    const collectedExpire = innerCacheStore.explicitExpire !== undefined ? innerCacheStore.explicitExpire : innerCacheStore.expire;
    const collectedStale = innerCacheStore.explicitStale !== undefined ? innerCacheStore.explicitStale : innerCacheStore.stale;
    const entry = {
        value: bufferStream,
        timestamp: startTime,
        revalidate: collectedRevalidate,
        expire: collectedExpire,
        stale: collectedStale,
        tags: collectedTags === null ? [] : collectedTags
    };
    // Propagate tags/revalidate to the parent context.
    propagateCacheLifeAndTags(outerWorkUnitStore, entry);
    // A prerender began a cacheSignal read when generation started; end it now
    // that the entry is fully collected.
    const cacheSignal = outerWorkUnitStore && outerWorkUnitStore.type === 'prerender' ? outerWorkUnitStore.cacheSignal : null;
    if (cacheSignal) {
        cacheSignal.endRead();
    }
    // Cancel the 50s prerender fill timeout now that the render finished.
    if (timer !== undefined) {
        clearTimeout(timer);
    }
    return entry;
}
|
||||
/**
 * Decodes the serialized cache-key arguments, invokes the wrapped "use cache"
 * function, and renders its result to a Flight (RSC) stream.
 *
 * Returns `[returnStream, promiseOfCacheEntry]`: the first half of a teed
 * stream is handed back to the caller for immediate streaming; the second is
 * collected by collectResult into a storable cache entry.
 */
async function generateCacheEntryImpl(workStore, outerWorkUnitStore, innerCacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
    const temporaryReferences = (0, _serveredge.createTemporaryReferenceSet)();
    // The cache key parts are [buildId, id, args(, hmrRefreshHash)]; only the
    // decoded args are needed here.
    const [, , args] = typeof encodedArguments === 'string' ? await (0, _serveredge.decodeReply)(encodedArguments, (0, _encryptionutils.getServerModuleMap)(), {
        temporaryReferences
    }) : await (0, _serveredge.decodeReplyFromAsyncIterable)({
        async *[Symbol.asyncIterator] () {
            for (const entry of encodedArguments){
                yield entry;
            }
            // The encoded arguments might contain hanging promises. In this
            // case we don't want to reject with "Error: Connection closed.",
            // so we intentionally keep the iterable alive. This is similar to
            // the halting trick that we do while rendering.
            if ((outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type) === 'prerender') {
                await new Promise((resolve)=>{
                    if (outerWorkUnitStore.renderSignal.aborted) {
                        resolve();
                    } else {
                        outerWorkUnitStore.renderSignal.addEventListener('abort', ()=>resolve(), {
                            once: true
                        });
                    }
                });
            }
        }
    }, (0, _encryptionutils.getServerModuleMap)(), {
        temporaryReferences
    });
    // Track the timestamp when we started computing the result.
    const startTime = performance.timeOrigin + performance.now();
    // Invoke the inner function to load a new result. We delay the invocation
    // though, until React awaits the promise so that React's request store (ALS)
    // is available when the function is invoked. This allows us, for example, to
    // capture logs so that we can later replay them.
    const resultPromise = (0, _lazyresult.createLazyResult)(()=>fn.apply(null, args));
    let errors = [];
    let timer = undefined;
    const controller = new AbortController();
    if ((outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type) === 'prerender') {
        // If we're prerendering, we give you 50 seconds to fill a cache entry.
        // Otherwise we assume you stalled on hanging input and de-opt. This needs
        // to be lower than just the general timeout of 60 seconds.
        timer = setTimeout(()=>{
            controller.abort(timeoutError);
        }, 50000);
    }
    const stream = (0, _serveredge.renderToReadableStream)(resultPromise, clientReferenceManifest.clientModules, {
        environmentName: 'Cache',
        signal: controller.signal,
        temporaryReferences,
        // In the "Cache" environment, we only need to make sure that the error
        // digests are handled correctly. Error formatting and reporting is not
        // necessary here; the errors are encoded in the stream, and will be
        // reported in the "Server" environment.
        onError: (error)=>{
            const digest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
            if (digest) {
                return digest;
            }
            if (process.env.NODE_ENV !== 'development') {
                // TODO: For now we're also reporting the error here, because in
                // production, the "Server" environment will only get the obfuscated
                // error (created by the Flight Client in the cache wrapper).
                console.error(error);
            }
            if (error === timeoutError) {
                // The timeout error already aborted the whole stream. We don't need
                // to also push this error into the `errors` array.
                return timeoutError.digest;
            }
            // Collected errors surface at the end of the buffered replay stream
            // (see collectResult).
            errors.push(error);
        }
    });
    // One half streams back to the caller, the other is buffered into the
    // cache entry.
    const [returnStream, savedStream] = stream.tee();
    const promiseOfCacheEntry = collectResult(savedStream, workStore, outerWorkUnitStore, innerCacheStore, startTime, errors, timer);
    // Return the stream as we're creating it. This means that if it ends up
    // erroring we cannot return a stale-while-error version but it allows
    // streaming back the result earlier.
    return [
        returnStream,
        promiseOfCacheEntry
    ];
}
|
||||
/**
 * Splits a cache entry into two independently consumable entries by teeing
 * its value stream. Mutates `entry` to hold one half and returns
 * `[entry, clonedEntry]`, where the clone shares all metadata.
 */
function cloneCacheEntry(entry) {
    const [originalHalf, duplicateHalf] = entry.value.tee();
    entry.value = originalHalf;
    return [
        entry,
        {
            value: duplicateHalf,
            timestamp: entry.timestamp,
            revalidate: entry.revalidate,
            expire: entry.expire,
            stale: entry.stale,
            tags: entry.tags
        }
    ];
}
|
||||
/**
 * Awaits a pending cache entry and splits it into two independently
 * consumable entries (see cloneCacheEntry).
 */
async function clonePendingCacheEntry(pendingCacheEntry) {
    return cloneCacheEntry(await pendingCacheEntry);
}
|
||||
/**
 * Resolves a pending `[entry, clonedEntry]` pair and returns the half at
 * index `i`.
 */
async function getNthCacheEntry(split, i) {
    const pair = await split;
    return pair[i];
}
|
||||
/**
 * Serializes a FormData payload into a single string cache key.
 *
 * Every key and value is length-prefixed (hex length + ':') so no delimiter
 * escaping is needed and two different structures can never serialize to the
 * same string. Non-string values are read as bytes and folded into a UCS-2
 * string, which may not be valid UTF-8 — consumers passing the key to
 * another service must account for that.
 */
async function encodeFormData(formData) {
    const pieces = [];
    for (const [fieldName, fieldValue] of formData){
        pieces.push(fieldName.length.toString(16), ':', fieldName);
        let serialized;
        if (typeof fieldValue === 'string') {
            serialized = fieldValue;
        } else {
            const bytes = await fieldValue.arrayBuffer();
            if (bytes.byteLength % 2 === 0) {
                // Even byte count: reinterpret the whole buffer as UTF-16 units.
                serialized = String.fromCodePoint(...new Uint16Array(bytes));
            } else {
                // Odd byte count: encode all but the trailing byte as UTF-16
                // units, then append the final byte as its own code point.
                const evenPortion = String.fromCodePoint(...new Uint16Array(bytes, 0, (bytes.byteLength - 1) / 2));
                const trailingByte = new Uint8Array(bytes, bytes.byteLength - 1, 1)[0];
                serialized = evenPortion + String.fromCodePoint(trailingByte);
            }
        }
        pieces.push(serialized.length.toString(16), ':', serialized);
    }
    return pieces.join('');
}
|
||||
/**
 * Wraps a ReadableStream so that `cacheSignal.endRead()` is called exactly
 * once — when the underlying stream has been fully consumed.
 */
function createTrackedReadableStream(stream, cacheSignal) {
    const reader = stream.getReader();
    return new ReadableStream({
        async pull (controller) {
            const chunk = await reader.read();
            if (!chunk.done) {
                controller.enqueue(chunk.value);
                return;
            }
            controller.close();
            cacheSignal.endRead();
        }
    });
}
|
||||
/**
 * Creates the cached wrapper for a "use cache" function.
 *
 * `kind` selects the registered cache handler, `id` is the function's action
 * ID, `boundArgsLength` is the number of encrypted closure-bound arguments
 * the wrapper must decrypt from the first call argument, and `originalFn` is
 * the user's function. The returned function is memoized per render via
 * React.cache and, on each call: builds a serialized cache key from
 * (buildId, id, args[, hmrRefreshHash]), consults the resume data cache and
 * the cache handler, and either replays a cached RSC stream or generates
 * (and stores) a fresh entry.
 */
function cache(kind, id, boundArgsLength, originalFn) {
    const cacheHandler = (0, _handlers.getCacheHandler)(kind);
    if (cacheHandler === undefined) {
        throw Object.defineProperty(new Error('Unknown cache handler: ' + kind), "__NEXT_ERROR_CODE", {
            value: "E248",
            enumerable: false,
            configurable: true
        });
    }
    // Capture the timeout error here to ensure a useful stack.
    const timeoutError = new _usecacheerrors.UseCacheTimeoutError();
    Error.captureStackTrace(timeoutError, cache);
    // The wrapper is created under the user function's name (via a computed
    // property) so stack traces and fn.name are preserved.
    const name = originalFn.name;
    const cachedFn = {
        [name]: async function(...args) {
            const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
            if (workStore === undefined) {
                throw Object.defineProperty(new Error('"use cache" cannot be used outside of App Router. Expected a WorkStore.'), "__NEXT_ERROR_CODE", {
                    value: "E279",
                    enumerable: false,
                    configurable: true
                });
            }
            let fn = originalFn;
            const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
            // Get the clientReferenceManifest while we're still in the outer Context.
            // In case getClientReferenceManifestSingleton is implemented using AsyncLocalStorage.
            const clientReferenceManifest = (0, _encryptionutils.getClientReferenceManifestForRsc)();
            // Because the Action ID is not yet unique per implementation of that Action we can't
            // safely reuse the results across builds yet. In the meantime we add the buildId to the
            // arguments as a seed to ensure they're not reused. Remove this once Action IDs hash
            // the implementation.
            const buildId = workStore.buildId;
            // In dev mode, when the HMR refresh hash is set, we include it in the
            // cache key. This ensures that cache entries are not reused when server
            // components have been edited. This is a very coarse approach. But it's
            // also only a temporary solution until Action IDs are unique per
            // implementation. Remove this once Action IDs hash the implementation.
            const hmrRefreshHash = workUnitStore && (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workStore, workUnitStore);
            const hangingInputAbortSignal = (workUnitStore == null ? void 0 : workUnitStore.type) === 'prerender' ? (0, _dynamicrendering.createHangingInputAbortSignal)(workUnitStore) : undefined;
            // When dynamicIO is not enabled, we can not encode searchParams as
            // hanging promises. To still avoid unused search params from making a
            // page dynamic, we overwrite them here with a promise that resolves to an
            // empty object, while also overwriting the to-be-invoked function for
            // generating a cache entry with a function that creates an erroring
            // searchParams prop before invoking the original function. This ensures
            // that used searchParams inside of cached functions would still yield an
            // error.
            if (!workStore.dynamicIOEnabled && isPageComponent(args)) {
                const [{ params, searchParams }] = args;
                // Overwrite the props to omit $$isPageComponent.
                args = [
                    {
                        params,
                        searchParams
                    }
                ];
                fn = ({
                    [name]: async ({ params: serializedParams })=>originalFn.apply(null, [
                        {
                            params: serializedParams,
                            searchParams: (0, _searchparams.makeErroringExoticSearchParamsForUseCache)(workStore)
                        }
                    ])
                })[name];
            }
            // Closure-bound arguments arrive encrypted as the first argument;
            // decrypt and splice them back in before the cache key is built.
            if (boundArgsLength > 0) {
                if (args.length === 0) {
                    throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive its encrypted bound arguments as the first argument.`), "__NEXT_ERROR_CODE", {
                        value: "E524",
                        enumerable: false,
                        configurable: true
                    });
                }
                const encryptedBoundArgs = args.shift();
                const boundArgs = await (0, _encryption.decryptActionBoundArgs)(id, encryptedBoundArgs);
                if (!Array.isArray(boundArgs)) {
                    throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the bound arguments of "use cache" function ${JSON.stringify(fn.name)} to deserialize into an array, got ${typeof boundArgs} instead.`), "__NEXT_ERROR_CODE", {
                        value: "E581",
                        enumerable: false,
                        configurable: true
                    });
                }
                if (boundArgsLength !== boundArgs.length) {
                    throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive ${boundArgsLength} bound arguments, got ${boundArgs.length} instead.`), "__NEXT_ERROR_CODE", {
                        value: "E559",
                        enumerable: false,
                        configurable: true
                    });
                }
                args.unshift(boundArgs);
            }
            const temporaryReferences = (0, _clientedge.createTemporaryReferenceSet)();
            const cacheKeyParts = hmrRefreshHash ? [
                buildId,
                id,
                args,
                hmrRefreshHash
            ] : [
                buildId,
                id,
                args
            ];
            const encodedCacheKeyParts = await (0, _clientedge.encodeReply)(cacheKeyParts, {
                temporaryReferences,
                signal: hangingInputAbortSignal
            });
            const serializedCacheKey = typeof encodedCacheKeyParts === 'string' ? // Convert it to an ArrayBuffer if it wants to.
            encodedCacheKeyParts : await encodeFormData(encodedCacheKeyParts);
            let stream = undefined;
            // Get an immutable and mutable versions of the resume data cache.
            const prerenderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getPrerenderResumeDataCache)(workUnitStore) : null;
            const renderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getRenderResumeDataCache)(workUnitStore) : null;
            // First preference: an entry seeded in the render resume data cache
            // (e.g. from a preceding prerender).
            if (renderResumeDataCache) {
                const cacheSignal = workUnitStore && workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : null;
                if (cacheSignal) {
                    cacheSignal.beginRead();
                }
                const cachedEntry = renderResumeDataCache.cache.get(serializedCacheKey);
                if (cachedEntry !== undefined) {
                    const existingEntry = await cachedEntry;
                    propagateCacheLifeAndTags(workUnitStore, existingEntry);
                    if (workUnitStore !== undefined && workUnitStore.type === 'prerender' && existingEntry !== undefined && (existingEntry.revalidate === 0 || existingEntry.expire < _constants.DYNAMIC_EXPIRE)) {
                        // In a Dynamic I/O prerender, if the cache entry has revalidate: 0 or if the
                        // expire time is under 5 minutes, then we consider this cache entry dynamic
                        // as it's not worth generating static pages for such data. It's better to leave
                        // a PPR hole that can be filled in dynamically with a potentially cached entry.
                        if (cacheSignal) {
                            cacheSignal.endRead();
                        }
                        return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, 'dynamic "use cache"');
                    }
                    const [streamA, streamB] = existingEntry.value.tee();
                    existingEntry.value = streamB;
                    if (cacheSignal) {
                        // When we have a cacheSignal we need to block on reading the cache
                        // entry before ending the read.
                        stream = createTrackedReadableStream(streamA, cacheSignal);
                    } else {
                        stream = streamA;
                    }
                } else {
                    if (cacheSignal) {
                        cacheSignal.endRead();
                    }
                }
            }
            // Otherwise consult the cache handler, and on miss/staleness
            // generate a fresh entry.
            if (stream === undefined) {
                var _workUnitStore_implicitTags;
                const cacheSignal = workUnitStore && workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : null;
                if (cacheSignal) {
                    // Either the cache handler or the generation can be using I/O at this point.
                    // We need to track when they start and when they complete.
                    cacheSignal.beginRead();
                }
                const lazyRefreshTags = workStore.refreshTagsByCacheKind.get(kind);
                if (lazyRefreshTags && !(0, _lazyresult.isResolvedLazyResult)(lazyRefreshTags)) {
                    await lazyRefreshTags;
                }
                let entry = shouldForceRevalidate(workStore, workUnitStore) ? undefined : 'getExpiration' in cacheHandler ? await cacheHandler.get(serializedCacheKey) : // instead of checking their staleness here, as we do for modern
                // cache handlers (see below).
                await cacheHandler.get(serializedCacheKey, (workUnitStore == null ? void 0 : (_workUnitStore_implicitTags = workUnitStore.implicitTags) == null ? void 0 : _workUnitStore_implicitTags.tags) ?? []);
                if (entry) {
                    var _workUnitStore_implicitTags1;
                    const implicitTags = (workUnitStore == null ? void 0 : (_workUnitStore_implicitTags1 = workUnitStore.implicitTags) == null ? void 0 : _workUnitStore_implicitTags1.tags) ?? [];
                    let implicitTagsExpiration = 0;
                    if (workUnitStore == null ? void 0 : workUnitStore.implicitTags) {
                        const lazyExpiration = workUnitStore.implicitTags.expirationsByCacheKind.get(kind);
                        if (lazyExpiration) {
                            if ((0, _lazyresult.isResolvedLazyResult)(lazyExpiration)) {
                                implicitTagsExpiration = lazyExpiration.value;
                            } else {
                                implicitTagsExpiration = await lazyExpiration;
                            }
                        }
                    }
                    if (shouldDiscardCacheEntry(entry, workStore, implicitTags, implicitTagsExpiration)) {
                        debug == null ? void 0 : debug('discarding stale entry', serializedCacheKey);
                        entry = undefined;
                    }
                }
                const currentTime = performance.timeOrigin + performance.now();
                if (workUnitStore !== undefined && workUnitStore.type === 'prerender' && entry !== undefined && (entry.revalidate === 0 || entry.expire < _constants.DYNAMIC_EXPIRE)) {
                    // In a Dynamic I/O prerender, if the cache entry has revalidate: 0 or if the
                    // expire time is under 5 minutes, then we consider this cache entry dynamic
                    // as it's not worth generating static pages for such data. It's better to leave
                    // a PPR hole that can be filled in dynamically with a potentially cached entry.
                    if (cacheSignal) {
                        cacheSignal.endRead();
                    }
                    return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, 'dynamic "use cache"');
                } else if (entry === undefined || currentTime > entry.timestamp + entry.expire * 1000 || workStore.isStaticGeneration && currentTime > entry.timestamp + entry.revalidate * 1000) {
                    // Miss. Generate a new result.
                    // If the cache entry is stale and we're prerendering, we don't want to use the
                    // stale entry since it would unnecessarily need to shorten the lifetime of the
                    // prerender. We're not time constrained here so we can re-generated it now.
                    // We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
                    // generation cannot read anything from the context we're currently executing which
                    // might include request specific things like cookies() inside a React.cache().
                    // Note: It is important that we await at least once before this because it lets us
                    // pop out of any stack specific contexts as well - aka "Sync" Local Storage.
                    if (entry) {
                        if (currentTime > entry.timestamp + entry.expire * 1000) {
                            debug == null ? void 0 : debug('entry is expired', serializedCacheKey);
                        }
                        if (workStore.isStaticGeneration && currentTime > entry.timestamp + entry.revalidate * 1000) {
                            debug == null ? void 0 : debug('static generation, entry is stale', serializedCacheKey);
                        }
                    }
                    const [newStream, pendingCacheEntry] = await generateCacheEntry(workStore, workUnitStore, clientReferenceManifest, encodedCacheKeyParts, fn, timeoutError);
                    // When draft mode is enabled, we must not save the cache entry.
                    if (!workStore.isDraftMode) {
                        let savedCacheEntry;
                        if (prerenderResumeDataCache) {
                            // Create a clone that goes into the cache scope memory cache.
                            const split = clonePendingCacheEntry(pendingCacheEntry);
                            savedCacheEntry = getNthCacheEntry(split, 0);
                            prerenderResumeDataCache.cache.set(serializedCacheKey, getNthCacheEntry(split, 1));
                        } else {
                            savedCacheEntry = pendingCacheEntry;
                        }
                        const promise = cacheHandler.set(serializedCacheKey, savedCacheEntry);
                        workStore.pendingRevalidateWrites ??= [];
                        workStore.pendingRevalidateWrites.push(promise);
                    }
                    stream = newStream;
                } else {
                    propagateCacheLifeAndTags(workUnitStore, entry);
                    // We want to return this stream, even if it's stale.
                    stream = entry.value;
                    // If we have a cache scope, we need to clone the entry and set it on
                    // the inner cache scope.
                    if (prerenderResumeDataCache) {
                        const [entryLeft, entryRight] = cloneCacheEntry(entry);
                        if (cacheSignal) {
                            stream = createTrackedReadableStream(entryLeft.value, cacheSignal);
                        } else {
                            stream = entryLeft.value;
                        }
                        prerenderResumeDataCache.cache.set(serializedCacheKey, Promise.resolve(entryRight));
                    } else {
                        // If we're not regenerating we need to signal that we've finished
                        // putting the entry into the cache scope at this point. Otherwise we do
                        // that inside generateCacheEntry.
                        cacheSignal == null ? void 0 : cacheSignal.endRead();
                    }
                    if (currentTime > entry.timestamp + entry.revalidate * 1000) {
                        // If this is stale, and we're not in a prerender (i.e. this is dynamic render),
                        // then we should warm up the cache with a fresh revalidated entry.
                        const [ignoredStream, pendingCacheEntry] = await generateCacheEntry(workStore, undefined, clientReferenceManifest, encodedCacheKeyParts, fn, timeoutError);
                        let savedCacheEntry;
                        if (prerenderResumeDataCache) {
                            const split = clonePendingCacheEntry(pendingCacheEntry);
                            savedCacheEntry = getNthCacheEntry(split, 0);
                            prerenderResumeDataCache.cache.set(serializedCacheKey, getNthCacheEntry(split, 1));
                        } else {
                            savedCacheEntry = pendingCacheEntry;
                        }
                        const promise = cacheHandler.set(serializedCacheKey, savedCacheEntry);
                        if (!workStore.pendingRevalidateWrites) {
                            workStore.pendingRevalidateWrites = [];
                        }
                        workStore.pendingRevalidateWrites.push(promise);
                        await ignoredStream.cancel();
                    }
                }
            }
            // Logs are replayed even if it's a hit - to ensure we see them on the client eventually.
            // If we didn't then the client wouldn't see the logs if it was seeded from a prewarm that
            // never made it to the client. However, this also means that you see logs even when the
            // cached function isn't actually re-executed. We should instead ensure prewarms always
            // make it to the client. Another issue is that this will cause double logging in the
            // server terminal. Once while generating the cache entry and once when replaying it on
            // the server, which is required to pick it up for replaying again on the client.
            const replayConsoleLogs = true;
            const serverConsumerManifest = {
                // moduleLoading must be null because we don't want to trigger preloads of ClientReferences
                // to be added to the consumer. Instead, we'll wait for any ClientReference to be emitted
                // which themselves will handle the preloading.
                moduleLoading: null,
                moduleMap: isEdgeRuntime ? clientReferenceManifest.edgeRscModuleMapping : clientReferenceManifest.rscModuleMapping,
                serverModuleMap: (0, _encryptionutils.getServerModuleMap)()
            };
            return (0, _clientedge.createFromReadableStream)(stream, {
                serverConsumerManifest,
                temporaryReferences,
                replayConsoleLogs,
                environmentName: 'Cache'
            });
        }
    }[name];
    return _react.default.cache(cachedFn);
}
|
||||
/**
 * Detects whether `args` is a page-component invocation: a (props, ref) pair
 * with an undefined ref and props carrying the $$isPageComponent marker.
 * Returns the raw marker value (possibly undefined) on a matching shape,
 * preserving the original truthiness contract.
 */
function isPageComponent(args) {
    if (args.length !== 2) {
        return false;
    }
    const [props, ref] = args;
    if (ref !== undefined) {
        // Server components receive an undefined ref argument.
        return false;
    }
    if (props === null || typeof props !== 'object') {
        return false;
    }
    return props.$$isPageComponent;
}
|
||||
/**
 * Decides whether a cached entry must be bypassed and regenerated:
 * always for on-demand revalidation or draft mode; in development also for a
 * hard refresh (cache-control: no-cache) or a parent cache scope that is
 * itself force-revalidating.
 */
function shouldForceRevalidate(workStore, workUnitStore) {
    if (workStore.isOnDemandRevalidate || workStore.isDraftMode) {
        return true;
    }
    if (workStore.dev && workUnitStore) {
        switch(workUnitStore.type){
            case 'request':
                return workUnitStore.headers.get('cache-control') === 'no-cache';
            case 'cache':
                return workUnitStore.forceRevalidate;
        }
    }
    return false;
}
|
||||
/**
 * Determines whether a cache-handler entry must be discarded because a
 * relevant tag was revalidated more recently than the handler may know:
 * either one of the entry's own tags, or an implicit tag — by recency of
 * revalidation or by the entry predating the implicit tags' last expiration.
 */
function shouldDiscardCacheEntry(entry, workStore, implicitTags, implicitTagsExpiration) {
    const isStaleTag = (tag)=>isRecentlyRevalidatedTag(tag, workStore);
    // Entry carries a tag that was revalidated but may not have reached the
    // cache handler yet.
    if (entry.tags.some(isStaleTag)) {
        return true;
    }
    // Entry was created before any of the implicit tags were last revalidated.
    if (entry.timestamp <= implicitTagsExpiration) {
        debug?.('entry was created at', entry.timestamp, 'before implicit tags were revalidated at', implicitTagsExpiration);
        return true;
    }
    // An implicit tag itself was recently revalidated.
    if (implicitTags.some(isStaleTag)) {
        return true;
    }
    return false;
}
|
||||
/**
 * Reports whether `tag` was revalidated within the current request cycle —
 * either earlier (e.g. by a redirecting server action) or by the currently
 * running server action, whose revalidation may not have been propagated to
 * the cache handler yet and is therefore read from the pending set.
 */
function isRecentlyRevalidatedTag(tag, workStore) {
    const { previouslyRevalidatedTags, pendingRevalidatedTags } = workStore;
    if (previouslyRevalidatedTags.includes(tag)) {
        debug?.('tag', tag, 'was previously revalidated');
        return true;
    }
    if (pendingRevalidatedTags?.includes(tag)) {
        debug?.('tag', tag, 'was just revalidated');
        return true;
    }
    return false;
}
|
||||
|
||||
//# sourceMappingURL=use-cache-wrapper.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/server/use-cache/use-cache-wrapper.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user