Initial commit

This commit is contained in:
bilulib
2025-04-13 00:18:57 +02:00
parent cff009bb7c
commit d894249e61
18301 changed files with 2905442 additions and 3845 deletions

View File

@@ -0,0 +1,48 @@
import type { CacheEntry } from '../lib/cache-handlers/types';
import type { CachedFetchValue } from '../response-cache/types';
/**
* A generic cache store type that provides a subset of Map functionality
* (iteration, size, get, and set), keyed by string cache keys.
*/
type CacheStore<T> = Pick<Map<string, T>, 'entries' | 'keys' | 'size' | 'get' | 'set'>;
/**
* A cache store specifically for fetch cache values
*/
export type FetchCacheStore = CacheStore<CachedFetchValue>;
/**
* A cache store for encrypted bound args of inline server functions.
*/
export type EncryptedBoundArgsCacheStore = CacheStore<string>;
/**
* An in-memory-only cache store for decrypted bound args of inline server
* functions.
*/
export type DecryptedBoundArgsCacheStore = CacheStore<string>;
/**
* Serialized format for "use cache" entries
*/
export interface UseCacheCacheStoreSerialized {
/** Base64-encoded cache entry payload. */
value: string;
/** Cache tags associated with the entry. */
tags: string[];
/** Staleness threshold — NOTE(review): units (seconds vs ms) not visible here; confirm against the producer. */
stale: number;
/** Time the entry was recorded. */
timestamp: number;
/** Expiration threshold for the entry. */
expire: number;
/** Revalidation threshold for the entry. */
revalidate: number;
}
/**
* A cache store specifically for "use cache" values that stores promises of
* cache entries.
*/
export type UseCacheCacheStore = CacheStore<Promise<CacheEntry>>;
/**
* Parses serialized cache entries into a UseCacheCacheStore
* @param entries - The serialized entries to parse
* @returns A new UseCacheCacheStore containing the parsed entries
*/
export declare function parseUseCacheCacheStore(entries: Iterable<[string, UseCacheCacheStoreSerialized]>): UseCacheCacheStore;
/**
* Serializes UseCacheCacheStore entries into an array of key-value pairs
* @param entries - The store entries to stringify
* @returns A promise that resolves to an array of key-value pairs with
* serialized values; a `null` element marks an entry whose serialization
* failed and was skipped
*/
export declare function serializeUseCacheCacheStore(entries: IterableIterator<[string, Promise<CacheEntry>]>): Promise<Array<[string, UseCacheCacheStoreSerialized] | null>>;
export {};

View File

@@ -0,0 +1,78 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// NOTE(review): generated export-name stub. `0 && (...)` short-circuits, so
// this never executes at runtime; presumably it lets static analyzers and
// bundlers discover the module's export names — confirm against the build
// tooling before changing.
0 && (module.exports = {
parseUseCacheCacheStore: null,
serializeUseCacheCacheStore: null
});
// Defines every key of `all` on `target` as an enumerable getter property.
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Register the public exports as lazy getters.
_export(exports, {
parseUseCacheCacheStore: function() {
return parseUseCacheCacheStore;
},
serializeUseCacheCacheStore: function() {
return serializeUseCacheCacheStore;
}
});
const _encryptionutils = require("../app-render/encryption-utils");
// Rebuilds a "use cache" store from serialized entries. Each entry's base64
// payload is rehydrated into a single-chunk ReadableStream, and the resulting
// cache entry is stored as an already-resolved promise.
function parseUseCacheCacheStore(entries) {
    const store = new Map();
    for (const [cacheKey, serialized] of entries) {
        const { value, tags, stale, timestamp, expire, revalidate } = serialized;
        // Decode the base64 payload and expose it as a one-chunk stream.
        const stream = new ReadableStream({
            start(controller) {
                const bytes = (0, _encryptionutils.stringToUint8Array)(atob(value));
                controller.enqueue(bytes);
                controller.close();
            }
        });
        store.set(cacheKey, Promise.resolve({
            value: stream,
            tags,
            stale,
            timestamp,
            expire,
            revalidate
        }));
    }
    return store;
}
// Serializes "use cache" store entries into [key, serialized] pairs. A failed
// entry yields `null` instead of rejecting, so one bad entry never discards
// the whole cache.
async function serializeUseCacheCacheStore(entries) {
    const pending = Array.from(entries, ([key, entryPromise]) => {
        return entryPromise
            .then(async (entry) => {
                // Tee the stream: one branch is consumed for serialization,
                // the other replaces the entry's value so it stays readable.
                const [consumed, retained] = entry.value.tee();
                entry.value = retained;
                // The stream may or may not contain UTF-8 text, so accumulate
                // it as a binary string and base64-encode the result.
                let binaryString = '';
                for await (const chunk of consumed) {
                    binaryString += (0, _encryptionutils.arrayBufferToString)(chunk);
                }
                const serialized = {
                    value: btoa(binaryString),
                    tags: entry.tags,
                    stale: entry.stale,
                    timestamp: entry.timestamp,
                    expire: entry.expire,
                    revalidate: entry.revalidate
                };
                return [key, serialized];
            })
            .catch(() => {
                // Ignore failed cache writes rather than failing the batch.
                return null;
            });
    });
    return Promise.all(pending);
}
//# sourceMappingURL=cache-store.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,89 @@
import { type UseCacheCacheStore, type FetchCacheStore, type EncryptedBoundArgsCacheStore, type DecryptedBoundArgsCacheStore } from './cache-store';
/**
* An immutable version of the resume data cache used during rendering.
* This cache is read-only and cannot be modified once created.
*/
export interface RenderResumeDataCache {
/**
* A read-only Map store for values cached by the 'use cache' React hook.
* The 'set' operation is omitted to enforce immutability.
*/
readonly cache: Omit<UseCacheCacheStore, 'set'>;
/**
* A read-only Map store for cached fetch responses.
* The 'set' operation is omitted to enforce immutability.
*/
readonly fetch: Omit<FetchCacheStore, 'set'>;
/**
* A read-only Map store for encrypted bound args of inline server functions.
* The 'set' operation is omitted to enforce immutability.
*/
readonly encryptedBoundArgs: Omit<EncryptedBoundArgsCacheStore, 'set'>;
/**
* A read-only Map store for decrypted bound args of inline server functions.
* This is only intended for in-memory usage during pre-rendering, and must
* not be persisted in the resume store. The 'set' operation is omitted to
* enforce immutability.
*/
readonly decryptedBoundArgs: Omit<DecryptedBoundArgsCacheStore, 'set'>;
}
/**
* A mutable version of the resume data cache used during pre-rendering.
* This cache allows both reading and writing of cached values.
*/
export interface PrerenderResumeDataCache {
/**
* A mutable Map store for values cached by the 'use cache' React hook.
* Supports both 'get' and 'set' operations to build the cache during
* pre-rendering.
*/
readonly cache: UseCacheCacheStore;
/**
* A mutable Map store for cached fetch responses.
* Supports both 'get' and 'set' operations to build the cache during
* pre-rendering.
*/
readonly fetch: FetchCacheStore;
/**
* A mutable Map store for encrypted bound args of inline server functions.
* Supports both 'get' and 'set' operations to build the cache during
* pre-rendering.
*/
readonly encryptedBoundArgs: EncryptedBoundArgsCacheStore;
/**
* A mutable Map store for decrypted bound args of inline server functions.
* This is only intended for in-memory usage during pre-rendering, and must
* not be persisted in the resume store. Supports both 'get' and 'set'
* operations to build the cache during pre-rendering.
*/
readonly decryptedBoundArgs: DecryptedBoundArgsCacheStore;
}
/**
* Serializes a resume data cache into a JSON string for storage or
* transmission. Handles 'use cache' values, fetch responses, and encrypted
* bound args for inline server functions.
*
* @param resumeDataCache - The immutable cache to serialize
* @returns A Promise that resolves to the serialized cache as a JSON string, or
* 'null' if empty
* @throws InvariantError if called in the edge runtime
*/
export declare function stringifyResumeDataCache(resumeDataCache: RenderResumeDataCache | PrerenderResumeDataCache): Promise<string>;
/**
* Creates a new empty mutable resume data cache for pre-rendering.
* Initializes fresh Map instances for both the 'use cache' and fetch caches.
* Used at the start of pre-rendering to begin collecting cached values.
*
* @returns A new empty PrerenderResumeDataCache instance
*/
export declare function createPrerenderResumeDataCache(): PrerenderResumeDataCache;
/**
* Creates an immutable render resume data cache from either:
* 1. An existing prerender cache instance
* 2. A serialized cache string
*
* @param prerenderResumeDataCache - A PrerenderResumeDataCache instance to convert to immutable
* @param persistedCache - A serialized cache string to parse
* @returns An immutable RenderResumeDataCache instance
* @throws InvariantError if called in the edge runtime
*/
export declare function createRenderResumeDataCache(prerenderResumeDataCache: PrerenderResumeDataCache): RenderResumeDataCache;
export declare function createRenderResumeDataCache(persistedCache: string): RenderResumeDataCache;

View File

@@ -0,0 +1,96 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// NOTE(review): generated export-name stub. `0 && (...)` short-circuits, so
// this never executes at runtime; presumably it lets static analyzers and
// bundlers discover the module's export names — confirm against the build
// tooling before changing.
0 && (module.exports = {
createPrerenderResumeDataCache: null,
createRenderResumeDataCache: null,
stringifyResumeDataCache: null
});
// Defines every key of `all` on `target` as an enumerable getter property.
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Register the public exports as lazy getters.
_export(exports, {
createPrerenderResumeDataCache: function() {
return createPrerenderResumeDataCache;
},
createRenderResumeDataCache: function() {
return createRenderResumeDataCache;
},
stringifyResumeDataCache: function() {
return stringifyResumeDataCache;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _cachestore = require("./cache-store");
// Serializes a resume data cache ('use cache' values, fetch responses, and
// encrypted bound args) into a deflate-compressed, base64-encoded JSON
// string. Returns the literal string 'null' when the cache is empty.
// Must not be called in the edge runtime (no node:zlib there).
async function stringifyResumeDataCache(resumeDataCache) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        const error = new _invarianterror.InvariantError('`stringifyResumeDataCache` should not be called in edge runtime.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E602",
            enumerable: false,
            configurable: true
        });
    }
    // Nothing cached — use the literal string 'null' as the empty sentinel.
    if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) {
        return 'null';
    }
    const serializedCache = await (0, _cachestore.serializeUseCacheCacheStore)(resumeDataCache.cache.entries());
    const json = {
        store: {
            fetch: Object.fromEntries(resumeDataCache.fetch.entries()),
            // Drop entries whose serialization failed (marked as null).
            cache: Object.fromEntries(serializedCache.filter((entry) => entry !== null)),
            encryptedBoundArgs: Object.fromEntries(resumeDataCache.encryptedBoundArgs.entries())
        }
    };
    // The payload is already fully in memory, so synchronous compression
    // is fine here.
    const { deflateSync } = require('node:zlib');
    return deflateSync(JSON.stringify(json)).toString('base64');
}
// Creates a fresh, fully mutable resume data cache with empty stores for a
// new pre-render pass.
function createPrerenderResumeDataCache() {
    const cache = new Map();
    const fetch = new Map();
    const encryptedBoundArgs = new Map();
    const decryptedBoundArgs = new Map();
    return { cache, fetch, encryptedBoundArgs, decryptedBoundArgs };
}
// Builds a render-time (read-only by type) resume data cache from either an
// existing prerender cache object or a persisted, compressed cache string.
// Must not be called in the edge runtime (no node:zlib there).
function createRenderResumeDataCache(prerenderResumeDataCacheOrPersistedCache) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        const error = new _invarianterror.InvariantError('`createRenderResumeDataCache` should not be called in edge runtime.');
        throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
            value: "E556",
            enumerable: false,
            configurable: true
        });
    }
    const input = prerenderResumeDataCacheOrPersistedCache;
    if (typeof input !== 'string') {
        // Already a prerender cache object — only the static type changes.
        return input;
    }
    if (input === 'null') {
        // The empty sentinel: hand back a cache with fresh, empty stores.
        return {
            cache: new Map(),
            fetch: new Map(),
            encryptedBoundArgs: new Map(),
            decryptedBoundArgs: new Map()
        };
    }
    // Otherwise the string is deflate-compressed, base64-encoded JSON;
    // decompress synchronously since it is already fully in memory.
    const { inflateSync } = require('node:zlib');
    const decompressed = inflateSync(Buffer.from(input, 'base64')).toString('utf-8');
    const json = JSON.parse(decompressed);
    return {
        cache: (0, _cachestore.parseUseCacheCacheStore)(Object.entries(json.store.cache)),
        fetch: new Map(Object.entries(json.store.fetch)),
        encryptedBoundArgs: new Map(Object.entries(json.store.encryptedBoundArgs)),
        // Decrypted bound args are in-memory only and never persisted.
        decryptedBoundArgs: new Map()
    };
}
//# sourceMappingURL=resume-data-cache.js.map

File diff suppressed because one or more lines are too long