Initial commit
This commit is contained in:
1
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.d.ts
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export * from 'react-dom/server.edge'
|
||||
14
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.js
generated
vendored
Normal file
14
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
let ReactDOMServer;
|
||||
try {
|
||||
ReactDOMServer = require('react-dom/server.edge');
|
||||
} catch (error) {
|
||||
if (error.code !== 'MODULE_NOT_FOUND' && error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED') {
|
||||
throw error;
|
||||
}
|
||||
// In React versions without react-dom/server.edge, the browser build works in Node.js.
|
||||
// The Node.js build does not support renderToReadableStream.
|
||||
ReactDOMServer = require('react-dom/server.browser');
|
||||
}
|
||||
module.exports = ReactDOMServer;
|
||||
|
||||
//# sourceMappingURL=ReactDOMServerPages.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/ReactDOMServerPages.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../src/server/ReactDOMServerPages.js"],"sourcesContent":["let ReactDOMServer\n\ntry {\n ReactDOMServer = require('react-dom/server.edge')\n} catch (error) {\n if (\n error.code !== 'MODULE_NOT_FOUND' &&\n error.code !== 'ERR_PACKAGE_PATH_NOT_EXPORTED'\n ) {\n throw error\n }\n // In React versions without react-dom/server.edge, the browser build works in Node.js.\n // The Node.js build does not support renderToReadableStream.\n ReactDOMServer = require('react-dom/server.browser')\n}\n\nmodule.exports = ReactDOMServer\n"],"names":["ReactDOMServer","require","error","code","module","exports"],"mappings":"AAAA,IAAIA;AAEJ,IAAI;IACFA,iBAAiBC,QAAQ;AAC3B,EAAE,OAAOC,OAAO;IACd,IACEA,MAAMC,IAAI,KAAK,sBACfD,MAAMC,IAAI,KAAK,iCACf;QACA,MAAMD;IACR;IACA,uFAAuF;IACvF,6DAA6D;IAC7DF,iBAAiBC,QAAQ;AAC3B;AAEAG,OAAOC,OAAO,GAAGL"}
|
||||
122
frontend/webapp/node_modules/next/dist/esm/server/accept-header.js
generated
vendored
Normal file
122
frontend/webapp/node_modules/next/dist/esm/server/accept-header.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
function parse(raw, preferences, options) {
|
||||
const lowers = new Map();
|
||||
const header = raw.replace(/[ \t]/g, '');
|
||||
if (preferences) {
|
||||
let pos = 0;
|
||||
for (const preference of preferences){
|
||||
const lower = preference.toLowerCase();
|
||||
lowers.set(lower, {
|
||||
orig: preference,
|
||||
pos: pos++
|
||||
});
|
||||
if (options.prefixMatch) {
|
||||
const parts = lower.split('-');
|
||||
while(parts.pop(), parts.length > 0){
|
||||
const joined = parts.join('-');
|
||||
if (!lowers.has(joined)) {
|
||||
lowers.set(joined, {
|
||||
orig: preference,
|
||||
pos: pos++
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const parts = header.split(',');
|
||||
const selections = [];
|
||||
const map = new Set();
|
||||
for(let i = 0; i < parts.length; ++i){
|
||||
const part = parts[i];
|
||||
if (!part) {
|
||||
continue;
|
||||
}
|
||||
const params = part.split(';');
|
||||
if (params.length > 2) {
|
||||
throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
|
||||
value: "E77",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
let token = params[0].toLowerCase();
|
||||
if (!token) {
|
||||
throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
|
||||
value: "E77",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const selection = {
|
||||
token,
|
||||
pos: i,
|
||||
q: 1
|
||||
};
|
||||
if (preferences && lowers.has(token)) {
|
||||
selection.pref = lowers.get(token).pos;
|
||||
}
|
||||
map.add(selection.token);
|
||||
if (params.length === 2) {
|
||||
const q = params[1];
|
||||
const [key, value] = q.split('=');
|
||||
if (!value || key !== 'q' && key !== 'Q') {
|
||||
throw Object.defineProperty(new Error(`Invalid ${options.type} header`), "__NEXT_ERROR_CODE", {
|
||||
value: "E77",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const score = parseFloat(value);
|
||||
if (score === 0) {
|
||||
continue;
|
||||
}
|
||||
if (Number.isFinite(score) && score <= 1 && score >= 0.001) {
|
||||
selection.q = score;
|
||||
}
|
||||
}
|
||||
selections.push(selection);
|
||||
}
|
||||
selections.sort((a, b)=>{
|
||||
if (b.q !== a.q) {
|
||||
return b.q - a.q;
|
||||
}
|
||||
if (b.pref !== a.pref) {
|
||||
if (a.pref === undefined) {
|
||||
return 1;
|
||||
}
|
||||
if (b.pref === undefined) {
|
||||
return -1;
|
||||
}
|
||||
return a.pref - b.pref;
|
||||
}
|
||||
return a.pos - b.pos;
|
||||
});
|
||||
const values = selections.map((selection)=>selection.token);
|
||||
if (!preferences || !preferences.length) {
|
||||
return values;
|
||||
}
|
||||
const preferred = [];
|
||||
for (const selection of values){
|
||||
if (selection === '*') {
|
||||
for (const [preference, value] of lowers){
|
||||
if (!map.has(preference)) {
|
||||
preferred.push(value.orig);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const lower = selection.toLowerCase();
|
||||
if (lowers.has(lower)) {
|
||||
preferred.push(lowers.get(lower).orig);
|
||||
}
|
||||
}
|
||||
}
|
||||
return preferred;
|
||||
}
|
||||
export function acceptLanguage(header = '', preferences) {
|
||||
return parse(header, preferences, {
|
||||
type: 'accept-language',
|
||||
prefixMatch: true
|
||||
})[0] || '';
|
||||
}
|
||||
|
||||
//# sourceMappingURL=accept-header.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/accept-header.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/accept-header.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
123
frontend/webapp/node_modules/next/dist/esm/server/after/after-context.js
generated
vendored
Normal file
123
frontend/webapp/node_modules/next/dist/esm/server/after/after-context.js
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
import PromiseQueue from 'next/dist/compiled/p-queue';
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
import { isThenable } from '../../shared/lib/is-thenable';
|
||||
import { workAsyncStorage } from '../app-render/work-async-storage.external';
|
||||
import { withExecuteRevalidates } from '../revalidation-utils';
|
||||
import { bindSnapshot } from '../app-render/async-local-storage';
|
||||
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';
|
||||
import { afterTaskAsyncStorage } from '../app-render/after-task-async-storage.external';
|
||||
export class AfterContext {
|
||||
constructor({ waitUntil, onClose, onTaskError }){
|
||||
this.workUnitStores = new Set();
|
||||
this.waitUntil = waitUntil;
|
||||
this.onClose = onClose;
|
||||
this.onTaskError = onTaskError;
|
||||
this.callbackQueue = new PromiseQueue();
|
||||
this.callbackQueue.pause();
|
||||
}
|
||||
after(task) {
|
||||
if (isThenable(task)) {
|
||||
if (!this.waitUntil) {
|
||||
errorWaitUntilNotAvailable();
|
||||
}
|
||||
this.waitUntil(task.catch((error)=>this.reportTaskError('promise', error)));
|
||||
} else if (typeof task === 'function') {
|
||||
// TODO(after): implement tracing
|
||||
this.addCallback(task);
|
||||
} else {
|
||||
throw Object.defineProperty(new Error('`after()`: Argument must be a promise or a function'), "__NEXT_ERROR_CODE", {
|
||||
value: "E50",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
addCallback(callback) {
|
||||
// if something is wrong, throw synchronously, bubbling up to the `after` callsite.
|
||||
if (!this.waitUntil) {
|
||||
errorWaitUntilNotAvailable();
|
||||
}
|
||||
const workUnitStore = workUnitAsyncStorage.getStore();
|
||||
if (workUnitStore) {
|
||||
this.workUnitStores.add(workUnitStore);
|
||||
}
|
||||
const afterTaskStore = afterTaskAsyncStorage.getStore();
|
||||
// This is used for checking if request APIs can be called inside `after`.
|
||||
// Note that we need to check the phase in which the *topmost* `after` was called (which should be "action"),
|
||||
// not the current phase (which might be "after" if we're in a nested after).
|
||||
// Otherwise, we might allow `after(() => headers())`, but not `after(() => after(() => headers()))`.
|
||||
const rootTaskSpawnPhase = afterTaskStore ? afterTaskStore.rootTaskSpawnPhase // nested after
|
||||
: workUnitStore == null ? void 0 : workUnitStore.phase // topmost after
|
||||
;
|
||||
// this should only happen once.
|
||||
if (!this.runCallbacksOnClosePromise) {
|
||||
this.runCallbacksOnClosePromise = this.runCallbacksOnClose();
|
||||
this.waitUntil(this.runCallbacksOnClosePromise);
|
||||
}
|
||||
// Bind the callback to the current execution context (i.e. preserve all currently available ALS-es).
|
||||
// We do this because we want all of these to be equivalent in every regard except timing:
|
||||
// after(() => x())
|
||||
// after(x())
|
||||
// await x()
|
||||
const wrappedCallback = bindSnapshot(async ()=>{
|
||||
try {
|
||||
await afterTaskAsyncStorage.run({
|
||||
rootTaskSpawnPhase
|
||||
}, ()=>callback());
|
||||
} catch (error) {
|
||||
this.reportTaskError('function', error);
|
||||
}
|
||||
});
|
||||
this.callbackQueue.add(wrappedCallback);
|
||||
}
|
||||
async runCallbacksOnClose() {
|
||||
await new Promise((resolve)=>this.onClose(resolve));
|
||||
return this.runCallbacks();
|
||||
}
|
||||
async runCallbacks() {
|
||||
if (this.callbackQueue.size === 0) return;
|
||||
for (const workUnitStore of this.workUnitStores){
|
||||
workUnitStore.phase = 'after';
|
||||
}
|
||||
const workStore = workAsyncStorage.getStore();
|
||||
if (!workStore) {
|
||||
throw Object.defineProperty(new InvariantError('Missing workStore in AfterContext.runCallbacks'), "__NEXT_ERROR_CODE", {
|
||||
value: "E547",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return withExecuteRevalidates(workStore, ()=>{
|
||||
this.callbackQueue.start();
|
||||
return this.callbackQueue.onIdle();
|
||||
});
|
||||
}
|
||||
reportTaskError(taskKind, error) {
|
||||
// TODO(after): this is fine for now, but will need better intergration with our error reporting.
|
||||
// TODO(after): should we log this if we have a onTaskError callback?
|
||||
console.error(taskKind === 'promise' ? `A promise passed to \`after()\` rejected:` : `An error occurred in a function passed to \`after()\`:`, error);
|
||||
if (this.onTaskError) {
|
||||
// this is very defensive, but we really don't want anything to blow up in an error handler
|
||||
try {
|
||||
this.onTaskError == null ? void 0 : this.onTaskError.call(this, error);
|
||||
} catch (handlerError) {
|
||||
console.error(Object.defineProperty(new InvariantError('`onTaskError` threw while handling an error thrown from an `after` task', {
|
||||
cause: handlerError
|
||||
}), "__NEXT_ERROR_CODE", {
|
||||
value: "E569",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function errorWaitUntilNotAvailable() {
|
||||
throw Object.defineProperty(new Error('`after()` will not work correctly, because `waitUntil` is not available in the current environment.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E91",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=after-context.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/after-context.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/after-context.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
18
frontend/webapp/node_modules/next/dist/esm/server/after/after.js
generated
vendored
Normal file
18
frontend/webapp/node_modules/next/dist/esm/server/after/after.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import { workAsyncStorage } from '../app-render/work-async-storage.external';
|
||||
/**
|
||||
* This function allows you to schedule callbacks to be executed after the current request finishes.
|
||||
*/ export function after(task) {
|
||||
const workStore = workAsyncStorage.getStore();
|
||||
if (!workStore) {
|
||||
// TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore
|
||||
throw Object.defineProperty(new Error('`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'), "__NEXT_ERROR_CODE", {
|
||||
value: "E468",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const { afterContext } = workStore;
|
||||
return afterContext.after(task);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=after.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/after.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/after.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/after/after.ts"],"sourcesContent":["import { workAsyncStorage } from '../app-render/work-async-storage.external'\n\nexport type AfterTask<T = unknown> = Promise<T> | AfterCallback<T>\nexport type AfterCallback<T = unknown> = () => T | Promise<T>\n\n/**\n * This function allows you to schedule callbacks to be executed after the current request finishes.\n */\nexport function after<T>(task: AfterTask<T>): void {\n const workStore = workAsyncStorage.getStore()\n\n if (!workStore) {\n // TODO(after): the linked docs page talks about *dynamic* APIs, which after soon won't be anymore\n throw new Error(\n '`after` was called outside a request scope. Read more: https://nextjs.org/docs/messages/next-dynamic-api-wrong-context'\n )\n }\n\n const { afterContext } = workStore\n return afterContext.after(task)\n}\n"],"names":["workAsyncStorage","after","task","workStore","getStore","Error","afterContext"],"mappings":"AAAA,SAASA,gBAAgB,QAAQ,4CAA2C;AAK5E;;CAEC,GACD,OAAO,SAASC,MAASC,IAAkB;IACzC,MAAMC,YAAYH,iBAAiBI,QAAQ;IAE3C,IAAI,CAACD,WAAW;QACd,kGAAkG;QAClG,MAAM,qBAEL,CAFK,IAAIE,MACR,2HADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,MAAM,EAAEC,YAAY,EAAE,GAAGH;IACzB,OAAOG,aAAaL,KAAK,CAACC;AAC5B"}
|
||||
60
frontend/webapp/node_modules/next/dist/esm/server/after/awaiter.js
generated
vendored
Normal file
60
frontend/webapp/node_modules/next/dist/esm/server/after/awaiter.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
/**
|
||||
* Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).
|
||||
* Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --
|
||||
* if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,
|
||||
* that second promise will also be awaited.
|
||||
*/ export class AwaiterMulti {
|
||||
constructor({ onError } = {}){
|
||||
this.promises = new Set();
|
||||
this.waitUntil = (promise)=>{
|
||||
// if a promise settles before we await it, we should drop it --
|
||||
// storing them indefinitely could result in a memory leak.
|
||||
const cleanup = ()=>{
|
||||
this.promises.delete(promise);
|
||||
};
|
||||
promise.then(cleanup, (err)=>{
|
||||
cleanup();
|
||||
this.onError(err);
|
||||
});
|
||||
this.promises.add(promise);
|
||||
};
|
||||
this.onError = onError ?? console.error;
|
||||
}
|
||||
async awaiting() {
|
||||
while(this.promises.size > 0){
|
||||
const promises = Array.from(this.promises);
|
||||
this.promises.clear();
|
||||
await Promise.allSettled(promises);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Like {@link AwaiterMulti}, but can only be awaited once.
|
||||
* If {@link AwaiterOnce.waitUntil} is called after that, it will throw.
|
||||
*/ export class AwaiterOnce {
|
||||
constructor(options = {}){
|
||||
this.done = false;
|
||||
this.waitUntil = (promise)=>{
|
||||
if (this.done) {
|
||||
throw Object.defineProperty(new InvariantError('Cannot call waitUntil() on an AwaiterOnce that was already awaited'), "__NEXT_ERROR_CODE", {
|
||||
value: "E563",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return this.awaiter.waitUntil(promise);
|
||||
};
|
||||
this.awaiter = new AwaiterMulti(options);
|
||||
}
|
||||
async awaiting() {
|
||||
if (!this.pending) {
|
||||
this.pending = this.awaiter.awaiting().finally(()=>{
|
||||
this.done = true;
|
||||
});
|
||||
}
|
||||
return this.pending;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=awaiter.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/awaiter.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/awaiter.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/after/awaiter.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\n\n/**\n * Provides a `waitUntil` implementation which gathers promises to be awaited later (via {@link AwaiterMulti.awaiting}).\n * Unlike a simple `Promise.all`, {@link AwaiterMulti} works recursively --\n * if a promise passed to {@link AwaiterMulti.waitUntil} calls `waitUntil` again,\n * that second promise will also be awaited.\n */\nexport class AwaiterMulti {\n private promises: Set<Promise<unknown>> = new Set()\n private onError: (error: unknown) => void\n\n constructor({ onError }: { onError?: (error: unknown) => void } = {}) {\n this.onError = onError ?? console.error\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n // if a promise settles before we await it, we should drop it --\n // storing them indefinitely could result in a memory leak.\n const cleanup = () => {\n this.promises.delete(promise)\n }\n\n promise.then(cleanup, (err) => {\n cleanup()\n this.onError(err)\n })\n\n this.promises.add(promise)\n }\n\n public async awaiting(): Promise<void> {\n while (this.promises.size > 0) {\n const promises = Array.from(this.promises)\n this.promises.clear()\n await Promise.allSettled(promises)\n }\n }\n}\n\n/**\n * Like {@link AwaiterMulti}, but can only be awaited once.\n * If {@link AwaiterOnce.waitUntil} is called after that, it will throw.\n */\nexport class AwaiterOnce {\n private awaiter: AwaiterMulti\n private done: boolean = false\n private pending: Promise<void> | undefined\n\n constructor(options: { onError?: (error: unknown) => void } = {}) {\n this.awaiter = new AwaiterMulti(options)\n }\n\n public waitUntil = (promise: Promise<unknown>): void => {\n if (this.done) {\n throw new InvariantError(\n 'Cannot call waitUntil() on an AwaiterOnce that was already awaited'\n )\n }\n return this.awaiter.waitUntil(promise)\n }\n\n public async awaiting(): Promise<void> {\n if (!this.pending) 
{\n this.pending = this.awaiter.awaiting().finally(() => {\n this.done = true\n })\n }\n return this.pending\n }\n}\n"],"names":["InvariantError","AwaiterMulti","constructor","onError","promises","Set","waitUntil","promise","cleanup","delete","then","err","add","console","error","awaiting","size","Array","from","clear","Promise","allSettled","AwaiterOnce","options","done","awaiter","pending","finally"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AAEjE;;;;;CAKC,GACD,OAAO,MAAMC;IAIXC,YAAY,EAAEC,OAAO,EAA0C,GAAG,CAAC,CAAC,CAAE;aAH9DC,WAAkC,IAAIC;aAOvCC,YAAY,CAACC;YAClB,gEAAgE;YAChE,2DAA2D;YAC3D,MAAMC,UAAU;gBACd,IAAI,CAACJ,QAAQ,CAACK,MAAM,CAACF;YACvB;YAEAA,QAAQG,IAAI,CAACF,SAAS,CAACG;gBACrBH;gBACA,IAAI,CAACL,OAAO,CAACQ;YACf;YAEA,IAAI,CAACP,QAAQ,CAACQ,GAAG,CAACL;QACpB;QAhBE,IAAI,CAACJ,OAAO,GAAGA,WAAWU,QAAQC,KAAK;IACzC;IAiBA,MAAaC,WAA0B;QACrC,MAAO,IAAI,CAACX,QAAQ,CAACY,IAAI,GAAG,EAAG;YAC7B,MAAMZ,WAAWa,MAAMC,IAAI,CAAC,IAAI,CAACd,QAAQ;YACzC,IAAI,CAACA,QAAQ,CAACe,KAAK;YACnB,MAAMC,QAAQC,UAAU,CAACjB;QAC3B;IACF;AACF;AAEA;;;CAGC,GACD,OAAO,MAAMkB;IAKXpB,YAAYqB,UAAkD,CAAC,CAAC,CAAE;aAH1DC,OAAgB;aAOjBlB,YAAY,CAACC;YAClB,IAAI,IAAI,CAACiB,IAAI,EAAE;gBACb,MAAM,qBAEL,CAFK,IAAIxB,eACR,uEADI,qBAAA;2BAAA;gCAAA;kCAAA;gBAEN;YACF;YACA,OAAO,IAAI,CAACyB,OAAO,CAACnB,SAAS,CAACC;QAChC;QAVE,IAAI,CAACkB,OAAO,GAAG,IAAIxB,aAAasB;IAClC;IAWA,MAAaR,WAA0B;QACrC,IAAI,CAAC,IAAI,CAACW,OAAO,EAAE;YACjB,IAAI,CAACA,OAAO,GAAG,IAAI,CAACD,OAAO,CAACV,QAAQ,GAAGY,OAAO,CAAC;gBAC7C,IAAI,CAACH,IAAI,GAAG;YACd;QACF;QACA,OAAO,IAAI,CAACE,OAAO;IACrB;AACF"}
|
||||
19
frontend/webapp/node_modules/next/dist/esm/server/after/builtin-request-context.js
generated
vendored
Normal file
19
frontend/webapp/node_modules/next/dist/esm/server/after/builtin-request-context.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { createAsyncLocalStorage } from '../app-render/async-local-storage';
|
||||
export function getBuiltinRequestContext() {
|
||||
const _globalThis = globalThis;
|
||||
const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL];
|
||||
return ctx == null ? void 0 : ctx.get();
|
||||
}
|
||||
const NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context');
|
||||
/** "@next/request-context" has a different signature from AsyncLocalStorage,
|
||||
* matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).
|
||||
* We don't need a full AsyncContext adapter here, just having `.get()` is enough
|
||||
*/ export function createLocalRequestContext() {
|
||||
const storage = createAsyncLocalStorage();
|
||||
return {
|
||||
get: ()=>storage.getStore(),
|
||||
run: (value, callback)=>storage.run(value, callback)
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=builtin-request-context.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/builtin-request-context.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/builtin-request-context.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/after/builtin-request-context.ts"],"sourcesContent":["import { createAsyncLocalStorage } from '../app-render/async-local-storage'\n\nexport function getBuiltinRequestContext():\n | BuiltinRequestContextValue\n | undefined {\n const _globalThis = globalThis as GlobalThisWithRequestContext\n const ctx = _globalThis[NEXT_REQUEST_CONTEXT_SYMBOL]\n return ctx?.get()\n}\n\nconst NEXT_REQUEST_CONTEXT_SYMBOL = Symbol.for('@next/request-context')\n\ntype GlobalThisWithRequestContext = typeof globalThis & {\n [NEXT_REQUEST_CONTEXT_SYMBOL]?: BuiltinRequestContext\n}\n\n/** A request context provided by the platform. */\nexport type BuiltinRequestContext = {\n get(): BuiltinRequestContextValue | undefined\n}\n\nexport type RunnableBuiltinRequestContext = BuiltinRequestContext & {\n run<T>(value: BuiltinRequestContextValue, callback: () => T): T\n}\n\nexport type BuiltinRequestContextValue = {\n waitUntil?: WaitUntil\n}\nexport type WaitUntil = (promise: Promise<any>) => void\n\n/** \"@next/request-context\" has a different signature from AsyncLocalStorage,\n * matching [AsyncContext.Variable](https://github.com/tc39/proposal-async-context).\n * We don't need a full AsyncContext adapter here, just having `.get()` is enough\n */\nexport function createLocalRequestContext(): RunnableBuiltinRequestContext {\n const storage = createAsyncLocalStorage<BuiltinRequestContextValue>()\n return {\n get: () => storage.getStore(),\n run: (value, callback) => storage.run(value, callback),\n 
}\n}\n"],"names":["createAsyncLocalStorage","getBuiltinRequestContext","_globalThis","globalThis","ctx","NEXT_REQUEST_CONTEXT_SYMBOL","get","Symbol","for","createLocalRequestContext","storage","getStore","run","value","callback"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,oCAAmC;AAE3E,OAAO,SAASC;IAGd,MAAMC,cAAcC;IACpB,MAAMC,MAAMF,WAAW,CAACG,4BAA4B;IACpD,OAAOD,uBAAAA,IAAKE,GAAG;AACjB;AAEA,MAAMD,8BAA8BE,OAAOC,GAAG,CAAC;AAoB/C;;;CAGC,GACD,OAAO,SAASC;IACd,MAAMC,UAAUV;IAChB,OAAO;QACLM,KAAK,IAAMI,QAAQC,QAAQ;QAC3BC,KAAK,CAACC,OAAOC,WAAaJ,QAAQE,GAAG,CAACC,OAAOC;IAC/C;AACF"}
|
||||
3
frontend/webapp/node_modules/next/dist/esm/server/after/index.js
generated
vendored
Normal file
3
frontend/webapp/node_modules/next/dist/esm/server/after/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './after';
|
||||
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/index.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/after/index.ts"],"sourcesContent":["export * from './after'\n"],"names":[],"mappings":"AAAA,cAAc,UAAS"}
|
||||
25
frontend/webapp/node_modules/next/dist/esm/server/after/run-with-after.js
generated
vendored
Normal file
25
frontend/webapp/node_modules/next/dist/esm/server/after/run-with-after.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
import { DetachedPromise } from '../../lib/detached-promise';
|
||||
import { CloseController } from '../web/web-on-close';
|
||||
import { AwaiterOnce } from './awaiter';
|
||||
export class AfterRunner {
|
||||
async executeAfter() {
|
||||
this.closeController.dispatchClose();
|
||||
await this.awaiter.awaiting();
|
||||
// if we got an error while running the callbacks,
|
||||
// thenthis is a noop, because the promise is already rejected
|
||||
this.finishedWithoutErrors.resolve();
|
||||
return this.finishedWithoutErrors.promise;
|
||||
}
|
||||
constructor(){
|
||||
this.awaiter = new AwaiterOnce();
|
||||
this.closeController = new CloseController();
|
||||
this.finishedWithoutErrors = new DetachedPromise();
|
||||
this.context = {
|
||||
waitUntil: this.awaiter.waitUntil.bind(this.awaiter),
|
||||
onClose: this.closeController.onClose.bind(this.closeController),
|
||||
onTaskError: (error)=>this.finishedWithoutErrors.reject(error)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=run-with-after.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/after/run-with-after.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/after/run-with-after.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/after/run-with-after.ts"],"sourcesContent":["import { DetachedPromise } from '../../lib/detached-promise'\nimport { CloseController } from '../web/web-on-close'\nimport type { AfterContextOpts } from './after-context'\nimport { AwaiterOnce } from './awaiter'\n\ntype Ctx = {\n waitUntil: NonNullable<AfterContextOpts['waitUntil']>\n onClose: NonNullable<AfterContextOpts['onClose']>\n onTaskError: NonNullable<AfterContextOpts['onTaskError']>\n}\n\nexport class AfterRunner {\n private awaiter = new AwaiterOnce()\n private closeController = new CloseController()\n private finishedWithoutErrors = new DetachedPromise<void>()\n\n readonly context: Ctx = {\n waitUntil: this.awaiter.waitUntil.bind(this.awaiter),\n onClose: this.closeController.onClose.bind(this.closeController),\n onTaskError: (error) => this.finishedWithoutErrors.reject(error),\n }\n\n public async executeAfter() {\n this.closeController.dispatchClose()\n await this.awaiter.awaiting()\n\n // if we got an error while running the callbacks,\n // thenthis is a noop, because the promise is already rejected\n this.finishedWithoutErrors.resolve()\n\n return this.finishedWithoutErrors.promise\n 
}\n}\n"],"names":["DetachedPromise","CloseController","AwaiterOnce","AfterRunner","executeAfter","closeController","dispatchClose","awaiter","awaiting","finishedWithoutErrors","resolve","promise","context","waitUntil","bind","onClose","onTaskError","error","reject"],"mappings":"AAAA,SAASA,eAAe,QAAQ,6BAA4B;AAC5D,SAASC,eAAe,QAAQ,sBAAqB;AAErD,SAASC,WAAW,QAAQ,YAAW;AAQvC,OAAO,MAAMC;IAWX,MAAaC,eAAe;QAC1B,IAAI,CAACC,eAAe,CAACC,aAAa;QAClC,MAAM,IAAI,CAACC,OAAO,CAACC,QAAQ;QAE3B,kDAAkD;QAClD,8DAA8D;QAC9D,IAAI,CAACC,qBAAqB,CAACC,OAAO;QAElC,OAAO,IAAI,CAACD,qBAAqB,CAACE,OAAO;IAC3C;;aAnBQJ,UAAU,IAAIL;aACdG,kBAAkB,IAAIJ;aACtBQ,wBAAwB,IAAIT;aAE3BY,UAAe;YACtBC,WAAW,IAAI,CAACN,OAAO,CAACM,SAAS,CAACC,IAAI,CAAC,IAAI,CAACP,OAAO;YACnDQ,SAAS,IAAI,CAACV,eAAe,CAACU,OAAO,CAACD,IAAI,CAAC,IAAI,CAACT,eAAe;YAC/DW,aAAa,CAACC,QAAU,IAAI,CAACR,qBAAqB,CAACS,MAAM,CAACD;QAC5D;;AAYF"}
|
||||
15
frontend/webapp/node_modules/next/dist/esm/server/api-utils/get-cookie-parser.js
generated
vendored
Normal file
15
frontend/webapp/node_modules/next/dist/esm/server/api-utils/get-cookie-parser.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* Parse cookies from the `headers` of request
|
||||
* @param req request object
|
||||
*/ export function getCookieParser(headers) {
|
||||
return function parseCookie() {
|
||||
const { cookie } = headers;
|
||||
if (!cookie) {
|
||||
return {};
|
||||
}
|
||||
const { parse: parseCookieFn } = require('next/dist/compiled/cookie');
|
||||
return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie);
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-cookie-parser.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/get-cookie-parser.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/get-cookie-parser.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/api-utils/get-cookie-parser.ts"],"sourcesContent":["import type { NextApiRequestCookies } from '.'\n\n/**\n * Parse cookies from the `headers` of request\n * @param req request object\n */\n\nexport function getCookieParser(headers: {\n [key: string]: string | string[] | null | undefined\n}): () => NextApiRequestCookies {\n return function parseCookie(): NextApiRequestCookies {\n const { cookie } = headers\n\n if (!cookie) {\n return {}\n }\n\n const { parse: parseCookieFn } = require('next/dist/compiled/cookie')\n return parseCookieFn(Array.isArray(cookie) ? cookie.join('; ') : cookie)\n }\n}\n"],"names":["getCookieParser","headers","parseCookie","cookie","parse","parseCookieFn","require","Array","isArray","join"],"mappings":"AAEA;;;CAGC,GAED,OAAO,SAASA,gBAAgBC,OAE/B;IACC,OAAO,SAASC;QACd,MAAM,EAAEC,MAAM,EAAE,GAAGF;QAEnB,IAAI,CAACE,QAAQ;YACX,OAAO,CAAC;QACV;QAEA,MAAM,EAAEC,OAAOC,aAAa,EAAE,GAAGC,QAAQ;QACzC,OAAOD,cAAcE,MAAMC,OAAO,CAACL,UAAUA,OAAOM,IAAI,CAAC,QAAQN;IACnE;AACF"}
|
||||
156
frontend/webapp/node_modules/next/dist/esm/server/api-utils/index.js
generated
vendored
Normal file
156
frontend/webapp/node_modules/next/dist/esm/server/api-utils/index.js
generated
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
import { HeadersAdapter } from '../web/spec-extension/adapters/headers';
|
||||
import { PRERENDER_REVALIDATE_HEADER, PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER } from '../../lib/constants';
|
||||
import { getTracer } from '../lib/trace/tracer';
|
||||
import { NodeSpan } from '../lib/trace/constants';
|
||||
export function wrapApiHandler(page, handler) {
|
||||
return (...args)=>{
|
||||
getTracer().setRootSpanAttribute('next.route', page);
|
||||
// Call API route method
|
||||
return getTracer().trace(NodeSpan.runHandler, {
|
||||
spanName: `executing api route (pages) ${page}`
|
||||
}, ()=>handler(...args));
|
||||
};
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param res response object
|
||||
* @param statusCode `HTTP` status code of response
|
||||
*/ export function sendStatusCode(res, statusCode) {
|
||||
res.statusCode = statusCode;
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param res response object
|
||||
* @param [statusOrUrl] `HTTP` status code of redirect
|
||||
* @param url URL of redirect
|
||||
*/ export function redirect(res, statusOrUrl, url) {
|
||||
if (typeof statusOrUrl === 'string') {
|
||||
url = statusOrUrl;
|
||||
statusOrUrl = 307;
|
||||
}
|
||||
if (typeof statusOrUrl !== 'number' || typeof url !== 'string') {
|
||||
throw Object.defineProperty(new Error(`Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`), "__NEXT_ERROR_CODE", {
|
||||
value: "E389",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
res.writeHead(statusOrUrl, {
|
||||
Location: url
|
||||
});
|
||||
res.write(url);
|
||||
res.end();
|
||||
return res;
|
||||
}
|
||||
export function checkIsOnDemandRevalidate(req, previewProps) {
|
||||
const headers = HeadersAdapter.from(req.headers);
|
||||
const previewModeId = headers.get(PRERENDER_REVALIDATE_HEADER);
|
||||
const isOnDemandRevalidate = previewModeId === previewProps.previewModeId;
|
||||
const revalidateOnlyGenerated = headers.has(PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER);
|
||||
return {
|
||||
isOnDemandRevalidate,
|
||||
revalidateOnlyGenerated
|
||||
};
|
||||
}
|
||||
export const COOKIE_NAME_PRERENDER_BYPASS = `__prerender_bypass`;
|
||||
export const COOKIE_NAME_PRERENDER_DATA = `__next_preview_data`;
|
||||
export const RESPONSE_LIMIT_DEFAULT = 4 * 1024 * 1024;
|
||||
export const SYMBOL_PREVIEW_DATA = Symbol(COOKIE_NAME_PRERENDER_DATA);
|
||||
export const SYMBOL_CLEARED_COOKIES = Symbol(COOKIE_NAME_PRERENDER_BYPASS);
|
||||
export function clearPreviewData(res, options = {}) {
|
||||
if (SYMBOL_CLEARED_COOKIES in res) {
|
||||
return res;
|
||||
}
|
||||
const { serialize } = require('next/dist/compiled/cookie');
|
||||
const previous = res.getHeader('Set-Cookie');
|
||||
res.setHeader(`Set-Cookie`, [
|
||||
...typeof previous === 'string' ? [
|
||||
previous
|
||||
] : Array.isArray(previous) ? previous : [],
|
||||
serialize(COOKIE_NAME_PRERENDER_BYPASS, '', {
|
||||
// To delete a cookie, set `expires` to a date in the past:
|
||||
// https://tools.ietf.org/html/rfc6265#section-4.1.1
|
||||
// `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
|
||||
expires: new Date(0),
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
|
||||
secure: process.env.NODE_ENV !== 'development',
|
||||
path: '/',
|
||||
...options.path !== undefined ? {
|
||||
path: options.path
|
||||
} : undefined
|
||||
}),
|
||||
serialize(COOKIE_NAME_PRERENDER_DATA, '', {
|
||||
// To delete a cookie, set `expires` to a date in the past:
|
||||
// https://tools.ietf.org/html/rfc6265#section-4.1.1
|
||||
// `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
|
||||
expires: new Date(0),
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
|
||||
secure: process.env.NODE_ENV !== 'development',
|
||||
path: '/',
|
||||
...options.path !== undefined ? {
|
||||
path: options.path
|
||||
} : undefined
|
||||
})
|
||||
]);
|
||||
Object.defineProperty(res, SYMBOL_CLEARED_COOKIES, {
|
||||
value: true,
|
||||
enumerable: false
|
||||
});
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
* Custom error class
|
||||
*/ export class ApiError extends Error {
|
||||
constructor(statusCode, message){
|
||||
super(message);
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sends error in `response`
|
||||
* @param res response object
|
||||
* @param statusCode of response
|
||||
* @param message of response
|
||||
*/ export function sendError(res, statusCode, message) {
|
||||
res.statusCode = statusCode;
|
||||
res.statusMessage = message;
|
||||
res.end(message);
|
||||
}
|
||||
/**
|
||||
* Execute getter function only if its needed
|
||||
* @param LazyProps `req` and `params` for lazyProp
|
||||
* @param prop name of property
|
||||
* @param getter function to get data
|
||||
*/ export function setLazyProp({ req }, prop, getter) {
|
||||
const opts = {
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
};
|
||||
const optsReset = {
|
||||
...opts,
|
||||
writable: true
|
||||
};
|
||||
Object.defineProperty(req, prop, {
|
||||
...opts,
|
||||
get: ()=>{
|
||||
const value = getter();
|
||||
// we set the property on the object to avoid recalculating it
|
||||
Object.defineProperty(req, prop, {
|
||||
...optsReset,
|
||||
value
|
||||
});
|
||||
return value;
|
||||
},
|
||||
set: (value)=>{
|
||||
Object.defineProperty(req, prop, {
|
||||
...optsReset,
|
||||
value
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/index.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
363
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/api-resolver.js
generated
vendored
Normal file
363
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/api-resolver.js
generated
vendored
Normal file
@@ -0,0 +1,363 @@
|
||||
import bytes from 'next/dist/compiled/bytes';
|
||||
import { generateETag } from '../../lib/etag';
|
||||
import { sendEtagResponse } from '../../send-payload';
|
||||
import { Stream } from 'stream';
|
||||
import isError from '../../../lib/is-error';
|
||||
import { isResSent } from '../../../shared/lib/utils';
|
||||
import { interopDefault } from '../../../lib/interop-default';
|
||||
import { setLazyProp, sendStatusCode, redirect, clearPreviewData, sendError, ApiError, COOKIE_NAME_PRERENDER_BYPASS, COOKIE_NAME_PRERENDER_DATA, RESPONSE_LIMIT_DEFAULT } from './../index';
|
||||
import { getCookieParser } from './../get-cookie-parser';
|
||||
import { PRERENDER_REVALIDATE_HEADER, PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER } from '../../../lib/constants';
|
||||
import { tryGetPreviewData } from './try-get-preview-data';
|
||||
import { parseBody } from './parse-body';
|
||||
function getMaxContentLength(responseLimit) {
|
||||
if (responseLimit && typeof responseLimit !== 'boolean') {
|
||||
return bytes.parse(responseLimit);
|
||||
}
|
||||
return RESPONSE_LIMIT_DEFAULT;
|
||||
}
|
||||
/**
|
||||
* Send `any` body to response
|
||||
* @param req request object
|
||||
* @param res response object
|
||||
* @param body of response
|
||||
*/ function sendData(req, res, body) {
|
||||
if (body === null || body === undefined) {
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
// strip irrelevant headers/body
|
||||
if (res.statusCode === 204 || res.statusCode === 304) {
|
||||
res.removeHeader('Content-Type');
|
||||
res.removeHeader('Content-Length');
|
||||
res.removeHeader('Transfer-Encoding');
|
||||
if (process.env.NODE_ENV === 'development' && body) {
|
||||
console.warn(`A body was attempted to be set with a 204 statusCode for ${req.url}, this is invalid and the body was ignored.\n` + `See more info here https://nextjs.org/docs/messages/invalid-api-status-body`);
|
||||
}
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
const contentType = res.getHeader('Content-Type');
|
||||
if (body instanceof Stream) {
|
||||
if (!contentType) {
|
||||
res.setHeader('Content-Type', 'application/octet-stream');
|
||||
}
|
||||
body.pipe(res);
|
||||
return;
|
||||
}
|
||||
const isJSONLike = [
|
||||
'object',
|
||||
'number',
|
||||
'boolean'
|
||||
].includes(typeof body);
|
||||
const stringifiedBody = isJSONLike ? JSON.stringify(body) : body;
|
||||
const etag = generateETag(stringifiedBody);
|
||||
if (sendEtagResponse(req, res, etag)) {
|
||||
return;
|
||||
}
|
||||
if (Buffer.isBuffer(body)) {
|
||||
if (!contentType) {
|
||||
res.setHeader('Content-Type', 'application/octet-stream');
|
||||
}
|
||||
res.setHeader('Content-Length', body.length);
|
||||
res.end(body);
|
||||
return;
|
||||
}
|
||||
if (isJSONLike) {
|
||||
res.setHeader('Content-Type', 'application/json; charset=utf-8');
|
||||
}
|
||||
res.setHeader('Content-Length', Buffer.byteLength(stringifiedBody));
|
||||
res.end(stringifiedBody);
|
||||
}
|
||||
/**
|
||||
* Send `JSON` object
|
||||
* @param res response object
|
||||
* @param jsonBody of data
|
||||
*/ function sendJson(res, jsonBody) {
|
||||
// Set header to application/json
|
||||
res.setHeader('Content-Type', 'application/json; charset=utf-8');
|
||||
// Use send to handle request
|
||||
res.send(JSON.stringify(jsonBody));
|
||||
}
|
||||
function isValidData(str) {
|
||||
return typeof str === 'string' && str.length >= 16;
|
||||
}
|
||||
function setDraftMode(res, options) {
|
||||
if (!isValidData(options.previewModeId)) {
|
||||
throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
|
||||
value: "E169",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const expires = options.enable ? undefined : new Date(0);
|
||||
// To delete a cookie, set `expires` to a date in the past:
|
||||
// https://tools.ietf.org/html/rfc6265#section-4.1.1
|
||||
// `Max-Age: 0` is not valid, thus ignored, and the cookie is persisted.
|
||||
const { serialize } = require('next/dist/compiled/cookie');
|
||||
const previous = res.getHeader('Set-Cookie');
|
||||
res.setHeader(`Set-Cookie`, [
|
||||
...typeof previous === 'string' ? [
|
||||
previous
|
||||
] : Array.isArray(previous) ? previous : [],
|
||||
serialize(COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
|
||||
secure: process.env.NODE_ENV !== 'development',
|
||||
path: '/',
|
||||
expires
|
||||
})
|
||||
]);
|
||||
return res;
|
||||
}
|
||||
function setPreviewData(res, data, options) {
|
||||
if (!isValidData(options.previewModeId)) {
|
||||
throw Object.defineProperty(new Error('invariant: invalid previewModeId'), "__NEXT_ERROR_CODE", {
|
||||
value: "E169",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (!isValidData(options.previewModeEncryptionKey)) {
|
||||
throw Object.defineProperty(new Error('invariant: invalid previewModeEncryptionKey'), "__NEXT_ERROR_CODE", {
|
||||
value: "E334",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (!isValidData(options.previewModeSigningKey)) {
|
||||
throw Object.defineProperty(new Error('invariant: invalid previewModeSigningKey'), "__NEXT_ERROR_CODE", {
|
||||
value: "E436",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
|
||||
const { encryptWithSecret } = require('../../crypto-utils');
|
||||
const payload = jsonwebtoken.sign({
|
||||
data: encryptWithSecret(Buffer.from(options.previewModeEncryptionKey), JSON.stringify(data))
|
||||
}, options.previewModeSigningKey, {
|
||||
algorithm: 'HS256',
|
||||
...options.maxAge !== undefined ? {
|
||||
expiresIn: options.maxAge
|
||||
} : undefined
|
||||
});
|
||||
// limit preview mode cookie to 2KB since we shouldn't store too much
|
||||
// data here and browsers drop cookies over 4KB
|
||||
if (payload.length > 2048) {
|
||||
throw Object.defineProperty(new Error(`Preview data is limited to 2KB currently, reduce how much data you are storing as preview data to continue`), "__NEXT_ERROR_CODE", {
|
||||
value: "E465",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const { serialize } = require('next/dist/compiled/cookie');
|
||||
const previous = res.getHeader('Set-Cookie');
|
||||
res.setHeader(`Set-Cookie`, [
|
||||
...typeof previous === 'string' ? [
|
||||
previous
|
||||
] : Array.isArray(previous) ? previous : [],
|
||||
serialize(COOKIE_NAME_PRERENDER_BYPASS, options.previewModeId, {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
|
||||
secure: process.env.NODE_ENV !== 'development',
|
||||
path: '/',
|
||||
...options.maxAge !== undefined ? {
|
||||
maxAge: options.maxAge
|
||||
} : undefined,
|
||||
...options.path !== undefined ? {
|
||||
path: options.path
|
||||
} : undefined
|
||||
}),
|
||||
serialize(COOKIE_NAME_PRERENDER_DATA, payload, {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV !== 'development' ? 'none' : 'lax',
|
||||
secure: process.env.NODE_ENV !== 'development',
|
||||
path: '/',
|
||||
...options.maxAge !== undefined ? {
|
||||
maxAge: options.maxAge
|
||||
} : undefined,
|
||||
...options.path !== undefined ? {
|
||||
path: options.path
|
||||
} : undefined
|
||||
})
|
||||
]);
|
||||
return res;
|
||||
}
|
||||
async function revalidate(urlPath, opts, req, context) {
|
||||
if (typeof urlPath !== 'string' || !urlPath.startsWith('/')) {
|
||||
throw Object.defineProperty(new Error(`Invalid urlPath provided to revalidate(), must be a path e.g. /blog/post-1, received ${urlPath}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E153",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const revalidateHeaders = {
|
||||
[PRERENDER_REVALIDATE_HEADER]: context.previewModeId,
|
||||
...opts.unstable_onlyGenerated ? {
|
||||
[PRERENDER_REVALIDATE_ONLY_GENERATED_HEADER]: '1'
|
||||
} : {}
|
||||
};
|
||||
const allowedRevalidateHeaderKeys = [
|
||||
...context.allowedRevalidateHeaderKeys || []
|
||||
];
|
||||
if (context.trustHostHeader || context.dev) {
|
||||
allowedRevalidateHeaderKeys.push('cookie');
|
||||
}
|
||||
if (context.trustHostHeader) {
|
||||
allowedRevalidateHeaderKeys.push('x-vercel-protection-bypass');
|
||||
}
|
||||
for (const key of Object.keys(req.headers)){
|
||||
if (allowedRevalidateHeaderKeys.includes(key)) {
|
||||
revalidateHeaders[key] = req.headers[key];
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (context.trustHostHeader) {
|
||||
const res = await fetch(`https://${req.headers.host}${urlPath}`, {
|
||||
method: 'HEAD',
|
||||
headers: revalidateHeaders
|
||||
});
|
||||
// we use the cache header to determine successful revalidate as
|
||||
// a non-200 status code can be returned from a successful revalidate
|
||||
// e.g. notFound: true returns 404 status code but is successful
|
||||
const cacheHeader = res.headers.get('x-vercel-cache') || res.headers.get('x-nextjs-cache');
|
||||
if ((cacheHeader == null ? void 0 : cacheHeader.toUpperCase()) !== 'REVALIDATED' && res.status !== 200 && !(res.status === 404 && opts.unstable_onlyGenerated)) {
|
||||
throw Object.defineProperty(new Error(`Invalid response ${res.status}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E175",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
} else if (context.revalidate) {
|
||||
await context.revalidate({
|
||||
urlPath,
|
||||
revalidateHeaders,
|
||||
opts
|
||||
});
|
||||
} else {
|
||||
throw Object.defineProperty(new Error(`Invariant: required internal revalidate method not passed to api-utils`), "__NEXT_ERROR_CODE", {
|
||||
value: "E174",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
throw Object.defineProperty(new Error(`Failed to revalidate ${urlPath}: ${isError(err) ? err.message : err}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E240",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
export async function apiResolver(req, res, query, resolverModule, apiContext, propagateError, dev, page, onError) {
|
||||
const apiReq = req;
|
||||
const apiRes = res;
|
||||
try {
|
||||
var _config_api, _config_api1, _config_api2;
|
||||
if (!resolverModule) {
|
||||
res.statusCode = 404;
|
||||
res.end('Not Found');
|
||||
return;
|
||||
}
|
||||
const config = resolverModule.config || {};
|
||||
const bodyParser = ((_config_api = config.api) == null ? void 0 : _config_api.bodyParser) !== false;
|
||||
const responseLimit = ((_config_api1 = config.api) == null ? void 0 : _config_api1.responseLimit) ?? true;
|
||||
const externalResolver = ((_config_api2 = config.api) == null ? void 0 : _config_api2.externalResolver) || false;
|
||||
// Parsing of cookies
|
||||
setLazyProp({
|
||||
req: apiReq
|
||||
}, 'cookies', getCookieParser(req.headers));
|
||||
// Parsing query string
|
||||
apiReq.query = query;
|
||||
// Parsing preview data
|
||||
setLazyProp({
|
||||
req: apiReq
|
||||
}, 'previewData', ()=>tryGetPreviewData(req, res, apiContext, !!apiContext.multiZoneDraftMode));
|
||||
// Checking if preview mode is enabled
|
||||
setLazyProp({
|
||||
req: apiReq
|
||||
}, 'preview', ()=>apiReq.previewData !== false ? true : undefined);
|
||||
// Set draftMode to the same value as preview
|
||||
setLazyProp({
|
||||
req: apiReq
|
||||
}, 'draftMode', ()=>apiReq.preview);
|
||||
// Parsing of body
|
||||
if (bodyParser && !apiReq.body) {
|
||||
apiReq.body = await parseBody(apiReq, config.api && config.api.bodyParser && config.api.bodyParser.sizeLimit ? config.api.bodyParser.sizeLimit : '1mb');
|
||||
}
|
||||
let contentLength = 0;
|
||||
const maxContentLength = getMaxContentLength(responseLimit);
|
||||
const writeData = apiRes.write;
|
||||
const endResponse = apiRes.end;
|
||||
apiRes.write = (...args)=>{
|
||||
contentLength += Buffer.byteLength(args[0] || '');
|
||||
return writeData.apply(apiRes, args);
|
||||
};
|
||||
apiRes.end = (...args)=>{
|
||||
if (args.length && typeof args[0] !== 'function') {
|
||||
contentLength += Buffer.byteLength(args[0] || '');
|
||||
}
|
||||
if (responseLimit && contentLength >= maxContentLength) {
|
||||
console.warn(`API response for ${req.url} exceeds ${bytes.format(maxContentLength)}. API Routes are meant to respond quickly. https://nextjs.org/docs/messages/api-routes-response-size-limit`);
|
||||
}
|
||||
return endResponse.apply(apiRes, args);
|
||||
};
|
||||
apiRes.status = (statusCode)=>sendStatusCode(apiRes, statusCode);
|
||||
apiRes.send = (data)=>sendData(apiReq, apiRes, data);
|
||||
apiRes.json = (data)=>sendJson(apiRes, data);
|
||||
apiRes.redirect = (statusOrUrl, url)=>redirect(apiRes, statusOrUrl, url);
|
||||
apiRes.setDraftMode = (options = {
|
||||
enable: true
|
||||
})=>setDraftMode(apiRes, Object.assign({}, apiContext, options));
|
||||
apiRes.setPreviewData = (data, options = {})=>setPreviewData(apiRes, data, Object.assign({}, apiContext, options));
|
||||
apiRes.clearPreviewData = (options = {})=>clearPreviewData(apiRes, options);
|
||||
apiRes.revalidate = (urlPath, opts)=>revalidate(urlPath, opts || {}, req, apiContext);
|
||||
const resolver = interopDefault(resolverModule);
|
||||
let wasPiped = false;
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
// listen for pipe event and don't show resolve warning
|
||||
res.once('pipe', ()=>wasPiped = true);
|
||||
}
|
||||
const apiRouteResult = await resolver(req, res);
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (typeof apiRouteResult !== 'undefined') {
|
||||
if (apiRouteResult instanceof Response) {
|
||||
throw Object.defineProperty(new Error('API route returned a Response object in the Node.js runtime, this is not supported. Please use `runtime: "edge"` instead: https://nextjs.org/docs/api-routes/edge-api-routes'), "__NEXT_ERROR_CODE", {
|
||||
value: "E36",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
console.warn(`API handler should not return a value, received ${typeof apiRouteResult}.`);
|
||||
}
|
||||
if (!externalResolver && !isResSent(res) && !wasPiped) {
|
||||
console.warn(`API resolved without sending a response for ${req.url}, this may result in stalled requests.`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
onError == null ? void 0 : onError(err, req, {
|
||||
routerKind: 'Pages Router',
|
||||
routePath: page || '',
|
||||
routeType: 'route',
|
||||
revalidateReason: undefined
|
||||
});
|
||||
if (err instanceof ApiError) {
|
||||
sendError(apiRes, err.statusCode, err.message);
|
||||
} else {
|
||||
if (dev) {
|
||||
if (isError(err)) {
|
||||
err.page = page;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
console.error(err);
|
||||
if (propagateError) {
|
||||
throw err;
|
||||
}
|
||||
sendError(apiRes, 500, 'Internal Server Error');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=api-resolver.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/api-resolver.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/api-resolver.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
67
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/parse-body.js
generated
vendored
Normal file
67
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/parse-body.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
import { parse } from 'next/dist/compiled/content-type';
|
||||
import isError from '../../../lib/is-error';
|
||||
import { ApiError } from '../index';
|
||||
/**
|
||||
* Parse `JSON` and handles invalid `JSON` strings
|
||||
* @param str `JSON` string
|
||||
*/ function parseJson(str) {
|
||||
if (str.length === 0) {
|
||||
// special-case empty json body, as it's a common client-side mistake
|
||||
return {};
|
||||
}
|
||||
try {
|
||||
return JSON.parse(str);
|
||||
} catch (e) {
|
||||
throw Object.defineProperty(new ApiError(400, 'Invalid JSON'), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Parse incoming message like `json` or `urlencoded`
|
||||
* @param req request object
|
||||
*/ export async function parseBody(req, limit) {
|
||||
let contentType;
|
||||
try {
|
||||
contentType = parse(req.headers['content-type'] || 'text/plain');
|
||||
} catch {
|
||||
contentType = parse('text/plain');
|
||||
}
|
||||
const { type, parameters } = contentType;
|
||||
const encoding = parameters.charset || 'utf-8';
|
||||
let buffer;
|
||||
try {
|
||||
const getRawBody = require('next/dist/compiled/raw-body');
|
||||
buffer = await getRawBody(req, {
|
||||
encoding,
|
||||
limit
|
||||
});
|
||||
} catch (e) {
|
||||
if (isError(e) && e.type === 'entity.too.large') {
|
||||
throw Object.defineProperty(new ApiError(413, `Body exceeded ${limit} limit`), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
throw Object.defineProperty(new ApiError(400, 'Invalid body'), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
const body = buffer.toString();
|
||||
if (type === 'application/json' || type === 'application/ld+json') {
|
||||
return parseJson(body);
|
||||
} else if (type === 'application/x-www-form-urlencoded') {
|
||||
const qs = require('querystring');
|
||||
return qs.decode(body);
|
||||
} else {
|
||||
return body;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=parse-body.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/parse-body.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/parse-body.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/api-utils/node/parse-body.ts"],"sourcesContent":["import type { IncomingMessage } from 'http'\n\nimport { parse } from 'next/dist/compiled/content-type'\nimport isError from '../../../lib/is-error'\nimport type { SizeLimit } from '../../../types'\nimport { ApiError } from '../index'\n\n/**\n * Parse `JSON` and handles invalid `JSON` strings\n * @param str `JSON` string\n */\nfunction parseJson(str: string): object {\n if (str.length === 0) {\n // special-case empty json body, as it's a common client-side mistake\n return {}\n }\n\n try {\n return JSON.parse(str)\n } catch (e) {\n throw new ApiError(400, 'Invalid JSON')\n }\n}\n\n/**\n * Parse incoming message like `json` or `urlencoded`\n * @param req request object\n */\nexport async function parseBody(\n req: IncomingMessage,\n limit: SizeLimit\n): Promise<any> {\n let contentType\n try {\n contentType = parse(req.headers['content-type'] || 'text/plain')\n } catch {\n contentType = parse('text/plain')\n }\n const { type, parameters } = contentType\n const encoding = parameters.charset || 'utf-8'\n\n let buffer\n\n try {\n const getRawBody =\n require('next/dist/compiled/raw-body') as typeof import('next/dist/compiled/raw-body')\n buffer = await getRawBody(req, { encoding, limit })\n } catch (e) {\n if (isError(e) && e.type === 'entity.too.large') {\n throw new ApiError(413, `Body exceeded ${limit} limit`)\n } else {\n throw new ApiError(400, 'Invalid body')\n }\n }\n\n const body = buffer.toString()\n\n if (type === 'application/json' || type === 'application/ld+json') {\n return parseJson(body)\n } else if (type === 'application/x-www-form-urlencoded') {\n const qs = require('querystring')\n return qs.decode(body)\n } else {\n return body\n 
}\n}\n"],"names":["parse","isError","ApiError","parseJson","str","length","JSON","e","parseBody","req","limit","contentType","headers","type","parameters","encoding","charset","buffer","getRawBody","require","body","toString","qs","decode"],"mappings":"AAEA,SAASA,KAAK,QAAQ,kCAAiC;AACvD,OAAOC,aAAa,wBAAuB;AAE3C,SAASC,QAAQ,QAAQ,WAAU;AAEnC;;;CAGC,GACD,SAASC,UAAUC,GAAW;IAC5B,IAAIA,IAAIC,MAAM,KAAK,GAAG;QACpB,qEAAqE;QACrE,OAAO,CAAC;IACV;IAEA,IAAI;QACF,OAAOC,KAAKN,KAAK,CAACI;IACpB,EAAE,OAAOG,GAAG;QACV,MAAM,qBAAiC,CAAjC,IAAIL,SAAS,KAAK,iBAAlB,qBAAA;mBAAA;wBAAA;0BAAA;QAAgC;IACxC;AACF;AAEA;;;CAGC,GACD,OAAO,eAAeM,UACpBC,GAAoB,EACpBC,KAAgB;IAEhB,IAAIC;IACJ,IAAI;QACFA,cAAcX,MAAMS,IAAIG,OAAO,CAAC,eAAe,IAAI;IACrD,EAAE,OAAM;QACND,cAAcX,MAAM;IACtB;IACA,MAAM,EAAEa,IAAI,EAAEC,UAAU,EAAE,GAAGH;IAC7B,MAAMI,WAAWD,WAAWE,OAAO,IAAI;IAEvC,IAAIC;IAEJ,IAAI;QACF,MAAMC,aACJC,QAAQ;QACVF,SAAS,MAAMC,WAAWT,KAAK;YAAEM;YAAUL;QAAM;IACnD,EAAE,OAAOH,GAAG;QACV,IAAIN,QAAQM,MAAMA,EAAEM,IAAI,KAAK,oBAAoB;YAC/C,MAAM,qBAAiD,CAAjD,IAAIX,SAAS,KAAK,CAAC,cAAc,EAAEQ,MAAM,MAAM,CAAC,GAAhD,qBAAA;uBAAA;4BAAA;8BAAA;YAAgD;QACxD,OAAO;YACL,MAAM,qBAAiC,CAAjC,IAAIR,SAAS,KAAK,iBAAlB,qBAAA;uBAAA;4BAAA;8BAAA;YAAgC;QACxC;IACF;IAEA,MAAMkB,OAAOH,OAAOI,QAAQ;IAE5B,IAAIR,SAAS,sBAAsBA,SAAS,uBAAuB;QACjE,OAAOV,UAAUiB;IACnB,OAAO,IAAIP,SAAS,qCAAqC;QACvD,MAAMS,KAAKH,QAAQ;QACnB,OAAOG,GAAGC,MAAM,CAACH;IACnB,OAAO;QACL,OAAOA;IACT;AACF"}
|
||||
76
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/try-get-preview-data.js
generated
vendored
Normal file
76
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/try-get-preview-data.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
import { checkIsOnDemandRevalidate } from '../.';
|
||||
import { clearPreviewData, COOKIE_NAME_PRERENDER_BYPASS, COOKIE_NAME_PRERENDER_DATA, SYMBOL_PREVIEW_DATA } from '../index';
|
||||
import { RequestCookies } from '../../web/spec-extension/cookies';
|
||||
import { HeadersAdapter } from '../../web/spec-extension/adapters/headers';
|
||||
export function tryGetPreviewData(req, res, options, multiZoneDraftMode) {
|
||||
var _cookies_get, _cookies_get1;
|
||||
// if an On-Demand revalidation is being done preview mode
|
||||
// is disabled
|
||||
if (options && checkIsOnDemandRevalidate(req, options).isOnDemandRevalidate) {
|
||||
return false;
|
||||
}
|
||||
// Read cached preview data if present
|
||||
// TODO: use request metadata instead of a symbol
|
||||
if (SYMBOL_PREVIEW_DATA in req) {
|
||||
return req[SYMBOL_PREVIEW_DATA];
|
||||
}
|
||||
const headers = HeadersAdapter.from(req.headers);
|
||||
const cookies = new RequestCookies(headers);
|
||||
const previewModeId = (_cookies_get = cookies.get(COOKIE_NAME_PRERENDER_BYPASS)) == null ? void 0 : _cookies_get.value;
|
||||
const tokenPreviewData = (_cookies_get1 = cookies.get(COOKIE_NAME_PRERENDER_DATA)) == null ? void 0 : _cookies_get1.value;
|
||||
// Case: preview mode cookie set but data cookie is not set
|
||||
if (previewModeId && !tokenPreviewData && previewModeId === options.previewModeId) {
|
||||
// This is "Draft Mode" which doesn't use
|
||||
// previewData, so we return an empty object
|
||||
// for backwards compat with "Preview Mode".
|
||||
const data = {};
|
||||
Object.defineProperty(req, SYMBOL_PREVIEW_DATA, {
|
||||
value: data,
|
||||
enumerable: false
|
||||
});
|
||||
return data;
|
||||
}
|
||||
// Case: neither cookie is set.
|
||||
if (!previewModeId && !tokenPreviewData) {
|
||||
return false;
|
||||
}
|
||||
// Case: one cookie is set, but not the other.
|
||||
if (!previewModeId || !tokenPreviewData) {
|
||||
if (!multiZoneDraftMode) {
|
||||
clearPreviewData(res);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// Case: preview session is for an old build.
|
||||
if (previewModeId !== options.previewModeId) {
|
||||
if (!multiZoneDraftMode) {
|
||||
clearPreviewData(res);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
let encryptedPreviewData;
|
||||
try {
|
||||
const jsonwebtoken = require('next/dist/compiled/jsonwebtoken');
|
||||
encryptedPreviewData = jsonwebtoken.verify(tokenPreviewData, options.previewModeSigningKey);
|
||||
} catch {
|
||||
// TODO: warn
|
||||
clearPreviewData(res);
|
||||
return false;
|
||||
}
|
||||
const { decryptWithSecret } = require('../../crypto-utils');
|
||||
const decryptedPreviewData = decryptWithSecret(Buffer.from(options.previewModeEncryptionKey), encryptedPreviewData.data);
|
||||
try {
|
||||
// TODO: strict runtime type checking
|
||||
const data = JSON.parse(decryptedPreviewData);
|
||||
// Cache lookup
|
||||
Object.defineProperty(req, SYMBOL_PREVIEW_DATA, {
|
||||
value: data,
|
||||
enumerable: false
|
||||
});
|
||||
return data;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=try-get-preview-data.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/try-get-preview-data.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/node/try-get-preview-data.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
frontend/webapp/node_modules/next/dist/esm/server/api-utils/web.js
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/esm/server/api-utils/web.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings
|
||||
// supported at the moment.
|
||||
export function byteLength(payload) {
|
||||
return new TextEncoder().encode(payload).buffer.byteLength;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=web.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/web.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/api-utils/web.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/api-utils/web.ts"],"sourcesContent":["// Buffer.byteLength polyfill in the Edge runtime, with only utf8 strings\n// supported at the moment.\nexport function byteLength(payload: string): number {\n return new TextEncoder().encode(payload).buffer.byteLength\n}\n"],"names":["byteLength","payload","TextEncoder","encode","buffer"],"mappings":"AAAA,yEAAyE;AACzE,2BAA2B;AAC3B,OAAO,SAASA,WAAWC,OAAe;IACxC,OAAO,IAAIC,cAAcC,MAAM,CAACF,SAASG,MAAM,CAACJ,UAAU;AAC5D"}
|
||||
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage-instance.js
generated
vendored
Normal file
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage-instance.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import { createAsyncLocalStorage } from './async-local-storage';
|
||||
export const actionAsyncStorageInstance = createAsyncLocalStorage();
|
||||
|
||||
//# sourceMappingURL=action-async-storage-instance.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage-instance.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","actionAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,6BACXD,0BAAyB"}
|
||||
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage.external.js
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage.external.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// Share the instance module in the next-shared layer
|
||||
import { actionAsyncStorageInstance } from './action-async-storage-instance' with {
|
||||
'turbopack-transition': 'next-shared'
|
||||
};
|
||||
export { actionAsyncStorageInstance as actionAsyncStorage };
|
||||
|
||||
//# sourceMappingURL=action-async-storage.external.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage.external.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-async-storage.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorageInstance","actionAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,0BAA0B,QAAQ,uCAAuC;IAAE,wBAAwB;AAAc,EAAC;AAQ3H,SAASA,8BAA8BC,kBAAkB,GAAE"}
|
||||
728
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-handler.js
generated
vendored
Normal file
728
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-handler.js
generated
vendored
Normal file
@@ -0,0 +1,728 @@
|
||||
import { RSC_HEADER, RSC_CONTENT_TYPE_HEADER, NEXT_ROUTER_STATE_TREE_HEADER, ACTION_HEADER } from '../../client/components/app-router-headers';
|
||||
import { getAccessFallbackHTTPStatus, isHTTPAccessFallbackError } from '../../client/components/http-access-fallback/http-access-fallback';
|
||||
import { getRedirectTypeFromError, getURLFromRedirectError } from '../../client/components/redirect';
|
||||
import { isRedirectError } from '../../client/components/redirect-error';
|
||||
import RenderResult from '../render-result';
|
||||
import { FlightRenderResult } from './flight-render-result';
|
||||
import { filterReqHeaders, actionsForbiddenHeaders } from '../lib/server-ipc/utils';
|
||||
import { getModifiedCookieValues } from '../web/spec-extension/adapters/request-cookies';
|
||||
import { NEXT_CACHE_REVALIDATED_TAGS_HEADER, NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER } from '../../lib/constants';
|
||||
import { getServerActionRequestMetadata } from '../lib/server-action-request-meta';
|
||||
import { isCsrfOriginAllowed } from './csrf-protection';
|
||||
import { warn } from '../../build/output/log';
|
||||
import { RequestCookies, ResponseCookies } from '../web/spec-extension/cookies';
|
||||
import { HeadersAdapter } from '../web/spec-extension/adapters/headers';
|
||||
import { fromNodeOutgoingHttpHeaders } from '../web/utils';
|
||||
import { selectWorkerForForwarding } from './action-utils';
|
||||
import { isNodeNextRequest, isWebNextRequest } from '../base-http/helpers';
|
||||
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
|
||||
import { synchronizeMutableCookies } from '../async-storage/request-store';
|
||||
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
import { executeRevalidates } from '../revalidation-utils';
|
||||
function formDataFromSearchQueryString(query) {
|
||||
const searchParams = new URLSearchParams(query);
|
||||
const formData = new FormData();
|
||||
for (const [key, value] of searchParams){
|
||||
formData.append(key, value);
|
||||
}
|
||||
return formData;
|
||||
}
|
||||
function nodeHeadersToRecord(headers) {
|
||||
const record = {};
|
||||
for (const [key, value] of Object.entries(headers)){
|
||||
if (value !== undefined) {
|
||||
record[key] = Array.isArray(value) ? value.join(', ') : `${value}`;
|
||||
}
|
||||
}
|
||||
return record;
|
||||
}
|
||||
function getForwardedHeaders(req, res) {
|
||||
// Get request headers and cookies
|
||||
const requestHeaders = req.headers;
|
||||
const requestCookies = new RequestCookies(HeadersAdapter.from(requestHeaders));
|
||||
// Get response headers and cookies
|
||||
const responseHeaders = res.getHeaders();
|
||||
const responseCookies = new ResponseCookies(fromNodeOutgoingHttpHeaders(responseHeaders));
|
||||
// Merge request and response headers
|
||||
const mergedHeaders = filterReqHeaders({
|
||||
...nodeHeadersToRecord(requestHeaders),
|
||||
...nodeHeadersToRecord(responseHeaders)
|
||||
}, actionsForbiddenHeaders);
|
||||
// Merge cookies into requestCookies, so responseCookies always take precedence
|
||||
// and overwrite/delete those from requestCookies.
|
||||
responseCookies.getAll().forEach((cookie)=>{
|
||||
if (typeof cookie.value === 'undefined') {
|
||||
requestCookies.delete(cookie.name);
|
||||
} else {
|
||||
requestCookies.set(cookie);
|
||||
}
|
||||
});
|
||||
// Update the 'cookie' header with the merged cookies
|
||||
mergedHeaders['cookie'] = requestCookies.toString();
|
||||
// Remove headers that should not be forwarded
|
||||
delete mergedHeaders['transfer-encoding'];
|
||||
return new Headers(mergedHeaders);
|
||||
}
|
||||
function addRevalidationHeader(res, { workStore, requestStore }) {
|
||||
var _workStore_pendingRevalidatedTags;
|
||||
// If a tag was revalidated, the client router needs to invalidate all the
|
||||
// client router cache as they may be stale. And if a path was revalidated, the
|
||||
// client needs to invalidate all subtrees below that path.
|
||||
// To keep the header size small, we use a tuple of
|
||||
// [[revalidatedPaths], isTagRevalidated ? 1 : 0, isCookieRevalidated ? 1 : 0]
|
||||
// instead of a JSON object.
|
||||
// TODO-APP: Currently the prefetch cache doesn't have subtree information,
|
||||
// so we need to invalidate the entire cache if a path was revalidated.
|
||||
// TODO-APP: Currently paths are treated as tags, so the second element of the tuple
|
||||
// is always empty.
|
||||
const isTagRevalidated = ((_workStore_pendingRevalidatedTags = workStore.pendingRevalidatedTags) == null ? void 0 : _workStore_pendingRevalidatedTags.length) ? 1 : 0;
|
||||
const isCookieRevalidated = getModifiedCookieValues(requestStore.mutableCookies).length ? 1 : 0;
|
||||
res.setHeader('x-action-revalidated', JSON.stringify([
|
||||
[],
|
||||
isTagRevalidated,
|
||||
isCookieRevalidated
|
||||
]));
|
||||
}
|
||||
/**
|
||||
* Forwards a server action request to a separate worker. Used when the requested action is not available in the current worker.
|
||||
*/ async function createForwardedActionResponse(req, res, host, workerPathname, basePath, workStore) {
|
||||
var _workStore_incrementalCache;
|
||||
if (!host) {
|
||||
throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E226",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const forwardedHeaders = getForwardedHeaders(req, res);
|
||||
// indicate that this action request was forwarded from another worker
|
||||
// we use this to skip rendering the flight tree so that we don't update the UI
|
||||
// with the response from the forwarded worker
|
||||
forwardedHeaders.set('x-action-forwarded', '1');
|
||||
const proto = ((_workStore_incrementalCache = workStore.incrementalCache) == null ? void 0 : _workStore_incrementalCache.requestProtocol) || 'https';
|
||||
// For standalone or the serverful mode, use the internal origin directly
|
||||
// other than the host headers from the request.
|
||||
const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${host.value}`;
|
||||
const fetchUrl = new URL(`${origin}${basePath}${workerPathname}`);
|
||||
try {
|
||||
var _response_headers_get;
|
||||
let body;
|
||||
if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
|
||||
if (!req.body) {
|
||||
throw Object.defineProperty(new Error('Invariant: missing request body.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E333",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
body = req.body;
|
||||
} else if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
|
||||
body = req.stream();
|
||||
} else {
|
||||
throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E114",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// Forward the request to the new worker
|
||||
const response = await fetch(fetchUrl, {
|
||||
method: 'POST',
|
||||
body,
|
||||
duplex: 'half',
|
||||
headers: forwardedHeaders,
|
||||
redirect: 'manual',
|
||||
next: {
|
||||
// @ts-ignore
|
||||
internal: 1
|
||||
}
|
||||
});
|
||||
if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
|
||||
// copy the headers from the redirect response to the response we're sending
|
||||
for (const [key, value] of response.headers){
|
||||
if (!actionsForbiddenHeaders.includes(key)) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
return new FlightRenderResult(response.body);
|
||||
} else {
|
||||
var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
|
||||
_response_body;
|
||||
(_response_body = response.body) == null ? void 0 : _response_body.cancel();
|
||||
}
|
||||
} catch (err) {
|
||||
// we couldn't stream the forwarded response, so we'll just return an empty response
|
||||
console.error(`failed to forward action response`, err);
|
||||
}
|
||||
return RenderResult.fromStatic('{}');
|
||||
}
|
||||
/**
|
||||
* Returns the parsed redirect URL if we deem that it is hosted by us.
|
||||
*
|
||||
* We handle both relative and absolute redirect URLs.
|
||||
*
|
||||
* In case the redirect URL is not relative to the application we return `null`.
|
||||
*/ function getAppRelativeRedirectUrl(basePath, host, redirectUrl) {
|
||||
if (redirectUrl.startsWith('/') || redirectUrl.startsWith('.')) {
|
||||
// Make sure we are appending the basePath to relative URLS
|
||||
return new URL(`${basePath}${redirectUrl}`, 'http://n');
|
||||
}
|
||||
const parsedRedirectUrl = new URL(redirectUrl);
|
||||
if ((host == null ? void 0 : host.value) !== parsedRedirectUrl.host) {
|
||||
return null;
|
||||
}
|
||||
// At this point the hosts are the same, just confirm we
|
||||
// are routing to a path underneath the `basePath`
|
||||
return parsedRedirectUrl.pathname.startsWith(basePath) ? parsedRedirectUrl : null;
|
||||
}
|
||||
async function createRedirectRenderResult(req, res, originalHost, redirectUrl, redirectType, basePath, workStore) {
|
||||
res.setHeader('x-action-redirect', `${redirectUrl};${redirectType}`);
|
||||
// If we're redirecting to another route of this Next.js application, we'll
|
||||
// try to stream the response from the other worker path. When that works,
|
||||
// we can save an extra roundtrip and avoid a full page reload.
|
||||
// When the redirect URL starts with a `/` or is to the same host, under the
|
||||
// `basePath` we treat it as an app-relative redirect;
|
||||
const appRelativeRedirectUrl = getAppRelativeRedirectUrl(basePath, originalHost, redirectUrl);
|
||||
if (appRelativeRedirectUrl) {
|
||||
var _workStore_incrementalCache;
|
||||
if (!originalHost) {
|
||||
throw Object.defineProperty(new Error('Invariant: Missing `host` header from a forwarded Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E226",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const forwardedHeaders = getForwardedHeaders(req, res);
|
||||
forwardedHeaders.set(RSC_HEADER, '1');
|
||||
const proto = ((_workStore_incrementalCache = workStore.incrementalCache) == null ? void 0 : _workStore_incrementalCache.requestProtocol) || 'https';
|
||||
// For standalone or the serverful mode, use the internal origin directly
|
||||
// other than the host headers from the request.
|
||||
const origin = process.env.__NEXT_PRIVATE_ORIGIN || `${proto}://${originalHost.value}`;
|
||||
const fetchUrl = new URL(`${origin}${appRelativeRedirectUrl.pathname}${appRelativeRedirectUrl.search}`);
|
||||
if (workStore.pendingRevalidatedTags) {
|
||||
var _workStore_incrementalCache_prerenderManifest_preview, _workStore_incrementalCache_prerenderManifest, _workStore_incrementalCache1;
|
||||
forwardedHeaders.set(NEXT_CACHE_REVALIDATED_TAGS_HEADER, workStore.pendingRevalidatedTags.join(','));
|
||||
forwardedHeaders.set(NEXT_CACHE_REVALIDATE_TAG_TOKEN_HEADER, ((_workStore_incrementalCache1 = workStore.incrementalCache) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest = _workStore_incrementalCache1.prerenderManifest) == null ? void 0 : (_workStore_incrementalCache_prerenderManifest_preview = _workStore_incrementalCache_prerenderManifest.preview) == null ? void 0 : _workStore_incrementalCache_prerenderManifest_preview.previewModeId) || '');
|
||||
}
|
||||
// Ensures that when the path was revalidated we don't return a partial response on redirects
|
||||
forwardedHeaders.delete(NEXT_ROUTER_STATE_TREE_HEADER);
|
||||
// When an action follows a redirect, it's no longer handling an action: it's just a normal RSC request
|
||||
// to the requested URL. We should remove the `next-action` header so that it's not treated as an action
|
||||
forwardedHeaders.delete(ACTION_HEADER);
|
||||
try {
|
||||
var _response_headers_get;
|
||||
const response = await fetch(fetchUrl, {
|
||||
method: 'GET',
|
||||
headers: forwardedHeaders,
|
||||
next: {
|
||||
// @ts-ignore
|
||||
internal: 1
|
||||
}
|
||||
});
|
||||
if ((_response_headers_get = response.headers.get('content-type')) == null ? void 0 : _response_headers_get.startsWith(RSC_CONTENT_TYPE_HEADER)) {
|
||||
// copy the headers from the redirect response to the response we're sending
|
||||
for (const [key, value] of response.headers){
|
||||
if (!actionsForbiddenHeaders.includes(key)) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
return new FlightRenderResult(response.body);
|
||||
} else {
|
||||
var // Since we aren't consuming the response body, we cancel it to avoid memory leaks
|
||||
_response_body;
|
||||
(_response_body = response.body) == null ? void 0 : _response_body.cancel();
|
||||
}
|
||||
} catch (err) {
|
||||
// we couldn't stream the redirect response, so we'll just do a normal redirect
|
||||
console.error(`failed to get redirect response`, err);
|
||||
}
|
||||
}
|
||||
return RenderResult.fromStatic('{}');
|
||||
}
|
||||
;
|
||||
/**
|
||||
* Ensures the value of the header can't create long logs.
|
||||
*/ function limitUntrustedHeaderValueForLogs(value) {
|
||||
return value.length > 100 ? value.slice(0, 100) + '...' : value;
|
||||
}
|
||||
export function parseHostHeader(headers, originDomain) {
|
||||
var _forwardedHostHeader_split_, _forwardedHostHeader_split;
|
||||
const forwardedHostHeader = headers['x-forwarded-host'];
|
||||
const forwardedHostHeaderValue = forwardedHostHeader && Array.isArray(forwardedHostHeader) ? forwardedHostHeader[0] : forwardedHostHeader == null ? void 0 : (_forwardedHostHeader_split = forwardedHostHeader.split(',')) == null ? void 0 : (_forwardedHostHeader_split_ = _forwardedHostHeader_split[0]) == null ? void 0 : _forwardedHostHeader_split_.trim();
|
||||
const hostHeader = headers['host'];
|
||||
if (originDomain) {
|
||||
return forwardedHostHeaderValue === originDomain ? {
|
||||
type: "x-forwarded-host",
|
||||
value: forwardedHostHeaderValue
|
||||
} : hostHeader === originDomain ? {
|
||||
type: "host",
|
||||
value: hostHeader
|
||||
} : undefined;
|
||||
}
|
||||
return forwardedHostHeaderValue ? {
|
||||
type: "x-forwarded-host",
|
||||
value: forwardedHostHeaderValue
|
||||
} : hostHeader ? {
|
||||
type: "host",
|
||||
value: hostHeader
|
||||
} : undefined;
|
||||
}
|
||||
export async function handleAction({ req, res, ComponentMod, serverModuleMap, generateFlight, workStore, requestStore, serverActions, ctx }) {
|
||||
const contentType = req.headers['content-type'];
|
||||
const { serverActionsManifest, page } = ctx.renderOpts;
|
||||
const { actionId, isURLEncodedAction, isMultipartAction, isFetchAction, isPossibleServerAction } = getServerActionRequestMetadata(req);
|
||||
// If it can't be a Server Action, skip handling.
|
||||
// Note that this can be a false positive -- any multipart/urlencoded POST can get us here,
|
||||
// But won't know if it's an MPA action or not until we call `decodeAction` below.
|
||||
if (!isPossibleServerAction) {
|
||||
return;
|
||||
}
|
||||
if (workStore.isStaticGeneration) {
|
||||
throw Object.defineProperty(new Error("Invariant: server actions can't be handled during static rendering"), "__NEXT_ERROR_CODE", {
|
||||
value: "E359",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
let temporaryReferences;
|
||||
const finalizeAndGenerateFlight = (...args)=>{
|
||||
// When we switch to the render phase, cookies() will return
|
||||
// `workUnitStore.cookies` instead of `workUnitStore.userspaceMutableCookies`.
|
||||
// We want the render to see any cookie writes that we performed during the action,
|
||||
// so we need to update the immutable cookies to reflect the changes.
|
||||
synchronizeMutableCookies(requestStore);
|
||||
// The server action might have toggled draft mode, so we need to reflect
|
||||
// that in the work store to be up-to-date for subsequent rendering.
|
||||
workStore.isDraftMode = requestStore.draftMode.isEnabled;
|
||||
return generateFlight(...args);
|
||||
};
|
||||
// When running actions the default is no-store, you can still `cache: 'force-cache'`
|
||||
workStore.fetchCache = 'default-no-store';
|
||||
const originDomain = typeof req.headers['origin'] === 'string' ? new URL(req.headers['origin']).host : undefined;
|
||||
const host = parseHostHeader(req.headers);
|
||||
let warning = undefined;
|
||||
function warnBadServerActionRequest() {
|
||||
if (warning) {
|
||||
warn(warning);
|
||||
}
|
||||
}
|
||||
// This is to prevent CSRF attacks. If `x-forwarded-host` is set, we need to
|
||||
// ensure that the request is coming from the same host.
|
||||
if (!originDomain) {
|
||||
// This might be an old browser that doesn't send `host` header. We ignore
|
||||
// this case.
|
||||
warning = 'Missing `origin` header from a forwarded Server Actions request.';
|
||||
} else if (!host || originDomain !== host.value) {
|
||||
// If the customer sets a list of allowed origins, we'll allow the request.
|
||||
// These are considered safe but might be different from forwarded host set
|
||||
// by the infra (i.e. reverse proxies).
|
||||
if (isCsrfOriginAllowed(originDomain, serverActions == null ? void 0 : serverActions.allowedOrigins)) {
|
||||
// Ignore it
|
||||
} else {
|
||||
if (host) {
|
||||
// This seems to be an CSRF attack. We should not proceed the action.
|
||||
console.error(`\`${host.type}\` header with value \`${limitUntrustedHeaderValueForLogs(host.value)}\` does not match \`origin\` header with value \`${limitUntrustedHeaderValueForLogs(originDomain)}\` from a forwarded Server Actions request. Aborting the action.`);
|
||||
} else {
|
||||
// This is an attack. We should not proceed the action.
|
||||
console.error(`\`x-forwarded-host\` or \`host\` headers are not provided. One of these is needed to compare the \`origin\` header from a forwarded Server Actions request. Aborting the action.`);
|
||||
}
|
||||
const error = Object.defineProperty(new Error('Invalid Server Actions request.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E80",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
if (isFetchAction) {
|
||||
res.statusCode = 500;
|
||||
await executeRevalidates(workStore);
|
||||
const promise = Promise.reject(error);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await finalizeAndGenerateFlight(req, ctx, requestStore, {
|
||||
actionResult: promise,
|
||||
// if the page was not revalidated, we can skip the rendering the flight tree
|
||||
skipFlight: !workStore.pathWasRevalidated,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// ensure we avoid caching server actions unexpectedly
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate');
|
||||
let boundActionArguments = [];
|
||||
const { actionAsyncStorage } = ComponentMod;
|
||||
let actionResult;
|
||||
let formState;
|
||||
let actionModId;
|
||||
const actionWasForwarded = Boolean(req.headers['x-action-forwarded']);
|
||||
if (actionId) {
|
||||
const forwardedWorker = selectWorkerForForwarding(actionId, page, serverActionsManifest);
|
||||
// If forwardedWorker is truthy, it means there isn't a worker for the action
|
||||
// in the current handler, so we forward the request to a worker that has the action.
|
||||
if (forwardedWorker) {
|
||||
return {
|
||||
type: 'done',
|
||||
result: await createForwardedActionResponse(req, res, host, forwardedWorker, ctx.renderOpts.basePath, workStore)
|
||||
};
|
||||
}
|
||||
}
|
||||
try {
|
||||
await actionAsyncStorage.run({
|
||||
isAction: true
|
||||
}, async ()=>{
|
||||
if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME === 'edge' && isWebNextRequest(req)) {
|
||||
if (!req.body) {
|
||||
throw Object.defineProperty(new Error('invariant: Missing request body.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E364",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// TODO: add body limit
|
||||
// Use react-server-dom-webpack/server.edge
|
||||
const { createTemporaryReferenceSet, decodeReply, decodeAction, decodeFormState } = ComponentMod;
|
||||
temporaryReferences = createTemporaryReferenceSet();
|
||||
if (isMultipartAction) {
|
||||
// TODO-APP: Add streaming support
|
||||
const formData = await req.request.formData();
|
||||
if (isFetchAction) {
|
||||
boundActionArguments = await decodeReply(formData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
} else {
|
||||
const action = await decodeAction(formData, serverModuleMap);
|
||||
if (typeof action === 'function') {
|
||||
// Only warn if it's a server action, otherwise skip for other post requests
|
||||
warnBadServerActionRequest();
|
||||
let actionReturnedState;
|
||||
requestStore.phase = 'action';
|
||||
try {
|
||||
actionReturnedState = await workUnitAsyncStorage.run(requestStore, action);
|
||||
} finally{
|
||||
requestStore.phase = 'render';
|
||||
}
|
||||
formState = await decodeFormState(actionReturnedState, formData, serverModuleMap);
|
||||
}
|
||||
// Skip the fetch path
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
if (actionId !== null) {
|
||||
console.error(err);
|
||||
}
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
}
|
||||
const chunks = [];
|
||||
const reader = req.body.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
chunks.push(value);
|
||||
}
|
||||
const actionData = Buffer.concat(chunks).toString('utf-8');
|
||||
if (isURLEncodedAction) {
|
||||
const formData = formDataFromSearchQueryString(actionData);
|
||||
boundActionArguments = await decodeReply(formData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
} else {
|
||||
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (// The type check here ensures that `req` is correctly typed, and the
|
||||
// environment variable check provides dead code elimination.
|
||||
process.env.NEXT_RUNTIME !== 'edge' && isNodeNextRequest(req)) {
|
||||
// Use react-server-dom-webpack/server.node which supports streaming
|
||||
const { createTemporaryReferenceSet, decodeReply, decodeReplyFromBusboy, decodeAction, decodeFormState } = require(`./react-server.node`);
|
||||
temporaryReferences = createTemporaryReferenceSet();
|
||||
const { Transform } = require('node:stream');
|
||||
const defaultBodySizeLimit = '1 MB';
|
||||
const bodySizeLimit = (serverActions == null ? void 0 : serverActions.bodySizeLimit) ?? defaultBodySizeLimit;
|
||||
const bodySizeLimitBytes = bodySizeLimit !== defaultBodySizeLimit ? require('next/dist/compiled/bytes').parse(bodySizeLimit) : 1024 * 1024 // 1 MB
|
||||
;
|
||||
let size = 0;
|
||||
const body = req.body.pipe(new Transform({
|
||||
transform (chunk, encoding, callback) {
|
||||
size += Buffer.byteLength(chunk, encoding);
|
||||
if (size > bodySizeLimitBytes) {
|
||||
const { ApiError } = require('../api-utils');
|
||||
callback(Object.defineProperty(new ApiError(413, `Body exceeded ${bodySizeLimit} limit.
|
||||
To configure the body size limit for Server Actions, see: https://nextjs.org/docs/app/api-reference/next-config-js/serverActions#bodysizelimit`), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
return;
|
||||
}
|
||||
callback(null, chunk);
|
||||
}
|
||||
}));
|
||||
if (isMultipartAction) {
|
||||
if (isFetchAction) {
|
||||
const busboy = require('busboy')({
|
||||
defParamCharset: 'utf8',
|
||||
headers: req.headers,
|
||||
limits: {
|
||||
fieldSize: bodySizeLimitBytes
|
||||
}
|
||||
});
|
||||
body.pipe(busboy);
|
||||
boundActionArguments = await decodeReplyFromBusboy(busboy, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
} else {
|
||||
// React doesn't yet publish a busboy version of decodeAction
|
||||
// so we polyfill the parsing of FormData.
|
||||
const fakeRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
// @ts-expect-error
|
||||
headers: {
|
||||
'Content-Type': contentType
|
||||
},
|
||||
body: new ReadableStream({
|
||||
start: (controller)=>{
|
||||
body.on('data', (chunk)=>{
|
||||
controller.enqueue(new Uint8Array(chunk));
|
||||
});
|
||||
body.on('end', ()=>{
|
||||
controller.close();
|
||||
});
|
||||
body.on('error', (err)=>{
|
||||
controller.error(err);
|
||||
});
|
||||
}
|
||||
}),
|
||||
duplex: 'half'
|
||||
});
|
||||
const formData = await fakeRequest.formData();
|
||||
const action = await decodeAction(formData, serverModuleMap);
|
||||
if (typeof action === 'function') {
|
||||
// Only warn if it's a server action, otherwise skip for other post requests
|
||||
warnBadServerActionRequest();
|
||||
let actionReturnedState;
|
||||
requestStore.phase = 'action';
|
||||
try {
|
||||
actionReturnedState = await workUnitAsyncStorage.run(requestStore, action);
|
||||
} finally{
|
||||
requestStore.phase = 'render';
|
||||
}
|
||||
formState = await decodeFormState(actionReturnedState, formData, serverModuleMap);
|
||||
}
|
||||
// Skip the fetch path
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
actionModId = getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
if (actionId !== null) {
|
||||
console.error(err);
|
||||
}
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
}
|
||||
const chunks = [];
|
||||
for await (const chunk of req.body){
|
||||
chunks.push(Buffer.from(chunk));
|
||||
}
|
||||
const actionData = Buffer.concat(chunks).toString('utf-8');
|
||||
if (isURLEncodedAction) {
|
||||
const formData = formDataFromSearchQueryString(actionData);
|
||||
boundActionArguments = await decodeReply(formData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
} else {
|
||||
boundActionArguments = await decodeReply(actionData, serverModuleMap, {
|
||||
temporaryReferences
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw Object.defineProperty(new Error('Invariant: Unknown request type.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E114",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
// actions.js
|
||||
// app/page.js
|
||||
// action worker1
|
||||
// appRender1
|
||||
// app/foo/page.js
|
||||
// action worker2
|
||||
// appRender
|
||||
// / -> fire action -> POST / -> appRender1 -> modId for the action file
|
||||
// /foo -> fire action -> POST /foo -> appRender2 -> modId for the action file
|
||||
try {
|
||||
actionModId = actionModId ?? getActionModIdOrError(actionId, serverModuleMap);
|
||||
} catch (err) {
|
||||
if (actionId !== null) {
|
||||
console.error(err);
|
||||
}
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
}
|
||||
const actionMod = await ComponentMod.__next_app__.require(actionModId);
|
||||
const actionHandler = actionMod[// `actionId` must exist if we got here, as otherwise we would have thrown an error above
|
||||
actionId];
|
||||
let returnVal;
|
||||
requestStore.phase = 'action';
|
||||
try {
|
||||
returnVal = await workUnitAsyncStorage.run(requestStore, ()=>actionHandler.apply(null, boundActionArguments));
|
||||
} finally{
|
||||
requestStore.phase = 'render';
|
||||
}
|
||||
// For form actions, we need to continue rendering the page.
|
||||
if (isFetchAction) {
|
||||
await executeRevalidates(workStore);
|
||||
addRevalidationHeader(res, {
|
||||
workStore,
|
||||
requestStore
|
||||
});
|
||||
actionResult = await finalizeAndGenerateFlight(req, ctx, requestStore, {
|
||||
actionResult: Promise.resolve(returnVal),
|
||||
// if the page was not revalidated, or if the action was forwarded from another worker, we can skip the rendering the flight tree
|
||||
skipFlight: !workStore.pathWasRevalidated || actionWasForwarded,
|
||||
temporaryReferences
|
||||
});
|
||||
}
|
||||
});
|
||||
return {
|
||||
type: 'done',
|
||||
result: actionResult,
|
||||
formState
|
||||
};
|
||||
} catch (err) {
|
||||
if (isRedirectError(err)) {
|
||||
const redirectUrl = getURLFromRedirectError(err);
|
||||
const redirectType = getRedirectTypeFromError(err);
|
||||
await executeRevalidates(workStore);
|
||||
addRevalidationHeader(res, {
|
||||
workStore,
|
||||
requestStore
|
||||
});
|
||||
// if it's a fetch action, we'll set the status code for logging/debugging purposes
|
||||
// but we won't set a Location header, as the redirect will be handled by the client router
|
||||
res.statusCode = RedirectStatusCode.SeeOther;
|
||||
if (isFetchAction) {
|
||||
return {
|
||||
type: 'done',
|
||||
result: await createRedirectRenderResult(req, res, host, redirectUrl, redirectType, ctx.renderOpts.basePath, workStore)
|
||||
};
|
||||
}
|
||||
res.setHeader('Location', redirectUrl);
|
||||
return {
|
||||
type: 'done',
|
||||
result: RenderResult.fromStatic('')
|
||||
};
|
||||
} else if (isHTTPAccessFallbackError(err)) {
|
||||
res.statusCode = getAccessFallbackHTTPStatus(err);
|
||||
await executeRevalidates(workStore);
|
||||
addRevalidationHeader(res, {
|
||||
workStore,
|
||||
requestStore
|
||||
});
|
||||
if (isFetchAction) {
|
||||
const promise = Promise.reject(err);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await finalizeAndGenerateFlight(req, ctx, requestStore, {
|
||||
skipFlight: false,
|
||||
actionResult: promise,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
return {
|
||||
type: 'not-found'
|
||||
};
|
||||
}
|
||||
if (isFetchAction) {
|
||||
res.statusCode = 500;
|
||||
await executeRevalidates(workStore);
|
||||
const promise = Promise.reject(err);
|
||||
try {
|
||||
// we need to await the promise to trigger the rejection early
|
||||
// so that it's already handled by the time we call
|
||||
// the RSC runtime. Otherwise, it will throw an unhandled
|
||||
// promise rejection error in the renderer.
|
||||
await promise;
|
||||
} catch {
|
||||
// swallow error, it's gonna be handled on the client
|
||||
}
|
||||
return {
|
||||
type: 'done',
|
||||
result: await generateFlight(req, ctx, requestStore, {
|
||||
actionResult: promise,
|
||||
// if the page was not revalidated, or if the action was forwarded from another worker, we can skip the rendering the flight tree
|
||||
skipFlight: !workStore.pathWasRevalidated || actionWasForwarded,
|
||||
temporaryReferences
|
||||
})
|
||||
};
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Attempts to find the module ID for the action from the module map. When this fails, it could be a deployment skew where
|
||||
* the action came from a different deployment. It could also simply be an invalid POST request that is not a server action.
|
||||
* In either case, we'll throw an error to be handled by the caller.
|
||||
*/ function getActionModIdOrError(actionId, serverModuleMap) {
|
||||
var _serverModuleMap_actionId;
|
||||
// if we're missing the action ID header, we can't do any further processing
|
||||
if (!actionId) {
|
||||
throw Object.defineProperty(new InvariantError("Missing 'next-action' header."), "__NEXT_ERROR_CODE", {
|
||||
value: "E664",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const actionModId = (_serverModuleMap_actionId = serverModuleMap[actionId]) == null ? void 0 : _serverModuleMap_actionId.id;
|
||||
if (!actionModId) {
|
||||
throw Object.defineProperty(new Error(`Failed to find Server Action "${actionId}". This request might be from an older or newer deployment.\nRead more: https://nextjs.org/docs/messages/failed-to-find-server-action`), "__NEXT_ERROR_CODE", {
|
||||
value: "E665",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return actionModId;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=action-handler.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-handler.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-handler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
75
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-utils.js
generated
vendored
Normal file
75
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-utils.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
import { normalizeAppPath } from '../../shared/lib/router/utils/app-paths';
|
||||
import { pathHasPrefix } from '../../shared/lib/router/utils/path-has-prefix';
|
||||
import { removePathPrefix } from '../../shared/lib/router/utils/remove-path-prefix';
|
||||
import { workAsyncStorage } from './work-async-storage.external';
|
||||
// This function creates a Flight-acceptable server module map proxy from our
|
||||
// Server Reference Manifest similar to our client module map.
|
||||
// This is because our manifest contains a lot of internal Next.js data that
|
||||
// are relevant to the runtime, workers, etc. that React doesn't need to know.
|
||||
export function createServerModuleMap({ serverActionsManifest }) {
|
||||
return new Proxy({}, {
|
||||
get: (_, id)=>{
|
||||
var _serverActionsManifest__id, _serverActionsManifest_;
|
||||
const workers = (_serverActionsManifest_ = serverActionsManifest[process.env.NEXT_RUNTIME === 'edge' ? 'edge' : 'node']) == null ? void 0 : (_serverActionsManifest__id = _serverActionsManifest_[id]) == null ? void 0 : _serverActionsManifest__id.workers;
|
||||
if (!workers) {
|
||||
return undefined;
|
||||
}
|
||||
const workStore = workAsyncStorage.getStore();
|
||||
let workerEntry;
|
||||
if (workStore) {
|
||||
workerEntry = workers[normalizeWorkerPageName(workStore.page)];
|
||||
} else {
|
||||
// If there's no work store defined, we can assume that a server
|
||||
// module map is needed during module evaluation, e.g. to create a
|
||||
// server action using a higher-order function. Therefore it should be
|
||||
// safe to return any entry from the manifest that matches the action
|
||||
// ID. They all refer to the same module ID, which must also exist in
|
||||
// the current page bundle. TODO: This is currently not guaranteed in
|
||||
// Turbopack, and needs to be fixed.
|
||||
workerEntry = Object.values(workers).at(0);
|
||||
}
|
||||
if (!workerEntry) {
|
||||
return undefined;
|
||||
}
|
||||
const { moduleId, async } = workerEntry;
|
||||
return {
|
||||
id: moduleId,
|
||||
name: id,
|
||||
chunks: [],
|
||||
async
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Checks if the requested action has a worker for the current page.
|
||||
* If not, it returns the first worker that has a handler for the action.
|
||||
*/ export function selectWorkerForForwarding(actionId, pageName, serverActionsManifest) {
|
||||
var _serverActionsManifest__actionId;
|
||||
const workers = (_serverActionsManifest__actionId = serverActionsManifest[process.env.NEXT_RUNTIME === 'edge' ? 'edge' : 'node'][actionId]) == null ? void 0 : _serverActionsManifest__actionId.workers;
|
||||
const workerName = normalizeWorkerPageName(pageName);
|
||||
// no workers, nothing to forward to
|
||||
if (!workers) return;
|
||||
// if there is a worker for this page, no need to forward it.
|
||||
if (workers[workerName]) {
|
||||
return;
|
||||
}
|
||||
// otherwise, grab the first worker that has a handler for this action id
|
||||
return denormalizeWorkerPageName(Object.keys(workers)[0]);
|
||||
}
|
||||
/**
|
||||
* The flight entry loader keys actions by bundlePath.
|
||||
* bundlePath corresponds with the relative path (including 'app') to the page entrypoint.
|
||||
*/ function normalizeWorkerPageName(pageName) {
|
||||
if (pathHasPrefix(pageName, 'app')) {
|
||||
return pageName;
|
||||
}
|
||||
return 'app' + pageName;
|
||||
}
|
||||
/**
|
||||
* Converts a bundlePath (relative path to the entrypoint) to a routable page name
|
||||
*/ function denormalizeWorkerPageName(bundlePath) {
|
||||
return normalizeAppPath(removePathPrefix(bundlePath, 'app'));
|
||||
}
|
||||
|
||||
//# sourceMappingURL=action-utils.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-utils.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/action-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage-instance.js
generated
vendored
Normal file
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage-instance.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import { createAsyncLocalStorage } from './async-local-storage';
|
||||
export const afterTaskAsyncStorageInstance = createAsyncLocalStorage();
|
||||
|
||||
//# sourceMappingURL=after-task-async-storage-instance.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage-instance.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["createAsyncLocalStorage","afterTaskAsyncStorageInstance"],"mappings":"AACA,SAASA,uBAAuB,QAAQ,wBAAuB;AAE/D,OAAO,MAAMC,gCACXD,0BAAyB"}
|
||||
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage.external.js
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage.external.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// Share the instance module in the next-shared layer
|
||||
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with {
|
||||
'turbopack-transition': 'next-shared'
|
||||
};
|
||||
export { afterTaskAsyncStorage };
|
||||
|
||||
//# sourceMappingURL=after-task-async-storage.external.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage.external.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/after-task-async-storage.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorageInstance","afterTaskAsyncStorage"],"mappings":"AAEA,qDAAqD;AACrD,SAASA,iCAAiCC,qBAAqB,QAAQ,2CAA2C;IAAE,wBAAwB;AAAc,EAAC;AAc3J,SAASA,qBAAqB,GAAE"}
|
||||
402
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-prerender-utils.js
generated
vendored
Normal file
402
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-prerender-utils.js
generated
vendored
Normal file
@@ -0,0 +1,402 @@
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
import { isPrerenderInterruptedError } from './dynamic-rendering';
|
||||
/**
|
||||
* This is a utility function to make scheduling sequential tasks that run back to back easier.
|
||||
* We schedule on the same queue (setImmediate) at the same time to ensure no other events can sneak in between.
|
||||
*/ export function prerenderAndAbortInSequentialTasks(prerender, abort) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new InvariantError('`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E538",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
let pendingResult;
|
||||
setImmediate(()=>{
|
||||
try {
|
||||
pendingResult = prerender();
|
||||
pendingResult.catch(()=>{});
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
setImmediate(()=>{
|
||||
abort();
|
||||
resolve(pendingResult);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
export function prerenderServerWithPhases(signal, render, ...remainingPhases) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new InvariantError('`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E538",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
let result;
|
||||
signal.addEventListener('abort', ()=>{
|
||||
if (isPrerenderInterruptedError(signal.reason)) {
|
||||
result.markInterrupted();
|
||||
} else {
|
||||
result.markComplete();
|
||||
}
|
||||
}, {
|
||||
once: true
|
||||
});
|
||||
setImmediate(()=>{
|
||||
try {
|
||||
result = new ServerPrerenderStreamResult(render());
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
function runFinalTask() {
|
||||
try {
|
||||
if (result) {
|
||||
result.markComplete();
|
||||
this();
|
||||
}
|
||||
resolve(result);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
function runNextTask() {
|
||||
try {
|
||||
if (result) {
|
||||
result.markPhase();
|
||||
this();
|
||||
}
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
let i = 0;
|
||||
for(; i < remainingPhases.length - 1; i++){
|
||||
const phase = remainingPhases[i];
|
||||
setImmediate(runNextTask.bind(phase));
|
||||
}
|
||||
if (remainingPhases[i]) {
|
||||
const finalPhase = remainingPhases[i];
|
||||
setImmediate(runFinalTask.bind(finalPhase));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
const PENDING = 0;
|
||||
const COMPLETE = 1;
|
||||
const INTERRUPTED = 2;
|
||||
const ERRORED = 3;
|
||||
export class ServerPrerenderStreamResult {
|
||||
constructor(stream){
|
||||
this.status = PENDING;
|
||||
this.reason = null;
|
||||
this.trailingChunks = [];
|
||||
this.currentChunks = [];
|
||||
this.chunksByPhase = [
|
||||
this.currentChunks
|
||||
];
|
||||
const reader = stream.getReader();
|
||||
const progress = ({ done, value })=>{
|
||||
if (done) {
|
||||
if (this.status === PENDING) {
|
||||
this.status = COMPLETE;
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.status === PENDING || this.status === INTERRUPTED) {
|
||||
this.currentChunks.push(value);
|
||||
} else {
|
||||
this.trailingChunks.push(value);
|
||||
}
|
||||
reader.read().then(progress, error);
|
||||
};
|
||||
const error = (reason)=>{
|
||||
this.status = ERRORED;
|
||||
this.reason = reason;
|
||||
};
|
||||
reader.read().then(progress, error);
|
||||
}
|
||||
markPhase() {
|
||||
this.currentChunks = [];
|
||||
this.chunksByPhase.push(this.currentChunks);
|
||||
}
|
||||
markComplete() {
|
||||
if (this.status === PENDING) {
|
||||
this.status = COMPLETE;
|
||||
}
|
||||
}
|
||||
markInterrupted() {
|
||||
this.status = INTERRUPTED;
|
||||
}
|
||||
/**
|
||||
* Returns a stream which only releases chunks when `releasePhase` is called. This stream will never "complete" because
|
||||
* we rely upon the stream remaining open when prerendering to avoid triggering errors for incomplete chunks in the client.
|
||||
*
|
||||
* asPhasedStream is expected to be called once per result however it is safe to call multiple times as long as we have not
|
||||
* transferred the underlying data. Generally this will only happen when streaming to a response
|
||||
*/ asPhasedStream() {
|
||||
switch(this.status){
|
||||
case COMPLETE:
|
||||
case INTERRUPTED:
|
||||
return new PhasedStream(this.chunksByPhase);
|
||||
default:
|
||||
throw Object.defineProperty(new InvariantError(`ServerPrerenderStreamResult cannot be consumed as a stream because it is not yet complete. status: ${this.status}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E612",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns a stream which will release all chunks immediately. This stream will "complete" synchronously. It should be used outside
|
||||
* of render use cases like loading client chunks ahead of SSR or writing the streamed content to disk.
|
||||
*/ asStream() {
|
||||
switch(this.status){
|
||||
case COMPLETE:
|
||||
case INTERRUPTED:
|
||||
const chunksByPhase = this.chunksByPhase;
|
||||
const trailingChunks = this.trailingChunks;
|
||||
return new ReadableStream({
|
||||
start (controller) {
|
||||
for(let i = 0; i < chunksByPhase.length; i++){
|
||||
const chunks = chunksByPhase[i];
|
||||
for(let j = 0; j < chunks.length; j++){
|
||||
controller.enqueue(chunks[j]);
|
||||
}
|
||||
}
|
||||
for(let i = 0; i < trailingChunks.length; i++){
|
||||
controller.enqueue(trailingChunks[i]);
|
||||
}
|
||||
controller.close();
|
||||
}
|
||||
});
|
||||
default:
|
||||
throw Object.defineProperty(new InvariantError(`ServerPrerenderStreamResult cannot be consumed as a stream because it is not yet complete. status: ${this.status}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E612",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
class PhasedStream extends ReadableStream {
|
||||
constructor(chunksByPhase){
|
||||
if (chunksByPhase.length === 0) {
|
||||
throw Object.defineProperty(new InvariantError('PhasedStream expected at least one phase but none were found.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E574",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
let destination;
|
||||
super({
|
||||
start (controller) {
|
||||
destination = controller;
|
||||
}
|
||||
});
|
||||
// the start function above is called synchronously during construction so we will always have a destination
|
||||
// We wait to assign it until after the super call because we cannot access `this` before calling super
|
||||
this.destination = destination;
|
||||
this.nextPhase = 0;
|
||||
this.chunksByPhase = chunksByPhase;
|
||||
this.releasePhase();
|
||||
}
|
||||
releasePhase() {
|
||||
if (this.nextPhase < this.chunksByPhase.length) {
|
||||
const chunks = this.chunksByPhase[this.nextPhase++];
|
||||
for(let i = 0; i < chunks.length; i++){
|
||||
this.destination.enqueue(chunks[i]);
|
||||
}
|
||||
} else {
|
||||
throw Object.defineProperty(new InvariantError('PhasedStream expected more phases to release but none were found.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E541",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
assertExhausted() {
|
||||
if (this.nextPhase < this.chunksByPhase.length) {
|
||||
throw Object.defineProperty(new InvariantError('PhasedStream expected no more phases to release but some were found.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E584",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
export function prerenderClientWithPhases(render, ...remainingPhases) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new InvariantError('`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E538",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
let pendingResult;
|
||||
setImmediate(()=>{
|
||||
try {
|
||||
pendingResult = render();
|
||||
pendingResult.catch((err)=>reject(err));
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
function runFinalTask() {
|
||||
try {
|
||||
this();
|
||||
resolve(pendingResult);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
function runNextTask() {
|
||||
try {
|
||||
this();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
let i = 0;
|
||||
for(; i < remainingPhases.length - 1; i++){
|
||||
const phase = remainingPhases[i];
|
||||
setImmediate(runNextTask.bind(phase));
|
||||
}
|
||||
if (remainingPhases[i]) {
|
||||
const finalPhase = remainingPhases[i];
|
||||
setImmediate(runFinalTask.bind(finalPhase));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// React's RSC prerender function will emit an incomplete flight stream when using `prerender`. If the connection
|
||||
// closes then whatever hanging chunks exist will be errored. This is because prerender (an experimental feature)
|
||||
// has not yet implemented a concept of resume. For now we will simulate a paused connection by wrapping the stream
|
||||
// in one that doesn't close even when the underlying is complete.
|
||||
export class ReactServerResult {
|
||||
constructor(stream){
|
||||
this._stream = stream;
|
||||
}
|
||||
tee() {
|
||||
if (this._stream === null) {
|
||||
throw Object.defineProperty(new Error('Cannot tee a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
|
||||
value: "E106",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const tee = this._stream.tee();
|
||||
this._stream = tee[0];
|
||||
return tee[1];
|
||||
}
|
||||
consume() {
|
||||
if (this._stream === null) {
|
||||
throw Object.defineProperty(new Error('Cannot consume a ReactServerResult that has already been consumed'), "__NEXT_ERROR_CODE", {
|
||||
value: "E470",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const stream = this._stream;
|
||||
this._stream = null;
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
export async function createReactServerPrerenderResult(underlying) {
|
||||
const chunks = [];
|
||||
const { prelude } = await underlying;
|
||||
const reader = prelude.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
return new ReactServerPrerenderResult(chunks);
|
||||
} else {
|
||||
chunks.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
export async function createReactServerPrerenderResultFromRender(underlying) {
|
||||
const chunks = [];
|
||||
const reader = underlying.getReader();
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
break;
|
||||
} else {
|
||||
chunks.push(value);
|
||||
}
|
||||
}
|
||||
return new ReactServerPrerenderResult(chunks);
|
||||
}
|
||||
export class ReactServerPrerenderResult {
|
||||
assertChunks(expression) {
|
||||
if (this._chunks === null) {
|
||||
throw Object.defineProperty(new InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E593",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return this._chunks;
|
||||
}
|
||||
consumeChunks(expression) {
|
||||
const chunks = this.assertChunks(expression);
|
||||
this.consume();
|
||||
return chunks;
|
||||
}
|
||||
consume() {
|
||||
this._chunks = null;
|
||||
}
|
||||
constructor(chunks){
|
||||
this._chunks = chunks;
|
||||
}
|
||||
asUnclosingStream() {
|
||||
const chunks = this.assertChunks('asUnclosingStream()');
|
||||
return createUnclosingStream(chunks);
|
||||
}
|
||||
consumeAsUnclosingStream() {
|
||||
const chunks = this.consumeChunks('consumeAsUnclosingStream()');
|
||||
return createUnclosingStream(chunks);
|
||||
}
|
||||
asStream() {
|
||||
const chunks = this.assertChunks('asStream()');
|
||||
return createClosingStream(chunks);
|
||||
}
|
||||
consumeAsStream() {
|
||||
const chunks = this.consumeChunks('consumeAsStream()');
|
||||
return createClosingStream(chunks);
|
||||
}
|
||||
}
|
||||
function createUnclosingStream(chunks) {
|
||||
let i = 0;
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
if (i < chunks.length) {
|
||||
controller.enqueue(chunks[i++]);
|
||||
}
|
||||
// we intentionally keep the stream open. The consumer will clear
|
||||
// out chunks once finished and the remaining memory will be GC'd
|
||||
// when this object goes out of scope
|
||||
}
|
||||
});
|
||||
}
|
||||
function createClosingStream(chunks) {
|
||||
let i = 0;
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
if (i < chunks.length) {
|
||||
controller.enqueue(chunks[i++]);
|
||||
} else {
|
||||
controller.close();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=app-render-prerender-utils.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-prerender-utils.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-prerender-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
30
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-render-utils.js
generated
vendored
Normal file
30
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-render-utils.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
/**
|
||||
* This is a utility function to make scheduling sequential tasks that run back to back easier.
|
||||
* We schedule on the same queue (setImmediate) at the same time to ensure no other events can sneak in between.
|
||||
*/ export function scheduleInSequentialTasks(render, followup) {
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
throw Object.defineProperty(new InvariantError('`scheduleInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E591",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
} else {
|
||||
return new Promise((resolve, reject)=>{
|
||||
let pendingResult;
|
||||
setImmediate(()=>{
|
||||
try {
|
||||
pendingResult = render();
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
setImmediate(()=>{
|
||||
followup();
|
||||
resolve(pendingResult);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=app-render-render-utils.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-render-utils.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render-render-utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/app-render-render-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\n\n/**\n * This is a utility function to make scheduling sequential tasks that run back to back easier.\n * We schedule on the same queue (setImmediate) at the same time to ensure no other events can sneak in between.\n */\nexport function scheduleInSequentialTasks<R>(\n render: () => R | Promise<R>,\n followup: () => void\n): Promise<R> {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n '`scheduleInSequentialTasks` should not be called in edge runtime.'\n )\n } else {\n return new Promise((resolve, reject) => {\n let pendingResult: R | Promise<R>\n setImmediate(() => {\n try {\n pendingResult = render()\n } catch (err) {\n reject(err)\n }\n })\n setImmediate(() => {\n followup()\n resolve(pendingResult)\n })\n })\n }\n}\n"],"names":["InvariantError","scheduleInSequentialTasks","render","followup","process","env","NEXT_RUNTIME","Promise","resolve","reject","pendingResult","setImmediate","err"],"mappings":"AAAA,SAASA,cAAc,QAAQ,mCAAkC;AAEjE;;;CAGC,GACD,OAAO,SAASC,0BACdC,MAA4B,EAC5BC,QAAoB;IAEpB,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIN,eACR,sEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,OAAO,IAAIO,QAAQ,CAACC,SAASC;YAC3B,IAAIC;YACJC,aAAa;gBACX,IAAI;oBACFD,gBAAgBR;gBAClB,EAAE,OAAOU,KAAK;oBACZH,OAAOG;gBACT;YACF;YACAD,aAAa;gBACXR;gBACAK,QAAQE;YACV;QACF;IACF;AACF"}
|
||||
2708
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render.js
generated
vendored
Normal file
2708
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/app-render.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
49
frontend/webapp/node_modules/next/dist/esm/server/app-render/async-local-storage.js
generated
vendored
Normal file
49
frontend/webapp/node_modules/next/dist/esm/server/app-render/async-local-storage.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
|
||||
value: "E504",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
class FakeAsyncLocalStorage {
|
||||
disable() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
getStore() {
|
||||
// This fake implementation of AsyncLocalStorage always returns `undefined`.
|
||||
return undefined;
|
||||
}
|
||||
run() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
exit() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
enterWith() {
|
||||
throw sharedAsyncLocalStorageNotAvailableError;
|
||||
}
|
||||
static bind(fn) {
|
||||
return fn;
|
||||
}
|
||||
}
|
||||
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
|
||||
export function createAsyncLocalStorage() {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return new maybeGlobalAsyncLocalStorage();
|
||||
}
|
||||
return new FakeAsyncLocalStorage();
|
||||
}
|
||||
export function bindSnapshot(fn) {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return maybeGlobalAsyncLocalStorage.bind(fn);
|
||||
}
|
||||
return FakeAsyncLocalStorage.bind(fn);
|
||||
}
|
||||
export function createSnapshot() {
|
||||
if (maybeGlobalAsyncLocalStorage) {
|
||||
return maybeGlobalAsyncLocalStorage.snapshot();
|
||||
}
|
||||
return function(fn, ...args) {
|
||||
return fn(...args);
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=async-local-storage.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/async-local-storage.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/async-local-storage.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(fn: T): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n 
}\n}\n"],"names":["sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","createAsyncLocalStorage","bindSnapshot","createSnapshot","snapshot","args"],"mappings":"AAEA,MAAMA,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAE5E,OAAO,SAASC;IAGd,IAAIH,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEA,OAAO,SAASa,aAAgBL,EAAK;IACnC,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEA,OAAO,SAASM;IAId,IAAIL,8BAA8B;QAChC,OAAOA,6BAA6BM,QAAQ;IAC9C;IACA,OAAO,SAAUP,EAAO,EAAE,GAAGQ,IAAW;QACtC,OAAOR,MAAMQ;IACf;AACF"}
|
||||
80
frontend/webapp/node_modules/next/dist/esm/server/app-render/cache-signal.js
generated
vendored
Normal file
80
frontend/webapp/node_modules/next/dist/esm/server/app-render/cache-signal.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* This class is used to detect when all cache reads for a given render are settled.
|
||||
* We do this to allow for cache warming the prerender without having to continue rendering
|
||||
* the remainder of the page. This feature is really only useful when the dynamicIO flag is on
|
||||
* and should only be used in codepaths gated with this feature.
|
||||
*/ export class CacheSignal {
|
||||
constructor(){
|
||||
this.count = 0;
|
||||
this.earlyListeners = [];
|
||||
this.listeners = [];
|
||||
this.tickPending = false;
|
||||
this.taskPending = false;
|
||||
}
|
||||
noMorePendingCaches() {
|
||||
if (!this.tickPending) {
|
||||
this.tickPending = true;
|
||||
process.nextTick(()=>{
|
||||
this.tickPending = false;
|
||||
if (this.count === 0) {
|
||||
for(let i = 0; i < this.earlyListeners.length; i++){
|
||||
this.earlyListeners[i]();
|
||||
}
|
||||
this.earlyListeners.length = 0;
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!this.taskPending) {
|
||||
this.taskPending = true;
|
||||
setTimeout(()=>{
|
||||
this.taskPending = false;
|
||||
if (this.count === 0) {
|
||||
for(let i = 0; i < this.listeners.length; i++){
|
||||
this.listeners[i]();
|
||||
}
|
||||
this.listeners.length = 0;
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This promise waits until there are no more in progress cache reads but no later.
|
||||
* This allows for adding more cache reads after to delay cacheReady.
|
||||
*/ inputReady() {
|
||||
return new Promise((resolve)=>{
|
||||
this.earlyListeners.push(resolve);
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* If there are inflight cache reads this Promise can resolve in a microtask however
|
||||
* if there are no inflight cache reads then we wait at least one task to allow initial
|
||||
* cache reads to be initiated.
|
||||
*/ cacheReady() {
|
||||
return new Promise((resolve)=>{
|
||||
this.listeners.push(resolve);
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
});
|
||||
}
|
||||
beginRead() {
|
||||
this.count++;
|
||||
}
|
||||
endRead() {
|
||||
// If this is the last read we need to wait a task before we can claim the cache is settled.
|
||||
// The cache read will likely ping a Server Component which can read from the cache again and this
|
||||
// will play out in a microtask so we need to only resolve pending listeners if we're still at 0
|
||||
// after at least one task.
|
||||
// We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
|
||||
// If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency
|
||||
this.count--;
|
||||
if (this.count === 0) {
|
||||
this.noMorePendingCaches();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=cache-signal.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/cache-signal.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/cache-signal.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/cache-signal.ts"],"sourcesContent":["/**\n * This class is used to detect when all cache reads for a given render are settled.\n * We do this to allow for cache warming the prerender without having to continue rendering\n * the remainder of the page. This feature is really only useful when the dynamicIO flag is on\n * and should only be used in codepaths gated with this feature.\n */\n\nexport class CacheSignal {\n private count: number\n private earlyListeners: Array<() => void>\n private listeners: Array<() => void>\n private tickPending: boolean\n private taskPending: boolean\n\n constructor() {\n this.count = 0\n this.earlyListeners = []\n this.listeners = []\n this.tickPending = false\n this.taskPending = false\n }\n\n private noMorePendingCaches() {\n if (!this.tickPending) {\n this.tickPending = true\n process.nextTick(() => {\n this.tickPending = false\n if (this.count === 0) {\n for (let i = 0; i < this.earlyListeners.length; i++) {\n this.earlyListeners[i]()\n }\n this.earlyListeners.length = 0\n }\n })\n }\n if (!this.taskPending) {\n this.taskPending = true\n setTimeout(() => {\n this.taskPending = false\n if (this.count === 0) {\n for (let i = 0; i < this.listeners.length; i++) {\n this.listeners[i]()\n }\n this.listeners.length = 0\n }\n }, 0)\n }\n }\n\n /**\n * This promise waits until there are no more in progress cache reads but no later.\n * This allows for adding more cache reads after to delay cacheReady.\n */\n inputReady() {\n return new Promise<void>((resolve) => {\n this.earlyListeners.push(resolve)\n if (this.count === 0) {\n this.noMorePendingCaches()\n }\n })\n }\n\n /**\n * If there are inflight cache reads this Promise can resolve in a microtask however\n * if there are no inflight cache reads then we wait at least one task to allow initial\n * cache reads to be initiated.\n */\n cacheReady() {\n return new Promise<void>((resolve) => {\n this.listeners.push(resolve)\n if 
(this.count === 0) {\n this.noMorePendingCaches()\n }\n })\n }\n\n beginRead() {\n this.count++\n }\n\n endRead() {\n // If this is the last read we need to wait a task before we can claim the cache is settled.\n // The cache read will likely ping a Server Component which can read from the cache again and this\n // will play out in a microtask so we need to only resolve pending listeners if we're still at 0\n // after at least one task.\n // We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.\n // If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency\n this.count--\n if (this.count === 0) {\n this.noMorePendingCaches()\n }\n }\n}\n"],"names":["CacheSignal","constructor","count","earlyListeners","listeners","tickPending","taskPending","noMorePendingCaches","process","nextTick","i","length","setTimeout","inputReady","Promise","resolve","push","cacheReady","beginRead","endRead"],"mappings":"AAAA;;;;;CAKC,GAED,OAAO,MAAMA;IAOXC,aAAc;QACZ,IAAI,CAACC,KAAK,GAAG;QACb,IAAI,CAACC,cAAc,GAAG,EAAE;QACxB,IAAI,CAACC,SAAS,GAAG,EAAE;QACnB,IAAI,CAACC,WAAW,GAAG;QACnB,IAAI,CAACC,WAAW,GAAG;IACrB;IAEQC,sBAAsB;QAC5B,IAAI,CAAC,IAAI,CAACF,WAAW,EAAE;YACrB,IAAI,CAACA,WAAW,GAAG;YACnBG,QAAQC,QAAQ,CAAC;gBACf,IAAI,CAACJ,WAAW,GAAG;gBACnB,IAAI,IAAI,CAACH,KAAK,KAAK,GAAG;oBACpB,IAAK,IAAIQ,IAAI,GAAGA,IAAI,IAAI,CAACP,cAAc,CAACQ,MAAM,EAAED,IAAK;wBACnD,IAAI,CAACP,cAAc,CAACO,EAAE;oBACxB;oBACA,IAAI,CAACP,cAAc,CAACQ,MAAM,GAAG;gBAC/B;YACF;QACF;QACA,IAAI,CAAC,IAAI,CAACL,WAAW,EAAE;YACrB,IAAI,CAACA,WAAW,GAAG;YACnBM,WAAW;gBACT,IAAI,CAACN,WAAW,GAAG;gBACnB,IAAI,IAAI,CAACJ,KAAK,KAAK,GAAG;oBACpB,IAAK,IAAIQ,IAAI,GAAGA,IAAI,IAAI,CAACN,SAAS,CAACO,MAAM,EAAED,IAAK;wBAC9C,IAAI,CAACN,SAAS,CAACM,EAAE;oBACnB;oBACA,IAAI,CAACN,SAAS,CAACO,MAAM,GAAG;gBAC1B;YACF,GAAG;QACL;IACF;IAEA;;;GAGC,GACDE,aAAa;QACX,OAAO,IAAIC,QAAc,CAACC;YACxB,IAAI,CAACZ,cAAc,CAACa,IAAI,CAACD;YACzB,IAAI,IAAI,CAACb,KAAK,KAAK,GAAG;gBACpB,IAAI,CAAC
K,mBAAmB;YAC1B;QACF;IACF;IAEA;;;;GAIC,GACDU,aAAa;QACX,OAAO,IAAIH,QAAc,CAACC;YACxB,IAAI,CAACX,SAAS,CAACY,IAAI,CAACD;YACpB,IAAI,IAAI,CAACb,KAAK,KAAK,GAAG;gBACpB,IAAI,CAACK,mBAAmB;YAC1B;QACF;IACF;IAEAW,YAAY;QACV,IAAI,CAAChB,KAAK;IACZ;IAEAiB,UAAU;QACR,4FAA4F;QAC5F,kGAAkG;QAClG,gGAAgG;QAChG,2BAA2B;QAC3B,+GAA+G;QAC/G,mHAAmH;QACnH,IAAI,CAACjB,KAAK;QACV,IAAI,IAAI,CAACA,KAAK,KAAK,GAAG;YACpB,IAAI,CAACK,mBAAmB;QAC1B;IACF;AACF"}
|
||||
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot-instance.js
generated
vendored
Normal file
4
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot-instance.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import { createSnapshot } from '../app-render/async-local-storage';
|
||||
export const runInCleanSnapshot = createSnapshot();
|
||||
|
||||
//# sourceMappingURL=clean-async-snapshot-instance.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot-instance.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot-instance.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/clean-async-snapshot-instance.ts"],"sourcesContent":["import { createSnapshot } from '../app-render/async-local-storage'\n\nexport const runInCleanSnapshot: <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R = createSnapshot()\n"],"names":["createSnapshot","runInCleanSnapshot"],"mappings":"AAAA,SAASA,cAAc,QAAQ,oCAAmC;AAElE,OAAO,MAAMC,qBAGJD,iBAAgB"}
|
||||
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot.external.js
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot.external.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// Share the instance module in the next-shared layer
|
||||
import { runInCleanSnapshot } from './clean-async-snapshot-instance' with {
|
||||
'turbopack-transition': 'next-shared'
|
||||
};
|
||||
export { runInCleanSnapshot };
|
||||
|
||||
//# sourceMappingURL=clean-async-snapshot.external.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot.external.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/clean-async-snapshot.external.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/clean-async-snapshot.external.ts"],"sourcesContent":["// Share the instance module in the next-shared layer\nimport { runInCleanSnapshot } from './clean-async-snapshot-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport { runInCleanSnapshot }\n"],"names":["runInCleanSnapshot"],"mappings":"AAAA,qDAAqD;AACrD,SAASA,kBAAkB,QAAQ,uCAAuC;IAAE,wBAAwB;AAAc,EAAC;AAEnH,SAASA,kBAAkB,GAAE"}
|
||||
259
frontend/webapp/node_modules/next/dist/esm/server/app-render/collect-segment-data.js
generated
vendored
Normal file
259
frontend/webapp/node_modules/next/dist/esm/server/app-render/collect-segment-data.js
generated
vendored
Normal file
@@ -0,0 +1,259 @@
|
||||
import { jsx as _jsx } from "react/jsx-runtime";
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
import { createFromReadableStream } from 'react-server-dom-webpack/client.edge';
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
import { unstable_prerender as prerender } from 'react-server-dom-webpack/static.edge';
|
||||
import { streamFromBuffer, streamToBuffer } from '../stream-utils/node-web-streams-helper';
|
||||
import { waitAtLeastOneReactRenderTask } from '../../lib/scheduler';
|
||||
import { encodeChildSegmentKey, encodeSegment, ROOT_SEGMENT_KEY } from '../../shared/lib/segment-cache/segment-value-encoding';
|
||||
import { getDigestForWellKnownError } from './create-error-handler';
|
||||
function onSegmentPrerenderError(error) {
|
||||
const digest = getDigestForWellKnownError(error);
|
||||
if (digest) {
|
||||
return digest;
|
||||
}
|
||||
// We don't need to log the errors because we would have already done that
|
||||
// when generating the original Flight stream for the whole page.
|
||||
}
|
||||
export async function collectSegmentData(shouldAssumePartialData, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, fallbackRouteParams) {
|
||||
// Traverse the router tree and generate a prefetch response for each segment.
|
||||
// A mutable map to collect the results as we traverse the route tree.
|
||||
const resultMap = new Map();
|
||||
// Before we start, warm up the module cache by decoding the page data once.
|
||||
// Then we can assume that any remaining async tasks that occur the next time
|
||||
// are due to hanging promises caused by dynamic data access. Note we only
|
||||
// have to do this once per page, not per individual segment.
|
||||
//
|
||||
try {
|
||||
await createFromReadableStream(streamFromBuffer(fullPageDataBuffer), {
|
||||
serverConsumerManifest
|
||||
});
|
||||
await waitAtLeastOneReactRenderTask();
|
||||
} catch {}
|
||||
// Create an abort controller that we'll use to stop the stream.
|
||||
const abortController = new AbortController();
|
||||
const onCompletedProcessingRouteTree = async ()=>{
|
||||
// Since all we're doing is decoding and re-encoding a cached prerender, if
|
||||
// serializing the stream takes longer than a microtask, it must because of
|
||||
// hanging promises caused by dynamic data.
|
||||
await waitAtLeastOneReactRenderTask();
|
||||
abortController.abort();
|
||||
};
|
||||
// Generate a stream for the route tree prefetch. While we're walking the
|
||||
// tree, we'll also spawn additional tasks to generate the segment prefetches.
|
||||
// The promises for these tasks are pushed to a mutable array that we will
|
||||
// await once the route tree is fully rendered.
|
||||
const segmentTasks = [];
|
||||
const { prelude: treeStream } = await prerender(// RootTreePrefetch is not a valid return type for a React component, but
|
||||
// we need to use a component so that when we decode the original stream
|
||||
// inside of it, the side effects are transferred to the new stream.
|
||||
// @ts-expect-error
|
||||
/*#__PURE__*/ _jsx(PrefetchTreeData, {
|
||||
shouldAssumePartialData: shouldAssumePartialData,
|
||||
fullPageDataBuffer: fullPageDataBuffer,
|
||||
fallbackRouteParams: fallbackRouteParams,
|
||||
serverConsumerManifest: serverConsumerManifest,
|
||||
clientModules: clientModules,
|
||||
staleTime: staleTime,
|
||||
segmentTasks: segmentTasks,
|
||||
onCompletedProcessingRouteTree: onCompletedProcessingRouteTree
|
||||
}), clientModules, {
|
||||
signal: abortController.signal,
|
||||
onError: onSegmentPrerenderError
|
||||
});
|
||||
// Write the route tree to a special `/_tree` segment.
|
||||
const treeBuffer = await streamToBuffer(treeStream);
|
||||
resultMap.set('/_tree', treeBuffer);
|
||||
// Now that we've finished rendering the route tree, all the segment tasks
|
||||
// should have been spawned. Await them in parallel and write the segment
|
||||
// prefetches to the result map.
|
||||
for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
|
||||
resultMap.set(segmentPath, buffer);
|
||||
}
|
||||
return resultMap;
|
||||
}
|
||||
async function PrefetchTreeData({ shouldAssumePartialData, fullPageDataBuffer, fallbackRouteParams, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree }) {
|
||||
// We're currently rendering a Flight response for the route tree prefetch.
|
||||
// Inside this component, decode the Flight stream for the whole page. This is
|
||||
// a hack to transfer the side effects from the original Flight stream (e.g.
|
||||
// Float preloads) onto the Flight stream for the tree prefetch.
|
||||
// TODO: React needs a better way to do this. Needed for Server Actions, too.
|
||||
const initialRSCPayload = await createFromReadableStream(createUnclosingPrefetchStream(streamFromBuffer(fullPageDataBuffer)), {
|
||||
serverConsumerManifest
|
||||
});
|
||||
const buildId = initialRSCPayload.b;
|
||||
// FlightDataPath is an unsound type, hence the additional checks.
|
||||
const flightDataPaths = initialRSCPayload.f;
|
||||
if (flightDataPaths.length !== 1 && flightDataPaths[0].length !== 3) {
|
||||
console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
|
||||
return null;
|
||||
}
|
||||
const flightRouterState = flightDataPaths[0][0];
|
||||
const seedData = flightDataPaths[0][1];
|
||||
const head = flightDataPaths[0][2];
|
||||
// Compute the route metadata tree by traversing the FlightRouterState. As we
|
||||
// walk the tree, we will also spawn a task to produce a prefetch response for
|
||||
// each segment.
|
||||
const tree = collectSegmentDataImpl(shouldAssumePartialData, flightRouterState, buildId, seedData, fallbackRouteParams, fullPageDataBuffer, clientModules, serverConsumerManifest, ROOT_SEGMENT_KEY, segmentTasks);
|
||||
const isHeadPartial = shouldAssumePartialData || await isPartialRSCData(head, clientModules);
|
||||
// Notify the abort controller that we're done processing the route tree.
|
||||
// Anything async that happens after this point must be due to hanging
|
||||
// promises in the original stream.
|
||||
onCompletedProcessingRouteTree();
|
||||
// Render the route tree to a special `/_tree` segment.
|
||||
const treePrefetch = {
|
||||
buildId,
|
||||
tree,
|
||||
head,
|
||||
isHeadPartial,
|
||||
staleTime
|
||||
};
|
||||
return treePrefetch;
|
||||
}
|
||||
function collectSegmentDataImpl(shouldAssumePartialData, route, buildId, seedData, fallbackRouteParams, fullPageDataBuffer, clientModules, serverConsumerManifest, key, segmentTasks) {
|
||||
// Metadata about the segment. Sent as part of the tree prefetch. Null if
|
||||
// there are no children.
|
||||
let slotMetadata = null;
|
||||
const children = route[1];
|
||||
const seedDataChildren = seedData !== null ? seedData[2] : null;
|
||||
for(const parallelRouteKey in children){
|
||||
const childRoute = children[parallelRouteKey];
|
||||
const childSegment = childRoute[0];
|
||||
const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
|
||||
const childKey = encodeChildSegmentKey(key, parallelRouteKey, Array.isArray(childSegment) && fallbackRouteParams !== null ? encodeSegmentWithPossibleFallbackParam(childSegment, fallbackRouteParams) : encodeSegment(childSegment));
|
||||
const childTree = collectSegmentDataImpl(shouldAssumePartialData, childRoute, buildId, childSeedData, fallbackRouteParams, fullPageDataBuffer, clientModules, serverConsumerManifest, childKey, segmentTasks);
|
||||
if (slotMetadata === null) {
|
||||
slotMetadata = {};
|
||||
}
|
||||
slotMetadata[parallelRouteKey] = childTree;
|
||||
}
|
||||
if (seedData !== null) {
|
||||
// Spawn a task to write the segment data to a new Flight stream.
|
||||
segmentTasks.push(// Since we're already in the middle of a render, wait until after the
|
||||
// current task to escape the current rendering context.
|
||||
waitAtLeastOneReactRenderTask().then(()=>renderSegmentPrefetch(shouldAssumePartialData, buildId, seedData, key, clientModules)));
|
||||
} else {
|
||||
// This segment does not have any seed data. Skip generating a prefetch
|
||||
// response for it. We'll still include it in the route tree, though.
|
||||
// TODO: We should encode in the route tree whether a segment is missing
|
||||
// so we don't attempt to fetch it for no reason. As of now this shouldn't
|
||||
// ever happen in practice, though.
|
||||
}
|
||||
// Metadata about the segment. Sent to the client as part of the
|
||||
// tree prefetch.
|
||||
return {
|
||||
segment: route[0],
|
||||
slots: slotMetadata,
|
||||
isRootLayout: route[4] === true
|
||||
};
|
||||
}
|
||||
function encodeSegmentWithPossibleFallbackParam(segment, fallbackRouteParams) {
|
||||
const name = segment[0];
|
||||
if (!fallbackRouteParams.has(name)) {
|
||||
// Normal case. No matching fallback parameter.
|
||||
return encodeSegment(segment);
|
||||
}
|
||||
// This segment includes a fallback parameter. During prerendering, a random
|
||||
// placeholder value was used; however, for segment prefetches, we need the
|
||||
// segment path to be predictable so the server can create a rewrite for it.
|
||||
// So, replace the placeholder segment value with a "template" string,
|
||||
// e.g. `[name]`.
|
||||
// TODO: This will become a bit cleaner once remove route parameters from the
|
||||
// server response, and instead add them to the segment keys on the client.
|
||||
// Instead of a string replacement, like we do here, route params will always
|
||||
// be encoded in separate step from the rest of the segment, not just in the
|
||||
// case of fallback params.
|
||||
const encodedSegment = encodeSegment(segment);
|
||||
const lastIndex = encodedSegment.lastIndexOf('$');
|
||||
const encodedFallbackSegment = // NOTE: This is guaranteed not to clash with the rest of the segment
|
||||
// because non-simple characters (including [ and ]) trigger a base
|
||||
// 64 encoding.
|
||||
encodedSegment.substring(0, lastIndex + 1) + `[${name}]`;
|
||||
return encodedFallbackSegment;
|
||||
}
|
||||
async function renderSegmentPrefetch(shouldAssumePartialData, buildId, seedData, key, clientModules) {
|
||||
// Render the segment data to a stream.
|
||||
// In the future, this is where we can include additional metadata, like the
|
||||
// stale time and cache tags.
|
||||
const rsc = seedData[1];
|
||||
const loading = seedData[3];
|
||||
const segmentPrefetch = {
|
||||
buildId,
|
||||
rsc,
|
||||
loading,
|
||||
isPartial: shouldAssumePartialData || await isPartialRSCData(rsc, clientModules)
|
||||
};
|
||||
// Since all we're doing is decoding and re-encoding a cached prerender, if
|
||||
// it takes longer than a microtask, it must because of hanging promises
|
||||
// caused by dynamic data. Abort the stream at the end of the current task.
|
||||
const abortController = new AbortController();
|
||||
waitAtLeastOneReactRenderTask().then(()=>abortController.abort());
|
||||
const { prelude: segmentStream } = await prerender(segmentPrefetch, clientModules, {
|
||||
signal: abortController.signal,
|
||||
onError: onSegmentPrerenderError
|
||||
});
|
||||
const segmentBuffer = await streamToBuffer(segmentStream);
|
||||
if (key === ROOT_SEGMENT_KEY) {
|
||||
return [
|
||||
'/_index',
|
||||
segmentBuffer
|
||||
];
|
||||
} else {
|
||||
return [
|
||||
key,
|
||||
segmentBuffer
|
||||
];
|
||||
}
|
||||
}
|
||||
async function isPartialRSCData(rsc, clientModules) {
|
||||
// We can determine if a segment contains only partial data if it takes longer
|
||||
// than a task to encode, because dynamic data is encoded as an infinite
|
||||
// promise. We must do this in a separate Flight prerender from the one that
|
||||
// actually generates the prefetch stream because we need to include
|
||||
// `isPartial` in the stream itself.
|
||||
let isPartial = false;
|
||||
const abortController = new AbortController();
|
||||
waitAtLeastOneReactRenderTask().then(()=>{
|
||||
// If we haven't yet finished the outer task, then it must be because we
|
||||
// accessed dynamic data.
|
||||
isPartial = true;
|
||||
abortController.abort();
|
||||
});
|
||||
await prerender(rsc, clientModules, {
|
||||
signal: abortController.signal,
|
||||
onError () {}
|
||||
});
|
||||
return isPartial;
|
||||
}
|
||||
function createUnclosingPrefetchStream(originalFlightStream) {
|
||||
// When PPR is enabled, prefetch streams may contain references that never
|
||||
// resolve, because that's how we encode dynamic data access. In the decoded
|
||||
// object returned by the Flight client, these are reified into hanging
|
||||
// promises that suspend during render, which is effectively what we want.
|
||||
// The UI resolves when it switches to the dynamic data stream
|
||||
// (via useDeferredValue(dynamic, static)).
|
||||
//
|
||||
// However, the Flight implementation currently errors if the server closes
|
||||
// the response before all the references are resolved. As a cheat to work
|
||||
// around this, we wrap the original stream in a new stream that never closes,
|
||||
// and therefore doesn't error.
|
||||
const reader = originalFlightStream.getReader();
|
||||
return new ReadableStream({
|
||||
async pull (controller) {
|
||||
while(true){
|
||||
const { done, value } = await reader.read();
|
||||
if (!done) {
|
||||
// Pass to the target stream and keep consuming the Flight response
|
||||
// from the server.
|
||||
controller.enqueue(value);
|
||||
continue;
|
||||
}
|
||||
// The server stream has closed. Exit, but intentionally do not close
|
||||
// the target stream.
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
//# sourceMappingURL=collect-segment-data.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/collect-segment-data.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/collect-segment-data.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
23
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-styles-and-scripts.js
generated
vendored
Normal file
23
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-styles-and-scripts.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
import { jsx as _jsx } from "react/jsx-runtime";
|
||||
import React from 'react';
|
||||
import { interopDefault } from './interop-default';
|
||||
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
|
||||
import { getAssetQueryString } from './get-asset-query-string';
|
||||
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
|
||||
import { renderCssResource } from './render-css-resource';
|
||||
export async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
|
||||
const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(ctx.clientReferenceManifest, filePath, injectedCSS, injectedJS);
|
||||
const styles = renderCssResource(entryCssFiles, ctx);
|
||||
const scripts = jsHrefs ? jsHrefs.map((href, index)=>/*#__PURE__*/ _jsx("script", {
|
||||
src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,
|
||||
async: true
|
||||
}, `script-${index}`)) : null;
|
||||
const Comp = interopDefault(await getComponent());
|
||||
return [
|
||||
Comp,
|
||||
styles,
|
||||
scripts
|
||||
];
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-component-styles-and-scripts.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-styles-and-scripts.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-styles-and-scripts.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import React from 'react'\nimport { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n ctx.clientReferenceManifest,\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? 
jsHrefs.map((href, index) => (\n <script\n src={`${ctx.assetPrefix}/_next/${encodeURIPath(\n href\n )}${getAssetQueryString(ctx, true)}`}\n async={true}\n key={`script-${index}`}\n />\n ))\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["React","interopDefault","getLinkAndScriptTags","getAssetQueryString","encodeURIPath","renderCssResource","createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","styles","entryCssFiles","scripts","jsHrefs","clientReferenceManifest","map","href","index","script","src","assetPrefix","async","Comp"],"mappings":";AAAA,OAAOA,WAAW,QAAO;AACzB,SAASC,cAAc,QAAQ,oBAAmB;AAClD,SAASC,oBAAoB,QAAQ,8BAA6B;AAElE,SAASC,mBAAmB,QAAQ,2BAA0B;AAC9D,SAASC,aAAa,QAAQ,mCAAkC;AAChE,SAASC,iBAAiB,QAAQ,wBAAuB;AAEzD,OAAO,eAAeC,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EAAEC,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGb,qBAClDS,IAAIK,uBAAuB,EAC3BT,UACAE,aACAC;IAGF,MAAME,SAASP,kBAAkBQ,eAAeF;IAEhD,MAAMG,UAAUC,UACZA,QAAQE,GAAG,CAAC,CAACC,MAAMC,sBACjB,KAACC;YACCC,KAAK,GAAGV,IAAIW,WAAW,CAAC,OAAO,EAAElB,cAC/Bc,QACEf,oBAAoBQ,KAAK,OAAO;YACpCY,OAAO;WACF,CAAC,OAAO,EAAEJ,OAAO,KAG1B;IAEJ,MAAMK,OAAOvB,eAAe,MAAMO;IAElC,OAAO;QAACgB;QAAMZ;QAAQE;KAAQ;AAChC"}
|
||||
672
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-tree.js
generated
vendored
Normal file
672
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-tree.js
generated
vendored
Normal file
@@ -0,0 +1,672 @@
|
||||
import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
|
||||
import React from 'react';
|
||||
import { isClientReference, isUseCacheFunction } from '../../lib/client-and-server-references';
|
||||
import { getLayoutOrPageModule } from '../lib/app-dir-module';
|
||||
import { interopDefault } from './interop-default';
|
||||
import { parseLoaderTree } from './parse-loader-tree';
|
||||
import { createComponentStylesAndScripts } from './create-component-styles-and-scripts';
|
||||
import { getLayerAssets } from './get-layer-assets';
|
||||
import { hasLoadingComponentInTree } from './has-loading-component-in-tree';
|
||||
import { validateRevalidate } from '../lib/patch-fetch';
|
||||
import { PARALLEL_ROUTE_DEFAULT_PATH } from '../../client/components/parallel-route-default';
|
||||
import { getTracer } from '../lib/trace/tracer';
|
||||
import { NextNodeServerSpan } from '../lib/trace/constants';
|
||||
import { StaticGenBailoutError } from '../../client/components/static-generation-bailout';
|
||||
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
|
||||
import { OUTLET_BOUNDARY_NAME } from '../../lib/metadata/metadata-constants';
|
||||
/**
|
||||
* Use the provided loader tree to create the React Component tree.
|
||||
*/ export function createComponentTree(props) {
|
||||
return getTracer().trace(NextNodeServerSpan.createComponentTree, {
|
||||
spanName: 'build component tree'
|
||||
}, ()=>createComponentTreeInternal(props));
|
||||
}
|
||||
function errorMissingDefaultExport(pagePath, convention) {
|
||||
const normalizedPagePath = pagePath === '/' ? '' : pagePath;
|
||||
throw Object.defineProperty(new Error(`The default export is not a React Component in "${normalizedPagePath}/${convention}"`), "__NEXT_ERROR_CODE", {
|
||||
value: "E45",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const cacheNodeKey = 'c';
|
||||
async function createComponentTreeInternal({ loaderTree: tree, parentParams, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, getViewportReady, getMetadataReady, ctx, missingSlots, preloadCallbacks, authInterrupts, StreamingMetadata, StreamingMetadataOutlet }) {
|
||||
const { renderOpts: { nextConfigOutput, experimental }, workStore, componentMod: { HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, OutletBoundary, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
|
||||
const { page, layoutOrPagePath, segment, modules, parallelRoutes } = parseLoaderTree(tree);
|
||||
const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
|
||||
const injectedCSSWithCurrentLayout = new Set(injectedCSS);
|
||||
const injectedJSWithCurrentLayout = new Set(injectedJS);
|
||||
const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
|
||||
const layerAssets = getLayerAssets({
|
||||
preloadCallbacks,
|
||||
ctx,
|
||||
layoutOrPagePath,
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout,
|
||||
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
|
||||
});
|
||||
const [Template, templateStyles, templateScripts] = template ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: template[1],
|
||||
getComponent: template[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [
|
||||
React.Fragment
|
||||
];
|
||||
const [ErrorComponent, errorStyles, errorScripts] = error ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: error[1],
|
||||
getComponent: error[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const [Loading, loadingStyles, loadingScripts] = loading ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: loading[1],
|
||||
getComponent: loading[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const isLayout = typeof layout !== 'undefined';
|
||||
const isPage = typeof page !== 'undefined';
|
||||
const { mod: layoutOrPageMod, modType } = await getTracer().trace(NextNodeServerSpan.getLayoutOrPageModule, {
|
||||
hideSpan: !(isLayout || isPage),
|
||||
spanName: 'resolve segment modules',
|
||||
attributes: {
|
||||
'next.segment': segment
|
||||
}
|
||||
}, ()=>getLayoutOrPageModule(tree));
|
||||
/**
|
||||
* Checks if the current segment is a root layout.
|
||||
*/ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
|
||||
/**
|
||||
* Checks if the current segment or any level above it has a root layout.
|
||||
*/ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
|
||||
const [NotFound, notFoundStyles] = notFound ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: notFound[1],
|
||||
getComponent: notFound[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: forbidden[1],
|
||||
getComponent: forbidden[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await createComponentStylesAndScripts({
|
||||
ctx,
|
||||
filePath: unauthorized[1],
|
||||
getComponent: unauthorized[0],
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout
|
||||
}) : [];
|
||||
let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
|
||||
if (nextConfigOutput === 'export') {
|
||||
if (!dynamic || dynamic === 'auto') {
|
||||
dynamic = 'error';
|
||||
} else if (dynamic === 'force-dynamic') {
|
||||
// force-dynamic is always incompatible with 'export'. We must interrupt the build
|
||||
throw Object.defineProperty(new StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
|
||||
value: "E527",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
if (typeof dynamic === 'string') {
|
||||
// the nested most config wins so we only force-static
|
||||
// if it's configured above any parent that configured
|
||||
// otherwise
|
||||
if (dynamic === 'error') {
|
||||
workStore.dynamicShouldError = true;
|
||||
} else if (dynamic === 'force-dynamic') {
|
||||
workStore.forceDynamic = true;
|
||||
// TODO: (PPR) remove this bailout once PPR is the default
|
||||
if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
|
||||
// If the postpone API isn't available, we can't postpone the render and
|
||||
// therefore we can't use the dynamic API.
|
||||
const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E585",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
workStore.dynamicUsageDescription = err.message;
|
||||
workStore.dynamicUsageStack = err.stack;
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
workStore.dynamicShouldError = false;
|
||||
workStore.forceStatic = dynamic === 'force-static';
|
||||
}
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
|
||||
workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
|
||||
validateRevalidate(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
|
||||
}
|
||||
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
|
||||
const defaultRevalidate = layoutOrPageMod.revalidate;
|
||||
const workUnitStore = workUnitAsyncStorage.getStore();
|
||||
if (workUnitStore) {
|
||||
if (workUnitStore.type === 'prerender' || workUnitStore.type === 'prerender-legacy' || workUnitStore.type === 'prerender-ppr' || workUnitStore.type === 'cache') {
|
||||
if (workUnitStore.revalidate > defaultRevalidate) {
|
||||
workUnitStore.revalidate = defaultRevalidate;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
|
||||
// therefore we can't use the dynamic API.
|
||||
!experimental.isRoutePPREnabled) {
|
||||
const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
|
||||
workStore.dynamicUsageDescription = dynamicUsageDescription;
|
||||
throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
|
||||
value: "E394",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
const isStaticGeneration = workStore.isStaticGeneration;
|
||||
// Assume the segment we're rendering contains only partial data if PPR is
|
||||
// enabled and this is a statically generated response. This is used by the
|
||||
// client Segment Cache after a prefetch to determine if it can skip the
|
||||
// second request to fill in the dynamic data.
|
||||
//
|
||||
// It's OK for this to be `true` when the data is actually fully static, but
|
||||
// it's not OK for this to be `false` when the data possibly contains holes.
|
||||
// Although the value here is overly pessimistic, for prefetches, it will be
|
||||
// replaced by a more specific value when the data is later processed into
|
||||
// per-segment responses (see collect-segment-data.tsx)
|
||||
//
|
||||
// For dynamic requests, this must always be `false` because dynamic responses
|
||||
// are never partial.
|
||||
const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
|
||||
const LayoutOrPage = layoutOrPageMod ? interopDefault(layoutOrPageMod) : undefined;
|
||||
/**
|
||||
* The React Component to render.
|
||||
*/ let MaybeComponent = LayoutOrPage;
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
const { isValidElementType } = require('next/dist/compiled/react-is');
|
||||
if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
|
||||
errorMissingDefaultExport(pagePath, modType ?? 'page');
|
||||
}
|
||||
if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
|
||||
errorMissingDefaultExport(pagePath, 'error');
|
||||
}
|
||||
if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
|
||||
errorMissingDefaultExport(pagePath, 'loading');
|
||||
}
|
||||
if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
|
||||
errorMissingDefaultExport(pagePath, 'not-found');
|
||||
}
|
||||
if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
|
||||
errorMissingDefaultExport(pagePath, 'forbidden');
|
||||
}
|
||||
if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
|
||||
errorMissingDefaultExport(pagePath, 'unauthorized');
|
||||
}
|
||||
}
|
||||
// Handle dynamic segment params.
|
||||
const segmentParam = getDynamicParamFromSegment(segment);
|
||||
// Create object holding the parent params and current params
|
||||
let currentParams = parentParams;
|
||||
if (segmentParam && segmentParam.value !== null) {
|
||||
currentParams = {
|
||||
...parentParams,
|
||||
[segmentParam.param]: segmentParam.value
|
||||
};
|
||||
}
|
||||
// Resolve the segment param
|
||||
const actualSegment = segmentParam ? segmentParam.treeSegment : segment;
|
||||
const metadata = StreamingMetadata ? /*#__PURE__*/ _jsx(StreamingMetadata, {}) : undefined;
|
||||
// Use the same condition to render metadataOutlet as metadata
|
||||
const metadataOutlet = StreamingMetadataOutlet ? /*#__PURE__*/ _jsx(StreamingMetadataOutlet, {}) : undefined;
|
||||
const notFoundElement = NotFound ? /*#__PURE__*/ _jsxs(_Fragment, {
|
||||
children: [
|
||||
/*#__PURE__*/ _jsx(NotFound, {}),
|
||||
notFoundStyles
|
||||
]
|
||||
}) : undefined;
|
||||
const forbiddenElement = Forbidden ? /*#__PURE__*/ _jsxs(_Fragment, {
|
||||
children: [
|
||||
/*#__PURE__*/ _jsx(Forbidden, {}),
|
||||
forbiddenStyles
|
||||
]
|
||||
}) : undefined;
|
||||
const unauthorizedElement = Unauthorized ? /*#__PURE__*/ _jsxs(_Fragment, {
|
||||
children: [
|
||||
/*#__PURE__*/ _jsx(Unauthorized, {}),
|
||||
unauthorizedStyles
|
||||
]
|
||||
}) : undefined;
|
||||
// TODO: Combine this `map` traversal with the loop below that turns the array
|
||||
// into an object.
|
||||
const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
|
||||
const isChildrenRouteKey = parallelRouteKey === 'children';
|
||||
const parallelRoute = parallelRoutes[parallelRouteKey];
|
||||
const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
|
||||
const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
|
||||
const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
|
||||
// if we're prefetching and that there's a Loading component, we bail out
|
||||
// otherwise we keep rendering for the prefetch.
|
||||
// We also want to bail out if there's no Loading component in the tree.
|
||||
let childCacheNodeSeedData = null;
|
||||
if (// Before PPR, the way instant navigations work in Next.js is we
|
||||
// prefetch everything up to the first route segment that defines a
|
||||
// loading.tsx boundary. (We do the same if there's no loading
|
||||
// boundary in the entire tree, because we don't want to prefetch too
|
||||
// much) The rest of the tree is deferred until the actual navigation.
|
||||
// It does not take into account whether the data is dynamic — even if
|
||||
// the tree is completely static, it will still defer everything
|
||||
// inside the loading boundary.
|
||||
//
|
||||
// This behavior predates PPR and is only relevant if the
|
||||
// PPR flag is not enabled.
|
||||
isPrefetch && (Loading || !hasLoadingComponentInTree(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
|
||||
// regular Suspense boundary and has no special behavior.
|
||||
//
|
||||
// With PPR, we prefetch as deeply as possible, and only defer when
|
||||
// dynamic data is accessed. If so, we only defer the nearest parent
|
||||
// Suspense boundary of the dynamic data access, regardless of whether
|
||||
// the boundary is defined by loading.tsx or a normal <Suspense>
|
||||
// component in userspace.
|
||||
//
|
||||
// NOTE: In practice this usually means we'll end up prefetching more
|
||||
// than we were before PPR, which may or may not be considered a
|
||||
// performance regression by some apps. The plan is to address this
|
||||
// before General Availability of PPR by introducing granular
|
||||
// per-segment fetching, so we can reuse as much of the tree as
|
||||
// possible during both prefetches and dynamic navigations. But during
|
||||
// the beta period, we should be clear about this trade off in our
|
||||
// communications.
|
||||
!experimental.isRoutePPREnabled) {
|
||||
// Don't prefetch this child. This will trigger a lazy fetch by the
|
||||
// client router.
|
||||
} else {
|
||||
// Create the child component
|
||||
if (process.env.NODE_ENV === 'development' && missingSlots) {
|
||||
var _parsedTree_layoutOrPagePath;
|
||||
// When we detect the default fallback (which triggers a 404), we collect the missing slots
|
||||
// to provide more helpful debug information during development mode.
|
||||
const parsedTree = parseLoaderTree(parallelRoute);
|
||||
if ((_parsedTree_layoutOrPagePath = parsedTree.layoutOrPagePath) == null ? void 0 : _parsedTree_layoutOrPagePath.endsWith(PARALLEL_ROUTE_DEFAULT_PATH)) {
|
||||
missingSlots.add(parallelRouteKey);
|
||||
}
|
||||
}
|
||||
const seedData = await createComponentTreeInternal({
|
||||
loaderTree: parallelRoute,
|
||||
parentParams: currentParams,
|
||||
rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
|
||||
injectedCSS: injectedCSSWithCurrentLayout,
|
||||
injectedJS: injectedJSWithCurrentLayout,
|
||||
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
|
||||
// `getMetadataReady` and `getViewportReady` are used to conditionally throw. In the case of parallel routes we will have more than one page
|
||||
// but we only want to throw on the first one.
|
||||
getMetadataReady: isChildrenRouteKey ? getMetadataReady : ()=>Promise.resolve(),
|
||||
getViewportReady: isChildrenRouteKey ? getViewportReady : ()=>Promise.resolve(),
|
||||
ctx,
|
||||
missingSlots,
|
||||
preloadCallbacks,
|
||||
authInterrupts,
|
||||
StreamingMetadata: isChildrenRouteKey ? StreamingMetadata : null,
|
||||
// `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
|
||||
// but we only want to throw on the first one.
|
||||
StreamingMetadataOutlet: isChildrenRouteKey ? StreamingMetadataOutlet : null
|
||||
});
|
||||
childCacheNodeSeedData = seedData;
|
||||
}
|
||||
// This is turned back into an object below.
|
||||
return [
|
||||
parallelRouteKey,
|
||||
/*#__PURE__*/ _jsx(LayoutRouter, {
|
||||
parallelRouterKey: parallelRouteKey,
|
||||
// TODO-APP: Add test for loading returning `undefined`. This currently can't be tested as the `webdriver()` tab will wait for the full page to load before returning.
|
||||
error: ErrorComponent,
|
||||
errorStyles: errorStyles,
|
||||
errorScripts: errorScripts,
|
||||
template: /*#__PURE__*/ _jsx(Template, {
|
||||
children: /*#__PURE__*/ _jsx(RenderFromTemplateContext, {})
|
||||
}),
|
||||
templateStyles: templateStyles,
|
||||
templateScripts: templateScripts,
|
||||
notFound: notFoundComponent,
|
||||
forbidden: forbiddenComponent,
|
||||
unauthorized: unauthorizedComponent
|
||||
}),
|
||||
childCacheNodeSeedData
|
||||
];
|
||||
}));
|
||||
// Convert the parallel route map into an object after all promises have been resolved.
|
||||
let parallelRouteProps = {};
|
||||
let parallelRouteCacheNodeSeedData = {};
|
||||
for (const parallelRoute of parallelRouteMap){
|
||||
const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
|
||||
parallelRouteProps[parallelRouteKey] = parallelRouteProp;
|
||||
parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
|
||||
}
|
||||
const loadingData = Loading ? [
|
||||
/*#__PURE__*/ _jsx(Loading, {}, "l"),
|
||||
loadingStyles,
|
||||
loadingScripts
|
||||
] : null;
|
||||
// When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
|
||||
if (!MaybeComponent) {
|
||||
return [
|
||||
actualSegment,
|
||||
/*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
parallelRouteProps.children
|
||||
]
|
||||
}, cacheNodeKey),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse
|
||||
];
|
||||
}
|
||||
const Component = MaybeComponent;
|
||||
// If force-dynamic is used and the current render supports postponing, we
|
||||
// replace it with a node that will postpone the render. This ensures that the
|
||||
// postpone is invoked during the react render phase and not during the next
|
||||
// render phase.
|
||||
// @TODO this does not actually do what it seems like it would or should do. The idea is that
|
||||
// if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
|
||||
// that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
|
||||
// because this comes after the children traversal and the static generation store is mutated every segment
|
||||
// along the parent path of a force-dynamic segment will hit this condition effectively making the entire
|
||||
// render force-dynamic. We should refactor this function so that we can correctly track which segments
|
||||
// need to be dynamic
|
||||
if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
|
||||
return [
|
||||
actualSegment,
|
||||
/*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
/*#__PURE__*/ _jsx(Postpone, {
|
||||
reason: 'dynamic = "force-dynamic" was used',
|
||||
route: workStore.route
|
||||
}),
|
||||
layerAssets
|
||||
]
|
||||
}, cacheNodeKey),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
true
|
||||
];
|
||||
}
|
||||
const isClientComponent = isClientReference(layoutOrPageMod);
|
||||
if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
|
||||
// @TODO consider making this an error and running the check in build as well
|
||||
console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
|
||||
}
|
||||
if (isPage) {
|
||||
const PageComponent = Component;
|
||||
// Assign searchParams to props if this is a page
|
||||
let pageElement;
|
||||
if (isClientComponent) {
|
||||
if (isStaticGeneration) {
|
||||
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams, workStore);
|
||||
const promiseOfSearchParams = createPrerenderSearchParamsForClientPage(workStore);
|
||||
pageElement = /*#__PURE__*/ _jsx(ClientPageRoot, {
|
||||
Component: PageComponent,
|
||||
searchParams: query,
|
||||
params: currentParams,
|
||||
promises: [
|
||||
promiseOfSearchParams,
|
||||
promiseOfParams
|
||||
]
|
||||
});
|
||||
} else {
|
||||
pageElement = /*#__PURE__*/ _jsx(ClientPageRoot, {
|
||||
Component: PageComponent,
|
||||
searchParams: query,
|
||||
params: currentParams
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// If we are passing params to a server component Page we need to track
|
||||
// their usage in case the current render mode tracks dynamic API usage.
|
||||
const params = createServerParamsForServerSegment(currentParams, workStore);
|
||||
// TODO(useCache): Should we use this trick also if dynamicIO is enabled,
|
||||
// instead of relying on the searchParams being a hanging promise?
|
||||
if (!experimental.dynamicIO && isUseCacheFunction(PageComponent)) {
|
||||
const UseCachePageComponent = PageComponent;
|
||||
// The "use cache" wrapper takes care of converting this into an
|
||||
// erroring search params promise when passing it to the original
|
||||
// function.
|
||||
const searchParams = Promise.resolve({});
|
||||
pageElement = /*#__PURE__*/ _jsx(UseCachePageComponent, {
|
||||
params: params,
|
||||
searchParams: searchParams,
|
||||
$$isPageComponent: true
|
||||
});
|
||||
} else {
|
||||
// If we are passing searchParams to a server component Page we need to
|
||||
// track their usage in case the current render mode tracks dynamic API
|
||||
// usage.
|
||||
const searchParams = createServerSearchParamsForServerPage(query, workStore);
|
||||
pageElement = /*#__PURE__*/ _jsx(PageComponent, {
|
||||
params: params,
|
||||
searchParams: searchParams
|
||||
});
|
||||
}
|
||||
}
|
||||
return [
|
||||
actualSegment,
|
||||
/*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
pageElement,
|
||||
metadata,
|
||||
layerAssets,
|
||||
/*#__PURE__*/ _jsxs(OutletBoundary, {
|
||||
children: [
|
||||
/*#__PURE__*/ _jsx(MetadataOutlet, {
|
||||
ready: getViewportReady
|
||||
}),
|
||||
/*#__PURE__*/ _jsx(MetadataOutlet, {
|
||||
ready: getMetadataReady
|
||||
}),
|
||||
metadataOutlet
|
||||
]
|
||||
})
|
||||
]
|
||||
}, cacheNodeKey),
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse
|
||||
];
|
||||
} else {
|
||||
const SegmentComponent = Component;
|
||||
const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
|
||||
let segmentNode;
|
||||
if (isClientComponent) {
|
||||
let clientSegment;
|
||||
if (isStaticGeneration) {
|
||||
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams, workStore);
|
||||
clientSegment = /*#__PURE__*/ _jsx(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: parallelRouteProps,
|
||||
params: currentParams,
|
||||
promise: promiseOfParams
|
||||
});
|
||||
} else {
|
||||
clientSegment = /*#__PURE__*/ _jsx(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: parallelRouteProps,
|
||||
params: currentParams
|
||||
});
|
||||
}
|
||||
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
|
||||
let notfoundClientSegment;
|
||||
let forbiddenClientSegment;
|
||||
let unauthorizedClientSegment;
|
||||
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
|
||||
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
|
||||
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
|
||||
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
|
||||
// rely on the `NotFound` behavior.
|
||||
notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ErrorBoundaryComponent: NotFound,
|
||||
errorElement: notFoundElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ErrorBoundaryComponent: Forbidden,
|
||||
errorElement: forbiddenElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
|
||||
ErrorBoundaryComponent: Unauthorized,
|
||||
errorElement: unauthorizedElement,
|
||||
ClientSegmentRoot,
|
||||
layerAssets,
|
||||
SegmentComponent,
|
||||
currentParams
|
||||
});
|
||||
if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
|
||||
segmentNode = /*#__PURE__*/ _jsxs(HTTPAccessFallbackBoundary, {
|
||||
notFound: notfoundClientSegment,
|
||||
forbidden: forbiddenClientSegment,
|
||||
unauthorized: unauthorizedClientSegment,
|
||||
children: [
|
||||
layerAssets,
|
||||
clientSegment
|
||||
]
|
||||
}, cacheNodeKey);
|
||||
} else {
|
||||
segmentNode = /*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
clientSegment
|
||||
]
|
||||
}, cacheNodeKey);
|
||||
}
|
||||
} else {
|
||||
segmentNode = /*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
clientSegment
|
||||
]
|
||||
}, cacheNodeKey);
|
||||
}
|
||||
} else {
|
||||
const params = createServerParamsForServerSegment(currentParams, workStore);
|
||||
let serverSegment = /*#__PURE__*/ _jsx(SegmentComponent, {
|
||||
...parallelRouteProps,
|
||||
params: params
|
||||
});
|
||||
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
|
||||
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
|
||||
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
|
||||
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
|
||||
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
|
||||
// rely on the `NotFound` behavior.
|
||||
segmentNode = /*#__PURE__*/ _jsxs(HTTPAccessFallbackBoundary, {
|
||||
notFound: NotFound ? /*#__PURE__*/ _jsxs(_Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
/*#__PURE__*/ _jsxs(SegmentComponent, {
|
||||
params: params,
|
||||
children: [
|
||||
notFoundStyles,
|
||||
/*#__PURE__*/ _jsx(NotFound, {})
|
||||
]
|
||||
})
|
||||
]
|
||||
}) : undefined,
|
||||
children: [
|
||||
layerAssets,
|
||||
serverSegment
|
||||
]
|
||||
}, cacheNodeKey);
|
||||
} else {
|
||||
segmentNode = /*#__PURE__*/ _jsxs(React.Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
serverSegment
|
||||
]
|
||||
}, cacheNodeKey);
|
||||
}
|
||||
}
|
||||
// For layouts we just render the component
|
||||
return [
|
||||
actualSegment,
|
||||
segmentNode,
|
||||
parallelRouteCacheNodeSeedData,
|
||||
loadingData,
|
||||
isPossiblyPartialResponse
|
||||
];
|
||||
}
|
||||
}
|
||||
async function MetadataOutlet({ ready }) {
|
||||
const r = ready();
|
||||
// We can avoid a extra microtask by unwrapping the instrumented promise directly if available.
|
||||
if (r.status === 'rejected') {
|
||||
throw r.value;
|
||||
} else if (r.status !== 'fulfilled') {
|
||||
await r;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
MetadataOutlet.displayName = OUTLET_BOUNDARY_NAME;
|
||||
function createErrorBoundaryClientSegmentRoot({ ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
|
||||
if (ErrorBoundaryComponent) {
|
||||
const notFoundParallelRouteProps = {
|
||||
children: errorElement
|
||||
};
|
||||
return /*#__PURE__*/ _jsxs(_Fragment, {
|
||||
children: [
|
||||
layerAssets,
|
||||
/*#__PURE__*/ _jsx(ClientSegmentRoot, {
|
||||
Component: SegmentComponent,
|
||||
slots: notFoundParallelRouteProps,
|
||||
params: currentParams
|
||||
})
|
||||
]
|
||||
});
|
||||
}
|
||||
return null;
|
||||
}
|
||||
export function getRootParams(loaderTree, getDynamicParamFromSegment) {
|
||||
return getRootParamsImpl({}, loaderTree, getDynamicParamFromSegment);
|
||||
}
|
||||
function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
|
||||
const { segment, modules: { layout }, parallelRoutes } = parseLoaderTree(loaderTree);
|
||||
const segmentParam = getDynamicParamFromSegment(segment);
|
||||
let currentParams = parentParams;
|
||||
if (segmentParam && segmentParam.value !== null) {
|
||||
currentParams = {
|
||||
...parentParams,
|
||||
[segmentParam.param]: segmentParam.value
|
||||
};
|
||||
}
|
||||
const isRootLayout = typeof layout !== 'undefined';
|
||||
if (isRootLayout) {
|
||||
return currentParams;
|
||||
} else if (!parallelRoutes.children) {
|
||||
// This should really be an error but there are bugs in Turbopack that cause
|
||||
// the _not-found LoaderTree to not have any layouts. For rootParams sake
|
||||
// this is somewhat irrelevant when you are not customizing the 404 page.
|
||||
// If you are customizing 404
|
||||
// TODO update rootParams to make all params optional if `/app/not-found.tsx` is defined
|
||||
return currentParams;
|
||||
} else {
|
||||
return getRootParamsImpl(currentParams, // We stop looking for root params as soon as we hit the first layout
|
||||
// and it is not possible to use parallel route children above the root layout
|
||||
// so every parallelRoutes object that this function can visit will necessarily
|
||||
// have a single `children` prop and no others.
|
||||
parallelRoutes.children, getDynamicParamFromSegment);
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=create-component-tree.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-tree.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-component-tree.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
163
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-error-handler.js
generated
vendored
Normal file
163
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-error-handler.js
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
import stringHash from 'next/dist/compiled/string-hash';
|
||||
import { formatServerError } from '../../lib/format-server-error';
|
||||
import { SpanStatusCode, getTracer } from '../lib/trace/tracer';
|
||||
import { isAbortError } from '../pipe-readable';
|
||||
import { isBailoutToCSRError } from '../../shared/lib/lazy-dynamic/bailout-to-csr';
|
||||
import { isDynamicServerError } from '../../client/components/hooks-server-context';
|
||||
import { isNextRouterError } from '../../client/components/is-next-router-error';
|
||||
import { getProperError } from '../../lib/is-error';
|
||||
import { createDigestWithErrorCode } from '../../lib/error-telemetry-utils';
|
||||
/**
 * Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
 * digest is returned this also means that the error does not need to be
 * reported.
 */ export function getDigestForWellKnownError(error) {
    // Bailout-to-CSR, router-navigation, and dynamic-server errors are all
    // expected control flow; surface their digest instead of reporting them.
    // (The dynamic-server case is only thrown during static generation without
    // PPR and marks the whole page as dynamic — not actionable by the user.)
    if (isBailoutToCSRError(error) || isNextRouterError(error) || isDynamicServerError(error)) {
        return error.digest;
    }
    return undefined;
}
|
||||
/**
 * Creates the error handler used for React flight (RSC) server renders.
 *
 * @param shouldFormatError whether to rewrite server errors with more helpful
 *   messages (enabled in development).
 * @param onReactServerRenderError callback invoked with each reportable error.
 * @returns a handler mapping a thrown value to a stable digest string, or
 *   `undefined` when the error should not be reported (aborted response).
 */ export function createFlightReactServerErrorHandler(shouldFormatError, onReactServerRenderError) {
    return (thrownValue)=>{
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return stringHash(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = getProperError(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (!err.digest) {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            // BUGFIX: parenthesize the stack fallback. `+` binds tighter than `||`,
            // so the previous `err.message + err.stack || ''` concatenated the
            // literal string "undefined" when no stack was present. This now
            // matches createHTMLReactServerErrorHandler below.
            err.digest = stringHash(err.message + (err.stack || '')).toString();
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Record exception in an active span, if available.
        const span = getTracer().getActiveScopeSpan();
        if (span) {
            span.recordException(err);
            span.setStatus({
                code: SpanStatusCode.ERROR,
                message: err.message
            });
        }
        onReactServerRenderError(err);
        return createDigestWithErrorCode(thrownValue, err.digest);
    };
}
|
||||
/**
 * Creates the error handler used for the RSC portion of an HTML (SSR) render.
 *
 * @param shouldFormatError format server errors with friendlier messages (dev).
 * @param isNextExport true during `next export`; suppresses redacted errors.
 * @param reactServerErrors Map of digest -> original error, shared with the
 *   HTML handler so obfuscated react-server errors can be recovered later.
 * @param silenceLogger when true, skips the render-error callback.
 * @param onReactServerRenderError optional callback for reportable errors.
 * @returns handler mapping a thrown value to a digest string, or `undefined`
 *   when the error should not be reported (aborted response).
 */ export function createHTMLReactServerErrorHandler(shouldFormatError, isNextExport, reactServerErrors, silenceLogger, onReactServerRenderError) {
    return (thrownValue)=>{
        var _err_message;
        // Plain strings thrown from user code get hashed directly into a digest.
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return stringHash(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = getProperError(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (!err.digest) {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            err.digest = stringHash(err.message + (err.stack || '')).toString();
        }
        // @TODO by putting this here and not at the top it is possible that
        // we don't error the build in places we actually expect to
        if (!reactServerErrors.has(err.digest)) {
            reactServerErrors.set(err.digest, err);
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        if (!(isNextExport && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // Record exception in an active span, if available.
            const span = getTracer().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            if (!silenceLogger) {
                onReactServerRenderError == null ? void 0 : onReactServerRenderError(err);
            }
        }
        return createDigestWithErrorCode(thrownValue, err.digest);
    };
}
|
||||
/**
 * Creates the error handler for the HTML (SSR) render pass.
 *
 * Errors whose digest is found in `reactServerErrors` originated in the RSC
 * render; they are recovered to the original error and NOT re-reported here
 * (the RSC handler already reported them).
 *
 * @param shouldFormatError format server errors with friendlier messages (dev).
 * @param isNextExport true during `next export`; suppresses redacted errors.
 * @param reactServerErrors Map of digest -> original error from the RSC pass.
 * @param allCapturedErrors every thrown value is collected here for callers.
 * @param silenceLogger when true, skips the SSR-error callback.
 * @param onHTMLRenderSSRError callback for reportable SSR-originated errors.
 * @returns handler mapping a thrown value (plus React errorInfo) to a digest
 *   string, or `undefined` when the error should not be reported.
 */ export function createHTMLErrorHandler(shouldFormatError, isNextExport, reactServerErrors, allCapturedErrors, silenceLogger, onHTMLRenderSSRError) {
    return (thrownValue, errorInfo)=>{
        var _err_message;
        let isSSRError = true;
        allCapturedErrors.push(thrownValue);
        // If the response was closed, we don't need to log the error.
        if (isAbortError(thrownValue)) return;
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = getProperError(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from react-server.
                // We recover the original error here.
                thrownValue = reactServerErrors.get(err.digest);
                isSSRError = false;
            } else {
                // The error is not from react-server but has a digest
                // from other means so we don't need to produce a new one
            }
        } else {
            // Prefer the React component stack over the JS stack for the digest,
            // falling back to the empty string when neither exists.
            err.digest = stringHash(err.message + ((errorInfo == null ? void 0 : errorInfo.componentStack) || err.stack || '')).toString();
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            formatServerError(err);
        }
        // Don't log the suppressed error during export
        if (!(isNextExport && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // Record exception in an active span, if available.
            const span = getTracer().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            if (!silenceLogger && // HTML errors contain RSC errors as well, filter them out before reporting
            isSSRError) {
                onHTMLRenderSSRError(err, errorInfo);
            }
        }
        return createDigestWithErrorCode(thrownValue, err.digest);
    };
}
|
||||
/**
 * An error is considered user-land unless it is one of Next.js' internal
 * control-flow errors (abort, bailout-to-CSR, or router navigation).
 */ export function isUserLandError(err) {
    if (isAbortError(err)) return false;
    if (isBailoutToCSRError(err)) return false;
    return !isNextRouterError(err);
}
|
||||
|
||||
//# sourceMappingURL=create-error-handler.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-error-handler.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-error-handler.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
20
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-flight-router-state-from-loader-tree.js
generated
vendored
Normal file
20
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-flight-router-state-from-loader-tree.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import { addSearchParamsIfPageSegment } from '../../shared/lib/segment';
|
||||
/**
 * Converts a LoaderTree into the FlightRouterState tuple shape:
 * [segment, childrenMap, ...,(index 4) isRootLayout flag].
 */ export function createFlightRouterStateFromLoaderTree([segment, parallelRoutes, { layout }], getDynamicParamFromSegment, searchParams, rootLayoutIncluded = false) {
    // Dynamic params are represented by their tree segment; static segments pass through.
    const dynamicParam = getDynamicParamFromSegment(segment);
    const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
    const segmentTree = [
        addSearchParamsIfPageSegment(treeSegment, searchParams),
        {}
    ];
    if (!rootLayoutIncluded && typeof layout !== 'undefined') {
        // Index 4 marks the node that carries the root layout.
        rootLayoutIncluded = true;
        segmentTree[4] = true;
    }
    const children = {};
    for (const parallelRouteKey of Object.keys(parallelRoutes)) {
        children[parallelRouteKey] = createFlightRouterStateFromLoaderTree(parallelRoutes[parallelRouteKey], getDynamicParamFromSegment, searchParams, rootLayoutIncluded);
    }
    segmentTree[1] = children;
    return segmentTree;
}
|
||||
|
||||
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-flight-router-state-from-loader-tree.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/create-flight-router-state-from-loader-tree.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/create-flight-router-state-from-loader-tree.ts"],"sourcesContent":["import type { LoaderTree } from '../lib/app-dir-module'\nimport type { FlightRouterState } from './types'\nimport type { GetDynamicParamFromSegment } from './app-render'\nimport { addSearchParamsIfPageSegment } from '../../shared/lib/segment'\n\nexport function createFlightRouterStateFromLoaderTree(\n [segment, parallelRoutes, { layout }]: LoaderTree,\n getDynamicParamFromSegment: GetDynamicParamFromSegment,\n searchParams: any,\n rootLayoutIncluded = false\n): FlightRouterState {\n const dynamicParam = getDynamicParamFromSegment(segment)\n const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment\n\n const segmentTree: FlightRouterState = [\n addSearchParamsIfPageSegment(treeSegment, searchParams),\n {},\n ]\n\n if (!rootLayoutIncluded && typeof layout !== 'undefined') {\n rootLayoutIncluded = true\n segmentTree[4] = true\n }\n\n segmentTree[1] = Object.keys(parallelRoutes).reduce(\n (existingValue, currentValue) => {\n existingValue[currentValue] = createFlightRouterStateFromLoaderTree(\n parallelRoutes[currentValue],\n getDynamicParamFromSegment,\n searchParams,\n rootLayoutIncluded\n )\n return existingValue\n },\n {} as FlightRouterState[1]\n )\n\n return 
segmentTree\n}\n"],"names":["addSearchParamsIfPageSegment","createFlightRouterStateFromLoaderTree","segment","parallelRoutes","layout","getDynamicParamFromSegment","searchParams","rootLayoutIncluded","dynamicParam","treeSegment","segmentTree","Object","keys","reduce","existingValue","currentValue"],"mappings":"AAGA,SAASA,4BAA4B,QAAQ,2BAA0B;AAEvE,OAAO,SAASC,sCACd,CAACC,SAASC,gBAAgB,EAAEC,MAAM,EAAE,CAAa,EACjDC,0BAAsD,EACtDC,YAAiB,EACjBC,qBAAqB,KAAK;IAE1B,MAAMC,eAAeH,2BAA2BH;IAChD,MAAMO,cAAcD,eAAeA,aAAaC,WAAW,GAAGP;IAE9D,MAAMQ,cAAiC;QACrCV,6BAA6BS,aAAaH;QAC1C,CAAC;KACF;IAED,IAAI,CAACC,sBAAsB,OAAOH,WAAW,aAAa;QACxDG,qBAAqB;QACrBG,WAAW,CAAC,EAAE,GAAG;IACnB;IAEAA,WAAW,CAAC,EAAE,GAAGC,OAAOC,IAAI,CAACT,gBAAgBU,MAAM,CACjD,CAACC,eAAeC;QACdD,aAAa,CAACC,aAAa,GAAGd,sCAC5BE,cAAc,CAACY,aAAa,EAC5BV,4BACAC,cACAC;QAEF,OAAOO;IACT,GACA,CAAC;IAGH,OAAOJ;AACT"}
|
||||
64
frontend/webapp/node_modules/next/dist/esm/server/app-render/csrf-protection.js
generated
vendored
Normal file
64
frontend/webapp/node_modules/next/dist/esm/server/app-render/csrf-protection.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
// the goal is to match the functionality for remotePatterns as defined here -
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
// TODO - retrofit micromatch to work in edge and use that instead
function matchWildcardDomain(domain, pattern) {
    const domainParts = domain.split('.');
    const patternParts = pattern.split('.');
    if (patternParts.length < 1) {
        // pattern is empty and therefore invalid to match against
        return false;
    }
    if (domainParts.length < patternParts.length) {
        // domain has too few segments and thus cannot match
        return false;
    }
    // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')
    // This ensures wildcards can only match subdomains, not the main domain
    if (patternParts.length === 1 && (patternParts[0] === '*' || patternParts[0] === '**')) {
        return false;
    }
    // Compare segments right-to-left (TLD first) using indices instead of pop().
    let d = domainParts.length - 1;
    for(let p = patternParts.length - 1; p >= 0; p--, d--){
        const patternPart = patternParts[p];
        const domainPart = domainParts[d];
        if (patternPart === '') {
            // invalid pattern. pattern segments must be non empty
            return false;
        }
        if (patternPart === '*') {
            // single wildcard matches exactly one non-empty segment
            if (domainPart) continue;
            return false;
        }
        if (patternPart === '**') {
            // recursive wildcard is only valid as the left-most pattern segment
            if (p > 0) return false;
            // it matches one-or-more remaining segments
            return domainPart !== undefined;
        }
        if (domainPart !== patternPart) {
            return false;
        }
    }
    // Pattern exhausted: a match requires the domain to be exhausted too.
    return d < 0;
}
|
||||
/**
 * True when `originDomain` exactly matches, or wildcard-matches, any entry of
 * `allowedOrigins`. Falsy entries are skipped.
 */ export const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
    for (const allowedOrigin of allowedOrigins){
        if (!allowedOrigin) continue;
        if (allowedOrigin === originDomain) return true;
        if (matchWildcardDomain(originDomain, allowedOrigin)) return true;
    }
    return false;
};
|
||||
|
||||
//# sourceMappingURL=csrf-protection.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/csrf-protection.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/csrf-protection.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/csrf-protection.ts"],"sourcesContent":["// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function\n// can be run from edge. This is a simple implementation that safely achieves the required functionality.\n// the goal is to match the functionality for remotePatterns as defined here -\n// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns\n// TODO - retrofit micromatch to work in edge and use that instead\nfunction matchWildcardDomain(domain: string, pattern: string) {\n const domainParts = domain.split('.')\n const patternParts = pattern.split('.')\n\n if (patternParts.length < 1) {\n // pattern is empty and therefore invalid to match against\n return false\n }\n\n if (domainParts.length < patternParts.length) {\n // domain has too few segments and thus cannot match\n return false\n }\n\n // Prevent wildcards from matching entire domains (e.g. '**' or '*.com')\n // This ensures wildcards can only match subdomains, not the main domain\n if (\n patternParts.length === 1 &&\n (patternParts[0] === '*' || patternParts[0] === '**')\n ) {\n return false\n }\n\n while (patternParts.length) {\n const patternPart = patternParts.pop()\n const domainPart = domainParts.pop()\n\n switch (patternPart) {\n case '': {\n // invalid pattern. pattern segments must be non empty\n return false\n }\n case '*': {\n // wildcard matches anything so we continue if the domain part is non-empty\n if (domainPart) {\n continue\n } else {\n return false\n }\n }\n case '**': {\n // if this is not the last item in the pattern the pattern is invalid\n if (patternParts.length > 0) {\n return false\n }\n // recursive wildcard matches anything so we terminate here if the domain part is non empty\n return domainPart !== undefined\n }\n default: {\n if (domainPart !== patternPart) {\n return false\n }\n }\n }\n }\n\n // We exhausted the pattern. 
If we also exhausted the domain we have a match\n return domainParts.length === 0\n}\n\nexport const isCsrfOriginAllowed = (\n originDomain: string,\n allowedOrigins: string[] = []\n): boolean => {\n return allowedOrigins.some(\n (allowedOrigin) =>\n allowedOrigin &&\n (allowedOrigin === originDomain ||\n matchWildcardDomain(originDomain, allowedOrigin))\n )\n}\n"],"names":["matchWildcardDomain","domain","pattern","domainParts","split","patternParts","length","patternPart","pop","domainPart","undefined","isCsrfOriginAllowed","originDomain","allowedOrigins","some","allowedOrigin"],"mappings":"AAAA,uHAAuH;AACvH,yGAAyG;AACzG,8EAA8E;AAC9E,4EAA4E;AAC5E,kEAAkE;AAClE,SAASA,oBAAoBC,MAAc,EAAEC,OAAe;IAC1D,MAAMC,cAAcF,OAAOG,KAAK,CAAC;IACjC,MAAMC,eAAeH,QAAQE,KAAK,CAAC;IAEnC,IAAIC,aAAaC,MAAM,GAAG,GAAG;QAC3B,0DAA0D;QAC1D,OAAO;IACT;IAEA,IAAIH,YAAYG,MAAM,GAAGD,aAAaC,MAAM,EAAE;QAC5C,oDAAoD;QACpD,OAAO;IACT;IAEA,wEAAwE;IACxE,wEAAwE;IACxE,IACED,aAAaC,MAAM,KAAK,KACvBD,CAAAA,YAAY,CAAC,EAAE,KAAK,OAAOA,YAAY,CAAC,EAAE,KAAK,IAAG,GACnD;QACA,OAAO;IACT;IAEA,MAAOA,aAAaC,MAAM,CAAE;QAC1B,MAAMC,cAAcF,aAAaG,GAAG;QACpC,MAAMC,aAAaN,YAAYK,GAAG;QAElC,OAAQD;YACN,KAAK;gBAAI;oBACP,sDAAsD;oBACtD,OAAO;gBACT;YACA,KAAK;gBAAK;oBACR,2EAA2E;oBAC3E,IAAIE,YAAY;wBACd;oBACF,OAAO;wBACL,OAAO;oBACT;gBACF;YACA,KAAK;gBAAM;oBACT,qEAAqE;oBACrE,IAAIJ,aAAaC,MAAM,GAAG,GAAG;wBAC3B,OAAO;oBACT;oBACA,2FAA2F;oBAC3F,OAAOG,eAAeC;gBACxB;YACA;gBAAS;oBACP,IAAID,eAAeF,aAAa;wBAC9B,OAAO;oBACT;gBACF;QACF;IACF;IAEA,4EAA4E;IAC5E,OAAOJ,YAAYG,MAAM,KAAK;AAChC;AAEA,OAAO,MAAMK,sBAAsB,CACjCC,cACAC,iBAA2B,EAAE;IAE7B,OAAOA,eAAeC,IAAI,CACxB,CAACC,gBACCA,iBACCA,CAAAA,kBAAkBH,gBACjBZ,oBAAoBY,cAAcG,cAAa;AAEvD,EAAC"}
|
||||
476
frontend/webapp/node_modules/next/dist/esm/server/app-render/dynamic-rendering.js
generated
vendored
Normal file
476
frontend/webapp/node_modules/next/dist/esm/server/app-render/dynamic-rendering.js
generated
vendored
Normal file
@@ -0,0 +1,476 @@
|
||||
/**
|
||||
* The functions provided by this module are used to communicate certain properties
|
||||
* about the currently running code so that Next.js can make decisions on how to handle
|
||||
* the current execution in different rendering modes such as pre-rendering, resuming, and SSR.
|
||||
*
|
||||
* Today Next.js treats all code as potentially static. Certain APIs may only make sense when dynamically rendering.
|
||||
* Traditionally this meant deopting the entire render to dynamic however with PPR we can now deopt parts
|
||||
* of a React tree as dynamic while still keeping other parts static. There are really two different kinds of
|
||||
* Dynamic indications.
|
||||
*
|
||||
* The first is simply an intention to be dynamic. unstable_noStore is an example of this where
|
||||
* the currently executing code simply declares that the current scope is dynamic but if you use it
|
||||
* inside unstable_cache it can still be cached. This type of indication can be removed if we ever
|
||||
* make the default dynamic to begin with because the only way you would ever be static is inside
|
||||
* a cache scope which this indication does not affect.
|
||||
*
|
||||
* The second is an indication that a dynamic data source was read. This is a stronger form of dynamic
|
||||
* because it means that it is inappropriate to cache this at all. using a dynamic data source inside
|
||||
* unstable_cache should error. If you want to use some dynamic data inside unstable_cache you should
|
||||
* read that data outside the cache and pass it in as an argument to the cached function.
|
||||
*/ // Once postpone is in stable we should switch to importing the postpone export directly
|
||||
import React from 'react';
|
||||
import { DynamicServerError } from '../../client/components/hooks-server-context';
|
||||
import { StaticGenBailoutError } from '../../client/components/static-generation-bailout';
|
||||
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
|
||||
import { workAsyncStorage } from '../app-render/work-async-storage.external';
|
||||
import { makeHangingPromise } from '../dynamic-rendering-utils';
|
||||
import { METADATA_BOUNDARY_NAME, VIEWPORT_BOUNDARY_NAME, OUTLET_BOUNDARY_NAME } from '../../lib/metadata/metadata-constants';
|
||||
import { scheduleOnNextTick } from '../../lib/scheduler';
|
||||
// Feature-detect the experimental React.unstable_postpone API on this React build.
const hasPostpone = typeof React.unstable_postpone === 'function';
|
||||
/**
 * Creates a fresh per-render record of dynamic API accesses.
 *
 * @param isDebugDynamicAccesses when true, stack traces are captured for each
 *   recorded access.
 */ export function createDynamicTrackingState(isDebugDynamicAccesses) {
    const trackingState = {
        isDebugDynamicAccesses,
        dynamicAccesses: [],
        syncDynamicExpression: undefined,
        syncDynamicErrorWithStack: null
    };
    return trackingState;
}
|
||||
/**
 * Creates the accumulator used to validate dynamic behavior during a render;
 * all flags start false and errors are collected into `dynamicErrors`.
 */ export function createDynamicValidationState() {
    const validationState = {
        hasSuspendedDynamic: false,
        hasDynamicMetadata: false,
        hasDynamicViewport: false,
        hasSyncDynamicErrors: false,
        dynamicErrors: []
    };
    return validationState;
}
|
||||
/**
 * Expression of the earliest recorded dynamic access, or `undefined` when
 * nothing dynamic has been tracked yet.
 */ export function getFirstDynamicReason(trackingState) {
    const firstAccess = trackingState.dynamicAccesses[0];
    return firstAccess == null ? undefined : firstAccess.expression;
}
|
||||
/**
 * This function communicates that the current scope should be treated as dynamic.
 *
 * In most cases this function is a no-op but if called during
 * a PPR prerender it will postpone the current sub-tree and calling
 * it during a normal prerender will cause the entire prerender to abort
 *
 * @param store the work (static generation) store for the route
 * @param workUnitStore the current work-unit store, if any (cache scope,
 *   prerender, or request)
 * @param expression human-readable description of the dynamic API that was used
 * @throws StaticGenBailoutError when `dynamic = "error"` is configured;
 *   DynamicServerError during legacy (non-PPR) prerendering
 */ export function markCurrentScopeAsDynamic(store, workUnitStore, expression) {
    if (workUnitStore) {
        if (workUnitStore.type === 'cache' || workUnitStore.type === 'unstable-cache') {
            // inside cache scopes marking a scope as dynamic has no effect because the outer cache scope
            // creates a cache boundary. This is subtly different from reading a dynamic data source which is
            // forbidden inside a cache scope.
            return;
        }
    }
    // If we're forcing dynamic rendering or we're forcing static rendering, we
    // don't need to do anything here because the entire page is already dynamic
    // or it's static and it should not throw or postpone here.
    if (store.forceDynamic || store.forceStatic) return;
    if (store.dynamicShouldError) {
        throw Object.defineProperty(new StaticGenBailoutError(`Route ${store.route} with \`dynamic = "error"\` couldn't be rendered statically because it used \`${expression}\`. See more info here: https://nextjs.org/docs/app/building-your-application/rendering/static-and-dynamic#dynamic-rendering`), "__NEXT_ERROR_CODE", {
            value: "E553",
            enumerable: false,
            configurable: true
        });
    }
    if (workUnitStore) {
        if (workUnitStore.type === 'prerender-ppr') {
            // PPR: postpone only this sub-tree; the rest of the page stays static.
            postponeWithTracking(store.route, expression, workUnitStore.dynamicTracking);
        } else if (workUnitStore.type === 'prerender-legacy') {
            workUnitStore.revalidate = 0;
            // We aren't prerendering but we are generating a static page. We need to bail out of static generation
            const err = Object.defineProperty(new DynamicServerError(`Route ${store.route} couldn't be rendered statically because it used ${expression}. See more info here: https://nextjs.org/docs/messages/dynamic-server-error`), "__NEXT_ERROR_CODE", {
                value: "E550",
                enumerable: false,
                configurable: true
            });
            store.dynamicUsageDescription = expression;
            store.dynamicUsageStack = err.stack;
            throw err;
        } else if (process.env.NODE_ENV === 'development' && workUnitStore && workUnitStore.type === 'request') {
            // In dev we just note that something dynamic happened for messaging.
            workUnitStore.usedDynamic = true;
        }
    }
}
|
||||
/**
 * This function communicates that some dynamic path parameter was read. This
 * differs from the more general `trackDynamicDataAccessed` in that it is will
 * not error when `dynamic = "error"` is set.
 *
 * @param store The static generation store
 * @param expression The expression that was accessed dynamically
 */ export function trackFallbackParamAccessed(store, expression) {
    const workUnit = workUnitAsyncStorage.getStore();
    // Only meaningful during a PPR prerender; otherwise a no-op.
    if (workUnit && workUnit.type === 'prerender-ppr') {
        postponeWithTracking(store.route, expression, workUnit.dynamicTracking);
    }
}
|
||||
/**
 * This function is meant to be used when prerendering without dynamicIO or PPR.
 * When called during a build it will cause Next.js to consider the route as dynamic.
 *
 * @param expression the dynamic API that interrupted static generation
 * @param store the work (static generation) store
 * @param prerenderStore the active prerender store; its revalidate is zeroed
 * @throws DynamicServerError always
 * @internal
 */ export function throwToInterruptStaticGeneration(expression, store, prerenderStore) {
    // We aren't prerendering but we are generating a static page. We need to bail out of static generation
    const bailoutError = new DynamicServerError(`Route ${store.route} couldn't be rendered statically because it used \`${expression}\`. See more info here: https://nextjs.org/docs/messages/dynamic-server-error`);
    Object.defineProperty(bailoutError, "__NEXT_ERROR_CODE", {
        value: "E558",
        enumerable: false,
        configurable: true
    });
    prerenderStore.revalidate = 0;
    store.dynamicUsageDescription = expression;
    store.dynamicUsageStack = bailoutError.stack;
    throw bailoutError;
}
|
||||
/**
 * This function should be used to track whether something dynamic happened even when
 * we are in a dynamic render. This is useful for Dev where all renders are dynamic but
 * we still track whether dynamic APIs were accessed for helpful messaging
 *
 * @internal
 */ export function trackDynamicDataInDynamicRender(_store, workUnitStore) {
    if (!workUnitStore) return;
    const { type } = workUnitStore;
    // inside cache scopes marking a scope as dynamic has no effect because the outer cache scope
    // creates a cache boundary. This is subtly different from reading a dynamic data source which is
    // forbidden inside a cache scope.
    if (type === 'cache' || type === 'unstable-cache') return;
    if (type === 'prerender' || type === 'prerender-legacy') {
        workUnitStore.revalidate = 0;
    }
    if (process.env.NODE_ENV === 'development' && type === 'request') {
        workUnitStore.usedDynamic = true;
    }
}
|
||||
// Despite its name we don't actually abort unless we have a controller to call abort on
// There are times when we let a prerender run long to discover caches where we want the semantics
// of tracking dynamic access without terminating the prerender early
function abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore) {
    const reason = `Route ${route} needs to bail out of prerendering at this point because it used ${expression}.`;
    prerenderStore.controller.abort(createPrerenderInterruptedError(reason));
    const tracking = prerenderStore.dynamicTracking;
    if (tracking) {
        const access = {
            // When we aren't debugging, we don't need to create another error for the
            // stack trace.
            stack: undefined,
            expression
        };
        if (tracking.isDebugDynamicAccesses) {
            access.stack = new Error().stack;
        }
        tracking.dynamicAccesses.push(access);
    }
}
|
||||
/**
 * Records the first synchronous platform-IO access (expression + error with
 * stack) on the tracking state, then routes through the shared abort helper.
 */ export function abortOnSynchronousPlatformIOAccess(route, expression, errorWithStack, prerenderStore) {
    const tracking = prerenderStore.dynamicTracking;
    // Only the first sync dynamic error is remembered; later ones would shadow
    // the root cause.
    if (tracking && tracking.syncDynamicErrorWithStack === null) {
        tracking.syncDynamicExpression = expression;
        tracking.syncDynamicErrorWithStack = errorWithStack;
    }
    abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore);
}
|
||||
/**
 * Dev-mode equivalent of aborting: there is no controller, so we advance the
 * request store out of its prerender phase instead.
 */ export function trackSynchronousPlatformIOAccessInDev(requestStore) {
    requestStore.prerenderPhase = false;
}
|
||||
/**
 * use this function when prerendering with dynamicIO. If we are doing a
 * prospective prerender we don't actually abort because we want to discover
 * all caches for the shell. If this is the actual prerender we do abort.
 *
 * This function accepts a prerenderStore but the caller should ensure we're
 * actually running in dynamicIO mode.
 *
 * Note: this function ALWAYS throws a prerender-interrupted error after
 * (conditionally) aborting the prerender.
 *
 * @internal
 */ export function abortAndThrowOnSynchronousRequestDataAccess(route, expression, errorWithStack, prerenderStore) {
    const prerenderSignal = prerenderStore.controller.signal;
    // Only record/abort on the first interruption; subsequent calls just throw.
    if (prerenderSignal.aborted === false) {
        // TODO it would be better to move this aborted check into the callsite so we can avoid making
        // the error object when it isn't relevant to the aborting of the prerender however
        // since we need the throw semantics regardless of whether we abort it is easier to land
        // this way. See how this was handled with `abortOnSynchronousPlatformIOAccess` for a closer
        // to ideal implementation
        const dynamicTracking = prerenderStore.dynamicTracking;
        if (dynamicTracking) {
            if (dynamicTracking.syncDynamicErrorWithStack === null) {
                dynamicTracking.syncDynamicExpression = expression;
                dynamicTracking.syncDynamicErrorWithStack = errorWithStack;
                if (prerenderStore.validating === true) {
                    // We always log Request Access in dev at the point of calling the function
                    // So we mark the dynamic validation as not requiring it to be printed
                    dynamicTracking.syncDynamicLogged = true;
                }
            }
        }
        abortOnSynchronousDynamicDataAccess(route, expression, prerenderStore);
    }
    throw createPrerenderInterruptedError(`Route ${route} needs to bail out of prerendering at this point because it used ${expression}.`);
}
|
||||
// For now these implementations are the same so we just reexport:
// synchronous request-data access and platform-IO access are handled
// identically in dev.
export const trackSynchronousRequestDataAccessInDev = trackSynchronousPlatformIOAccessInDev;
|
||||
/**
 * Component form of `postponeWithTracking`; only PPR prerenders carry dynamic
 * tracking state.
 */ export function Postpone({ reason, route }) {
    const workUnitStore = workUnitAsyncStorage.getStore();
    let dynamicTracking = null;
    if (workUnitStore && workUnitStore.type === 'prerender-ppr') {
        dynamicTracking = workUnitStore.dynamicTracking;
    }
    postponeWithTracking(route, reason, dynamicTracking);
}
|
||||
/**
 * Records the dynamic access (when tracking is active) and postpones the
 * current React sub-tree via `React.unstable_postpone`.
 */ export function postponeWithTracking(route, expression, dynamicTracking) {
    assertPostpone();
    if (dynamicTracking) {
        const access = {
            // When we aren't debugging, we don't need to create another error for the
            // stack trace.
            stack: undefined,
            expression
        };
        if (dynamicTracking.isDebugDynamicAccesses) {
            access.stack = new Error().stack;
        }
        dynamicTracking.dynamicAccesses.push(access);
    }
    React.unstable_postpone(createPostponeReason(route, expression));
}
|
||||
// Builds the canonical postpone message for a route/expression pair.
// isDynamicPostponeReason keys off two fixed fragments of this string, so the
// wording must stay in sync with that detector (enforced by a module self-check).
function createPostponeReason(route, expression) {
    return `Route ${route} needs to bail out of prerendering at this point because it used ${expression}. React throws this special object to indicate where. It should not be caught by your own try/catch. Learn more: https://nextjs.org/docs/messages/ppr-caught-error`;
}
|
||||
/**
 * Returns true when the thrown value looks like a dynamic postpone produced by
 * createPostponeReason (duck-typed via its message, since the postpone object
 * itself is opaque to us).
 */ export function isDynamicPostpone(err) {
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    if (typeof err.message !== 'string') {
        return false;
    }
    return isDynamicPostponeReason(err.message);
}
|
||||
// True when the message contains both marker fragments written by
// createPostponeReason; keep the fragments in sync with that function.
function isDynamicPostponeReason(reason) {
    const markers = [
        'needs to bail out of prerendering at this point because it used',
        'Learn more: https://nextjs.org/docs/messages/ppr-caught-error'
    ];
    return markers.every((marker)=>reason.includes(marker));
}
|
||||
// Module-load self-check: guarantee that isDynamicPostponeReason recognizes the
// exact message format produced by createPostponeReason. If the two ever drift
// apart, fail fast here instead of silently misclassifying postpones at runtime.
if (isDynamicPostponeReason(createPostponeReason('%%%', '^^^')) === false) {
    throw Object.defineProperty(new Error('Invariant: isDynamicPostpone misidentified a postpone reason. This is a bug in Next.js'), "__NEXT_ERROR_CODE", {
        value: "E296",
        enumerable: false,
        configurable: true
    });
}
|
||||
// Sentinel digest marking errors thrown to interrupt a prerender.
const NEXT_PRERENDER_INTERRUPTED = 'NEXT_PRERENDER_INTERRUPTED';
/**
 * Creates an Error tagged with the prerender-interrupted digest plus a
 * non-enumerable Next.js error code, so it can be recognized downstream.
 */ function createPrerenderInterruptedError(message) {
    const error = new Error(message);
    Object.defineProperty(error, "__NEXT_ERROR_CODE", {
        value: "E394",
        enumerable: false,
        configurable: true
    });
    error.digest = NEXT_PRERENDER_INTERRUPTED;
    return error;
}
/**
 * True when the value is a real Error carrying the prerender-interrupted
 * digest. Checks are ordered cheapest-first and mirror the creation above.
 */ export function isPrerenderInterruptedError(error) {
    if (typeof error !== 'object' || error === null) {
        return false;
    }
    if (error.digest !== NEXT_PRERENDER_INTERRUPTED) {
        return false;
    }
    return 'name' in error && 'message' in error && error instanceof Error;
}
|
||||
/**
 * Whether any dynamic-data access was recorded for this render.
 */ export function accessedDynamicData(dynamicAccesses) {
    return dynamicAccesses.length !== 0;
}
|
||||
/**
 * Merges the client-side dynamic accesses into the server-side list and
 * returns the combined list. Intentionally mutates serverDynamic: this is only
 * called once writing to the tracking state has finished, and appending is
 * cheaper than allocating a fresh array.
 */ export function consumeDynamicAccess(serverDynamic, clientDynamic) {
    const combined = serverDynamic.dynamicAccesses;
    for (const access of clientDynamic.dynamicAccesses){
        combined.push(access);
    }
    return combined;
}
|
||||
/**
 * Formats recorded dynamic API accesses (those that captured a stack) into
 * human-readable debug strings, stripping framework/internal stack frames.
 */ export function formatDynamicAPIAccesses(dynamicAccesses) {
    const hasUsefulStack = (access)=>typeof access.stack === 'string' && access.stack.length > 0;
    const isUserFrame = (line)=>{
        // Drop Next.js internals, anonymous frames, and Node.js internals.
        if (line.includes('node_modules/next/')) return false;
        if (line.includes(' (<anonymous>)')) return false;
        if (line.includes(' (node:')) return false;
        return true;
    };
    return dynamicAccesses.filter(hasUsefulStack).map(({ expression, stack })=>{
        // Skip the "Error:" line plus the 3 frames between the user code and
        // the `new Error().stack` capture site, then keep only user frames.
        const trimmed = stack.split('\n').slice(4).filter(isUserFrame).join('\n');
        return `Dynamic API Usage Debug - ${expression}:\n${trimmed}`;
    });
}
|
||||
// Guard invoked before calling React.unstable_postpone: throws an invariant
// error if the loaded React build lacks postpone support. `hasPostpone` is a
// module-level flag defined outside this excerpt — presumably computed from
// typeof React.unstable_postpone at module load; confirm upstream.
function assertPostpone() {
    if (!hasPostpone) {
        throw Object.defineProperty(new Error(`Invariant: React.unstable_postpone is not defined. This suggests the wrong version of React was loaded. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E224",
            enumerable: false,
            configurable: true
        });
    }
}
|
||||
/**
 * This is a bit of a hack to allow us to abort a render using a Postpone instance instead of an Error which changes React's
 * abort semantics slightly.
 *
 * @param reason string forwarded to React.unstable_postpone
 * @returns an AbortSignal that is already aborted, with the caught postpone
 *          instance as the abort reason
 */ export function createPostponedAbortSignal(reason) {
    assertPostpone();
    const controller = new AbortController();
    // We get our hands on a postpone instance by calling postpone and catching the throw
    try {
        React.unstable_postpone(reason);
    } catch (x) {
        controller.abort(x);
    }
    return controller.signal;
}
|
||||
/**
 * In a prerender, we may end up with hanging Promises as inputs due them
 * stalling on connection() or because they're loading dynamic data. In that
 * case we need to abort the encoding of arguments since they'll never complete.
 *
 * @param workUnitStore the current prerender work-unit store
 * @returns an AbortSignal that fires once inputs can no longer resolve
 */ export function createHangingInputAbortSignal(workUnitStore) {
    const controller = new AbortController();
    if (workUnitStore.cacheSignal) {
        // If we have a cacheSignal it means we're in a prospective render. If the input
        // we're waiting on is coming from another cache, we do want to wait for it so that
        // we can resolve this cache entry too.
        workUnitStore.cacheSignal.inputReady().then(()=>{
            controller.abort();
        });
    } else {
        // Otherwise we're in the final render and we should already have all our caches
        // filled. We might still be waiting on some microtasks so we wait one tick before
        // giving up. When we give up, we still want to render the content of this cache
        // as deeply as we can so that we can suspend as deeply as possible in the tree
        // or not at all if we don't end up waiting for the input.
        scheduleOnNextTick(()=>controller.abort());
    }
    return controller.signal;
}
|
||||
/**
 * Records a dynamic access on the prerender store's tracking state (if any),
 * capturing a stack trace only when debug tracking is enabled. No-op when the
 * store has no dynamicTracking.
 */ export function annotateDynamicAccess(expression, prerenderStore) {
    const tracking = prerenderStore.dynamicTracking;
    if (!tracking) {
        return;
    }
    tracking.dynamicAccesses.push({
        stack: tracking.isDebugDynamicAccesses ? new Error().stack : undefined,
        expression
    });
}
|
||||
// Hook that marks the current component as dynamic when the route is being
// statically generated with unfilled (fallback) route params. Behavior depends
// on which prerender mode the work-unit store indicates; all other store types
// fall through as a no-op.
export function useDynamicRouteParams(expression) {
    const workStore = workAsyncStorage.getStore();
    if (workStore && workStore.isStaticGeneration && workStore.fallbackRouteParams && workStore.fallbackRouteParams.size > 0) {
        // There are fallback route params, we should track these as dynamic
        // accesses.
        const workUnitStore = workUnitAsyncStorage.getStore();
        if (workUnitStore) {
            // We're prerendering with dynamicIO or PPR or both
            if (workUnitStore.type === 'prerender') {
                // We are in a prerender with dynamicIO semantics
                // We are going to hang here and never resolve. This will cause the currently
                // rendering component to effectively be a dynamic hole
                React.use(makeHangingPromise(workUnitStore.renderSignal, expression));
            } else if (workUnitStore.type === 'prerender-ppr') {
                // We're prerendering with PPR
                postponeWithTracking(workStore.route, expression, workUnitStore.dynamicTracking);
            } else if (workUnitStore.type === 'prerender-legacy') {
                // Legacy static generation: throw to interrupt the prerender.
                throwToInterruptStaticGeneration(expression, workStore, workUnitStore);
            }
        }
    }
}
|
||||
// Detects a React <Suspense> frame in a captured component stack.
const hasSuspenseRegex = /\n\s+at Suspense \(<anonymous>\)/;
// Detect Next.js's synthetic metadata/viewport/outlet boundary components by
// name; the *_BOUNDARY_NAME constants are imported from elsewhere in Next.js.
const hasMetadataRegex = new RegExp(`\\n\\s+at ${METADATA_BOUNDARY_NAME}[\\n\\s]`);
const hasViewportRegex = new RegExp(`\\n\\s+at ${VIEWPORT_BOUNDARY_NAME}[\\n\\s]`);
const hasOutletRegex = new RegExp(`\\n\\s+at ${OUTLET_BOUNDARY_NAME}[\\n\\s]`);
|
||||
// Classifies a dynamic access by inspecting the captured component stack and
// records the result on dynamicValidation. The check order is significant:
// outlet boundaries are ignored entirely, metadata/viewport boundaries are
// flagged for later reporting, a Suspense ancestor makes the access allowed,
// sync dynamic errors take precedence over the generic missing-Suspense error.
export function trackAllowedDynamicAccess(route, componentStack, dynamicValidation, serverDynamic, clientDynamic) {
    if (hasOutletRegex.test(componentStack)) {
        // We don't need to track that this is dynamic. It is only so when something else is also dynamic.
        return;
    } else if (hasMetadataRegex.test(componentStack)) {
        dynamicValidation.hasDynamicMetadata = true;
        return;
    } else if (hasViewportRegex.test(componentStack)) {
        dynamicValidation.hasDynamicViewport = true;
        return;
    } else if (hasSuspenseRegex.test(componentStack)) {
        dynamicValidation.hasSuspendedDynamic = true;
        return;
    } else if (serverDynamic.syncDynamicErrorWithStack || clientDynamic.syncDynamicErrorWithStack) {
        dynamicValidation.hasSyncDynamicErrors = true;
        return;
    } else {
        // No boundary above the access: queue an error pointing at the
        // offending component stack for throwIfDisallowedDynamic to report.
        const message = `Route "${route}": A component accessed data, headers, params, searchParams, or a short-lived cache without a Suspense boundary nor a "use cache" above it. We don't have the exact line number added to error messages yet but you can see which component in the stack below. See more info: https://nextjs.org/docs/messages/next-prerender-missing-suspense`;
        const error = createErrorWithComponentStack(message, componentStack);
        dynamicValidation.dynamicErrors.push(error);
        return;
    }
}
|
||||
/**
 * Builds an Error whose stack is replaced with the React component stack, so
 * logs point at components rather than framework frames.
 */ function createErrorWithComponentStack(message, componentStack) {
    const error = new Error(message);
    Object.defineProperty(error, "__NEXT_ERROR_CODE", {
        value: "E394",
        enumerable: false,
        configurable: true
    });
    error.stack = 'Error: ' + message + componentStack;
    return error;
}
|
||||
// Final validation gate for a prerender: logs any disallowed dynamic accesses
// collected during rendering and throws StaticGenBailoutError if the route
// cannot be prerendered. Server-side sync errors take precedence over
// client-side ones; generic dynamic errors are reported before the
// metadata/viewport-specific failures.
export function throwIfDisallowedDynamic(route, dynamicValidation, serverDynamic, clientDynamic) {
    let syncError;
    let syncExpression;
    let syncLogged;
    if (serverDynamic.syncDynamicErrorWithStack) {
        syncError = serverDynamic.syncDynamicErrorWithStack;
        syncExpression = serverDynamic.syncDynamicExpression;
        syncLogged = serverDynamic.syncDynamicLogged === true;
    } else if (clientDynamic.syncDynamicErrorWithStack) {
        syncError = clientDynamic.syncDynamicErrorWithStack;
        syncExpression = clientDynamic.syncDynamicExpression;
        syncLogged = clientDynamic.syncDynamicLogged === true;
    } else {
        syncError = null;
        syncExpression = undefined;
        syncLogged = false;
    }
    if (dynamicValidation.hasSyncDynamicErrors && syncError) {
        if (!syncLogged) {
            // In dev we already log errors about sync dynamic access. But during builds we need to ensure
            // the offending sync error is logged before we exit the build
            console.error(syncError);
        }
        // The actual error should have been logged when the sync access occurred
        throw new StaticGenBailoutError();
    }
    const dynamicErrors = dynamicValidation.dynamicErrors;
    if (dynamicErrors.length) {
        for(let i = 0; i < dynamicErrors.length; i++){
            console.error(dynamicErrors[i]);
        }
        throw new StaticGenBailoutError();
    }
    if (!dynamicValidation.hasSuspendedDynamic) {
        if (dynamicValidation.hasDynamicMetadata) {
            if (syncError) {
                console.error(syncError);
                throw Object.defineProperty(new StaticGenBailoutError(`Route "${route}" has a \`generateMetadata\` that could not finish rendering before ${syncExpression} was used. Follow the instructions in the error for this expression to resolve.`), "__NEXT_ERROR_CODE", {
                    value: "E608",
                    enumerable: false,
                    configurable: true
                });
            }
            throw Object.defineProperty(new StaticGenBailoutError(`Route "${route}" has a \`generateMetadata\` that depends on Request data (\`cookies()\`, etc...) or external data (\`fetch(...)\`, etc...) but the rest of the route was static or only used cached data (\`"use cache"\`). If you expected this route to be prerenderable update your \`generateMetadata\` to not use Request data and only use cached external data. Otherwise, add \`await connection()\` somewhere within this route to indicate explicitly it should not be prerendered.`), "__NEXT_ERROR_CODE", {
                value: "E534",
                enumerable: false,
                configurable: true
            });
        } else if (dynamicValidation.hasDynamicViewport) {
            if (syncError) {
                console.error(syncError);
                throw Object.defineProperty(new StaticGenBailoutError(`Route "${route}" has a \`generateViewport\` that could not finish rendering before ${syncExpression} was used. Follow the instructions in the error for this expression to resolve.`), "__NEXT_ERROR_CODE", {
                    value: "E573",
                    enumerable: false,
                    configurable: true
                });
            }
            throw Object.defineProperty(new StaticGenBailoutError(`Route "${route}" has a \`generateViewport\` that depends on Request data (\`cookies()\`, etc...) or external data (\`fetch(...)\`, etc...) but the rest of the route was static or only used cached data (\`"use cache"\`). If you expected this route to be prerenderable update your \`generateViewport\` to not use Request data and only use cached external data. Otherwise, add \`await connection()\` somewhere within this route to indicate explicitly it should not be prerendered.`), "__NEXT_ERROR_CODE", {
                value: "E590",
                enumerable: false,
                configurable: true
            });
        }
    }
}
|
||||
|
||||
//# sourceMappingURL=dynamic-rendering.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/dynamic-rendering.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/dynamic-rendering.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
99
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils-server.js
generated
vendored
Normal file
99
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils-server.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
// This file should never be bundled into application's runtime code and should
|
||||
// stay in the Next.js server.
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { getStorageDirectory } from '../cache-dir';
|
||||
import { arrayBufferToString } from './encryption-utils';
|
||||
// Keep the key in memory as it should never change during the lifetime of the server in
// both development and production.
let __next_encryption_key_generation_promise = null;
// Name of the config file persisted under the storage directory.
const CONFIG_FILE = '.rscinfo';
// JSON keys used inside the config file.
const ENCRYPTION_KEY = 'encryption.key';
const ENCRYPTION_EXPIRE_AT = 'encryption.expire_at';
// Key lifetime before build-time rotation (see loadOrGenerateKey), in ms.
const EXPIRATION = 1000 * 60 * 60 * 24 * 14 // 14 days
;
|
||||
// Persists the encryption key (plus its expiry timestamp) to the .rscinfo
// config file under the dist storage directory. Silently does nothing when no
// persistent storage directory is available (e.g. in development).
// NOTE(review): the existsSync check before mkdir is racy but harmless —
// `recursive: true` makes mkdir idempotent anyway.
async function writeCache(distDir, configValue) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) return;
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    if (!fs.existsSync(cacheBaseDir)) {
        await fs.promises.mkdir(cacheBaseDir, {
            recursive: true
        });
    }
    await fs.promises.writeFile(configPath, JSON.stringify({
        [ENCRYPTION_KEY]: configValue,
        [ENCRYPTION_EXPIRE_AT]: Date.now() + EXPIRATION
    }));
}
|
||||
// This utility is used to get a key for the cache directory. If the
// key is not present, it will generate a new one and store it in the
// cache directory inside dist.
// The key will also expire after a certain amount of time. Once it
// expires, a new one will be generated.
// During the lifetime of the server, it will be reused and never refreshed.
async function loadOrGenerateKey(distDir, isBuild, generateKey) {
    const cacheBaseDir = getStorageDirectory(distDir);
    if (!cacheBaseDir) {
        // There's no persistent storage available. We generate a new key.
        // This also covers development time.
        return await generateKey();
    }
    const configPath = path.join(cacheBaseDir, CONFIG_FILE);
    // Returns the cached key string when the config file exists and is valid,
    // otherwise `false` (missing file, malformed JSON, wrong shape, expired at
    // build time, or mismatch with an explicitly provided env key).
    async function hasCachedKey() {
        if (!fs.existsSync(configPath)) return false;
        try {
            const config = JSON.parse(await fs.promises.readFile(configPath, 'utf8'));
            if (!config) return false;
            if (typeof config[ENCRYPTION_KEY] !== 'string' || typeof config[ENCRYPTION_EXPIRE_AT] !== 'number') {
                return false;
            }
            // For build time, we need to rotate the key if it's expired. Otherwise
            // (next start) we have to keep the key as it is so the runtime key matches
            // the build time key.
            if (isBuild && config[ENCRYPTION_EXPIRE_AT] < Date.now()) {
                return false;
            }
            const cachedKey = config[ENCRYPTION_KEY];
            // If encryption key is provided via env, and it's not same as valid cache,
            // we should not use the cached key and respect the env key.
            if (cachedKey && process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY && cachedKey !== process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY) {
                return false;
            }
            return cachedKey;
        } catch {
            // Broken config file. We should generate a new key and overwrite it.
            return false;
        }
    }
    const maybeValidKey = await hasCachedKey();
    if (typeof maybeValidKey === 'string') {
        return maybeValidKey;
    }
    // No usable cached key: generate a fresh one and persist it for next time.
    const key = await generateKey();
    await writeCache(distDir, key);
    return key;
}
|
||||
// Returns the base64-encoded Server Actions encryption key, generating (and
// persisting, when possible) a 256-bit AES-GCM key on first use. The promise
// is cached module-wide so concurrent callers share one generation.
// NOTE(review): a rejected promise also stays cached, so a transient failure
// is permanent for this process — confirm whether that is intended.
export async function generateEncryptionKeyBase64({ isBuild, distDir }) {
    // This avoids it being generated multiple times in parallel.
    if (!__next_encryption_key_generation_promise) {
        __next_encryption_key_generation_promise = loadOrGenerateKey(distDir, isBuild, async ()=>{
            // An explicitly provided env key always wins over generation.
            const providedKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY;
            if (providedKey) {
                return providedKey;
            }
            const key = await crypto.subtle.generateKey({
                name: 'AES-GCM',
                length: 256
            }, true, [
                'encrypt',
                'decrypt'
            ]);
            const exported = await crypto.subtle.exportKey('raw', key);
            return btoa(arrayBufferToString(exported));
        });
    }
    return __next_encryption_key_generation_promise;
}
|
||||
|
||||
//# sourceMappingURL=encryption-utils-server.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils-server.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils-server.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
152
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils.js
generated
vendored
Normal file
152
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
import { InvariantError } from '../../shared/lib/invariant-error';
|
||||
import { normalizeAppPath } from '../../shared/lib/router/utils/app-paths';
|
||||
import { workAsyncStorage } from './work-async-storage.external';
|
||||
// Lazily-imported CryptoKey for Server Action bound-arg encryption, cached for
// the lifetime of the process (see getActionEncryptionKey).
let __next_loaded_action_key;
|
||||
/**
 * Converts an ArrayBuffer to a "binary string" where each byte becomes one
 * UTF-16 code unit (0-255), suitable for btoa().
 */ export function arrayBufferToString(buffer) {
    const bytes = new Uint8Array(buffer);
    const len = bytes.byteLength;
    // @anonrig: V8 has a limit of 65535 arguments in a function.
    // For len < 65535, this is faster.
    // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623
    if (len < 65535) {
        return String.fromCharCode.apply(null, bytes);
    }
    // Large buffers: convert byte-by-byte and join once at the end.
    const chunks = [];
    for (const byte of bytes){
        chunks.push(String.fromCharCode(byte));
    }
    return chunks.join('');
}
|
||||
/**
 * Inverse of arrayBufferToString: maps each UTF-16 code unit of a binary
 * string back to a byte. Indexes by code unit deliberately (not by code
 * point), matching how the string was produced.
 */ export function stringToUint8Array(binary) {
    const length = binary.length;
    const bytes = new Uint8Array(length);
    for (let index = 0; index < length; index++){
        bytes[index] = binary.charCodeAt(index);
    }
    return bytes;
}
|
||||
export function encrypt(key, iv, data) {
|
||||
return crypto.subtle.encrypt({
|
||||
name: 'AES-GCM',
|
||||
iv
|
||||
}, key, data);
|
||||
}
|
||||
export function decrypt(key, iv, data) {
|
||||
return crypto.subtle.decrypt({
|
||||
name: 'AES-GCM',
|
||||
iv
|
||||
}, key, data);
|
||||
}
|
||||
// This is a global singleton that is used to encode/decode the action bound args from
// the closure. This can't be using a AsyncLocalStorage as it might happen on the module
// level. Since the client reference manifest won't be mutated, let's use a global singleton
// to keep it.
const SERVER_ACTION_MANIFESTS_SINGLETON = Symbol.for('next.server.action-manifests');
/**
 * Registers (or updates) the global manifests singleton, merging this page's
 * client reference manifest into any manifests registered by other pages.
 */ export function setReferenceManifestsSingleton({ page, clientReferenceManifest, serverActionsManifest, serverModuleMap }) {
    // @ts-expect-error
    const existingSingleton = globalThis[SERVER_ACTION_MANIFESTS_SINGLETON];
    const existingManifestsPerPage = existingSingleton == null ? undefined : existingSingleton.clientReferenceManifestsPerPage;
    // @ts-expect-error
    globalThis[SERVER_ACTION_MANIFESTS_SINGLETON] = {
        clientReferenceManifestsPerPage: {
            ...existingManifestsPerPage,
            [normalizeAppPath(page)]: clientReferenceManifest
        },
        serverActionsManifest,
        serverModuleMap
    };
}
|
||||
/**
 * Returns the server module map from the global manifests singleton.
 * @throws InvariantError (E606) when no manifests have been registered yet.
 */ export function getServerModuleMap() {
    const singleton = globalThis[SERVER_ACTION_MANIFESTS_SINGLETON];
    if (!singleton) {
        const invariant = new InvariantError('Missing manifest for Server Actions.');
        throw Object.defineProperty(invariant, "__NEXT_ERROR_CODE", {
            value: "E606",
            enumerable: false,
            configurable: true
        });
    }
    return singleton.serverModuleMap;
}
|
||||
// Resolves the client reference manifest for the current RSC render: the
// per-route manifest when a work store is active, otherwise a merged manifest
// across all registered pages (module-evaluation time).
// @throws InvariantError E606 when no manifests are registered, E570 when the
// active route has no manifest entry.
export function getClientReferenceManifestForRsc() {
    const serverActionsManifestSingleton = globalThis[SERVER_ACTION_MANIFESTS_SINGLETON];
    if (!serverActionsManifestSingleton) {
        throw Object.defineProperty(new InvariantError('Missing manifest for Server Actions.'), "__NEXT_ERROR_CODE", {
            value: "E606",
            enumerable: false,
            configurable: true
        });
    }
    const { clientReferenceManifestsPerPage } = serverActionsManifestSingleton;
    const workStore = workAsyncStorage.getStore();
    if (!workStore) {
        // If there's no work store defined, we can assume that a client reference
        // manifest is needed during module evaluation, e.g. to create a server
        // action using a higher-order function. This might also use client
        // components which need to be serialized by Flight, and therefore client
        // references need to be resolvable. To make this work, we're returning a
        // merged manifest across all pages. This is fine as long as the module IDs
        // are not page specific, which they are not for Webpack. TODO: Fix this in
        // Turbopack.
        return mergeClientReferenceManifests(clientReferenceManifestsPerPage);
    }
    const clientReferenceManifest = clientReferenceManifestsPerPage[workStore.route];
    if (!clientReferenceManifest) {
        throw Object.defineProperty(new InvariantError(`Missing Client Reference Manifest for ${workStore.route}.`), "__NEXT_ERROR_CODE", {
            value: "E570",
            enumerable: false,
            configurable: true
        });
    }
    return clientReferenceManifest;
}
|
||||
// Imports (and caches process-wide) the AES-GCM CryptoKey used for Server
// Action bound-arg encryption. The raw base64 key comes from the env var
// NEXT_SERVER_ACTIONS_ENCRYPTION_KEY or, failing that, the build manifest.
// @throws InvariantError E606 (no manifests registered) or E571 (no key found).
export async function getActionEncryptionKey() {
    if (__next_loaded_action_key) {
        return __next_loaded_action_key;
    }
    const serverActionsManifestSingleton = globalThis[SERVER_ACTION_MANIFESTS_SINGLETON];
    if (!serverActionsManifestSingleton) {
        throw Object.defineProperty(new InvariantError('Missing manifest for Server Actions.'), "__NEXT_ERROR_CODE", {
            value: "E606",
            enumerable: false,
            configurable: true
        });
    }
    const rawKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY || serverActionsManifestSingleton.serverActionsManifest.encryptionKey;
    if (rawKey === undefined) {
        throw Object.defineProperty(new InvariantError('Missing encryption key for Server Actions'), "__NEXT_ERROR_CODE", {
            value: "E571",
            enumerable: false,
            configurable: true
        });
    }
    // Decode base64 -> binary string -> bytes, then import as an AES-GCM key.
    __next_loaded_action_key = await crypto.subtle.importKey('raw', stringToUint8Array(atob(rawKey)), 'AES-GCM', true, [
        'encrypt',
        'decrypt'
    ]);
    return __next_loaded_action_key;
}
|
||||
/**
 * Merges the per-page client reference manifests into a single manifest.
 * Later pages' entries win on key collisions, matching object-spread order.
 */ function mergeClientReferenceManifests(clientReferenceManifestsPerPage) {
    const merged = {
        clientModules: {},
        edgeRscModuleMapping: {},
        rscModuleMapping: {}
    };
    for (const manifest of Object.values(clientReferenceManifestsPerPage)){
        Object.assign(merged.clientModules, manifest.clientModules);
        Object.assign(merged.edgeRscModuleMapping, manifest.edgeRscModuleMapping);
        Object.assign(merged.rscModuleMapping, manifest.rscModuleMapping);
    }
    return merged;
}
|
||||
|
||||
//# sourceMappingURL=encryption-utils.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
166
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption.js
generated
vendored
Normal file
166
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
/* eslint-disable import/no-extraneous-dependencies */ import 'server-only';
|
||||
/* eslint-disable import/no-extraneous-dependencies */ import { renderToReadableStream } from 'react-server-dom-webpack/server.edge';
|
||||
/* eslint-disable import/no-extraneous-dependencies */ import { createFromReadableStream } from 'react-server-dom-webpack/client.edge';
|
||||
import { streamToString } from '../stream-utils/node-web-streams-helper';
|
||||
import { arrayBufferToString, decrypt, encrypt, getActionEncryptionKey, getClientReferenceManifestForRsc, getServerModuleMap, stringToUint8Array } from './encryption-utils';
|
||||
import { getPrerenderResumeDataCache, getRenderResumeDataCache, workUnitAsyncStorage } from './work-unit-async-storage.external';
|
||||
import { createHangingInputAbortSignal } from './dynamic-rendering';
|
||||
import React from 'react';
|
||||
// True when running in the Edge runtime; presumably used later in this module
// (not referenced in the visible excerpt) — TODO confirm.
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
// Shared UTF-8 codec instances reused by the encode/decode helpers below.
const textEncoder = new TextEncoder();
const textDecoder = new TextDecoder();
|
||||
/**
 * Decrypt the serialized string with the action id as the salt.
 *
 * Layout of the base64 payload: first 16 bytes are the IV, the rest is the
 * AES-GCM ciphertext of `actionId + plaintext`. The actionId prefix acts as a
 * checksum: a mismatch means the payload was not produced for this action.
 *
 * @throws E65 when the encryption key is missing, E191 when decryption
 *         succeeds but the actionId prefix does not match.
 */ async function decodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (typeof key === 'undefined') {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Get the iv (16 bytes) and the payload from the arg.
    const originalPayload = atob(arg);
    const ivValue = originalPayload.slice(0, 16);
    const payload = originalPayload.slice(16);
    const decrypted = textDecoder.decode(await decrypt(key, stringToUint8Array(ivValue), stringToUint8Array(payload)));
    if (!decrypted.startsWith(actionId)) {
        throw Object.defineProperty(new Error('Invalid Server Action payload: failed to decrypt.'), "__NEXT_ERROR_CODE", {
            value: "E191",
            enumerable: false,
            configurable: true
        });
    }
    // Strip the actionId prefix to recover the original serialized args.
    return decrypted.slice(actionId.length);
}
|
||||
/**
 * Encrypt the serialized string with the action id as the salt. Add a prefix to
 * later ensure that the payload is correctly decrypted, similar to a checksum.
 *
 * Output: base64 of (16-byte IV + AES-GCM ciphertext of `actionId + arg`).
 * @throws E65 when the encryption key is missing.
 */ async function encodeActionBoundArg(actionId, arg) {
    const key = await getActionEncryptionKey();
    if (key === undefined) {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Get 16 random bytes as iv.
    // NOTE(review): runs getRandomValues outside the work-unit async context —
    // presumably so the random read isn't tracked as IO during prerender;
    // confirm against workUnitAsyncStorage semantics.
    const randomBytes = new Uint8Array(16);
    workUnitAsyncStorage.exit(()=>crypto.getRandomValues(randomBytes));
    const ivValue = arrayBufferToString(randomBytes.buffer);
    const encrypted = await encrypt(key, randomBytes, textEncoder.encode(actionId + arg));
    return btoa(ivValue + arrayBufferToString(encrypted));
}
|
||||
// Encrypts the action's bound args into a string. For the same combination of
// actionId and args the same cached promise is returned. This ensures reference
// equality for returned objects from "use cache" functions when they're invoked
// multiple times within one render pass using the same bound args.
//
// Flow: serialize args via Flight -> consult the resume-data caches ->
// otherwise encrypt and (when prerendering) record the result in the
// prerender resume-data cache.
export const encryptActionBoundArgs = React.cache(async function encryptActionBoundArgs(actionId, ...args) {
    const { clientModules } = getClientReferenceManifestForRsc();
    // Create an error before any asynchronous calls, to capture the original
    // call stack in case we need it when the serialization errors.
    const error = new Error();
    Error.captureStackTrace(error, encryptActionBoundArgs);
    let didCatchError = false;
    const workUnitStore = workUnitAsyncStorage.getStore();
    // During a dynamicIO prerender, hanging promises among the args would
    // never resolve — abort their encoding via this signal.
    const hangingInputAbortSignal = (workUnitStore == null ? void 0 : workUnitStore.type) === 'prerender' ? createHangingInputAbortSignal(workUnitStore) : undefined;
    // Using Flight to serialize the args into a string.
    const serialized = await streamToString(renderToReadableStream(args, clientModules, {
        signal: hangingInputAbortSignal,
        onError (err) {
            if (hangingInputAbortSignal == null ? void 0 : hangingInputAbortSignal.aborted) {
                return;
            }
            // We're only reporting one error at a time, starting with the first.
            if (didCatchError) {
                return;
            }
            didCatchError = true;
            // Use the original error message together with the previously created
            // stack, because err.stack is a useless Flight Server call stack.
            error.message = err instanceof Error ? err.message : String(err);
        }
    }), // We pass the abort signal to `streamToString` so that no chunks are
    // included that are emitted after the signal was already aborted. This
    // ensures that we can encode hanging promises.
    hangingInputAbortSignal);
    if (didCatchError) {
        if (process.env.NODE_ENV === 'development') {
            // Logging the error is needed for server functions that are passed to the
            // client where the decryption is not done during rendering. Console
            // replaying allows us to still show the error dev overlay in this case.
            console.error(error);
        }
        throw error;
    }
    if (!workUnitStore) {
        return encodeActionBoundArg(actionId, serialized);
    }
    const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
    const renderResumeDataCache = getRenderResumeDataCache(workUnitStore);
    const cacheKey = actionId + serialized;
    // Reuse a previously computed encryption for identical actionId+args.
    const cachedEncrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.get(cacheKey)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.encryptedBoundArgs.get(cacheKey));
    if (cachedEncrypted) {
        return cachedEncrypted;
    }
    // Bracket the encryption with beginRead/endRead so a prerender's cache
    // signal treats it as an in-flight cache read.
    const cacheSignal = workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : undefined;
    cacheSignal == null ? void 0 : cacheSignal.beginRead();
    const encrypted = await encodeActionBoundArg(actionId, serialized);
    cacheSignal == null ? void 0 : cacheSignal.endRead();
    prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.set(cacheKey, encrypted);
    return encrypted;
});
|
||||
// Decrypts the action's bound args from the encrypted string.
|
||||
export async function decryptActionBoundArgs(actionId, encryptedPromise) {
|
||||
const encrypted = await encryptedPromise;
|
||||
const workUnitStore = workUnitAsyncStorage.getStore();
|
||||
let decrypted;
|
||||
if (workUnitStore) {
|
||||
const cacheSignal = workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : undefined;
|
||||
const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
|
||||
const renderResumeDataCache = getRenderResumeDataCache(workUnitStore);
|
||||
decrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.get(encrypted)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.decryptedBoundArgs.get(encrypted));
|
||||
if (!decrypted) {
|
||||
cacheSignal == null ? void 0 : cacheSignal.beginRead();
|
||||
decrypted = await decodeActionBoundArg(actionId, encrypted);
|
||||
cacheSignal == null ? void 0 : cacheSignal.endRead();
|
||||
prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.set(encrypted, decrypted);
|
||||
}
|
||||
} else {
|
||||
decrypted = await decodeActionBoundArg(actionId, encrypted);
|
||||
}
|
||||
const { edgeRscModuleMapping, rscModuleMapping } = getClientReferenceManifestForRsc();
|
||||
// Using Flight to deserialize the args from the string.
|
||||
const deserialized = await createFromReadableStream(new ReadableStream({
|
||||
start (controller) {
|
||||
controller.enqueue(textEncoder.encode(decrypted));
|
||||
if ((workUnitStore == null ? void 0 : workUnitStore.type) === 'prerender') {
|
||||
// Explicitly don't close the stream here (until prerendering is
|
||||
// complete) so that hanging promises are not rejected.
|
||||
if (workUnitStore.renderSignal.aborted) {
|
||||
controller.close();
|
||||
} else {
|
||||
workUnitStore.renderSignal.addEventListener('abort', ()=>controller.close(), {
|
||||
once: true
|
||||
});
|
||||
}
|
||||
} else {
|
||||
controller.close();
|
||||
}
|
||||
}
|
||||
}), {
|
||||
serverConsumerManifest: {
|
||||
// moduleLoading must be null because we don't want to trigger preloads of ClientReferences
|
||||
// to be added to the current execution. Instead, we'll wait for any ClientReference
|
||||
// to be emitted which themselves will handle the preloading.
|
||||
moduleLoading: null,
|
||||
moduleMap: isEdgeRuntime ? edgeRscModuleMapping : rscModuleMapping,
|
||||
serverModuleMap: getServerModuleMap()
|
||||
}
|
||||
});
|
||||
return deserialized;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=encryption.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/encryption.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
frontend/webapp/node_modules/next/dist/esm/server/app-render/entry-base.js
generated
vendored
Normal file
35
frontend/webapp/node_modules/next/dist/esm/server/app-render/entry-base.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
export { createTemporaryReferenceSet, renderToReadableStream, decodeReply, decodeAction, decodeFormState } from 'react-server-dom-webpack/server.edge';
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
export { unstable_prerender as prerender } from 'react-server-dom-webpack/static.edge';
|
||||
import LayoutRouter from '../../client/components/layout-router';
|
||||
import RenderFromTemplateContext from '../../client/components/render-from-template-context';
|
||||
import { workAsyncStorage } from '../app-render/work-async-storage.external';
|
||||
import { workUnitAsyncStorage } from './work-unit-async-storage.external';
|
||||
import { actionAsyncStorage } from '../app-render/action-async-storage.external';
|
||||
import { ClientPageRoot } from '../../client/components/client-page';
|
||||
import { ClientSegmentRoot } from '../../client/components/client-segment';
|
||||
import { createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage } from '../request/search-params';
|
||||
import { createServerParamsForServerSegment, createPrerenderParamsForClientSegment } from '../request/params';
|
||||
import * as serverHooks from '../../client/components/hooks-server-context';
|
||||
import { HTTPAccessFallbackBoundary } from '../../client/components/http-access-fallback/error-boundary';
|
||||
import { createMetadataComponents } from '../../lib/metadata/metadata';
|
||||
import { patchFetch as _patchFetch } from '../lib/patch-fetch';
|
||||
// not being used but needs to be included in the client manifest for /_not-found
|
||||
import '../../client/components/error-boundary';
|
||||
import { MetadataBoundary, ViewportBoundary, OutletBoundary } from '../../client/components/metadata/metadata-boundary';
|
||||
import { preloadStyle, preloadFont, preconnect } from './rsc/preloads';
|
||||
import { Postpone } from './rsc/postpone';
|
||||
import { taintObjectReference } from './rsc/taint';
|
||||
export { collectSegmentData } from './collect-segment-data';
|
||||
// patchFetch makes use of APIs such as `React.unstable_postpone` which are only available
|
||||
// in the experimental channel of React, so export it from here so that it comes from the bundled runtime
|
||||
function patchFetch() {
|
||||
return _patchFetch({
|
||||
workAsyncStorage,
|
||||
workUnitAsyncStorage
|
||||
});
|
||||
}
|
||||
export { LayoutRouter, RenderFromTemplateContext, workAsyncStorage, workUnitAsyncStorage, actionAsyncStorage, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks, preloadStyle, preloadFont, preconnect, Postpone, MetadataBoundary, ViewportBoundary, OutletBoundary, taintObjectReference, ClientPageRoot, ClientSegmentRoot, HTTPAccessFallbackBoundary, patchFetch, createMetadataComponents, };
|
||||
|
||||
//# sourceMappingURL=entry-base.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/entry-base.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/entry-base.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/entry-base.ts"],"sourcesContent":["// eslint-disable-next-line import/no-extraneous-dependencies\nexport {\n createTemporaryReferenceSet,\n renderToReadableStream,\n decodeReply,\n decodeAction,\n decodeFormState,\n} from 'react-server-dom-webpack/server.edge'\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nexport { unstable_prerender as prerender } from 'react-server-dom-webpack/static.edge'\n\nimport LayoutRouter from '../../client/components/layout-router'\nimport RenderFromTemplateContext from '../../client/components/render-from-template-context'\nimport { workAsyncStorage } from '../app-render/work-async-storage.external'\nimport { workUnitAsyncStorage } from './work-unit-async-storage.external'\nimport { actionAsyncStorage } from '../app-render/action-async-storage.external'\nimport { ClientPageRoot } from '../../client/components/client-page'\nimport { ClientSegmentRoot } from '../../client/components/client-segment'\nimport {\n createServerSearchParamsForServerPage,\n createPrerenderSearchParamsForClientPage,\n} from '../request/search-params'\nimport {\n createServerParamsForServerSegment,\n createPrerenderParamsForClientSegment,\n} from '../request/params'\nimport * as serverHooks from '../../client/components/hooks-server-context'\nimport { HTTPAccessFallbackBoundary } from '../../client/components/http-access-fallback/error-boundary'\nimport { createMetadataComponents } from '../../lib/metadata/metadata'\nimport { patchFetch as _patchFetch } from '../lib/patch-fetch'\n// not being used but needs to be included in the client manifest for /_not-found\nimport '../../client/components/error-boundary'\nimport {\n MetadataBoundary,\n ViewportBoundary,\n OutletBoundary,\n} from '../../client/components/metadata/metadata-boundary'\n\nimport { preloadStyle, preloadFont, preconnect } from './rsc/preloads'\nimport { Postpone } from './rsc/postpone'\nimport { taintObjectReference } from 
'./rsc/taint'\nexport { collectSegmentData } from './collect-segment-data'\n\n// patchFetch makes use of APIs such as `React.unstable_postpone` which are only available\n// in the experimental channel of React, so export it from here so that it comes from the bundled runtime\nfunction patchFetch() {\n return _patchFetch({\n workAsyncStorage,\n workUnitAsyncStorage,\n })\n}\n\nexport {\n LayoutRouter,\n RenderFromTemplateContext,\n workAsyncStorage,\n workUnitAsyncStorage,\n actionAsyncStorage,\n createServerSearchParamsForServerPage,\n createPrerenderSearchParamsForClientPage,\n createServerParamsForServerSegment,\n createPrerenderParamsForClientSegment,\n serverHooks,\n preloadStyle,\n preloadFont,\n preconnect,\n Postpone,\n MetadataBoundary,\n ViewportBoundary,\n OutletBoundary,\n taintObjectReference,\n ClientPageRoot,\n ClientSegmentRoot,\n HTTPAccessFallbackBoundary,\n patchFetch,\n createMetadataComponents,\n}\n"],"names":["createTemporaryReferenceSet","renderToReadableStream","decodeReply","decodeAction","decodeFormState","unstable_prerender","prerender","LayoutRouter","RenderFromTemplateContext","workAsyncStorage","workUnitAsyncStorage","actionAsyncStorage","ClientPageRoot","ClientSegmentRoot","createServerSearchParamsForServerPage","createPrerenderSearchParamsForClientPage","createServerParamsForServerSegment","createPrerenderParamsForClientSegment","serverHooks","HTTPAccessFallbackBoundary","createMetadataComponents","patchFetch","_patchFetch","MetadataBoundary","ViewportBoundary","OutletBoundary","preloadStyle","preloadFont","preconnect","Postpone","taintObjectReference","collectSegmentData"],"mappings":"AAAA,6DAA6D;AAC7D,SACEA,2BAA2B,EAC3BC,sBAAsB,EACtBC,WAAW,EACXC,YAAY,EACZC,eAAe,QACV,uCAAsC;AAE7C,6DAA6D;AAC7D,SAASC,sBAAsBC,SAAS,QAAQ,uCAAsC;AAEtF,OAAOC,kBAAkB,wCAAuC;AAChE,OAAOC,+BAA+B,uDAAsD;AAC5F,SAASC,gBAAgB,QAAQ,4CAA2C;AAC5E,SAASC,oBAAoB,QAAQ,qCAAoC;AACzE,SAASC,kBAAkB,QAAQ,8CAA6C;AAChF,SAASC,cAAc,QAAQ,sCAAqC;AACpE,SAASC,iBAAiB,QAAQ,yCAAwC;AAC1E,SACE
C,qCAAqC,EACrCC,wCAAwC,QACnC,2BAA0B;AACjC,SACEC,kCAAkC,EAClCC,qCAAqC,QAChC,oBAAmB;AAC1B,YAAYC,iBAAiB,+CAA8C;AAC3E,SAASC,0BAA0B,QAAQ,8DAA6D;AACxG,SAASC,wBAAwB,QAAQ,8BAA6B;AACtE,SAASC,cAAcC,WAAW,QAAQ,qBAAoB;AAC9D,iFAAiF;AACjF,OAAO,yCAAwC;AAC/C,SACEC,gBAAgB,EAChBC,gBAAgB,EAChBC,cAAc,QACT,qDAAoD;AAE3D,SAASC,YAAY,EAAEC,WAAW,EAAEC,UAAU,QAAQ,iBAAgB;AACtE,SAASC,QAAQ,QAAQ,iBAAgB;AACzC,SAASC,oBAAoB,QAAQ,cAAa;AAClD,SAASC,kBAAkB,QAAQ,yBAAwB;AAE3D,0FAA0F;AAC1F,yGAAyG;AACzG,SAASV;IACP,OAAOC,YAAY;QACjBb;QACAC;IACF;AACF;AAEA,SACEH,YAAY,EACZC,yBAAyB,EACzBC,gBAAgB,EAChBC,oBAAoB,EACpBC,kBAAkB,EAClBG,qCAAqC,EACrCC,wCAAwC,EACxCC,kCAAkC,EAClCC,qCAAqC,EACrCC,WAAW,EACXQ,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,QAAQ,EACRN,gBAAgB,EAChBC,gBAAgB,EAChBC,cAAc,EACdK,oBAAoB,EACpBlB,cAAc,EACdC,iBAAiB,EACjBM,0BAA0B,EAC1BE,UAAU,EACVD,wBAAwB,KACzB"}
|
||||
14
frontend/webapp/node_modules/next/dist/esm/server/app-render/flight-render-result.js
generated
vendored
Normal file
14
frontend/webapp/node_modules/next/dist/esm/server/app-render/flight-render-result.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers';
|
||||
import RenderResult from '../render-result';
|
||||
/**
|
||||
* Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.
|
||||
*/ export class FlightRenderResult extends RenderResult {
|
||||
constructor(response, metadata = {}){
|
||||
super(response, {
|
||||
contentType: RSC_CONTENT_TYPE_HEADER,
|
||||
metadata
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=flight-render-result.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/flight-render-result.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/flight-render-result.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/flight-render-result.ts"],"sourcesContent":["import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers'\nimport RenderResult, { type RenderResultMetadata } from '../render-result'\n\n/**\n * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.\n */\nexport class FlightRenderResult extends RenderResult {\n constructor(\n response: string | ReadableStream<Uint8Array>,\n metadata: RenderResultMetadata = {}\n ) {\n super(response, { contentType: RSC_CONTENT_TYPE_HEADER, metadata })\n }\n}\n"],"names":["RSC_CONTENT_TYPE_HEADER","RenderResult","FlightRenderResult","constructor","response","metadata","contentType"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,6CAA4C;AACpF,OAAOC,kBAAiD,mBAAkB;AAE1E;;CAEC,GACD,OAAO,MAAMC,2BAA2BD;IACtCE,YACEC,QAA6C,EAC7CC,WAAiC,CAAC,CAAC,CACnC;QACA,KAAK,CAACD,UAAU;YAAEE,aAAaN;YAAyBK;QAAS;IACnE;AACF"}
|
||||
19
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-asset-query-string.js
generated
vendored
Normal file
19
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-asset-query-string.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
const isDev = process.env.NODE_ENV === 'development';
|
||||
const isTurbopack = !!process.env.TURBOPACK;
|
||||
export function getAssetQueryString(ctx, addTimestamp) {
|
||||
let qs = '';
|
||||
// In development we add the request timestamp to allow react to
|
||||
// reload assets when a new RSC response is received.
|
||||
// Turbopack handles HMR of assets itself and react doesn't need to reload them
|
||||
// so this approach is not needed for Turbopack.
|
||||
const shouldAddVersion = isDev && !isTurbopack && addTimestamp;
|
||||
if (shouldAddVersion) {
|
||||
qs += `?v=${ctx.requestTimestamp}`;
|
||||
}
|
||||
if (ctx.renderOpts.deploymentId) {
|
||||
qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.renderOpts.deploymentId}`;
|
||||
}
|
||||
return qs;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-asset-query-string.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-asset-query-string.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-asset-query-string.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-asset-query-string.ts"],"sourcesContent":["import type { AppRenderContext } from './app-render'\n\nconst isDev = process.env.NODE_ENV === 'development'\nconst isTurbopack = !!process.env.TURBOPACK\n\nexport function getAssetQueryString(\n ctx: AppRenderContext,\n addTimestamp: boolean\n) {\n let qs = ''\n\n // In development we add the request timestamp to allow react to\n // reload assets when a new RSC response is received.\n // Turbopack handles HMR of assets itself and react doesn't need to reload them\n // so this approach is not needed for Turbopack.\n const shouldAddVersion = isDev && !isTurbopack && addTimestamp\n if (shouldAddVersion) {\n qs += `?v=${ctx.requestTimestamp}`\n }\n\n if (ctx.renderOpts.deploymentId) {\n qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.renderOpts.deploymentId}`\n }\n return qs\n}\n"],"names":["isDev","process","env","NODE_ENV","isTurbopack","TURBOPACK","getAssetQueryString","ctx","addTimestamp","qs","shouldAddVersion","requestTimestamp","renderOpts","deploymentId"],"mappings":"AAEA,MAAMA,QAAQC,QAAQC,GAAG,CAACC,QAAQ,KAAK;AACvC,MAAMC,cAAc,CAAC,CAACH,QAAQC,GAAG,CAACG,SAAS;AAE3C,OAAO,SAASC,oBACdC,GAAqB,EACrBC,YAAqB;IAErB,IAAIC,KAAK;IAET,gEAAgE;IAChE,qDAAqD;IACrD,+EAA+E;IAC/E,gDAAgD;IAChD,MAAMC,mBAAmBV,SAAS,CAACI,eAAeI;IAClD,IAAIE,kBAAkB;QACpBD,MAAM,CAAC,GAAG,EAAEF,IAAII,gBAAgB,EAAE;IACpC;IAEA,IAAIJ,IAAIK,UAAU,CAACC,YAAY,EAAE;QAC/BJ,MAAM,GAAGC,mBAAmB,MAAM,IAAI,IAAI,EAAEH,IAAIK,UAAU,CAACC,YAAY,EAAE;IAC3E;IACA,OAAOJ;AACT"}
|
||||
40
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-css-inlined-link-tags.js
generated
vendored
Normal file
40
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-css-inlined-link-tags.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Get external stylesheet link hrefs based on server CSS manifest.
|
||||
*/ export function getLinkAndScriptTags(clientReferenceManifest, filePath, injectedCSS, injectedScripts, collectNewImports) {
|
||||
var _clientReferenceManifest_entryJSFiles;
|
||||
const filePathWithoutExt = filePath.replace(/\.[^.]+$/, '');
|
||||
const cssChunks = new Set();
|
||||
const jsChunks = new Set();
|
||||
const entryCSSFiles = clientReferenceManifest.entryCSSFiles[filePathWithoutExt];
|
||||
const entryJSFiles = ((_clientReferenceManifest_entryJSFiles = clientReferenceManifest.entryJSFiles) == null ? void 0 : _clientReferenceManifest_entryJSFiles[filePathWithoutExt]) ?? [];
|
||||
if (entryCSSFiles) {
|
||||
for (const css of entryCSSFiles){
|
||||
if (!injectedCSS.has(css.path)) {
|
||||
if (collectNewImports) {
|
||||
injectedCSS.add(css.path);
|
||||
}
|
||||
cssChunks.add(css);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (entryJSFiles) {
|
||||
for (const file of entryJSFiles){
|
||||
if (!injectedScripts.has(file)) {
|
||||
if (collectNewImports) {
|
||||
injectedScripts.add(file);
|
||||
}
|
||||
jsChunks.add(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
styles: [
|
||||
...cssChunks
|
||||
],
|
||||
scripts: [
|
||||
...jsChunks
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-css-inlined-link-tags.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-css-inlined-link-tags.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-css-inlined-link-tags.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-css-inlined-link-tags.tsx"],"sourcesContent":["import type {\n ClientReferenceManifest,\n CssResource,\n} from '../../build/webpack/plugins/flight-manifest-plugin'\nimport type { DeepReadonly } from '../../shared/lib/deep-readonly'\n\n/**\n * Get external stylesheet link hrefs based on server CSS manifest.\n */\nexport function getLinkAndScriptTags(\n clientReferenceManifest: DeepReadonly<ClientReferenceManifest>,\n filePath: string,\n injectedCSS: Set<string>,\n injectedScripts: Set<string>,\n collectNewImports?: boolean\n): { styles: CssResource[]; scripts: string[] } {\n const filePathWithoutExt = filePath.replace(/\\.[^.]+$/, '')\n const cssChunks = new Set<CssResource>()\n const jsChunks = new Set<string>()\n\n const entryCSSFiles =\n clientReferenceManifest.entryCSSFiles[filePathWithoutExt]\n const entryJSFiles =\n clientReferenceManifest.entryJSFiles?.[filePathWithoutExt] ?? []\n\n if (entryCSSFiles) {\n for (const css of entryCSSFiles) {\n if (!injectedCSS.has(css.path)) {\n if (collectNewImports) {\n injectedCSS.add(css.path)\n }\n cssChunks.add(css)\n }\n }\n }\n\n if (entryJSFiles) {\n for (const file of entryJSFiles) {\n if (!injectedScripts.has(file)) {\n if (collectNewImports) {\n injectedScripts.add(file)\n }\n jsChunks.add(file)\n }\n }\n }\n\n return { styles: [...cssChunks], scripts: [...jsChunks] 
}\n}\n"],"names":["getLinkAndScriptTags","clientReferenceManifest","filePath","injectedCSS","injectedScripts","collectNewImports","filePathWithoutExt","replace","cssChunks","Set","jsChunks","entryCSSFiles","entryJSFiles","css","has","path","add","file","styles","scripts"],"mappings":"AAMA;;CAEC,GACD,OAAO,SAASA,qBACdC,uBAA8D,EAC9DC,QAAgB,EAChBC,WAAwB,EACxBC,eAA4B,EAC5BC,iBAA2B;QASzBJ;IAPF,MAAMK,qBAAqBJ,SAASK,OAAO,CAAC,YAAY;IACxD,MAAMC,YAAY,IAAIC;IACtB,MAAMC,WAAW,IAAID;IAErB,MAAME,gBACJV,wBAAwBU,aAAa,CAACL,mBAAmB;IAC3D,MAAMM,eACJX,EAAAA,wCAAAA,wBAAwBW,YAAY,qBAApCX,qCAAsC,CAACK,mBAAmB,KAAI,EAAE;IAElE,IAAIK,eAAe;QACjB,KAAK,MAAME,OAAOF,cAAe;YAC/B,IAAI,CAACR,YAAYW,GAAG,CAACD,IAAIE,IAAI,GAAG;gBAC9B,IAAIV,mBAAmB;oBACrBF,YAAYa,GAAG,CAACH,IAAIE,IAAI;gBAC1B;gBACAP,UAAUQ,GAAG,CAACH;YAChB;QACF;IACF;IAEA,IAAID,cAAc;QAChB,KAAK,MAAMK,QAAQL,aAAc;YAC/B,IAAI,CAACR,gBAAgBU,GAAG,CAACG,OAAO;gBAC9B,IAAIZ,mBAAmB;oBACrBD,gBAAgBY,GAAG,CAACC;gBACtB;gBACAP,SAASM,GAAG,CAACC;YACf;QACF;IACF;IAEA,OAAO;QAAEC,QAAQ;eAAIV;SAAU;QAAEW,SAAS;eAAIT;SAAS;IAAC;AAC1D"}
|
||||
55
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-layer-assets.js
generated
vendored
Normal file
55
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-layer-assets.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
import { jsx as _jsx } from "react/jsx-runtime";
|
||||
import React from 'react';
|
||||
import { getLinkAndScriptTags } from './get-css-inlined-link-tags';
|
||||
import { getPreloadableFonts } from './get-preloadable-fonts';
|
||||
import { getAssetQueryString } from './get-asset-query-string';
|
||||
import { encodeURIPath } from '../../shared/lib/encode-uri-path';
|
||||
import { renderCssResource } from './render-css-resource';
|
||||
export function getLayerAssets({ ctx, layoutOrPagePath, injectedCSS: injectedCSSWithCurrentLayout, injectedJS: injectedJSWithCurrentLayout, injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout, preloadCallbacks }) {
|
||||
const { styles: styleTags, scripts: scriptTags } = layoutOrPagePath ? getLinkAndScriptTags(ctx.clientReferenceManifest, layoutOrPagePath, injectedCSSWithCurrentLayout, injectedJSWithCurrentLayout, true) : {
|
||||
styles: [],
|
||||
scripts: []
|
||||
};
|
||||
const preloadedFontFiles = layoutOrPagePath ? getPreloadableFonts(ctx.renderOpts.nextFontManifest, layoutOrPagePath, injectedFontPreloadTagsWithCurrentLayout) : null;
|
||||
if (preloadedFontFiles) {
|
||||
if (preloadedFontFiles.length) {
|
||||
for(let i = 0; i < preloadedFontFiles.length; i++){
|
||||
const fontFilename = preloadedFontFiles[i];
|
||||
const ext = /\.(woff|woff2|eot|ttf|otf)$/.exec(fontFilename)[1];
|
||||
const type = `font/${ext}`;
|
||||
const href = `${ctx.assetPrefix}/_next/${encodeURIPath(fontFilename)}`;
|
||||
preloadCallbacks.push(()=>{
|
||||
ctx.componentMod.preloadFont(href, type, ctx.renderOpts.crossOrigin, ctx.nonce);
|
||||
});
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
let url = new URL(ctx.assetPrefix);
|
||||
preloadCallbacks.push(()=>{
|
||||
ctx.componentMod.preconnect(url.origin, 'anonymous', ctx.nonce);
|
||||
});
|
||||
} catch (error) {
|
||||
// assetPrefix must not be a fully qualified domain name. We assume
|
||||
// we should preconnect to same origin instead
|
||||
preloadCallbacks.push(()=>{
|
||||
ctx.componentMod.preconnect('/', 'anonymous', ctx.nonce);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
const styles = renderCssResource(styleTags, ctx, preloadCallbacks);
|
||||
const scripts = scriptTags ? scriptTags.map((href, index)=>{
|
||||
const fullSrc = `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`;
|
||||
return /*#__PURE__*/ _jsx("script", {
|
||||
src: fullSrc,
|
||||
async: true,
|
||||
nonce: ctx.nonce
|
||||
}, `script-${index}`);
|
||||
}) : [];
|
||||
return styles.length || scripts.length ? [
|
||||
...styles,
|
||||
...scripts
|
||||
] : null;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-layer-assets.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-layer-assets.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-layer-assets.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-preloadable-fonts.js
generated
vendored
Normal file
35
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-preloadable-fonts.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
/**
|
||||
* Get hrefs for fonts to preload
|
||||
* Returns null if there are no fonts at all.
|
||||
* Returns string[] if there are fonts to preload (font paths)
|
||||
* Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded
|
||||
* Returns null if there are fonts but none to preload and at least some were previously preloaded
|
||||
*/ export function getPreloadableFonts(nextFontManifest, filePath, injectedFontPreloadTags) {
|
||||
if (!nextFontManifest || !filePath) {
|
||||
return null;
|
||||
}
|
||||
const filepathWithoutExtension = filePath.replace(/\.[^.]+$/, '');
|
||||
const fontFiles = new Set();
|
||||
let foundFontUsage = false;
|
||||
const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension];
|
||||
if (preloadedFontFiles) {
|
||||
foundFontUsage = true;
|
||||
for (const fontFile of preloadedFontFiles){
|
||||
if (!injectedFontPreloadTags.has(fontFile)) {
|
||||
fontFiles.add(fontFile);
|
||||
injectedFontPreloadTags.add(fontFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fontFiles.size) {
|
||||
return [
|
||||
...fontFiles
|
||||
].sort();
|
||||
} else if (foundFontUsage && injectedFontPreloadTags.size === 0) {
|
||||
return [];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-preloadable-fonts.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-preloadable-fonts.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-preloadable-fonts.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-preloadable-fonts.tsx"],"sourcesContent":["import type { NextFontManifest } from '../../build/webpack/plugins/next-font-manifest-plugin'\nimport type { DeepReadonly } from '../../shared/lib/deep-readonly'\n\n/**\n * Get hrefs for fonts to preload\n * Returns null if there are no fonts at all.\n * Returns string[] if there are fonts to preload (font paths)\n * Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded\n * Returns null if there are fonts but none to preload and at least some were previously preloaded\n */\nexport function getPreloadableFonts(\n nextFontManifest: DeepReadonly<NextFontManifest> | undefined,\n filePath: string | undefined,\n injectedFontPreloadTags: Set<string>\n): string[] | null {\n if (!nextFontManifest || !filePath) {\n return null\n }\n const filepathWithoutExtension = filePath.replace(/\\.[^.]+$/, '')\n const fontFiles = new Set<string>()\n let foundFontUsage = false\n\n const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension]\n if (preloadedFontFiles) {\n foundFontUsage = true\n for (const fontFile of preloadedFontFiles) {\n if (!injectedFontPreloadTags.has(fontFile)) {\n fontFiles.add(fontFile)\n injectedFontPreloadTags.add(fontFile)\n }\n }\n }\n\n if (fontFiles.size) {\n return [...fontFiles].sort()\n } else if (foundFontUsage && injectedFontPreloadTags.size === 0) {\n return []\n } else {\n return null\n 
}\n}\n"],"names":["getPreloadableFonts","nextFontManifest","filePath","injectedFontPreloadTags","filepathWithoutExtension","replace","fontFiles","Set","foundFontUsage","preloadedFontFiles","app","fontFile","has","add","size","sort"],"mappings":"AAGA;;;;;;CAMC,GACD,OAAO,SAASA,oBACdC,gBAA4D,EAC5DC,QAA4B,EAC5BC,uBAAoC;IAEpC,IAAI,CAACF,oBAAoB,CAACC,UAAU;QAClC,OAAO;IACT;IACA,MAAME,2BAA2BF,SAASG,OAAO,CAAC,YAAY;IAC9D,MAAMC,YAAY,IAAIC;IACtB,IAAIC,iBAAiB;IAErB,MAAMC,qBAAqBR,iBAAiBS,GAAG,CAACN,yBAAyB;IACzE,IAAIK,oBAAoB;QACtBD,iBAAiB;QACjB,KAAK,MAAMG,YAAYF,mBAAoB;YACzC,IAAI,CAACN,wBAAwBS,GAAG,CAACD,WAAW;gBAC1CL,UAAUO,GAAG,CAACF;gBACdR,wBAAwBU,GAAG,CAACF;YAC9B;QACF;IACF;IAEA,IAAIL,UAAUQ,IAAI,EAAE;QAClB,OAAO;eAAIR;SAAU,CAACS,IAAI;IAC5B,OAAO,IAAIP,kBAAkBL,wBAAwBW,IAAI,KAAK,GAAG;QAC/D,OAAO,EAAE;IACX,OAAO;QACL,OAAO;IACT;AACF"}
|
||||
34
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-script-nonce-from-header.js
generated
vendored
Normal file
34
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-script-nonce-from-header.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
import { ESCAPE_REGEX } from '../htmlescape';
|
||||
export function getScriptNonceFromHeader(cspHeaderValue) {
|
||||
var _directive_split_slice_map_find;
|
||||
const directives = cspHeaderValue// Directives are split by ';'.
|
||||
.split(';').map((directive)=>directive.trim());
|
||||
// First try to find the directive for the 'script-src', otherwise try to
|
||||
// fallback to the 'default-src'.
|
||||
const directive = directives.find((dir)=>dir.startsWith('script-src')) || directives.find((dir)=>dir.startsWith('default-src'));
|
||||
// If no directive could be found, then we're done.
|
||||
if (!directive) {
|
||||
return;
|
||||
}
|
||||
// Extract the nonce from the directive
|
||||
const nonce = (_directive_split_slice_map_find = directive.split(' ')// Remove the 'strict-src'/'default-src' string, this can't be the nonce.
|
||||
.slice(1).map((source)=>source.trim())// Find the first source with the 'nonce-' prefix.
|
||||
.find((source)=>source.startsWith("'nonce-") && source.length > 8 && source.endsWith("'"))) == null ? void 0 : _directive_split_slice_map_find.slice(7, -1);
|
||||
// If we could't find the nonce, then we're done.
|
||||
if (!nonce) {
|
||||
return;
|
||||
}
|
||||
// Don't accept the nonce value if it contains HTML escape characters.
|
||||
// Technically, the spec requires a base64'd value, but this is just an
|
||||
// extra layer.
|
||||
if (ESCAPE_REGEX.test(nonce)) {
|
||||
throw Object.defineProperty(new Error('Nonce value from Content-Security-Policy contained HTML escape characters.\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'), "__NEXT_ERROR_CODE", {
|
||||
value: "E440",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return nonce;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=get-script-nonce-from-header.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-script-nonce-from-header.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-script-nonce-from-header.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-script-nonce-from-header.tsx"],"sourcesContent":["import { ESCAPE_REGEX } from '../htmlescape'\n\nexport function getScriptNonceFromHeader(\n cspHeaderValue: string\n): string | undefined {\n const directives = cspHeaderValue\n // Directives are split by ';'.\n .split(';')\n .map((directive) => directive.trim())\n\n // First try to find the directive for the 'script-src', otherwise try to\n // fallback to the 'default-src'.\n const directive =\n directives.find((dir) => dir.startsWith('script-src')) ||\n directives.find((dir) => dir.startsWith('default-src'))\n\n // If no directive could be found, then we're done.\n if (!directive) {\n return\n }\n\n // Extract the nonce from the directive\n const nonce = directive\n .split(' ')\n // Remove the 'strict-src'/'default-src' string, this can't be the nonce.\n .slice(1)\n .map((source) => source.trim())\n // Find the first source with the 'nonce-' prefix.\n .find(\n (source) =>\n source.startsWith(\"'nonce-\") &&\n source.length > 8 &&\n source.endsWith(\"'\")\n )\n // Grab the nonce by trimming the 'nonce-' prefix.\n ?.slice(7, -1)\n\n // If we could't find the nonce, then we're done.\n if (!nonce) {\n return\n }\n\n // Don't accept the nonce value if it contains HTML escape characters.\n // Technically, the spec requires a base64'd value, but this is just an\n // extra layer.\n if (ESCAPE_REGEX.test(nonce)) {\n throw new Error(\n 'Nonce value from Content-Security-Policy contained HTML escape characters.\\nLearn more: https://nextjs.org/docs/messages/nonce-contained-invalid-characters'\n )\n }\n\n return 
nonce\n}\n"],"names":["ESCAPE_REGEX","getScriptNonceFromHeader","cspHeaderValue","directive","directives","split","map","trim","find","dir","startsWith","nonce","slice","source","length","endsWith","test","Error"],"mappings":"AAAA,SAASA,YAAY,QAAQ,gBAAe;AAE5C,OAAO,SAASC,yBACdC,cAAsB;QAmBRC;IAjBd,MAAMC,aAAaF,cACjB,+BAA+B;KAC9BG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACH,YAAcA,UAAUI,IAAI;IAEpC,yEAAyE;IACzE,iCAAiC;IACjC,MAAMJ,YACJC,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC,kBACxCN,WAAWI,IAAI,CAAC,CAACC,MAAQA,IAAIC,UAAU,CAAC;IAE1C,mDAAmD;IACnD,IAAI,CAACP,WAAW;QACd;IACF;IAEA,uCAAuC;IACvC,MAAMQ,SAAQR,kCAAAA,UACXE,KAAK,CAAC,IACP,yEAAyE;KACxEO,KAAK,CAAC,GACNN,GAAG,CAAC,CAACO,SAAWA,OAAON,IAAI,GAC5B,kDAAkD;KACjDC,IAAI,CACH,CAACK,SACCA,OAAOH,UAAU,CAAC,cAClBG,OAAOC,MAAM,GAAG,KAChBD,OAAOE,QAAQ,CAAC,0BAVRZ,gCAaVS,KAAK,CAAC,GAAG,CAAC;IAEd,iDAAiD;IACjD,IAAI,CAACD,OAAO;QACV;IACF;IAEA,sEAAsE;IACtE,uEAAuE;IACvE,eAAe;IACf,IAAIX,aAAagB,IAAI,CAACL,QAAQ;QAC5B,MAAM,qBAEL,CAFK,IAAIM,MACR,gKADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF;IAEA,OAAON;AACT"}
|
||||
34
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-segment-param.js
generated
vendored
Normal file
34
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-segment-param.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
import { INTERCEPTION_ROUTE_MARKERS } from '../../shared/lib/router/utils/interception-routes';
|
||||
/**
 * Parse dynamic route segment to type of parameter
 */ export function getSegmentParam(segment) {
    // An interception marker (e.g. '(.)', '(..)') may prefix the segment;
    // detect it so the bracket syntax can be parsed from the remainder.
    const marker = INTERCEPTION_ROUTE_MARKERS.find((m)=>segment.startsWith(m));
    const rest = marker ? segment.slice(marker.length) : segment;
    if (rest.startsWith('[[...') && rest.endsWith(']]')) {
        return {
            // TODO-APP: Optional catchall does not currently work with parallel routes,
            // so for now aren't handling a potential interception marker.
            type: 'optional-catchall',
            param: rest.slice(5, -2)
        };
    }
    if (rest.startsWith('[...') && rest.endsWith(']')) {
        return {
            type: marker ? 'catchall-intercepted' : 'catchall',
            param: rest.slice(4, -1)
        };
    }
    if (rest.startsWith('[') && rest.endsWith(']')) {
        return {
            type: marker ? 'dynamic-intercepted' : 'dynamic',
            param: rest.slice(1, -1)
        };
    }
    // Not a dynamic segment at all.
    return null;
}
|
||||
|
||||
//# sourceMappingURL=get-segment-param.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-segment-param.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-segment-param.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-segment-param.tsx"],"sourcesContent":["import { INTERCEPTION_ROUTE_MARKERS } from '../../shared/lib/router/utils/interception-routes'\nimport type { DynamicParamTypes } from './types'\n\n/**\n * Parse dynamic route segment to type of parameter\n */\nexport function getSegmentParam(segment: string): {\n param: string\n type: DynamicParamTypes\n} | null {\n const interceptionMarker = INTERCEPTION_ROUTE_MARKERS.find((marker) =>\n segment.startsWith(marker)\n )\n\n // if an interception marker is part of the path segment, we need to jump ahead\n // to the relevant portion for param parsing\n if (interceptionMarker) {\n segment = segment.slice(interceptionMarker.length)\n }\n\n if (segment.startsWith('[[...') && segment.endsWith(']]')) {\n return {\n // TODO-APP: Optional catchall does not currently work with parallel routes,\n // so for now aren't handling a potential interception marker.\n type: 'optional-catchall',\n param: segment.slice(5, -2),\n }\n }\n\n if (segment.startsWith('[...') && segment.endsWith(']')) {\n return {\n type: interceptionMarker ? 'catchall-intercepted' : 'catchall',\n param: segment.slice(4, -1),\n }\n }\n\n if (segment.startsWith('[') && segment.endsWith(']')) {\n return {\n type: interceptionMarker ? 
'dynamic-intercepted' : 'dynamic',\n param: segment.slice(1, -1),\n }\n }\n\n return null\n}\n"],"names":["INTERCEPTION_ROUTE_MARKERS","getSegmentParam","segment","interceptionMarker","find","marker","startsWith","slice","length","endsWith","type","param"],"mappings":"AAAA,SAASA,0BAA0B,QAAQ,oDAAmD;AAG9F;;CAEC,GACD,OAAO,SAASC,gBAAgBC,OAAe;IAI7C,MAAMC,qBAAqBH,2BAA2BI,IAAI,CAAC,CAACC,SAC1DH,QAAQI,UAAU,CAACD;IAGrB,+EAA+E;IAC/E,4CAA4C;IAC5C,IAAIF,oBAAoB;QACtBD,UAAUA,QAAQK,KAAK,CAACJ,mBAAmBK,MAAM;IACnD;IAEA,IAAIN,QAAQI,UAAU,CAAC,YAAYJ,QAAQO,QAAQ,CAAC,OAAO;QACzD,OAAO;YACL,4EAA4E;YAC5E,8DAA8D;YAC9DC,MAAM;YACNC,OAAOT,QAAQK,KAAK,CAAC,GAAG,CAAC;QAC3B;IACF;IAEA,IAAIL,QAAQI,UAAU,CAAC,WAAWJ,QAAQO,QAAQ,CAAC,MAAM;QACvD,OAAO;YACLC,MAAMP,qBAAqB,yBAAyB;YACpDQ,OAAOT,QAAQK,KAAK,CAAC,GAAG,CAAC;QAC3B;IACF;IAEA,IAAIL,QAAQI,UAAU,CAAC,QAAQJ,QAAQO,QAAQ,CAAC,MAAM;QACpD,OAAO;YACLC,MAAMP,qBAAqB,wBAAwB;YACnDQ,OAAOT,QAAQK,KAAK,CAAC,GAAG,CAAC;QAC3B;IACF;IAEA,OAAO;AACT"}
|
||||
22
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-short-dynamic-param-type.js
generated
vendored
Normal file
22
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-short-dynamic-param-type.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
// Mapping of full dynamic-param type names to their wire-format abbreviations.
export const dynamicParamTypes = {
    catchall: 'c',
    'catchall-intercepted': 'ci',
    'optional-catchall': 'oc',
    dynamic: 'd',
    'dynamic-intercepted': 'di'
};
/**
 * Shorten the dynamic param in order to make it smaller when transmitted to the browser.
 */ export function getShortDynamicParamType(type) {
    const abbreviated = dynamicParamTypes[type];
    if (abbreviated) {
        return abbreviated;
    }
    const error = new Error('Unknown dynamic param type');
    // Attach Next.js' internal error code as a non-enumerable property so it
    // does not leak through serialization.
    Object.defineProperty(error, "__NEXT_ERROR_CODE", {
        value: "E378",
        enumerable: false,
        configurable: true
    });
    throw error;
}
|
||||
|
||||
//# sourceMappingURL=get-short-dynamic-param-type.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-short-dynamic-param-type.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/get-short-dynamic-param-type.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/get-short-dynamic-param-type.tsx"],"sourcesContent":["import type { DynamicParamTypes, DynamicParamTypesShort } from './types'\n\nexport const dynamicParamTypes: Record<\n DynamicParamTypes,\n DynamicParamTypesShort\n> = {\n catchall: 'c',\n 'catchall-intercepted': 'ci',\n 'optional-catchall': 'oc',\n dynamic: 'd',\n 'dynamic-intercepted': 'di',\n}\n\n/**\n * Shorten the dynamic param in order to make it smaller when transmitted to the browser.\n */\nexport function getShortDynamicParamType(\n type: DynamicParamTypes\n): DynamicParamTypesShort {\n const short = dynamicParamTypes[type]\n if (!short) {\n throw new Error('Unknown dynamic param type')\n }\n return short\n}\n"],"names":["dynamicParamTypes","catchall","dynamic","getShortDynamicParamType","type","short","Error"],"mappings":"AAEA,OAAO,MAAMA,oBAGT;IACFC,UAAU;IACV,wBAAwB;IACxB,qBAAqB;IACrBC,SAAS;IACT,uBAAuB;AACzB,EAAC;AAED;;CAEC,GACD,OAAO,SAASC,yBACdC,IAAuB;IAEvB,MAAMC,QAAQL,iBAAiB,CAACI,KAAK;IACrC,IAAI,CAACC,OAAO;QACV,MAAM,qBAAuC,CAAvC,IAAIC,MAAM,+BAAV,qBAAA;mBAAA;wBAAA;0BAAA;QAAsC;IAC9C;IACA,OAAOD;AACT"}
|
||||
9
frontend/webapp/node_modules/next/dist/esm/server/app-render/has-loading-component-in-tree.js
generated
vendored
Normal file
9
frontend/webapp/node_modules/next/dist/esm/server/app-render/has-loading-component-in-tree.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// Returns true when this loader-tree node, or any node reachable through its
// parallel routes, declares a `loading` module.
export function hasLoadingComponentInTree(tree) {
    // A loader tree node is [segment, parallelRoutes, modules].
    const [, parallelRoutes, modules] = tree;
    if (modules.loading) {
        return true;
    }
    // Recurse into each parallel route slot until a loading component is found.
    for (const childTree of Object.values(parallelRoutes)){
        if (hasLoadingComponentInTree(childTree)) {
            return true;
        }
    }
    return false;
}
|
||||
|
||||
//# sourceMappingURL=has-loading-component-in-tree.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/has-loading-component-in-tree.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/has-loading-component-in-tree.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/has-loading-component-in-tree.tsx"],"sourcesContent":["import type { LoaderTree } from '../lib/app-dir-module'\n\nexport function hasLoadingComponentInTree(tree: LoaderTree): boolean {\n const [, parallelRoutes, { loading }] = tree\n\n if (loading) {\n return true\n }\n\n return Object.values(parallelRoutes).some((parallelRoute) =>\n hasLoadingComponentInTree(parallelRoute)\n ) as boolean\n}\n"],"names":["hasLoadingComponentInTree","tree","parallelRoutes","loading","Object","values","some","parallelRoute"],"mappings":"AAEA,OAAO,SAASA,0BAA0BC,IAAgB;IACxD,MAAM,GAAGC,gBAAgB,EAAEC,OAAO,EAAE,CAAC,GAAGF;IAExC,IAAIE,SAAS;QACX,OAAO;IACT;IAEA,OAAOC,OAAOC,MAAM,CAACH,gBAAgBI,IAAI,CAAC,CAACC,gBACzCP,0BAA0BO;AAE9B"}
|
||||
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/interop-default.js
generated
vendored
Normal file
7
frontend/webapp/node_modules/next/dist/esm/server/app-render/interop-default.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * Interop between "export default" and "module.exports".
 */ export function interopDefault(mod) {
    // Prefer the ES-module default export; a falsy/absent `default` means the
    // module object itself is the CommonJS-style export.
    return mod.default ? mod.default : mod;
}
|
||||
|
||||
//# sourceMappingURL=interop-default.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/interop-default.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/interop-default.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../src/server/app-render/interop-default.ts"],"sourcesContent":["/**\n * Interop between \"export default\" and \"module.exports\".\n */\nexport function interopDefault(mod: any) {\n return mod.default || mod\n}\n"],"names":["interopDefault","mod","default"],"mappings":"AAAA;;CAEC,GACD,OAAO,SAASA,eAAeC,GAAQ;IACrC,OAAOA,IAAIC,OAAO,IAAID;AACxB"}
|
||||
76
frontend/webapp/node_modules/next/dist/esm/server/app-render/make-get-server-inserted-html.js
generated
vendored
Normal file
76
frontend/webapp/node_modules/next/dist/esm/server/app-render/make-get-server-inserted-html.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
|
||||
import React from 'react';
|
||||
import { isHTTPAccessFallbackError } from '../../client/components/http-access-fallback/http-access-fallback';
|
||||
import { getURLFromRedirectError, getRedirectStatusCodeFromError } from '../../client/components/redirect';
|
||||
import { isRedirectError } from '../../client/components/redirect-error';
|
||||
import { renderToReadableStream } from 'react-dom/server.edge';
|
||||
import { streamToString } from '../stream-utils/node-web-streams-helper';
|
||||
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
|
||||
import { addPathPrefix } from '../../shared/lib/router/utils/add-path-prefix';
|
||||
// Builds the `getServerInsertedHTML` callback used while streaming: on each
// flush it renders newly captured errors, server-inserted HTML, and (once)
// static content such as polyfills and trace meta tags.
export function makeGetServerInsertedHTML({ polyfills, renderServerInsertedHTML, serverCapturedErrors, tracingMetadata, basePath }) {
    // Index of the next captured error whose meta tags still need flushing.
    let nextErrorIndex = 0;
    // Static content (polyfills / trace meta tags) is emitted on the first
    // flush only.
    let flushedStaticOnce = false;
    const polyfillTags = polyfills.map((polyfill)=>{
        return /*#__PURE__*/ _jsx("script", {
            ...polyfill
        }, polyfill.src);
    });
    return async function getServerInsertedHTML() {
        // Drain every error captured since the previous flush into meta tags.
        const errorMetaTags = [];
        for(; nextErrorIndex < serverCapturedErrors.length; nextErrorIndex++){
            const error = serverCapturedErrors[nextErrorIndex];
            if (isHTTPAccessFallbackError(error)) {
                // Both elements are pushed together; the second is null outside
                // of development, mirroring the original flush order.
                errorMetaTags.push(/*#__PURE__*/ _jsx("meta", {
                    name: "robots",
                    content: "noindex"
                }, error.digest), process.env.NODE_ENV === 'development' ? /*#__PURE__*/ _jsx("meta", {
                    name: "next-error",
                    content: "not-found"
                }, "next-error") : null);
            } else if (isRedirectError(error)) {
                const redirectUrl = addPathPrefix(getURLFromRedirectError(error), basePath);
                const statusCode = getRedirectStatusCodeFromError(error);
                const isPermanent = statusCode === RedirectStatusCode.PermanentRedirect;
                if (redirectUrl) {
                    errorMetaTags.push(/*#__PURE__*/ _jsx("meta", {
                        id: "__next-page-redirect",
                        httpEquiv: "refresh",
                        content: `${isPermanent ? 0 : 1};url=${redirectUrl}`
                    }, error.digest));
                }
            }
        }
        const traceMetaTags = (tracingMetadata || []).map(({ key, value }, index)=>/*#__PURE__*/ _jsx("meta", {
                name: key,
                content: value
            }, `next-trace-data-${index}`));
        const serverInsertedHTML = renderServerInsertedHTML();
        // Skip the React render entirely when there is nothing to emit.
        const insertedHTMLIsEmpty = Array.isArray(serverInsertedHTML) && serverInsertedHTML.length === 0;
        if (polyfillTags.length === 0 && traceMetaTags.length === 0 && errorMetaTags.length === 0 && insertedHTMLIsEmpty) {
            return '';
        }
        const stream = await renderToReadableStream(/*#__PURE__*/ _jsxs(_Fragment, {
            children: [
                /* Insert the polyfills if they haven't been flushed yet. */ flushedStaticOnce ? null : polyfillTags,
                serverInsertedHTML,
                flushedStaticOnce ? null : traceMetaTags,
                errorMetaTags
            ]
        }), {
            // This markup is not sent over the network as-is, so a large (1MB)
            // progressive chunk size avoids needless chunking.
            progressiveChunkSize: 1024 * 1024
        });
        flushedStaticOnce = true;
        // No `await stream.allReady` needed: `streamToString` waits and decodes
        // the stream progressively with better parallelism.
        return streamToString(stream);
    };
}
|
||||
|
||||
//# sourceMappingURL=make-get-server-inserted-html.js.map
|
||||
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/make-get-server-inserted-html.js.map
generated
vendored
Normal file
1
frontend/webapp/node_modules/next/dist/esm/server/app-render/make-get-server-inserted-html.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
47
frontend/webapp/node_modules/next/dist/esm/server/app-render/metadata-insertion/create-server-inserted-metadata.js
generated
vendored
Normal file
47
frontend/webapp/node_modules/next/dist/esm/server/app-render/metadata-insertion/create-server-inserted-metadata.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
|
||||
import React from 'react';
|
||||
import { renderToReadableStream } from 'react-dom/server.edge';
|
||||
import { ServerInsertedMetadataContext } from '../../../shared/lib/server-inserted-metadata.shared-runtime';
|
||||
import { renderToString } from '../render-to-string';
|
||||
/**
 * For chromium based browsers (Chrome, Edge, etc.) and Safari,
 * icons need to stay under <head> to be picked up by the browser.
 *
 */ const REINSERT_ICON_SCRIPT = `\
document.querySelectorAll('body link[rel="icon"], body link[rel="apple-touch-icon"]').forEach(el => document.head.appendChild(el))`;
// Creates the provider/flush pair used to stream metadata inserted on the
// server. Metadata is rendered at most once per created pair.
export function createServerInsertedMetadata(nonce) {
    // Resolver registered by the provider; produces the metadata subtree.
    let resolveMetadata = null;
    // Once truthy, metadata has already been flushed — subsequent calls emit ''.
    let flushedMetadata = null;
    // Stable callback identity so the context value does not change between
    // renders.
    const setMetadataResolver = (resolver)=>{
        resolveMetadata = resolver;
    };
    return {
        ServerInsertedMetadataProvider: ({ children })=>/*#__PURE__*/ _jsx(ServerInsertedMetadataContext.Provider, {
                value: setMetadataResolver,
                children: children
            }),
        async getServerInsertedMetadata () {
            // Nothing registered yet, or already flushed — emit nothing.
            if (!resolveMetadata || flushedMetadata) {
                return '';
            }
            flushedMetadata = resolveMetadata();
            // Render the metadata plus the icon-reinsertion script to a string.
            return renderToString({
                renderToReadableStream,
                element: /*#__PURE__*/ _jsxs(_Fragment, {
                    children: [
                        flushedMetadata,
                        /*#__PURE__*/ _jsx("script", {
                            nonce: nonce,
                            children: REINSERT_ICON_SCRIPT
                        })
                    ]
                })
            });
        }
    };
}
|
||||
|
||||
//# sourceMappingURL=create-server-inserted-metadata.js.map
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user