fix(vercel): Update request-transforms.ts (#5514)
* fix(vercel): Update set-cookie header handler * Update from SvelteKit adapter * Updated changeset
This commit is contained in:
parent
3c44033e4e
commit
a1885ea2f5
4 changed files with 154 additions and 59 deletions
5
.changeset/plenty-tigers-pretend.md
Normal file
5
.changeset/plenty-tigers-pretend.md
Normal file
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
'@astrojs/vercel': patch
|
||||
---
|
||||
|
||||
Updated request-transform methods
|
|
@ -46,9 +46,11 @@
|
|||
"dependencies": {
|
||||
"@astrojs/webapi": "^1.1.1",
|
||||
"@vercel/nft": "^0.22.1",
|
||||
"fast-glob": "^3.2.11"
|
||||
"fast-glob": "^3.2.11",
|
||||
"set-cookie-parser": "^2.5.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/set-cookie-parser": "^2.4.2",
|
||||
"astro": "workspace:*",
|
||||
"astro-scripts": "workspace:*",
|
||||
"chai": "^4.3.6",
|
||||
|
|
|
@ -1,64 +1,104 @@
|
|||
import type { App } from 'astro/app';
|
||||
import type { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import { Readable } from 'node:stream';
|
||||
import { splitCookiesString } from 'set-cookie-parser';
|
||||
|
||||
const clientAddressSymbol = Symbol.for('astro.clientAddress');
|
||||
|
||||
/*
|
||||
Credits to the SvelteKit team
|
||||
https://github.com/sveltejs/kit/blob/69913e9fda054fa6a62a80e2bb4ee7dca1005796/packages/kit/src/node.js
|
||||
https://github.com/sveltejs/kit/blob/dd380b38c322272b414a7ec3ac2911f2db353f5c/packages/kit/src/exports/node/index.js
|
||||
*/
|
||||
|
||||
function get_raw_body(req: IncomingMessage) {
|
||||
return new Promise<Uint8Array | null>((fulfil, reject) => {
|
||||
const h = req.headers;
|
||||
function get_raw_body(req: IncomingMessage, body_size_limit?: number): ReadableStream | null {
|
||||
const h = req.headers;
|
||||
|
||||
if (!h['content-type']) {
|
||||
return fulfil(null);
|
||||
if (!h['content-type']) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const content_length = Number(h['content-length']);
|
||||
|
||||
// check if no request body
|
||||
if (
|
||||
(req.httpVersionMajor === 1 && isNaN(content_length) && h['transfer-encoding'] == null) ||
|
||||
content_length === 0
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let length = content_length;
|
||||
|
||||
if (body_size_limit) {
|
||||
if (!length) {
|
||||
length = body_size_limit;
|
||||
} else if (length > body_size_limit) {
|
||||
throw new Error(
|
||||
`Received content-length of ${length}, but only accept up to ${body_size_limit} bytes.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
req.on('error', reject);
|
||||
if (req.destroyed) {
|
||||
const readable = new ReadableStream();
|
||||
readable.cancel();
|
||||
return readable;
|
||||
}
|
||||
|
||||
const length = Number(h['content-length']);
|
||||
let size = 0;
|
||||
let cancelled = false;
|
||||
|
||||
// https://github.com/jshttp/type-is/blob/c1f4388c71c8a01f79934e68f630ca4a15fffcd6/index.js#L81-L95
|
||||
if (isNaN(length) && h['transfer-encoding'] == null) {
|
||||
return fulfil(null);
|
||||
}
|
||||
return new ReadableStream({
|
||||
start(controller) {
|
||||
req.on('error', (error) => {
|
||||
cancelled = true;
|
||||
controller.error(error);
|
||||
});
|
||||
|
||||
let data = new Uint8Array(length || 0);
|
||||
req.on('end', () => {
|
||||
if (cancelled) return;
|
||||
controller.close();
|
||||
});
|
||||
|
||||
if (length > 0) {
|
||||
let offset = 0;
|
||||
req.on('data', (chunk) => {
|
||||
const new_len = offset + Buffer.byteLength(chunk);
|
||||
if (cancelled) return;
|
||||
|
||||
if (new_len > length) {
|
||||
return reject({
|
||||
status: 413,
|
||||
reason: 'Exceeded "Content-Length" limit',
|
||||
});
|
||||
size += chunk.length;
|
||||
if (size > length) {
|
||||
cancelled = true;
|
||||
controller.error(
|
||||
new Error(
|
||||
`request body size exceeded ${
|
||||
content_length ? "'content-length'" : 'BODY_SIZE_LIMIT'
|
||||
} of ${length}`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
data.set(chunk, offset);
|
||||
offset = new_len;
|
||||
});
|
||||
} else {
|
||||
req.on('data', (chunk) => {
|
||||
const new_data = new Uint8Array(data.length + chunk.length);
|
||||
new_data.set(data, 0);
|
||||
new_data.set(chunk, data.length);
|
||||
data = new_data;
|
||||
});
|
||||
}
|
||||
controller.enqueue(chunk);
|
||||
|
||||
req.on('end', () => {
|
||||
fulfil(data);
|
||||
});
|
||||
if (controller.desiredSize === null || controller.desiredSize <= 0) {
|
||||
req.pause();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
pull() {
|
||||
req.resume();
|
||||
},
|
||||
|
||||
cancel(reason) {
|
||||
cancelled = true;
|
||||
req.destroy(reason);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export async function getRequest(base: string, req: IncomingMessage): Promise<Request> {
|
||||
export async function getRequest(
|
||||
base: string,
|
||||
req: IncomingMessage,
|
||||
bodySizeLimit?: number
|
||||
): Promise<Request> {
|
||||
let headers = req.headers as Record<string, string>;
|
||||
if (req.httpVersionMajor === 2) {
|
||||
// we need to strip out the HTTP/2 pseudo-headers because node-fetch's
|
||||
|
@ -72,40 +112,79 @@ export async function getRequest(base: string, req: IncomingMessage): Promise<Re
|
|||
const request = new Request(base + req.url, {
|
||||
method: req.method,
|
||||
headers,
|
||||
body: await get_raw_body(req), // TODO stream rather than buffer
|
||||
body: get_raw_body(req, bodySizeLimit),
|
||||
});
|
||||
Reflect.set(request, clientAddressSymbol, headers['x-forwarded-for']);
|
||||
return request;
|
||||
}
|
||||
|
||||
export async function setResponse(
|
||||
app: App,
|
||||
res: ServerResponse,
|
||||
response: Response
|
||||
): Promise<void> {
|
||||
export async function setResponse(app: App, res: ServerResponse, response: Response) {
|
||||
const headers = Object.fromEntries(response.headers);
|
||||
let cookies: string[] = [];
|
||||
|
||||
if (response.headers.has('set-cookie')) {
|
||||
// @ts-expect-error (headers.raw() is non-standard)
|
||||
headers['set-cookie'] = response.headers.raw()['set-cookie'];
|
||||
const header = response.headers.get('set-cookie')!;
|
||||
const split = splitCookiesString(header);
|
||||
cookies = split;
|
||||
}
|
||||
|
||||
if (app.setCookieHeaders) {
|
||||
const setCookieHeaders: Array<string> = Array.from(app.setCookieHeaders(response));
|
||||
if (setCookieHeaders.length) {
|
||||
res.setHeader('Set-Cookie', setCookieHeaders);
|
||||
}
|
||||
const setCookieHeaders = Array.from(app.setCookieHeaders(response));
|
||||
cookies.push(...setCookieHeaders);
|
||||
}
|
||||
|
||||
res.writeHead(response.status, headers);
|
||||
|
||||
if (response.body instanceof Readable) {
|
||||
response.body.pipe(res);
|
||||
} else {
|
||||
if (response.body) {
|
||||
res.write(await response.arrayBuffer());
|
||||
}
|
||||
res.writeHead(response.status, { ...headers, 'set-cookie': cookies });
|
||||
|
||||
if (!response.body) {
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.body.locked) {
|
||||
res.write(
|
||||
'Fatal error: Response body is locked. ' +
|
||||
`This can happen when the response was already read (for example through 'response.json()' or 'response.text()').`
|
||||
);
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const reader = response.body.getReader();
|
||||
|
||||
if (res.destroyed) {
|
||||
reader.cancel();
|
||||
return;
|
||||
}
|
||||
|
||||
const cancel = (error?: Error) => {
|
||||
res.off('close', cancel);
|
||||
res.off('error', cancel);
|
||||
|
||||
// If the reader has already been interrupted with an error earlier,
|
||||
// then it will appear here; it is useless, but it needs to be caught.
|
||||
reader.cancel(error).catch(() => {});
|
||||
if (error) res.destroy(error);
|
||||
};
|
||||
|
||||
res.on('close', cancel);
|
||||
res.on('error', cancel);
|
||||
|
||||
next();
|
||||
async function next() {
|
||||
try {
|
||||
for (;;) {
|
||||
const { done, value } = await reader.read();
|
||||
|
||||
if (done) break;
|
||||
|
||||
if (!res.write(value)) {
|
||||
res.once('drain', next);
|
||||
return;
|
||||
}
|
||||
}
|
||||
res.end();
|
||||
} catch (error) {
|
||||
cancel(error instanceof Error ? error : new Error(String(error)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3225,17 +3225,21 @@ importers:
|
|||
packages/integrations/vercel:
|
||||
specifiers:
|
||||
'@astrojs/webapi': ^1.1.1
|
||||
'@types/set-cookie-parser': ^2.4.2
|
||||
'@vercel/nft': ^0.22.1
|
||||
astro: workspace:*
|
||||
astro-scripts: workspace:*
|
||||
chai: ^4.3.6
|
||||
fast-glob: ^3.2.11
|
||||
mocha: ^9.2.2
|
||||
set-cookie-parser: ^2.5.1
|
||||
dependencies:
|
||||
'@astrojs/webapi': link:../../webapi
|
||||
'@vercel/nft': 0.22.1
|
||||
fast-glob: 3.2.12
|
||||
set-cookie-parser: 2.5.1
|
||||
devDependencies:
|
||||
'@types/set-cookie-parser': 2.4.2
|
||||
astro: link:../../astro
|
||||
astro-scripts: link:../../../scripts
|
||||
chai: 4.3.7
|
||||
|
@ -9872,6 +9876,12 @@ packages:
|
|||
'@types/node': 18.11.9
|
||||
dev: true
|
||||
|
||||
/@types/set-cookie-parser/2.4.2:
|
||||
resolution: {integrity: sha512-fBZgytwhYAUkj/jC/FAV4RQ5EerRup1YQsXQCh8rZfiHkc4UahC192oH0smGwsXol3cL3A5oETuAHeQHmhXM4w==}
|
||||
dependencies:
|
||||
'@types/node': 18.11.9
|
||||
dev: true
|
||||
|
||||
/@types/sharp/0.30.5:
|
||||
resolution: {integrity: sha512-EhO29617AIBqxoVtpd1qdBanWpspk/kD2B6qTFRJ31Q23Rdf+DNU1xlHSwtqvwq1vgOqBwq1i38SX+HGCymIQg==}
|
||||
dependencies:
|
||||
|
@ -16745,7 +16755,6 @@ packages:
|
|||
|
||||
/set-cookie-parser/2.5.1:
|
||||
resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==}
|
||||
dev: true
|
||||
|
||||
/setprototypeof/1.2.0:
|
||||
resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
|
||||
|
|
Loading…
Reference in a new issue