// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { BufReader, BufWriter } from "../io/bufio.ts";
import { TextProtoReader } from "../textproto/mod.ts";
import { assert } from "../_util/assert.ts";
import { encoder } from "../encoding/utf8.ts";
import { Response, ServerRequest } from "./server.ts";
import { STATUS_TEXT } from "./http_status.ts";
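
/** Returns a `Deno.Reader` that is always at EOF: every `read()` resolves
 * `null`. Represents a message that carries no body. */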
export function emptyReader(): Deno.Reader {
  return {
    read(_: Uint8Array): Promise<number | null> {
      return Promise.resolve(null);
    },
  };
}
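
/** Creates a `Deno.Reader` that reads exactly `contentLength` bytes of body
 * from `r` and then reports EOF. Reads are capped to the remaining byte
 * count, so any bytes following the body (e.g. a pipelined request) are left
 * in the buffered reader. */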
export function bodyReader(contentLength: number, r: BufReader): Deno.Reader {
  let totalRead = 0;
  let finished = false;
  async function read(buf: Uint8Array): Promise<number | null> {
    if (finished) return null;
    let result: number | null;
    const remaining = contentLength - totalRead;
    if (remaining >= buf.byteLength) {
      result = await r.read(buf);
    } else {
      const readBuf = buf.subarray(0, remaining);
      result = await r.read(readBuf);
    }
    if (result !== null) {
      totalRead += result;
    }
    finished = totalRead === contentLength;
    return result;
  }
  return { read };
}
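
/** Creates a `Deno.Reader` that decodes a chunked transfer-encoded body from
 * `r`. Chunk data that does not fit into the caller's buffer is queued and
 * served on subsequent reads; after the terminating zero-length chunk, any
 * declared trailers are read into `h` via `readTrailers()`. */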
export function chunkedBodyReader(h: Headers, r: BufReader): Deno.Reader {
  // Based on https://tools.ietf.org/html/rfc2616#section-19.4.6
  const tp = new TextProtoReader(r);
  let finished = false;
  const chunks: Array<{
    offset: number;
    data: Uint8Array;
  }> = [];
  async function read(buf: Uint8Array): Promise<number | null> {
    if (finished) return null;
    const [chunk] = chunks;
    if (chunk) {
      const chunkRemaining = chunk.data.byteLength - chunk.offset;
      const readLength = Math.min(chunkRemaining, buf.byteLength);
      for (let i = 0; i < readLength; i++) {
        buf[i] = chunk.data[chunk.offset + i];
      }
      chunk.offset += readLength;
      if (chunk.offset === chunk.data.byteLength) {
        chunks.shift();
        // Consume \r\n
        if ((await tp.readLine()) === null) {
          throw new Deno.errors.UnexpectedEof();
        }
      }
      return readLength;
    }
    const line = await tp.readLine();
    if (line === null) throw new Deno.errors.UnexpectedEof();
    // TODO: handle chunk extension
    const [chunkSizeString] = line.split(";");
    const chunkSize = parseInt(chunkSizeString, 16);
    if (Number.isNaN(chunkSize) || chunkSize < 0) {
      throw new Deno.errors.InvalidData("Invalid chunk size");
    }
    if (chunkSize > 0) {
      if (chunkSize > buf.byteLength) {
        let eof = await r.readFull(buf);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        const restChunk = new Uint8Array(chunkSize - buf.byteLength);
        eof = await r.readFull(restChunk);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        } else {
          chunks.push({
            offset: 0,
            data: restChunk,
          });
        }
        return buf.byteLength;
      } else {
        const bufToFill = buf.subarray(0, chunkSize);
        const eof = await r.readFull(bufToFill);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        // Consume \r\n
        if ((await tp.readLine()) === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        return chunkSize;
      }
    } else {
      assert(chunkSize === 0);
      // Consume \r\n
      if ((await r.readLine()) === null) {
        throw new Deno.errors.UnexpectedEof();
      }
      await readTrailers(h, r);
      finished = true;
      return null;
    }
  }
  return { read };
}
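
/** Returns `true` if `key` may not be used as a trailer header:
 * "transfer-encoding", "content-length", and "trailer" itself. */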
function isProhibitedForTrailer(key: string): boolean {
  const s = new Set(["transfer-encoding", "content-length", "trailer"]);
  return s.has(key.toLowerCase());
}

/** Read trailer headers from the reader and append their values to `headers`.
 * The "trailer" field is deleted once all declared trailers have been read. */
export async function readTrailers(
  headers: Headers,
  r: BufReader,
): Promise<void> {
  const trailers = parseTrailer(headers.get("trailer"));
  if (trailers == null) return;
  const trailerNames = [...trailers.keys()];
  const tp = new TextProtoReader(r);
  const result = await tp.readMIMEHeader();
  if (result == null) {
    throw new Deno.errors.InvalidData("Missing trailer header.");
  }
  const undeclared = [...result.keys()].filter(
    (k) => !trailerNames.includes(k),
  );
  if (undeclared.length > 0) {
    throw new Deno.errors.InvalidData(
      `Undeclared trailers: ${Deno.inspect(undeclared)}.`,
    );
  }
  for (const [k, v] of result) {
    headers.append(k, v);
  }
  const missingTrailers = trailerNames.filter((k) => !result.has(k));
  if (missingTrailers.length > 0) {
    throw new Deno.errors.InvalidData(
      `Missing trailers: ${Deno.inspect(missingTrailers)}.`,
    );
  }
  headers.delete("trailer");
}
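
/** Parses a "trailer" header value into a `Headers` object whose keys are the
 * declared trailer names (with empty values). Returns `undefined` when the
 * field is absent and throws if any declared name is prohibited in trailers. */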
function parseTrailer(field: string | null): Headers | undefined {
  if (field == null) {
    return undefined;
  }
  const trailerNames = field.split(",").map((v) => v.trim().toLowerCase());
  if (trailerNames.length === 0) {
    throw new Deno.errors.InvalidData("Empty trailer header.");
  }
  const prohibited = trailerNames.filter((k) => isProhibitedForTrailer(k));
  if (prohibited.length > 0) {
    throw new Deno.errors.InvalidData(
      `Prohibited trailer names: ${Deno.inspect(prohibited)}.`,
    );
  }
  return new Headers(trailerNames.map((key) => [key, ""]));
}
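
/** Writes all data from `r` to `w` using chunked transfer encoding. Empty
 * reads are skipped, since a zero-length chunk would terminate the body
 * early; the "0\r\n\r\n" terminator is written once `r` reaches EOF. */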
export async function writeChunkedBody(
  w: BufWriter,
  r: Deno.Reader,
): Promise<void> {
  for await (const chunk of Deno.iter(r)) {
    if (chunk.byteLength <= 0) continue;
    const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
    const end = encoder.encode("\r\n");
    await w.write(start);
    await w.write(chunk);
    await w.write(end);
    await w.flush();
  }

  const endChunk = encoder.encode("0\r\n\r\n");
  await w.write(endChunk);
}

/** Write trailer headers to the writer. Should normally be called after
 * `writeResponse()`. */
export async function writeTrailers(
  w: Deno.Writer,
  headers: Headers,
  trailers: Headers,
): Promise<void> {
  const trailer = headers.get("trailer");
  if (trailer === null) {
    throw new TypeError("Missing trailer header.");
  }
  const transferEncoding = headers.get("transfer-encoding");
  if (transferEncoding === null || !transferEncoding.match(/^chunked/)) {
    throw new TypeError(
      `Trailers are only allowed for "transfer-encoding: chunked", got "transfer-encoding: ${transferEncoding}".`,
    );
  }
  const writer = BufWriter.create(w);
  const trailerNames = trailer.split(",").map((s) => s.trim().toLowerCase());
  const prohibitedTrailers = trailerNames.filter((k) =>
    isProhibitedForTrailer(k)
  );
  if (prohibitedTrailers.length > 0) {
    throw new TypeError(
      `Prohibited trailer names: ${Deno.inspect(prohibitedTrailers)}.`,
    );
  }
  const undeclared = [...trailers.keys()].filter(
    (k) => !trailerNames.includes(k),
  );
  if (undeclared.length > 0) {
    throw new TypeError(`Undeclared trailers: ${Deno.inspect(undeclared)}.`);
  }
  for (const [key, value] of trailers) {
    await writer.write(encoder.encode(`${key}: ${value}\r\n`));
  }
  await writer.write(encoder.encode("\r\n"));
  await writer.flush();
}
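
// A minimal sketch of the expected `writeTrailers` call pattern. The header
// values and the writer `conn` are illustrative, not part of this module:
//
//   const headers = new Headers({
//     "transfer-encoding": "chunked",
//     trailer: "deno",
//   });
//   const trailers = new Headers({ deno: "land" });
//   await writeTrailers(conn, headers, trailers);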
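
/** Writes `r` to `w` as an HTTP/1.1 response: the status line, the headers
 * (adding `content-length` or `transfer-encoding: chunked` when needed), the
 * body, and any trailers produced by `r.trailers()`. */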
export async function writeResponse(
  w: Deno.Writer,
  r: Response,
): Promise<void> {
  const protoMajor = 1;
  const protoMinor = 1;
  const statusCode = r.status || 200;
  const statusText = STATUS_TEXT.get(statusCode);
  const writer = BufWriter.create(w);
  if (!statusText) {
    throw new Deno.errors.InvalidData("Bad status code");
  }
  if (!r.body) {
    r.body = new Uint8Array();
  }
  if (typeof r.body === "string") {
    r.body = encoder.encode(r.body);
  }

  let out = `HTTP/${protoMajor}.${protoMinor} ${statusCode} ${statusText}\r\n`;

  const headers = r.headers ?? new Headers();

  if (r.body && !headers.get("content-length")) {
    if (r.body instanceof Uint8Array) {
      out += `content-length: ${r.body.byteLength}\r\n`;
    } else if (!headers.get("transfer-encoding")) {
      out += "transfer-encoding: chunked\r\n";
    }
  }

  for (const [key, value] of headers) {
    out += `${key}: ${value}\r\n`;
  }

  out += `\r\n`;

  const header = encoder.encode(out);
  const n = await writer.write(header);
  assert(n === header.byteLength);

  if (r.body instanceof Uint8Array) {
    const n = await writer.write(r.body);
    assert(n === r.body.byteLength);
  } else if (headers.has("content-length")) {
    const contentLength = headers.get("content-length");
    assert(contentLength != null);
    const bodyLength = parseInt(contentLength);
    const n = await Deno.copy(r.body, writer);
    assert(n === bodyLength);
  } else {
    await writeChunkedBody(writer, r.body);
  }
  if (r.trailers) {
    const t = await r.trailers();
    await writeTrailers(writer, headers, t);
  }
  await writer.flush();
}

/**
 * ParseHTTPVersion parses an HTTP version string.
 * "HTTP/1.0" returns (1, 0).
 * Ported from https://github.com/golang/go/blob/f5c43b9/src/net/http/request.go#L766-L792
 */
export function parseHTTPVersion(vers: string): [number, number] {
  switch (vers) {
    case "HTTP/1.1":
      return [1, 1];

    case "HTTP/1.0":
      return [1, 0];

    default: {
      const Big = 1000000; // arbitrary upper bound

      if (!vers.startsWith("HTTP/")) {
        break;
      }

      const dot = vers.indexOf(".");
      if (dot < 0) {
        break;
      }

      const majorStr = vers.substring(vers.indexOf("/") + 1, dot);
      const major = Number(majorStr);
      if (!Number.isInteger(major) || major < 0 || major > Big) {
        break;
      }

      const minorStr = vers.substring(dot + 1);
      const minor = Number(minorStr);
      if (!Number.isInteger(minor) || minor < 0 || minor > Big) {
        break;
      }

      return [major, minor];
    }
  }

  throw new Error(`malformed HTTP version ${vers}`);
}
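
// For illustration: `parseHTTPVersion("HTTP/1.1")` yields `[1, 1]`,
// `parseHTTPVersion("HTTP/2.0")` yields `[2, 0]`, and a malformed string
// such as "HTTP-1.1" throws.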
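
/** Reads and parses a single HTTP request from `bufr`. Resolves `null` if the
 * connection closes before a request line arrives, and throws
 * `Deno.errors.UnexpectedEof` if the headers are cut off mid-stream. */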
export async function readRequest(
  conn: Deno.Conn,
  bufr: BufReader,
): Promise<ServerRequest | null> {
  const tp = new TextProtoReader(bufr);
  const firstLine = await tp.readLine(); // e.g. GET /index.html HTTP/1.0
  if (firstLine === null) return null;
  const headers = await tp.readMIMEHeader();
  if (headers === null) throw new Deno.errors.UnexpectedEof();

  const req = new ServerRequest();
  req.conn = conn;
  req.r = bufr;
  [req.method, req.url, req.proto] = firstLine.split(" ", 3);
  [req.protoMajor, req.protoMinor] = parseHTTPVersion(req.proto);
  req.headers = headers;
  fixLength(req);
  return req;
}
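
/** Validates and normalizes the Content-Length header of `req`: collapses
 * repeated identical values into one, and rejects conflicting values, a
 * non-zero Content-Length on a HEAD request, and Content-Length combined
 * with Transfer-Encoding. */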
function fixLength(req: ServerRequest): void {
  const contentLength = req.headers.get("Content-Length");
  if (contentLength) {
    const arrClen = contentLength.split(",");
    if (arrClen.length > 1) {
      const distinct = [...new Set(arrClen.map((e): string => e.trim()))];
      if (distinct.length > 1) {
        throw Error("cannot contain multiple Content-Length headers");
      } else {
        req.headers.set("Content-Length", distinct[0]);
      }
    }
    const c = req.headers.get("Content-Length");
    if (req.method === "HEAD" && c && c !== "0") {
      throw Error("http: method cannot contain a Content-Length");
    }
    if (c && req.headers.has("transfer-encoding")) {
      // A sender MUST NOT send a Content-Length header field in any message
      // that contains a Transfer-Encoding header field.
      // rfc: https://tools.ietf.org/html/rfc7230#section-3.3.2
      throw new Error(
        "http: Transfer-Encoding and Content-Length cannot be sent together",
      );
    }
  }
}
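
// A minimal end-to-end sketch, assuming only this module's exports; the port,
// loop, and response are illustrative and not part of the module:
//
//   const listener = Deno.listen({ port: 8080 });
//   for await (const conn of listener) {
//     const bufr = new BufReader(conn);
//     const req = await readRequest(conn, bufr);
//     if (req !== null) {
//       await writeResponse(conn, { status: 200, body: "hello" });
//     }
//     conn.close();
//   }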