// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Ported from
// https://github.com/golang/go/blob/master/src/net/http/responsewrite_test.go

const { Buffer } = Deno;
import { assertEqual, test } from "../testing/mod.ts";
import { Response, ServerRequest } from "./server.ts";
import { BufReader, BufWriter } from "../io/bufio.ts";

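// A single test case: the Response to write and the raw HTTP text expected on
// the wire.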
interface ResponseTest {
  response: Response;
  raw: string;
}

const enc = new TextEncoder();
const dec = new TextDecoder();

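// An empty response serializes to just the status line; a Buffer body is
// expected to be written with chunked transfer-encoding.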
const responseTests: ResponseTest[] = [
  // Default response
  {
    response: {},
    raw: "HTTP/1.1 200 OK\r\n" + "\r\n"
  },
  // HTTP/1.1, chunked coding; empty trailer; close
  {
    response: {
      status: 200,
      body: new Buffer(new TextEncoder().encode("abcdef"))
    },
    raw:
      "HTTP/1.1 200 OK\r\n" +
      "transfer-encoding: chunked\r\n\r\n" +
      "6\r\nabcdef\r\n0\r\n\r\n"
  }
];

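// Writes each test response through ServerRequest.respond() into an in-memory
// buffer and compares the output byte-for-byte against the expected raw text.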
test(async function responseWrite() {
  for (const testCase of responseTests) {
    const buf = new Buffer();
    const bufw = new BufWriter(buf);
    const request = new ServerRequest();
    request.w = bufw;
    await request.respond(testCase.response);
    assertEqual(buf.toString(), testCase.raw);
  }
});

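// body() should read exactly Content-Length bytes, both for a body that fits
// in the internal read buffer and for one larger than it.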
test(async function requestBodyWithContentLength() {
  {
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("content-length", "5");
    const buf = new Buffer(enc.encode("Hello"));
    req.r = new BufReader(buf);
    const body = dec.decode(await req.body());
    assertEqual(body, "Hello");
  }

  // Larger than internal buf
  {
    const longText = "1234\n".repeat(1000);
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("Content-Length", "5000");
    const buf = new Buffer(enc.encode(longText));
    req.r = new BufReader(buf);
    const body = dec.decode(await req.body());
    assertEqual(body, longText);
  }
});

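// The chunked request body is assembled by hand: each chunk is
// "<size in hex>\r\n<data>\r\n", terminated by "0\r\n\r\n". body() should
// decode it back to the original text.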
test(async function requestBodyWithTransferEncoding() {
  {
    const shortText = "Hello";
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("transfer-encoding", "chunked");
    let chunksData = "";
    let chunkOffset = 0;
    const maxChunkSize = 70;
    while (chunkOffset < shortText.length) {
      const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
      chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
        chunkOffset,
        chunkSize
      )}\r\n`;
      chunkOffset += chunkSize;
    }
    chunksData += "0\r\n\r\n";
    const buf = new Buffer(enc.encode(chunksData));
    req.r = new BufReader(buf);
    const body = dec.decode(await req.body());
    assertEqual(body, shortText);
  }

  // Larger than internal buf
  {
    const longText = "1234\n".repeat(1000);
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("transfer-encoding", "chunked");
    let chunksData = "";
    let chunkOffset = 0;
    const maxChunkSize = 70;
    while (chunkOffset < longText.length) {
      const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
      chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
        chunkOffset,
        chunkSize
      )}\r\n`;
      chunkOffset += chunkSize;
    }
    chunksData += "0\r\n\r\n";
    const buf = new Buffer(enc.encode(chunksData));
    req.r = new BufReader(buf);
    const body = dec.decode(await req.body());
    assertEqual(body, longText);
  }
});

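// bodyStream() yields the body in chunks; concatenating the chunks in order
// must reproduce the original Content-Length body.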
test(async function requestBodyStreamWithContentLength() {
  {
    const shortText = "Hello";
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("content-length", "" + shortText.length);
    const buf = new Buffer(enc.encode(shortText));
    req.r = new BufReader(buf);
    const it = await req.bodyStream();
    let offset = 0;
    for await (const chunk of it) {
      const s = dec.decode(chunk);
      assertEqual(shortText.substr(offset, s.length), s);
      offset += s.length;
    }
  }

  // Larger than internal buf
  {
    const longText = "1234\n".repeat(1000);
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("Content-Length", "5000");
    const buf = new Buffer(enc.encode(longText));
    req.r = new BufReader(buf);
    const it = await req.bodyStream();
    let offset = 0;
    for await (const chunk of it) {
      const s = dec.decode(chunk);
      assertEqual(longText.substr(offset, s.length), s);
      offset += s.length;
    }
  }
});

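// Same as above, but the body is chunk-encoded; the stream should hand back
// the decoded payload, not the chunk framing.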
test(async function requestBodyStreamWithTransferEncoding() {
  {
    const shortText = "Hello";
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("transfer-encoding", "chunked");
    let chunksData = "";
    let chunkOffset = 0;
    const maxChunkSize = 70;
    while (chunkOffset < shortText.length) {
      const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
      chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
        chunkOffset,
        chunkSize
      )}\r\n`;
      chunkOffset += chunkSize;
    }
    chunksData += "0\r\n\r\n";
    const buf = new Buffer(enc.encode(chunksData));
    req.r = new BufReader(buf);
    const it = await req.bodyStream();
    let offset = 0;
    for await (const chunk of it) {
      const s = dec.decode(chunk);
      assertEqual(shortText.substr(offset, s.length), s);
      offset += s.length;
    }
  }

  // Larger than internal buf
  {
    const longText = "1234\n".repeat(1000);
    const req = new ServerRequest();
    req.headers = new Headers();
    req.headers.set("transfer-encoding", "chunked");
    let chunksData = "";
    let chunkOffset = 0;
    const maxChunkSize = 70;
    while (chunkOffset < longText.length) {
      const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
      chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
        chunkOffset,
        chunkSize
      )}\r\n`;
      chunkOffset += chunkSize;
    }
    chunksData += "0\r\n\r\n";
    const buf = new Buffer(enc.encode(chunksData));
    req.r = new BufReader(buf);
    const it = await req.bodyStream();
    let offset = 0;
    for await (const chunk of it) {
      const s = dec.decode(chunk);
      assertEqual(longText.substr(offset, s.length), s);
      offset += s.length;
    }
  }
});