diff --git a/.dprint.json b/.dprint.json index d20b1673ba..1decc7863e 100644 --- a/.dprint.json +++ b/.dprint.json @@ -13,7 +13,9 @@ "associations": "**/*.rs", "rustfmt": "rustfmt --config imports_granularity=item" }, - "includes": ["**/*.{ts,tsx,js,jsx,json,md,toml,rs}"], + "includes": [ + "**/*.{ts,tsx,js,jsx,json,md,toml,rs}" + ], "excludes": [ ".cargo_home", ".git", @@ -33,6 +35,8 @@ "cli/tests/testdata/byte_order_mark.ts", "cli/tests/testdata/encoding", "cli/tests/testdata/fmt/*", + "cli/tests/testdata/lint/glob/*", + "cli/tests/testdata/test/glob/*", "cli/tests/testdata/import_assertions/json_with_shebang.json", "cli/tests/testdata/run/inline_js_source_map*", "cli/tests/testdata/malformed_config/*", @@ -46,14 +50,15 @@ "test_util/wpt", "third_party", "tools/node_compat/TODO.md", - "tools/node_compat/versions", + "tools/node_compat/node", "tools/wpt/expectation.json", - "tools/wpt/manifest.json" + "tools/wpt/manifest.json", + "ext/websocket/autobahn/reports" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.84.0.wasm", - "https://plugins.dprint.dev/json-0.17.0.wasm", - "https://plugins.dprint.dev/markdown-0.15.2.wasm", + "https://plugins.dprint.dev/typescript-0.85.0.wasm", + "https://plugins.dprint.dev/json-0.17.3.wasm", + "https://plugins.dprint.dev/markdown-0.15.3.wasm", "https://plugins.dprint.dev/toml-0.5.4.wasm", "https://plugins.dprint.dev/exec-0.3.5.json@d687dda57be0fe9a0088ccdaefa5147649ff24127d8b3ea227536c68ee7abeab" ] diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 44c670d275..e6e5a41f03 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ \n\n\n\nImplementation of function.prototype.bind\n\n## Example\n\nI mainly do this for unit tests I run on phantomjs.\nPhantomJS does not have Function.prototype.bind :(\n\n```js\nFunction.prototype.bind = require(\"function-bind\")\n```\n\n## Installation\n\n`npm install function-bind`\n\n## Contributors\n\n - 
Raynos\n\n## MIT Licenced\n\n [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg\n [travis-url]: https://travis-ci.org/Raynos/function-bind\n [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg\n [npm-url]: https://npmjs.org/package/function-bind\n [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png\n [6]: https://coveralls.io/r/Raynos/function-bind\n [7]: https://gemnasium.com/Raynos/function-bind.png\n [8]: https://gemnasium.com/Raynos/function-bind\n [deps-svg]: https://david-dm.org/Raynos/function-bind.svg\n [deps-url]: https://david-dm.org/Raynos/function-bind\n [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg\n [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies\n [11]: https://ci.testling.com/Raynos/function-bind.png\n [12]: https://ci.testling.com/Raynos/function-bind\n","maintainers":[{"name":"raynos","email":"raynos2@gmail.com"},{"name":"ljharb","email":"ljharb@gmail.com"}],"time":{"modified":"2022-06-18T04:14:28.973Z","created":"2013-06-16T23:25:41.232Z","0.1.0":"2013-06-16T23:25:42.888Z","1.0.0":"2014-08-09T17:02:51.069Z","1.0.1":"2014-10-03T07:38:13.045Z","1.0.2":"2014-10-05T07:23:52.930Z","1.1.0":"2016-02-14T08:28:42.411Z","1.1.1":"2017-08-28T07:51:35.937Z"},"author":{"name":"Raynos","email":"raynos2@gmail.com"},"repository":{"type":"git","url":"git://github.com/Raynos/function-bind.git"},"homepage":"https://github.com/Raynos/function-bind","keywords":["function","bind","shim","es5"],"contributors":[{"name":"Raynos"},{"name":"Jordan Harband","url":"https://github.com/ljharb"}],"bugs":{"url":"https://github.com/Raynos/function-bind/issues","email":"raynos2@gmail.com"},"readmeFilename":"README.md","users":{},"license":"MIT"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz b/cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz new file mode 100644 index 0000000000..b55e814a0a Binary files 
/dev/null and b/cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz differ diff --git a/cli/tests/testdata/npm/registry/get-intrinsic/registry.json b/cli/tests/testdata/npm/registry/get-intrinsic/registry.json new file mode 100644 index 0000000000..b09a472f65 --- /dev/null +++ b/cli/tests/testdata/npm/registry/get-intrinsic/registry.json @@ -0,0 +1 @@ +{"_id":"get-intrinsic","_rev":"8-56a236fdf4f8cc6cb2833aa6d5ee81d7","name":"get-intrinsic","dist-tags":{"latest":"1.2.0"},"versions":{"1.0.0":{"name":"get-intrinsic","version":"1.0.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"tape 'test/*'","coverage":"nyc npm run tests-only","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan 
Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.2.0","aud":"^1.1.2","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","eslint":"^7.12.1","foreach":"^2.0.5","has-bigints":"^1.0.0","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.8.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"gitHead":"516f403fe75287a2a80a8d48c2061f6b3238ec0c","_id":"get-intrinsic@1.0.0","_nodeVersion":"14.15.0","_npmVersion":"6.14.8","dist":{"integrity":"sha512-EMuu0ud8uAP4Zs6tQqMeHiY1PbIBDcZ92QVxqeLfqTMbyvqcDbrtHjfu0RWh8QaUNJ3lP1DSX3J2okgj9JE47g==","shasum":"035ccf14a00ae2eb3d110a00fcd10e74706a8fe7","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.0.tgz","fileCount":11,"unpackedSize":25104,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJfnCtMCRA9TVsSAnZWagAAwm0P/0h8E1pcOVKKP6XQ6No4\n9tdWqfwRUlG8RTYs8sXW8g2qL3PxQdM1ql5GztOTUSstrtEE2sux290V6w1B\n829I8YHJbw667RuqIOuUBnXjaFm3Eb6S1Tvhvlbff0MtEoP9dZwgvqHn6yLx\niIBIRDCEJhuqrfVmjbpy6hLDEsxhaWsSxPj81gm+aHY6xVb4f/dZvrDp8R9j\nlaEwsE7EK+cEn3ifTQYYHlv8an9QkPFTHDLjeZ+wdWBnut+tepMeFM+ZjG+d\ngdTg2IeNfXFw/QSU5eDQtjqHZ2Fv2T4fFn2blhkrIbEMmwxczzM6QuQiOGc8\n1suIs9vDdt8qq6h8ESs9hr5I2hgE3M4Xxt5ziZ95TifSDRNyyQGbMy5vj3CY\n0z2e5M6zr5b2mkiWm0A5tZI4Mdy/2XrpJxTE6/opYgvA5mQ0GIYzO7r1Zt+G\nmHD/MDeTe2WxBWizo3nv0IGRvZeHZ/JjcRHdHeRAq+rqJ6o4hvYanxfoGlGA\njCUXYsZzR2XLfxBiTeSUO9VQ5YSBtsfU+egeRNwOw5PwxpGwfW4VUVOPHwHJ\n5dHlRGuWHDOn+4uF+09o5B70By6rcGZsHV62jX5ci5JclHswBdrvcftucfyG\nyR2qyuEnxq7O+S2D/uMylQLqTdCdJ6Bf58TKGSzpsp45oWrSmIsSTdiVWIsG\nz7pB\r\n=DQ3f\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIEehDACke//ohQCAy5pJo/R/9J5UGrufkNBiQJqe3y2DAiBC7txNPrBmQB4PjK/Ydow1627eRDEIl0wz1IkhWkBAkw=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.0_1604070219549_0.3039159077605891"},"_hasShrinkwrap":false},"1.0.1":{"name":"get-intrinsic","version":"1.0.1","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/*'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.2.0","aud":"^1.1.2","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","es-value-fixtures":"^1.0.0","eslint":"^7.12.1","foreach":"^2.0.5","has-bigints":"^1.0.0","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.8.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"1802957d1ff6a04965505f54c3d354ad7fa31034","_id":"get-intrinsic@1.0.1","_nodeVersion":"14.15.0","_npmVersion":"6.14.8","dist":{"integrity":"sha512-ZnWP+AmS1VUaLgTRy47+zKtjTxz+0xMpx3I52i+aalBK1QP19ggLF3Db89KJX7kjfOfP2eoa01qc++GwPgufPg==","shasum":"94a9768fcbdd0595a1c9273aacf4c89d075631be","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.1.tgz","fileCount":12,"unpackedSize":26012,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJfnPbUCRA9TVsSAnZWagAAO2UP/37xSp1p1f50Pt6yyL4k\n1BKcgA+OfPCEhFnJA1AKqYeL8rVBLr7VoSvMzTQ9JonFIIXLlnlVe8P91KWE\n1AXoYJr/dW8ZG7vHs37jK7aiEweyYlgLebWPOM2T2bU0WFoaaIws1fa+TwTS\neCqY8Q7XysXV3syWXX1El/2TIXzSVa8g8gOVJy/j8j+fthSAPD0H6ZTCvYQ6\nPSWIFAYhRIWXLGel3T/TE1p61AWZuEtf8B+e6K8hPiMuzhNjODCBqJQV246D\nPznhAbJV81wNIdM0ohuT19+t7GqjjKbKKMpU0LZzSCjZF3Q+zLI4H+qMY0Bl\nHFiqspAfS0r/wHWoBkzODoHWMduJ/JPtE/uee8ae92iC9fR9Y8fSOWXTt07W\nFWSGyLyJ6CQS7d+dJwFb+2cQNckV/9VKu+y58z+i6x6/FExmHNBdYt4ps3ju\nH89DQEmfq5wyLcceng9K0a7A6vfLM6MvEk8FugXVhGORioFOkscE3f8gS7Sc\nIzbl739iiG3oGvNzRgF229t2xwUZXVNqGJ4Sg3AQM/RX75+Mu1Jlx52z0ECY\nLZGX16A+J3N955DxJktRA1l7RA+zihIs1fZKHm+fErP547biV5p+TNocKrrn\nwghypBweNbcNkzNds6qczoB/3Vsc2OxHaVUBWNleqmlrQ6Qk1AA4ZRIdhQZ8\njWu7\r\n=BhVO\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFzb1R9CMnRu3GNwT893R3yms0wnrxROjmNn7s1aWEdLAiEAgP3VkUFew7/H+j05N3mW3XntPRU+Smw2z+q8kZl3CC4="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.1_1604122323843_0.022947285149020447"},"_hasShrinkwrap":false},"1.0.2":{"name":"get-intrinsic","version":"1.0.2","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/*'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.3.0","aud":"^1.1.3","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","es-value-fixtures":"^1.0.0","eslint":"^7.15.0","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"eec980691af2fafb4e0d9207e473c9e1eb7995e6","_id":"get-intrinsic@1.0.2","_nodeVersion":"14.15.1","_npmVersion":"6.14.8","dist":{"integrity":"sha512-aeX0vrFm21ILl3+JpFFRNe9aUvp6VFZb2/CTbgLb8j75kOhvoNYjt9d8KA/tJG4gSo8nzEDedRl0h7vDmBYRVg==","shasum":"6820da226e50b24894e08859469dc68361545d49","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.2.tgz","fileCount":13,"unpackedSize":92891,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJf3FiZCRA9TVsSAnZWagAAhqwP/ApVcuwN67ClrOqU4sXI\nq1LZjlVXwkM54mRbfqChOsZUZsxW1V8xCpeaaZE0h2JKH+PnzS/GUvAcd0iA\nXbyjsIfvc66lu0bBIMKrP/zLMQ7LIm3q9Vr7iLIWi7LXXCGNqhtNx0rGgPVi\npk6c0o6MUK6Tr1RGtdpQZVnJqF9veFC6RVApu+xFYt8QWXuYDTGKrS093aRU\noI3SmbrjLSlskjXSVREGFja/L5JsiHbds7meSHPWdF57AhatrEb9X8h93fdy\n4Pz1yUKjd1QFXoAg4Pw+TLRPO0VN4JYeHWwaQ+mmOl5RViz+Yiq6joR+Fo1r\nsdSWHdijgx3XzGH4nbiP9mjR/TcypqZQeEP1H5TDZfDSSRSg9Eus0BQuHwOa\n9kNLDQywTsBBsB8S5tlJ4QSrTSn6Y8q5RsQIl9IIHwAUW/0GyiAUfCJMYCiE\n9A13GnS6ZPJEdJu960P7ZlbvnfpPbiQaMOMyC6kXOfACBkcxhhc4SofQkMZw\n5v7Xjg3Nz6inEnpbXuuU3Tj3WmDMMWoyX06sDbv50X/gzciNSy6ptcJgultt\n8aGrP+i/QWfHzGdguIVlz+2wf5kYG1jRyvbKsVZKJ2wvnnxUC8Ji0yjFO6j/\nKuwcsva60yDyaAjpy4Sbw7WSE1etufVa0rXf96788xqPhAhvFcGzzVKiOigr\nUP2R\r\n=SifI\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDLO/WsMu1yogrwHBInw7hC3MUpX9G+E/a0CiaubHBZGAIhAOSWmWgEOV39qmGn7YkvbHgzc+IxhjCPiTch0LAig1NA"}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.2_1608276120877_0.84071357918607"},"_hasShrinkwrap":false},"1.1.0":{"name":"get-intrinsic","version":"1.1.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.5.0","aud":"^1.1.3","auto-changelog":"^2.2.1","call-bind":"^1.0.2","es-abstract":"^1.18.0-next.2","es-value-fixtures":"^1.0.0","eslint":"^7.18.0","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.1.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"aaaaa0d5cd17d4b0b274cdaa1f7f3e6007fc9e59","_id":"get-intrinsic@1.1.0","_nodeVersion":"14.15.4","_npmVersion":"6.14.10","dist":{"integrity":"sha512-M11rgtQp5GZMZzDL7jLTNxbDfurpzuau5uqRWDPvlHjfvg3TdScAZo96GLvhMjImrmR8uAt0FS2RLoMrfWGKlg==","shasum":"892e62931e6938c8a23ea5aaebcfb67bd97da97e","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.0.tgz","fileCount":10,"unpackedSize":29482,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgD0SqCRA9TVsSAnZWagAAVjQP/276R/hJLC3r7gUvPhUj\n8mwJCVpdCjIzaf4fBVvu0mB4cXe4G+t+N1w3JG9wkBqTpoHjNRzUUxhcFYeX\nnos9b4CeNftDFVgwixFHcRS0Nk0A6SUSj7jdmLiyrM3Lc0KVrMfe7G7ECeSV\nKGWViXtP8oEZJ6FZURMS9yMraQzeh5ChjcGKXsX0Jf0IpUXlDaib0ElChkLr\nN6iXsGveM9tYf15JjBW/gyJXhMPQLGE37jdCBkoW6WeOT7twWr0KDcmn6QHg\n775CZxPl1VJpEiXIoSk0PnAxRN95MIRZvdQ9k1ctSuE5kpErrTZk7j7i4i2T\n5bbOOcLvxX+StCvNtOh7M52RyDxPaagFSoKaNHxmW4e2muDDuvWRPA3n/FI4\nuXw1J1Lb1lvbhx/L9wLNN9SdPFcFOA2+t23SJE/F8abLHNsdhoBlCyoCmULL\nIKdrVXWxFbopnQF3n18ajCIDJ9E4J1vr6XU7+xYc7Pl1Nuel9AfQU5PuLAFy\nj0ziiUntUOuYWC0xHuhnYVHDWmU+1UB5IoxlQi9uAYp0/RBWg4mmAcQ2dK9B\nJDxOaa/Rmkp1F/5htSqD6hvfAH8Pv/SpEglGRUPH4mmHF183iLEwls2GfOAJ\nh2Baw9u2yX7COfPYqDG2MVbwB6wafDPgUKNNCZ+FjMRgLCt9VrzwVjPu9QF/\n5V7l\r\n=QaDr\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIA+e/mSh+QgkqBLYqQTWcVvq5FQ05WbdMfaLjOMgM3N5AiB29JZnuXxh8SsBV17yu9nizQr5iwkWwK5HPPpe9Dx8Vg=="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.0_1611613354056_0.8648044903277086"},"_hasShrinkwrap":false},"1.1.1":{"name":"get-intrinsic","version":"1.1.1","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.5.0","aud":"^1.1.3","auto-changelog":"^2.2.1","call-bind":"^1.0.2","es-abstract":"^1.18.0-next.2","es-value-fixtures":"^1.0.0","eslint":"^7.19.0","evalmd":"^0.0.19","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.1.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"efa0daa5166f1a06658001e34f49b5f1185786eb","_id":"get-intrinsic@1.1.1","_nodeVersion":"14.15.4","_npmVersion":"6.14.10","dist":{"integrity":"sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==","shasum":"15f59f376f855c446963948f0d24cd3637b4abc6","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.1.tgz","fileCount":10,"unpackedSize":32513,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgGroRCRA9TVsSAnZWagAAAaMP/1kYGifz/BvcSYhnlVk+\nSnCwbyuOaTefaIpH15yyWb9sjo+1fgUw4Ej3GmVdpmyW45Tj0WePwRWhbpok\n1aKIx3P8/q8m95HymXcR50VRByFyxpNFxtWuo674yTzvYxN1+QqXVSO7xeLI\nL+bRYOScvb+f5DI8t5LqhZlvQgfiqyWXZI4L+gbwfIIrE7EUg5DZJZrzIBOY\n5SExvgueChcIptQgu8ppE5kADlGqmTHUBt3P68EU5HRc5Z/LN5csgTu63VkJ\nxx3pTXa/Q672C9qj1CqedmughzgkfBjSuKOhbQWgILCbNy0A6TKKVirpc2fB\nuI0f4vWTf1ImGrspsfIH2IR4SQqMmVy8qpgwG/YtU3q9Si9pOcXQ1q+JnyD6\nDoLaiTEVPC8ks/bKGjtNBDUmlnEuyluaaFuK3cfJQMGp2n+FNLXI5LBz9uoR\nkpqUHNJBFJ9HbbMfBUmTS3K3duAkgOR+izFQgAJJWzYbuAvM7GGAoy1eQUrY\nuD1tAQglMbB0YwsjnDxvGcV32iFoMttrcXb5xKUOlVaFMD2D9PDryeO/gu0N\nm3wDWCKhmMjGNWV6WA9q0mD6YRCPHZUwmb4xSFdz/i1MP4iVjVKc1tz6RAiT\nLqxKnm4uPjTsPPGrXWYdRs5EEF2/QHcmCex2kwk5Ul4fsVayaNOAzB3F+iSp\nbWf1\r\n=5aSN\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCNo13JEcIXzNTEsjtVSMSsBL9CAqU56ZzTh56ilFAwmgIgCFy2IWS7fXyDYWF1/aSqiRCTW9wVIONaN0YUk7J0diM="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.1_1612364304893_0.18784978138621788"},"_hasShrinkwrap":false},"1.1.2":{"name":"get-intrinsic","version":"1.1.2","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md 
&& git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.20.1","es-value-fixtures":"^1.4.1","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.2","safe-publish-latest":"^2.0.0","tape":"^5.5.3"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"1692762305146cdee0bd0a31cb0a57ffd9240c8c","_id":"get-intrinsic@1.1.2","_nodeVersion":"18.3.0","_npmVersion":"8.11.0","dist":{"integrity":"sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==","shasum":"336975123e05ad0b7ba41f152ee4aadbea6cf598","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.2.tgz","fileCount":9,"unpackedSize":36671,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIGk690kzjXZ7zcAng4wWyvMEdoQ4xPaEtBm2SQIm48nMAiBm4P1A9nW2MVt9ngQfwiaKLc6wAZZBcdlpzlq8Br1v2Q=="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJioLxOACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2Vmqs3A//RY5KjVotePADUXDnTk/obYp33AmFt7aXnVRafoQNIhvDC7Ya\r\nMj9g+8NGAG2D1xBgD+Q/dhvvfZQlpuLRzkfQg4V92liFmgpoEB0ue6BP0TD8\r\n37S9yioBWG6LTJkqbvjc68V3gi3t5jWTqHJeYPW4mxJF6MMCx7m9EYWGtqUR\r\n0AnVNqH9j4SJ/X3qHach0vgsI8hnb8iXeTg8X7465MmQke+tygQbT3rYLN/L\r\nSni+uwm6EMybJ8Lh5GKq5U6aKr+inAYm/h47js4D7/A+tvfzYfWvLjr1l4J5\r\n+cMKLskFEP6g/Xz9jaYCCRxe7YGaiTmH/sUgT+kTzo2oJaYh6xd/6bgvGCut\r\nPFBBxh0lknSR1wbiQz3hcdHu42D0a9jiOmtc3DlkiRzrez6pEJMDnKu+Pbck\r\nkqhrBMLYyLYkLHJzeB07aN+KuspIZgjMJ/rSsgqla8JHv6TqWx0BbaoZ53VA\r\nPzf3fs73zh7IhNLznCQVNHR9iM0w+dUMI2n6c0QlOaimFkJ+61cHA13zU20x\r\nllmejv9s5XEkvuVSU/ibuYEkbnqDg62sWcm0HgGuL6k+RKe7Mj/gOds/Zn9n\r\ngvMIl1y7zeaIrEmQHfj6ndXAB1Mv9eIySBA4//nd+oVZLibt4pkAJIIy8xY1\r\nnHBGLFN4jrtJBI6I36xrNfrC0DVumPnTh8A=\r\n=ZCcH\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.2_1654701133878_0.7209001100988714"},"_hasShrinkwrap":false},"1.1.3":{"name":"get-intrinsic","version":"1.1.3","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.20.2","es-value-fixtures":"^1.4.2","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.2","safe-publish-latest":"^2.0.0","tape":"^5.6.0"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"testling":{"files":"test/GetIntrinsic.js"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"65cac0bca7cf7db4d1594bd1f7c68e921adedb5b","_id":"get-intrinsic@1.1.3","_nodeVersion":"18.9.0","_npmVersion":"8.19.1","dist":{"integrity":"sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==","shasum":"063c84329ad93e83893c7f4f243ef63ffa351385","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.3.tgz","fileCount":9,"unpackedSize":37128,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFx7EpcX7UchnW1MjTW4LY/IDpL1jl3H+M29ezR+WSHQAiEA7sXR/8EoSjeBOAK0Z3he//k1OtgvYgkt6hGAGtrHojM="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJjIACSACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmpAuA//URHhCHAEO247jjMd3BcpIpNWgbXTtnIEpHqbGulxZuzwd8hD\r\n7a9fqywLrEQq10reRxRNeS6Zk7BSv9QqwtZx7dTmi95ZxPETZvF2khJ6ggyj\r\nhAMonDjaP79Ki5Dwz/JH2WxsefDcAAPRRftEmm73oSJgt9EEdssmmAXgG5JS\r\n5OU3tCLGb4ricSaPNv2g2QDDLuLh/j6axKGn5bsQZFCvK87PV1vR/9Q6EVUz\r\nNDgWOxcgQTXgpVJYPsd6j8FiB3PiuFmd7/aLiqUMncStQDzklRHd8zUcxay3\r\n+0NplukrzPQPRDjMLuLeIX6WX+145sPZcThc7s9nrfmk2ODpDmLUYPZdki6U\r\nUBBa9aK3kDBIocvwVrleIzyY53SKvmmZ6jqmP5wS9pEWPa1gdD+VugZGazEK\r\noYK1MH77WG9fJb/2n27AWhJ/Tm9m177G+9rYQKIA+Q9JmZom+qNQviXkSkHL\r\n9MhOdjGzH0hnhX25ml81l6I2a/spKuN6RsHKNruUEUUxAyQYxIm6ZJs6D2Hy\r\nDjd+LklfZnCUsJUIJarqkB8XnRYsrKR+zrcTjxuRS0vQMBs+t/DYyXaS1k73\r\n4SR/biyt43/SOVtwZ25ThMxfGBZ+gwIqsoih3Rovs18QsrZNDyeU3fzfcTCM\r\nRioeF4ejfq26VnL5JSIEvGDWBNihLZhCw5U=\r\n=Yum9\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.3_1663041682205_0.23362607287463288"},"_hasShrinkwrap":false},"1.2.0":{"name":"get-intrinsic","version":"1.2.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.1","aud":"^2.0.2","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.21.1","es-value-fixtures":"^1.4.2","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","gopd":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.3","safe-publish-latest":"^2.0.0","tape":"^5.6.3"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"testling":{"files":"test/GetIntrinsic.js"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"0b60d7ac9d93e8824a36ddd52635be1fc13758d1","_id":"get-intrinsic@1.2.0","_nodeVersion":"19.4.0","_npmVersion":"9.2.0","dist":{"integrity":"sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==","shasum":"7ad1dc0535f3a2904bba075772763e5051f6d05f","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz","fileCount":9,"unpackedSize":38691,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDb29OYVbJKfex+ljyYg1fRxZiHvAcbeMgBRIcq6cP6MgIhAMPAotqdPrJxkwnAeSq+RDK//aoFWESiSJuvWBmlhUAH"}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJjykKtACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmoUEQ/+PfWdGnewUZa86B0H4haSxBRBlwuFGg7GpdoEhJ3Ll1A9p3Jb\r\nvU2+9RyeNp1p2LNrktOAenAcs7I3dLl0dAspHjjL3uLNhPkrlpeVqOktXs+H\r\n7l0VaAOrLZVi1f+akY117IO0OO6FwRLV42VdM7QIH2BcfXuCyDDke41rq5oS\r\nR9I+8C2SCW2/OxXcMG9nYOpW494hmHRRYh9mpovJUOpAerMUgy334rK72ArR\r\nNsgnAu4luu/7RmC5BNPS26Q7NVCVf7THdx2v3OSkgFvTrdS+wu0NhqkakppS\r\nfGTYkR1m+7vX9YLHIokoIDjHtHaNPMUb7e51OxegjtPEh7FBacfRs0bxfx7Z\r\nJLhYAbjSanGci/gfC2gT1YIPUgydWbx1Ejmol9j7QmA9BQuHSxHu+SiaRA46\r\n+F/Fzbkp1sC0gqo4qGN04Lw8+2g2DHGfBygd6vcUtnaHMz2coCF4rlvcW2fN\r\nz6tT4pcE/AWtC6l9yCWzAWDjEZjF2kBycuiY36IlhhPjtj3qiGQqnTPLL10d\r\nUWA9ZTqFH2k+o4tKhz8g1kQeBApgpRgr9FfukaNq/TZi2tguQ2MlHQ+0R0ZC\r\nZZRtnin4nEpjZ+GkAcfnm9QCrripiWwDtSgXsKvgSICOdp9urrSgfcEAuEvM\r\nrTjOosJAUVohG06+klaUIe6mIssavg3AgjU=\r\n=CxS4\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.2.0_1674199725115_0.9427568240984563"},"_hasShrinkwrap":false}},"time":{"created":"2020-10-30T15:03:39.549Z","1.0.0":"2020-10-30T15:03:39.692Z","modified":"2023-01-20T07:28:45.383Z","1.0.1":"2020-10-31T05:32:03.992Z","1.0.2":"2020-12-18T07:22:01.056Z","1.1.0":"2021-01-25T22:22:34.211Z","1.1.1":"2021-02-03T14:58:25.007Z","1.1.2":"2022-06-08T15:12:14.076Z","1.1.3":"2022-09-13T04:01:22.362Z","1.2.0":"2023-01-20T07:28:45.291Z"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"description":"Get and robustly cache all JS language-level intrinsics at first require time","homepage":"https://github.com/ljharb/get-intrinsic#readme","keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"author":{"name":"Jordan 
Harband","email":"ljharb@gmail.com"},"bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"license":"MIT","readme":"# get-intrinsic [![Version Badge][npm-version-svg]][package-url]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\nGet and robustly cache all JS language-level intrinsics at first require time.\n\nSee the syntax described [in the JS spec](https://tc39.es/ecma262/#sec-well-known-intrinsic-objects) for reference.\n\n## Example\n\n```js\nvar GetIntrinsic = require('get-intrinsic');\nvar assert = require('assert');\n\n// static methods\nassert.equal(GetIntrinsic('%Math.pow%'), Math.pow);\nassert.equal(Math.pow(2, 3), 8);\nassert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8);\ndelete Math.pow;\nassert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8);\n\n// instance methods\nvar arr = [1];\nassert.equal(GetIntrinsic('%Array.prototype.push%'), Array.prototype.push);\nassert.deepEqual(arr, [1]);\n\narr.push(2);\nassert.deepEqual(arr, [1, 2]);\n\nGetIntrinsic('%Array.prototype.push%').call(arr, 3);\nassert.deepEqual(arr, [1, 2, 3]);\n\ndelete Array.prototype.push;\nGetIntrinsic('%Array.prototype.push%').call(arr, 4);\nassert.deepEqual(arr, [1, 2, 3, 4]);\n\n// missing features\ndelete JSON.parse; // to simulate a real intrinsic that is missing in the environment\nassert.throws(() => GetIntrinsic('%JSON.parse%'));\nassert.equal(undefined, GetIntrinsic('%JSON.parse%', true));\n```\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n## Security\n\nPlease email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report.\n\n[package-url]: 
https://npmjs.org/package/get-intrinsic\n[npm-version-svg]: https://versionbadg.es/ljharb/get-intrinsic.svg\n[deps-svg]: https://david-dm.org/ljharb/get-intrinsic.svg\n[deps-url]: https://david-dm.org/ljharb/get-intrinsic\n[dev-deps-svg]: https://david-dm.org/ljharb/get-intrinsic/dev-status.svg\n[dev-deps-url]: https://david-dm.org/ljharb/get-intrinsic#info=devDependencies\n[npm-badge-png]: https://nodei.co/npm/get-intrinsic.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/get-intrinsic.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/get-intrinsic.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=get-intrinsic\n[codecov-image]: https://codecov.io/gh/ljharb/get-intrinsic/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/ljharb/get-intrinsic/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/get-intrinsic\n[actions-url]: https://github.com/ljharb/get-intrinsic/actions\n","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz b/cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz new file mode 100644 index 0000000000..ee60a4f9e0 Binary files /dev/null and b/cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz differ diff --git a/cli/tests/testdata/npm/registry/has-property-descriptors/registry.json b/cli/tests/testdata/npm/registry/has-property-descriptors/registry.json new file mode 100644 index 0000000000..6cc2fae107 --- /dev/null +++ b/cli/tests/testdata/npm/registry/has-property-descriptors/registry.json @@ -0,0 +1 @@ +{"_id":"has-property-descriptors","name":"has-property-descriptors","dist-tags":{"latest":"1.0.0"},"versions":{"1.0.0":{"name":"has-property-descriptors","version":"1.0.0","description":"Does the environment have 
full property descriptor support? Handles IE 8's broken defineProperty/gOPD.","main":"index.js","exports":{".":"./index.js","./package.json":"./package.json"},"sideEffects":false,"scripts":{"prepublishOnly":"safe-publish-latest","prepublish":"not-in-publish || npm run prepublishOnly","pretest":"npm run lint","prelint":"evalmd README.md","lint":"eslint --ext=js,mjs .","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/inspect-js/has-property-descriptors.git"},"keywords":["property","descriptors","has","environment","env","defineProperty","getOwnPropertyDescriptor"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/inspect-js/has-property-descriptors/issues"},"homepage":"https://github.com/inspect-js/has-property-descriptors#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","auto-changelog":"^2.4.0","eslint":"=8.8.0","in-publish":"^2.0.1","evalmd":"^0.0.19","nyc":"^10.3.2","safe-publish-latest":"^2.0.0","tape":"^5.5.3"},"dependencies":{"get-intrinsic":"^1.1.1"},"testling":{"files":"test/index.js"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"gitHead":"3771c8b4f20e963d3a64b101b3233c20791c32ae","_id":"has-property-descriptors@1.0.0","_nodeVersion":"17.9.0","_npmVersion":"8.3.1","dist":{"integrity":"sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==","shasum":"610708600606d36961ed04c196193b6a607fa861","tarball":"http://localhost:4545/npm/registry/has-property-descri
ptors/has-property-descriptors-1.0.0.tgz","fileCount":9,"unpackedSize":9308,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIGu4gOEZPx0AUfM6YuqldUOElOureYihKd6CDr1Dpv9gAiBYuTEkAw8K4moKvJ7BXTohQQAJNKNWCnAJlOEyg06yYg=="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJiWQgoACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmpuEhAAlWpOnMjEOy8ZCmrOUmE82lejonXdgy/c0+t609UYz2VO0nKr\r\n0RqhNm3xSvSU/0wMa1LOKb0rcNnRNk9YFffxDMWi6xE84n7jjWbf1vcZ1xEb\r\nFLb5T7MGEveF6lNeeMLOZPJVyQ3WDEwio5meyayWVRzEBrJq5yT+e5/hgFwz\r\nLDxMfil2CosRkDeqr+YHJC5s57qcTOkM0SKLv7pfvtVymnFPuVjTkZfwb26g\r\nwRu7oVkZFjIBf6bG0wCxj9fMCMsHpKI27rU9O3K+U0DCJLtSG92bTyvDJ0ig\r\nNLBiX5zwelnLHUEGmvIwt3/V2ZxFvK5Soymnk4COvCI3QgJkGAKoBJDgsLmP\r\nDcvHe5NEidZqvh/8kfiqwHqQ0tAUImPGQoQ3j+Sx6oN3+q+6d9RWkUyfv69I\r\n0268s/Mf2Rf7Ow0PbgkQn3qq/dxR/PvPKDSTz53gpmiDd79Hqjv9KTNTawBG\r\nHF/Nga5rVOUZHQgvhaOoXrGDsIVLfKeda+UrFwKHN4zkbvO58LaBoIMjHKz3\r\nLB8Qddh4Cqm4QdK6fBgmrDyCI79AIICCeETfQCGU/gitLcS0mQTCIHFSdwtR\r\nwt0t85krp1lpjhKA8HfHgwj5Ky9A/KdFPI1DrbbqjJiRCAnsSJzdJuw0eXcn\r\nUai87G82D9Q2HmEpEgBhWKsa8PQU4pO18E0=\r\n=iq2o\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-property-descriptors_1.0.0_1650001960160_0.19595316522875494"},"_hasShrinkwrap":false}},"time":{"created":"2022-04-15T05:52:40.160Z","1.0.0":"2022-04-15T05:52:40.310Z","modified":"2022-04-15T05:52:40.433Z"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"description":"Does the environment have full property descriptor support? 
Handles IE 8's broken defineProperty/gOPD.","homepage":"https://github.com/inspect-js/has-property-descriptors#readme","keywords":["property","descriptors","has","environment","env","defineProperty","getOwnPropertyDescriptor"],"repository":{"type":"git","url":"git+https://github.com/inspect-js/has-property-descriptors.git"},"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"bugs":{"url":"https://github.com/inspect-js/has-property-descriptors/issues"},"license":"MIT","readme":"# has-property-descriptors [![Version Badge][npm-version-svg]][package-url]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\nDoes the environment have full property descriptor support? Handles IE 8's broken defineProperty/gOPD.\n\n## Example\n\n```js\nvar hasPropertyDescriptors = require('has-property-descriptors');\nvar assert = require('assert');\n\nassert.equal(hasPropertyDescriptors(), true); // will be `false` in IE 6-8, and ES5 engines\n\n// Arrays can not have their length `[[Defined]]` in some engines\nassert.equal(hasPropertyDescriptors.hasArrayLengthDefineBug(), false); // will be `true` in Firefox 4-22, and node v0.6\n```\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[package-url]: https://npmjs.org/package/has-property-descriptors\n[npm-version-svg]: https://versionbadg.es/inspect-js/has-property-descriptors.svg\n[deps-svg]: https://david-dm.org/inspect-js/has-property-descriptors.svg\n[deps-url]: https://david-dm.org/inspect-js/has-property-descriptors\n[dev-deps-svg]: https://david-dm.org/inspect-js/has-property-descriptors/dev-status.svg\n[dev-deps-url]: https://david-dm.org/inspect-js/has-property-descriptors#info=devDependencies\n[npm-badge-png]: 
https://nodei.co/npm/has-property-descriptors.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/has-property-descriptors.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/has-property-descriptors.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=has-property-descriptors\n[codecov-image]: https://codecov.io/gh/inspect-js/has-property-descriptors/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/inspect-js/has-property-descriptors/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-property-descriptors\n[actions-url]: https://github.com/inspect-js/has-property-descriptors/actions\n","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz b/cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz new file mode 100644 index 0000000000..a5f34be724 Binary files /dev/null and b/cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz differ diff --git a/cli/tests/testdata/npm/registry/has-symbols/registry.json b/cli/tests/testdata/npm/registry/has-symbols/registry.json new file mode 100644 index 0000000000..c8602cac7c --- /dev/null +++ b/cli/tests/testdata/npm/registry/has-symbols/registry.json @@ -0,0 +1 @@ +{"_id":"has-symbols","_rev":"9-4d7f5f8fd9b1e0675ff88a8f88b1f511","name":"has-symbols","description":"Determine if the JS environment has Symbol support. Supports spec, or shams.","dist-tags":{"latest":"1.0.3"},"versions":{"1.0.0":{"name":"has-symbols","version":"1.0.0","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"description":"Determine if the JS environment has Symbol support. 
Supports spec, or shams.","license":"MIT","main":"index.js","scripts":{"prepublish":"safe-publish-latest","pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"npm run --silent test:stock && npm run --silent test:staging && npm run --silent test:shams","test:stock":"node test","test:staging":"node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"node test/shams/core-js.js","test:shams:getownpropertysymbols":"node test/shams/get-own-property-symbols.js","lint":"eslint *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"dependencies":{},"devDependencies":{"tape":"^4.6.0","nsp":"^2.6.1","safe-publish-latest":"^1.0.1","eslint":"^3.5.0","@ljharb/eslint-config":"^8.0.0","get-own-property-symbols":"^0.9.2","core-js":"^2.4.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"e4a5e7028c87d509902ff292f4da3ea45c7c50cf","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","_id":"has-symbols@1.0.0","_shasum":"ba1a8f1af2a0fc39650f5c850367704122063b44","_from":".","_npmVersion":"3.10.3","_nodeVersion":"6.6.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"ba1a8f1af2a0fc39650f5c850367704122063b44","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.0.tgz","integrity":"sha512-QfcgWpH8qn5qhNMg3wfXf2FD/rSA4TwNiDDthKqXe7v6oBW0YKWcnfwMAApgWq9Lh+Yu+fQWVhHPohlD/S6uoQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCraijP8TUgset3RP/0apBanH6US79uNIP6cuXhcXO2XAIgcC1SUirdAx9l8oZX/ALh1KkxopaC+SvsCad2NUwNfMs="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-12-west.internal.npmjs.com","tmp":"tmp/has-symbols-1.0.0.tgz_1474328796481_0.2780582248233259"},"directories":{}},"1.0.1":{"name":"has-symbols","version":"1.0.1","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"funding":{"url":"https://github.com/sponsors/ljharb"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"description":"Determine if the JS environment has Symbol support. 
Supports spec, or shams.","license":"MIT","main":"index.js","scripts":{"prepublish":"safe-publish-latest","pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npx aud","tests-only":"npm run --silent test:stock && npm run --silent test:staging && npm run --silent test:shams","test:stock":"node test","test:staging":"node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"node test/shams/core-js.js","test:shams:getownpropertysymbols":"node test/shams/get-own-property-symbols.js","lint":"eslint *.js","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/ljharb/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^15.0.1","auto-changelog":"^1.16.2","core-js":"^2.6.10","eslint":"^6.6.0","get-own-property-symbols":"^0.9.4","safe-publish-latest":"^1.1.4","tape":"^4.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false},"gitHead":"132fe9ce5c2e443e0570606d4568a242eb86b5f5","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","_id":"has-symbols@1.0.1","_nodeVersion":"13.1.0","_npmVersion":"6.12.1","dist":{"integrity":"sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==","shasum":"9f5214758a44196c406d9bd76cebf81ec2dd31e8","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.1.tgz","fileCount":14,"unpackedSize":15474,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJd0I28CRA9TVsSAnZWagAAt2AP/jan/+oerqF7TJJ1/7C0\nDib5YuePKj9dBimLNNxyNbDCo9+XGPPXoDd5OuGVQ8hePAe0pFxsenbtyT+Y\n+empPCZMrgUJfP7Umo6FYPE7EChp7ES7pPua2oeoKzMhK3xH+sBXj9MQ60Al\nunwIhq1k0idyeHT/9iJegP+wGF5pDe/EZUVbzt9r6JU4WhCNopdta+BZwIRP\nmE/NCdPjZ2jHbjYVJlT7b7uFrA4KXtRXtaJKhy97biek3xfgP4WLKvRaPTKo\nlTXtw/UXk8L42RYfJFlIJ2nyLeorwS6QObZPi9tB8BmIogvrSjthvcVL6DSN\nWJTjxpu43zbRS6mHK5nBAnXcshB/mvM9E8hTxrhG2jfSwAR3RexMLsOqgEsH\nTUTxRdB4Zox0nUD7rSahulvEtjl0bCRwo+oeuNfNDgf34sAjhIsaBMzH1rQY\nuTI8DKB4s1wKbth7YWUvacPay0+vvIbJnq4AMjoIDXezAKdDVIG6zB5rt5vN\nPxaAnRkB2htFP8MEHbqAvhk2ibqGdQvdVW2QPeTjDc7bXeymiXI93nrZyiw6\nq5alXDWN8ubB8A9A7HvKa+XU3dsOoYW/Ypx/h/ca17m0Gc0LwfJ2o0lC4sLQ\n3akrlFDjuBdlt2tWWzCRD5e7av8jJZ5C5ZKBeRkL2Xod4iyMFAD5wtefsXvR\nELxD\r\n=shCA\r\n-----END PGP 
SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCCFSo29lhXirX0Rs+Cuj11qyhheYjpANIVYXAMEcNrhwIgGt53b4yRyWBuEHwcnk8h23iA8dpWARLe+Ojp9kyWxO8="}]},"maintainers":[{"email":"ljharb@gmail.com","name":"ljharb"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.1_1573948860346_0.7408930604026625"},"_hasShrinkwrap":false},"1.0.2":{"name":"has-symbols","version":"1.0.2","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"funding":{"url":"https://github.com/sponsors/ljharb"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"description":"Determine if the JS environment has Symbol support. Supports spec, or shams.","license":"MIT","main":"index.js","scripts":{"prepublish":"safe-publish-latest","pretest":"npm run --silent lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"npm run test:stock && npm run test:staging && npm run test:shams","test:stock":"nyc node test","test:staging":"nyc node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"nyc node test/shams/core-js.js","test:shams:getownpropertysymbols":"nyc node test/shams/get-own-property-symbols.js","lint":"eslint --ext=js,mjs .","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"devDependencies":{"@ljharb/eslint-config":"^17.5.1","aud":"^1.1.4","auto-changelog":"^2.2.1","core-js":"^2.6.12","eslint":"^7.20.0","get-own-property-symbols":"^0.9.5","nyc":"^10.3.2","safe-publish-latest":"^1.1.4","tape":"^5.2.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"greenkeeper":{"ignore":["core-js"]},"gitHead":"32b16a3809db3bbb463df501c3984a333f1979f3","bugs":{"url":"https://github.com/inspect-js/has-symbols/issues"},"homepage":"https://github.com/inspect-js/has-symbols#readme","_id":"has-symbols@1.0.2","_nodeVersion":"15.10.0","_npmVersion":"7.5.6","dist":{"integrity":"sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==","shasum":"165d3070c00309752a1236a479331e3ac56f1423","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.2.tgz","fileCount":14,"unpackedSize":18056,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgOnPHCRA9TVsSAnZWagAA+tEP/296p4+KYc4qJLrX0uxY\nDA8r08WOCWwnIEHw6X1O12XSFB4d0bMTA1vIZEAl/GosbM4wvVdO4JWYaxL+\nAe8m2GxNjz3KXswq4SELhwf8c6xk3Q294qHiUeByfvxK4OwfyKyZ7iI4YnV/\n4jT/FE+AOlNqdAO3izGfm94UW8X1g/6S2X82JTxKngl1/YoAOraEjtD+XF2f\nPTcL4SmyoL7xTTMtPEOSXRAB73Y2KeNbFC8Ee1r/vU0C62MscsFD6Whc3lH4\naxT1ccSUO8YUftLzdMJY3R6jqF8ZKAx7rYdPPDEkm4fJ/MAsw9pQKKf71Dnf\nDAYBAGsFByUVGraRgWXx01w6NnVNbHqBBbLlTK1e0JpCvrSpkFpX/kfERpEB\nhaFUc8n7SDIeAFgBidagI5HYHwbUmlPkZ46NXZcL0xLMx8SNXyRvImsXOR5m\nnV6ReVLSJP/VmTFYjOVuFsteXU2Ot7ZtnHy/eOP8WXYuWn02CuqFC+i8xFxn\nEXCqT3wC1ObdFkV9E2WGVQpB6U2UoJCiRQJO0KjP+arqcS22zL+IXDgGxr/a\nw8f4erWtCTSJquDY9+P7VCSbjGTxfmkunAXUcwMCEiVfLc8wwrg+vHJF3Sgi\n2QqXRdNu1JKHGXfnpnJm1rj2oZV9/5ZjYQFB+CeWM70TyKrYeTgysG0pa8y5\nc8++\r\n=JCcn\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCICmk/GnP0bgLMLKwOsqmi85pgPZF8i7IN0E1zAWz8kQIAiEA7agW7tNwD7vJ3WADcThU+35O/hHKm2DzykwgXewNWdU="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.2_1614443462507_0.38046500905605685"},"_hasShrinkwrap":false},"1.0.3":{"name":"has-symbols","version":"1.0.3","description":"Determine if the JS environment has Symbol support. 
Supports spec, or shams.","main":"index.js","scripts":{"prepublishOnly":"safe-publish-latest","prepublish":"not-in-publish || npm run prepublishOnly","pretest":"npm run --silent lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"npm run test:stock && npm run test:staging && npm run test:shams","test:stock":"nyc node test","test:staging":"nyc node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"nyc node test/shams/core-js.js","test:shams:getownpropertysymbols":"nyc node test/shams/get-own-property-symbols.js","lint":"eslint --ext=js,mjs .","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan 
Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","devDependencies":{"@ljharb/eslint-config":"^20.2.3","aud":"^2.0.0","auto-changelog":"^2.4.0","core-js":"^2.6.12","eslint":"=8.8.0","get-own-property-symbols":"^0.9.5","nyc":"^10.3.2","safe-publish-latest":"^2.0.0","tape":"^5.5.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"greenkeeper":{"ignore":["core-js"]},"gitHead":"444dc14d035df9891743a28cbc5d6ecdb0cb3b01","_id":"has-symbols@1.0.3","_nodeVersion":"17.6.0","_npmVersion":"8.5.2","dist":{"integrity":"sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==","shasum":"bb7b2c4349251dce87b125f7bdf874aa7c8b39f8","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.3.tgz","fileCount":13,"unpackedSize":20603,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJiHo7dACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmoJTg//VKZyTT/GxVMPZNQFC6Q05AQ+zwFmm1ePSsyP3+hebhjz0KMZ\r\nZh8Z3oaFj53lk6p6hl6wJgJh8v+4H8tYi90zewuk2/sv/r4gS8KKOJkEU5hS\r\nExpiO/FlpW1EBW0kHcPOLiYkyvhm5iNX17o0qUXw62EVu9pFdzLuMLtoVch9\r\n0RC3armyFU5YXjpr4lQCbHCAK6okYFFh6BGQYB0k/to/o1YZ3QijFZ7cDlyl\r\nUSH33b6VFsD9gVT6pVYGmhwPfbxrUzvgpmMeJqdL940V3BgVDu9h/lXFDpvC\r\nyf9vmUEiVkcxeiIbJuusCQjMbPT31uYDaAYY+W+v4pbD552jb/7Gm2ttl1uV\r\n1yx9J3M5aKbjZWMVfRinlfGoyUIs0rpxhSsQTp84skwPLkXC1YfODYNhy4+o\r\nVR5GNTIDDOB4i4y7lGVvx7Vd4ySP+Tz9YpmFI9ZrCnEVXggUn9y+PU8R19UJ\r\nrOVAYikVzsyC5PT9PKr2lvITXDb8siGUNt8YmJhZupzv3K+I5sEojmpqCGvP\r\nW748lmzXQAFYUY/BL1/zChahtp6w5mBaX79uF/xO7h/owukFCK2Y1Seyz4HP\r\nFzn6kDQM+TcUD9GlOhy1OsSLVhuK+gbGupNtSG52OaR9JVtmxSzd83TujaTF\r\ncDmeevwmNQi4Gnt70AILlnaCxXanGrp0epk=\r\n=B5Gh\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDwzczMy98ycY151XrPoURp8chFzfXRYegRhpOydLT8UgIgb/6c33xTl81h3biIUwEWPJAVPlOf6E2AEaWCvLmPOck="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.3_1646169820978_0.6668045837242529"},"_hasShrinkwrap":false}},"readme":"# has-symbols [![Version Badge][2]][1]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][5]][6]\n[![dev dependency status][7]][8]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][11]][1]\n\nDetermine if the JS environment has Symbol support. Supports spec, or shams.\n\n## Example\n\n```js\nvar hasSymbols = require('has-symbols');\n\nhasSymbols() === true; // if the environment has native Symbol support. 
Not polyfillable, not forgeable.\n\nvar hasSymbolsKinda = require('has-symbols/shams');\nhasSymbolsKinda() === true; // if the environment has a Symbol sham that mostly follows the spec.\n```\n\n## Supported Symbol shams\n - get-own-property-symbols [npm](https://www.npmjs.com/package/get-own-property-symbols) | [github](https://github.com/WebReflection/get-own-property-symbols)\n - core-js [npm](https://www.npmjs.com/package/core-js) | [github](https://github.com/zloirock/core-js)\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[1]: https://npmjs.org/package/has-symbols\n[2]: https://versionbadg.es/inspect-js/has-symbols.svg\n[5]: https://david-dm.org/inspect-js/has-symbols.svg\n[6]: https://david-dm.org/inspect-js/has-symbols\n[7]: https://david-dm.org/inspect-js/has-symbols/dev-status.svg\n[8]: https://david-dm.org/inspect-js/has-symbols#info=devDependencies\n[11]: https://nodei.co/npm/has-symbols.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/has-symbols.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/has-symbols.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=has-symbols\n[codecov-image]: https://codecov.io/gh/inspect-js/has-symbols/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/inspect-js/has-symbols/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-symbols\n[actions-url]: 
https://github.com/inspect-js/has-symbols/actions\n","maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"time":{"modified":"2022-06-18T19:26:17.135Z","created":"2016-09-19T23:46:36.740Z","1.0.0":"2016-09-19T23:46:36.740Z","1.0.1":"2019-11-17T00:01:00.460Z","1.0.2":"2021-02-27T16:31:02.668Z","1.0.3":"2022-03-01T21:23:41.133Z"},"homepage":"https://github.com/ljharb/has-symbols#readme","keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"license":"MIT","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has/has-1.0.3.tgz b/cli/tests/testdata/npm/registry/has/has-1.0.3.tgz new file mode 100644 index 0000000000..90c33297e2 Binary files /dev/null and b/cli/tests/testdata/npm/registry/has/has-1.0.3.tgz differ diff --git a/cli/tests/testdata/npm/registry/has/registry.json b/cli/tests/testdata/npm/registry/has/registry.json new file mode 100644 index 0000000000..137122e4a7 --- /dev/null +++ b/cli/tests/testdata/npm/registry/has/registry.json @@ -0,0 +1 @@ +{"_id":"has","_rev":"28-a7978fcc3beffd13dd518b65fdb20142","name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","dist-tags":{"latest":"1.0.3"},"versions":{"0.0.1":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"0.0.1","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de 
Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","devDependencies":{"chai":"~1.7.2","grunt":"~0.4.1","grunt-contrib-watch":"~0.5.3","grunt-mocha-debug":"~0.0.6","grunt-exec-jshint":"~0.0.0","grunt-release":"~0.5.1","grunt-newer":"~0.5.4"},"engines":{"node":">= 0.8.0"},"_id":"has@0.0.1","dist":{"shasum":"66639c14eaf559f139da2be0e438910ef3fd5b1b","tarball":"http://localhost:4545/npm/registry/has/has-0.0.1.tgz","integrity":"sha512-Ulo9uG05SN7r55LqJxpU84yWzVPfJGv+GZSaEnm5mKO/jtwV5KODce9bPEDJh1uoYGJpsy5pKi4dQOdDSFzCvw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDfgu84CAfHIBVCPe26sam0TSBJ85TxQTylJ60gIU80NgIgf47PIcgnILXdcQJnGDT+j5EpCT2kN392mwWhJQz21gg="}]},"_from":".","_npmVersion":"1.3.8","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{}},"1.0.0":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.0","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","devDependencies":{"chai":"~1.7.2","mocha":"^1.21.4"},"engines":{"node":">= 
0.8.0"},"scripts":{"test":"node_modules/mocha/bin/mocha"},"gitHead":"3113c5ff93ec8befffd9cf23c4dbab7a9d429c20","_id":"has@1.0.0","_shasum":"56c6582d23b40f3a5458f68ba79bc6c4bef203b3","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"dist":{"shasum":"56c6582d23b40f3a5458f68ba79bc6c4bef203b3","tarball":"http://localhost:4545/npm/registry/has/has-1.0.0.tgz","integrity":"sha512-pZW9uw/9635RZCMUO1nIiZ8Ue8fJP6GlegyXWsFmqp1asx44TMS+K+ffoKnhdFt/piqIpvHG1h6qXmyVEiXCfg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIFCoV79Swa7ogDk0FPLZ8kwt4fygO2aUdChkivfxg5juAiAjZm1GE4R6dpMqLcAafe1QcBaMYvhuK5jkjQfoULHq/A=="}]},"directories":{}},"1.0.1":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.1","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","dependencies":{"function-bind":"^1.0.2"},"devDependencies":{"chai":"~1.7.2","mocha":"^1.21.4"},"engines":{"node":">= 
0.8.0"},"scripts":{"test":"node_modules/mocha/bin/mocha"},"gitHead":"535c5c8ed1dc255c9e223829e702548dd514d2a5","_id":"has@1.0.1","_shasum":"8461733f538b0837c9361e39a9ab9e9704dc2f28","_from":".","_npmVersion":"2.11.0","_nodeVersion":"2.2.1","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"dist":{"shasum":"8461733f538b0837c9361e39a9ab9e9704dc2f28","tarball":"http://localhost:4545/npm/registry/has/has-1.0.1.tgz","integrity":"sha512-8wpov6mGFPJ/SYWGQIFo6t0yuNWoO9MkSq3flX8LhiGmbIUhDETp9knPMcIm0Xig1ybWsw6gq2w0gCz1JHD+Qw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIGwsArRhXrhj+qjKhTjYer8IcOloz5NSf90mKsSbjDTCAiEAikyzkXCsz1Xr2d+L0/QJVYD+vLNZUGu/gQpngIRstPM="}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{}},"1.0.2":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.2","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"license":"MIT","licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src","dependencies":{"function-bind":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","eslint":"^4.19.1","tape":"^4.9.0"},"engines":{"node":">= 0.4.0"},"scripts":{"lint":"eslint .","pretest":"npm run lint","test":"tape 
test"},"gitHead":"5becaf997373b548e790e8c5ec0b718e20da6097","_id":"has@1.0.2","_npmVersion":"6.1.0","_nodeVersion":"10.3.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-D5/WxwX+SrGfs/fiQn34RAoIZkCLJBDEfBWS1kmTI6G/1mtjhxTBiIiJi8EsKhwaQqKqj7lpKOi3i69tg3P+OQ==","shasum":"1a64bfe4b52e67fb87b9822503d97c019fb6ba42","tarball":"http://localhost:4545/npm/registry/has/has-1.0.2.tgz","fileCount":6,"unpackedSize":2854,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbEwZkCRA9TVsSAnZWagAALQAP+wSiRGrAWvCQnVLAKjxZ\nYQebEuC2gpCQX1p/eG0RGxdRC+rC1+LOG2CJR41wtMtXXIcnC6wrXb2TnHMw\nyHMqSyQIjRxVAmR2CPOlh36FXAp9efcgPKyTAfXEw0/3iZMjJ0jcZCbaDckQ\ngLFB0fkv1T0uzudvjUqXVbSXihMe17qi3/vXQbQNtnrFkHfGLstp+cdXN33+\nh0Hwv6FsE+tdfRM2q2xhYzIvDbDQ3mGCe0nsMYj3nRFoSOsAnftsOrZnIGr0\n4VuFxe66oYGaokH/GI/JR9AikEj9iEizknW4TB3d9KDSHtfVbsHeptDQ/CdA\nXUpyKlALK0VZvHGC+lKQsllCb4D1uJ6isnoqL3rV20/v1X1tjKIm9/P1tBsB\nKGS7mMlR+vSFzB8iGNhYVvE5p7Du5FXJElGI8qj0AjCaCbvech30WbHyIJlx\n26/ywIE5/m+HJ+wnFAUDW3VQWakzZDPqhyc0GVo+yCixxjLGv++UslsdSncn\nx9g0k5l5bGQ2SCn8XbZRCG/jHm+bdD0NOjqvaE7bu7RvGPfniqZDpH6TzJmI\nlb55w8iPpR+GZ5vhypDdpQBT//ty33rNHb+Q9U4e9ZHnQNnwK15WaImfE7J5\nxfmA+8JRj3FBiMGpKU++NHOtI9Y+aK+CqCmVwlhlkpZT0z6oQphcsl2gqe4F\nuujs\r\n=q25B\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIFVCO11i1G9/dgjkto0mkv30OIZOjyqKhV8oPKebk69aAiAipxVf8Vs0ptvMfLEdGwLLLc7iQxf6hreeLjoOsrhRgg=="}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has_1.0.2_1527973476308_0.8578208238940064"},"_hasShrinkwrap":false},"1.0.3":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.3","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de 
Arruda","email":"tpadilha84@gmail.com"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"license":"MIT","licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src","dependencies":{"function-bind":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","eslint":"^4.19.1","tape":"^4.9.0"},"engines":{"node":">= 0.4.0"},"scripts":{"lint":"eslint .","pretest":"npm run lint","test":"tape test"},"gitHead":"4edf96f2dec87ad6b6e68482e8f6d7c8eb7e07e6","_id":"has@1.0.3","_npmVersion":"6.1.0","_nodeVersion":"10.3.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==","shasum":"722d7cbfc1f6aa8241f16dd814e011e1f41e8796","tarball":"http://localhost:4545/npm/registry/has/has-1.0.3.tgz","fileCount":5,"unpackedSize":2770,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbFWeCCRA9TVsSAnZWagAAGqoP/3/GpNmBbr2IzYG0v9Rg\n0jDcyA0p95I8Tc1GbOsB0YWMY+VMs5I3tggHG4yjB8OskeR7GyItfcZpe9b5\nEGNr2Yq/eOKI602MTnWzHaAczxF887EVcXDISg1qDlzjTYWFMNms5jDxH6OT\nKD0SLHE1qRCASPxNZsJLoxT/dPaVfRC5QMIz8msEaI+qUu8p54cO0/DSLSlT\n5kCGKA5CfbfIODAmyvddsKgOW0at16XJ97f+qHhrI5q6HoYdM2jcLzPXxPiw\nSgPKXICus5TjFvRdBoOXa2diz9urPjQGzVsGoKs8W0Z9EeoxpiJInVRN1Hec\nabW1NzLvINbuFcG1I/CvNVXKGKwIafKg25ba/T/PHa/5rHY2+S4Y9Mj0SLFv\n8V7HWSQMGaG86kZjH9vgd7MPP39lBZXw5msjftiRVxiDiZYutbGzBymwHcYd\nzPVrzCfdNg9o2OsG8mjIBGntCwz9/Yrx7npK9mP97nZQ4EDDoDoCga5efkMP\nmT02Vru1cCdHOfRnM/tlr4Onf0umRhgzUfsbjdSsrGapGCTeVvBvnm6XKznZ\nw9HWfGyNaZiT1J0/pmnRMwxEp/xcKMaAOa1c2pisTGpjkbQloGQx77IwfBiO\nAVBfV3yBDWCgutiwEb4zg0RPPjlS1RCJDMzg+CsXT1u9H6St8MUtuBikkNr4\ndlxV\r\n=Dlp+\r\n-----END PGP 
SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDi2aj8+sqrebhyEkv7F23LOzXMTe0zUrFZqtxhj4ubhQIhAM8njQhxRA7zu5nYQHE3+EN4rJa5+9EpwlJiBeEkLn54"}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has_1.0.3_1528129409940_0.08921093934264301"},"_hasShrinkwrap":false}},"readme":"# has\n\n> Object.prototype.hasOwnProperty.call shortcut\n\n## Installation\n\n```sh\nnpm install --save has\n```\n\n## Usage\n\n```js\nvar has = require('has');\n\nhas({}, 'hasOwnProperty'); // false\nhas(Object.prototype, 'hasOwnProperty'); // true\n```\n","maintainers":[{"email":"tpadilha84@gmail.com","name":"tarruda"},{"email":"ljharb@gmail.com","name":"ljharb"}],"time":{"modified":"2022-11-08T10:38:33.269Z","created":"2013-10-08T00:54:01.609Z","0.0.1":"2013-10-08T00:54:07.309Z","1.0.0":"2014-10-07T18:41:58.615Z","1.0.1":"2015-07-24T08:49:30.633Z","1.0.2":"2018-06-02T21:04:36.393Z","1.0.3":"2018-06-04T16:23:29.998Z"},"author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"users":{"getify":true,"bradleymeck":true,"akiva":true,"rsp":true,"nickeltobias":true,"tobiasnickel":true,"ahmed-dinar":true,"maximusx":true,"tjfwalker":true,"iori20091101":true},"homepage":"https://github.com/tarruda/has","bugs":{"url":"https://github.com/tarruda/has/issues"},"readmeFilename":"README.md","contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"license":"MIT"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz b/cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz new file mode 100644 index 0000000000..3e52f10dfe Binary files /dev/null and b/cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz differ diff --git 
a/cli/tests/testdata/npm/registry/object-keys/registry.json b/cli/tests/testdata/npm/registry/object-keys/registry.json new file mode 100644 index 0000000000..bbfedec12b --- /dev/null +++ b/cli/tests/testdata/npm/registry/object-keys/registry.json @@ -0,0 +1 @@ +{"_id":"object-keys","_rev":"104-fff9f09b12add81f4389e3e50a2ff098","name":"object-keys","description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/es-shims/es5-shim","dist-tags":{"latest":"1.1.1"},"versions":{"0.0.1":{"name":"object-keys","version":"0.0.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/objectkeys.git"},"keywords":["Object.keys","keys","ES5","shim"],"devDependencies":{"tap":"~0.4.1"},"testling":{"files":"test/index.js","browsers":["ie/6..latest","firefox/3..latest","firefox/nightly","chrome/4..latest","chrome/canary","opera/10..latest","opera/next","safari/5..latest","ipad/6..latest","iphone/6..latest"]},"_id":"object-keys@0.0.1","dist":{"shasum":"ab917307b1042981453e094c41049246e99602d6","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.0.1.tgz","integrity":"sha512-/aM4V/OT388JOkoQJ57Gxeg43O8qI89rybO5CgLo1i4Z1rI/LXnC8RTdZZxmpxC273gOECNPb2qW9jerijQAwA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHpOEk4wRRe8+XOD49ps98iZzl4U4078lTFmqufTj3eQAiBw262wqFt1AZUWcLtoA6F+KvHVEkwE97aLaycw/EDPYw=="}]},"_from":".","_npmVersion":"1.2.15","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.0":{"name":"object-keys","version":"0.1.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case 
Object.keys is not available. From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"devDependencies":{"tap":"~0.4.1","tape":"~0.3.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.0","dist":{"shasum":"f60a5d0b3f878089a4b9645b26e43df09436dbb8","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.0.tgz","integrity":"sha512-nmv/hFMWJmfEUuMUjE2m2ZDmwi4Q9RDeZto0S04PfD8wnwINgJT5Raib18UT/EAa/A3tIhpEPHewLX83OCRSzQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDvuUuQtK1x8psnRhTMpgczIFTWOSlTu0hV+851vuWrKQIgYdB+mrlzs3w/Bw74j9ju2BZEFbhiDMpRVvbTsHBJodc="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.1":{"name":"object-keys","version":"0.1.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"indexof":"~0.0.1","is-extended":"~0.0.4"},"devDependencies":{"tap":"~0.4.1","tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.1","dist":{"shasum":"e35f1c8d9cbc5fe503c1b13ad57c334e3f637b3e","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.1.tgz","integrity":"sha512-0YAQMhYdszhy3qw0CZHKp2/+pw0VIBSbb5G5oMItAXW384Qbi6XRg4J8Q9O8kg43WVcFyFUT+GCCTt/rz6890w==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBRuwzoWqmGMBA4CCDak783BfOsQ6ycfmHlEnZqC5gWnAiAKsWT0JVhP/+dBICcDXulO75XJTJjG4yGibvL1UpxT6A=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.2":{"name":"object-keys","version":"0.1.2","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"indexof":"~0.0.1","is-extended":"~0.0.5"},"devDependencies":{"tap":"~0.4.1","tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.2","dist":{"shasum":"df74e8662eb0e8b5ee64fc8eda750c2db4debc7b","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.2.tgz","integrity":"sha512-WMWSee5aYXB5Iu7bfsD3wSdO9TaYqwrIfqHWoQQHIx3XbvhslTBAyqY+tOp9DpaNGjE75vM9IhwMFbDcEs0Ntw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIHy+GeOlh7SUtU0NRAUk02ZvdJF+bUX7/XN9IrbewUc3AiEAmkuTbEEx+bYIAsEWhbMxFynWo+j5mtMl0weHc2vIqvE="}]},"_from":".","_npmVersion":"1.2.15","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.3":{"name":"object-keys","version":"0.1.3","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is-extended":"~0.0.5"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.3","dist":{"shasum":"201972597dfdbaef2512144a969351b67340966d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.3.tgz","integrity":"sha512-P40wNJQL4FoACelJjjI0N0iO3oRfiy0Pvym34FvBmJbArXAmIj0u8p8dLPFjKtN3Bikqb2I3kYJLjS2RnIP2KQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIEr/gqaTEuK7tXhDHdY5SxqAScNngeW1qXRGkAYdsqLEAiEAtFJBjZIJKSFL9yK6M4lUIVRqPyeLc4o5JBFufjryS9A="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.4":{"name":"object-keys","version":"0.1.4","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.0"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.4","dist":{"shasum":"094b203cdc23c0d61b04f13cc8135fe964cc314a","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.4.tgz","integrity":"sha512-EhLn1BDThRMKDUnB4a9Pu99R0V7FvciLi4M2Y7fyoa/qnl202sd4RhLuYCL6IfR0f133TaWpP4JgNPRpMBac6Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDCCt8XSKs0l1ykra1eRaTbBjfgyO/RAqXJZUWBu0LmewIhAP0IMkVWwACYSzm5FboLyb096r0WXAhEQaQS2m74C3E7"}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.5":{"name":"object-keys","version":"0.1.5","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.1"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.5","dist":{"shasum":"ff9b7518e468804c4066ac553c5d452ec8ffbb27","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.5.tgz","integrity":"sha512-FWlklzi/z7zzTVU/hnBrUUyiMRw894gIwpgUCkeFqWSXD/m3y7KUzbcWe6oJWPr+PEZ/ACLa/lDWLIQsYmY0ng==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBdydC523dr2nuIF2D77vsdvVS6m7etNCltX7XRKBrgKAiAzGtetgYZd9SXs1ixPt+EqqAMeabvo92SNOZHVRFqXbQ=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.6":{"name":"object-keys","version":"0.1.6","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.2"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.6","dist":{"shasum":"2d8d2c3e3f57979d08e56c5a72532750e8fc9aae","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.6.tgz","integrity":"sha512-0EVnJ1F9QV4PMwv/hVwr1Ww/4AmGuHC4Wk1TlkWzUjvxZOZsD/I3jYrP3Cj1wX0C4i5fmAlopJdnQ0XiHOup+Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIB42TE8/g3tYNAcNlrGjmxVF0slnQzgQqHN6Ozb32j5cAiAb8eJ+WLFiVR5jEpIgg7FddzSK6CcDOrZS7fhOEaEVAA=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.7":{"name":"object-keys","version":"0.1.7","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.3"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.7","dist":{"shasum":"fefce99868aeb040f357b3d3aa29ad26ec30bbd2","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.7.tgz","integrity":"sha512-q2+Sfmxqz5jDT7Ri0GZmZog2DCmsYzUo39+ESQFgE6AYSTITCZnrhp5thlTTWKxP0ilN23pvE5voVH2SAQp73Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQC0ZbU1lhEEgxMCvgRZhnhW4CeB2kRvMvEeAQGqoxfxHgIgHu6pVbufE1cs9nnihYjZrfi6oEN4sQDd1+IVeMnv9us="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.8":{"name":"object-keys","version":"0.1.8","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.8","dist":{"shasum":"d40164df81104b0da49edfa6aba9dd29eb480293","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.8.tgz","integrity":"sha512-QVLwfAl2DJtsOVW8BXxa8g9gjzqwAJijFj/hTCOknQ5uIfonbZIEeX+asYCgq93HYkfcMkWL51H6z3XLwALVaw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIEstxBPiFXo98Vg3f5JaR5PY3HzLTVOKyPP3xJxIHeCHAiAJIjSWZJuR7zKuGcfALY9bv20LttxAEtWMngbhCcVELA=="}]},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.2.0":{"name":"object-keys","version":"0.2.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.2.0","dist":{"shasum":"cddec02998b091be42bf1035ae32e49f1cb6ea67","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.2.0.tgz","integrity":"sha512-XODjdR2pBh/1qrjPcbSeSgEtKbYo7LqYNq64/TPuCf7j9SfDD3i21yatKoIy39yIWNvVM59iutfQQpCv1RfFzA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDHqI4i8TCwYU0W7hvKd5jX2WFPHuJ0kESFyw/as3++xgIgaKT/CU6g2wUXjGaGccKcj5U4akUaDasKizs8P3yDewE="}]},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.3.0":{"name":"object-keys","version":"0.3.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.3","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2","indexof":"~0.0.1"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"_id":"object-keys@0.3.0","dist":{"shasum":"4ce2945fee6669cf98424bbaa0f59c244ff97f1d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.3.0.tgz","integrity":"sha512-5NWmqk9N0NPSzhUAjJwjA1fbpYkmCyc3DRpIObOIsOTEz98JZg8fiJUbnxKofPrRXXW/J5Sh0M4pku7my7KHWw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIDt2X4Q2m0E/f+ITcYDQdhb9WZQobOe3l/s8X+WttvWEAiAn0ThWjlLuWOUW3FrAinp3k15grW86MXXMLNCLKBiOpg=="}]},"_from":".","_npmVersion":"1.2.21","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.4.0":{"name":"object-keys","version":"0.4.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.3","is":"~0.2.6","tape":"~1.0.4","indexof":"~0.0.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"_id":"object-keys@0.4.0","dist":{"shasum":"28a6aae7428dd2c3a92f3d95f21335dd204e0336","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.4.0.tgz","integrity":"sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHUDMLh3fWS5OpydQINZqo8WFrJ3lqEJiDuN+YFRsxG3AiBbbYCG5+dD0UXyu+R6+L4BfEXZJeODELgzbKLRDEEWOw=="}]},"_from":".","_npmVersion":"1.3.5","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":""},"0.5.0":{"name":"object-keys","version":"0.5.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js","coverage":"covert test/index.js","coverage-quiet":"covert test/index.js --quiet"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.2.7","tape":"~2.3.2","indexof":"~0.0.1","covert":"~0.3.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.5.0","dist":{"shasum":"09e211f3e00318afc4f592e36e7cdc10d9ad7293","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.5.0.tgz","integrity":"sha512-2GU36PPj0BVaGl9JDw1zY5vkLMV1hQ1QtI+PoBq7f5bZKY2j/7IO0uQDv0UcuBhimMYnditq7dz+uO9C1TXV4w==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDMwauwpEiIxU1RlG+eAIRnOLrboadeDQRORnvEQufqswIhANVI50TQxUwOhs2291FQ2NIdlE1uCKDjOx8jTVsGXZEn"}]},"_from":".","_npmVersion":"1.3.24","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"0.5.1":{"name":"object-keys","version":"0.5.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js","coverage":"covert test/index.js","coverage-quiet":"covert test/index.js --quiet"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.3.0","tape":"~2.10.2","indexof":"~0.0.1","covert":"~0.3.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.5.1","dist":{"shasum":"0eb20ffa0ce7c01977648681b42c515f297d2cc1","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.5.1.tgz","integrity":"sha512-VVh5OqHlY0N4Hueq9KteojSoj8BmEZeKC+nFyAmQFGF37dJSbcFB4jNhV7+6Xnn6t4t3jh0P0Cuy0hEA+xq+Mg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDoseRCnmntQ8ISi56+7YstplMr/rq01BE5OsLrcg/b6gIhAJKIVTCp4DmdXm8LavjKRr4lG/KH6m/RyVA72NGkvDSc"}]},"_from":".","_npmVersion":"1.4.3","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"0.6.0":{"name":"object-keys","version":"0.6.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.3.0","tape":"~2.13.3","indexof":"~0.0.1","covert":"~0.4.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"3cbf74b330bb04f263a96d59925db5704c08968c","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.6.0","_shasum":"4638690dfaf1e65a63d43b5855d2f6ce04aeef6d","_from":".","_npmVersion":"1.4.21","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"4638690dfaf1e65a63d43b5855d2f6ce04aeef6d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.6.0.tgz","integrity":"sha512-NwTyBxMHbTVCd46WsQlY4WMwYoJ+PXkIkU6x/S22usMJQewtKMrwPAV9jtB6HBXnL4+EzaXQrtllK0MPl+V4PQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIEAekrpsVGBhFATo6EM1rcjVMSHjzwnEm8OSWZY5YhYqAiEA1YnRXNwRpg9sHlQweTFu1/6zpLR4rTQ50u+odWruJ+o="}]},"directories":{}},"0.6.1":{"name":"object-keys","version":"0.6.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.0","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"cfa534edc801eef5a3fd01512b30b025d177a79a","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.6.1","_shasum":"ed8d052b3662b093c9ee00152c259815c0db4d3c","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"ed8d052b3662b093c9ee00152c259815c0db4d3c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.6.1.tgz","integrity":"sha512-yFH+vVBczUKglNkPAb96wIWXv1AqdR4PCdoL8fYt6+uqm/Ucn4G7NVOgI54GG6Pai8yswIqzZIz0kLq4/3egQQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDhFPCvJ7VZ/jc+5VfjHDrTEIbXXMDA+p7qQPbB7D7QhgIhAIpfJj4sqHZfgrQO3bYBRwxqahD1d23Zea/rSaIZhqJ+"}]},"directories":{}},"1.0.0":{"name":"object-keys","version":"1.0.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.0","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"f78356a5eda9b059acdc841607edbd3940aed477","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.0","_shasum":"1b66cc8cafc27391944098216726f746b15c2a30","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"1b66cc8cafc27391944098216726f746b15c2a30","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.0.tgz","integrity":"sha512-7zE2Pyy6jZ30PT8LSB/J+WfBvd8gw6PClm9Ilhq/S42rZ32NiDgBD0GtBDcmeObLtRIAC087WNyCW4QLAF/F1A==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQD4P78vN3qENInRoJidkqsanNRVgDGq1o1IDbclEaeAugIgGF9eS40md3HbBTo2TP+LbsCZhL+mjAqG91O7hwXfVPY="}]},"directories":{}},"1.0.1":{"name":"object-keys","version":"1.0.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.1","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.6.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"2ecbaaa0405c2f03e8b669ccf4b70376318a8f8b","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.1","_shasum":"55802e85842c26bbb5ebbc157abf3be302569ba8","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"55802e85842c26bbb5ebbc157abf3be302569ba8","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.1.tgz","integrity":"sha512-DsJ69TA3wPICBmxYj6rij6uGKvKb9s2mtebzhuN/eI1GabJ3xC7fZ7PWjW0GS06hSclD0GxKGGAHQo5P7R2ZTg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQD4gYBznqPY/77jmrmzAiN5nRqHR25mrZuveDAAkyBi/wIhANnwIwT2H5eNkTIWUt3c+j4p5ovDyUM83vj0pvCHuFBL"}]},"directories":{}},"1.0.2":{"name":"object-keys","version":"1.0.2","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.5","is":"~2.2.0","tape":"~3.0.3","indexof":"~0.0.1","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.10.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"06f2d46a85a0be12fc9e0377e3ce7bef32be5eb3","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.2","_shasum":"810205bc58367a1d9dcf9e8b7b8c099ef2503c6c","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"810205bc58367a1d9dcf9e8b7b8c099ef2503c6c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.2.tgz","integrity":"sha512-QaJ3L+WfJ2mCirdIvDbXRW8q76+WnsITenRbpAAJ2Z/fPcKaXvRAn94rv1YzwUGqxj/m08vu3HBvR6WdxXXRsw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIAh4SRfmAsWSFsJGW/zwtEkL5i6WpjFvxOkmwnfDW/LTAiBr8+G5luLEkszDkl+ANwlTeCyO/PceL8aRv/UO/XqUtw=="}]},"directories":{}},"1.0.3":{"name":"object-keys","version":"1.0.3","author":{"name":"Jordan 
Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.5","is":"~2.2.0","tape":"~3.0.3","indexof":"~0.0.1","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"f0fc8ccdf81843fa7aa88c85777cf717c3ead129","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.3","_shasum":"1b679dbec65103da488edb32f782bd9a15e3de0a","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"1b679dbec65103da488edb32f782bd9a15e3de0a","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.3.tgz","integrity":"sha512-C9AHglIN4DeikXJitZAmcls7Ics4QJr0QnVXFtK4wVly8zo0udlW96Hfw0kLQ0LqiE21Z2HgBMIS7C6/s4L2Tg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIGYBo/Zp8ilkQTBiGuDQvrpFHmLCZGxdimx6CQPuVK4PAiEAofwl6l/SVKlk89+QpAy6VRVczBPULX48M5hGH78V7Vc="}]},"directories":{}},"1.0.4":{"name":"object-keys","version":"1.0.4","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.1","eslint":"^0.21.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"fc869b3088d6047bcbf42e534304ffe034b06cb0","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.4","_shasum":"b115f96a7ee837df1517fbc5bd91ea965e37685c","_from":".","_npmVersion":"2.9.0","_nodeVersion":"2.0.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"b115f96a7ee837df1517fbc5bd91ea965e37685c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.4.tgz","integrity":"sha512-+MtQIw3zdFntcjAKeWGPRbCj0SZeCSN1Yhp1jAI1GmPgF6wCHTJkhJgfPE3kHgryFpX2MgFWQLcKsqHlSlPD9A==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDKrpjpmWkxUnjvqfhVyWqDoQh7rExWokqaM7GWI3do6wIhAONj4OP7k0W21ye/Mzi92MX8ageuQydTsWFK4cB75Zik"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.5":{"name":"object-keys","version":"1.0.5","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^0.24.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"a6fb624febfdbde087b5637bedd5233054520b18","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.5","_shasum":"84fe12516867496e97796a49db0a89399053fe06","_from":".","_npmVersion":"2.11.3","_nodeVersion":"2.3.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"84fe12516867496e97796a49db0a89399053fe06","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.5.tgz","integrity":"sha512-ads8edXgDSXcILPLzQa0i8HaXMSPoCj1SYW8C+W+fL8cTIcpxp8M3/wFu4ODfegdiKP9LEatqLbcd7noEtoL2g==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIAdMlFebBqVB9aq9/VQPFrEI72Ai91euXsNekOZS67lKAiB6/Y3cE8bKzOVtc/erkWzyFt9rjyc4HBMVfRy2SN9V2A=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.6":{"name":"object-keys","version":"1.0.6","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^0.24.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"3b0fbe74b40b5d78661461339f09a82f45a0a345","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.6","_shasum":"f910c99bb3f57d8ba29b6580e1508eb0ebbfc177","_from":".","_npmVersion":"2.11.3","_nodeVersion":"2.3.3","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"f910c99bb3f57d8ba29b6580e1508eb0ebbfc177","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.6.tgz","integrity":"sha512-JFO9tB3N/R17IA/IVKb3K0amIIpaR5T7CSg9z47uRXOFv9Kw1LOm1t3NB6FjosNIuKqNwpExODZqNnJb8zIZgQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHoYa/sXYRU7F0BxiOvK1r/U1E2lj0iXpqZwIHQMjc29AiA66y8fVi2dNIyHyihDxm0gL/8pAm04MHpq25c6K5c33g=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.7":{"name":"object-keys","version":"1.0.7","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^1.0.0-rc-1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"c0c183e0aaed86487218f46127fcebec9258e84e","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.7","_shasum":"e7d117261139d6acac8f0afabf261d700ebb0b93","_from":".","_npmVersion":"2.13.0","_nodeVersion":"2.4.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"e7d117261139d6acac8f0afabf261d700ebb0b93","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.7.tgz","integrity":"sha512-SLdJAA8lTumufd2VJDOEXwfb81eE/ujQccVmFsofTnoPv1RvHqSlrMjDkq06lTaqnJxCDaY3d8rUwUJIeFk5sA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCkZcFwsc8+AngNfDPYrHnGBGkQHFSyW+hmnv8O33Ng3AIgSwHqYxg+rHqOlyoYbs/OqwzctpcSXfGjgbwpsa34dDw="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.8":{"name":"object-keys","version":"1.0.8","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.2.1","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^2.3.1","editorconfig-tools":"^0.1.1","nsp":"^1.1.0","eslint":"^1.6.0","@ljharb/eslint-config":"^1.3.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"f094a4832583959d0a0a132ea80efa2f44a5d58e","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.8","_shasum":"9a71ce236e200a943d7fbddba25332fba057c205","_from":".","_npmVersion":"2.14.7","_nodeVersion":"4.2.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"9a71ce236e200a943d7fbddba25332fba057c205","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.8.tgz","integrity":"sha512-yMyMdHyEjnPMnRpKnwOQLtTcS/2DQCItvwFh/A0RFvorh1aWqsIO46ZzfkaT0CmPXcKjCtrq7DhZo+unsR99hA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFHd2Fd8fYxiukgf0PCCQ4pAuKxhEwsecMScXYwTs/ntAiEA4b6t2m3zTXFek1FJZm3TEuhYZFwPhYCknyORUWHNXXo="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.9":{"name":"object-keys","version":"1.0.9","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.2.1","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^2.3.4","editorconfig-tools":"^0.1.1","nsp":"^1.1.0","eslint":"^1.7.2","@ljharb/eslint-config":"^1.4.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"e4331f920ff49824ad999b3449005349e31139f9","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.9","_shasum":"cabb1202d9a7af29b50edface8094bb46da5ea21","_from":".","_npmVersion":"2.14.7","_nodeVersion":"4.2.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"cabb1202d9a7af29b50edface8094bb46da5ea21","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.9.tgz","integrity":"sha512-xRGFTKkyFuP9AilRkEw4KfMPqaD9spcc6PVVPiOxAau61l+m/4zHUW6crXGtSt8lBfXD2vgnqNFFY8cr8NOBTQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBJQxUXhh6hPZURj6mH0fOxfW7ePLUq0TXl/tfNBFT3aAiAsoqWESRjpTfRmFkKnYvuJgqI1ovXv42EHD2LzxAs71A=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.10":{"name":"object-keys","version":"1.0.10","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.6.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^3.0.6","nsp":"^2.5.0","eslint":"^3.0.0","@ljharb/eslint-config":"^6.0.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"a12ae2c01a443afb43414ab844175d2b6d5cd50a","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.10","_shasum":"57e67f7041b66d145c45136fa8040a32717f7465","_from":".","_npmVersion":"3.9.5","_nodeVersion":"6.2.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"57e67f7041b66d145c45136fa8040a32717f7465","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.10.tgz","integrity":"sha512-fKnqZ/+BvdAsCto14RQRo1q0W9ObXswVgq2Vc/y/OQXfGVom9jEJ193KpHjgkO7QJNCxy8hBWTDBYUsSBExYFA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCUJa2v4dH/fEuWBmVFTYGyt6k+uRH9k63SnIhS07UPggIgarq6DuufB4ttn5xtTjnxChN0qzqHsyhVN2mhkaF1IBk="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-16-east.internal.npmjs.com","tmp":"tmp/object-keys-1.0.10.tgz_1467655315616_0.8326317083556205"},"directories":{}},"1.0.11":{"name":"object-keys","version":"1.0.11","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.6.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^3.0.6","nsp":"^2.5.0","eslint":"^3.0.0","@ljharb/eslint-config":"^6.0.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"3f869cc4b9f0f0489b2af7e80964f90d6c4403a4","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.11","_shasum":"c54601778ad560f1142ce0e01bcca8b56d13426d","_from":".","_npmVersion":"3.9.5","_nodeVersion":"6.2.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"c54601778ad560f1142ce0e01bcca8b56d13426d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.11.tgz","integrity":"sha512-I0jUsqFqmQFOIhQQFlW8QDuX3pVqUWkiiavYj8+TBiS7m+pM9hPCxSnYWqL1hHMBb7BbQ2HidT+6CZ8/BT/ilw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIGVatYL5nqFjnyTPO0/FYHebFDZUNL6H4evuOwJXOd20AiAVQtHX+GpfjVa90v7F8y+Z0Nkf/bKGSVeNf/Sqys+gRg=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-16-east.internal.npmjs.com","tmp":"tmp/object-keys-1.0.11.tgz_1467740975903_0.8028358130250126"},"directories":{}},"1.0.12":{"name":"object-keys","version":"1.0.12","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","covert":"^1.1.0","eslint":"^4.19.1","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.2.1","jscs":"^3.0.7","nsp":"^3.2.1","tape":"^4.9.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"e3acd057c5b7be1029b3b9f6f69133292d77d558","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.12","_npmVersion":"6.1.0","_nodeVersion":"10.4.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==","shasum":"09c53855377575310cca62f55bb334abff7b3ed2","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.12.tgz","fileCount":11,"unpackedSize":28233,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbKChkCRA9TVsSAnZWagAAVoIP/1jqNQKOVOx4jOpaSivw\nSRxdpzt236t7t9D5YnDgXhT5hrfbfajBz59CVtikezyYS3+ccurC/M2fTHno\nAT8VGxLmemptEoH7woqX27cFdWDFqyMlyfYZkC29w59+cXq44+J3+VFtyd8s\nV09lwj934D/DxdSCKZ/BVuZoffigow37yg7kIC9+VVS0em2XG3W633V8LQAF\nrRiUVSk0ne/BlO1TWV5fTmPQwranmUMnodZAqarVn2/vl0wN8rCTM9qGHdGH\nYWJNQC0ed73ZWOJN+C+OeQqtRdmjS/s5MbLrnMC7JdSQqEDFr6cuLf6TXYa8\nQmy4MCwN7IN1+XeUbDLsOQ1NdjIg9TVlybL5HjKiBjL5FYcjiZQHvtLYTOLa\n/x4eteDcVF8WObCLsUfrB3XuwH2sJX1tACds7IalOS0WLR2bHeBGjejQFyKK\n6k8strtCWMxaWt/nRSTOpZZfMz/HMtHmqVJ3C/VZGYvoexpt6EXqZm4Yemtx\n7AS82sEnfnKF92m/EXZbdP5Gz0fnAksKtzOncsFCOk7qomkD0PLNZkhIadfX\nWTOz9FW+gQNA+im76POpLk8EwQBFYIfTQesLVYB243Z3jH6O5EuTTkzclkWU\nVNXHIoouGL3S+1gPactA2lr6PM4G0hTkco98HSEvHMZpdgoHtz4Jx8xMtX6N\nGJLk\r\n=/f2l\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDBTqK5jg1fMSZC7viYJn8AgFqfaNKFJrUlPYMQVnIReQIgUtgVmrbrFyXy3Qupn7eBPqwBkzKQXN6D+aQtkn5/tEk="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.0.12_1529358434802_0.4383878957043432"},"_hasShrinkwrap":false},"1.1.0":{"name":"object-keys","version":"1.1.0","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent audit","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"eslint .","preaudit":"npm install --package-lock --package-lock-only","audit":"npm audit","postaudit":"rm package-lock.json"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^13.1.1","covert":"^1.1.1","eslint":"^5.13.0","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.3.0","tape":"^4.9.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"abd4ff039708a0166a57388b348730cbda4a1593","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.1.0","_npmVersion":"6.5.0","_nodeVersion":"11.8.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-6OO5X1+2tYkNyNEx6TsCxEqFfRWaqx6EtMiSbGrw8Ob8v9Ne+Hl8rBAgLBZn5wjEz3s/s6U1WXFUFOcxxAwUpg==","shasum":"11bd22348dd2e096a045ab06f6c85bcc340fa032","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.1.0.tgz","fileCount":11,"unpackedSize":26395,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJcYQZGCRA9TVsSAnZWagAAu9oP/3ed1S6D9BtHLFImT804\nudV4Mm/anfUE0jBXkJXLab4+qVIgkkqQOnEIg/Wl1ea/UHN/r21cRAaxVcdx\nqH6c5bcRpIr5gWNpcXnCgUVzOZHz2woa0jgZ4YQsAvt67m1lPAZBSppp98tx\nvw//RLPRQB0MNppFXRMLQEMABvIyP7bvNy6wK/SHp21hLuxzKdEmkwYwgKDN\nlhKdtpJQDrRjAmD6w2i8GW71S4K+Kis/ugzDGYX5eM4iwxJYQaKhtsgRJe34\nohUKHsVDe54wAYo4ZfJp+oJFLdJoY5DtYZM8VRkFlIya8X339oietwezlVoB\n0t3/8LuwJJZrG3tprTl6ek4d2AFL2Mf/xJhxSwKLY8B6UpFD+yNn4PuUt70u\ndea9T4zC9KE0swRdxLNRkKDTexCMlq3I3LZ28rH5MyPfcgdZMs1v9tGlaWhL\n7lXxQ5DIq/MSoMDKgOh8T0OBbvapnIfb6f1cqJgreZ0W0mpTm2Fu6joBfJKp\n7rp4erjunUXk4vNTsUxB9R/DMJgiefPRf7XPMG7evhO+MLlh3380Hf5DTf6E\nAqLqOZnAYLBzjigMxK8C3F0jxLVTFuwjA0Z3qejpWgwdl7RF7qyxekeSI1/g\n02ewe6HLdJilSbcev4i1zF8IKjGXFMoCWkVhpiz/q4DlNJbIv25C1482NbrF\nw+qE\r\n=/QeE\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCICDx3IvajHE/2cYYP/a/E25efhfP7DH10u0fg5s8927jAiEAmE9cIJNUB1ec8+cg2AkIDXhtz8ipkE80rlnTC4g6Pps="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.1.0_1549862469286_0.18723271962423693"},"_hasShrinkwrap":false},"1.1.1":{"name":"object-keys","version":"1.1.1","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent audit","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"eslint .","preaudit":"npm install --package-lock --package-lock-only","audit":"npm audit","postaudit":"rm package-lock.json"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^13.1.1","covert":"^1.1.1","eslint":"^5.13.0","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.3.0","tape":"^4.9.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"ba2c1989270c7de969aa8498fc3b7c8e677806f3","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.1.1","_nodeVersion":"11.13.0","_npmVersion":"6.7.0","dist":{"integrity":"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==","shasum":"1c47f272df277f3b1daf061677d9c82e2322c60e","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.1.1.tgz","fileCount":11,"unpackedSize":26544,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJcqWC7CRA9TVsSAnZWagAApOoQAIGgpq1xnaDCEe3hqJFt\n1fjwFbEKHTyK59hA/zVmvvR4ikMeltZc5KAIKNt5XyDBO4NtuRcA7E1b3D7C\nFX/IMtPwIq5OvZLWhEnGBNTmwlVq8PI9DwZ6AE2hWM4JAmkT5tay7QtjDAur\nYRdTEEB3eqWETNiaybnF9d1GLKuH4dKcM/v9yiHMp+qa9Ivpe9VtWRj7WTr+\nkxc39JZdSVGFbVYNCFkZ8oyj5VbLOtyMB++6JxbR9fYlZ06ibmT+XrFsz7CF\nr7hQ/XFHlyodg0pi34+YhlyDAsPIvk8DOxDoKGs4aFZ6EqZm3hVnWaAlKqgX\n3ikZAT9Z/4d9icoRkEhVMj7INySL4bSd7lFDIlwGruc4j6U6b6phhwgIlhQE\nMsnWmnLL7/AAaPB8oiNhb8Lt/9/jRJsAHwRBRH9NN/DH2VyP0F2hzp66L5dF\niIVw9YUIBCOzfRg5Gr0qd0GCGbIefcq4AomxsJEdBbV+3AFkJvj7dibMrSb7\nJcBVC/TwJCjNv+Ols7VZE+Yj6ZYbNrsuh5KbPkdFchg6qNgds1Dh1tH8GwrJ\nULdSyACz/0stHNGr8p+Boa85mDseApgozr42UUHdEQyohO1/meNonjDGJl9w\ncLlcHMcR7hnLzp4v54jcv+q74EDZa15iEk/ckLdYFoUXXhRhBJo0XyG43zf8\nEO6O\r\n=PzOq\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDEXnivOzyezLnJgG1VUzWQj/PurnTkz1ZGYX4uYuK0JgIhAKKg/wGdPhn67UneiElcVANcbwapE+3GGhHJftwDOncL"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.1.1_1554604218505_0.17078310534837748"},"_hasShrinkwrap":false}},"readme":"#object-keys [![Version Badge][npm-version-svg]][package-url]\n\n[![Build Status][travis-svg]][travis-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\n[![browser support][testling-svg]][testling-url]\n\nAn Object.keys shim. 
Invoke its \"shim\" method to shim Object.keys if it is unavailable.\n\nMost common usage:\n```js\nvar keys = Object.keys || require('object-keys');\n```\n\n## Example\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\nvar obj = {\n\ta: true,\n\tb: true,\n\tc: true\n};\n\nassert.deepEqual(keys(obj), ['a', 'b', 'c']);\n```\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\n/* when Object.keys is not present */\ndelete Object.keys;\nvar shimmedKeys = keys.shim();\nassert.equal(shimmedKeys, keys);\nassert.deepEqual(Object.keys(obj), keys(obj));\n```\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\n/* when Object.keys is present */\nvar shimmedKeys = keys.shim();\nassert.equal(shimmedKeys, Object.keys);\nassert.deepEqual(Object.keys(obj), keys(obj));\n```\n\n## Source\nImplementation taken directly from [es5-shim][es5-shim-url], with modifications, including from [lodash][lodash-url].\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[package-url]: https://npmjs.org/package/object-keys\n[npm-version-svg]: http://versionbadg.es/ljharb/object-keys.svg\n[travis-svg]: https://travis-ci.org/ljharb/object-keys.svg\n[travis-url]: https://travis-ci.org/ljharb/object-keys\n[deps-svg]: https://david-dm.org/ljharb/object-keys.svg\n[deps-url]: https://david-dm.org/ljharb/object-keys\n[dev-deps-svg]: https://david-dm.org/ljharb/object-keys/dev-status.svg\n[dev-deps-url]: https://david-dm.org/ljharb/object-keys#info=devDependencies\n[testling-svg]: https://ci.testling.com/ljharb/object-keys.png\n[testling-url]: https://ci.testling.com/ljharb/object-keys\n[es5-shim-url]: https://github.com/es-shims/es5-shim/blob/master/es5-shim.js#L542-589\n[lodash-url]: https://github.com/lodash/lodash\n[npm-badge-png]: https://nodei.co/npm/object-keys.png?downloads=true&stars=true\n[license-image]: http://img.shields.io/npm/l/object-keys.svg\n[license-url]: LICENSE\n[downloads-image]: 
http://img.shields.io/npm/dm/object-keys.svg\n[downloads-url]: http://npm-stat.com/charts.html?package=object-keys\n\n","maintainers":[{"email":"ljharb@gmail.com","name":"ljharb"}],"time":{"modified":"2022-06-22T16:42:21.741Z","created":"2013-03-29T20:44:12.281Z","0.0.1":"2013-03-29T20:44:12.881Z","0.0.2":"2013-03-30T16:13:52.880Z","0.1.0":"2013-03-30T20:58:48.065Z","0.1.1":"2013-04-02T06:16:54.290Z","0.1.2":"2013-04-03T16:43:21.243Z","0.1.3":"2013-04-08T01:18:51.713Z","0.1.4":"2013-04-09T00:47:37.900Z","0.1.5":"2013-04-14T12:27:20.913Z","0.1.6":"2013-04-17T07:18:02.522Z","0.1.7":"2013-04-18T02:23:24.367Z","0.1.8":"2013-05-10T17:32:12.476Z","0.2.0":"2013-05-10T18:52:03.655Z","0.3.0":"2013-05-18T22:06:13.036Z","0.4.0":"2013-08-14T08:10:10.483Z","0.5.0":"2014-01-30T09:28:17.465Z","0.5.1":"2014-03-10T06:43:32.469Z","0.6.0":"2014-08-01T07:22:33.482Z","0.6.1":"2014-08-26T05:51:23.007Z","1.0.0":"2014-08-26T19:21:11.757Z","1.0.1":"2014-09-03T07:19:08.654Z","1.0.2":"2014-12-28T09:03:12.859Z","1.0.3":"2015-01-06T22:27:00.343Z","1.0.4":"2015-05-23T20:19:48.735Z","1.0.5":"2015-07-03T23:43:33.872Z","1.0.6":"2015-07-09T15:41:54.153Z","1.0.7":"2015-07-18T19:23:11.235Z","1.0.8":"2015-10-14T22:21:16.304Z","1.0.9":"2015-10-19T22:07:23.370Z","1.0.10":"2016-07-04T18:01:59.134Z","1.0.11":"2016-07-05T17:49:39.399Z","1.0.12":"2018-06-18T21:47:14.916Z","1.1.0":"2019-02-11T05:21:09.393Z","1.1.1":"2019-04-07T02:30:18.674Z"},"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"users":{"claudiopro":true,"brostoch":true,"rocket0191":true},"readmeFilename":"README.md","homepage":"https://github.com/ljharb/object-keys#readme","keywords":["Object.keys","keys","ES5","shim"],"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"license":"MIT","contributors":[{"name":"Jordan 
Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}]} \ No newline at end of file diff --git a/cli/tests/testdata/npm/tarball_with_global_header/main.out b/cli/tests/testdata/npm/tarball_with_global_header/main.out index caf351e2e3..ff211087b6 100644 --- a/cli/tests/testdata/npm/tarball_with_global_header/main.out +++ b/cli/tests/testdata/npm/tarball_with_global_header/main.out @@ -1 +1 @@ -[Class: Client] +[class Client extends EventEmitter] diff --git a/cli/tests/testdata/package_json/basic/main.info.out b/cli/tests/testdata/package_json/basic/main.info.out index bf36f4f19e..3572c75e11 100644 --- a/cli/tests/testdata/package_json/basic/main.info.out +++ b/cli/tests/testdata/package_json/basic/main.info.out @@ -5,4 +5,4 @@ size: [WILDCARD] file:///[WILDCARD]/main.ts (63B) └─┬ file:///[WILDCARD]/lib.ts (166B) - └── npm:@denotest/esm-basic@1.0.0 (345B) + └── npm:@denotest/esm-basic@1.0.0 (416B) diff --git a/cli/tests/testdata/package_json/invalid_value/task.out b/cli/tests/testdata/package_json/invalid_value/task.out index 914dc27c6b..823c50612f 100644 --- a/cli/tests/testdata/package_json/invalid_value/task.out +++ b/cli/tests/testdata/package_json/invalid_value/task.out @@ -1,6 +1,6 @@ Warning Ignoring dependency '@denotest/cjs-default-export' in package.json because its version requirement failed to parse: Invalid npm specifier version requirement. Unexpected character. invalid stuff that won't parse ~ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. 
Task test echo 1 1 diff --git a/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out b/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out index 12a45b8da9..89e16b4781 100644 --- a/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out +++ b/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out @@ -1 +1 @@ -Module { isMod4: true } +[Module: null prototype] { isMod4: true } diff --git a/cli/tests/testdata/run/070_location.ts.out b/cli/tests/testdata/run/070_location.ts.out index 8b2f9e49df..6827a555d4 100644 --- a/cli/tests/testdata/run/070_location.ts.out +++ b/cli/tests/testdata/run/070_location.ts.out @@ -1,5 +1,5 @@ -[WILDCARD][Class: Location] -Location {} +[WILDCARD][class Location] +Object [Location] {} Location { hash: "#bat", host: "foo", diff --git a/cli/tests/testdata/run/071_location_unset.ts.out b/cli/tests/testdata/run/071_location_unset.ts.out index dc67c55787..cf4a9d6059 100644 --- a/cli/tests/testdata/run/071_location_unset.ts.out +++ b/cli/tests/testdata/run/071_location_unset.ts.out @@ -1,5 +1,5 @@ -[WILDCARD][Class: Location] -Location {} +[WILDCARD][class Location] +Object [Location] {} undefined /bar [WILDCARD] diff --git a/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out b/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out index 701ddc3b57..868c971940 100644 --- a/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out +++ b/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out @@ -2,11 +2,15 @@ Caught direct dynamic import error. TypeError: Relative import path "does not exist" not prefixed with / or ./ or ../ at [WILDCARD]/error_014_catch_dynamic_import_error.js:3:18 - at async [WILDCARD]/error_014_catch_dynamic_import_error.js:3:5 + at [WILDCARD]/error_014_catch_dynamic_import_error.js:3:5 { + code: "ERR_MODULE_NOT_FOUND" +} Caught indirect direct dynamic import error. 
TypeError: Relative import path "does not exist either" not prefixed with / or ./ or ../ at [WILDCARD]/subdir/indirect_import_error.js:1:15 - at async [WILDCARD]/error_014_catch_dynamic_import_error.js:10:5 + at async [WILDCARD]/error_014_catch_dynamic_import_error.js:10:5 { + code: "ERR_MODULE_NOT_FOUND" +} Caught error thrown by dynamically imported module. Error: An error at [WILDCARD]/subdir/throws.js:6:7 diff --git a/cli/tests/testdata/run/error_015_dynamic_import_permissions.js b/cli/tests/testdata/run/error_015_dynamic_import_permissions.js index 73da56fd89..47961cf63b 100644 --- a/cli/tests/testdata/run/error_015_dynamic_import_permissions.js +++ b/cli/tests/testdata/run/error_015_dynamic_import_permissions.js @@ -1,3 +1,3 @@ (async () => { - await import("http://localhost:4545/subdir/mod4.js"); + await import("" + "http://localhost:4545/subdir/mod4.js"); })(); diff --git a/cli/tests/testdata/run/error_015_dynamic_import_permissions.out b/cli/tests/testdata/run/error_015_dynamic_import_permissions.out index ef54f331b0..87ce43e9cd 100644 --- a/cli/tests/testdata/run/error_015_dynamic_import_permissions.out +++ b/cli/tests/testdata/run/error_015_dynamic_import_permissions.out @@ -1,4 +1,4 @@ error: Uncaught (in promise) TypeError: Requires net access to "localhost:4545", run again with the --allow-net flag - await import("http://localhost:4545/subdir/mod4.js"); + await import("" + "http://localhost:4545/subdir/mod4.js"); ^ at async file://[WILDCARD]/error_015_dynamic_import_permissions.js:2:3 diff --git a/cli/tests/testdata/run/error_with_errors_prop.js.out b/cli/tests/testdata/run/error_with_errors_prop.js.out index 3154e86e65..946b5ad84e 100644 --- a/cli/tests/testdata/run/error_with_errors_prop.js.out +++ b/cli/tests/testdata/run/error_with_errors_prop.js.out @@ -2,7 +2,14 @@ Error: Error with errors prop. at [WILDCARD]/error_with_errors_prop.js:1:15 Error: Error with errors prop. 
- at [WILDCARD]/error_with_errors_prop.js:1:15 + at [WILDCARD]/error_with_errors_prop.js:1:15 { + errors: [ + Error: Error message 1. + at [WILDCARD]/error_with_errors_prop.js:3:3, + Error: Error message 2. + at [WILDCARD]/error_with_errors_prop.js:4:3 + ] +} error: Uncaught Error: Error with errors prop. const error = new Error("Error with errors prop."); diff --git a/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out b/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out index 39e1640832..ac7f7c2305 100644 --- a/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out +++ b/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out @@ -1 +1,5 @@ -{ thrown: DOMException: foo, isNativeError: true, isCompileError: false } +[Object: null prototype] { + thrown: DOMException: foo, + isNativeError: true, + isCompileError: false +} diff --git a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out index 8bd3122980..85b52190bf 100644 --- a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out +++ b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out @@ -2,7 +2,7 @@ error: Uncaught (in worker "") Error: bar throw new Error("bar"); ^ - at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9 + at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9[WILDCARD] at [WILDCARD]/event_listener_error_immediate_exit.ts:11:1 error: Uncaught (in promise) Error: Unhandled error in child worker. 
at [WILDCARD] diff --git a/cli/tests/testdata/run/extension_dynamic_import.ts.out b/cli/tests/testdata/run/extension_dynamic_import.ts.out index 18b05ea47d..4414ad9235 100644 --- a/cli/tests/testdata/run/extension_dynamic_import.ts.out +++ b/cli/tests/testdata/run/extension_dynamic_import.ts.out @@ -1,4 +1,10 @@ -error: Uncaught TypeError: Cannot load extension module from external code +error: Uncaught (in promise) TypeError: Unsupported scheme "ext" for module "ext:runtime/01_errors.js". Supported schemes: [ + "data", + "blob", + "file", + "http", + "https", +] await import("ext:runtime/01_errors.js"); ^ - at [WILDCARD]/extension_dynamic_import.ts:1:1 + at async [WILDCARD]/extension_dynamic_import.ts:1:1 diff --git a/cli/tests/testdata/run/extension_import.ts.out b/cli/tests/testdata/run/extension_import.ts.out index f1d9d5eb20..88039a9ce8 100644 --- a/cli/tests/testdata/run/extension_import.ts.out +++ b/cli/tests/testdata/run/extension_import.ts.out @@ -5,4 +5,4 @@ error: Unsupported scheme "ext" for module "ext:runtime/01_errors.js". 
Supported "http", "https", ] - at [WILDCARD] + at [WILDCARD]/extension_import.ts:1:8 diff --git a/cli/tests/testdata/run/fetch_response_finalization.js.out b/cli/tests/testdata/run/fetch_response_finalization.js.out index 844a4e4b2d..1a8d7563df 100644 --- a/cli/tests/testdata/run/fetch_response_finalization.js.out +++ b/cli/tests/testdata/run/fetch_response_finalization.js.out @@ -1,2 +1,7 @@ -{ "0": "stdin", "1": "stdout", "2": "stderr", "5": "fetchResponseBody" } +{ + "0": "stdin", + "1": "stdout", + "2": "stderr", + "5": "fetchResponseBody" +} { "0": "stdin", "1": "stdout", "2": "stderr" } diff --git a/cli/tests/testdata/run/fix_js_imports.ts.out b/cli/tests/testdata/run/fix_js_imports.ts.out index 5e45122de8..c427932a42 100644 --- a/cli/tests/testdata/run/fix_js_imports.ts.out +++ b/cli/tests/testdata/run/fix_js_imports.ts.out @@ -1 +1 @@ -Module {} +[Module: null prototype] { } diff --git a/cli/tests/testdata/run/heapstats.js.out b/cli/tests/testdata/run/heapstats.js.out index b75a755f8e..9542663331 100644 --- a/cli/tests/testdata/run/heapstats.js.out +++ b/cli/tests/testdata/run/heapstats.js.out @@ -1,2 +1,2 @@ -Allocated: 4MB -Freed: -4MB +Allocated: 8MB +Freed: -8MB diff --git a/cli/tests/testdata/run/node_builtin_modules/mod.js.out b/cli/tests/testdata/run/node_builtin_modules/mod.js.out index 0d96b31ab6..844e3d9275 100644 --- a/cli/tests/testdata/run/node_builtin_modules/mod.js.out +++ b/cli/tests/testdata/run/node_builtin_modules/mod.js.out @@ -1,8 +1,3 @@ [Function: createRequire] v[WILDCARD].[WILDCARD].[WILDCARD] -[ - "[WILDCARD]", - "[WILDCARD]mod.js", - "hello", - "there" -] +[ [Getter], [Getter], "hello", "there" ] diff --git a/cli/tests/testdata/run/node_builtin_modules/mod.ts.out b/cli/tests/testdata/run/node_builtin_modules/mod.ts.out index f19bd81e67..844e3d9275 100644 --- a/cli/tests/testdata/run/node_builtin_modules/mod.ts.out +++ b/cli/tests/testdata/run/node_builtin_modules/mod.ts.out @@ -1,8 +1,3 @@ [Function: createRequire] 
v[WILDCARD].[WILDCARD].[WILDCARD] -[ - "[WILDCARD]", - "[WILDCARD]mod.ts", - "hello", - "there" -] +[ [Getter], [Getter], "hello", "there" ] diff --git a/cli/tests/testdata/run/top_level_await/loop.out b/cli/tests/testdata/run/top_level_await/loop.out index 7f72048c2d..1bdffbf660 100644 --- a/cli/tests/testdata/run/top_level_await/loop.out +++ b/cli/tests/testdata/run/top_level_await/loop.out @@ -1,5 +1,5 @@ loading [WILDCARD]a.js -loaded Module { default: [Class: Foo] } +loaded [Module: null prototype] { default: [class Foo] } loading [WILDCARD]b.js -loaded Module { default: [Class: Bar] } +loaded [Module: null prototype] { default: [class Bar] } all loaded diff --git a/cli/tests/testdata/run/ts_decorators.ts.out b/cli/tests/testdata/run/ts_decorators.ts.out index 381c7a8091..ee77417cf2 100644 --- a/cli/tests/testdata/run/ts_decorators.ts.out +++ b/cli/tests/testdata/run/ts_decorators.ts.out @@ -1,2 +1,2 @@ Check [WILDCARD] -{ someField: "asdf" } +SomeClass { someField: "asdf" } diff --git a/cli/tests/testdata/run/type_directives_js_main.js.out b/cli/tests/testdata/run/type_directives_js_main.js.out deleted file mode 100644 index 7bca837f02..0000000000 --- a/cli/tests/testdata/run/type_directives_js_main.js.out +++ /dev/null @@ -1,3 +0,0 @@ -[WILDCARD] -DEBUG RS - [WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts -[WILDCARD] diff --git a/cli/tests/testdata/run/websocket_server_idletimeout.ts b/cli/tests/testdata/run/websocket_server_idletimeout.ts index 9ae6698cbf..211b5f6ea9 100644 --- a/cli/tests/testdata/run/websocket_server_idletimeout.ts +++ b/cli/tests/testdata/run/websocket_server_idletimeout.ts @@ -1,5 +1,5 @@ -import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; -import { deferred } from "../../../test_util/std/async/deferred.ts"; +import { assertEquals } from "../../../../test_util/std/testing/asserts.ts"; +import { deferred } from "../../../../test_util/std/async/deferred.ts"; const 
errorDeferred = deferred(); const closeDeferred = deferred(); diff --git a/cli/tests/testdata/run/websocket_test.ts b/cli/tests/testdata/run/websocket_test.ts index a9dc34ad1d..d80f03c92a 100644 --- a/cli/tests/testdata/run/websocket_test.ts +++ b/cli/tests/testdata/run/websocket_test.ts @@ -161,7 +161,10 @@ Deno.test("websocket error", async () => { assert(err instanceof ErrorEvent); // Error message got changed because we don't use warp in test_util - assertEquals(err.message, "UnexpectedEof: tls handshake eof"); + assertEquals( + err.message, + "InvalidData: received corrupt message of type InvalidContentType", + ); promise1.resolve(); }; await promise1; diff --git a/cli/tests/testdata/run/with_package_json/no_deno_json/main.out b/cli/tests/testdata/run/with_package_json/no_deno_json/main.out index 45bcbb819c..b3af7331d7 100644 --- a/cli/tests/testdata/run/with_package_json/no_deno_json/main.out +++ b/cli/tests/testdata/run/with_package_json/no_deno_json/main.out @@ -1,13 +1,13 @@ [WILDCARD]package.json file found at '[WILDCARD]with_package_json[WILDCARD]package.json' [WILDCARD] ok -[Chalk (anonymous)] { +[Function (anonymous)] Chalk { constructor: [Function (anonymous)], - Instance: [Class: ChalkClass], + Instance: [class ChalkClass], supportsColor: false, - stderr: [Chalk (anonymous)] { + stderr: [Function (anonymous)] Chalk { constructor: [Function (anonymous)], - Instance: [Class: ChalkClass], + Instance: [class ChalkClass], supportsColor: false } } diff --git a/cli/tests/testdata/run/worker_close_in_wasm_reactions.js.out b/cli/tests/testdata/run/worker_close_in_wasm_reactions.js.out index 66eb8201cd..325180de4f 100644 --- a/cli/tests/testdata/run/worker_close_in_wasm_reactions.js.out +++ b/cli/tests/testdata/run/worker_close_in_wasm_reactions.js.out @@ -1,2 +1,2 @@ -Error: CompileError: WebAssembly.compile(): expected length: @+10 +Error: CompileError: WebAssembly.compile(): reached end while decoding length: @+10 at 
file:///[WILDCARD]/close_in_wasm_reactions.js:18:13 diff --git a/cli/tests/testdata/run/worker_drop_handle_race.js.out b/cli/tests/testdata/run/worker_drop_handle_race.js.out index ba66941591..451c3af3d5 100644 --- a/cli/tests/testdata/run/worker_drop_handle_race.js.out +++ b/cli/tests/testdata/run/worker_drop_handle_race.js.out @@ -5,5 +5,4 @@ error: Uncaught (in worker "") Error at Object.action (ext:deno_web/02_timers.js:[WILDCARD]) at handleTimerMacrotask (ext:deno_web/02_timers.js:[WILDCARD]) error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl (ext:runtime/11_workers.js:[WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/spawn_kill_permissions.ts b/cli/tests/testdata/spawn_kill_permissions.ts new file mode 100644 index 0000000000..86626bd5cf --- /dev/null +++ b/cli/tests/testdata/spawn_kill_permissions.ts @@ -0,0 +1,6 @@ +const child = new Deno.Command("cat", { + args: ["-"], + stdout: "null", + stderr: "null", +}).spawn(); +child.kill("SIGTERM"); diff --git a/cli/tests/testdata/task/both/package_json_selected.out b/cli/tests/testdata/task/both/package_json_selected.out index 06b735c9da..d317af4ed4 100644 --- a/cli/tests/testdata/task/both/package_json_selected.out +++ b/cli/tests/testdata/task/both/package_json_selected.out @@ -1,7 +1,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. 
Task bin cli-esm testing this out "asdf" testing this diff --git a/cli/tests/testdata/task/deno_json_pre_post/bin.out b/cli/tests/testdata/task/deno_json_pre_post/bin.out new file mode 100644 index 0000000000..ad66595f1e --- /dev/null +++ b/cli/tests/testdata/task/deno_json_pre_post/bin.out @@ -0,0 +1,2 @@ +Task test echo 'test' +test diff --git a/cli/tests/testdata/task/deno_json_pre_post/deno.json b/cli/tests/testdata/task/deno_json_pre_post/deno.json new file mode 100644 index 0000000000..165b92e3ad --- /dev/null +++ b/cli/tests/testdata/task/deno_json_pre_post/deno.json @@ -0,0 +1,7 @@ +{ + "tasks": { + "pretest": "echo 'pretest'", + "posttest": "echo 'posttest'", + "test": "echo 'test'" + } +} diff --git a/cli/tests/testdata/task/deno_json_pre_post/echo.out b/cli/tests/testdata/task/deno_json_pre_post/echo.out new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/cli/tests/testdata/task/deno_json_pre_post/echo.out @@ -0,0 +1 @@ +0 diff --git a/cli/tests/testdata/task/npx/non_existent.out b/cli/tests/testdata/task/npx/non_existent.out index b08d29ece6..81065bf743 100644 --- a/cli/tests/testdata/task/npx/non_existent.out +++ b/cli/tests/testdata/task/npx/non_existent.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. 
Task non-existent npx this-command-should-not-exist-for-you npx: could not resolve command 'this-command-should-not-exist-for-you' diff --git a/cli/tests/testdata/task/npx/on_own.out b/cli/tests/testdata/task/npx/on_own.out index 80d8ed9db3..fc9673f7f6 100644 --- a/cli/tests/testdata/task/npx/on_own.out +++ b/cli/tests/testdata/task/npx/on_own.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task on-own npx npx: missing command diff --git a/cli/tests/testdata/task/package_json/bin.out b/cli/tests/testdata/task/package_json/bin.out index fac6921156..6cfa06d433 100644 --- a/cli/tests/testdata/task/package_json/bin.out +++ b/cli/tests/testdata/task/package_json/bin.out @@ -3,7 +3,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin/0.5.0.tgz Initialize @denotest/bin@0.5.0 Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task bin @denotest/bin hi && cli-esm testing this out && npx cli-cjs test "extra" hi testing diff --git a/cli/tests/testdata/task/package_json_post/bin.out b/cli/tests/testdata/task/package_json_post/bin.out new file mode 100644 index 0000000000..9864cc76d0 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post/bin.out @@ -0,0 +1,5 @@ +Warning Currently only basic package.json `scripts` are supported. 
Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. +Task test echo 'test' +test +Task posttest echo 'posttest' +posttest diff --git a/cli/tests/testdata/task/package_json_post/echo.out b/cli/tests/testdata/task/package_json_post/echo.out new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post/echo.out @@ -0,0 +1 @@ +0 diff --git a/cli/tests/testdata/task/package_json_post/package.json b/cli/tests/testdata/task/package_json_post/package.json new file mode 100644 index 0000000000..82689f7d44 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post/package.json @@ -0,0 +1,6 @@ +{ + "scripts": { + "posttest": "echo 'posttest'", + "test": "echo 'test'" + } +} diff --git a/cli/tests/testdata/task/package_json_post_only/bin.out b/cli/tests/testdata/task/package_json_post_only/bin.out new file mode 100644 index 0000000000..9e7cea0916 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post_only/bin.out @@ -0,0 +1,4 @@ +Task not found: test +Available tasks: +- posttest (package.json) + echo 'posttest' diff --git a/cli/tests/testdata/task/package_json_post_only/echo.out b/cli/tests/testdata/task/package_json_post_only/echo.out new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post_only/echo.out @@ -0,0 +1 @@ +0 diff --git a/cli/tests/testdata/task/package_json_post_only/package.json b/cli/tests/testdata/task/package_json_post_only/package.json new file mode 100644 index 0000000000..ce8a6bbd54 --- /dev/null +++ b/cli/tests/testdata/task/package_json_post_only/package.json @@ -0,0 +1,5 @@ +{ + "scripts": { + "posttest": "echo 'posttest'" + } +} diff --git a/cli/tests/testdata/task/package_json_pre/bin.out b/cli/tests/testdata/task/package_json_pre/bin.out new file mode 100644 index 0000000000..89c64f2e5a --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre/bin.out @@ -0,0 +1,5 @@ +Warning 
Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. +Task pretest echo 'pretest' +pretest +Task test echo 'test' +test diff --git a/cli/tests/testdata/task/package_json_pre/echo.out b/cli/tests/testdata/task/package_json_pre/echo.out new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre/echo.out @@ -0,0 +1 @@ +0 diff --git a/cli/tests/testdata/task/package_json_pre/package.json b/cli/tests/testdata/task/package_json_pre/package.json new file mode 100644 index 0000000000..d3eba02a19 --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre/package.json @@ -0,0 +1,6 @@ +{ + "scripts": { + "test": "echo 'test'", + "pretest": "echo 'pretest'" + } +} diff --git a/cli/tests/testdata/task/package_json_pre_only/bin.out b/cli/tests/testdata/task/package_json_pre_only/bin.out new file mode 100644 index 0000000000..e96e8e3417 --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre_only/bin.out @@ -0,0 +1,4 @@ +Task not found: test +Available tasks: +- pretest (package.json) + echo 'pretest' diff --git a/cli/tests/testdata/task/package_json_pre_only/echo.out b/cli/tests/testdata/task/package_json_pre_only/echo.out new file mode 100644 index 0000000000..d00491fd7e --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre_only/echo.out @@ -0,0 +1 @@ +1 diff --git a/cli/tests/testdata/task/package_json_pre_only/package.json b/cli/tests/testdata/task/package_json_pre_only/package.json new file mode 100644 index 0000000000..032a5d4eab --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre_only/package.json @@ -0,0 +1,5 @@ +{ + "scripts": { + "pretest": "echo 'pretest'" + } +} diff --git a/cli/tests/testdata/task/package_json_pre_post/bin.out b/cli/tests/testdata/task/package_json_pre_post/bin.out new file mode 100644 index 0000000000..0c686b9cdb --- /dev/null +++ 
b/cli/tests/testdata/task/package_json_pre_post/bin.out @@ -0,0 +1,7 @@ +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. +Task pretest echo 'pretest' +pretest +Task test echo 'test' +test +Task posttest echo 'posttest' +posttest diff --git a/cli/tests/testdata/task/package_json_pre_post/echo.out b/cli/tests/testdata/task/package_json_pre_post/echo.out new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre_post/echo.out @@ -0,0 +1 @@ +0 diff --git a/cli/tests/testdata/task/package_json_pre_post/package.json b/cli/tests/testdata/task/package_json_pre_post/package.json new file mode 100644 index 0000000000..24a3ff1ed7 --- /dev/null +++ b/cli/tests/testdata/task/package_json_pre_post/package.json @@ -0,0 +1,7 @@ +{ + "scripts": { + "pretest": "echo 'pretest'", + "posttest": "echo 'posttest'", + "test": "echo 'test'" + } +} diff --git a/cli/tests/testdata/test/before_unload_prevent_default.out b/cli/tests/testdata/test/before_unload_prevent_default.out new file mode 100644 index 0000000000..09da32ff96 --- /dev/null +++ b/cli/tests/testdata/test/before_unload_prevent_default.out @@ -0,0 +1,5 @@ +running 1 test from [WILDCARD]/before_unload_prevent_default.ts +foo ... ok ([WILDCARD]) + +ok | 1 passed | 0 failed ([WILDCARD]) + diff --git a/cli/tests/testdata/test/before_unload_prevent_default.ts b/cli/tests/testdata/test/before_unload_prevent_default.ts new file mode 100644 index 0000000000..421ded5200 --- /dev/null +++ b/cli/tests/testdata/test/before_unload_prevent_default.ts @@ -0,0 +1,6 @@ +addEventListener("beforeunload", (e) => { + // The worker should be killed once tests are done regardless of this. 
+ e.preventDefault(); +}); + +Deno.test("foo", () => {}); diff --git a/cli/tests/testdata/test/captured_output.ts b/cli/tests/testdata/test/captured_output.ts index 43295f027b..905156fd41 100644 --- a/cli/tests/testdata/test/captured_output.ts +++ b/cli/tests/testdata/test/captured_output.ts @@ -1,4 +1,5 @@ Deno.test("output", async () => { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "console.log(0); console.error(1);"], }); diff --git a/cli/tests/testdata/test/collect.deprecated.out b/cli/tests/testdata/test/collect.deprecated.out new file mode 100644 index 0000000000..9bf68807cf --- /dev/null +++ b/cli/tests/testdata/test/collect.deprecated.out @@ -0,0 +1,10 @@ +Warning: "files" configuration is deprecated. Please use "include" and "exclude" instead. +Check [WILDCARD]/test/collect/include/2_test.ts +Check [WILDCARD]/test/collect/include/test.ts +Check [WILDCARD]/test/collect/test.ts +running 0 tests from ./test/collect/include/2_test.ts +running 0 tests from ./test/collect/include/test.ts +running 0 tests from ./test/collect/test.ts + +ok | 0 passed | 0 failed ([WILDCARD]) + diff --git a/cli/tests/testdata/test/collect/deno.deprecated.jsonc b/cli/tests/testdata/test/collect/deno.deprecated.jsonc new file mode 100644 index 0000000000..b8acda27d0 --- /dev/null +++ b/cli/tests/testdata/test/collect/deno.deprecated.jsonc @@ -0,0 +1,7 @@ +{ + "test": { + "files": { + "exclude": ["./ignore"] + } + } +} diff --git a/cli/tests/testdata/test/collect/deno.jsonc b/cli/tests/testdata/test/collect/deno.jsonc index b8acda27d0..e14ce86da0 100644 --- a/cli/tests/testdata/test/collect/deno.jsonc +++ b/cli/tests/testdata/test/collect/deno.jsonc @@ -1,7 +1,5 @@ { "test": { - "files": { - "exclude": ["./ignore"] - } + "exclude": ["./ignore"] } } diff --git a/cli/tests/testdata/test/collect/deno2.jsonc b/cli/tests/testdata/test/collect/deno2.jsonc index a4d244e31e..b7af09d1c0 100644 --- a/cli/tests/testdata/test/collect/deno2.jsonc 
+++ b/cli/tests/testdata/test/collect/deno2.jsonc @@ -1,8 +1,6 @@ { "test": { - "files": { - "include": ["./include/"], - "exclude": ["./ignore", "./include/2_test.ts"] - } + "include": ["./include/"], + "exclude": ["./ignore", "./include/2_test.ts"] } } diff --git a/cli/tests/testdata/test/collect_with_malformed_config.out b/cli/tests/testdata/test/collect_with_malformed_config.out index 25c34406fd..b31b18e6a2 100644 --- a/cli/tests/testdata/test/collect_with_malformed_config.out +++ b/cli/tests/testdata/test/collect_with_malformed_config.out @@ -1,4 +1,4 @@ error: Failed to parse "test" configuration Caused by: - unknown field `dont_know_this_field`, expected `files` + unknown field `dont_know_this_field`, expected one of `include`, `exclude`, `files` diff --git a/cli/tests/testdata/test/deno.glob.json b/cli/tests/testdata/test/deno.glob.json new file mode 100644 index 0000000000..9deb4d2f22 --- /dev/null +++ b/cli/tests/testdata/test/deno.glob.json @@ -0,0 +1,11 @@ +{ + "test": { + "include": [ + "glob/data/test1.?s", + "glob/nested/foo/*.ts", + "glob/nested/fizz/*.ts", + "glob/pages/[id].ts" + ], + "exclude": ["glob/nested/**/*bazz.ts"] + } +} diff --git a/cli/tests/testdata/test/glob/data/tes.ts b/cli/tests/testdata/test/glob/data/tes.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/data/tes.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/data/test1.js b/cli/tests/testdata/test/glob/data/test1.js new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/data/test1.js @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/data/test1.ts b/cli/tests/testdata/test/glob/data/test1.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/data/test1.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of 
file diff --git a/cli/tests/testdata/test/glob/data/test12.ts b/cli/tests/testdata/test/glob/data/test12.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/data/test12.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/fizz/bar.ts b/cli/tests/testdata/test/glob/nested/fizz/bar.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/fizz/bar.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/fizz/bazz.ts b/cli/tests/testdata/test/glob/nested/fizz/bazz.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/fizz/bazz.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/fizz/fizz.ts b/cli/tests/testdata/test/glob/nested/fizz/fizz.ts new file mode 100644 index 0000000000..6940729e9e --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/fizz/fizz.ts @@ -0,0 +1,2 @@ +function foo() { +} diff --git a/cli/tests/testdata/test/glob/nested/fizz/foo.ts b/cli/tests/testdata/test/glob/nested/fizz/foo.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/fizz/foo.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/foo/bar.ts b/cli/tests/testdata/test/glob/nested/foo/bar.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/foo/bar.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/foo/bazz.ts b/cli/tests/testdata/test/glob/nested/foo/bazz.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/foo/bazz.ts @@ -0,0 +1,3 @@ +function 
foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/foo/fizz.ts b/cli/tests/testdata/test/glob/nested/foo/fizz.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/foo/fizz.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/nested/foo/foo.ts b/cli/tests/testdata/test/glob/nested/foo/foo.ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/nested/foo/foo.ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/glob/pages/[id].ts b/cli/tests/testdata/test/glob/pages/[id].ts new file mode 100644 index 0000000000..26f07fba54 --- /dev/null +++ b/cli/tests/testdata/test/glob/pages/[id].ts @@ -0,0 +1,3 @@ +function foo() { + +} \ No newline at end of file diff --git a/cli/tests/testdata/test/report_error.out b/cli/tests/testdata/test/report_error.out new file mode 100644 index 0000000000..698550f97d --- /dev/null +++ b/cli/tests/testdata/test/report_error.out @@ -0,0 +1,23 @@ +running 2 tests from [WILDCARD]/report_error.ts +foo ... +Uncaught error from [WILDCARD]/report_error.ts FAILED +foo ... cancelled (0ms) +bar ... cancelled (0ms) + + ERRORS + +[WILDCARD]/report_error.ts (uncaught error) +error: Error: foo + reportError(new Error("foo")); + ^ + at [WILDCARD]/report_error.ts:2:15 +This error was not caught from a test and caused the test runner to fail on the referenced module. +It most likely originated from a dangling promise, event/timeout handler or top-level code. 
+ + FAILURES + +[WILDCARD]/report_error.ts (uncaught error) + +FAILED | 0 passed | 3 failed ([WILDCARD]) + +error: Test failed diff --git a/cli/tests/testdata/test/report_error.ts b/cli/tests/testdata/test/report_error.ts new file mode 100644 index 0000000000..56b6db26c8 --- /dev/null +++ b/cli/tests/testdata/test/report_error.ts @@ -0,0 +1,6 @@ +Deno.test("foo", () => { + reportError(new Error("foo")); + console.log(1); +}); + +Deno.test("bar", () => {}); diff --git a/cli/tests/testdata/vendor/npm_and_node_specifier.ts b/cli/tests/testdata/vendor/npm_and_node_specifier.ts new file mode 100644 index 0000000000..61962e836b --- /dev/null +++ b/cli/tests/testdata/vendor/npm_and_node_specifier.ts @@ -0,0 +1,2 @@ +export { default as path } from "node:path"; +export { getValue, setValue } from "npm:@denotest/esm-basic"; diff --git a/cli/tests/testdata/workers/dynamic_remote.ts b/cli/tests/testdata/workers/dynamic_remote.ts index 381c7f374c..54e4a4714e 100644 --- a/cli/tests/testdata/workers/dynamic_remote.ts +++ b/cli/tests/testdata/workers/dynamic_remote.ts @@ -1,2 +1,2 @@ // This file doesn't really exist, but it doesn't matter, a "PermissionsDenied" error should be thrown. -await import("https://example.com/some/file.ts"); +await import("" + "https://example.com/some/file.ts"); diff --git a/cli/tests/testdata/workers/permissions_blob_local.ts.out b/cli/tests/testdata/workers/permissions_blob_local.ts.out index 8cfd41523c..0cd581f7b7 100644 --- a/cli/tests/testdata/workers/permissions_blob_local.ts.out +++ b/cli/tests/testdata/workers/permissions_blob_local.ts.out @@ -1,5 +1,4 @@ error: Uncaught (in worker "") Requires read access to "[WILDCARD]local_file.ts", run again with the --allow-read flag at blob:null/[WILDCARD]:1:8 error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out index cbd3f480f7..91f3cc6d5b 100644 --- a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out +++ b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out @@ -1,7 +1,6 @@ error: Uncaught (in worker "") (in promise) TypeError: Requires net access to "example.com", run again with the --allow-net flag -await import("https://example.com/some/file.ts"); +await import("" + "https://example.com/some/file.ts"); ^ at async http://localhost:4545/workers/dynamic_remote.ts:2:1 [WILDCARD]error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/permissions_remote_remote.ts.out b/cli/tests/testdata/workers/permissions_remote_remote.ts.out index 001370f2fc..bb065740aa 100644 --- a/cli/tests/testdata/workers/permissions_remote_remote.ts.out +++ b/cli/tests/testdata/workers/permissions_remote_remote.ts.out @@ -1,5 +1,4 @@ error: Uncaught (in worker "") Requires net access to "example.com", run again with the --allow-net flag at http://localhost:4545/workers/static_remote.ts:2:8 error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_async_error.ts.out b/cli/tests/testdata/workers/worker_async_error.ts.out index 84863f0166..8d017859c4 100644 --- a/cli/tests/testdata/workers/worker_async_error.ts.out +++ b/cli/tests/testdata/workers/worker_async_error.ts.out @@ -4,5 +4,4 @@ error: Uncaught (in worker "foo") (in promise) Error: bar at [WILDCARD]/async_error.ts:[WILDCARD] at [WILDCARD]/async_error.ts:[WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_error.ts.out b/cli/tests/testdata/workers/worker_error.ts.out index 89f579fb74..78d0c423ed 100644 --- a/cli/tests/testdata/workers/worker_error.ts.out +++ b/cli/tests/testdata/workers/worker_error.ts.out @@ -2,5 +2,4 @@ at foo ([WILDCARD]) at [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_message_handler_error.ts.out b/cli/tests/testdata/workers/worker_message_handler_error.ts.out index 76449f989e..0f97e97036 100644 --- a/cli/tests/testdata/workers/worker_message_handler_error.ts.out +++ b/cli/tests/testdata/workers/worker_message_handler_error.ts.out @@ -4,5 +4,4 @@ error: Uncaught (in worker "foo") Error: bar at onmessage ([WILDCARD]/message_handler_error.ts:[WILDCARD]) at [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_nested_error.ts.out b/cli/tests/testdata/workers/worker_nested_error.ts.out index dd65036b28..15cb85b48c 100644 --- a/cli/tests/testdata/workers/worker_nested_error.ts.out +++ b/cli/tests/testdata/workers/worker_nested_error.ts.out @@ -4,8 +4,6 @@ at foo ([WILDCARD]/workers/error.ts:[WILDCARD]) at [WILDCARD]/workers/error.ts:[WILDCARD] error: Uncaught (in worker "baz") (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/unit/body_test.ts b/cli/tests/unit/body_test.ts index e7a38b7a6b..8aebfadd30 100644 --- a/cli/tests/unit/body_test.ts +++ b/cli/tests/unit/body_test.ts @@ -53,6 +53,59 @@ Deno.test( }, ); +// FormData: non-ASCII names and filenames +Deno.test( + { permissions: { net: true } }, + async function bodyMultipartFormDataNonAsciiNames() { + const boundary = "----01230123"; + const payload = [ + `--${boundary}`, + `Content-Disposition: form-data; name="文字"`, + "", + "文字", + `--${boundary}`, + `Content-Disposition: form-data; name="file"; filename="文字"`, + "Content-Type: application/octet-stream", + "", + "", + `--${boundary}--`, + ].join("\r\n"); + + const body = buildBody( + new TextEncoder().encode(payload), + new Headers({ + "Content-Type": `multipart/form-data; boundary=${boundary}`, + }), + ); + + const formData = await body.formData(); + assert(formData.has("文字")); + assertEquals(formData.get("文字"), "文字"); + assert(formData.has("file")); + assert(formData.get("file") instanceof File); + assertEquals((formData.get("file") as File).name, "文字"); 
+ }, +); + +// FormData: non-ASCII names and filenames roundtrip +Deno.test( + { permissions: { net: true } }, + async function bodyMultipartFormDataNonAsciiRoundtrip() { + const inFormData = new FormData(); + inFormData.append("文字", "文字"); + inFormData.append("file", new File([], "文字")); + + const body = buildBody(inFormData); + + const formData = await body.formData(); + assert(formData.has("文字")); + assertEquals(formData.get("文字"), "文字"); + assert(formData.has("file")); + assert(formData.get("file") instanceof File); + assertEquals((formData.get("file") as File).name, "文字"); + }, +); + Deno.test( { permissions: { net: true } }, async function bodyURLEncodedFormData() { diff --git a/cli/tests/unit/command_test.ts b/cli/tests/unit/command_test.ts index 0763a7ac68..198f94aedb 100644 --- a/cli/tests/unit/command_test.ts +++ b/cli/tests/unit/command_test.ts @@ -867,3 +867,21 @@ Deno.test( } }, ); + +Deno.test( + { permissions: { run: true, read: true } }, + async function commandKillAfterStatus() { + const command = new Deno.Command(Deno.execPath(), { + args: ["help"], + stdout: "null", + stderr: "null", + }); + const child = command.spawn(); + await child.status; + assertThrows( + () => child.kill(), + TypeError, + "Child process has already terminated.", + ); + }, +); diff --git a/cli/tests/unit/console_test.ts b/cli/tests/unit/console_test.ts index 3f0f4b7023..4cedf35846 100644 --- a/cli/tests/unit/console_test.ts +++ b/cli/tests/unit/console_test.ts @@ -152,16 +152,16 @@ Deno.test( }, ), `{ - [Symbol("foo\\b")]: 'Symbol("foo\\n\")', - [Symbol("bar\\n")]: 'Symbol("bar\\n\")', - [Symbol("bar\\r")]: 'Symbol("bar\\r\")', - [Symbol("baz\\t")]: 'Symbol("baz\\t\")', - [Symbol("qux\\x00")]: 'Symbol(\"qux\\x00")' + [Symbol("foo\\b")]: 'Symbol("foo\\n")', + [Symbol("bar\\n")]: 'Symbol("bar\\n")', + [Symbol("bar\\r")]: 'Symbol("bar\\r")', + [Symbol("baz\\t")]: 'Symbol("baz\\t")', + [Symbol("qux\\x00")]: 'Symbol("qux\\x00")' }`, ); assertEquals( stringify(new Set(["foo\n", 
"foo\r", "foo\0"])), - `Set { "foo\\n", "foo\\r", "foo\\x00" }`, + `Set(3) { "foo\\n", "foo\\r", "foo\\x00" }`, ); }, ); @@ -236,8 +236,8 @@ Deno.test(function consoleTestStringifyCircular() { nu: null, arrowFunc: [Function: arrowFunc], extendedClass: Extended { a: 1, b: 2 }, - nFunc: [Function (anonymous)], - extendedCstr: [Class: Extended], + nFunc: [Function: anonymous], + extendedCstr: [class Extended extends Base], o: { num: 2, bool: false, @@ -267,7 +267,7 @@ Deno.test(function consoleTestStringifyCircular() { stringify(new Date("2018-12-10T02:26:59.002Z")), "2018-12-10T02:26:59.002Z", ); - assertEquals(stringify(new Set([1, 2, 3])), "Set { 1, 2, 3 }"); + assertEquals(stringify(new Set([1, 2, 3])), "Set(3) { 1, 2, 3 }"); assertEquals( stringify( new Map([ @@ -275,10 +275,10 @@ Deno.test(function consoleTestStringifyCircular() { [2, "two"], ]), ), - `Map { 1 => "one", 2 => "two" }`, + `Map(2) { 1 => "one", 2 => "two" }`, ); - assertEquals(stringify(new WeakSet()), "WeakSet { [items unknown] }"); - assertEquals(stringify(new WeakMap()), "WeakMap { [items unknown] }"); + assertEquals(stringify(new WeakSet()), "WeakSet { }"); + assertEquals(stringify(new WeakMap()), "WeakMap { }"); assertEquals(stringify(Symbol(1)), `Symbol("1")`); assertEquals(stringify(Object(Symbol(1))), `[Symbol: Symbol("1")]`); assertEquals(stringify(null), "null"); @@ -304,19 +304,23 @@ Deno.test(function consoleTestStringifyCircular() { stringify(new Uint8Array([1, 2, 3])), "Uint8Array(3) [ 1, 2, 3 ]", ); - assertEquals(stringify(Uint8Array.prototype), "Uint8Array {}"); + assertEquals(stringify(Uint8Array.prototype), "TypedArray {}"); assertEquals( stringify({ a: { b: { c: { d: new Set([1]) } } } }), - "{ a: { b: { c: { d: [Set] } } } }", + `{ + a: { + b: { c: { d: Set(1) { 1 } } } + } +}`, ); assertEquals(stringify(nestedObj), nestedObjExpected); assertEquals( stringify(JSON), - "JSON {}", + "Object [JSON] {}", ); assertEquals( stringify(new Console(() => {})), - `console { + `Object 
[console] { log: [Function: log], debug: [Function: debug], info: [Function: info], @@ -345,15 +349,11 @@ Deno.test(function consoleTestStringifyCircular() { ); assertEquals( stringify({ str: 1, [Symbol.for("sym")]: 2, [Symbol.toStringTag]: "TAG" }), - 'TAG { str: 1, [Symbol(sym)]: 2, [Symbol(Symbol.toStringTag)]: "TAG" }', - ); - assertEquals( - stringify({ - [Symbol.for("Deno.customInspect")]: function () { - return Deno.inspect(this); - }, - }), - "[Circular *1]", + `Object [TAG] { + str: 1, + [Symbol(sym)]: 2, + [Symbol(Symbol.toStringTag)]: "TAG" +}`, ); // test inspect is working the same assertEquals(stripColor(Deno.inspect(nestedObj)), nestedObjExpected); @@ -363,26 +363,28 @@ Deno.test(function consoleTestStringifyMultipleCircular() { const y = { a: { b: {} }, foo: { bar: {} } }; y.a.b = y.a; y.foo.bar = y.foo; - console.log(y); assertEquals( stringify(y), - "{ a: { b: [Circular *1] }, foo: { bar: [Circular *2] } }", + "{\n" + + " a: { b: [Circular *1] },\n" + + " foo: { bar: [Circular *2] }\n" + + "}", ); }); Deno.test(function consoleTestStringifyFunctionWithPrototypeRemoved() { const f = function f() {}; Reflect.setPrototypeOf(f, null); - assertEquals(stringify(f), "[Function: f]"); + assertEquals(stringify(f), "[Function (null prototype): f]"); const af = async function af() {}; Reflect.setPrototypeOf(af, null); - assertEquals(stringify(af), "[Function: af]"); + assertEquals(stringify(af), "[Function (null prototype): af]"); const gf = function* gf() {}; Reflect.setPrototypeOf(gf, null); - assertEquals(stringify(gf), "[Function: gf]"); + assertEquals(stringify(gf), "[Function (null prototype): gf]"); const agf = async function* agf() {}; Reflect.setPrototypeOf(agf, null); - assertEquals(stringify(agf), "[Function: agf]"); + assertEquals(stringify(agf), "[Function (null prototype): agf]"); }); Deno.test(function consoleTestStringifyFunctionWithProperties() { @@ -400,7 +402,7 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { y: 3, z: 
[Function (anonymous)], b: [Function: bar], - a: Map {} + a: Map(0) {} } }`, ); @@ -417,7 +419,7 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { y: 3, z: [Function (anonymous)], b: [Function: bar], - a: Map {}, + a: Map(0) {}, s: [Circular *1], t: [Function: t] { x: [Circular *1] } } @@ -431,7 +433,75 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { assertEquals( stripColor(Deno.inspect(Array, { showHidden: true })), - `[Function: Array] { [Symbol(Symbol.species)]: [Getter] }`, + ` [Function: Array] { + [length]: 1, + [name]: "Array", + [prototype]: Object(0) [ + [length]: 0, + [constructor]: [Circular *1], + [at]: [Function: at] { [length]: 1, [name]: "at" }, + [concat]: [Function: concat] { [length]: 1, [name]: "concat" }, + [copyWithin]: [Function: copyWithin] { [length]: 2, [name]: "copyWithin" }, + [fill]: [Function: fill] { [length]: 1, [name]: "fill" }, + [find]: [Function: find] { [length]: 1, [name]: "find" }, + [findIndex]: [Function: findIndex] { [length]: 1, [name]: "findIndex" }, + [findLast]: [Function: findLast] { [length]: 1, [name]: "findLast" }, + [findLastIndex]: [Function: findLastIndex] { [length]: 1, [name]: "findLastIndex" }, + [lastIndexOf]: [Function: lastIndexOf] { [length]: 1, [name]: "lastIndexOf" }, + [pop]: [Function: pop] { [length]: 0, [name]: "pop" }, + [push]: [Function: push] { [length]: 1, [name]: "push" }, + [reverse]: [Function: reverse] { [length]: 0, [name]: "reverse" }, + [shift]: [Function: shift] { [length]: 0, [name]: "shift" }, + [unshift]: [Function: unshift] { [length]: 1, [name]: "unshift" }, + [slice]: [Function: slice] { [length]: 2, [name]: "slice" }, + [sort]: [Function: sort] { [length]: 1, [name]: "sort" }, + [splice]: [Function: splice] { [length]: 2, [name]: "splice" }, + [includes]: [Function: includes] { [length]: 1, [name]: "includes" }, + [indexOf]: [Function: indexOf] { [length]: 1, [name]: "indexOf" }, + [join]: [Function: join] { [length]: 1, [name]: "join" }, + 
[keys]: [Function: keys] { [length]: 0, [name]: "keys" }, + [entries]: [Function: entries] { [length]: 0, [name]: "entries" }, + [values]: [Function: values] { [length]: 0, [name]: "values" }, + [forEach]: [Function: forEach] { [length]: 1, [name]: "forEach" }, + [filter]: [Function: filter] { [length]: 1, [name]: "filter" }, + [flat]: [Function: flat] { [length]: 0, [name]: "flat" }, + [flatMap]: [Function: flatMap] { [length]: 1, [name]: "flatMap" }, + [map]: [Function: map] { [length]: 1, [name]: "map" }, + [every]: [Function: every] { [length]: 1, [name]: "every" }, + [some]: [Function: some] { [length]: 1, [name]: "some" }, + [reduce]: [Function: reduce] { [length]: 1, [name]: "reduce" }, + [reduceRight]: [Function: reduceRight] { [length]: 1, [name]: "reduceRight" }, + [toLocaleString]: [Function: toLocaleString] { [length]: 0, [name]: "toLocaleString" }, + [toString]: [Function: toString] { [length]: 0, [name]: "toString" }, + [toReversed]: [Function: toReversed] { [length]: 0, [name]: "toReversed" }, + [toSorted]: [Function: toSorted] { [length]: 1, [name]: "toSorted" }, + [toSpliced]: [Function: toSpliced] { [length]: 2, [name]: "toSpliced" }, + [with]: [Function: with] { [length]: 2, [name]: "with" }, + [Symbol(Symbol.iterator)]: [Function: values] { [length]: 0, [name]: "values" }, + [Symbol(Symbol.unscopables)]: [Object: null prototype] { + at: true, + copyWithin: true, + entries: true, + fill: true, + find: true, + findIndex: true, + findLast: true, + findLastIndex: true, + flat: true, + flatMap: true, + includes: true, + keys: true, + values: true, + toReversed: true, + toSorted: true, + toSpliced: true + } + ], + [isArray]: [Function: isArray] { [length]: 1, [name]: "isArray" }, + [from]: [Function: from] { [length]: 1, [name]: "from" }, + [of]: [Function: of] { [length]: 0, [name]: "of" }, + [Symbol(Symbol.species)]: [Getter] +}`, ); }); @@ -440,21 +510,24 @@ Deno.test(function consoleTestStringifyWithDepth() { const nestedObj: any = { a: { b: { c: 
{ d: { e: { f: 42 } } } } } }; assertEquals( stripColor(inspectArgs([nestedObj], { depth: 3 })), - "{ a: { b: { c: [Object] } } }", + "{\n a: { b: { c: { d: [Object] } } }\n}", ); assertEquals( stripColor(inspectArgs([nestedObj], { depth: 4 })), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", + ); + assertEquals( + stripColor(inspectArgs([nestedObj], { depth: 0 })), + "{ a: [Object] }", ); - assertEquals(stripColor(inspectArgs([nestedObj], { depth: 0 })), "[Object]"); assertEquals( stripColor(inspectArgs([nestedObj])), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", ); // test inspect is working the same way assertEquals( stripColor(Deno.inspect(nestedObj, { depth: 4 })), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", ); }); @@ -502,13 +575,15 @@ Deno.test(function consoleTestStringifyIterable() { assertEquals( stringify(longArray), `[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, ... 
100 more items ]`, ); @@ -519,13 +594,15 @@ Deno.test(function consoleTestStringifyIterable() { `{ a: "a", longArray: [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, ... 100 more items ] }`, @@ -535,7 +612,7 @@ Deno.test(function consoleTestStringifyIterable() { ["a", 0], ["b", 1], ]); - assertEquals(stringify(shortMap), `Map { "a" => 0, "b" => 1 }`); + assertEquals(stringify(shortMap), `Map(2) { "a" => 0, "b" => 1 }`); const longMap = new Map(); for (const key of Array(200).keys()) { @@ -543,7 +620,7 @@ Deno.test(function consoleTestStringifyIterable() { } assertEquals( stringify(longMap), - `Map { + `Map(200) { "0" => 0, "1" => 1, "2" => 2, @@ -649,14 +726,14 @@ Deno.test(function consoleTestStringifyIterable() { ); const shortSet = new Set([1, 2, 3]); - assertEquals(stringify(shortSet), `Set { 1, 2, 3 }`); + assertEquals(stringify(shortSet), `Set(3) { 1, 2, 3 }`); const longSet = new Set(); for (const key of Array(200).keys()) { longSet.add(key); } assertEquals( stringify(longSet), - `Set { + `Set(200) { 0, 1, 2, @@ -1059,7 +1136,7 @@ Deno.test(function consoleTestWithObjectFormatSpecifier() { assertEquals(stringify("%o", { a: 42 }), "{ a: 42 }"); assertEquals( stringify("%o", { a: { b: { c: { d: new Set([1]) } } } }), - "{ a: { b: { c: { d: [Set] } } } }", + "{\n a: {\n b: { c: { d: Set(1) { 1 } } }\n }\n}", ); }); @@ -1503,15 +1580,15 @@ Deno.test(function consoleTable() { 
assertEquals( stripColor(out.toString()), `\ -┌───────┬───────────┬───────────────────┬────────┐ -│ (idx) │ c │ e │ Values │ -├───────┼───────────┼───────────────────┼────────┤ -│ a │ │ │ true │ -│ b │ { d: 10 } │ [ 1, 2, [Array] ] │ │ -│ f │ │ │ "test" │ -│ g │ │ │ │ -│ h │ │ │ │ -└───────┴───────────┴───────────────────┴────────┘ +┌───────┬───────────┬────────────────────┬────────┐ +│ (idx) │ c │ e │ Values │ +├───────┼───────────┼────────────────────┼────────┤ +│ a │ │ │ true │ +│ b │ { d: 10 } │ [ 1, 2, [ 5, 6 ] ] │ │ +│ f │ │ │ "test" │ +│ g │ │ │ │ +│ h │ │ │ │ +└───────┴───────────┴────────────────────┴────────┘ `, ); }); @@ -1797,7 +1874,7 @@ Deno.test(function inspectGetters() { return 0; }, }, { getters: true })), - "{ foo: 0 }", + "{ foo: [Getter: 0] }", ); assertEquals( @@ -1806,13 +1883,13 @@ Deno.test(function inspectGetters() { throw new Error("bar"); }, }, { getters: true }), - "{ foo: [Thrown Error: bar] }", + "{ foo: [Getter: ] }", ); }); Deno.test(function inspectPrototype() { class A {} - assertEquals(Deno.inspect(A.prototype), "A {}"); + assertEquals(Deno.inspect(A.prototype), "{}"); }); Deno.test(function inspectSorted() { @@ -1822,7 +1899,7 @@ Deno.test(function inspectSorted() { ); assertEquals( stripColor(Deno.inspect(new Set(["b", "a"]), { sorted: true })), - `Set { "a", "b" }`, + `Set(2) { "a", "b" }`, ); assertEquals( stripColor(Deno.inspect( @@ -1832,7 +1909,7 @@ Deno.test(function inspectSorted() { ]), { sorted: true }, )), - `Map { "a" => 1, "b" => 2 }`, + `Map(2) { "a" => 1, "b" => 2 }`, ); }); @@ -1871,7 +1948,7 @@ Deno.test(function inspectTrailingComma() { ]), { trailingComma: true }, )), - `Set { + `Set(2) { "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", }`, @@ -1884,7 +1961,7 @@ Deno.test(function inspectTrailingComma() { ]), { trailingComma: true }, )), - `Map { + `Map(2) { "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" => 1, "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" => 2, }`, @@ -1904,11 +1981,11 @@ 
Deno.test(function inspectCompact() { Deno.test(function inspectIterableLimit() { assertEquals( stripColor(Deno.inspect(["a", "b", "c"], { iterableLimit: 2 })), - `[ "a", "b", ... 1 more items ]`, + `[ "a", "b", ... 1 more item ]`, ); assertEquals( stripColor(Deno.inspect(new Set(["a", "b", "c"]), { iterableLimit: 2 })), - `Set { "a", "b", ... 1 more items }`, + `Set(3) { "a", "b", ... 1 more item }`, ); assertEquals( stripColor(Deno.inspect( @@ -1919,7 +1996,7 @@ Deno.test(function inspectIterableLimit() { ]), { iterableLimit: 2 }, )), - `Map { "a" => 1, "b" => 2, ... 1 more items }`, + `Map(3) { "a" => 1, "b" => 2, ... 1 more item }`, ); }); @@ -1958,7 +2035,7 @@ Deno.test(function inspectProxy() { }, }), )), - `MyProxy { prop1: 5, prop2: 5 }`, + `Object [MyProxy] { prop1: 5, prop2: 5 }`, ); assertEquals( stripColor(Deno.inspect( @@ -1983,10 +2060,13 @@ Deno.test(function inspectProxy() { new Proxy([1, 2, 3, 4, 5, 6, 7], { get() {} }), { showProxy: true }, )), - `Proxy [ [ + `Proxy [ + [ 1, 2, 3, 4, 5, 6, 7 - ], { get: [Function: get] } ]`, + ], + { get: [Function: get] } +]`, ); assertEquals( stripColor(Deno.inspect( @@ -2057,7 +2137,7 @@ Deno.test(function inspectEmptyArray() { compact: false, trailingComma: true, }), - "[\n]", + "[]", ); }); @@ -2072,8 +2152,7 @@ Deno.test(function inspectDeepEmptyArray() { trailingComma: true, }), `{ - arr: [ - ], + arr: [], }`, ); }); @@ -2086,11 +2165,11 @@ Deno.test(function inspectEmptyMap() { compact: false, trailingComma: true, }), - "Map {\n}", + "Map(0) {}", ); }); -Deno.test(function inspectEmptyMap() { +Deno.test(function inspectEmptySet() { const set = new Set(); assertEquals( @@ -2098,11 +2177,11 @@ Deno.test(function inspectEmptyMap() { compact: false, trailingComma: true, }), - "Set {\n}", + "Set(0) {}", ); }); -Deno.test(function inspectEmptyMap() { +Deno.test(function inspectEmptyUint8Array() { const typedArray = new Uint8Array(0); assertEquals( @@ -2110,7 +2189,32 @@ Deno.test(function inspectEmptyMap() { 
compact: false, trailingComma: true, }), - "Uint8Array(0) [\n]", + "Uint8Array(0) []", + ); +}); + +Deno.test(function inspectLargeArrayBuffer() { + const arrayBuffer = new ArrayBuffer(2 ** 32 + 1); + assertEquals( + Deno.inspect(arrayBuffer), + `ArrayBuffer { + [Uint8Contents]: <00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ... 4294967197 more bytes>, + byteLength: 4294967297 +}`, + ); + structuredClone(arrayBuffer, { transfer: [arrayBuffer] }); + assertEquals( + Deno.inspect(arrayBuffer), + "ArrayBuffer { (detached), byteLength: 0 }", + ); + + const sharedArrayBuffer = new SharedArrayBuffer(2 ** 32 + 1); + assertEquals( + Deno.inspect(sharedArrayBuffer), + `SharedArrayBuffer { + [Uint8Contents]: <00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ... 4294967197 more bytes>, + byteLength: 4294967297 +}`, ); }); @@ -2124,12 +2228,12 @@ Deno.test(function inspectStringAbbreviation() { assertEquals( Deno.inspect(obj, { strAbbreviateSize: 10 }), - '{ str: "This is a ..." }', + '{ str: "This is a "... 59 more characters }', ); assertEquals( Deno.inspect(arr, { strAbbreviateSize: 10 }), - '[ "This is a ..." ]', + '[ "This is a "... 
59 more characters ]', ); }); @@ -2156,6 +2260,13 @@ Deno.test(function inspectWithPrototypePollution() { } }); +Deno.test(function inspectPromiseLike() { + assertEquals( + Deno.inspect(Object.create(Promise.prototype)), + "Promise { }", + ); +}); + Deno.test(function inspectorMethods() { console.timeStamp("test"); console.profile("test"); @@ -2192,3 +2303,27 @@ Deno.test(function inspectAnonymousFunctions() { "[AsyncGeneratorFunction (anonymous)]", ); }); + +Deno.test(function inspectBreakLengthOption() { + assertEquals( + Deno.inspect("123456789\n".repeat(3), { breakLength: 34 }), + `"123456789\\n123456789\\n123456789\\n"`, + ); + assertEquals( + Deno.inspect("123456789\n".repeat(3), { breakLength: 33 }), + `"123456789\\n" + + "123456789\\n" + + "123456789\\n"`, + ); +}); + +Deno.test(function inspectEscapeSequencesFalse() { + assertEquals( + Deno.inspect("foo\nbar", { escapeSequences: true }), + '"foo\\nbar"', + ); // default behavior + assertEquals( + Deno.inspect("foo\nbar", { escapeSequences: false }), + '"foo\nbar"', + ); +}); diff --git a/cli/tests/unit/event_target_test.ts b/cli/tests/unit/event_target_test.ts index 49bd354aa2..c7acab364c 100644 --- a/cli/tests/unit/event_target_test.ts +++ b/cli/tests/unit/event_target_test.ts @@ -245,6 +245,20 @@ Deno.test(function eventTargetDispatchShouldSetTargetInListener() { assertEquals(called, true); }); +Deno.test(function eventTargetDispatchShouldFireCurrentListenersOnly() { + const target = new EventTarget(); + const event = new Event("foo"); + let callCount = 0; + target.addEventListener("foo", () => { + ++callCount; + target.addEventListener("foo", () => { + ++callCount; + }); + }); + target.dispatchEvent(event); + assertEquals(callCount, 1); +}); + Deno.test(function eventTargetAddEventListenerGlobalAbort() { return new Promise((resolve) => { const c = new AbortController(); diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index bafb23c2a9..db553f14d4 100644 --- 
a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -10,6 +10,8 @@ import { } from "./test_util.ts"; import { Buffer } from "../../../test_util/std/io/buffer.ts"; +const listenPort = 4504; + Deno.test( { permissions: { net: true } }, async function fetchRequiresOneArgument() { @@ -639,7 +641,7 @@ Deno.test( permissions: { net: true }, }, async function fetchRequest() { - const addr = "127.0.0.1:4501"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const response = await fetch(`http://${addr}/blah`, { method: "POST", @@ -673,7 +675,7 @@ Deno.test( permissions: { net: true }, }, async function fetchRequestAcceptHeaders() { - const addr = "127.0.0.1:4501"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const response = await fetch(`http://${addr}/blah`, { method: "POST", @@ -705,7 +707,7 @@ Deno.test( permissions: { net: true }, }, async function fetchPostBodyString() { - const addr = "127.0.0.1:4511"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const body = "hello world"; const response = await fetch(`http://${addr}/blah`, { @@ -743,7 +745,7 @@ Deno.test( permissions: { net: true }, }, async function fetchPostBodyTypedArray() { - const addr = "127.0.0.1:4503"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const bodyStr = "hello world"; const body = new TextEncoder().encode(bodyStr); @@ -781,7 +783,7 @@ Deno.test( permissions: { net: true }, }, async function fetchUserSetContentLength() { - const addr = "127.0.0.1:4501"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const response = await fetch(`http://${addr}/blah`, { method: "POST", @@ -812,7 +814,7 @@ Deno.test( permissions: { net: true }, }, async function fetchUserSetTransferEncoding() { - const addr = "127.0.0.1:4501"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const response = await 
fetch(`http://${addr}/blah`, { method: "POST", @@ -1158,7 +1160,7 @@ Deno.test( permissions: { net: true }, }, async function fetchPostBodyReadableStream() { - const addr = "127.0.0.1:4511"; + const addr = `127.0.0.1:${listenPort}`; const bufPromise = bufferServer(addr); const stream = new TransformStream(); const writer = stream.writable.getWriter(); @@ -1217,7 +1219,7 @@ Deno.test( async function fetchFilterOutCustomHostHeader(): Promise< void > { - const addr = "127.0.0.1:4511"; + const addr = `127.0.0.1:${listenPort}`; const [hostname, port] = addr.split(":"); const listener = Deno.listen({ hostname, @@ -1495,6 +1497,18 @@ Deno.test( }, ); +Deno.test( + { permissions: { net: true, read: true } }, + async function fetchSupportsHttpsOverIpAddress() { + const caCert = await Deno.readTextFile("cli/tests/testdata/tls/RootCA.pem"); + const client = Deno.createHttpClient({ caCerts: [caCert] }); + const res = await fetch("https://localhost:5546/http_version", { client }); + assert(res.ok); + assertEquals(await res.text(), "HTTP/1.1"); + client.close(); + }, +); + Deno.test( { permissions: { net: true, read: true } }, async function fetchSupportsHttp1Only() { @@ -1519,6 +1533,30 @@ Deno.test( }, ); +Deno.test( + { permissions: { net: true, read: true } }, + async function fetchForceHttp1OnHttp2Server() { + const client = Deno.createHttpClient({ http2: false, http1: true }); + await assertRejects( + () => fetch("http://localhost:5549/http_version", { client }), + TypeError, + ); + client.close(); + }, +); + +Deno.test( + { permissions: { net: true, read: true } }, + async function fetchForceHttp2OnHttp1Server() { + const client = Deno.createHttpClient({ http2: true, http1: false }); + await assertRejects( + () => fetch("http://localhost:5548/http_version", { client }), + TypeError, + ); + client.close(); + }, +); + Deno.test( { permissions: { net: true, read: true } }, async function fetchPrefersHttp2() { @@ -1681,7 +1719,7 @@ Deno.test( async function 
fetchWithInvalidContentLengthAndTransferEncoding(): Promise< void > { - const addr = "127.0.0.1:4516"; + const addr = `127.0.0.1:${listenPort}`; const data = "a".repeat(10 << 10); const body = new TextEncoder().encode( @@ -1713,7 +1751,7 @@ Deno.test( async function fetchWithInvalidContentLength(): Promise< void > { - const addr = "127.0.0.1:4517"; + const addr = `127.0.0.1:${listenPort}`; const data = "a".repeat(10 << 10); const body = new TextEncoder().encode( @@ -1741,7 +1779,7 @@ Deno.test( async function fetchWithInvalidContentLength(): Promise< void > { - const addr = "127.0.0.1:4518"; + const addr = `127.0.0.1:${listenPort}`; const data = "a".repeat(10 << 10); const contentLength = data.length / 2; @@ -1768,7 +1806,7 @@ Deno.test( async function fetchWithInvalidContentLength(): Promise< void > { - const addr = "127.0.0.1:4519"; + const addr = `127.0.0.1:${listenPort}`; const data = "a".repeat(10 << 10); const contentLength = data.length * 2; @@ -1893,3 +1931,19 @@ Deno.test( await server; }, ); + +Deno.test("Request with subarray TypedArray body", async () => { + const body = new Uint8Array([1, 2, 3, 4, 5]).subarray(1); + const req = new Request("https://example.com", { method: "POST", body }); + const actual = new Uint8Array(await req.arrayBuffer()); + const expected = new Uint8Array([2, 3, 4, 5]); + assertEquals(actual, expected); +}); + +Deno.test("Response with subarray TypedArray body", async () => { + const body = new Uint8Array([1, 2, 3, 4, 5]).subarray(1); + const req = new Response(body); + const actual = new Uint8Array(await req.arrayBuffer()); + const expected = new Uint8Array([2, 3, 4, 5]); + assertEquals(actual, expected); +}); diff --git a/cli/tests/unit/http_test.ts b/cli/tests/unit/http_test.ts index f407c9186e..549234986b 100644 --- a/cli/tests/unit/http_test.ts +++ b/cli/tests/unit/http_test.ts @@ -943,7 +943,7 @@ Deno.test( file.close(); let httpConn: Deno.HttpConn; - const listener = Deno.listen({ port: 4503 }); + const listener = 
Deno.listen({ port: 4501 }); const promise = (async () => { const conn = await listener.accept(); httpConn = Deno.serveHttp(conn); @@ -952,7 +952,7 @@ Deno.test( const f = await Deno.open(tmpFile, { read: true }); await respondWith(new Response(f.readable, { status: 200 })); })(); - const resp = await fetch("http://127.0.0.1:4503/"); + const resp = await fetch("http://127.0.0.1:4501/"); const body = await resp.arrayBuffer(); assertEquals(body.byteLength, 70 * 1024); await promise; @@ -2085,6 +2085,7 @@ Deno.test({ "--header", "Accept-Encoding: deflate, gzip", ]; + // deno-lint-ignore no-deprecated-deno-api const proc = Deno.run({ cmd, stdout: "piped", stderr: "null" }); const status = await proc.status(); assert(status.success); @@ -2147,6 +2148,7 @@ Deno.test({ "--header", "Accept-Encoding: deflate, gzip", ]; + // deno-lint-ignore no-deprecated-deno-api const proc = Deno.run({ cmd, stdout: "piped", stderr: "null" }); const status = await proc.status(); assert(status.success); diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 60cf11b8ef..3c5efa5887 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -66,6 +66,7 @@ dbTest("basic read-write-delete and versionstamps", async (db) => { assertEquals(result1.versionstamp, null); const setRes = await db.set(["a"], "b"); + assert(setRes.ok); assertEquals(setRes.versionstamp, "00000000000000010000"); const result2 = await db.get(["a"]); assertEquals(result2.key, ["a"]); @@ -122,6 +123,36 @@ dbTest("set and get recursive object", async (db) => { assert(resultValue.a === resultValue); }); +// invalid values (as per structured clone algorithm with _for storage_, NOT JSON) +const INVALID_VALUE_CASES = [ + { name: "function", value: () => {} }, + { name: "symbol", value: Symbol() }, + { name: "WeakMap", value: new WeakMap() }, + { name: "WeakSet", value: new WeakSet() }, + { + name: "WebAssembly.Module", + value: new WebAssembly.Module( + new Uint8Array([0x00, 0x61, 0x73, 0x6D, 0x01, 
0x00, 0x00, 0x00]), + ), + }, + { + name: "SharedArrayBuffer", + value: new SharedArrayBuffer(3), + }, +]; + +for (const { name, value } of INVALID_VALUE_CASES) { + dbTest(`set and get ${name} value (invalid)`, async (db) => { + await assertRejects( + async () => await db.set(["a"], value), + Error, + ); + const res = await db.get(["a"]); + assertEquals(res.key, ["a"]); + assertEquals(res.value, null); + }); +} + const keys = [ ["a"], ["a", "b"], @@ -183,7 +214,7 @@ dbTest("compare and mutate", async (db) => { .check({ key: ["t"], versionstamp: currentValue.versionstamp }) .set(currentValue.key, "2") .commit(); - assert(res); + assert(res.ok); assertEquals(res.versionstamp, "00000000000000020000"); const newValue = await db.get(["t"]); @@ -194,7 +225,7 @@ dbTest("compare and mutate", async (db) => { .check({ key: ["t"], versionstamp: currentValue.versionstamp }) .set(currentValue.key, "3") .commit(); - assertEquals(res, null); + assert(!res.ok); const newValue2 = await db.get(["t"]); assertEquals(newValue2.versionstamp, "00000000000000020000"); @@ -206,7 +237,7 @@ dbTest("compare and mutate not exists", async (db) => { .check({ key: ["t"], versionstamp: null }) .set(["t"], "1") .commit(); - assert(res); + assert(res.ok); const newValue = await db.get(["t"]); assertEquals(newValue.versionstamp, "00000000000000010000"); @@ -216,7 +247,37 @@ dbTest("compare and mutate not exists", async (db) => { .check({ key: ["t"], versionstamp: null }) .set(["t"], "2") .commit(); - assertEquals(res, null); + assert(!res.ok); +}); + +dbTest("atomic mutation helper (sum)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().sum(["t"], 1n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(43n)); +}); + +dbTest("atomic mutation helper (min)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + 
await db.atomic().min(["t"], 1n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(1n)); + + await db.atomic().min(["t"], 2n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(1n)); +}); + +dbTest("atomic mutation helper (max)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().max(["t"], 41n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().max(["t"], 43n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(43n)); }); dbTest("compare multiple and mutate", async (db) => { @@ -234,7 +295,7 @@ dbTest("compare multiple and mutate", async (db) => { .set(currentValue1.key, "3") .set(currentValue2.key, "4") .commit(); - assert(res); + assert(res.ok); const newValue1 = await db.get(["t1"]); assertEquals(newValue1.versionstamp, "00000000000000030000"); @@ -250,7 +311,7 @@ dbTest("compare multiple and mutate", async (db) => { .set(newValue1.key, "5") .set(newValue2.key, "6") .commit(); - assertEquals(res2, null); + assert(!res2.ok); const newValue3 = await db.get(["t1"]); assertEquals(newValue3.versionstamp, "00000000000000030000"); @@ -266,7 +327,7 @@ dbTest("atomic mutation ordering (set before delete)", async (db) => { .set(["a"], "2") .delete(["a"]) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -277,7 +338,7 @@ dbTest("atomic mutation ordering (delete before set)", async (db) => { .delete(["a"]) .set(["a"], "2") .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "2"); }); @@ -286,7 +347,7 @@ dbTest("atomic mutation type=set", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: "1", type: "set" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "1"); }); @@ -296,7 
+357,7 @@ dbTest("atomic mutation type=set overwrite", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: "2", type: "set" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "2"); }); @@ -306,7 +367,7 @@ dbTest("atomic mutation type=delete", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], type: "delete" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -315,7 +376,7 @@ dbTest("atomic mutation type=delete no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], type: "delete" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -325,7 +386,7 @@ dbTest("atomic mutation type=sum", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(11n)); }); @@ -334,7 +395,7 @@ dbTest("atomic mutation type=sum no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); @@ -345,7 +406,7 @@ dbTest("atomic mutation type=sum wrap around", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(10n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(9n)); @@ -393,7 +454,7 @@ dbTest("atomic mutation type=min", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(5n), type: "min" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); 
assertEquals(result.value, new Deno.KvU64(5n)); @@ -409,7 +470,7 @@ dbTest("atomic mutation type=min no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "min" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); @@ -447,7 +508,7 @@ dbTest("atomic mutation type=max", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(5n), type: "max" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(10n)); @@ -463,7 +524,7 @@ dbTest("atomic mutation type=max no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "max" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); @@ -517,19 +578,31 @@ Deno.test("KvU64 underflow", () => { }, RangeError); }); -Deno.test("KvU64 frozen", () => { - const a = new Deno.KvU64(1n); - assertThrows(() => { - // @ts-expect-error value is readonly - a.value = 2n; - }, TypeError); -}); - Deno.test("KvU64 unbox", () => { const a = new Deno.KvU64(1n); assertEquals(a.value, 1n); }); +Deno.test("KvU64 unbox with valueOf", () => { + const a = new Deno.KvU64(1n); + assertEquals(a.valueOf(), 1n); +}); + +Deno.test("KvU64 auto-unbox", () => { + const a = new Deno.KvU64(1n); + assertEquals(a as unknown as bigint + 1n, 2n); +}); + +Deno.test("KvU64 toString", () => { + const a = new Deno.KvU64(1n); + assertEquals(a.toString(), "1"); +}); + +Deno.test("KvU64 inspect", () => { + const a = new Deno.KvU64(1n); + assertEquals(Deno.inspect(a), "[Deno.KvU64: 1n]"); +}); + async function collect( iter: Deno.KvListIterator, ): Promise[]> { @@ -1183,6 +1256,12 @@ dbTest("keys must be arrays", async (db) => { ); }); +Deno.test("Deno.Kv constructor 
throws", () => { + assertThrows(() => { + new Deno.Kv(); + }); +}); + // This function is never called, it is just used to check that all the types // are behaving as expected. async function _typeCheckingTests() { diff --git a/cli/tests/unit/metrics_test.ts b/cli/tests/unit/metrics_test.ts index df2f1b2be5..5fdfebc85b 100644 --- a/cli/tests/unit/metrics_test.ts +++ b/cli/tests/unit/metrics_test.ts @@ -80,12 +80,14 @@ Deno.test(function metricsForOpCrates() { // Test that op_names == Objects.keys(Deno[Deno.internal].core.ops) // since building the per-op metrics depends on op_names being complete Deno.test(function opNamesMatch() { + // @ts-ignore: Deno[Deno.internal].core allowed + const ops = Object.keys(Deno[Deno.internal].core.ops); + // @ts-ignore: Deno[Deno.internal].core allowed + ops.concat(Object.keys(Deno[Deno.internal].core.asyncOps)); + assertEquals( // @ts-ignore: Deno[Deno.internal].core allowed Deno[Deno.internal].core.opNames().sort(), - // @ts-ignore: Deno[Deno.internal].core allowed - Object.keys(Deno[Deno.internal].core.ops).sort().filter((name) => - name !== "asyncOpsInfo" - ), + ops.sort().filter((name) => name !== "asyncOpsInfo"), ); }); diff --git a/cli/tests/unit/net_test.ts b/cli/tests/unit/net_test.ts index 935a6f846b..32250bbd07 100644 --- a/cli/tests/unit/net_test.ts +++ b/cli/tests/unit/net_test.ts @@ -12,6 +12,10 @@ import { } from "./test_util.ts"; import { join } from "../../../test_util/std/path/mod.ts"; +// Since these tests may run in parallel, ensure this port is unique to this file +const listenPort = 4503; +const listenPort2 = 4504; + let isCI: boolean; try { isCI = Deno.env.get("CI") !== undefined; @@ -20,10 +24,10 @@ try { } Deno.test({ permissions: { net: true } }, function netTcpListenClose() { - const listener = Deno.listen({ hostname: "127.0.0.1", port: 3500 }); + const listener = Deno.listen({ hostname: "127.0.0.1", port: listenPort }); assert(listener.addr.transport === "tcp"); assertEquals(listener.addr.hostname, 
"127.0.0.1"); - assertEquals(listener.addr.port, 3500); + assertEquals(listener.addr.port, listenPort); assertNotEquals(listener.rid, 0); listener.close(); }); @@ -35,12 +39,12 @@ Deno.test( function netUdpListenClose() { const socket = Deno.listenDatagram({ hostname: "127.0.0.1", - port: 3500, + port: listenPort, transport: "udp", }); assert(socket.addr.transport === "udp"); assertEquals(socket.addr.hostname, "127.0.0.1"); - assertEquals(socket.addr.port, 3500); + assertEquals(socket.addr.port, listenPort); socket.close(); }, ); @@ -127,7 +131,7 @@ Deno.test( permissions: { net: true }, }, async function netTcpCloseWhileAccept() { - const listener = Deno.listen({ port: 4501 }); + const listener = Deno.listen({ port: listenPort }); const p = listener.accept(); listener.close(); // TODO(piscisaureus): the error type should be `Interrupted` here, which @@ -212,22 +216,22 @@ Deno.test( ); Deno.test({ permissions: { net: true } }, async function netTcpDialListen() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); listener.accept().then( async (conn) => { assert(conn.remoteAddr != null); assert(conn.localAddr.transport === "tcp"); assertEquals(conn.localAddr.hostname, "127.0.0.1"); - assertEquals(conn.localAddr.port, 3500); + assertEquals(conn.localAddr.port, listenPort); await conn.write(new Uint8Array([1, 2, 3])); conn.close(); }, ); - const conn = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); assert(conn.remoteAddr.transport === "tcp"); assertEquals(conn.remoteAddr.hostname, "127.0.0.1"); - assertEquals(conn.remoteAddr.port, 3500); + assertEquals(conn.remoteAddr.port, listenPort); assert(conn.localAddr != null); const buf = new Uint8Array(1024); const readResult = await conn.read(buf); @@ -247,23 +251,23 @@ Deno.test({ permissions: { net: true } }, async function netTcpDialListen() { }); Deno.test({ permissions: { net: 
true } }, async function netTcpSetNoDelay() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); listener.accept().then( async (conn) => { assert(conn.remoteAddr != null); assert(conn.localAddr.transport === "tcp"); assertEquals(conn.localAddr.hostname, "127.0.0.1"); - assertEquals(conn.localAddr.port, 3500); + assertEquals(conn.localAddr.port, listenPort); await conn.write(new Uint8Array([1, 2, 3])); conn.close(); }, ); - const conn = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); conn.setNoDelay(true); assert(conn.remoteAddr.transport === "tcp"); assertEquals(conn.remoteAddr.hostname, "127.0.0.1"); - assertEquals(conn.remoteAddr.port, 3500); + assertEquals(conn.remoteAddr.port, listenPort); assert(conn.localAddr != null); const buf = new Uint8Array(1024); const readResult = await conn.read(buf); @@ -283,23 +287,23 @@ Deno.test({ permissions: { net: true } }, async function netTcpSetNoDelay() { }); Deno.test({ permissions: { net: true } }, async function netTcpSetKeepAlive() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); listener.accept().then( async (conn) => { assert(conn.remoteAddr != null); assert(conn.localAddr.transport === "tcp"); assertEquals(conn.localAddr.hostname, "127.0.0.1"); - assertEquals(conn.localAddr.port, 3500); + assertEquals(conn.localAddr.port, listenPort); await conn.write(new Uint8Array([1, 2, 3])); conn.close(); }, ); - const conn = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); conn.setKeepAlive(true); assert(conn.remoteAddr.transport === "tcp"); assertEquals(conn.remoteAddr.hostname, "127.0.0.1"); - assertEquals(conn.remoteAddr.port, 3500); + assertEquals(conn.remoteAddr.port, listenPort); assert(conn.localAddr != null); const buf = new 
Uint8Array(1024); const readResult = await conn.read(buf); @@ -360,14 +364,14 @@ Deno.test( Deno.test( { permissions: { net: true } }, async function netUdpSendReceive() { - const alice = Deno.listenDatagram({ port: 3500, transport: "udp" }); + const alice = Deno.listenDatagram({ port: listenPort, transport: "udp" }); assert(alice.addr.transport === "udp"); - assertEquals(alice.addr.port, 3500); + assertEquals(alice.addr.port, listenPort); assertEquals(alice.addr.hostname, "127.0.0.1"); - const bob = Deno.listenDatagram({ port: 4501, transport: "udp" }); + const bob = Deno.listenDatagram({ port: listenPort2, transport: "udp" }); assert(bob.addr.transport === "udp"); - assertEquals(bob.addr.port, 4501); + assertEquals(bob.addr.port, listenPort2); assertEquals(bob.addr.hostname, "127.0.0.1"); const sent = new Uint8Array([1, 2, 3]); @@ -377,7 +381,7 @@ Deno.test( const [recvd, remote] = await bob.receive(); assert(remote.transport === "udp"); - assertEquals(remote.port, 3500); + assertEquals(remote.port, listenPort); assertEquals(recvd.length, 3); assertEquals(1, recvd[0]); assertEquals(2, recvd[1]); @@ -393,18 +397,18 @@ Deno.test( // Must bind sender to an address that can send to the broadcast address on MacOS. // Macos will give us error 49 when sending the broadcast packet if we omit hostname here. 
const alice = Deno.listenDatagram({ - port: 3500, + port: listenPort, transport: "udp", hostname: "0.0.0.0", }); const bob = Deno.listenDatagram({ - port: 4501, + port: listenPort, transport: "udp", hostname: "0.0.0.0", }); assert(bob.addr.transport === "udp"); - assertEquals(bob.addr.port, 4501); + assertEquals(bob.addr.port, listenPort); assertEquals(bob.addr.hostname, "0.0.0.0"); const broadcastAddr = { ...bob.addr, hostname: "255.255.255.255" }; @@ -415,7 +419,7 @@ Deno.test( assertEquals(byteLength, 3); const [recvd, remote] = await bob.receive(); assert(remote.transport === "udp"); - assertEquals(remote.port, 3500); + assertEquals(remote.port, listenPort); assertEquals(recvd.length, 3); assertEquals(1, recvd[0]); assertEquals(2, recvd[1]); @@ -563,9 +567,9 @@ Deno.test( Deno.test( { permissions: { net: true } }, async function netUdpConcurrentSendReceive() { - const socket = Deno.listenDatagram({ port: 3500, transport: "udp" }); + const socket = Deno.listenDatagram({ port: listenPort, transport: "udp" }); assert(socket.addr.transport === "udp"); - assertEquals(socket.addr.port, 3500); + assertEquals(socket.addr.port, listenPort); assertEquals(socket.addr.hostname, "127.0.0.1"); const recvPromise = socket.receive(); @@ -588,7 +592,7 @@ Deno.test( { permissions: { net: true } }, async function netUdpBorrowMutError() { const socket = Deno.listenDatagram({ - port: 4501, + port: listenPort, transport: "udp", }); // Panic happened on second send: BorrowMutError @@ -761,7 +765,7 @@ Deno.test( Deno.test( { permissions: { net: true } }, async function netListenAsyncIterator() { - const addr = { hostname: "127.0.0.1", port: 3500 }; + const addr = { hostname: "127.0.0.1", port: listenPort }; const listener = Deno.listen(addr); const runAsyncIterator = async () => { for await (const conn of listener) { @@ -794,7 +798,7 @@ Deno.test( permissions: { net: true }, }, async function netCloseWriteSuccess() { - const addr = { hostname: "127.0.0.1", port: 3500 }; + const addr = 
{ hostname: "127.0.0.1", port: listenPort }; const listener = Deno.listen(addr); const closeDeferred = deferred(); listener.accept().then(async (conn) => { @@ -850,7 +854,7 @@ Deno.test( } } - const addr = { hostname: "127.0.0.1", port: 3500 }; + const addr = { hostname: "127.0.0.1", port: listenPort }; const listener = Deno.listen(addr); const listenerPromise = iteratorReq(listener); const connectionPromise = (async () => { @@ -898,13 +902,13 @@ Deno.test( Deno.test({ permissions: { net: true } }, async function whatwgStreams() { (async () => { - const listener = Deno.listen({ hostname: "127.0.0.1", port: 3500 }); + const listener = Deno.listen({ hostname: "127.0.0.1", port: listenPort }); const conn = await listener.accept(); await conn.readable.pipeTo(conn.writable); listener.close(); })(); - const conn = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); const reader = conn.readable.getReader(); const writer = conn.writable.getWriter(); const encoder = new TextEncoder(); @@ -957,7 +961,7 @@ Deno.test( async function netListenUnref() { const [statusCode, _output] = await execCode(` async function main() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: ${listenPort} }); listener.unref(); await listener.accept(); // This doesn't block the program from exiting } @@ -972,14 +976,14 @@ Deno.test( async function netListenUnref() { const [statusCode, _output] = await execCode(` async function main() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: ${listenPort} }); await listener.accept(); listener.unref(); await listener.accept(); // The program exits here throw new Error(); // The program doesn't reach here } main(); - const conn = await Deno.connect({ port: 3500 }); + const conn = await Deno.connect({ port: ${listenPort} }); conn.close(); `); assertEquals(statusCode, 0); @@ -991,7 +995,7 @@ Deno.test( 
async function netListenUnrefAndRef() { const p = execCode2(` async function main() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: ${listenPort} }); listener.unref(); listener.ref(); // This restores 'ref' state of listener console.log("started"); @@ -1001,7 +1005,7 @@ Deno.test( main(); `); await p.waitStdoutText("started"); - const conn = await Deno.connect({ port: 3500 }); + const conn = await Deno.connect({ port: listenPort }); conn.close(); const [statusCode, output] = await p.finished(); assertEquals(statusCode, 0); @@ -1013,7 +1017,7 @@ Deno.test( { permissions: { net: true } }, async function netListenUnrefConcurrentAccept() { const timer = setTimeout(() => {}, 1000); - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); listener.accept().catch(() => {}); listener.unref(); // Unref'd listener still causes Busy error @@ -1044,12 +1048,12 @@ Deno.test({ Deno.test( { permissions: { net: true, read: true, run: true } }, async function netConnUnref() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); const intervalId = setInterval(() => {}); // This keeps event loop alive. const program = execCode(` async function main() { - const conn = await Deno.connect({ port: 3500 }); + const conn = await Deno.connect({ port: ${listenPort} }); conn.unref(); await conn.read(new Uint8Array(10)); // The program exits here throw new Error(); // The program doesn't reach here @@ -1068,12 +1072,12 @@ Deno.test( Deno.test( { permissions: { net: true, read: true, run: true } }, async function netConnUnrefReadable() { - const listener = Deno.listen({ port: 3500 }); + const listener = Deno.listen({ port: listenPort }); const intervalId = setInterval(() => {}); // This keeps event loop alive. 
const program = execCode(` async function main() { - const conn = await Deno.connect({ port: 3500 }); + const conn = await Deno.connect({ port: ${listenPort} }); conn.unref(); const reader = conn.readable.getReader(); await reader.read(); // The program exits here @@ -1093,7 +1097,7 @@ Deno.test( Deno.test({ permissions: { net: true } }, async function netTcpReuseAddr() { const listener1 = Deno.listen({ hostname: "127.0.0.1", - port: 3500, + port: listenPort, }); listener1.accept().then( (conn) => { @@ -1101,7 +1105,7 @@ Deno.test({ permissions: { net: true } }, async function netTcpReuseAddr() { }, ); - const conn1 = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn1 = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); const buf1 = new Uint8Array(1024); await conn1.read(buf1); listener1.close(); @@ -1109,7 +1113,7 @@ Deno.test({ permissions: { net: true } }, async function netTcpReuseAddr() { const listener2 = Deno.listen({ hostname: "127.0.0.1", - port: 3500, + port: listenPort, }); listener2.accept().then( @@ -1118,7 +1122,7 @@ Deno.test({ permissions: { net: true } }, async function netTcpReuseAddr() { }, ); - const conn2 = await Deno.connect({ hostname: "127.0.0.1", port: 3500 }); + const conn2 = await Deno.connect({ hostname: "127.0.0.1", port: listenPort }); const buf2 = new Uint8Array(1024); await conn2.read(buf2); diff --git a/cli/tests/unit/opcall_test.ts b/cli/tests/unit/opcall_test.ts index 8985c97801..3b37f8c097 100644 --- a/cli/tests/unit/opcall_test.ts +++ b/cli/tests/unit/opcall_test.ts @@ -1,20 +1,18 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+import { assertEquals } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; import { assert, assertStringIncludes, unreachable } from "./test_util.ts"; Deno.test(async function sendAsyncStackTrace() { - const buf = new Uint8Array(10); - const rid = 10; try { - await Deno.read(rid, buf); + await core.ops.op_error_async(); unreachable(); } catch (error) { assert(error instanceof Error); const s = error.stack?.toString(); assert(s); - console.log(s); assertStringIncludes(s, "opcall_test.ts"); - assertStringIncludes(s, "read"); + assertStringIncludes(s, "sendAsyncStackTrace"); assert( !s.includes("ext:core"), "opcall stack traces should NOT include ext:core internals such as unwrapOpResult", @@ -22,6 +20,31 @@ Deno.test(async function sendAsyncStackTrace() { } }); +Deno.test(async function sendAsyncStackTraceDeferred() { + try { + await core.ops.op_error_async_deferred(); + unreachable(); + } catch (error) { + assert(error instanceof Error); + const s = error.stack?.toString(); + assert(s); + assertStringIncludes(s, "opcall_test.ts"); + assertStringIncludes(s, "sendAsyncStackTraceDeferred"); + assert( + !s.includes("ext:core"), + "opcall stack traces should NOT include ext:core internals such as unwrapOpResult", + ); + } +}); + +Deno.test(function syncAdd() { + assertEquals(30, core.ops.op_add(10, 20)); +}); + +Deno.test(async function asyncAdd() { + assertEquals(30, await core.ops.op_add_async(10, 20)); +}); + // @ts-ignore This is not publicly typed namespace, but it's there for sure. 
const core = Deno[Deno.internal].core; diff --git a/cli/tests/unit/process_test.ts b/cli/tests/unit/process_test.ts index e6c4bfe595..54ebb07b22 100644 --- a/cli/tests/unit/process_test.ts +++ b/cli/tests/unit/process_test.ts @@ -11,6 +11,7 @@ Deno.test( { permissions: { read: true, run: false } }, function runPermissions() { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], }); @@ -21,6 +22,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runSuccess() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ // freeze the array to ensure it's not modified cmd: Object.freeze([ @@ -43,6 +45,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runUrl() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ new URL(`file:///${Deno.execPath()}`), @@ -66,6 +69,7 @@ Deno.test( async function runStdinRid0(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], stdin: 0, @@ -85,6 +89,7 @@ Deno.test( { permissions: { run: true, read: true } }, function runInvalidStdio() { assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stdin. @@ -92,6 +97,7 @@ Deno.test( }) ); assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stdout. @@ -99,6 +105,7 @@ Deno.test( }) ); assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stderr. 
@@ -111,6 +118,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runCommandFailedWithCode() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "Deno.exit(41 + 1)"], }); @@ -127,6 +135,7 @@ Deno.test( permissions: { run: true, read: true }, }, async function runCommandFailedWithSignal() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -150,6 +159,7 @@ Deno.test( Deno.test({ permissions: { run: true } }, function runNotFound() { let error; try { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: ["this file hopefully doesn't exist"] }); } catch (e) { error = e; @@ -181,6 +191,7 @@ tryExit(); `; Deno.writeFileSync(`${cwd}/${programFile}`, enc.encode(program)); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cwd, cmd: [Deno.execPath(), "run", "--allow-read", programFile], @@ -204,6 +215,7 @@ Deno.test( async function runStdinPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -235,6 +247,7 @@ Deno.test( async function runStdoutPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -271,6 +284,7 @@ Deno.test( async function runStderrPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -305,6 +319,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runOutput() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -325,6 +340,7 @@ Deno.test( async function runStderrOutput(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -350,6 +366,7 @@ Deno.test( write: true, }); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -382,6 +399,7 @@ Deno.test( 
await Deno.writeFile(fileName, encoder.encode("hello")); const file = await Deno.open(fileName); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -401,6 +419,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runEnv() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -423,6 +442,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runClose() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -446,6 +466,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runKillAfterStatus() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", 'console.log("hello")'], }); @@ -502,6 +523,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function killSuccess() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], }); @@ -525,6 +547,7 @@ Deno.test( ); Deno.test({ permissions: { run: true, read: true } }, function killFailed() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], }); @@ -542,6 +565,7 @@ Deno.test({ permissions: { run: true, read: true } }, function killFailed() { Deno.test( { permissions: { run: true, read: true, env: true } }, async function clearEnv(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -574,6 +598,7 @@ Deno.test( ignore: Deno.build.os === "windows", }, async function uid(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ "id", @@ -587,6 +612,7 @@ Deno.test( if (currentUid !== "0") { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [ "echo", @@ -605,6 +631,7 @@ Deno.test( ignore: 
Deno.build.os === "windows", }, async function gid(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ "id", @@ -618,6 +645,7 @@ Deno.test( if (currentGid !== "0") { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [ "echo", @@ -636,6 +664,7 @@ Deno.test( ignore: Deno.build.os === "windows", }, async function non_existent_cwd(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), diff --git a/cli/tests/unit/read_text_file_test.ts b/cli/tests/unit/read_text_file_test.ts index c40cb83e39..21b13c9281 100644 --- a/cli/tests/unit/read_text_file_test.ts +++ b/cli/tests/unit/read_text_file_test.ts @@ -164,7 +164,13 @@ Deno.test( const bytes = new Uint8Array(kStringMaxLengthPlusOne); const filePath = "cli/tests/testdata/too_big_a_file.txt"; - Deno.writeFileSync(filePath, bytes); + try { + Deno.writeFileSync(filePath, bytes); + } catch { + // NOTE(bartlomieju): writing a 0.5Gb file might be too much for CI, + // so skip running if writing fails. + return; + } assertThrows( () => { @@ -185,7 +191,13 @@ Deno.test( const bytes = new Uint8Array(kStringMaxLengthPlusOne); const filePath = "cli/tests/testdata/too_big_a_file_2.txt"; - await Deno.writeFile(filePath, bytes); + try { + await Deno.writeFile(filePath, bytes); + } catch { + // NOTE(bartlomieju): writing a 0.5Gb file might be too much for CI, + // so skip running if writing fails. + return; + } await assertRejects( async () => { diff --git a/cli/tests/unit/resources_test.ts b/cli/tests/unit/resources_test.ts index 2d1f2fd75b..4a55f05a70 100644 --- a/cli/tests/unit/resources_test.ts +++ b/cli/tests/unit/resources_test.ts @@ -1,6 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
import { assert, assertEquals, assertThrows } from "./test_util.ts"; +const listenPort = 4505; + Deno.test(function resourcesCloseBadArgs() { assertThrows(() => { Deno.close((null as unknown) as number); @@ -16,8 +18,8 @@ Deno.test(function resourcesStdio() { }); Deno.test({ permissions: { net: true } }, async function resourcesNet() { - const listener = Deno.listen({ port: 4501 }); - const dialerConn = await Deno.connect({ port: 4501 }); + const listener = Deno.listen({ port: listenPort }); + const dialerConn = await Deno.connect({ port: listenPort }); const listenerConn = await listener.accept(); const res = Deno.resources(); diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 32d436d04f..24ae7f6664 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -1,20 +1,30 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -// deno-lint-ignore-file - +import { assertMatch } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; import { Buffer, BufReader, BufWriter } from "../../../test_util/std/io/mod.ts"; import { TextProtoReader } from "../testdata/run/textproto.ts"; import { assert, assertEquals, - assertRejects, assertStringIncludes, assertThrows, Deferred, deferred, + execCode, fail, } from "./test_util.ts"; +// Since these tests may run in parallel, ensure this port is unique to this file +const servePort = 4502; + +const { + upgradeHttpRaw, + addTrailers, + serveHttpOnListener, + serveHttpOnConnection, + // @ts-expect-error TypeScript (as of 3.7) does not support indexing namespaces by symbol +} = Deno[Deno.internal]; + function createOnErrorCb(ac: AbortController): (err: unknown) => Response { return (err) => { console.error(err); @@ -31,6 +41,42 @@ function onListen( }; } +Deno.test(async function httpServerShutsDownPortBeforeResolving() { + const ac = new AbortController(); + const listeningPromise = deferred(); + + const server = Deno.serve({ + handler: (_req) => new 
Response("ok"), + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + }); + + await listeningPromise; + assertThrows(() => Deno.listen({ port: servePort })); + + ac.abort(); + await server.finished; + + const listener = Deno.listen({ port: servePort }); + listener!.close(); +}); + +Deno.test( + { permissions: { read: true, run: true } }, + async function httpServerUnref() { + const [statusCode, _output] = await execCode(` + async function main() { + const server = Deno.serve({ port: 4501, handler: () => null }); + server.unref(); + await server.finished; // This doesn't block the program from exiting + } + main(); + `); + assertEquals(statusCode, 0); + }, +); + Deno.test(async function httpServerCanResolveHostnames() { const ac = new AbortController(); const listeningPromise = deferred(); @@ -38,14 +84,14 @@ Deno.test(async function httpServerCanResolveHostnames() { const server = Deno.serve({ handler: (_req) => new Response("ok"), hostname: "localhost", - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://localhost:4501/", { + const resp = await fetch(`http://localhost:${servePort}/`, { headers: { "connection": "close" }, }); const text = await resp.text(); @@ -61,18 +107,19 @@ Deno.test(async function httpServerRejectsOnAddrInUse() { const server = Deno.serve({ handler: (_req) => new Response("ok"), hostname: "localhost", - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); + await listeningPromise; - assertRejects( + assertThrows( () => Deno.serve({ handler: (_req) => new Response("ok"), hostname: "localhost", - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -93,20 +140,20 @@ Deno.test({ permissions: { net: true } }, async function httpServerBasic() { // 
FIXME(bartlomieju): // make sure that request can be inspected console.log(request); - assertEquals(new URL(request.url).href, "http://127.0.0.1:4501/"); + assertEquals(new URL(request.url).href, `http://127.0.0.1:${servePort}/`); assertEquals(await request.text(), ""); assertEquals(remoteAddr.hostname, "127.0.0.1"); promise.resolve(); return new Response("Hello World", { headers: { "foo": "bar" } }); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { headers: { "connection": "close" }, }); await promise; @@ -120,13 +167,171 @@ Deno.test({ permissions: { net: true } }, async function httpServerBasic() { await server; }); +// Test serving of HTTP on an arbitrary listener. +Deno.test( + { permissions: { net: true } }, + async function httpServerOnListener() { + const ac = new AbortController(); + const promise = deferred(); + const listeningPromise = deferred(); + const listener = Deno.listen({ port: servePort }); + const server = serveHttpOnListener( + listener, + ac.signal, + async ( + request: Request, + { remoteAddr }: { remoteAddr: { hostname: string } }, + ) => { + assertEquals( + new URL(request.url).href, + `http://127.0.0.1:${servePort}/`, + ); + assertEquals(await request.text(), ""); + assertEquals(remoteAddr.hostname, "127.0.0.1"); + promise.resolve(); + return new Response("Hello World", { headers: { "foo": "bar" } }); + }, + createOnErrorCb(ac), + onListen(listeningPromise), + ); + + await listeningPromise; + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { + headers: { "connection": "close" }, + }); + await promise; + const clone = resp.clone(); + const text = await resp.text(); + assertEquals(text, "Hello World"); + assertEquals(resp.headers.get("foo"), "bar"); + const cloneText = await clone.text(); + assertEquals(cloneText, 
"Hello World"); + ac.abort(); + await server; + }, +); + +// Test serving of HTTP on an arbitrary connection. +Deno.test( + { permissions: { net: true } }, + async function httpServerOnConnection() { + const ac = new AbortController(); + const promise = deferred(); + const listeningPromise = deferred(); + const listener = Deno.listen({ port: servePort }); + const acceptPromise = listener.accept(); + const fetchPromise = fetch(`http://127.0.0.1:${servePort}/`, { + headers: { "connection": "close" }, + }); + + const server = serveHttpOnConnection( + await acceptPromise, + ac.signal, + async ( + request: Request, + { remoteAddr }: { remoteAddr: { hostname: string } }, + ) => { + assertEquals( + new URL(request.url).href, + `http://127.0.0.1:${servePort}/`, + ); + assertEquals(await request.text(), ""); + assertEquals(remoteAddr.hostname, "127.0.0.1"); + promise.resolve(); + return new Response("Hello World", { headers: { "foo": "bar" } }); + }, + createOnErrorCb(ac), + onListen(listeningPromise), + ); + + const resp = await fetchPromise; + await promise; + const clone = resp.clone(); + const text = await resp.text(); + assertEquals(text, "Hello World"); + assertEquals(resp.headers.get("foo"), "bar"); + const cloneText = await clone.text(); + assertEquals(cloneText, "Hello World"); + // Note that we don't need to abort this server -- it closes when the connection does + // ac.abort(); + await server; + listener.close(); + }, +); + +Deno.test({ permissions: { net: true } }, async function httpServerOnError() { + const ac = new AbortController(); + const listeningPromise = deferred(); + let requestStash: Request | null; + + const server = Deno.serve({ + handler: async (request: Request) => { + requestStash = request; + await new Promise((r) => setTimeout(r, 100)); + throw "fail"; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: () => { + return new Response("failed: " + requestStash!.url, { status: 500 }); + }, + }); + + 
await listeningPromise; + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { + headers: { "connection": "close" }, + }); + const text = await resp.text(); + ac.abort(); + await server; + + assertEquals(text, `failed: http://127.0.0.1:${servePort}/`); +}); + +Deno.test( + { permissions: { net: true } }, + async function httpServerOnErrorFails() { + const ac = new AbortController(); + const listeningPromise = deferred(); + // NOTE(bartlomieju): deno lint doesn't know that it's actually used later, + // but TypeScript can't see that either ¯\_(ツ)_/¯ + // deno-lint-ignore no-unused-vars + let requestStash: Request | null; + + const server = Deno.serve({ + handler: async (request: Request) => { + requestStash = request; + await new Promise((r) => setTimeout(r, 100)); + throw "fail"; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: () => { + throw "again"; + }, + }); + + await listeningPromise; + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { + headers: { "connection": "close" }, + }); + const text = await resp.text(); + ac.abort(); + await server; + + assertEquals(text, "Internal Server Error"); + }, +); + Deno.test({ permissions: { net: true } }, async function httpServerOverload1() { const ac = new AbortController(); const promise = deferred(); const listeningPromise = deferred(); const server = Deno.serve({ - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -134,14 +339,14 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload1() { // FIXME(bartlomieju): // make sure that request can be inspected console.log(request); - assertEquals(new URL(request.url).href, "http://127.0.0.1:4501/"); + assertEquals(new URL(request.url).href, `http://127.0.0.1:${servePort}/`); assertEquals(await request.text(), ""); promise.resolve(); return new Response("Hello World", { headers: { "foo": "bar" } }); }); await 
listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { headers: { "connection": "close" }, }); await promise; @@ -160,23 +365,23 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload2() { const promise = deferred(); const listeningPromise = deferred(); - const server = Deno.serve(async (request) => { - // FIXME(bartlomieju): - // make sure that request can be inspected - console.log(request); - assertEquals(new URL(request.url).href, "http://127.0.0.1:4501/"); - assertEquals(await request.text(), ""); - promise.resolve(); - return new Response("Hello World", { headers: { "foo": "bar" } }); - }, { - port: 4501, + const server = Deno.serve({ + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), + }, async (request) => { + // FIXME(bartlomieju): + // make sure that request can be inspected + console.log(request); + assertEquals(new URL(request.url).href, `http://127.0.0.1:${servePort}/`); + assertEquals(await request.text(), ""); + promise.resolve(); + return new Response("Hello World", { headers: { "foo": "bar" } }); }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { headers: { "connection": "close" }, }); await promise; @@ -192,18 +397,18 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload2() { Deno.test( { permissions: { net: true } }, - async function httpServerErrorOverloadMissingHandler() { + function httpServerErrorOverloadMissingHandler() { // @ts-ignore - testing invalid overload - await assertRejects(() => Deno.serve(), TypeError, "handler"); + assertThrows(() => Deno.serve(), TypeError, "handler"); // @ts-ignore - testing invalid overload - await assertRejects(() => Deno.serve({}), TypeError, "handler"); - await assertRejects( + assertThrows(() => Deno.serve({}), TypeError, 
"handler"); + assertThrows( // @ts-ignore - testing invalid overload () => Deno.serve({ handler: undefined }), TypeError, "handler", ); - await assertRejects( + assertThrows( // @ts-ignore - testing invalid overload () => Deno.serve(undefined, { handler: () => {} }), TypeError, @@ -238,7 +443,7 @@ Deno.test( console.log = (msg) => { try { const match = msg.match(/Listening on http:\/\/localhost:(\d+)\//); - assert(!!match); + assert(!!match, `Didn't match ${msg}`); const port = +match[1]; assert(port > 0 && port < 65536); } finally { @@ -279,14 +484,14 @@ Deno.test( promise.resolve(); return new Response(""); }, - port: 2333, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 2333 }); + const conn = await Deno.connect({ port: servePort }); // Send GET request with a body + content-length. const encoder = new TextEncoder(); const body = @@ -301,6 +506,109 @@ Deno.test( }, ); +function createUrlTest( + name: string, + methodAndPath: string, + host: string | null, + expected: string, +) { + Deno.test(`httpServerUrl${name}`, async () => { + const listeningPromise: Deferred = deferred(); + const urlPromise = deferred(); + const ac = new AbortController(); + const server = Deno.serve({ + handler: (request: Request) => { + urlPromise.resolve(request.url); + return new Response(""); + }, + port: 0, + signal: ac.signal, + onListen: ({ port }: { port: number }) => { + listeningPromise.resolve(port); + }, + onError: createOnErrorCb(ac), + }); + + const port = await listeningPromise; + const conn = await Deno.connect({ port }); + + const encoder = new TextEncoder(); + const body = `${methodAndPath} HTTP/1.1\r\n${ + host ? 
("Host: " + host + "\r\n") : "" + }Content-Length: 5\r\n\r\n12345`; + const writeResult = await conn.write(encoder.encode(body)); + assertEquals(body.length, writeResult); + + try { + const expectedResult = expected.replace("HOST", "localhost").replace( + "PORT", + `${port}`, + ); + assertEquals(await urlPromise, expectedResult); + } finally { + ac.abort(); + await server; + conn.close(); + } + }); +} + +createUrlTest("WithPath", "GET /path", null, "http://HOST:PORT/path"); +createUrlTest( + "WithPathAndHost", + "GET /path", + "deno.land", + "http://deno.land/path", +); +createUrlTest( + "WithAbsolutePath", + "GET http://localhost/path", + null, + "http://localhost/path", +); +createUrlTest( + "WithAbsolutePathAndHost", + "GET http://localhost/path", + "deno.land", + "http://localhost/path", +); +createUrlTest( + "WithPortAbsolutePath", + "GET http://localhost:1234/path", + null, + "http://localhost:1234/path", +); +createUrlTest( + "WithPortAbsolutePathAndHost", + "GET http://localhost:1234/path", + "deno.land", + "http://localhost:1234/path", +); +createUrlTest( + "WithPortAbsolutePathAndHostWithPort", + "GET http://localhost:1234/path", + "deno.land:9999", + "http://localhost:1234/path", +); + +createUrlTest("WithAsterisk", "OPTIONS *", null, "*"); +createUrlTest( + "WithAuthorityForm", + "CONNECT deno.land:80", + null, + "deno.land:80", +); + +// TODO(mmastrac): These should probably be 400 errors +createUrlTest("WithInvalidAsterisk", "GET *", null, "*"); +createUrlTest("WithInvalidNakedPath", "GET path", null, "path"); +createUrlTest( + "WithInvalidNakedAuthority", + "GET deno.land:1234", + null, + "deno.land:1234", +); + Deno.test( { permissions: { net: true } }, async function httpServerGetRequestBody() { @@ -314,18 +622,18 @@ Deno.test( promise.resolve(); return new Response("", { headers: {} }); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn 
= await Deno.connect({ port: 4501 }); + const conn = await Deno.connect({ port: servePort }); // Send GET request with a body + content-length. const encoder = new TextEncoder(); const body = - `GET / HTTP/1.1\r\nHost: 127.0.0.1:4501\r\nContent-Length: 5\r\n\r\n12345`; + `GET / HTTP/1.1\r\nHost: 127.0.0.1:${servePort}\r\nContent-Length: 5\r\n\r\n12345`; const writeResult = await conn.write(encoder.encode(body)); assertEquals(body.length, writeResult); @@ -341,36 +649,80 @@ Deno.test( }, ); -Deno.test( - { permissions: { net: true } }, - async function httpServerStreamResponse() { - const stream = new TransformStream(); - const writer = stream.writable.getWriter(); - writer.write(new TextEncoder().encode("hello ")); - writer.write(new TextEncoder().encode("world")); - writer.close(); +function createStreamTest(count: number, delay: number, action: string) { + function doAction(controller: ReadableStreamDefaultController, i: number) { + if (i == count) { + if (action == "Throw") { + controller.error(new Error("Expected error!")); + } else { + controller.close(); + } + } else { + controller.enqueue(`a${i}`); - const listeningPromise = deferred(); - const ac = new AbortController(); - const server = Deno.serve({ - handler: (request) => { - assert(!request.body); - return new Response(stream.readable); + if (delay == 0) { + doAction(controller, i + 1); + } else { + setTimeout(() => doAction(controller, i + 1), delay); + } + } + } + + function makeStream(_count: number, delay: number): ReadableStream { + return new ReadableStream({ + start(controller) { + if (delay == 0) { + doAction(controller, 0); + } else { + setTimeout(() => doAction(controller, 0), delay); + } }, - port: 4501, + }).pipeThrough(new TextEncoderStream()); + } + + Deno.test(`httpServerStreamCount${count}Delay${delay}${action}`, async () => { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: (_request) => { + return new 
Response(makeStream(count, delay)); + }, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/"); - const respBody = await resp.text(); - assertEquals("hello world", respBody); + const resp = await fetch(`http://127.0.0.1:${servePort}/`); + const text = await resp.text(); + ac.abort(); await server; - }, -); + let expected = ""; + if (action == "Throw" && count < 2 && delay < 1000) { + // NOTE: This is specific to the current implementation. In some cases where a stream errors, we + // don't send the first packet. + expected = ""; + } else { + for (let i = 0; i < count; i++) { + expected += `a${i}`; + } + } + + assertEquals(text, expected); + }); +} + +for (const count of [0, 1, 2, 3]) { + for (const delay of [0, 1, 1000]) { + // Creating a stream that errors in start will throw + if (delay > 0) { + createStreamTest(count, delay, "Throw"); + } + createStreamTest(count, delay, "Close"); + } +} Deno.test( { permissions: { net: true } }, @@ -388,14 +740,14 @@ Deno.test( assertEquals("hello world", reqBody); return new Response("yo"); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { body: stream.readable, method: "POST", headers: { "connection": "close" }, @@ -412,18 +764,50 @@ Deno.test({ permissions: { net: true } }, async function httpServerClose() { const listeningPromise = deferred(); const server = Deno.serve({ handler: () => new Response("ok"), - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const client = await Deno.connect({ port: 4501 }); + const client = await Deno.connect({ port: servePort }); client.close(); 
ac.abort(); await server; }); +// https://github.com/denoland/deno/issues/15427 +Deno.test({ permissions: { net: true } }, async function httpServerCloseGet() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const requestPromise = deferred(); + const responsePromise = deferred(); + const server = Deno.serve({ + handler: async () => { + requestPromise.resolve(); + await new Promise((r) => setTimeout(r, 500)); + responsePromise.resolve(); + return new Response("ok"); + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + await listeningPromise; + const conn = await Deno.connect({ port: servePort }); + const encoder = new TextEncoder(); + const body = + `GET / HTTP/1.1\r\nHost: example.domain\r\nConnection: close\r\n\r\n`; + const writeResult = await conn.write(encoder.encode(body)); + assertEquals(body.length, writeResult); + await requestPromise; + conn.close(); + await responsePromise; + ac.abort(); + await server; +}); + // FIXME: Deno.test( { permissions: { net: true } }, @@ -432,14 +816,14 @@ Deno.test( const listeningPromise = deferred(); const server = Deno.serve({ handler: () => new Response(new Blob([])), - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/"); + const resp = await fetch(`http://127.0.0.1:${servePort}/`); const respBody = await resp.text(); assertEquals("", respBody); @@ -467,7 +851,7 @@ Deno.test( }); return new Response(body); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: (err) => { @@ -481,7 +865,7 @@ Deno.test( }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/"); + const resp = await fetch(`http://127.0.0.1:${servePort}/`); // Incorrectly implemented reader ReadableStream should reject. 
assertStringIncludes(await resp.text(), "Failed to execute 'enqueue'"); await errorPromise; @@ -498,14 +882,14 @@ Deno.test( const server = Deno.serve({ handler: () => new Response("韓國".repeat(10)), - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -531,19 +915,22 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { const ac = new AbortController(); const listeningPromise = deferred(); const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { const { response, socket, } = Deno.upgradeWebSocket(request); - socket.onerror = () => fail(); + socket.onerror = (e) => { + console.error(e); + fail(); + }; socket.onmessage = (m) => { socket.send(m.data); socket.close(1001); }; return response; }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -551,9 +938,12 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { await listeningPromise; const def = deferred(); - const ws = new WebSocket("ws://localhost:4501"); + const ws = new WebSocket(`ws://localhost:${servePort}`); ws.onmessage = (m) => assertEquals(m.data, "foo"); - ws.onerror = () => fail(); + ws.onerror = (e) => { + console.error(e); + fail(); + }; ws.onclose = () => def.resolve(); ws.onopen = () => ws.send("foo"); @@ -562,6 +952,216 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { await server; }); +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketRaw() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (request) => { + const { conn, response } = 
upgradeHttpRaw(request); + const buf = new Uint8Array(1024); + let read; + + // Write our fake HTTP upgrade + await conn.write( + new TextEncoder().encode( + "HTTP/1.1 101 Switching Protocols\r\nConnection: Upgraded\r\n\r\nExtra", + ), + ); + + // Upgrade data + read = await conn.read(buf); + assertEquals( + new TextDecoder().decode(buf.subarray(0, read!)), + "Upgrade data", + ); + // Read the packet to echo + read = await conn.read(buf); + // Echo + await conn.write(buf.subarray(0, read!)); + + conn.close(); + return response; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + + const conn = await Deno.connect({ port: servePort }); + await conn.write( + new TextEncoder().encode( + "GET / HTTP/1.1\r\nConnection: Upgrade\r\nUpgrade: websocket\r\n\r\nUpgrade data", + ), + ); + const buf = new Uint8Array(1024); + let len; + + // Headers + let headers = ""; + for (let i = 0; i < 2; i++) { + len = await conn.read(buf); + headers += new TextDecoder().decode(buf.subarray(0, len!)); + if (headers.endsWith("Extra")) { + break; + } + } + assertMatch( + headers, + /HTTP\/1\.1 101 Switching Protocols[ ,.A-Za-z:0-9\r\n]*Extra/im, + ); + + // Data to echo + await conn.write(new TextEncoder().encode("buffer data")); + + // Echo + len = await conn.read(buf); + assertEquals( + new TextDecoder().decode(buf.subarray(0, len!)), + "buffer data", + ); + + conn.close(); + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketUpgradeTwice() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + assertThrows( + () => { + Deno.upgradeWebSocket(request); + }, + Deno.errors.Http, + "already upgraded", + ); + socket.onerror = (e) => { + console.error(e); + fail(); + }; + 
socket.onmessage = (m) => { + socket.send(m.data); + socket.close(1001); + }; + return response; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new WebSocket(`ws://localhost:${servePort}`); + ws.onmessage = (m) => assertEquals(m.data, "foo"); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + ws.onopen = () => ws.send("foo"); + + await def; + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketCloseFast() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + socket.onopen = () => socket.close(); + return response; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new WebSocket(`ws://localhost:${servePort}`); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + + await def; + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketCanAccessRequest() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + socket.onerror = (e) => { + console.error(e); + fail(); + }; + socket.onmessage = (_m) => { + socket.send(request.url.toString()); + socket.close(1001); + }; + return response; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new 
WebSocket(`ws://localhost:${servePort}`); + ws.onmessage = (m) => + assertEquals(m.data, `http://localhost:${servePort}/`); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + ws.onopen = () => ws.send("foo"); + + await def; + ac.abort(); + await server; + }, +); + Deno.test( { permissions: { net: true } }, async function httpVeryLargeRequest() { @@ -571,19 +1171,19 @@ Deno.test( let headers: Headers; const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { headers = request.headers; promise.resolve(); return new Response(""); }, - port: 2333, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 2333 }); + const conn = await Deno.connect({ port: servePort }); // Send GET request with a body + content-length. const encoder = new TextEncoder(); const smthElse = "x".repeat(16 * 1024 + 256); @@ -616,14 +1216,14 @@ Deno.test( promise.resolve(); return new Response(""); }, - port: 2333, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 2333 }); + const conn = await Deno.connect({ port: servePort }); // Send GET request with a body + content-length. const encoder = new TextEncoder(); const smthElse = "x".repeat(16 * 1024 + 256); @@ -659,14 +1259,14 @@ Deno.test( promise.resolve(); return new Response(""); }, - port: 2333, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 2333 }); + const conn = await Deno.connect({ port: servePort }); // Send GET request with a body + connection: close. 
const encoder = new TextEncoder(); const body = @@ -682,47 +1282,49 @@ Deno.test( }, ); -// FIXME: auto request body reading is intefering with passing it as response. -// Deno.test( -// { permissions: { net: true } }, -// async function httpServerStreamDuplex() { -// const promise = deferred(); -// const ac = new AbortController(); +Deno.test( + { permissions: { net: true } }, + async function httpServerStreamDuplex() { + const promise = deferred(); + const ac = new AbortController(); -// const server = Deno.serve(request => { -// assert(request.body); + const server = Deno.serve( + { port: servePort, signal: ac.signal }, + (request) => { + assert(request.body); -// promise.resolve(); -// return new Response(request.body); -// }, { port: 2333, signal: ac.signal }); + promise.resolve(); + return new Response(request.body); + }, + ); -// const ts = new TransformStream(); -// const writable = ts.writable.getWriter(); + const ts = new TransformStream(); + const writable = ts.writable.getWriter(); -// const resp = await fetch("http://127.0.0.1:2333/", { -// method: "POST", -// body: ts.readable, -// }); + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { + method: "POST", + body: ts.readable, + }); -// await promise; -// assert(resp.body); -// const reader = resp.body.getReader(); -// await writable.write(new Uint8Array([1])); -// const chunk1 = await reader.read(); -// assert(!chunk1.done); -// assertEquals(chunk1.value, new Uint8Array([1])); -// await writable.write(new Uint8Array([2])); -// const chunk2 = await reader.read(); -// assert(!chunk2.done); -// assertEquals(chunk2.value, new Uint8Array([2])); -// await writable.close(); -// const chunk3 = await reader.read(); -// assert(chunk3.done); + await promise; + assert(resp.body); + const reader = resp.body.getReader(); + await writable.write(new Uint8Array([1])); + const chunk1 = await reader.read(); + assert(!chunk1.done); + assertEquals(chunk1.value, new Uint8Array([1])); + await writable.write(new 
Uint8Array([2])); + const chunk2 = await reader.read(); + assert(!chunk2.done); + assertEquals(chunk2.value, new Uint8Array([2])); + await writable.close(); + const chunk3 = await reader.read(); + assert(chunk3.done); -// ac.abort(); -// await server; -// }, -// ); + ac.abort(); + await server; + }, +); Deno.test( { permissions: { net: true } }, @@ -751,7 +1353,7 @@ Deno.test( const w = new BufWriter(conn); const r = new BufReader(conn); - const body = `GET / HTTP/1.1\r\nHost: 127.0.0.1:4501\r\n\r\n`; + const body = `GET / HTTP/1.1\r\nHost: 127.0.0.1:${servePort}\r\n\r\n`; const writeResult = await w.write(encoder.encode(body)); assertEquals(body.length, writeResult); await w.flush(); @@ -809,7 +1411,7 @@ Deno.test( promise.resolve(); return new Response(periodicStream()); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -817,7 +1419,7 @@ Deno.test( await listeningPromise; // start a client - const clientConn = await Deno.connect({ port: 4501 }); + const clientConn = await Deno.connect({ port: servePort }); const r1 = await writeRequest(clientConn); assertEquals(r1, "0\n1\n2\n"); @@ -841,16 +1443,16 @@ Deno.test( promise.resolve(); return new Response("hello", { headers: { "X-Header-Test": "Æ" } }); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const clientConn = await Deno.connect({ port: 4501 }); + const clientConn = await Deno.connect({ port: servePort }); const requestText = - "GET / HTTP/1.1\r\nHost: 127.0.0.1:4501\r\nX-Header-Test: á\r\n\r\n"; + `GET / HTTP/1.1\r\nHost: 127.0.0.1:${servePort}\r\nX-Header-Test: á\r\n\r\n`; const requestBytes = new Uint8Array(requestText.length); for (let i = 0; i < requestText.length; i++) { requestBytes[i] = requestText.charCodeAt(i); @@ -864,13 +1466,13 @@ Deno.test( await clientConn.read(buf); await promise; - let responseText = new 
TextDecoder("iso-8859-1").decode(buf); + const responseText = new TextDecoder("iso-8859-1").decode(buf); clientConn.close(); - assert(/\r\n[Xx]-[Hh]eader-[Tt]est: Æ\r\n/.test(responseText)); - ac.abort(); await server; + + assertMatch(responseText, /\r\n[Xx]-[Hh]eader-[Tt]est: Æ\r\n/); }, ); @@ -884,19 +1486,19 @@ Deno.test( const server = Deno.serve({ handler: async (request) => { // FIXME: - // assertEquals(new URL(request.url).href, "http://127.0.0.1:4501/"); + // assertEquals(new URL(request.url).href, `http://127.0.0.1:${servePort}/`); assertEquals(await request.text(), ""); promise.resolve(); return new Response("11"); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const clientConn = await Deno.connect({ port: 4501 }); + const clientConn = await Deno.connect({ port: servePort }); async function writeRequest(conn: Deno.Conn) { const encoder = new TextEncoder(); @@ -904,7 +1506,7 @@ Deno.test( const w = new BufWriter(conn); const r = new BufReader(conn); const body = - `CONNECT 127.0.0.1:4501 HTTP/1.1\r\nHost: 127.0.0.1:4501\r\n\r\n`; + `CONNECT 127.0.0.1:${servePort} HTTP/1.1\r\nHost: 127.0.0.1:${servePort}\r\n\r\n`; const writeResult = await w.write(encoder.encode(body)); assertEquals(body.length, writeResult); await w.flush(); @@ -942,7 +1544,7 @@ Deno.test( promise.resolve(); return new Response("ok"); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -950,7 +1552,7 @@ Deno.test( }); await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { headers: [ ["connection", "close"], ["cookie", "foo=bar"], @@ -967,41 +1569,6 @@ Deno.test( }, ); -Deno.test( - { permissions: { net: true, write: true, read: true } }, - async function httpServerCorrectSizeResponse() { - const promise = deferred(); - const 
listeningPromise = deferred(); - const ac = new AbortController(); - - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - await file.write(new Uint8Array(70 * 1024).fill(1)); // 70kb sent in 64kb + 6kb chunks - file.close(); - - const server = Deno.serve({ - handler: async (request) => { - const f = await Deno.open(tmpFile, { read: true }); - promise.resolve(); - return new Response(f.readable); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); - - await listeningPromise; - const resp = await fetch("http://127.0.0.1:4503/"); - await promise; - const body = await resp.arrayBuffer(); - - assertEquals(body.byteLength, 70 * 1024); - ac.abort(); - await server; - }, -); - // https://github.com/denoland/deno/issues/12741 // https://github.com/denoland/deno/pull/12746 // https://github.com/denoland/deno/pull/12798 @@ -1013,21 +1580,20 @@ Deno.test( const ac = new AbortController(); const hostname = "localhost"; - const port = 4501; const server = Deno.serve({ handler: () => { promise.resolve(); return new Response("ok"); }, - port: port, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const url = `http://${hostname}:${port}/`; + const url = `http://${hostname}:${servePort}/`; const args = ["-X", "DELETE", url]; const { success } = await new Deno.Command("curl", { args, @@ -1051,18 +1617,18 @@ Deno.test( const ac = new AbortController(); const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { assertEquals(request.body, null); promise.resolve(); return new Response(new Uint8Array([128])); }, - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const resp = await fetch("http://localhost:4501/"); + const resp = await 
fetch(`http://localhost:${servePort}/`); await promise; @@ -1087,20 +1653,20 @@ Deno.test( const ac = new AbortController(); const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { assertEquals(request.method, "GET"); assertEquals(request.headers.get("host"), "deno.land"); promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = `GET /echo HTTP/1.1\r\nHost: deno.land\r\n\r\n`; const writeResult = await conn.write(encoder.encode(body)); @@ -1121,20 +1687,20 @@ Deno.test( const ac = new AbortController(); const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { assertEquals(request.method, "GET"); assertEquals(request.headers.get("server"), "hello\tworld"); promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = `GET / HTTP/1.1\r\nserver: hello\tworld\r\n\r\n`; const writeResult = await conn.write(encoder.encode(body)); @@ -1161,14 +1727,14 @@ Deno.test( promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); // Connection: close = don't try to parse the body as a new request const body = @@ -1197,14 +1763,14 @@ Deno.test( promise.resolve(); return new 
Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = `POST / HTTP/1.1\r\nHost: example.domain\r\nContent-Length: 19\r\n\r\nI'm a good request.`; @@ -1220,13 +1786,14 @@ Deno.test( type TestCase = { headers?: Record; + // deno-lint-ignore no-explicit-any body: any; - expects_chunked?: boolean; - expects_con_len?: boolean; + expectsChunked?: boolean; + expectsConnLen?: boolean; }; function hasHeader(msg: string, name: string): boolean { - let n = msg.indexOf("\r\n\r\n") || msg.length; + const n = msg.indexOf("\r\n\r\n") || msg.length; return msg.slice(0, n).includes(name); } @@ -1237,19 +1804,19 @@ function createServerLengthTest(name: string, testCase: TestCase) { const listeningPromise = deferred(); const server = Deno.serve({ - handler: async (request) => { + handler: (request) => { assertEquals(request.method, "GET"); promise.resolve(); return new Response(testCase.body, testCase.headers ?? 
{}); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = `GET / HTTP/1.1\r\nHost: example.domain\r\nConnection: close\r\n\r\n`; @@ -1268,23 +1835,23 @@ function createServerLengthTest(name: string, testCase: TestCase) { msg += decoder.decode(buf.subarray(0, readResult)); try { assert( - testCase.expects_chunked == hasHeader(msg, "Transfer-Encoding:"), + testCase.expectsChunked == hasHeader(msg, "Transfer-Encoding:"), ); - assert(testCase.expects_chunked == hasHeader(msg, "chunked")); - assert(testCase.expects_con_len == hasHeader(msg, "Content-Length:")); + assert(testCase.expectsChunked == hasHeader(msg, "chunked")); + assert(testCase.expectsConnLen == hasHeader(msg, "Content-Length:")); const n = msg.indexOf("\r\n\r\n") + 4; - if (testCase.expects_chunked) { + if (testCase.expectsChunked) { assertEquals(msg.slice(n + 1, n + 3), "\r\n"); assertEquals(msg.slice(msg.length - 7), "\r\n0\r\n\r\n"); } - if (testCase.expects_con_len && typeof testCase.body === "string") { + if (testCase.expectsConnLen && typeof testCase.body === "string") { assertEquals(msg.slice(n), testCase.body); } break; - } catch (e) { + } catch { continue; } } @@ -1305,134 +1872,61 @@ function stream(s: string): ReadableStream { createServerLengthTest("fixedResponseKnown", { headers: { "content-length": "11" }, body: "foo bar baz", - expects_chunked: false, - expects_con_len: true, + expectsChunked: false, + expectsConnLen: true, }); createServerLengthTest("fixedResponseUnknown", { headers: { "content-length": "11" }, body: stream("foo bar baz"), - expects_chunked: true, - expects_con_len: false, + expectsChunked: true, + expectsConnLen: false, }); createServerLengthTest("fixedResponseKnownEmpty", { headers: { "content-length": "0" }, body: "", - 
expects_chunked: false, - expects_con_len: true, + expectsChunked: false, + expectsConnLen: true, }); createServerLengthTest("chunkedRespondKnown", { headers: { "transfer-encoding": "chunked" }, body: "foo bar baz", - expects_chunked: false, - expects_con_len: true, + expectsChunked: false, + expectsConnLen: true, }); createServerLengthTest("chunkedRespondUnknown", { headers: { "transfer-encoding": "chunked" }, body: stream("foo bar baz"), - expects_chunked: true, - expects_con_len: false, + expectsChunked: true, + expectsConnLen: false, }); createServerLengthTest("autoResponseWithKnownLength", { body: "foo bar baz", - expects_chunked: false, - expects_con_len: true, + expectsChunked: false, + expectsConnLen: true, }); createServerLengthTest("autoResponseWithUnknownLength", { body: stream("foo bar baz"), - expects_chunked: true, - expects_con_len: false, + expectsChunked: true, + expectsConnLen: false, }); createServerLengthTest("autoResponseWithKnownLengthEmpty", { body: "", - expects_chunked: false, - expects_con_len: true, + expectsChunked: false, + expectsConnLen: true, }); -// FIXME: https://github.com/denoland/deno/issues/15892 -// createServerLengthTest("autoResponseWithUnknownLengthEmpty", { -// body: stream(""), -// expects_chunked: true, -// expects_con_len: false, -// }); - -Deno.test( - { permissions: { net: true } }, - async function httpServerGetChunkedResponseWithKa() { - const promises = [deferred(), deferred()]; - let reqCount = 0; - const listeningPromise = deferred(); - const ac = new AbortController(); - - const server = Deno.serve({ - handler: async (request) => { - assertEquals(request.method, "GET"); - promises[reqCount].resolve(); - reqCount++; - return new Response(reqCount <= 1 ? 
stream("foo bar baz") : "zar quux"); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); - - await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); - const encoder = new TextEncoder(); - { - const body = - `GET / HTTP/1.1\r\nHost: example.domain\r\nConnection: keep-alive\r\n\r\n`; - const writeResult = await conn.write(encoder.encode(body)); - assertEquals(body.length, writeResult); - await promises[0]; - } - - const decoder = new TextDecoder(); - { - let msg = ""; - while (true) { - try { - const buf = new Uint8Array(1024); - const readResult = await conn.read(buf); - assert(readResult); - msg += decoder.decode(buf.subarray(0, readResult)); - assert(msg.endsWith("\r\nfoo bar baz\r\n0\r\n\r\n")); - break; - } catch { - continue; - } - } - } - - // once more! - { - const body = - `GET /quux HTTP/1.1\r\nHost: example.domain\r\nConnection: close\r\n\r\n`; - const writeResult = await conn.write(encoder.encode(body)); - assertEquals(body.length, writeResult); - await promises[1]; - } - { - const buf = new Uint8Array(1024); - const readResult = await conn.read(buf); - assert(readResult); - const msg = decoder.decode(buf.subarray(0, readResult)); - assert(msg.endsWith("zar quux")); - } - - conn.close(); - - ac.abort(); - await server; - }, -); +createServerLengthTest("autoResponseWithUnknownLengthEmpty", { + body: stream(""), + expectsChunked: true, + expectsConnLen: false, +}); Deno.test( { permissions: { net: true } }, @@ -1449,14 +1943,14 @@ Deno.test( promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = @@ -1481,14 +1975,14 @@ Deno.test( handler: () => { throw new Error("unreachable"); }, - 
port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -1524,14 +2018,14 @@ Deno.test( promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = @@ -1560,14 +2054,14 @@ Deno.test( assertEquals(await r.text(), "12345"); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = @@ -1595,14 +2089,14 @@ Deno.test( promise.resolve(); return new Response("NaN".repeat(100)); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -1627,38 +2121,154 @@ Deno.test( }, ); -Deno.test( - { permissions: { net: true, write: true, read: true } }, - async function httpServerSendFile() { - const promise = deferred(); - const ac = new AbortController(); - const listeningPromise = deferred(); - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - const data = new Uint8Array(70 * 1024).fill(1); - await file.write(data); - 
file.close(); - const server = Deno.serve({ - handler: async () => { - const f = await Deno.open(tmpFile, { read: true }); - promise.resolve(); - return new Response(f.readable, { status: 200 }); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); +function makeTempData(size: number) { + return new Uint8Array(size).fill(1); +} - await listeningPromise; - const response = await fetch(`http://localhost:4503/`); - assertEquals(response.status, 200); - await promise; - assertEquals(new Uint8Array(await response.arrayBuffer()), data); - ac.abort(); - await server; +async function makeTempFile(size: number) { + const tmpFile = await Deno.makeTempFile(); + const file = await Deno.open(tmpFile, { write: true, read: true }); + const data = makeTempData(size); + await file.write(data); + file.close(); + + return await Deno.open(tmpFile, { write: true, read: true }); +} + +const compressionTestCases = [ + { name: "Empty", length: 0, in: {}, out: {}, expect: null }, + { + name: "EmptyAcceptGzip", + length: 0, + in: { "Accept-Encoding": "gzip" }, + out: {}, + expect: null, }, -); + // This technically would be compressible if not for the size, however the size_hint is not implemented + // for FileResource and we don't currently peek ahead on resources. 
+ // { + // name: "EmptyAcceptGzip2", + // length: 0, + // in: { "Accept-Encoding": "gzip" }, + // out: { "Content-Type": "text/plain" }, + // expect: null, + // }, + { name: "Uncompressible", length: 1024, in: {}, out: {}, expect: null }, + { + name: "UncompressibleAcceptGzip", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: {}, + expect: null, + }, + { + name: "UncompressibleType", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/fake" }, + expect: null, + }, + { + name: "CompressibleType", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain" }, + expect: "gzip", + }, + { + name: "CompressibleType2", + length: 1024, + in: { "Accept-Encoding": "gzip, deflate, br" }, + out: { "Content-Type": "text/plain" }, + expect: "gzip", + }, + { + name: "CompressibleType3", + length: 1024, + in: { "Accept-Encoding": "br" }, + out: { "Content-Type": "text/plain" }, + expect: "br", + }, + { + name: "UncompressibleRange", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Content-Range": "1" }, + expect: null, + }, + { + name: "UncompressibleCE", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Content-Encoding": "random" }, + expect: null, + }, + { + name: "UncompressibleCC", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Cache-Control": "no-transform" }, + expect: null, + }, +]; + +for (const testCase of compressionTestCases) { + const name = `httpServerCompression${testCase.name}`; + Deno.test( + { permissions: { net: true, write: true, read: true } }, + { + [name]: async function () { + const promise = deferred(); + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (_request) => { + const f = await makeTempFile(testCase.length); + promise.resolve(); + // deno-lint-ignore no-explicit-any + const 
headers = testCase.out as any; + headers["Content-Length"] = testCase.length.toString(); + return new Response(f.readable, { + headers: headers as HeadersInit, + }); + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + try { + await listeningPromise; + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { + headers: testCase.in as HeadersInit, + }); + await promise; + const body = await resp.arrayBuffer(); + if (testCase.expect == null) { + assertEquals(body.byteLength, testCase.length); + assertEquals( + resp.headers.get("content-length"), + testCase.length.toString(), + ); + assertEquals( + resp.headers.get("content-encoding"), + testCase.out["Content-Encoding"] || null, + ); + } else if (testCase.expect == "gzip") { + // Note the fetch will transparently decompress this response, BUT we can detect that a response + // was compressed by the lack of a content length. + assertEquals(body.byteLength, testCase.length); + assertEquals(resp.headers.get("content-encoding"), null); + assertEquals(resp.headers.get("content-length"), null); + } + } finally { + ac.abort(); + await server; + } + }, + }[name], + ); +} Deno.test( { permissions: { net: true, write: true, read: true } }, @@ -1667,27 +2277,24 @@ Deno.test( const ac = new AbortController(); const listeningPromise = deferred(); - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - const data = new Uint8Array(70 * 1024).fill(1); - await file.write(data); - file.close(); - const server = Deno.serve({ handler: async (request) => { - assertEquals(new Uint8Array(await request.arrayBuffer()), data); + assertEquals( + new Uint8Array(await request.arrayBuffer()), + makeTempData(70 * 1024), + ); promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await 
listeningPromise; - const f = await Deno.open(tmpFile, { write: true, read: true }); - const response = await fetch(`http://localhost:4503/`, { + const f = await makeTempFile(70 * 1024); + const response = await fetch(`http://localhost:${servePort}/`, { method: "POST", body: f.readable, }); @@ -1708,12 +2315,11 @@ Deno.test( const ac = new AbortController(); const listeningPromise = deferred(); const hostname = "127.0.0.1"; - const port = 4501; const server = Deno.serve({ handler: () => new Response("Hello World"), hostname, - port, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -1724,7 +2330,7 @@ Deno.test( await listeningPromise; const caCert = Deno.readTextFileSync("cli/tests/testdata/tls/RootCA.pem"); const client = Deno.createHttpClient({ caCerts: [caCert] }); - const resp = await fetch(`https://localhost:${port}/`, { + const resp = await fetch(`https://localhost:${servePort}/`, { client, headers: { "connection": "close" }, }); @@ -1751,14 +2357,14 @@ Deno.test( promise.resolve(); return new Response("ok"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const body = @@ -1784,7 +2390,7 @@ Deno.test( handler: () => { throw new Error("oops"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -1802,7 +2408,7 @@ Deno.test( await listeningPromise; for (const teHeader of variations) { - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const body = `POST / HTTP/1.1\r\nHost: example.domain\r\n${teHeader}\r\n\r\n0\r\n\r\n`; const writeResult = await conn.write(encoder.encode(body)); @@ -1829,7 +2435,7 @@ Deno.test( const ac = new 
AbortController(); const server = Deno.serve({ handler: (_request) => new Response(null, { status: 204 }), - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -1837,10 +2443,11 @@ Deno.test( try { await listeningPromise; - const resp = await fetch("http://127.0.0.1:4501/", { + const resp = await fetch(`http://127.0.0.1:${servePort}/`, { method: "GET", headers: { "connection": "close" }, }); + assertEquals(resp.status, 204); assertEquals(resp.headers.get("Content-Length"), null); } finally { ac.abort(); @@ -1861,14 +2468,14 @@ Deno.test( promise.resolve(); return new Response(null, { status: 304 }); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -1907,14 +2514,14 @@ Deno.test( assertEquals(await req.text(), "hello"); return new Response(null, { status: 304 }); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -1968,14 +2575,14 @@ Deno.test( assertEquals(await req.text(), ""); return new Response(null, { status: 304 }); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -2020,14 +2627,14 @@ for (const [name, req] of badRequests) { handler: () => { throw new 
Error("oops"); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); + const conn = await Deno.connect({ port: servePort }); const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -2064,7 +2671,7 @@ Deno.test( let reqCount = -1; let timerId: number | undefined; const server = Deno.serve({ - handler: async (req) => { + handler: (_req) => { reqCount++; if (reqCount === 0) { const msg = new TextEncoder().encode("data: hello\r\n\r\n"); @@ -2090,13 +2697,13 @@ Deno.test( return new Response(`hello ${reqCount}`); }, - port: 4503, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); - const sseRequest = await fetch(`http://localhost:4503/`); + const sseRequest = await fetch(`http://localhost:${servePort}/`); const decoder = new TextDecoder(); const stream = sseRequest.body!.getReader(); @@ -2106,7 +2713,7 @@ Deno.test( assertEquals(decoder.decode(value), "data: hello\r\n\r\n"); } - const helloRequest = await fetch(`http://localhost:4503/`); + const helloRequest = await fetch(`http://localhost:${servePort}/`); assertEquals(helloRequest.status, 200); assertEquals(await helloRequest.text(), "hello 1"); @@ -2137,7 +2744,7 @@ Deno.test( const server = Deno.serve({ handler: (_req) => new Response("ok"), hostname: "localhost", - port: 4501, + port: servePort, signal: ac.signal, onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), @@ -2158,21 +2765,21 @@ Deno.test( const ac = new AbortController(); const promise = deferred(); let count = 0; - const server = Deno.serve(() => { - count++; - return new Response(`hello world ${count}`); - }, { - async onListen() { - const res1 = await fetch("http://localhost:9000/"); + const server = Deno.serve({ + async onListen({ port }: { port: number }) { + const res1 = await fetch(`http://localhost:${port}/`); 
assertEquals(await res1.text(), "hello world 1"); - const res2 = await fetch("http://localhost:9000/"); + const res2 = await fetch(`http://localhost:${port}/`); assertEquals(await res2.text(), "hello world 2"); promise.resolve(); ac.abort(); }, signal: ac.signal, + }, () => { + count++; + return new Response(`hello world ${count}`); }); await promise; @@ -2199,13 +2806,13 @@ Deno.test( return new Response("ok"); }, signal: ac.signal, - onListen: onListen(listeningPromise), + onListen: ({ port }: { port: number }) => listeningPromise.resolve(port), onError: createOnErrorCb(ac), }); try { - await listeningPromise; - const resp = await fetch("http://localhost:9000/", { + const port = await listeningPromise; + const resp = await fetch(`http://localhost:${port}/`, { headers: { connection: "close" }, method: "POST", body: '{"sus":true}', @@ -2226,7 +2833,16 @@ Deno.test( async function testIssue16567() { const ac = new AbortController(); const promise = deferred(); - const server = Deno.serve(() => + const server = Deno.serve({ + async onListen({ port }) { + const res1 = await fetch(`http://localhost:${port}/`); + assertEquals((await res1.text()).length, 40 * 50_000); + + promise.resolve(); + ac.abort(); + }, + signal: ac.signal, + }, () => new Response( new ReadableStream({ start(c) { @@ -2237,16 +2853,7 @@ Deno.test( c.close(); }, }), - ), { - async onListen() { - const res1 = await fetch("http://localhost:9000/"); - assertEquals((await res1.text()).length, 40 * 50_000); - - promise.resolve(); - ac.abort(); - }, - signal: ac.signal, - }); + )); await promise; await server; @@ -2384,3 +2991,129 @@ function isProhibitedForTrailer(key: string): boolean { const s = new Set(["transfer-encoding", "content-length", "trailer"]); return s.has(key.toLowerCase()); } + +Deno.test( + { permissions: { net: true, run: true } }, + async function httpServeCurlH2C() { + const ac = new AbortController(); + const server = Deno.serve( + { signal: ac.signal }, + () => new Response("hello 
world!"), + ); + + assertEquals( + "hello world!", + await curlRequest(["http://localhost:8000/path"]), + ); + assertEquals( + "hello world!", + await curlRequest(["http://localhost:8000/path", "--http2"]), + ); + assertEquals( + "hello world!", + await curlRequest([ + "http://localhost:8000/path", + "--http2", + "--http2-prior-knowledge", + ]), + ); + + ac.abort(); + await server; + }, +); + +// TODO(mmastrac): This test should eventually use fetch, when we support trailers there. +// This test is ignored because it's flaky and relies on cURL's verbose output. +Deno.test( + { permissions: { net: true, run: true, read: true }, ignore: true }, + async function httpServerTrailers() { + const ac = new AbortController(); + const listeningPromise = deferred(); + + const server = Deno.serve({ + handler: () => { + const response = new Response("Hello World", { + headers: { + "trailer": "baz", + "transfer-encoding": "chunked", + "foo": "bar", + }, + }); + addTrailers(response, [["baz", "why"]]); + return response; + }, + port: servePort, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + // We don't have a great way to access this right now, so just fetch the trailers with cURL + const [_, stderr] = await curlRequestWithStdErr([ + `http://localhost:${servePort}/path`, + "-v", + "--http2", + "--http2-prior-knowledge", + ]); + assertMatch(stderr, /baz: why/); + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true, run: true, read: true } }, + async function httpsServeCurlH2C() { + const ac = new AbortController(); + const server = Deno.serve( + { + signal: ac.signal, + cert: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.crt"), + key: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.key"), + }, + () => new Response("hello world!"), + ); + + assertEquals( + "hello world!", + await curlRequest(["https://localhost:9000/path", "-k"]), + ); + assertEquals( + "hello world!", + await 
curlRequest(["https://localhost:9000/path", "-k", "--http2"]), + ); + assertEquals( + "hello world!", + await curlRequest([ + "https://localhost:9000/path", + "-k", + "--http2", + "--http2-prior-knowledge", + ]), + ); + + ac.abort(); + await server; + }, +); + +async function curlRequest(args: string[]) { + const { success, stdout } = await new Deno.Command("curl", { + args, + stdout: "piped", + stderr: "null", + }).output(); + assert(success); + return new TextDecoder().decode(stdout); +} + +async function curlRequestWithStdErr(args: string[]) { + const { success, stdout, stderr } = await new Deno.Command("curl", { + args, + stdout: "piped", + stderr: "piped", + }).output(); + assert(success); + return [new TextDecoder().decode(stdout), new TextDecoder().decode(stderr)]; +} diff --git a/cli/tests/unit/stat_test.ts b/cli/tests/unit/stat_test.ts index f386fd92fd..69730d439b 100644 --- a/cli/tests/unit/stat_test.ts +++ b/cli/tests/unit/stat_test.ts @@ -307,6 +307,10 @@ Deno.test( assert(s.rdev === null); assert(s.blksize === null); assert(s.blocks === null); + assert(s.isBlockDevice === null); + assert(s.isCharDevice === null); + assert(s.isFifo === null); + assert(s.isSocket === null); }, ); @@ -334,5 +338,9 @@ Deno.test( assert(s.rdev !== null); assert(s.blksize !== null); assert(s.blocks !== null); + assert(!s.isBlockDevice); + assert(!s.isCharDevice); + assert(!s.isFifo); + assert(!s.isSocket); }, ); diff --git a/cli/tests/unit/streams_deprecated.ts b/cli/tests/unit/streams_deprecated.ts deleted file mode 100644 index 04dbfa3fb7..0000000000 --- a/cli/tests/unit/streams_deprecated.ts +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-import { assertEquals } from "./test_util.ts"; - -Deno.test(async function symlinkSyncPerm() { - const rs = new ReadableStream({ - start(controller) { - controller.enqueue("hello "); - controller.enqueue("deno"); - controller.close(); - }, - }); - - for await (const chunk of rs.getIterator()) { - assertEquals(typeof chunk, "string"); - } -}); diff --git a/cli/tests/unit/testing_test.ts b/cli/tests/unit/testing_test.ts index 4e28d545c5..52e3baa133 100644 --- a/cli/tests/unit/testing_test.ts +++ b/cli/tests/unit/testing_test.ts @@ -147,3 +147,8 @@ Deno.test(async function parentOnTextContext(t1) { }); }); }); + +Deno.test("explicit undefined for boolean options", { + ignore: undefined, + only: undefined, +}, () => {}); diff --git a/cli/tests/unit/timers_test.ts b/cli/tests/unit/timers_test.ts index 8de7565169..c50cb779c6 100644 --- a/cli/tests/unit/timers_test.ts +++ b/cli/tests/unit/timers_test.ts @@ -557,7 +557,7 @@ Deno.test({ permissions: { run: true, read: true }, fn: async () => { const [statusCode, output] = await execCode(` - const timer = setTimeout(() => console.log("1")); + const timer = setTimeout(() => console.log("1"), 1); Deno.unrefTimer(timer); `); assertEquals(statusCode, 0); diff --git a/cli/tests/unit/tls_test.ts b/cli/tests/unit/tls_test.ts index b7cde10208..c8dd7ddbeb 100644 --- a/cli/tests/unit/tls_test.ts +++ b/cli/tests/unit/tls_test.ts @@ -1337,7 +1337,7 @@ Deno.test( await assertRejects( () => conn.handshake(), Deno.errors.InvalidData, - "BadCertificate", + "received fatal alert", ); } conn.close(); @@ -1368,7 +1368,7 @@ Deno.test( await assertRejects( () => tlsConn.handshake(), Deno.errors.InvalidData, - "CertNotValidForName", + "NotValidForName", ); tlsConn.close(); } diff --git a/cli/tests/unit/url_test.ts b/cli/tests/unit/url_test.ts index 644b8dd39a..28cf9a0e2c 100644 --- a/cli/tests/unit/url_test.ts +++ b/cli/tests/unit/url_test.ts @@ -32,6 +32,21 @@ Deno.test(function urlParsing() { ); }); +Deno.test(function emptyUrl() { + 
assertThrows( + // @ts-ignore for test + () => new URL(), + TypeError, + "1 argument required, but only 0 present", + ); + assertThrows( + // @ts-ignore for test + () => URL.canParse(), + TypeError, + "1 argument required, but only 0 present", + ); +}); + Deno.test(function urlProtocolParsing() { assertEquals(new URL("Aa+-.1://foo").protocol, "aa+-.1:"); assertEquals(new URL("aA+-.1://foo").protocol, "aa+-.1:"); diff --git a/cli/tests/unit/urlpattern_test.ts b/cli/tests/unit/urlpattern_test.ts index 9bed092355..cb5fc76c53 100644 --- a/cli/tests/unit/urlpattern_test.ts +++ b/cli/tests/unit/urlpattern_test.ts @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. import { assert, assertEquals } from "./test_util.ts"; +import { assertType, IsExact } from "../../../test_util/std/testing/types.ts"; Deno.test(function urlPatternFromString() { const pattern = new URLPattern("https://deno.land/foo/:bar"); @@ -13,6 +14,10 @@ Deno.test(function urlPatternFromString() { assert(match); assertEquals(match.pathname.input, "/foo/x"); assertEquals(match.pathname.groups, { bar: "x" }); + + // group values should be nullable + const val = match.pathname.groups.val; + assertType>(true); }); Deno.test(function urlPatternFromStringWithBase() { diff --git a/cli/tests/unit/version_test.ts b/cli/tests/unit/version_test.ts index f129de6b23..222aeeb851 100644 --- a/cli/tests/unit/version_test.ts +++ b/cli/tests/unit/version_test.ts @@ -6,5 +6,5 @@ Deno.test(function version() { const pattern = /^\d+\.\d+\.\d+/; assert(pattern.test(Deno.version.deno)); assert(pattern.test(Deno.version.v8)); - assertEquals(Deno.version.typescript, "5.0.3"); + assertEquals(Deno.version.typescript, "5.0.4"); }); diff --git a/cli/tests/unit/websocket_test.ts b/cli/tests/unit/websocket_test.ts index 948e2add23..795d5ebc18 100644 --- a/cli/tests/unit/websocket_test.ts +++ b/cli/tests/unit/websocket_test.ts @@ -1,5 +1,11 @@ // Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. -import { assertEquals, assertThrows, deferred, fail } from "./test_util.ts"; +import { + assert, + assertEquals, + assertThrows, + deferred, + fail, +} from "./test_util.ts"; Deno.test({ permissions: "none" }, function websocketPermissionless() { assertThrows( @@ -37,6 +43,22 @@ Deno.test(async function websocketPingPong() { ws.close(); }); +// TODO(mmastrac): This requires us to ignore bad certs +// Deno.test(async function websocketSecureConnect() { +// const promise = deferred(); +// const ws = new WebSocket("wss://localhost:4243/"); +// assertEquals(ws.url, "wss://localhost:4243/"); +// ws.onerror = (error) => { +// console.log(error); +// fail(); +// }; +// ws.onopen = () => ws.close(); +// ws.onclose = () => { +// promise.resolve(); +// }; +// await promise; +// }); + // https://github.com/denoland/deno/issues/18700 Deno.test( { sanitizeOps: false, sanitizeResources: false }, @@ -82,3 +104,68 @@ Deno.test( ws.close(); }, ); + +// https://github.com/denoland/deno/issues/18775 +Deno.test({ + sanitizeOps: false, + sanitizeResources: false, +}, async function websocketDoubleClose() { + const promise = deferred(); + + const ac = new AbortController(); + const listeningPromise = deferred(); + + const server = Deno.serve({ + handler: (req) => { + const { response, socket } = Deno.upgradeWebSocket(req); + let called = false; + socket.onopen = () => socket.send("Hello"); + socket.onmessage = () => { + assert(!called); + called = true; + socket.send("bye"); + socket.close(); + }; + socket.onclose = () => ac.abort(); + socket.onerror = () => fail(); + return response; + }, + signal: ac.signal, + onListen: () => listeningPromise.resolve(), + hostname: "localhost", + port: 4247, + }); + + await listeningPromise; + + const ws = new WebSocket("ws://localhost:4247/"); + assertEquals(ws.url, "ws://localhost:4247/"); + ws.onerror = () => fail(); + ws.onmessage = () => ws.send("bye"); + ws.onclose = () => { + promise.resolve(); + }; + await 
Promise.all([promise, server]); +}); + +Deno.test( + { sanitizeOps: false }, + function websocketConstructorWithPrototypePollusion() { + const originalSymbolIterator = Array.prototype[Symbol.iterator]; + try { + Array.prototype[Symbol.iterator] = () => { + throw Error("unreachable"); + }; + assertThrows(() => { + new WebSocket( + new URL("ws://localhost:4242/"), + // Allow `Symbol.iterator` to be called in WebIDL conversion to `sequence` + // deno-lint-ignore no-explicit-any + ["soap", "soap"].values() as any, + ); + }, DOMException); + } finally { + Array.prototype[Symbol.iterator] = originalSymbolIterator; + } + }, +); diff --git a/cli/tests/unit_node/_fs/_fs_handle_test.ts b/cli/tests/unit_node/_fs/_fs_handle_test.ts new file mode 100644 index 0000000000..165608e1ce --- /dev/null +++ b/cli/tests/unit_node/_fs/_fs_handle_test.ts @@ -0,0 +1,20 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import * as path from "../../../../test_util/std/path/mod.ts"; +import { + assert, + assertEquals, +} from "../../../../test_util/std/testing/asserts.ts"; + +const moduleDir = path.dirname(path.fromFileUrl(import.meta.url)); +const testData = path.resolve(moduleDir, "testdata", "hello.txt"); + +Deno.test("readFileSuccess", async function () { + const fs = await import("node:fs/promises"); + const fileHandle = await fs.open(testData); + const data = await fileHandle.readFile(); + + assert(data instanceof Uint8Array); + assertEquals(new TextDecoder().decode(data as Uint8Array), "hello world"); + + await fileHandle.close(); +}); diff --git a/cli/tests/unit_node/async_hooks_test.ts b/cli/tests/unit_node/async_hooks_test.ts index 73d6a99bc8..4062443151 100644 --- a/cli/tests/unit_node/async_hooks_test.ts +++ b/cli/tests/unit_node/async_hooks_test.ts @@ -41,7 +41,10 @@ Deno.test(async function bar() { let differentScopeDone = false; const als = new AsyncLocalStorage(); const ac = new AbortController(); - const server = Deno.serve(() => { + const server 
= Deno.serve({ + signal: ac.signal, + port: 4000, + }, () => { const differentScope = als.run(123, () => AsyncResource.bind(() => { differentScopeDone = true; @@ -54,9 +57,6 @@ Deno.test(async function bar() { await new Promise((res) => setTimeout(res, 10)); return new Response(als.getStore() as string); // "Hello World" }); - }, { - signal: ac.signal, - port: 4000, }); const res = await fetch("http://localhost:4000"); diff --git a/cli/tests/unit_node/child_process_test.ts b/cli/tests/unit_node/child_process_test.ts index b40cfd9ff4..f8de5b6f6d 100644 --- a/cli/tests/unit_node/child_process_test.ts +++ b/cli/tests/unit_node/child_process_test.ts @@ -577,3 +577,66 @@ Deno.test( assertStringIncludes(output, "typescript"); }, ); + +Deno.test( + "[node/child_process spawn] supports stdio array option", + async () => { + const cmdFinished = deferred(); + let output = ""; + const script = path.join( + path.dirname(path.fromFileUrl(import.meta.url)), + "testdata", + "child_process_stdio.js", + ); + const cp = spawn(Deno.execPath(), ["run", "-A", script]); + cp.stdout?.on("data", (data) => { + output += data; + }); + cp.on("close", () => cmdFinished.resolve()); + await cmdFinished; + + assertStringIncludes(output, "foo"); + assertStringIncludes(output, "close"); + }, +); + +Deno.test( + "[node/child_process spawn] supports stdio [0, 1, 2] option", + async () => { + const cmdFinished = deferred(); + let output = ""; + const script = path.join( + path.dirname(path.fromFileUrl(import.meta.url)), + "testdata", + "child_process_stdio_012.js", + ); + const cp = spawn(Deno.execPath(), ["run", "-A", script]); + cp.stdout?.on("data", (data) => { + output += data; + }); + cp.on("close", () => cmdFinished.resolve()); + await cmdFinished; + + assertStringIncludes(output, "foo"); + assertStringIncludes(output, "close"); + }, +); + +Deno.test({ + name: "[node/child_process spawn] supports SIGIOT signal", + ignore: Deno.build.os === "windows", + async fn() { + const script = path.join( + 
path.dirname(path.fromFileUrl(import.meta.url)), + "testdata", + "child_process_stdin.js", + ); + const cp = spawn(Deno.execPath(), ["run", "-A", script]); + const p = withTimeout(); + cp.on("exit", () => p.resolve()); + cp.kill("SIGIOT"); + await p; + assert(cp.killed); + assertEquals(cp.signalCode, "SIGIOT"); + }, +}); diff --git a/cli/tests/unit_node/crypto_cipher_test.ts b/cli/tests/unit_node/crypto/crypto_cipher_test.ts similarity index 96% rename from cli/tests/unit_node/crypto_cipher_test.ts rename to cli/tests/unit_node/crypto/crypto_cipher_test.ts index 2c8cca2567..a8a5130cf1 100644 --- a/cli/tests/unit_node/crypto_cipher_test.ts +++ b/cli/tests/unit_node/crypto/crypto_cipher_test.ts @@ -6,13 +6,13 @@ import { buffer, text } from "node:stream/consumers"; import { assertEquals, assertThrows, -} from "../../../test_util/std/testing/asserts.ts"; +} from "../../../../test_util/std/testing/asserts.ts"; const rsaPrivateKey = Deno.readTextFileSync( - new URL("./testdata/rsa_private.pem", import.meta.url), + new URL("../testdata/rsa_private.pem", import.meta.url), ); const rsaPublicKey = Deno.readTextFileSync( - new URL("./testdata/rsa_public.pem", import.meta.url), + new URL("../testdata/rsa_public.pem", import.meta.url), ); const input = new TextEncoder().encode("hello world"); diff --git a/cli/tests/unit_node/crypto_hash.ts b/cli/tests/unit_node/crypto/crypto_hash_test.ts similarity index 88% rename from cli/tests/unit_node/crypto_hash.ts rename to cli/tests/unit_node/crypto/crypto_hash_test.ts index fae66e0244..6795777703 100644 --- a/cli/tests/unit_node/crypto_hash.ts +++ b/cli/tests/unit_node/crypto/crypto_hash_test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
import { createHash, createHmac } from "node:crypto"; -import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; +import { assertEquals } from "../../../../test_util/std/testing/asserts.ts"; // https://github.com/denoland/deno/issues/18140 Deno.test({ diff --git a/cli/tests/unit_node/crypto_key.ts b/cli/tests/unit_node/crypto/crypto_key_test.ts similarity index 98% rename from cli/tests/unit_node/crypto_key.ts rename to cli/tests/unit_node/crypto/crypto_key_test.ts index 49d81003f0..672c9fa7f0 100644 --- a/cli/tests/unit_node/crypto_key.ts +++ b/cli/tests/unit_node/crypto/crypto_key_test.ts @@ -13,7 +13,7 @@ import { Buffer } from "node:buffer"; import { assertEquals, assertThrows, -} from "../../../test_util/std/testing/asserts.ts"; +} from "../../../../test_util/std/testing/asserts.ts"; import { createHmac } from "node:crypto"; const generateKeyPairAsync = promisify( diff --git a/cli/tests/unit_node/crypto_sign_test.ts b/cli/tests/unit_node/crypto/crypto_sign_test.ts similarity index 95% rename from cli/tests/unit_node/crypto_sign_test.ts rename to cli/tests/unit_node/crypto/crypto_sign_test.ts index 9d346e7d02..9988ed71c3 100644 --- a/cli/tests/unit_node/crypto_sign_test.ts +++ b/cli/tests/unit_node/crypto/crypto_sign_test.ts @@ -3,18 +3,18 @@ import { assert, assertEquals, -} from "../../../test_util/std/testing/asserts.ts"; +} from "../../../../test_util/std/testing/asserts.ts"; import { createSign, createVerify, sign, verify } from "node:crypto"; import { Buffer } from "node:buffer"; const rsaPrivatePem = Buffer.from( await Deno.readFile( - new URL("./testdata/rsa_private.pem", import.meta.url), + new URL("../testdata/rsa_private.pem", import.meta.url), ), ); const rsaPublicPem = Buffer.from( await Deno.readFile( - new URL("./testdata/rsa_public.pem", import.meta.url), + new URL("../testdata/rsa_public.pem", import.meta.url), ), ); diff --git a/cli/tests/unit_node/http2_test.ts b/cli/tests/unit_node/http2_test.ts new file mode 100644 index 
0000000000..543543cbdc --- /dev/null +++ b/cli/tests/unit_node/http2_test.ts @@ -0,0 +1,104 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import * as http2 from "node:http2"; +import * as net from "node:net"; +import { deferred } from "../../../test_util/std/async/deferred.ts"; +import { assertEquals } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; + +const { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_STATUS, +} = http2.constants; + +Deno.test("[node/http2 client]", async () => { + // Create a server to respond to the HTTP2 requests + const portPromise = deferred(); + const reqPromise = deferred(); + const ready = deferred(); + const ac = new AbortController(); + const server = Deno.serve({ + port: 0, + signal: ac.signal, + onListen: ({ port }: { port: number }) => portPromise.resolve(port), + handler: async (req: Request) => { + reqPromise.resolve(req); + await ready; + return new Response("body", { + status: 401, + headers: { "resp-header-name": "resp-header-value" }, + }); + }, + }); + + const port = await portPromise; + + // Get a session + const sessionPromise = deferred(); + const session = http2.connect( + `localhost:${port}`, + {}, + sessionPromise.resolve.bind(sessionPromise), + ); + const session2 = await sessionPromise; + assertEquals(session, session2); + + // Write a request, including a body + const stream = session.request({ + [HTTP2_HEADER_AUTHORITY]: `localhost:${port}`, + [HTTP2_HEADER_METHOD]: "POST", + [HTTP2_HEADER_PATH]: "/path", + "req-header-name": "req-header-value", + }); + stream.write("body"); + stream.end(); + + // Check the request + const req = await reqPromise; + assertEquals(req.headers.get("req-header-name"), "req-header-value"); + assertEquals(await req.text(), "body"); + + ready.resolve(); + + // Read a response + const headerPromise = new Promise>(( + resolve, + ) => stream.on("headers", resolve)); + const headers = await headerPromise; + 
assertEquals(headers["resp-header-name"], "resp-header-value"); + assertEquals(headers[HTTP2_HEADER_STATUS], "401"); + + ac.abort(); + await server.finished; +}); + +Deno.test("[node/http2 server]", async () => { + const server = http2.createServer(); + server.listen(0); + const port = ( server.address()).port; + const sessionPromise = new Promise((resolve) => + server.on("session", resolve) + ); + + const responsePromise = fetch(`http://localhost:${port}/path`, { + method: "POST", + body: "body", + }); + + const session = await sessionPromise; + const stream = await new Promise((resolve) => + session.on("stream", resolve) + ); + const _headers = await new Promise((resolve) => + stream.on("headers", resolve) + ); + const _data = await new Promise((resolve) => stream.on("data", resolve)); + const _end = await new Promise((resolve) => stream.on("end", resolve)); + stream.respond(); + stream.end(); + const resp = await responsePromise; + await resp.text(); + + await new Promise((resolve) => server.close(resolve)); +}); diff --git a/cli/tests/unit_node/http_test.ts b/cli/tests/unit_node/http_test.ts index 556ba16843..6b02282743 100644 --- a/cli/tests/unit_node/http_test.ts +++ b/cli/tests/unit_node/http_test.ts @@ -2,6 +2,7 @@ import EventEmitter from "node:events"; import http, { type RequestOptions } from "node:http"; +import https from "node:https"; import { assert, assertEquals, @@ -12,6 +13,7 @@ import { deferred } from "../../../test_util/std/async/deferred.ts"; import { gzip } from "node:zlib"; import { Buffer } from "node:buffer"; import { serve } from "../../../test_util/std/http/server.ts"; +import { execCode } from "../unit/test_util.ts"; Deno.test("[node/http listen]", async () => { { @@ -185,27 +187,41 @@ Deno.test("[node/http] server can respond with 101, 204, 205, 304 status", async Deno.test("[node/http] request default protocol", async () => { const promise = deferred(); + const promise2 = deferred(); const server = http.createServer((_, res) => { 
res.end("ok"); }); + + // @ts-ignore IncomingMessageForClient + // deno-lint-ignore no-explicit-any + let clientRes: any; + // deno-lint-ignore no-explicit-any + let clientReq: any; server.listen(() => { - const req = http.request( + clientReq = http.request( // deno-lint-ignore no-explicit-any { host: "localhost", port: (server.address() as any).port }, (res) => { + assert(res.socket instanceof EventEmitter); + assertEquals(res.complete, false); res.on("data", () => {}); res.on("end", () => { server.close(); }); + clientRes = res; assertEquals(res.statusCode, 200); + promise2.resolve(); }, ); - req.end(); + clientReq.end(); }); server.on("close", () => { promise.resolve(); }); await promise; + await promise2; + assert(clientReq.socket instanceof EventEmitter); + assertEquals(clientRes!.complete, true); }); Deno.test("[node/http] request with headers", async () => { @@ -292,32 +308,6 @@ Deno.test("[node/http] http.IncomingMessage can be created without url", () => { }); */ -Deno.test("[node/http] set http.IncomingMessage.statusMessage", () => { - // deno-lint-ignore no-explicit-any - const message = new (http as any).IncomingMessageForClient( - new Response(null, { status: 404, statusText: "Not Found" }), - { - encrypted: true, - readable: false, - remoteAddress: "foo", - address() { - return { port: 443, family: "IPv4" }; - }, - // deno-lint-ignore no-explicit-any - end(_cb: any) { - return this; - }, - // deno-lint-ignore no-explicit-any - destroy(_e: any) { - return; - }, - }, - ); - assertEquals(message.statusMessage, "Not Found"); - message.statusMessage = "boom"; - assertEquals(message.statusMessage, "boom"); -}); - Deno.test("[node/http] send request with non-chunked body", async () => { let requestHeaders: Headers; let requestBody = ""; @@ -484,3 +474,150 @@ Deno.test("[node/http] ServerResponse _implicitHeader", async () => { await d; }); + +Deno.test("[node/http] server unref", async () => { + const [statusCode, _output] = await execCode(` + import http 
from "node:http"; + const server = http.createServer((_req, res) => { + res.statusCode = status; + res.end(""); + }); + + // This should let the program to exit without waiting for the + // server to close. + server.unref(); + + server.listen(async () => { + }); + `); + assertEquals(statusCode, 0); +}); + +Deno.test("[node/http] ClientRequest handle non-string headers", async () => { + // deno-lint-ignore no-explicit-any + let headers: any; + const def = deferred(); + const req = http.request("http://localhost:4545/echo_server", { + method: "POST", + headers: { 1: 2 }, + }, (resp) => { + headers = resp.headers; + + resp.on("data", () => {}); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(headers!["1"], "2"); +}); + +Deno.test("[node/http] ClientRequest uses HTTP/1.1", async () => { + let body = ""; + const def = deferred(); + const req = https.request("https://localhost:5545/http_version", { + method: "POST", + headers: { 1: 2 }, + }, (resp) => { + resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(body, "HTTP/1.1"); +}); + +Deno.test("[node/http] ClientRequest setTimeout", async () => { + let body = ""; + const def = deferred(); + const timer = setTimeout(() => def.reject("timed out"), 50000); + const req = http.request("http://localhost:4545/http_version", (resp) => { + resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.setTimeout(120000); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + clearTimeout(timer); + assertEquals(body, "HTTP/1.1"); +}); + +Deno.test("[node/http] ClientRequest PATCH", async () => { + let body = ""; + const def = deferred(); + const req = http.request("http://localhost:4545/echo_server", { + method: "PATCH", + }, (resp) => { 
+ resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.write("hello "); + req.write("world"); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(body, "hello world"); +}); + +Deno.test("[node/http] ClientRequest PUT", async () => { + let body = ""; + const def = deferred(); + const req = http.request("http://localhost:4545/echo_server", { + method: "PUT", + }, (resp) => { + resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.write("hello "); + req.write("world"); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(body, "hello world"); +}); + +Deno.test("[node/http] ClientRequest search params", async () => { + let body = ""; + const def = deferred(); + const req = http.request({ + host: "localhost:4545", + path: "search_params?foo=bar", + }, (resp) => { + resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(body, "foo=bar"); +}); diff --git a/cli/tests/unit_node/module_test.ts b/cli/tests/unit_node/module_test.ts index d071ed2d18..3a675c7a17 100644 --- a/cli/tests/unit_node/module_test.ts +++ b/cli/tests/unit_node/module_test.ts @@ -1,7 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
import { Module } from "node:module"; -import { assertStrictEquals } from "../../../test_util/std/testing/asserts.ts"; +import { + assert, + assertEquals, +} from "../../../test_util/std/testing/asserts.ts"; +import process from "node:process"; +import * as path from "node:path"; Deno.test("[node/module _preloadModules] has internal require hook", () => { // Check if it's there @@ -10,5 +15,46 @@ Deno.test("[node/module _preloadModules] has internal require hook", () => { "./cli/tests/unit_node/testdata/add_global_property.js", ]); // deno-lint-ignore no-explicit-any - assertStrictEquals((globalThis as any).foo, "Hello"); + assertEquals((globalThis as any).foo, "Hello"); +}); + +Deno.test("[node/module runMain] loads module using the current process.argv", () => { + process.argv = [ + process.argv[0], + "./cli/tests/unit_node/testdata/add_global_property_run_main.js", + ]; + + // deno-lint-ignore no-explicit-any + (Module as any).runMain(); + // deno-lint-ignore no-explicit-any + assertEquals((globalThis as any).calledViaRunMain, true); +}); + +Deno.test("[node/module _nodeModulePaths] prevents duplicate /node_modules/node_modules suffix", () => { + // deno-lint-ignore no-explicit-any + const actual: string[] = (Module as any)._nodeModulePaths( + path.join(process.cwd(), "testdata", "node_modules", "foo"), + ); + + assert( + !actual.some((dir) => /node_modules[/\\]node_modules/g.test(dir)), + "Duplicate 'node_modules/node_modules' suffix found", + ); +}); + +Deno.test("[node/module _nodeModulePaths] prevents duplicate root /node_modules", () => { + // deno-lint-ignore no-explicit-any + const actual: string[] = (Module as any)._nodeModulePaths( + path.join(process.cwd(), "testdata", "node_modules", "foo"), + ); + + assert( + new Set(actual).size === actual.length, + "Duplicate path entries found", + ); + const root = path.parse(actual[0]).root; + assert( + actual.includes(path.join(root, "node_modules")), + "Missing root 'node_modules' directory", + ); }); diff 
--git a/cli/tests/unit_node/os_test.ts b/cli/tests/unit_node/os_test.ts new file mode 100644 index 0000000000..85164d1e60 --- /dev/null +++ b/cli/tests/unit_node/os_test.ts @@ -0,0 +1,279 @@ +// deno-lint-ignore-file no-undef +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import os from "node:os"; +import { + assert, + assertEquals, + assertThrows, +} from "../../../test_util/std/testing/asserts.ts"; + +Deno.test({ + name: "build architecture is a string", + fn() { + assertEquals(typeof os.arch(), "string"); + }, +}); + +Deno.test({ + name: "build architecture", + fn() { + if (Deno.build.arch == "x86_64") { + assertEquals(os.arch(), "x64"); + } else if (Deno.build.arch == "aarch64") { + assertEquals(os.arch(), "arm64"); + } else { + throw new Error("unreachable"); + } + }, +}); + +Deno.test({ + name: "home directory is a string", + fn() { + assertEquals(typeof os.homedir(), "string"); + }, +}); + +Deno.test({ + name: "tmp directory is a string", + fn() { + assertEquals(typeof os.tmpdir(), "string"); + }, +}); + +Deno.test({ + name: "hostname is a string", + fn() { + assertEquals(typeof os.hostname(), "string"); + }, +}); + +Deno.test({ + name: "platform is a string", + fn() { + assertEquals(typeof os.platform(), "string"); + }, +}); + +Deno.test({ + name: "release is a string", + fn() { + assertEquals(typeof os.release(), "string"); + }, +}); + +Deno.test({ + name: "type is a string", + fn() { + assertEquals(typeof os.type(), "string"); + }, +}); + +Deno.test({ + name: "getPriority(): PID must be a 32 bit integer", + fn() { + assertThrows( + () => { + os.getPriority(3.15); + }, + Error, + "pid must be 'an integer'", + ); + assertThrows( + () => { + os.getPriority(9999999999); + }, + Error, + "must be >= -2147483648 && <= 2147483647", + ); + }, +}); + +Deno.test({ + name: "setPriority(): PID must be a 32 bit integer", + fn() { + assertThrows( + () => { + os.setPriority(3.15, 0); + }, + Error, + "pid must be 'an integer'", + ); + 
assertThrows( + () => { + os.setPriority(9999999999, 0); + }, + Error, + "pid must be >= -2147483648 && <= 2147483647", + ); + }, +}); + +Deno.test({ + name: "setPriority(): priority must be an integer between -20 and 19", + fn() { + assertThrows( + () => { + os.setPriority(0, 3.15); + }, + Error, + "priority must be 'an integer'", + ); + assertThrows( + () => { + os.setPriority(0, -21); + }, + Error, + "priority must be >= -20 && <= 19", + ); + assertThrows( + () => { + os.setPriority(0, 20); + }, + Error, + "priority must be >= -20 && <= 19", + ); + assertThrows( + () => { + os.setPriority(0, 9999999999); + }, + Error, + "priority must be >= -20 && <= 19", + ); + }, +}); + +Deno.test({ + name: + "setPriority(): if only one argument specified, then this is the priority, NOT the pid", + fn() { + assertThrows( + () => { + os.setPriority(3.15); + }, + Error, + "priority must be 'an integer'", + ); + assertThrows( + () => { + os.setPriority(-21); + }, + Error, + "priority must be >= -20 && <= 19", + ); + assertThrows( + () => { + os.setPriority(20); + }, + Error, + "priority must be >= -20 && <= 19", + ); + assertThrows( + () => { + os.setPriority(9999999999); + }, + Error, + "priority must be >= -20 && <= 19", + ); + }, +}); + +Deno.test({ + name: "EOL is as expected", + fn() { + assert(os.EOL == "\r\n" || os.EOL == "\n"); + }, +}); + +Deno.test({ + name: "Endianness is determined", + fn() { + assert(["LE", "BE"].includes(os.endianness())); + }, +}); + +Deno.test({ + name: "Load average is an array of 3 numbers", + fn() { + const result = os.loadavg(); + assert(result.length == 3); + assertEquals(typeof result[0], "number"); + assertEquals(typeof result[1], "number"); + assertEquals(typeof result[2], "number"); + }, +}); + +Deno.test({ + name: "Primitive coercion works as expected", + fn() { + assertEquals(`${os.arch}`, os.arch()); + assertEquals(`${os.endianness}`, os.endianness()); + assertEquals(`${os.platform}`, os.platform()); + }, +}); + +Deno.test({ + name: 
"Total memory amount should be greater than 0", + fn() { + assert(os.totalmem() > 0); + }, +}); + +Deno.test({ + name: "Free memory amount should be greater than 0", + fn() { + assert(os.freemem() > 0); + }, +}); + +Deno.test({ + name: "Uptime should be greater than 0", + fn() { + assert(os.uptime() > 0); + }, +}); + +Deno.test({ + name: "os.cpus()", + fn() { + assertEquals(os.cpus().length, navigator.hardwareConcurrency); + + for (const cpu of os.cpus()) { + assertEquals(cpu.model, ""); + assertEquals(cpu.speed, 0); + assertEquals(cpu.times.user, 0); + assertEquals(cpu.times.nice, 0); + assertEquals(cpu.times.sys, 0); + assertEquals(cpu.times.idle, 0); + assertEquals(cpu.times.irq, 0); + } + }, +}); + +Deno.test({ + name: "APIs not yet implemented", + fn() { + assertThrows( + () => { + os.getPriority(); + }, + Error, + "Not implemented", + ); + assertThrows( + () => { + os.setPriority(0); + }, + Error, + "Not implemented", + ); + assertThrows( + () => { + os.userInfo(); + }, + Error, + "Not implemented", + ); + }, +}); diff --git a/cli/tests/unit_node/process_test.ts b/cli/tests/unit_node/process_test.ts index 686a3dbbc5..7e927a8ad2 100644 --- a/cli/tests/unit_node/process_test.ts +++ b/cli/tests/unit_node/process_test.ts @@ -2,6 +2,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
import process, { argv, env } from "node:process"; +import { Readable } from "node:stream"; +import { once } from "node:events"; import { assert, assertEquals, @@ -727,3 +729,38 @@ Deno.test({ assertEquals(stripColor(decoder.decode(stdout).trim()), "exit"); }, }); + +Deno.test({ + name: "process.reallyExit", + async fn() { + const command = new Deno.Command(Deno.execPath(), { + args: [ + "run", + "--quiet", + "--unstable", + "./testdata/process_really_exit.ts", + ], + cwd: testDir, + }); + const { stdout } = await command.output(); + + const decoder = new TextDecoder(); + assertEquals(stripColor(decoder.decode(stdout).trim()), "really exited"); + }, +}); + +Deno.test({ + name: "process.stdout isn't closed when source stream ended", + async fn() { + const source = Readable.from(["foo", "bar"]); + + source.pipe(process.stdout); + await once(source, "end"); + + // Wait a bit to ensure that streaming is completely finished. + await delay(10); + + // This checks if the rid 1 is still valid. + assert(typeof process.stdout.isTTY === "boolean"); + }, +}); diff --git a/cli/tests/unit_node/readline_test.ts b/cli/tests/unit_node/readline_test.ts index bef9008dd7..914d23e4af 100644 --- a/cli/tests/unit_node/readline_test.ts +++ b/cli/tests/unit_node/readline_test.ts @@ -12,3 +12,16 @@ Deno.test("[node/readline] createInstance", () => { // deno-lint-ignore no-explicit-any assertInstanceOf(rl, Interface as any); }); + +// Test for https://github.com/denoland/deno/issues/19183 +Deno.test("[node/readline] don't throw on rl.question()", () => { + const rli = createInterface({ + input: new Readable({ read() {} }), + output: new Writable({ write() {} }), + terminal: true, + }); + + // Calling this would throw + rli.question("foo", () => rli.close()); + rli.close(); +}); diff --git a/cli/tests/unit_node/testdata/add_global_property_run_main.js b/cli/tests/unit_node/testdata/add_global_property_run_main.js new file mode 100644 index 0000000000..c9db1cea66 --- /dev/null +++ 
b/cli/tests/unit_node/testdata/add_global_property_run_main.js @@ -0,0 +1 @@ +globalThis.calledViaRunMain = true; diff --git a/cli/tests/unit_node/testdata/child_process_stdio.js b/cli/tests/unit_node/testdata/child_process_stdio.js new file mode 100644 index 0000000000..399b890ed1 --- /dev/null +++ b/cli/tests/unit_node/testdata/child_process_stdio.js @@ -0,0 +1,15 @@ +import childProcess from "node:child_process"; +import process from "node:process"; +import * as path from "node:path"; + +const script = path.join( + path.dirname(path.fromFileUrl(import.meta.url)), + "node_modules", + "foo", + "index.js", +); + +const child = childProcess.spawn(process.execPath, [script], { + stdio: [process.stdin, process.stdout, process.stderr], +}); +child.on("close", () => console.log("close")); diff --git a/cli/tests/unit_node/testdata/child_process_stdio_012.js b/cli/tests/unit_node/testdata/child_process_stdio_012.js new file mode 100644 index 0000000000..682d8a084a --- /dev/null +++ b/cli/tests/unit_node/testdata/child_process_stdio_012.js @@ -0,0 +1,15 @@ +import childProcess from "node:child_process"; +import process from "node:process"; +import * as path from "node:path"; + +const script = path.join( + path.dirname(path.fromFileUrl(import.meta.url)), + "node_modules", + "foo", + "index.js", +); + +const child = childProcess.spawn(process.execPath, [script], { + stdio: [0, 1, 2], +}); +child.on("close", () => console.log("close")); diff --git a/cli/tests/unit_node/testdata/process_really_exit.ts b/cli/tests/unit_node/testdata/process_really_exit.ts new file mode 100644 index 0000000000..16f30b33d2 --- /dev/null +++ b/cli/tests/unit_node/testdata/process_really_exit.ts @@ -0,0 +1,10 @@ +import process from "node:process"; + +//deno-lint-ignore no-undef +// @ts-ignore - Node typings don't even have this because it's +// been deprecated for 4 years. But it's used in `signal-exit`, +// which in turn is used in `node-tap`. 
+process.reallyExit = function () { + console.info("really exited"); +}; +process.exit(); diff --git a/cli/tests/unit_node/testdata/worker_threads.mjs b/cli/tests/unit_node/testdata/worker_threads.mjs new file mode 100644 index 0000000000..03dc462f02 --- /dev/null +++ b/cli/tests/unit_node/testdata/worker_threads.mjs @@ -0,0 +1,34 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { + getEnvironmentData, + isMainThread, + parentPort, + threadId, + workerData, +} from "node:worker_threads"; +import { once } from "node:events"; + +async function message(expectedMessage) { + const [message] = await once(parentPort, "message"); + if (message !== expectedMessage) { + console.log(`Expected the message "${expectedMessage}", but got`, message); + // fail test + parentPort.close(); + } +} + +await message("Hello, how are you my thread?"); + +parentPort.postMessage("I'm fine!"); + +await new Promise((resolve) => setTimeout(resolve, 100)); + +parentPort.postMessage({ + isMainThread, + threadId, + workerData: Array.isArray(workerData) && + workerData[workerData.length - 1] instanceof MessagePort + ? 
workerData.slice(0, -1) + : workerData, + envData: [getEnvironmentData("test"), getEnvironmentData(1)], +}); diff --git a/cli/tests/unit_node/v8_test.ts b/cli/tests/unit_node/v8_test.ts index ab19035962..724ac35044 100644 --- a/cli/tests/unit_node/v8_test.ts +++ b/cli/tests/unit_node/v8_test.ts @@ -4,10 +4,7 @@ import { getHeapStatistics, setFlagsFromString, } from "node:v8"; -import { - assertEquals, - assertThrows, -} from "../../../test_util/std/testing/asserts.ts"; +import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; // https://github.com/nodejs/node/blob/a2bbe5ff216bc28f8dac1c36a8750025a93c3827/test/parallel/test-v8-version-tag.js#L6 Deno.test({ @@ -51,8 +48,8 @@ Deno.test({ }); Deno.test({ - name: "setFlagsFromString throws", + name: "setFlagsFromString", fn() { - assertThrows(() => setFlagsFromString("--allow_natives_syntax")); + setFlagsFromString("--allow_natives_syntax"); }, }); diff --git a/cli/tests/unit_node/worker_threads_test.ts b/cli/tests/unit_node/worker_threads_test.ts new file mode 100644 index 0000000000..f53b1e6927 --- /dev/null +++ b/cli/tests/unit_node/worker_threads_test.ts @@ -0,0 +1,194 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +import { + assert, + assertEquals, + assertObjectMatch, +} from "../../../test_util/std/testing/asserts.ts"; +import { fromFileUrl, relative } from "../../../test_util/std/path/mod.ts"; +import * as workerThreads from "node:worker_threads"; +import { EventEmitter, once } from "node:events"; + +Deno.test("[node/worker_threads] BroadcastChannel is exported", () => { + assertEquals(workerThreads.BroadcastChannel, BroadcastChannel); +}); + +Deno.test("[node/worker_threads] MessageChannel are MessagePort are exported", () => { + assertEquals(workerThreads.MessageChannel, MessageChannel); + assertEquals(workerThreads.MessagePort, MessagePort); +}); + +Deno.test({ + name: "[worker_threads] isMainThread", + fn() { + assertEquals(workerThreads.isMainThread, true); + }, +}); + +Deno.test({ + name: "[worker_threads] threadId", + fn() { + assertEquals(workerThreads.threadId, 0); + }, +}); + +Deno.test({ + name: "[worker_threads] resourceLimits", + fn() { + assertObjectMatch(workerThreads.resourceLimits, {}); + }, +}); + +Deno.test({ + name: "[worker_threads] parentPort", + fn() { + assertEquals(workerThreads.parentPort, null); + }, +}); + +Deno.test({ + name: "[worker_threads] workerData", + fn() { + assertEquals(workerThreads.workerData, null); + }, +}); + +Deno.test({ + name: "[worker_threads] setEnvironmentData / getEnvironmentData", + fn() { + workerThreads.setEnvironmentData("test", "test"); + assertEquals(workerThreads.getEnvironmentData("test"), "test"); + }, +}); + +Deno.test({ + name: "[worker_threads] Worker threadId", + async fn() { + const worker = new workerThreads.Worker( + new URL("./testdata/worker_threads.mjs", import.meta.url), + ); + worker.postMessage("Hello, how are you my thread?"); + await once(worker, "message"); + const message = await once(worker, "message"); + assertEquals(message[0].threadId, 1); + worker.terminate(); + + const worker1 = new workerThreads.Worker( + new URL("./testdata/worker_threads.mjs", import.meta.url), + ); + 
worker1.postMessage("Hello, how are you my thread?"); + await once(worker1, "message"); + assertEquals((await once(worker1, "message"))[0].threadId, 2); + worker1.terminate(); + }, +}); + +Deno.test({ + name: "[worker_threads] Worker basics", + async fn() { + workerThreads.setEnvironmentData("test", "test"); + workerThreads.setEnvironmentData(1, { + test: "random", + random: "test", + }); + const { port1 } = new MessageChannel(); + const worker = new workerThreads.Worker( + new URL("./testdata/worker_threads.mjs", import.meta.url), + { + workerData: ["hey", true, false, 2, port1], + // deno-lint-ignore no-explicit-any + transferList: [port1 as any], + }, + ); + worker.postMessage("Hello, how are you my thread?"); + assertEquals((await once(worker, "message"))[0], "I'm fine!"); + const data = (await once(worker, "message"))[0]; + // data.threadId can be 1 when this test is runned individually + if (data.threadId === 1) data.threadId = 3; + assertObjectMatch(data, { + isMainThread: false, + threadId: 3, + workerData: ["hey", true, false, 2], + envData: ["test", { test: "random", random: "test" }], + }); + worker.terminate(); + }, + sanitizeResources: false, +}); + +Deno.test({ + name: "[worker_threads] Worker eval", + async fn() { + const worker = new workerThreads.Worker( + ` + import { parentPort } from "node:worker_threads"; + parentPort.postMessage("It works!"); + `, + { + eval: true, + }, + ); + assertEquals((await once(worker, "message"))[0], "It works!"); + worker.terminate(); + }, +}); + +Deno.test({ + name: "[worker_threads] inheritences", + async fn() { + const worker = new workerThreads.Worker( + ` + import { EventEmitter } from "node:events"; + import { parentPort } from "node:worker_threads"; + parentPort.postMessage(parentPort instanceof EventTarget); + await new Promise(resolve => setTimeout(resolve, 100)); + parentPort.postMessage(parentPort instanceof EventEmitter); + `, + { + eval: true, + }, + ); + assertEquals((await once(worker, "message"))[0], 
true); + assertEquals((await once(worker, "message"))[0], false); + assert(worker instanceof EventEmitter); + assert(!(worker instanceof EventTarget)); + worker.terminate(); + }, +}); + +Deno.test({ + name: "[worker_threads] Worker workerData", + async fn() { + const worker = new workerThreads.Worker( + new URL("./testdata/worker_threads.mjs", import.meta.url), + { + workerData: null, + }, + ); + worker.postMessage("Hello, how are you my thread?"); + await once(worker, "message"); + assertEquals((await once(worker, "message"))[0].workerData, null); + worker.terminate(); + + const worker1 = new workerThreads.Worker( + new URL("./testdata/worker_threads.mjs", import.meta.url), + ); + worker1.postMessage("Hello, how are you my thread?"); + await once(worker1, "message"); + assertEquals((await once(worker1, "message"))[0].workerData, undefined); + worker1.terminate(); + }, +}); + +Deno.test({ + name: "[worker_threads] Worker with relative path", + async fn() { + const worker = new workerThreads.Worker(relative( + Deno.cwd(), + fromFileUrl(new URL("./testdata/worker_threads.mjs", import.meta.url)), + )); + worker.postMessage("Hello, how are you my thread?"); + assertEquals((await once(worker, "message"))[0], "I'm fine!"); + worker.terminate(); + }, +}); diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 0b6ef8bb1d..a7b75d8be8 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -2,12 +2,12 @@ use crate::args::BenchOptions; use crate::args::CliOptions; -use crate::args::TypeCheckMode; use crate::colors; use crate::display::write_json_to_stdout; +use crate::factory::CliFactory; use crate::graph_util::graph_valid_with_cli_options; +use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; use crate::util::file_watcher; @@ -15,7 +15,7 @@ use crate::util::file_watcher::ResolutionResult; use crate::util::fs::collect_specifiers; use 
crate::util::path::is_supported_ext; use crate::version::get_user_agent; -use crate::worker::create_custom_worker; +use crate::worker::CliMainWorkerFactory; use deno_core::error::generic_error; use deno_core::error::AnyError; @@ -26,17 +26,18 @@ use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::located_script_name; use deno_core::serde_v8; +use deno_core::task::spawn; +use deno_core::task::spawn_blocking; use deno_core::v8; use deno_core::ModuleSpecifier; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use indexmap::IndexMap; use indexmap::IndexSet; use log::Level; use serde::Deserialize; use serde::Serialize; -use std::cell::RefCell; use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; @@ -48,6 +49,7 @@ use tokio::sync::mpsc::UnboundedSender; struct BenchSpecifierOptions { filter: TestFilter, json: bool, + log_level: Option, } #[derive(Debug, Clone, Eq, PartialEq, Deserialize)] @@ -417,40 +419,38 @@ impl BenchReporter for ConsoleReporter { /// Type check a collection of module and document specifiers. async fn check_specifiers( - ps: &ProcState, - permissions: Permissions, + cli_options: &CliOptions, + module_load_preparer: &ModuleLoadPreparer, specifiers: Vec, ) -> Result<(), AnyError> { - let lib = ps.options.ts_type_lib_window(); - ps.module_load_preparer + let lib = cli_options.ts_type_lib_window(); + module_load_preparer .prepare_module_load( specifiers, false, lib, PermissionsContainer::allow_all(), - PermissionsContainer::new(permissions), ) .await?; - Ok(()) } /// Run a single specifier as an executable bench module. 
async fn bench_specifier( - ps: ProcState, + worker_factory: Arc, permissions: Permissions, specifier: ModuleSpecifier, sender: UnboundedSender, filter: TestFilter, ) -> Result<(), AnyError> { - let mut worker = create_custom_worker( - &ps, - specifier.clone(), - PermissionsContainer::new(permissions), - vec![ops::bench::deno_bench::init_ops(sender.clone())], - Default::default(), - ) - .await?; + let mut worker = worker_factory + .create_custom_worker( + specifier.clone(), + PermissionsContainer::new(permissions), + vec![ops::bench::deno_bench::init_ops(sender.clone())], + Default::default(), + ) + .await?; // We execute the main module as a side module so that import.meta.main is not set. worker.execute_side_module_possibly_with_npm().await?; @@ -492,53 +492,46 @@ async fn bench_specifier( }))?; for (desc, function) in benchmarks { sender.send(BenchEvent::Wait(desc.id))?; - let promise = { - let scope = &mut worker.js_runtime.handle_scope(); - let cb = function.open(scope); - let this = v8::undefined(scope).into(); - let promise = cb.call(scope, this, &[]).unwrap(); - v8::Global::new(scope, promise) - }; - let result = worker.js_runtime.resolve_value(promise).await?; + let result = worker.js_runtime.call_and_await(&function).await?; let scope = &mut worker.js_runtime.handle_scope(); let result = v8::Local::new(scope, result); let result = serde_v8::from_v8::(scope, result)?; sender.send(BenchEvent::Result(desc.id, result))?; } - loop { - if !worker.dispatch_beforeunload_event(located_script_name!())? { - break; - } - worker.run_event_loop(false).await?; - } + // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the + // event loop to continue beyond what's needed to await results. + worker.dispatch_beforeunload_event(located_script_name!())?; worker.dispatch_unload_event(located_script_name!())?; Ok(()) } /// Test a collection of specifiers with test modes concurrently. 
async fn bench_specifiers( - ps: &ProcState, + worker_factory: Arc, permissions: &Permissions, specifiers: Vec, options: BenchSpecifierOptions, ) -> Result<(), AnyError> { - let log_level = ps.options.log_level(); - let (sender, mut receiver) = unbounded_channel::(); - + let log_level = options.log_level; let option_for_handles = options.clone(); let join_handles = specifiers.into_iter().map(move |specifier| { - let ps = ps.clone(); + let worker_factory = worker_factory.clone(); let permissions = permissions.clone(); let specifier = specifier; let sender = sender.clone(); let options = option_for_handles.clone(); - tokio::task::spawn_blocking(move || { - let future = - bench_specifier(ps, permissions, specifier, sender, options.filter); - run_local(future) + spawn_blocking(move || { + let future = bench_specifier( + worker_factory, + permissions, + specifier, + sender, + options.filter, + ); + create_and_run_current_thread(future) }) }); @@ -547,7 +540,7 @@ async fn bench_specifiers( .collect::, tokio::task::JoinError>>>(); let handler = { - tokio::task::spawn(async move { + spawn(async move { let mut used_only = false; let mut report = BenchReport::new(); let mut reporter = @@ -640,12 +633,13 @@ pub async fn run_benchmarks( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&cli_options.permissions_options())?; let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)?; @@ -654,19 +648,28 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers(&ps, permissions.clone(), specifiers.clone()).await?; + check_specifiers( + cli_options, + factory.module_load_preparer().await?, + specifiers.clone(), + ) + .await?; if bench_options.no_run { return Ok(()); } + let log_level = cli_options.log_level(); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); bench_specifiers( - &ps, + worker_factory, &permissions, specifiers, BenchSpecifierOptions { filter: TestFilter::from_flag(&bench_options.filter), json: bench_options.json, + log_level, }, ) .await?; @@ -679,22 +682,25 @@ pub async fn run_benchmarks_with_watch( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let file_watcher = factory.file_watcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; - - let ps = RefCell::new(ps); + Permissions::from_options(&cli_options.permissions_options())?; + let graph_kind = cli_options.type_check_mode().as_graph_kind(); let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let bench_options = &bench_options; - let ps = ps.borrow().clone(); + let module_graph_builder = module_graph_builder.clone(); + let cli_options = cli_options.clone(); async move { let bench_modules = @@ -706,11 +712,10 @@ pub async fn run_benchmarks_with_watch( } else { bench_modules.clone() }; - let graph = ps - .module_graph_builder - .create_graph(bench_modules.clone()) + let graph = module_graph_builder + .create_graph(graph_kind, bench_modules.clone()) .await?; - graph_valid_with_cli_options(&graph, &bench_modules, &ps.options)?; + graph_valid_with_cli_options(&graph, &bench_modules, &cli_options)?; // TODO(@kitsonk) - This should be totally derivable from the graph. 
for specifier in bench_modules { @@ -720,32 +725,19 @@ pub async fn run_benchmarks_with_watch( // This needs to be accessible to skip getting dependencies if they're already there, // otherwise this will cause a stack overflow with circular dependencies output: &mut HashSet<&'a ModuleSpecifier>, - no_check: bool, ) { if let Some(module) = maybe_module.and_then(|m| m.esm()) { for dep in module.dependencies.values() { if let Some(specifier) = &dep.get_code() { if !output.contains(specifier) { output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); + get_dependencies(graph, graph.get(specifier), output); } } - if !no_check { - if let Some(specifier) = &dep.get_type() { - if !output.contains(specifier) { - output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); - } + if let Some(specifier) = &dep.get_type() { + if !output.contains(specifier) { + output.insert(specifier); + get_dependencies(graph, graph.get(specifier), output); } } } @@ -755,7 +747,7 @@ pub async fn run_benchmarks_with_watch( // This bench module and all it's dependencies let mut modules = HashSet::new(); modules.insert(&specifier); - get_dependencies(&graph, graph.get(&specifier), &mut modules, no_check); + get_dependencies(&graph, graph.get(&specifier), &mut modules); paths_to_watch.extend( modules @@ -800,32 +792,40 @@ pub async fn run_benchmarks_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let bench_options = &bench_options; - ps.borrow_mut().reset_for_file_watcher(); - let ps = ps.borrow().clone(); + file_watcher.reset(); + let module_load_preparer = module_load_preparer.clone(); + let cli_options = cli_options.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory = 
Arc::new(create_cli_main_worker_factory()); let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)? .into_iter() .filter(|specifier| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, permissions.clone(), specifiers.clone()).await?; + check_specifiers(&cli_options, &module_load_preparer, specifiers.clone()) + .await?; if bench_options.no_run { return Ok(()); } + let log_level = cli_options.log_level(); bench_specifiers( - &ps, + worker_factory, permissions, specifiers, BenchSpecifierOptions { filter: TestFilter::from_flag(&bench_options.filter), json: bench_options.json, + log_level, }, ) .await?; @@ -834,7 +834,7 @@ pub async fn run_benchmarks_with_watch( } }; - let clear_screen = !ps.borrow().options.no_clear_screen(); + let clear_screen = !cli_options.no_clear_screen(); file_watcher::watch_func( resolver, operation, @@ -931,7 +931,10 @@ mod mitata { sysctl.arg("-n"); sysctl.arg("machdep.cpu.brand_string"); return std::str::from_utf8( - &sysctl.output().map_or(Vec::from("unknown"), |x| x.stdout), + &sysctl + .output() + .map(|x| x.stdout) + .unwrap_or(Vec::from("unknown")), ) .unwrap() .trim() @@ -1138,13 +1141,13 @@ mod mitata { } else { if options.avg { s.push_str(&format!( - "{:>23}", + "{:>30}", format!("{}/iter", colors::yellow(fmt_duration(stats.avg))) )); } if options.min_max { s.push_str(&format!( - "{:>42}", + "{:>50}", format!( "({} … {})", colors::cyan(fmt_duration(stats.min)), @@ -1154,7 +1157,7 @@ mod mitata { } if options.percentiles { s.push_str(&format!( - " {:>18} {:>18} {:>18}", + " {:>22} {:>22} {:>22}", colors::magenta(fmt_duration(stats.p75)), colors::magenta(fmt_duration(stats.p99)), colors::magenta(fmt_duration(stats.p995)) diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index 26d170d7e2..f38948776d 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -12,9 +12,8 @@ use crate::args::BundleFlags; use crate::args::CliOptions; use crate::args::Flags; use 
crate::args::TsConfigType; -use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; -use crate::proc_state::ProcState; use crate::util; use crate::util::display; use crate::util::file_watcher::ResolutionResult; @@ -40,9 +39,11 @@ pub async fn bundle( let module_specifier = &module_specifier; async move { log::debug!(">>>>> bundle START"); - let ps = ProcState::from_cli_options(cli_options).await?; - let graph = ps - .module_graph_builder + let factory = CliFactory::from_cli_options(cli_options); + let module_graph_builder = factory.module_graph_builder().await?; + let cli_options = factory.cli_options(); + + let graph = module_graph_builder .create_graph_and_maybe_check(vec![module_specifier.clone()]) .await?; @@ -58,15 +59,14 @@ pub async fn bundle( }) .collect(); - if let Ok(Some(import_map_path)) = ps - .options + if let Ok(Some(import_map_path)) = cli_options .resolve_import_map_specifier() .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) { paths_to_watch.push(import_map_path); } - Ok((paths_to_watch, graph, ps)) + Ok((paths_to_watch, graph, cli_options.clone())) } .map(move |result| match result { Ok((paths_to_watch, graph, ps)) => ResolutionResult::Restart { @@ -80,49 +80,50 @@ pub async fn bundle( }) }; - let operation = |(ps, graph): (ProcState, Arc)| { - let out_file = &bundle_flags.out_file; - async move { - // at the moment, we don't support npm specifiers in deno bundle, so show an error - error_for_any_npm_specifier(&graph)?; + let operation = + |(cli_options, graph): (Arc, Arc)| { + let out_file = &bundle_flags.out_file; + async move { + // at the moment, we don't support npm specifiers in deno bundle, so show an error + error_for_any_npm_specifier(&graph)?; - let bundle_output = bundle_module_graph(graph.as_ref(), &ps)?; - log::debug!(">>>>> bundle END"); + let bundle_output = bundle_module_graph(graph.as_ref(), &cli_options)?; + log::debug!(">>>>> bundle END"); - if let Some(out_file) 
= out_file { - let output_bytes = bundle_output.code.as_bytes(); - let output_len = output_bytes.len(); - util::fs::write_file(out_file, output_bytes, 0o644)?; - log::info!( - "{} {:?} ({})", - colors::green("Emit"), - out_file, - colors::gray(display::human_size(output_len as f64)) - ); - if let Some(bundle_map) = bundle_output.maybe_map { - let map_bytes = bundle_map.as_bytes(); - let map_len = map_bytes.len(); - let ext = if let Some(curr_ext) = out_file.extension() { - format!("{}.map", curr_ext.to_string_lossy()) - } else { - "map".to_string() - }; - let map_out_file = out_file.with_extension(ext); - util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + if let Some(out_file) = out_file { + let output_bytes = bundle_output.code.as_bytes(); + let output_len = output_bytes.len(); + util::fs::write_file(out_file, output_bytes, 0o644)?; log::info!( "{} {:?} ({})", colors::green("Emit"), - map_out_file, - colors::gray(display::human_size(map_len as f64)) + out_file, + colors::gray(display::human_size(output_len as f64)) ); + if let Some(bundle_map) = bundle_output.maybe_map { + let map_bytes = bundle_map.as_bytes(); + let map_len = map_bytes.len(); + let ext = if let Some(curr_ext) = out_file.extension() { + format!("{}.map", curr_ext.to_string_lossy()) + } else { + "map".to_string() + }; + let map_out_file = out_file.with_extension(ext); + util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + log::info!( + "{} {:?} ({})", + colors::green("Emit"), + map_out_file, + colors::gray(display::human_size(map_len as f64)) + ); + } + } else { + println!("{}", bundle_output.code); } - } else { - println!("{}", bundle_output.code); - } - Ok(()) - } - }; + Ok(()) + } + }; if cli_options.watch_paths().is_some() { util::file_watcher::watch_func( @@ -149,14 +150,13 @@ pub async fn bundle( fn bundle_module_graph( graph: &deno_graph::ModuleGraph, - ps: &ProcState, + cli_options: &CliOptions, ) -> Result { log::info!("{} {}", colors::green("Bundle"), graph.roots[0]); - let 
ts_config_result = ps - .options - .resolve_ts_config_for_emit(TsConfigType::Bundle)?; - if ps.options.type_check_mode() == TypeCheckMode::None { + let ts_config_result = + cli_options.resolve_ts_config_for_emit(TsConfigType::Bundle)?; + if !cli_options.type_check_mode().is_true() { if let Some(ignored_options) = ts_config_result.maybe_ignored_options { log::warn!("{}", ignored_options); } diff --git a/cli/tools/check.rs b/cli/tools/check.rs index c7f4042233..4464802e6e 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -9,6 +9,7 @@ use deno_core::error::AnyError; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_runtime::colors; +use deno_runtime::deno_node::NodeResolver; use once_cell::sync::Lazy; use regex::Regex; @@ -18,11 +19,9 @@ use crate::args::TsConfigType; use crate::args::TsTypeLib; use crate::args::TypeCheckMode; use crate::cache::Caches; -use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; -use crate::node::CliNodeResolver; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use crate::tsc; use crate::version; @@ -39,23 +38,20 @@ pub struct CheckOptions { } pub struct TypeChecker { - deno_dir: DenoDir, caches: Arc, cli_options: Arc, - node_resolver: Arc, - npm_resolver: Arc, + node_resolver: Arc, + npm_resolver: Arc, } impl TypeChecker { pub fn new( - deno_dir: DenoDir, caches: Arc, cli_options: Arc, - node_resolver: Arc, - npm_resolver: Arc, + node_resolver: Arc, + npm_resolver: Arc, ) -> Self { Self { - deno_dir, caches, cli_options, node_resolver, @@ -95,8 +91,7 @@ impl TypeChecker { let ts_config = ts_config_result.ts_config; let type_check_mode = self.cli_options.type_check_mode(); let debug = self.cli_options.log_level() == Some(log::Level::Debug); - let cache = - TypeCheckCache::new(self.caches.type_checking_cache_db(&self.deno_dir)); + let cache = TypeCheckCache::new(self.caches.type_checking_cache_db()); let check_js = ts_config.get_check_js(); let check_hash 
= match get_check_hash(&graph, type_check_mode, &ts_config) { CheckHashResult::NoFiles => return Ok(()), diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs new file mode 100644 index 0000000000..c53ae4e028 --- /dev/null +++ b/cli/tools/compile.rs @@ -0,0 +1,271 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::args::CompileFlags; +use crate::args::Flags; +use crate::factory::CliFactory; +use crate::standalone::is_standalone_binary; +use crate::util::path::path_has_trailing_slash; +use deno_core::anyhow::bail; +use deno_core::anyhow::Context; +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::resolve_url_or_path; +use deno_graph::GraphKind; +use deno_runtime::colors; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; + +use super::installer::infer_name_from_url; + +pub async fn compile( + flags: Flags, + compile_flags: CompileFlags, +) -> Result<(), AnyError> { + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let parsed_source_cache = factory.parsed_source_cache()?; + let binary_writer = factory.create_compile_binary_writer().await?; + let module_specifier = cli_options.resolve_main_module()?; + let module_roots = { + let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); + vec.push(module_specifier.clone()); + for side_module in &compile_flags.include { + vec.push(resolve_url_or_path(side_module, cli_options.initial_cwd())?); + } + vec + }; + + let output_path = resolve_compile_executable_output_path( + &compile_flags, + cli_options.initial_cwd(), + ) + .await?; + + let graph = Arc::try_unwrap( + module_graph_builder + .create_graph_and_maybe_check(module_roots.clone()) + .await?, + ) + .unwrap(); + let graph = if cli_options.type_check_mode().is_true() { + // In this case, the previous graph creation did type checking, which will + // 
create a module graph with types information in it. We don't want to + // store that in the eszip so create a code only module graph from scratch. + module_graph_builder + .create_graph(GraphKind::CodeOnly, module_roots) + .await? + } else { + graph + }; + + let parser = parsed_source_cache.as_capturing_parser(); + let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; + + log::info!( + "{} {} to {}", + colors::green("Compile"), + module_specifier.to_string(), + output_path.display(), + ); + validate_output_path(&output_path)?; + + let mut file = std::fs::File::create(&output_path)?; + binary_writer + .write_bin( + &mut file, + eszip, + &module_specifier, + &compile_flags, + cli_options, + ) + .await + .with_context(|| format!("Writing {}", output_path.display()))?; + drop(file); + + // set it as executable + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let perms = std::fs::Permissions::from_mode(0o777); + std::fs::set_permissions(output_path, perms)?; + } + + Ok(()) +} + +/// This function writes out a final binary to specified path. If output path +/// is not already standalone binary it will return error instead. +fn validate_output_path(output_path: &Path) -> Result<(), AnyError> { + if output_path.exists() { + // If the output is a directory, throw error + if output_path.is_dir() { + bail!( + concat!( + "Could not compile to file '{}' because a directory exists with ", + "the same name. You can use the `--output ` flag to ", + "provide an alternative name." + ), + output_path.display() + ); + } + + // Make sure we don't overwrite any file not created by Deno compiler because + // this filename is chosen automatically in some cases. + if !is_standalone_binary(output_path) { + bail!( + concat!( + "Could not compile to file '{}' because the file already exists ", + "and cannot be overwritten. Please delete the existing file or ", + "use the `--output ` flag to provide an alternative name." 
+ ), + output_path.display() + ); + } + + // Remove file if it was indeed a deno compiled binary, to avoid corruption + // (see https://github.com/denoland/deno/issues/10310) + std::fs::remove_file(output_path)?; + } else { + let output_base = &output_path.parent().unwrap(); + if output_base.exists() && output_base.is_file() { + bail!( + concat!( + "Could not compile to file '{}' because its parent directory ", + "is an existing file. You can use the `--output ` flag to ", + "provide an alternative name.", + ), + output_base.display(), + ); + } + std::fs::create_dir_all(output_base)?; + } + + Ok(()) +} + +async fn resolve_compile_executable_output_path( + compile_flags: &CompileFlags, + current_dir: &Path, +) -> Result { + let module_specifier = + resolve_url_or_path(&compile_flags.source_file, current_dir)?; + + let mut output = compile_flags.output.clone(); + + if let Some(out) = output.as_ref() { + if path_has_trailing_slash(out) { + if let Some(infer_file_name) = infer_name_from_url(&module_specifier) + .await + .map(PathBuf::from) + { + output = Some(out.join(infer_file_name)); + } + } else { + output = Some(out.to_path_buf()); + } + } + + if output.is_none() { + output = infer_name_from_url(&module_specifier) + .await + .map(PathBuf::from) + } + + output.ok_or_else(|| generic_error( + "An executable name was not provided. One could not be inferred from the URL. 
Aborting.", + )).map(|output| { + get_os_specific_filepath(output, &compile_flags.target) + }) +} + +fn get_os_specific_filepath( + output: PathBuf, + target: &Option, +) -> PathBuf { + let is_windows = match target { + Some(target) => target.contains("windows"), + None => cfg!(windows), + }; + if is_windows && output.extension().unwrap_or_default() != "exe" { + if let Some(ext) = output.extension() { + // keep version in my-exe-0.1.0 -> my-exe-0.1.0.exe + output.with_extension(format!("{}.exe", ext.to_string_lossy())) + } else { + output.with_extension("exe") + } + } else { + output + } +} + +#[cfg(test)] +mod test { + pub use super::*; + + #[tokio::test] + async fn resolve_compile_executable_output_path_target_linux() { + let path = resolve_compile_executable_output_path( + &CompileFlags { + source_file: "mod.ts".to_string(), + output: Some(PathBuf::from("./file")), + args: Vec::new(), + target: Some("x86_64-unknown-linux-gnu".to_string()), + include: vec![], + }, + &std::env::current_dir().unwrap(), + ) + .await + .unwrap(); + + // no extension, no matter what the operating system is + // because the target was specified as linux + // https://github.com/denoland/deno/issues/9667 + assert_eq!(path.file_name().unwrap(), "file"); + } + + #[tokio::test] + async fn resolve_compile_executable_output_path_target_windows() { + let path = resolve_compile_executable_output_path( + &CompileFlags { + source_file: "mod.ts".to_string(), + output: Some(PathBuf::from("./file")), + args: Vec::new(), + target: Some("x86_64-pc-windows-msvc".to_string()), + include: vec![], + }, + &std::env::current_dir().unwrap(), + ) + .await + .unwrap(); + assert_eq!(path.file_name().unwrap(), "file.exe"); + } + + #[test] + fn test_os_specific_file_path() { + fn run_test(path: &str, target: Option<&str>, expected: &str) { + assert_eq!( + get_os_specific_filepath( + PathBuf::from(path), + &target.map(|s| s.to_string()) + ), + PathBuf::from(expected) + ); + } + + if cfg!(windows) { + 
run_test("C:\\my-exe", None, "C:\\my-exe.exe"); + run_test("C:\\my-exe.exe", None, "C:\\my-exe.exe"); + run_test("C:\\my-exe-0.1.2", None, "C:\\my-exe-0.1.2.exe"); + } else { + run_test("my-exe", Some("linux"), "my-exe"); + run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2"); + } + + run_test("C:\\my-exe", Some("windows"), "C:\\my-exe.exe"); + run_test("C:\\my-exe.exe", Some("windows"), "C:\\my-exe.exe"); + run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe"); + run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2"); + } +} diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 0297782436..223bac3167 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -4,8 +4,9 @@ use crate::args::CoverageFlags; use crate::args::FileFlags; use crate::args::Flags; use crate::colors; -use crate::proc_state::ProcState; +use crate::factory::CliFactory; use crate::tools::fmt::format_json; +use crate::tools::test::is_supported_test_path; use crate::util::fs::FileCollector; use crate::util::text_encoding::source_map_from_code; @@ -27,6 +28,7 @@ use std::io::BufWriter; use std::io::Error; use std::io::Write; use std::io::{self}; +use std::path::Path; use std::path::PathBuf; use text_lines::TextLines; use uuid::Uuid; @@ -602,7 +604,8 @@ fn filter_coverages( || e.url.starts_with(npm_root_dir) || e.url.ends_with("__anonymous__") || e.url.ends_with("$deno$test.js") - || e.url.ends_with(".snap"); + || e.url.ends_with(".snap") + || is_supported_test_path(Path::new(e.url.as_str())); let is_included = include.iter().any(|p| p.is_match(&e.url)); let is_excluded = exclude.iter().any(|p| p.is_match(&e.url)); @@ -620,8 +623,11 @@ pub async fn cover_files( return Err(generic_error("No matching coverage profiles found")); } - let ps = ProcState::from_flags(flags).await?; - let root_dir_url = ps.npm_resolver.root_dir_url(); + let factory = CliFactory::from_flags(flags).await?; + let root_dir_url = factory.npm_resolver().await?.root_dir_url(); 
+ let file_fetcher = factory.file_fetcher()?; + let cli_options = factory.cli_options(); + let emitter = factory.emitter()?; let script_coverages = collect_coverages(coverage_flags.files)?; let script_coverages = filter_coverages( @@ -664,13 +670,13 @@ pub async fn cover_files( for script_coverage in script_coverages { let module_specifier = deno_core::resolve_url_or_path( &script_coverage.url, - ps.options.initial_cwd(), + cli_options.initial_cwd(), )?; let maybe_file = if module_specifier.scheme() == "file" { - ps.file_fetcher.get_source(&module_specifier) + file_fetcher.get_source(&module_specifier) } else { - ps.file_fetcher + file_fetcher .fetch_cached(&module_specifier, 10) .with_context(|| { format!("Failed to fetch \"{module_specifier}\" from cache.") @@ -697,7 +703,7 @@ pub async fn cover_files( | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - match ps.emitter.maybed_cached_emit(&file.specifier, &file.source) { + match emitter.maybed_cached_emit(&file.specifier, &file.source) { Some(code) => code.into(), None => { return Err(anyhow!( diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index a07ba175aa..87fa253151 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -6,9 +6,9 @@ use crate::args::Flags; use crate::colors; use crate::display::write_json_to_stdout; use crate::display::write_to_stdout_ignore_sigpipe; +use crate::factory::CliFactory; use crate::file_fetcher::File; use crate::graph_util::graph_lock_or_exit; -use crate::proc_state::ProcState; use crate::tsc::get_types_declaration_file_text; use deno_ast::MediaType; use deno_core::anyhow::bail; @@ -16,6 +16,7 @@ use deno_core::error::AnyError; use deno_core::resolve_path; use deno_core::resolve_url_or_path; use deno_doc as doc; +use deno_graph::GraphKind; use deno_graph::ModuleSpecifier; use std::path::PathBuf; @@ -23,13 +24,14 @@ pub async fn print_docs( flags: Flags, doc_flags: DocFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = 
CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); let mut doc_nodes = match doc_flags.source_file { DocSourceFileFlag::Builtin => { let source_file_specifier = ModuleSpecifier::parse("internal://lib.deno.d.ts").unwrap(); - let content = get_types_declaration_file_text(ps.options.unstable()); + let content = get_types_declaration_file_text(cli_options.unstable()); let mut loader = deno_graph::source::MemoryLoader::new( vec![( source_file_specifier.to_string(), @@ -42,7 +44,7 @@ pub async fn print_docs( Vec::new(), ); let analyzer = deno_graph::CapturingModuleAnalyzer::default(); - let mut graph = deno_graph::ModuleGraph::default(); + let mut graph = deno_graph::ModuleGraph::new(GraphKind::TypesOnly); graph .build( vec![source_file_specifier.clone()], @@ -61,13 +63,18 @@ pub async fn print_docs( doc_parser.parse_module(&source_file_specifier)?.definitions } DocSourceFileFlag::Path(source_file) => { + let file_fetcher = factory.file_fetcher()?; + let module_graph_builder = factory.module_graph_builder().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let parsed_source_cache = factory.parsed_source_cache()?; + let module_specifier = - resolve_url_or_path(&source_file, ps.options.initial_cwd())?; + resolve_url_or_path(&source_file, cli_options.initial_cwd())?; // If the root module has external types, the module graph won't redirect it, // so instead create a dummy file which exports everything from the actual file being documented. let root_specifier = - resolve_path("./$deno$doc.ts", ps.options.initial_cwd()).unwrap(); + resolve_path("./$deno$doc.ts", cli_options.initial_cwd()).unwrap(); let root = File { local: PathBuf::from("./$deno$doc.ts"), maybe_types: None, @@ -78,21 +85,20 @@ pub async fn print_docs( }; // Save our fake file into file fetcher cache. 
- ps.file_fetcher.insert_cached(root); + file_fetcher.insert_cached(root); - let graph = ps - .module_graph_builder - .create_graph(vec![root_specifier.clone()]) + let graph = module_graph_builder + .create_graph(GraphKind::TypesOnly, vec![root_specifier.clone()]) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut lockfile.lock()); } let doc_parser = doc::DocParser::new( graph, doc_flags.private, - ps.parsed_source_cache.as_capturing_parser(), + parsed_source_cache.as_capturing_parser(), ); doc_parser.parse_with_reexports(&root_specifier)? } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 41accacba9..f2fec93023 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -12,8 +12,8 @@ use crate::args::FilesConfig; use crate::args::FmtOptions; use crate::args::FmtOptionsConfig; use crate::args::ProseWrap; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::util::diff::diff; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -28,6 +28,7 @@ use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use log::debug; use log::info; use log::warn; @@ -101,11 +102,12 @@ pub async fn format( } } }; - let deno_dir = &cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |(paths, fmt_options): (Vec, FmtOptionsConfig)| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.fmt_incremental_cache_db(deno_dir), + caches.fmt_incremental_cache_db(), &fmt_options, &paths, )); @@ -628,7 +630,7 @@ where let handles = file_paths.iter().map(|file_path| { let f = f.clone(); let file_path = file_path.clone(); - tokio::task::spawn_blocking(move || 
f(file_path)) + spawn_blocking(move || f(file_path)) }); let join_results = futures::future::join_all(handles).await; diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 69faa10fbc..95a7da7b0f 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -11,6 +11,7 @@ use deno_core::resolve_url_or_path; use deno_core::serde_json; use deno_core::serde_json::json; use deno_graph::Dependency; +use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; @@ -27,57 +28,61 @@ use deno_semver::npm::NpmPackageReqReference; use crate::args::Flags; use crate::args::InfoFlags; use crate::display; +use crate::factory::CliFactory; use crate::graph_util::graph_lock_or_exit; -use crate::npm::NpmPackageResolver; -use crate::proc_state::ProcState; +use crate::npm::CliNpmResolver; use crate::util::checksum; pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); if let Some(specifier) = info_flags.file { - let specifier = resolve_url_or_path(&specifier, ps.options.initial_cwd())?; - let mut loader = ps.module_graph_builder.create_graph_loader(); + let module_graph_builder = factory.module_graph_builder().await?; + let npm_resolver = factory.npm_resolver().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let specifier = resolve_url_or_path(&specifier, cli_options.initial_cwd())?; + let mut loader = module_graph_builder.create_graph_loader(); loader.enable_loading_cache_info(); // for displaying the cache information - let graph = ps - .module_graph_builder - .create_graph_with_loader(vec![specifier], &mut loader) + let graph = module_graph_builder + .create_graph_with_loader(GraphKind::All, vec![specifier], &mut loader) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut 
lockfile.lock()); } if info_flags.json { let mut json_graph = json!(graph); - add_npm_packages_to_json(&mut json_graph, &ps.npm_resolver); + add_npm_packages_to_json(&mut json_graph, npm_resolver); display::write_json_to_stdout(&json_graph)?; } else { let mut output = String::new(); - GraphDisplayContext::write(&graph, &ps.npm_resolver, &mut output)?; + GraphDisplayContext::write(&graph, npm_resolver, &mut output)?; display::write_to_stdout_ignore_sigpipe(output.as_bytes())?; } } else { // If it was just "deno info" print location of caches and exit print_cache_info( - &ps, + &factory, info_flags.json, - ps.options.location_flag().as_ref(), + cli_options.location_flag().as_ref(), )?; } Ok(()) } fn print_cache_info( - state: &ProcState, + factory: &CliFactory, json: bool, location: Option<&deno_core::url::Url>, ) -> Result<(), AnyError> { - let deno_dir = &state.dir.root_path_for_display(); - let modules_cache = &state.file_fetcher.get_http_cache_location(); - let npm_cache = &state.npm_cache.as_readonly().get_cache_location(); - let typescript_cache = &state.dir.gen_cache.location; - let registry_cache = &state.dir.registries_folder_path(); - let mut origin_dir = state.dir.origin_data_folder_path(); + let dir = factory.deno_dir()?; + let modules_cache = factory.file_fetcher()?.get_http_cache_location(); + let npm_cache = factory.npm_cache()?.as_readonly().get_cache_location(); + let typescript_cache = &dir.gen_cache.location; + let registry_cache = dir.registries_folder_path(); + let mut origin_dir = dir.origin_data_folder_path(); + let deno_dir = dir.root_path_for_display().to_string(); if let Some(location) = &location { origin_dir = @@ -88,7 +93,7 @@ fn print_cache_info( if json { let mut output = json!({ - "denoDir": deno_dir.to_string(), + "denoDir": deno_dir, "modulesCache": modules_cache, "npmCache": npm_cache, "typescriptCache": typescript_cache, @@ -141,7 +146,7 @@ fn print_cache_info( fn add_npm_packages_to_json( json: &mut serde_json::Value, - 
npm_resolver: &NpmPackageResolver, + npm_resolver: &CliNpmResolver, ) { // ideally deno_graph could handle this, but for now we just modify the json here let snapshot = npm_resolver.snapshot(); @@ -166,10 +171,8 @@ fn add_npm_packages_to_json( }); if let Some(pkg) = maybe_package { if let Some(module) = module.as_object_mut() { - module.insert( - "npmPackage".to_string(), - pkg.pkg_id.as_serialized().into(), - ); + module + .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); } } } else { @@ -202,7 +205,7 @@ fn add_npm_packages_to_json( { dep.insert( "npmPackage".to_string(), - pkg.pkg_id.as_serialized().into(), + pkg.id.as_serialized().into(), ); } } @@ -213,16 +216,14 @@ fn add_npm_packages_to_json( } } - let mut sorted_packages = snapshot.all_packages(); - sorted_packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); + let mut sorted_packages = + snapshot.all_packages_for_every_system().collect::>(); + sorted_packages.sort_by(|a, b| a.id.cmp(&b.id)); let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len()); for pkg in sorted_packages { let mut kv = serde_json::Map::new(); - kv.insert("name".to_string(), pkg.pkg_id.nv.name.to_string().into()); - kv.insert( - "version".to_string(), - pkg.pkg_id.nv.version.to_string().into(), - ); + kv.insert("name".to_string(), pkg.id.nv.name.to_string().into()); + kv.insert("version".to_string(), pkg.id.nv.version.to_string().into()); let mut deps = pkg.dependencies.values().collect::>(); deps.sort(); let deps = deps @@ -231,7 +232,7 @@ fn add_npm_packages_to_json( .collect::>(); kv.insert("dependencies".to_string(), deps.into()); - json_packages.insert(pkg.pkg_id.as_serialized(), kv.into()); + json_packages.insert(pkg.id.as_serialized(), kv.into()); } json.insert("npmPackages".to_string(), json_packages.into()); @@ -318,7 +319,7 @@ struct NpmInfo { impl NpmInfo { pub fn build<'a>( graph: &'a ModuleGraph, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, npm_snapshot: &'a 
NpmResolutionSnapshot, ) -> Self { let mut info = NpmInfo::default(); @@ -330,8 +331,8 @@ impl NpmInfo { if let Module::Npm(module) = module { let nv = &module.nv_reference.nv; if let Ok(package) = npm_snapshot.resolve_package_from_deno_module(nv) { - info.resolved_ids.insert(nv.clone(), package.pkg_id.clone()); - if !info.packages.contains_key(&package.pkg_id) { + info.resolved_ids.insert(nv.clone(), package.id.clone()); + if !info.packages.contains_key(&package.id) { info.fill_package_info(package, npm_resolver, npm_snapshot); } } @@ -344,14 +345,12 @@ impl NpmInfo { fn fill_package_info<'a>( &mut self, package: &NpmResolutionPackage, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, npm_snapshot: &'a NpmResolutionSnapshot, ) { - self - .packages - .insert(package.pkg_id.clone(), package.clone()); - if let Ok(size) = npm_resolver.package_size(&package.pkg_id) { - self.package_sizes.insert(package.pkg_id.clone(), size); + self.packages.insert(package.id.clone(), package.clone()); + if let Ok(size) = npm_resolver.package_size(&package.id) { + self.package_sizes.insert(package.id.clone(), size); } for id in package.dependencies.values() { if !self.packages.contains_key(id) { @@ -380,7 +379,7 @@ struct GraphDisplayContext<'a> { impl<'a> GraphDisplayContext<'a> { pub fn write( graph: &'a ModuleGraph, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, writer: &mut TWrite, ) -> fmt::Result { let npm_snapshot = npm_resolver.snapshot(); @@ -531,7 +530,7 @@ impl<'a> GraphDisplayContext<'a> { None => Specifier(module.specifier().clone()), }; let was_seen = !self.seen.insert(match &package_or_specifier { - Package(package) => package.pkg_id.as_serialized(), + Package(package) => package.id.as_serialized(), Specifier(specifier) => specifier.to_string(), }); let header_text = if was_seen { @@ -549,7 +548,7 @@ impl<'a> GraphDisplayContext<'a> { }; let maybe_size = match &package_or_specifier { Package(package) => { - 
self.npm_info.package_sizes.get(&package.pkg_id).copied() + self.npm_info.package_sizes.get(&package.id).copied() } Specifier(_) => match module { Module::Esm(module) => Some(module.size() as u64), @@ -603,7 +602,7 @@ impl<'a> GraphDisplayContext<'a> { )); if let Some(package) = self.npm_info.packages.get(dep_id) { if !package.dependencies.is_empty() { - let was_seen = !self.seen.insert(package.pkg_id.as_serialized()); + let was_seen = !self.seen.insert(package.id.as_serialized()); if was_seen { child.text = format!("{} {}", child.text, colors::gray("*")); } else { diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 461bb1a50a..07606d5f8d 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -6,8 +6,8 @@ use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::InstallFlags; use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_core::anyhow::Context; @@ -133,7 +133,7 @@ pub async fn infer_name_from_url(url: &Url) -> Option { let mut url = url.clone(); if url.path() == "/" { - let client = HttpClient::new(None, None).unwrap(); + let client = HttpClient::new(None, None); if let Ok(res) = client.get_redirected_response(url.clone()).await { url = res.url().clone(); } @@ -233,9 +233,10 @@ pub async fn install_command( install_flags: InstallFlags, ) -> Result<(), AnyError> { // ensure the module is cached - ProcState::from_flags(flags.clone()) + CliFactory::from_flags(flags.clone()) + .await? + .module_load_preparer() .await? 
- .module_load_preparer .load_and_type_check_files(&[install_flags.module_url.clone()]) .await?; diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index eae2f1032d..40c37ce773 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -11,8 +11,8 @@ use crate::args::FilesConfig; use crate::args::LintOptions; use crate::args::LintReporterKind; use crate::args::LintRulesConfig; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::tools::fmt::run_parallelized; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -98,11 +98,12 @@ pub async fn lint( }; let has_error = Arc::new(AtomicBool::new(false)); - let deno_dir = cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |paths: Vec| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.lint_incremental_cache_db(&deno_dir), + caches.lint_incremental_cache_db(), // use a hash of the rule names in order to bust the cache &{ // ensure this is stable by sorting it diff --git a/cli/tools/mod.rs b/cli/tools/mod.rs index cf29435a7c..c4a8306ab9 100644 --- a/cli/tools/mod.rs +++ b/cli/tools/mod.rs @@ -3,6 +3,7 @@ pub mod bench; pub mod bundle; pub mod check; +pub mod compile; pub mod coverage; pub mod doc; pub mod fmt; @@ -12,7 +13,6 @@ pub mod installer; pub mod lint; pub mod repl; pub mod run; -pub mod standalone; pub mod task; pub mod test; pub mod upgrade; diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index a6cc716373..34acb8a4e3 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -1,11 +1,14 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::args::CliOptions; use crate::args::Flags; use crate::args::ReplFlags; use crate::colors; -use crate::proc_state::ProcState; -use crate::worker::create_main_worker; +use crate::factory::CliFactory; +use crate::file_fetcher::FileFetcher; use deno_core::error::AnyError; +use deno_core::futures::StreamExt; +use deno_core::task::spawn_blocking; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; use rustyline::error::ReadlineError; @@ -29,8 +32,11 @@ async fn read_line_and_poll( message_handler: &mut RustylineSyncMessageHandler, editor: ReplEditor, ) -> Result { - let mut line_fut = tokio::task::spawn_blocking(move || editor.readline()); + #![allow(clippy::await_holding_refcell_ref)] + let mut line_fut = spawn_blocking(move || editor.readline()); let mut poll_worker = true; + let notifications_rc = repl_session.notifications.clone(); + let mut notifications = notifications_rc.borrow_mut(); loop { tokio::select! { @@ -56,7 +62,20 @@ async fn read_line_and_poll( } poll_worker = true; - }, + } + message = notifications.next() => { + if let Some(message) = message { + let method = message.get("method").unwrap().as_str().unwrap(); + if method == "Runtime.exceptionThrown" { + let params = message.get("params").unwrap().as_object().unwrap(); + let exception_details = params.get("exceptionDetails").unwrap().as_object().unwrap(); + let text = exception_details.get("text").unwrap().as_str().unwrap(); + let exception = exception_details.get("exception").unwrap().as_object().unwrap(); + let description = exception.get("description").and_then(|d| d.as_str()).unwrap_or("undefined"); + println!("{text} {description}"); + } + } + } _ = repl_session.run_event_loop(), if poll_worker => { poll_worker = false; } @@ -65,14 +84,14 @@ async fn read_line_and_poll( } async fn read_eval_file( - ps: &ProcState, + cli_options: &CliOptions, + file_fetcher: &FileFetcher, eval_file: &str, ) -> Result { let specifier = - 
deno_core::resolve_url_or_path(eval_file, ps.options.initial_cwd())?; + deno_core::resolve_url_or_path(eval_file, cli_options.initial_cwd())?; - let file = ps - .file_fetcher + let file = file_fetcher .fetch(&specifier, PermissionsContainer::allow_all()) .await?; @@ -80,19 +99,29 @@ async fn read_eval_file( } pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; - let mut worker = create_main_worker( - &ps, - main_module, - PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?), - ) - .await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; + let permissions = PermissionsContainer::new(Permissions::from_options( + &cli_options.permissions_options(), + )?); + let npm_resolver = factory.npm_resolver().await?.clone(); + let resolver = factory.resolver().await?.clone(); + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; + let history_file_path = factory + .deno_dir() + .ok() + .and_then(|dir| dir.repl_history_file_path()); + + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; worker.setup_repl().await?; let worker = worker.into_main_worker(); - let mut repl_session = ReplSession::initialize(ps.clone(), worker).await?; + let mut repl_session = + ReplSession::initialize(cli_options, npm_resolver, resolver, worker) + .await?; let mut rustyline_channel = rustyline_channel(); let helper = EditorHelper { @@ -100,12 +129,11 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { sync_sender: rustyline_channel.0, }; - let history_file_path = ps.dir.repl_history_file_path(); let editor = ReplEditor::new(helper, history_file_path)?; if let Some(eval_files) = repl_flags.eval_files { for eval_file in 
eval_files { - match read_eval_file(&ps, &eval_file).await { + match read_eval_file(cli_options, file_fetcher, &eval_file).await { Ok(eval_source) => { let output = repl_session .evaluate_line_and_get_output(&eval_source) @@ -132,7 +160,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { // Doing this manually, instead of using `log::info!` because these messages // are supposed to go to stdout, not stderr. - if !ps.options.is_quiet() { + if !cli_options.is_quiet() { println!("Deno {}", crate::version::deno()); println!("exit using ctrl+d, ctrl+c, or close()"); if repl_flags.is_default_command { diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 7fc251362e..4a30c93c44 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -1,8 +1,14 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::cell::RefCell; +use std::rc::Rc; +use std::sync::Arc; + +use crate::args::CliOptions; use crate::colors; use crate::lsp::ReplLanguageServer; -use crate::ProcState; +use crate::npm::CliNpmResolver; +use crate::resolver::CliGraphResolver; use deno_ast::swc::ast as swc_ast; use deno_ast::swc::visit::noop_visit_type; @@ -117,22 +123,23 @@ struct TsEvaluateResponse { } pub struct ReplSession { - proc_state: ProcState, + has_node_modules_dir: bool, + npm_resolver: Arc, + resolver: Arc, pub worker: MainWorker, session: LocalInspectorSession, pub context_id: u64, pub language_server: ReplLanguageServer, + pub notifications: Rc>>, has_initialized_node_runtime: bool, referrer: ModuleSpecifier, - // FIXME(bartlomieju): this field should be used to listen - // for "exceptionThrown" notifications - #[allow(dead_code)] - notification_rx: UnboundedReceiver, } impl ReplSession { pub async fn initialize( - proc_state: ProcState, + cli_options: &CliOptions, + npm_resolver: Arc, + resolver: Arc, mut worker: MainWorker, ) -> Result { let language_server = ReplLanguageServer::new_initialized().await?; @@ -171,21 
+178,21 @@ impl ReplSession { } assert_ne!(context_id, 0); - let referrer = deno_core::resolve_path( - "./$deno$repl.ts", - proc_state.options.initial_cwd(), - ) - .unwrap(); + let referrer = + deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd()) + .unwrap(); let mut repl_session = ReplSession { - proc_state, + has_node_modules_dir: cli_options.has_node_modules_dir(), + npm_resolver, + resolver, worker, session, context_id, language_server, has_initialized_node_runtime: false, referrer, - notification_rx, + notifications: Rc::new(RefCell::new(notification_rx)), }; // inject prelude @@ -251,9 +258,15 @@ impl ReplSession { Ok(if let Some(exception_details) = exception_details { session.set_last_thrown_error(&result).await?; let description = match exception_details.exception { - Some(exception) => exception - .description - .unwrap_or_else(|| "Unknown exception".to_string()), + Some(exception) => { + if let Some(description) = exception.description { + description + } else if let Some(value) = exception.value { + value.to_string() + } else { + "undefined".to_string() + } + } None => "Unknown exception".to_string(), }; EvaluationOutput::Error(format!( @@ -487,7 +500,6 @@ impl ReplSession { .iter() .flat_map(|i| { self - .proc_state .resolver .resolve(i, &self.referrer) .ok() @@ -506,22 +518,17 @@ impl ReplSession { if !self.has_initialized_node_runtime { deno_node::initialize_runtime( &mut self.worker.js_runtime, - self.proc_state.options.has_node_modules_dir(), + self.has_node_modules_dir, None, )?; self.has_initialized_node_runtime = true; } - self - .proc_state - .npm_resolver - .add_package_reqs(npm_imports) - .await?; + self.npm_resolver.add_package_reqs(&npm_imports).await?; // prevent messages in the repl about @types/node not being cached if has_node_specifier { self - .proc_state .npm_resolver .inject_synthetic_types_node_package() .await?; diff --git a/cli/tools/run.rs b/cli/tools/run.rs index 7f4b5c8f74..4805ea704d 100644 --- 
a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -10,10 +10,10 @@ use deno_runtime::permissions::PermissionsContainer; use crate::args::EvalFlags; use crate::args::Flags; +use crate::factory::CliFactory; +use crate::factory::CliFactoryBuilder; use crate::file_fetcher::File; -use crate::proc_state::ProcState; use crate::util; -use crate::worker::create_main_worker; pub async fn run_script(flags: Flags) -> Result { if !flags.has_permission() && flags.has_permission_in_argv() { @@ -32,41 +32,47 @@ To grant permissions, set them before the script argument. For example: } // TODO(bartlomieju): actually I think it will also fail if there's an import - // map specified and bare specifier is used on the command line - this should - // probably call `ProcState::resolve` instead - let ps = ProcState::from_flags(flags).await?; + // map specified and bare specifier is used on the command line + let factory = CliFactory::from_flags(flags).await?; + let deno_dir = factory.deno_dir()?; + let http_client = factory.http_client(); + let cli_options = factory.cli_options(); // Run a background task that checks for available upgrades. If an earlier // run of this background task found a new version of Deno. 
super::upgrade::check_for_upgrades( - ps.http_client.clone(), - ps.dir.upgrade_check_file_path(), + http_client.clone(), + deno_dir.upgrade_check_file_path(), ); - let main_module = ps.options.resolve_main_module()?; + let main_module = cli_options.resolve_main_module()?; + + maybe_npm_install(&factory).await?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let mut worker = create_main_worker(&ps, main_module, permissions).await?; + let worker_factory = factory.create_cli_main_worker_factory().await?; + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) } pub async fn run_from_stdin(flags: Flags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; - let mut worker = create_main_worker( - &ps, - main_module.clone(), - PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?), - ) - .await?; + maybe_npm_install(&factory).await?; + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; + let permissions = PermissionsContainer::new(Permissions::from_options( + &cli_options.permissions_options(), + )?); let mut source = Vec::new(); std::io::stdin().read_to_end(&mut source)?; // Create a dummy source file. 
@@ -75,13 +81,16 @@ pub async fn run_from_stdin(flags: Flags) -> Result { maybe_types: None, media_type: MediaType::TypeScript, source: String::from_utf8(source)?.into(), - specifier: main_module, + specifier: main_module.clone(), maybe_headers: None, }; // Save our fake file into file fetcher cache // to allow module access by TS compiler - ps.file_fetcher.insert_cached(source_file); + file_fetcher.insert_cached(source_file); + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) } @@ -90,19 +99,30 @@ pub async fn run_from_stdin(flags: Flags) -> Result { // code properly. async fn run_with_watch(flags: Flags) -> Result { let (sender, receiver) = tokio::sync::mpsc::unbounded_channel(); - let mut ps = - ProcState::from_flags_for_file_watcher(flags, sender.clone()).await?; - let clear_screen = !ps.options.no_clear_screen(); - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactoryBuilder::new() + .with_watcher(sender.clone()) + .build_from_flags(flags) + .await?; + let file_watcher = factory.file_watcher()?; + let cli_options = factory.cli_options(); + let clear_screen = !cli_options.no_clear_screen(); + let main_module = cli_options.resolve_main_module()?; + maybe_npm_install(&factory).await?; + + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |main_module: ModuleSpecifier| { - ps.reset_for_file_watcher(); - let ps = ps.clone(); + file_watcher.reset(); + let permissions = PermissionsContainer::new(Permissions::from_options( + &cli_options.permissions_options(), + )?); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); + Ok(async move { - let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?); - let worker = create_main_worker(&ps, main_module, permissions).await?; + let worker = 
create_cli_main_worker_factory() + .create_main_worker(main_module, permissions) + .await?; worker.run_for_watcher().await?; Ok(()) @@ -127,13 +147,13 @@ pub async fn eval_command( flags: Flags, eval_flags: EvalFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; - let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?); - let mut worker = - create_main_worker(&ps, main_module.clone(), permissions).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let main_module = cli_options.resolve_main_module()?; + + maybe_npm_install(&factory).await?; + // Create a dummy source file. let source_code = if eval_flags.print { format!("console.log({})", eval_flags.code) @@ -147,13 +167,34 @@ pub async fn eval_command( maybe_types: None, media_type: MediaType::Unknown, source: String::from_utf8(source_code)?.into(), - specifier: main_module, + specifier: main_module.clone(), maybe_headers: None, }; // Save our fake file into file fetcher cache // to allow module access by TS compiler. - ps.file_fetcher.insert_cached(file); + file_fetcher.insert_cached(file); + + let permissions = PermissionsContainer::new(Permissions::from_options( + &cli_options.permissions_options(), + )?); + let worker_factory = factory.create_cli_main_worker_factory().await?; + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) } + +async fn maybe_npm_install(factory: &CliFactory) -> Result<(), AnyError> { + // ensure an "npm install" is done if the user has explicitly + // opted into using a node_modules directory + if factory.cli_options().node_modules_dir_enablement() == Some(true) { + factory + .package_json_deps_installer() + .await? 
+ .ensure_top_level_install() + .await?; + } + Ok(()) +} diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs deleted file mode 100644 index fab3266ea4..0000000000 --- a/cli/tools/standalone.rs +++ /dev/null @@ -1,418 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -use crate::args::CaData; -use crate::args::CompileFlags; -use crate::args::Flags; -use crate::cache::DenoDir; -use crate::graph_util::error_for_any_npm_specifier; -use crate::http_util::HttpClient; -use crate::standalone::Metadata; -use crate::standalone::MAGIC_TRAILER; -use crate::util::path::path_has_trailing_slash; -use crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; -use crate::ProcState; -use deno_core::anyhow::bail; -use deno_core::anyhow::Context; -use deno_core::error::generic_error; -use deno_core::error::AnyError; -use deno_core::resolve_url_or_path; -use deno_core::serde_json; -use deno_graph::ModuleSpecifier; -use deno_runtime::colors; -use std::env; -use std::fs; -use std::fs::File; -use std::io::Read; -use std::io::Seek; -use std::io::SeekFrom; -use std::io::Write; -use std::path::Path; -use std::path::PathBuf; -use std::sync::Arc; - -use super::installer::infer_name_from_url; - -pub async fn compile( - flags: Flags, - compile_flags: CompileFlags, -) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; - let module_specifier = ps.options.resolve_main_module()?; - let module_roots = { - let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); - vec.push(module_specifier.clone()); - for side_module in &compile_flags.include { - vec.push(resolve_url_or_path(side_module, ps.options.initial_cwd())?); - } - vec - }; - let deno_dir = &ps.dir; - - let output_path = resolve_compile_executable_output_path( - &compile_flags, - ps.options.initial_cwd(), - ) - .await?; - - let graph = Arc::try_unwrap( - ps.module_graph_builder - .create_graph_and_maybe_check(module_roots) - .await?, 
- ) - .unwrap(); - - // at the moment, we don't support npm specifiers in deno_compile, so show an error - error_for_any_npm_specifier(&graph)?; - - let parser = ps.parsed_source_cache.as_capturing_parser(); - let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; - - log::info!( - "{} {}", - colors::green("Compile"), - module_specifier.to_string() - ); - - // Select base binary based on target - let original_binary = - get_base_binary(&ps.http_client, deno_dir, compile_flags.target.clone()) - .await?; - - let final_bin = create_standalone_binary( - original_binary, - eszip, - module_specifier, - &compile_flags, - ps, - ) - .await?; - - log::info!("{} {}", colors::green("Emit"), output_path.display()); - - write_standalone_binary(output_path, final_bin).await?; - Ok(()) -} - -async fn get_base_binary( - client: &HttpClient, - deno_dir: &DenoDir, - target: Option, -) -> Result, AnyError> { - if target.is_none() { - let path = std::env::current_exe()?; - return Ok(tokio::fs::read(path).await?); - } - - let target = target.unwrap_or_else(|| env!("TARGET").to_string()); - let binary_name = format!("deno-{target}.zip"); - - let binary_path_suffix = if crate::version::is_canary() { - format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name) - } else { - format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name) - }; - - let download_directory = deno_dir.dl_folder_path(); - let binary_path = download_directory.join(&binary_path_suffix); - - if !binary_path.exists() { - download_base_binary(client, &download_directory, &binary_path_suffix) - .await?; - } - - let archive_data = tokio::fs::read(binary_path).await?; - let temp_dir = tempfile::TempDir::new()?; - let base_binary_path = crate::tools::upgrade::unpack_into_dir( - archive_data, - target.contains("windows"), - &temp_dir, - )?; - let base_binary = tokio::fs::read(base_binary_path).await?; - drop(temp_dir); // delete the temp dir - Ok(base_binary) -} - -async fn 
download_base_binary( - client: &HttpClient, - output_directory: &Path, - binary_path_suffix: &str, -) -> Result<(), AnyError> { - let download_url = format!("https://dl.deno.land/{binary_path_suffix}"); - let maybe_bytes = { - let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars); - let progress = progress_bars.update(&download_url); - - client - .download_with_progress(download_url, &progress) - .await? - }; - let bytes = match maybe_bytes { - Some(bytes) => bytes, - None => { - log::info!("Download could not be found, aborting"); - std::process::exit(1) - } - }; - - std::fs::create_dir_all(output_directory)?; - let output_path = output_directory.join(binary_path_suffix); - std::fs::create_dir_all(output_path.parent().unwrap())?; - tokio::fs::write(output_path, bytes).await?; - Ok(()) -} - -/// This functions creates a standalone deno binary by appending a bundle -/// and magic trailer to the currently executing binary. -async fn create_standalone_binary( - mut original_bin: Vec, - eszip: eszip::EszipV2, - entrypoint: ModuleSpecifier, - compile_flags: &CompileFlags, - ps: ProcState, -) -> Result, AnyError> { - let mut eszip_archive = eszip.into_bytes(); - - let ca_data = match ps.options.ca_data() { - Some(CaData::File(ca_file)) => { - Some(fs::read(ca_file).with_context(|| format!("Reading: {ca_file}"))?) - } - Some(CaData::Bytes(bytes)) => Some(bytes.clone()), - None => None, - }; - let maybe_import_map = ps - .options - .resolve_import_map(&ps.file_fetcher) - .await? 
- .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); - let metadata = Metadata { - argv: compile_flags.args.clone(), - unstable: ps.options.unstable(), - seed: ps.options.seed(), - location: ps.options.location_flag().clone(), - permissions: ps.options.permissions_options(), - v8_flags: ps.options.v8_flags().clone(), - unsafely_ignore_certificate_errors: ps - .options - .unsafely_ignore_certificate_errors() - .clone(), - log_level: ps.options.log_level(), - ca_stores: ps.options.ca_stores().clone(), - ca_data, - entrypoint, - maybe_import_map, - }; - let mut metadata = serde_json::to_string(&metadata)?.as_bytes().to_vec(); - - let eszip_pos = original_bin.len(); - let metadata_pos = eszip_pos + eszip_archive.len(); - let mut trailer = MAGIC_TRAILER.to_vec(); - trailer.write_all(&eszip_pos.to_be_bytes())?; - trailer.write_all(&metadata_pos.to_be_bytes())?; - - let mut final_bin = Vec::with_capacity( - original_bin.len() + eszip_archive.len() + trailer.len(), - ); - final_bin.append(&mut original_bin); - final_bin.append(&mut eszip_archive); - final_bin.append(&mut metadata); - final_bin.append(&mut trailer); - - Ok(final_bin) -} - -/// This function writes out a final binary to specified path. If output path -/// is not already standalone binary it will return error instead. -async fn write_standalone_binary( - output_path: PathBuf, - final_bin: Vec, -) -> Result<(), AnyError> { - if output_path.exists() { - // If the output is a directory, throw error - if output_path.is_dir() { - bail!( - concat!( - "Could not compile to file '{}' because a directory exists with ", - "the same name. You can use the `--output ` flag to ", - "provide an alternative name." - ), - output_path.display() - ); - } - - // Make sure we don't overwrite any file not created by Deno compiler. - // Check for magic trailer in last 24 bytes. 
- let mut has_trailer = false; - let mut output_file = File::open(&output_path)?; - // This seek may fail because the file is too small to possibly be - // `deno compile` output. - if output_file.seek(SeekFrom::End(-24)).is_ok() { - let mut trailer = [0; 24]; - output_file.read_exact(&mut trailer)?; - let (magic_trailer, _) = trailer.split_at(8); - has_trailer = magic_trailer == MAGIC_TRAILER; - } - if !has_trailer { - bail!( - concat!( - "Could not compile to file '{}' because the file already exists ", - "and cannot be overwritten. Please delete the existing file or ", - "use the `--output ` flag to ", - "provide an alternative name.", - ), - output_base.display(), - ); - } - tokio::fs::create_dir_all(output_base).await?; - } - - tokio::fs::write(&output_path, final_bin).await?; - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let perms = std::fs::Permissions::from_mode(0o777); - tokio::fs::set_permissions(output_path, perms).await?; - } - - Ok(()) -} - -async fn resolve_compile_executable_output_path( - compile_flags: &CompileFlags, - current_dir: &Path, -) -> Result { - let module_specifier = - resolve_url_or_path(&compile_flags.source_file, current_dir)?; - - let mut output = compile_flags.output.clone(); - - if let Some(out) = output.as_ref() { - if path_has_trailing_slash(out) { - if let Some(infer_file_name) = infer_name_from_url(&module_specifier) - .await - .map(PathBuf::from) - { - output = Some(out.join(infer_file_name)); - } - } else { - output = Some(out.to_path_buf()); - } - } - - if output.is_none() { - output = infer_name_from_url(&module_specifier) - .await - .map(PathBuf::from) - } - - output.ok_or_else(|| generic_error( - "An executable name was not provided. One could not be inferred from the URL. 
Aborting.", - )).map(|output| { - get_os_specific_filepath(output, &compile_flags.target) - }) -} - -fn get_os_specific_filepath( - output: PathBuf, - target: &Option, -) -> PathBuf { - let is_windows = match target { - Some(target) => target.contains("windows"), - None => cfg!(windows), - }; - if is_windows && output.extension().unwrap_or_default() != "exe" { - if let Some(ext) = output.extension() { - // keep version in my-exe-0.1.0 -> my-exe-0.1.0.exe - output.with_extension(format!("{}.exe", ext.to_string_lossy())) - } else { - output.with_extension("exe") - } - } else { - output - } -} - -#[cfg(test)] -mod test { - pub use super::*; - - #[tokio::test] - async fn resolve_compile_executable_output_path_target_linux() { - let path = resolve_compile_executable_output_path( - &CompileFlags { - source_file: "mod.ts".to_string(), - output: Some(PathBuf::from("./file")), - args: Vec::new(), - target: Some("x86_64-unknown-linux-gnu".to_string()), - include: vec![], - }, - &std::env::current_dir().unwrap(), - ) - .await - .unwrap(); - - // no extension, no matter what the operating system is - // because the target was specified as linux - // https://github.com/denoland/deno/issues/9667 - assert_eq!(path.file_name().unwrap(), "file"); - } - - #[tokio::test] - async fn resolve_compile_executable_output_path_target_windows() { - let path = resolve_compile_executable_output_path( - &CompileFlags { - source_file: "mod.ts".to_string(), - output: Some(PathBuf::from("./file")), - args: Vec::new(), - target: Some("x86_64-pc-windows-msvc".to_string()), - include: vec![], - }, - &std::env::current_dir().unwrap(), - ) - .await - .unwrap(); - assert_eq!(path.file_name().unwrap(), "file.exe"); - } - - #[test] - fn test_os_specific_file_path() { - fn run_test(path: &str, target: Option<&str>, expected: &str) { - assert_eq!( - get_os_specific_filepath( - PathBuf::from(path), - &target.map(|s| s.to_string()) - ), - PathBuf::from(expected) - ); - } - - if cfg!(windows) { - 
run_test("C:\\my-exe", None, "C:\\my-exe.exe"); - run_test("C:\\my-exe.exe", None, "C:\\my-exe.exe"); - run_test("C:\\my-exe-0.1.2", None, "C:\\my-exe-0.1.2.exe"); - } else { - run_test("my-exe", Some("linux"), "my-exe"); - run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2"); - } - - run_test("C:\\my-exe", Some("windows"), "C:\\my-exe.exe"); - run_test("C:\\my-exe.exe", Some("windows"), "C:\\my-exe.exe"); - run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe"); - run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2"); - } -} diff --git a/cli/tools/task.rs b/cli/tools/task.rs index c64e2a77cd..7dd7e7bc4d 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -4,15 +4,15 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; -use crate::node::CliNodeResolver; -use crate::npm::NpmPackageResolver; -use crate::proc_state::ProcState; +use crate::factory::CliFactory; +use crate::npm::CliNpmResolver; use crate::util::fs::canonicalize_path; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::future::LocalBoxFuture; +use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageNv; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; @@ -21,14 +21,16 @@ use indexmap::IndexMap; use std::collections::HashMap; use std::path::PathBuf; use std::rc::Rc; +use tokio::task::LocalSet; pub async fn execute_script( flags: Flags, task_flags: TaskFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let tasks_config = ps.options.resolve_tasks_config()?; - let maybe_package_json = ps.options.maybe_package_json(); + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let tasks_config = cli_options.resolve_tasks_config()?; + let maybe_package_json = cli_options.maybe_package_json(); let package_json_scripts = maybe_package_json .as_ref() 
.and_then(|p| p.scripts.clone()) @@ -43,7 +45,7 @@ pub async fn execute_script( }; if let Some(script) = tasks_config.get(task_name) { - let config_file_url = ps.options.maybe_config_file_specifier().unwrap(); + let config_file_url = cli_options.maybe_config_file_specifier().unwrap(); let config_file_path = if config_file_url.scheme() == "file" { config_file_url.to_file_path().unwrap() } else { @@ -53,17 +55,24 @@ pub async fn execute_script( Some(path) => canonicalize_path(&PathBuf::from(path))?, None => config_file_path.parent().unwrap().to_owned(), }; - let script = get_script_with_args(script, &ps.options); + let script = get_script_with_args(script, cli_options); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; let env_vars = collect_env_vars(); - let exit_code = - deno_task_shell::execute(seq_list, env_vars, &cwd, Default::default()) - .await; + let local = LocalSet::new(); + let future = + deno_task_shell::execute(seq_list, env_vars, &cwd, Default::default()); + let exit_code = local.run_until(future).await; Ok(exit_code) - } else if let Some(script) = package_json_scripts.get(task_name) { - if let Some(package_deps) = ps.package_json_deps_installer.package_deps() { + } else if package_json_scripts.contains_key(task_name) { + let package_json_deps_provider = factory.package_json_deps_provider(); + let package_json_deps_installer = + factory.package_json_deps_installer().await?; + let npm_resolver = factory.npm_resolver().await?; + let node_resolver = factory.node_resolver().await?; + + if let Some(package_deps) = package_json_deps_provider.deps() { for (key, value) in package_deps { if let Err(err) = value { log::info!( @@ -75,13 +84,14 @@ pub async fn execute_script( } } } - ps.package_json_deps_installer + + package_json_deps_installer .ensure_top_level_install() .await?; - ps.npm_resolver.resolve_pending().await?; + 
npm_resolver.resolve_pending().await?; log::info!( - "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.", + "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release.", colors::yellow("Warning"), ); @@ -95,16 +105,34 @@ pub async fn execute_script( .unwrap() .to_owned(), }; - let script = get_script_with_args(script, &ps.options); - output_task(task_name, &script); - let seq_list = deno_task_shell::parser::parse(&script) - .with_context(|| format!("Error parsing script '{task_name}'."))?; - let npx_commands = - resolve_npm_commands(&ps.npm_resolver, &ps.node_resolver)?; - let env_vars = collect_env_vars(); - let exit_code = - deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands).await; - Ok(exit_code) + + // At this point we already checked if the task name exists in package.json. 
+ // We can therefore check for "pre" and "post" scripts too, since we're only + // dealing with package.json here and not deno.json + let task_names = vec![ + format!("pre{}", task_name), + task_name.clone(), + format!("post{}", task_name), + ]; + for task_name in task_names { + if let Some(script) = package_json_scripts.get(&task_name) { + let script = get_script_with_args(script, cli_options); + output_task(&task_name, &script); + let seq_list = deno_task_shell::parser::parse(&script) + .with_context(|| format!("Error parsing script '{task_name}'."))?; + let npx_commands = resolve_npm_commands(npm_resolver, node_resolver)?; + let env_vars = collect_env_vars(); + let local = LocalSet::new(); + let future = + deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands); + let exit_code = local.run_until(future).await; + if exit_code > 0 { + return Ok(exit_code); + } + } + } + + Ok(0) } else { eprintln!("Task not found: {task_name}"); print_available_tasks(&tasks_config, &package_json_scripts); @@ -234,8 +262,8 @@ impl ShellCommand for NpmPackageBinCommand { } fn resolve_npm_commands( - npm_resolver: &NpmPackageResolver, - node_resolver: &CliNodeResolver, + npm_resolver: &CliNpmResolver, + node_resolver: &NodeResolver, ) -> Result>, AnyError> { let mut result = HashMap::new(); let snapshot = npm_resolver.snapshot(); diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 977073ab73..6f32d69e49 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -3,13 +3,14 @@ use crate::args::CliOptions; use crate::args::FilesConfig; use crate::args::TestOptions; -use crate::args::TypeCheckMode; use crate::colors; use crate::display; +use crate::factory::CliFactory; use crate::file_fetcher::File; +use crate::file_fetcher::FileFetcher; use crate::graph_util::graph_valid_with_cli_options; +use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::util::checksum; use crate::util::file_watcher; use 
crate::util::file_watcher::ResolutionResult; @@ -17,7 +18,7 @@ use crate::util::fs::collect_specifiers; use crate::util::path::get_extension; use crate::util::path::is_supported_ext; use crate::util::path::mapped_specifier_for_tsc; -use crate::worker::create_custom_worker; +use crate::worker::CliMainWorkerFactory; use deno_ast::swc::common::comments::CommentKind; use deno_ast::MediaType; @@ -27,11 +28,14 @@ use deno_core::error::AnyError; use deno_core::error::JsError; use deno_core::futures::future; use deno_core::futures::stream; +use deno_core::futures::task::noop_waker; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::located_script_name; use deno_core::parking_lot::Mutex; use deno_core::serde_v8; +use deno_core::task::spawn; +use deno_core::task::spawn_blocking; use deno_core::url::Url; use deno_core::v8; use deno_core::ModuleSpecifier; @@ -40,7 +44,7 @@ use deno_runtime::deno_io::StdioPipe; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use indexmap::IndexMap; use indexmap::IndexSet; use log::Level; @@ -49,7 +53,6 @@ use rand::seq::SliceRandom; use rand::SeedableRng; use regex::Regex; use serde::Deserialize; -use std::cell::RefCell; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::HashMap; @@ -64,6 +67,7 @@ use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::sync::Arc; +use std::task::Context; use std::time::Duration; use std::time::Instant; use std::time::SystemTime; @@ -336,10 +340,18 @@ pub struct TestSummary { } #[derive(Debug, Clone)] -struct TestSpecifierOptions { +struct TestSpecifiersOptions { concurrent_jobs: NonZeroUsize, fail_fast: Option, - filter: TestFilter, + log_level: Option, + specifier: TestSpecifierOptions, +} 
+ +#[derive(Debug, Clone)] +pub struct TestSpecifierOptions { + pub shuffle: Option, + pub filter: TestFilter, + pub trace_ops: bool, } impl TestSummary { @@ -907,30 +919,30 @@ pub fn format_test_error(js_error: &JsError) -> String { /// Test a single specifier as documentation containing test programs, an executable test module or /// both. pub async fn test_specifier( - ps: &ProcState, + worker_factory: Arc, permissions: Permissions, specifier: ModuleSpecifier, mut sender: TestEventSender, fail_fast_tracker: FailFastTracker, - filter: TestFilter, + options: TestSpecifierOptions, ) -> Result<(), AnyError> { if fail_fast_tracker.should_stop() { return Ok(()); } let stdout = StdioPipe::File(sender.stdout()); let stderr = StdioPipe::File(sender.stderr()); - let mut worker = create_custom_worker( - ps, - specifier.clone(), - PermissionsContainer::new(permissions), - vec![ops::testing::deno_test::init_ops(sender.clone())], - Stdio { - stdin: StdioPipe::Inherit, - stdout, - stderr, - }, - ) - .await?; + let mut worker = worker_factory + .create_custom_worker( + specifier.clone(), + PermissionsContainer::new(permissions), + vec![ops::testing::deno_test::init_ops(sender.clone())], + Stdio { + stdin: StdioPipe::Inherit, + stdout, + stderr, + }, + ) + .await?; let mut coverage_collector = worker.maybe_setup_coverage_collector().await?; @@ -951,7 +963,7 @@ pub async fn test_specifier( } let mut worker = worker.into_main_worker(); - if ps.options.trace_ops() { + if options.trace_ops { worker.js_runtime.execute_script_static( located_script_name!(), "Deno[Deno.internal].core.enableOpCallTracing();", @@ -971,9 +983,9 @@ pub async fn test_specifier( let tests = if used_only { only } else { no_only }; let mut tests = tests .into_iter() - .filter(|(d, _)| filter.includes(&d.name)) + .filter(|(d, _)| options.filter.includes(&d.name)) .collect::>(); - if let Some(seed) = ps.options.shuffle_tests() { + if let Some(seed) = options.shuffle { tests.shuffle(&mut 
SmallRng::seed_from_u64(seed)); } sender.send(TestEvent::Plan(TestPlan { @@ -996,15 +1008,23 @@ pub async fn test_specifier( continue; } sender.send(TestEvent::Wait(desc.id))?; + + // TODO(bartlomieju): this is a nasty (beautiful) hack, that was required + // when switching `JsRuntime` from `FuturesUnordered` to `JoinSet`. With + // `JoinSet` all pending ops are immediately polled and that caused a problem + // when some async ops were fired and canceled before running tests (giving + // false positives in the ops sanitizer). We should probably rewrite sanitizers + // to be done in Rust instead of in JS (40_testing.js). + { + // Poll event loop once, this will allow all ops that are already resolved, + // but haven't responded to settle. + let waker = noop_waker(); + let mut cx = Context::from_waker(&waker); + let _ = worker.js_runtime.poll_event_loop(&mut cx, false); + } + let earlier = SystemTime::now(); - let promise = { - let scope = &mut worker.js_runtime.handle_scope(); - let cb = function.open(scope); - let this = v8::undefined(scope).into(); - let promise = cb.call(scope, this, &[]).unwrap(); - v8::Global::new(scope, promise) - }; - let result = match worker.js_runtime.resolve_value(promise).await { + let result = match worker.js_runtime.call_and_await(&function).await { Ok(r) => r, Err(error) => { if error.is::() { @@ -1031,12 +1051,9 @@ pub async fn test_specifier( sender.send(TestEvent::Result(desc.id, result, elapsed as u64))?; } - loop { - if !worker.dispatch_beforeunload_event(located_script_name!())? { - break; - } - worker.run_event_loop(false).await?; - } + // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the + // event loop to continue beyond what's needed to await results. 
+ worker.dispatch_beforeunload_event(located_script_name!())?; worker.dispatch_unload_event(located_script_name!())?; if let Some(coverage_collector) = coverage_collector.as_mut() { @@ -1199,13 +1216,13 @@ fn extract_files_from_fenced_blocks( } async fn fetch_inline_files( - ps: &ProcState, + file_fetcher: &FileFetcher, specifiers: Vec, ) -> Result, AnyError> { let mut files = Vec::new(); for specifier in specifiers { let fetch_permissions = PermissionsContainer::allow_all(); - let file = ps.file_fetcher.fetch(&specifier, fetch_permissions).await?; + let file = file_fetcher.fetch(&specifier, fetch_permissions).await?; let inline_files = if file.media_type == MediaType::Unknown { extract_files_from_fenced_blocks( @@ -1229,13 +1246,14 @@ async fn fetch_inline_files( /// Type check a collection of module and document specifiers. pub async fn check_specifiers( - ps: &ProcState, - permissions: Permissions, + cli_options: &CliOptions, + file_fetcher: &FileFetcher, + module_load_preparer: &ModuleLoadPreparer, specifiers: Vec<(ModuleSpecifier, TestMode)>, ) -> Result<(), AnyError> { - let lib = ps.options.ts_type_lib_window(); + let lib = cli_options.ts_type_lib_window(); let inline_files = fetch_inline_files( - ps, + file_fetcher, specifiers .iter() .filter_map(|(specifier, mode)| { @@ -1256,16 +1274,15 @@ pub async fn check_specifiers( .collect(); for file in inline_files { - ps.file_fetcher.insert_cached(file); + file_fetcher.insert_cached(file); } - ps.module_load_preparer + module_load_preparer .prepare_module_load( specifiers, false, lib, PermissionsContainer::new(Permissions::allow_all()), - PermissionsContainer::new(permissions.clone()), ) .await?; } @@ -1281,13 +1298,12 @@ pub async fn check_specifiers( }) .collect(); - ps.module_load_preparer + module_load_preparer .prepare_module_load( module_specifiers, false, lib, PermissionsContainer::allow_all(), - PermissionsContainer::new(permissions), ) .await?; @@ -1298,13 +1314,12 @@ static HAS_TEST_RUN_SIGINT_HANDLER: 
AtomicBool = AtomicBool::new(false); /// Test a collection of specifiers with test modes concurrently. async fn test_specifiers( - ps: &ProcState, + worker_factory: Arc, permissions: &Permissions, specifiers: Vec, - options: TestSpecifierOptions, + options: TestSpecifiersOptions, ) -> Result<(), AnyError> { - let log_level = ps.options.log_level(); - let specifiers = if let Some(seed) = ps.options.shuffle_tests() { + let specifiers = if let Some(seed) = options.specifier.shuffle { let mut rng = SmallRng::seed_from_u64(seed); let mut specifiers = specifiers; specifiers.sort(); @@ -1319,26 +1334,26 @@ async fn test_specifiers( let concurrent_jobs = options.concurrent_jobs; let sender_ = sender.downgrade(); - let sigint_handler_handle = tokio::task::spawn(async move { + let sigint_handler_handle = spawn(async move { signal::ctrl_c().await.unwrap(); sender_.upgrade().map(|s| s.send(TestEvent::Sigint).ok()); }); HAS_TEST_RUN_SIGINT_HANDLER.store(true, Ordering::Relaxed); let join_handles = specifiers.into_iter().map(move |specifier| { - let ps = ps.clone(); + let worker_factory = worker_factory.clone(); let permissions = permissions.clone(); let sender = sender.clone(); - let options = options.clone(); let fail_fast_tracker = FailFastTracker::new(options.fail_fast); - tokio::task::spawn_blocking(move || { - run_local(test_specifier( - &ps, + let specifier_options = options.specifier.clone(); + spawn_blocking(move || { + create_and_run_current_thread(test_specifier( + worker_factory, permissions, specifier, sender.clone(), fail_fast_tracker, - options.filter, + specifier_options, )) }) }); @@ -1349,11 +1364,11 @@ async fn test_specifiers( let mut reporter = Box::new(PrettyTestReporter::new( concurrent_jobs.get() > 1, - log_level != Some(Level::Error), + options.log_level != Some(Level::Error), )); let handler = { - tokio::task::spawn(async move { + spawn(async move { let earlier = Instant::now(); let mut tests = IndexMap::new(); let mut test_steps = IndexMap::new(); @@ 
-1518,7 +1533,7 @@ async fn test_specifiers( } /// Checks if the path has a basename and extension Deno supports for tests. -fn is_supported_test_path(path: &Path) -> bool { +pub(crate) fn is_supported_test_path(path: &Path) -> bool { if let Some(name) = path.file_stem() { let basename = name.to_string_lossy(); (basename.ends_with("_test") @@ -1604,15 +1619,14 @@ fn collect_specifiers_with_test_mode( /// cannot be run, and therefore need to be marked as `TestMode::Documentation` /// as well. async fn fetch_specifiers_with_test_mode( - ps: &ProcState, + file_fetcher: &FileFetcher, files: &FilesConfig, doc: &bool, ) -> Result, AnyError> { let mut specifiers_with_mode = collect_specifiers_with_test_mode(files, doc)?; for (specifier, mode) in &mut specifiers_with_mode { - let file = ps - .file_fetcher + let file = file_fetcher .fetch(specifier, PermissionsContainer::allow_all()) .await?; @@ -1630,15 +1644,19 @@ pub async fn run_tests( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&cli_options.permissions_options())?; + let log_level = cli_options.log_level(); let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps, + file_fetcher, &test_options.files, &test_options.doc, ) @@ -1648,15 +1666,23 @@ pub async fn run_tests( return Err(generic_error("No test modules found")); } - check_specifiers(&ps, permissions.clone(), specifiers_with_mode.clone()) - .await?; + check_specifiers( + cli_options, + file_fetcher, + module_load_preparer, + specifiers_with_mode.clone(), + ) + .await?; if test_options.no_run { return Ok(()); } + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); + test_specifiers( - &ps, + worker_factory, &permissions, specifiers_with_mode .into_iter() @@ -1665,10 +1691,15 @@ pub async fn run_tests( _ => Some(s), }) .collect(), - TestSpecifierOptions { + TestSpecifiersOptions { concurrent_jobs: test_options.concurrent_jobs, fail_fast: test_options.fail_fast, - filter: TestFilter::from_flag(&test_options.filter), + log_level, + specifier: TestSpecifierOptions { + filter: TestFilter::from_flag(&test_options.filter), + shuffle: test_options.shuffle, + trace_ops: test_options.trace_ops, + }, }, ) .await?; @@ -1680,22 +1711,27 @@ pub async fn run_tests_with_watch( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let module_load_preparer = factory.module_load_preparer().await?; + let file_fetcher = factory.file_fetcher()?; + let file_watcher = factory.file_watcher()?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one 
// file would have impact on other files, which is undesirable. let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; - - let ps = RefCell::new(ps); + Permissions::from_options(&cli_options.permissions_options())?; + let graph_kind = cli_options.type_check_mode().as_graph_kind(); + let log_level = cli_options.log_level(); let resolver = |changed: Option>| { let paths_to_watch = test_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let test_options = &test_options; - let ps = ps.borrow().clone(); + let cli_options = cli_options.clone(); + let module_graph_builder = module_graph_builder.clone(); async move { let test_modules = if test_options.doc { @@ -1710,11 +1746,10 @@ pub async fn run_tests_with_watch( } else { test_modules.clone() }; - let graph = ps - .module_graph_builder - .create_graph(test_modules.clone()) + let graph = module_graph_builder + .create_graph(graph_kind, test_modules.clone()) .await?; - graph_valid_with_cli_options(&graph, &test_modules, &ps.options)?; + graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?; // TODO(@kitsonk) - This should be totally derivable from the graph. 
for specifier in test_modules { @@ -1724,32 +1759,19 @@ pub async fn run_tests_with_watch( // This needs to be accessible to skip getting dependencies if they're already there, // otherwise this will cause a stack overflow with circular dependencies output: &mut HashSet<&'a ModuleSpecifier>, - no_check: bool, ) { if let Some(module) = maybe_module.and_then(|m| m.esm()) { for dep in module.dependencies.values() { if let Some(specifier) = &dep.get_code() { if !output.contains(specifier) { output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); + get_dependencies(graph, graph.get(specifier), output); } } - if !no_check { - if let Some(specifier) = &dep.get_type() { - if !output.contains(specifier) { - output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); - } + if let Some(specifier) = &dep.get_type() { + if !output.contains(specifier) { + output.insert(specifier); + get_dependencies(graph, graph.get(specifier), output); } } } @@ -1759,7 +1781,7 @@ pub async fn run_tests_with_watch( // This test module and all it's dependencies let mut modules = HashSet::new(); modules.insert(&specifier); - get_dependencies(&graph, graph.get(&specifier), &mut modules, no_check); + get_dependencies(&graph, graph.get(&specifier), &mut modules); paths_to_watch.extend( modules @@ -1804,15 +1826,21 @@ pub async fn run_tests_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let test_options = &test_options; - ps.borrow_mut().reset_for_file_watcher(); - let ps = ps.borrow().clone(); + file_watcher.reset(); + let cli_options = cli_options.clone(); + let file_fetcher = file_fetcher.clone(); + let module_load_preparer = module_load_preparer.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory 
= Arc::new(create_cli_main_worker_factory()); let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps, + &file_fetcher, &test_options.files, &test_options.doc, ) @@ -1821,15 +1849,20 @@ pub async fn run_tests_with_watch( .filter(|(specifier, _)| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, permissions.clone(), specifiers_with_mode.clone()) - .await?; + check_specifiers( + &cli_options, + &file_fetcher, + &module_load_preparer, + specifiers_with_mode.clone(), + ) + .await?; if test_options.no_run { return Ok(()); } test_specifiers( - &ps, + worker_factory, permissions, specifiers_with_mode .into_iter() @@ -1838,10 +1871,15 @@ pub async fn run_tests_with_watch( _ => Some(s), }) .collect(), - TestSpecifierOptions { + TestSpecifiersOptions { concurrent_jobs: test_options.concurrent_jobs, fail_fast: test_options.fail_fast, - filter: TestFilter::from_flag(&test_options.filter), + log_level, + specifier: TestSpecifierOptions { + filter: TestFilter::from_flag(&test_options.filter), + shuffle: test_options.shuffle, + trace_ops: test_options.trace_ops, + }, }, ) .await?; @@ -1854,7 +1892,7 @@ pub async fn run_tests_with_watch( // run, a process-scoped basic exit handler is required due to a tokio // limitation where it doesn't unbind its own handler for the entire process // once a user adds one. 
- tokio::task::spawn(async move { + spawn(async move { loop { signal::ctrl_c().await.unwrap(); if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) { @@ -1863,7 +1901,7 @@ pub async fn run_tests_with_watch( } }); - let clear_screen = !ps.borrow().options.no_clear_screen(); + let clear_screen = !cli_options.no_clear_screen(); file_watcher::watch_func( resolver, operation, @@ -2037,7 +2075,7 @@ fn start_output_redirect_thread( sender: UnboundedSender, flush_state: Arc>>>, ) { - tokio::task::spawn_blocking(move || loop { + spawn_blocking(move || loop { let mut buffer = [0; 512]; let size = match pipe_reader.read(&mut buffer) { Ok(0) | Err(_) => break, diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index f16923bf83..b371731c31 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -5,8 +5,8 @@ use crate::args::Flags; use crate::args::UpgradeFlags; use crate::colors; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::time; @@ -17,6 +17,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::future::BoxFuture; use deno_core::futures::FutureExt; +use deno_core::task::spawn; use deno_semver::Version; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -26,6 +27,7 @@ use std::ops::Sub; use std::path::Path; use std::path::PathBuf; use std::process::Command; +use std::sync::Arc; use std::time::Duration; static ARCHIVE_NAME: Lazy = @@ -50,13 +52,13 @@ trait UpdateCheckerEnvironment: Clone + Send + Sync { #[derive(Clone)] struct RealUpdateCheckerEnvironment { - http_client: HttpClient, + http_client: Arc, cache_file_path: PathBuf, current_time: chrono::DateTime, } impl RealUpdateCheckerEnvironment { - pub fn new(http_client: HttpClient, cache_file_path: PathBuf) -> Self { + pub fn new(http_client: Arc, cache_file_path: PathBuf) -> Self { Self { http_client, 
cache_file_path, @@ -183,7 +185,10 @@ fn print_release_notes(current_version: &str, new_version: &str) { } } -pub fn check_for_upgrades(http_client: HttpClient, cache_file_path: PathBuf) { +pub fn check_for_upgrades( + http_client: Arc, + cache_file_path: PathBuf, +) { if env::var("DENO_NO_UPDATE_CHECK").is_ok() { return; } @@ -194,7 +199,7 @@ pub fn check_for_upgrades(http_client: HttpClient, cache_file_path: PathBuf) { if update_checker.should_check_for_new_version() { let env = update_checker.env.clone(); // do this asynchronously on a separate task - tokio::spawn(async move { + spawn(async move { // Sleep for a small amount of time to not unnecessarily impact startup // time. tokio::time::sleep(UPGRADE_CHECK_FETCH_DELAY).await; @@ -226,7 +231,6 @@ pub fn check_for_upgrades(http_client: HttpClient, cache_file_path: PathBuf) { "{}", colors::italic_gray("Run `deno upgrade` to install it.") ); - print_release_notes(version::deno(), &upgrade_version); } update_checker.store_prompted(); @@ -263,7 +267,8 @@ pub async fn upgrade( flags: Flags, upgrade_flags: UpgradeFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let client = factory.http_client(); let current_exe_path = std::env::current_exe()?; let metadata = fs::metadata(¤t_exe_path)?; let permissions = metadata.permissions(); @@ -285,11 +290,13 @@ pub async fn upgrade( ), current_exe_path.display()); } - let client = &ps.http_client; - let install_version = match upgrade_flags.version { Some(passed_version) => { let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$"); + let passed_version = passed_version + .strip_prefix('v') + .unwrap_or(&passed_version) + .to_string(); if upgrade_flags.canary && !re_hash.is_match(&passed_version) { bail!("Invalid commit hash passed"); @@ -313,9 +320,9 @@ pub async fn upgrade( { log::info!("Version {} is already installed", crate::version::deno()); return Ok(()); - } else { - passed_version } + + 
passed_version } None => { let latest_version = if upgrade_flags.canary { @@ -327,7 +334,7 @@ pub async fn upgrade( }; let current_is_most_recent = if upgrade_flags.canary { - let latest_hash = latest_version.clone(); + let latest_hash = &latest_version; crate::version::GIT_COMMIT_HASH == latest_hash } else if !crate::version::is_canary() { let current = Version::parse_standard(crate::version::deno()).unwrap(); @@ -359,7 +366,7 @@ pub async fn upgrade( let download_url = if upgrade_flags.canary { if env!("TARGET") == "aarch64-apple-darwin" { - bail!("Canary builds are not available for M1"); + bail!("Canary builds are not available for M1/M2"); } format!( diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs index f9df8f0786..11a1fb50e6 100644 --- a/cli/tools/vendor/build.rs +++ b/cli/tools/vendor/build.rs @@ -378,6 +378,54 @@ mod test { ); } + #[tokio::test] + async fn remote_redirect_entrypoint() { + let mut builder = VendorTestBuilder::with_default_setup(); + let output = builder + .with_loader(|loader| { + loader + .add( + "/mod.ts", + concat!( + "import * as test from 'https://x.nest.land/Yenv@1.0.0/mod.ts';\n", + "console.log(test)", + ), + ) + .add_redirect("https://x.nest.land/Yenv@1.0.0/mod.ts", "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts") + .add( + "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", + "export * from './src/mod.ts'", + ) + .add( + "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/src/mod.ts", + "export class Test {}", + ); + }) + .build() + .await + .unwrap(); + + assert_eq!( + output.import_map, + Some(json!({ + "imports": { + "https://x.nest.land/Yenv@1.0.0/mod.ts": "./arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", + "https://arweave.net/": "./arweave.net/" + }, + })) + ); + assert_eq!( + output.files, + to_file_vec(&[ + ("/vendor/arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", "export * from './src/mod.ts'"), + ( + 
"/vendor/arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/src/mod.ts", + "export class Test {}", + ), + ]), + ); + } + #[tokio::test] async fn same_target_filename_specifiers() { let mut builder = VendorTestBuilder::with_default_setup(); diff --git a/cli/tools/vendor/import_map.rs b/cli/tools/vendor/import_map.rs index 916eb55c58..dbda81a3a0 100644 --- a/cli/tools/vendor/import_map.rs +++ b/cli/tools/vendor/import_map.rs @@ -304,7 +304,7 @@ fn handle_dep_specifier( referrer, mappings, ) - } else { + } else if specifier.scheme() == "file" { handle_local_dep_specifier( text, unresolved_specifier, @@ -326,15 +326,16 @@ fn handle_remote_dep_specifier( ) { if is_remote_specifier_text(text) { let base_specifier = mappings.base_specifier(specifier); - if !text.starts_with(base_specifier.as_str()) { - panic!("Expected {text} to start with {base_specifier}"); - } - - let sub_path = &text[base_specifier.as_str().len()..]; - let relative_text = - mappings.relative_specifier_text(base_specifier, specifier); - let expected_sub_path = relative_text.trim_start_matches("./"); - if expected_sub_path != sub_path { + if text.starts_with(base_specifier.as_str()) { + let sub_path = &text[base_specifier.as_str().len()..]; + let relative_text = + mappings.relative_specifier_text(base_specifier, specifier); + let expected_sub_path = relative_text.trim_start_matches("./"); + if expected_sub_path != sub_path { + import_map.imports.add(text.to_string(), specifier); + } + } else { + // it's probably a redirect. 
Add it explicitly to the import map import_map.imports.add(text.to_string(), specifier); } } else { diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index 225c3e6a81..61ada605c5 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -5,18 +5,21 @@ use std::path::PathBuf; use std::sync::Arc; use deno_ast::ModuleSpecifier; +use deno_ast::TextChange; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; +use deno_graph::GraphKind; use log::warn; use crate::args::CliOptions; +use crate::args::ConfigFile; use crate::args::Flags; use crate::args::FmtOptionsConfig; use crate::args::VendorFlags; +use crate::factory::CliFactory; use crate::graph_util::ModuleGraphBuilder; -use crate::proc_state::ProcState; use crate::tools::fmt::format_json; use crate::util::fs::canonicalize_path; use crate::util::fs::resolve_from_cwd; @@ -43,19 +46,23 @@ pub async fn vendor( let output_dir = resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &vendor_flags)?; validate_options(&mut cli_options, &output_dir)?; - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); let graph = create_graph( - &ps.module_graph_builder, + factory.module_graph_builder().await?, &vendor_flags, - ps.options.initial_cwd(), + cli_options.initial_cwd(), ) .await?; + let npm_package_count = graph.npm_packages.len(); + let try_add_node_modules_dir = npm_package_count > 0 + && cli_options.node_modules_dir_enablement().unwrap_or(true); let vendored_count = build::build( graph, - &ps.parsed_source_cache, + factory.parsed_source_cache()?, &output_dir, - ps.maybe_import_map.as_deref(), - ps.lockfile.clone(), + factory.maybe_import_map().await?.as_deref(), + factory.maybe_lockfile().clone(), &build::RealVendorEnvironment, )?; @@ -69,9 +76,48 @@ pub async fn vendor( }, 
raw_output_dir.display(), ); + + let try_add_import_map = vendored_count > 0; + let modified_result = maybe_update_config_file( + &output_dir, + cli_options, + try_add_import_map, + try_add_node_modules_dir, + ); + + // cache the node_modules folder when it's been added to the config file + if modified_result.added_node_modules_dir { + let node_modules_path = cli_options.node_modules_dir_path().or_else(|| { + cli_options + .maybe_config_file_specifier() + .filter(|c| c.scheme() == "file") + .and_then(|c| c.to_file_path().ok()) + .map(|config_path| config_path.parent().unwrap().join("node_modules")) + }); + if let Some(node_modules_path) = node_modules_path { + factory + .create_node_modules_npm_fs_resolver(node_modules_path) + .await? + .cache_packages() + .await?; + } + log::info!( + concat!( + "Vendored {} npm {} into node_modules directory. Set `nodeModulesDir: false` ", + "in the Deno configuration file to disable vendoring npm packages in the future.", + ), + npm_package_count, + if npm_package_count == 1 { + "package" + } else { + "packages" + }, + ); + } + if vendored_count > 0 { let import_map_path = raw_output_dir.join("import_map.json"); - if maybe_update_config_file(&output_dir, &ps.options) { + if modified_result.updated_import_map { log::info!( concat!( "\nUpdated your local Deno configuration file with a reference to the ", @@ -153,107 +199,156 @@ fn validate_options( Ok(()) } -fn maybe_update_config_file(output_dir: &Path, options: &CliOptions) -> bool { +fn maybe_update_config_file( + output_dir: &Path, + options: &CliOptions, + try_add_import_map: bool, + try_add_node_modules_dir: bool, +) -> ModifiedResult { assert!(output_dir.is_absolute()); - let config_file_specifier = match options.maybe_config_file_specifier() { - Some(f) => f, - None => return false, + let config_file = match options.maybe_config_file() { + Some(config_file) => config_file, + None => return ModifiedResult::default(), }; + if config_file.specifier.scheme() != "file" { + 
return ModifiedResult::default(); + } - let fmt_config = options - .maybe_config_file() - .as_ref() - .and_then(|config| config.to_fmt_config().ok()) + let fmt_config = config_file + .to_fmt_config() + .ok() .unwrap_or_default() .unwrap_or_default(); let result = update_config_file( - &config_file_specifier, - &ModuleSpecifier::from_file_path(output_dir.join("import_map.json")) - .unwrap(), + config_file, &fmt_config.options, + if try_add_import_map { + Some( + ModuleSpecifier::from_file_path(output_dir.join("import_map.json")) + .unwrap(), + ) + } else { + None + }, + try_add_node_modules_dir, ); match result { - Ok(()) => true, + Ok(modified_result) => modified_result, Err(err) => { warn!("Error updating config file. {:#}", err); - false + ModifiedResult::default() } } } fn update_config_file( - config_specifier: &ModuleSpecifier, - import_map_specifier: &ModuleSpecifier, + config_file: &ConfigFile, fmt_options: &FmtOptionsConfig, -) -> Result<(), AnyError> { - if config_specifier.scheme() != "file" { - return Ok(()); - } - - let config_path = specifier_to_file_path(config_specifier)?; + import_map_specifier: Option, + try_add_node_modules_dir: bool, +) -> Result { + let config_path = specifier_to_file_path(&config_file.specifier)?; let config_text = std::fs::read_to_string(&config_path)?; - let relative_text = - match relative_specifier(config_specifier, import_map_specifier) { - Some(text) => text, - None => return Ok(()), // ignore - }; - if let Some(new_text) = - update_config_text(&config_text, &relative_text, fmt_options) - { + let import_map_specifier = + import_map_specifier.and_then(|import_map_specifier| { + relative_specifier(&config_file.specifier, &import_map_specifier) + }); + let modified_result = update_config_text( + &config_text, + fmt_options, + import_map_specifier.as_deref(), + try_add_node_modules_dir, + )?; + if let Some(new_text) = &modified_result.new_text { std::fs::write(config_path, new_text)?; } + Ok(modified_result) +} - Ok(()) 
+#[derive(Default)] +struct ModifiedResult { + updated_import_map: bool, + added_node_modules_dir: bool, + new_text: Option, } fn update_config_text( text: &str, - import_map_specifier: &str, fmt_options: &FmtOptionsConfig, -) -> Option { + import_map_specifier: Option<&str>, + try_add_node_modules_dir: bool, +) -> Result { use jsonc_parser::ast::ObjectProp; use jsonc_parser::ast::Value; let ast = - jsonc_parser::parse_to_ast(text, &Default::default(), &Default::default()) - .ok()?; + jsonc_parser::parse_to_ast(text, &Default::default(), &Default::default())?; let obj = match ast.value { Some(Value::Object(obj)) => obj, - _ => return None, // shouldn't happen, so ignore + _ => bail!("Failed updating config file due to no object."), }; - let import_map_specifier = import_map_specifier.replace('\"', "\\\""); + let mut modified_result = ModifiedResult::default(); + let mut text_changes = Vec::new(); + let mut should_format = false; - match obj.get("importMap") { - Some(ObjectProp { - value: Value::StringLit(lit), - .. 
- }) => Some(format!( - "{}{}{}", - &text[..lit.range.start + 1], - import_map_specifier, - &text[lit.range.end - 1..], - )), - None => { - // insert it crudely at a position that won't cause any issues - // with comments and format after to make it look nice + if try_add_node_modules_dir { + // Only modify the nodeModulesDir property if it's not set + // as this allows people to opt-out of this when vendoring + // by specifying `nodeModulesDir: false` + if obj.get("nodeModulesDir").is_none() { let insert_position = obj.range.end - 1; - let insert_text = format!( - r#"{}"importMap": "{}""#, - if obj.properties.is_empty() { "" } else { "," }, - import_map_specifier - ); - let new_text = format!( - "{}{}{}", - &text[..insert_position], - insert_text, - &text[insert_position..], - ); - format_json(&new_text, fmt_options) - .ok() - .map(|formatted_text| formatted_text.unwrap_or(new_text)) + text_changes.push(TextChange { + range: insert_position..insert_position, + new_text: r#""nodeModulesDir": true"#.to_string(), + }); + should_format = true; + modified_result.added_node_modules_dir = true; } - // shouldn't happen, so ignore - Some(_) => None, } + + if let Some(import_map_specifier) = import_map_specifier { + let import_map_specifier = import_map_specifier.replace('\"', "\\\""); + match obj.get("importMap") { + Some(ObjectProp { + value: Value::StringLit(lit), + .. 
+ }) => { + text_changes.push(TextChange { + range: lit.range.start..lit.range.end, + new_text: format!("\"{}\"", import_map_specifier), + }); + modified_result.updated_import_map = true; + } + None => { + // insert it crudely at a position that won't cause any issues + // with comments and format after to make it look nice + let insert_position = obj.range.end - 1; + text_changes.push(TextChange { + range: insert_position..insert_position, + new_text: format!(r#""importMap": "{}""#, import_map_specifier), + }); + should_format = true; + modified_result.updated_import_map = true; + } + // shouldn't happen + Some(_) => { + bail!("Failed updating importMap in config file due to invalid type.") + } + } + } + + if text_changes.is_empty() { + return Ok(modified_result); + } + + let new_text = deno_ast::apply_text_changes(text, text_changes); + modified_result.new_text = if should_format { + format_json(&new_text, fmt_options) + .ok() + .map(|formatted_text| formatted_text.unwrap_or(new_text)) + } else { + Some(new_text) + }; + Ok(modified_result) } fn is_dir_empty(dir_path: &Path) -> Result { @@ -277,7 +372,9 @@ async fn create_graph( .map(|p| resolve_url_or_path(p, initial_cwd)) .collect::, _>>()?; - module_graph_builder.create_graph(entry_points).await + module_graph_builder + .create_graph(GraphKind::All, entry_points) + .await } #[cfg(test)] @@ -287,36 +384,94 @@ mod internal_test { #[test] fn update_config_text_no_existing_props_add_prop() { - let text = update_config_text( + let result = update_config_text( "{\n}", - "./vendor/import_map.json", &Default::default(), + Some("./vendor/import_map.json"), + false, ) .unwrap(); + assert!(result.updated_import_map); + assert!(!result.added_node_modules_dir); assert_eq!( - text, + result.new_text.unwrap(), r#"{ "importMap": "./vendor/import_map.json" } +"# + ); + + let result = update_config_text( + "{\n}", + &Default::default(), + Some("./vendor/import_map.json"), + true, + ) + .unwrap(); + 
assert!(result.updated_import_map); + assert!(result.added_node_modules_dir); + assert_eq!( + result.new_text.unwrap(), + r#"{ + "nodeModulesDir": true, + "importMap": "./vendor/import_map.json" +} +"# + ); + + let result = + update_config_text("{\n}", &Default::default(), None, true).unwrap(); + assert!(!result.updated_import_map); + assert!(result.added_node_modules_dir); + assert_eq!( + result.new_text.unwrap(), + r#"{ + "nodeModulesDir": true +} "# ); } #[test] fn update_config_text_existing_props_add_prop() { - let text = update_config_text( + let result = update_config_text( r#"{ "tasks": { "task1": "other" } } "#, - "./vendor/import_map.json", &Default::default(), + Some("./vendor/import_map.json"), + false, ) .unwrap(); assert_eq!( - text, + result.new_text.unwrap(), + r#"{ + "tasks": { + "task1": "other" + }, + "importMap": "./vendor/import_map.json" +} +"# + ); + + // trailing comma + let result = update_config_text( + r#"{ + "tasks": { + "task1": "other" + }, +} +"#, + &Default::default(), + Some("./vendor/import_map.json"), + false, + ) + .unwrap(); + assert_eq!( + result.new_text.unwrap(), r#"{ "tasks": { "task1": "other" @@ -329,21 +484,54 @@ mod internal_test { #[test] fn update_config_text_update_prop() { - let text = update_config_text( + let result = update_config_text( r#"{ "importMap": "./local.json" } "#, - "./vendor/import_map.json", &Default::default(), + Some("./vendor/import_map.json"), + false, ) .unwrap(); assert_eq!( - text, + result.new_text.unwrap(), r#"{ "importMap": "./vendor/import_map.json" } "# ); } + + #[test] + fn no_update_node_modules_dir() { + // will not update if this is already set (even if it's false) + let result = update_config_text( + r#"{ + "nodeModulesDir": false +} +"#, + &Default::default(), + None, + true, + ) + .unwrap(); + assert!(!result.added_node_modules_dir); + assert!(!result.updated_import_map); + assert_eq!(result.new_text, None); + + let result = update_config_text( + r#"{ + "nodeModulesDir": true +} 
+"#, + &Default::default(), + None, + true, + ) + .unwrap(); + assert!(!result.added_node_modules_dir); + assert!(!result.updated_import_map); + assert_eq!(result.new_text, None); + } } diff --git a/cli/tools/vendor/specifiers.rs b/cli/tools/vendor/specifiers.rs index 7418bcb8b5..bb7e0317a8 100644 --- a/cli/tools/vendor/specifiers.rs +++ b/cli/tools/vendor/specifiers.rs @@ -65,7 +65,7 @@ pub fn make_url_relative( } pub fn is_remote_specifier(specifier: &ModuleSpecifier) -> bool { - specifier.scheme().to_lowercase().starts_with("http") + matches!(specifier.scheme().to_lowercase().as_str(), "http" | "https") } pub fn is_remote_specifier_text(text: &str) -> bool { diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs index 774ff0d583..08b6d8355b 100644 --- a/cli/tools/vendor/test.rs +++ b/cli/tools/vendor/test.rs @@ -16,13 +16,13 @@ use deno_core::serde_json; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; +use deno_graph::GraphKind; use deno_graph::ModuleGraph; use import_map::ImportMap; use crate::cache::ParsedSourceCache; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; -use crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; use super::build::VendorEnvironment; @@ -270,21 +270,17 @@ async fn build_test_graph( None, None, )); - let deps_installer = Arc::new(PackageJsonDepsInstaller::new( - npm_registry_api.clone(), - npm_resolution.clone(), - None, - )); CliGraphResolver::new( None, Some(Arc::new(original_import_map)), false, npm_registry_api, npm_resolution, - deps_installer, + Default::default(), + Default::default(), ) }); - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::All); graph .build( roots, diff --git a/cli/tsc/00_typescript.js b/cli/tsc/00_typescript.js index 63743a3724..a477f09d9e 100644 --- a/cli/tsc/00_typescript.js +++ b/cli/tsc/00_typescript.js @@ -35,7 +35,7 @@ var ts = (() => { 
"src/compiler/corePublic.ts"() { "use strict"; versionMajorMinor = "5.0"; - version = "5.0.3"; + version = "5.0.4"; Comparison = /* @__PURE__ */ ((Comparison3) => { Comparison3[Comparison3["LessThan"] = -1] = "LessThan"; Comparison3[Comparison3["EqualTo"] = 0] = "EqualTo"; @@ -17997,6 +17997,9 @@ ${lanes.join("\n")} function moduleResolutionSupportsPackageJsonExportsAndImports(moduleResolution) { return moduleResolution >= 3 /* Node16 */ && moduleResolution <= 99 /* NodeNext */ || moduleResolution === 100 /* Bundler */; } + function shouldResolveJsRequire(compilerOptions) { + return !!compilerOptions.noDtsResolution || getEmitModuleResolutionKind(compilerOptions) !== 100 /* Bundler */; + } function getResolvePackageJsonExports(compilerOptions) { const moduleResolution = getEmitModuleResolutionKind(compilerOptions); if (!moduleResolutionSupportsPackageJsonExportsAndImports(moduleResolution)) { @@ -31698,6 +31701,12 @@ ${lanes.join("\n")} if (languageVariant === 1 /* JSX */) { return parseJsxElementOrSelfClosingElementOrFragment( /*inExpressionContext*/ + true, + /*topInvalidNodePosition*/ + void 0, + /*openingTag*/ + void 0, + /*mustBeUnary*/ true ); } @@ -31802,7 +31811,7 @@ ${lanes.join("\n")} true )), pos); } - function parseJsxElementOrSelfClosingElementOrFragment(inExpressionContext, topInvalidNodePosition, openingTag) { + function parseJsxElementOrSelfClosingElementOrFragment(inExpressionContext, topInvalidNodePosition, openingTag, mustBeUnary = false) { const pos = getNodePos(); const opening = parseJsxOpeningOrSelfClosingElementOrOpeningFragment(inExpressionContext); let result; @@ -31840,7 +31849,7 @@ ${lanes.join("\n")} Debug.assert(opening.kind === 282 /* JsxSelfClosingElement */); result = opening; } - if (inExpressionContext && token() === 29 /* LessThanToken */) { + if (!mustBeUnary && inExpressionContext && token() === 29 /* LessThanToken */) { const topBadPos = typeof topInvalidNodePosition === "undefined" ? 
result.pos : topInvalidNodePosition; const invalidElement = tryParse(() => parseJsxElementOrSelfClosingElementOrFragment( /*inExpressionContext*/ @@ -38075,7 +38084,8 @@ ${lanes.join("\n")} affectsBuildInfo: true, category: Diagnostics.Modules, description: Diagnostics.Allow_imports_to_include_TypeScript_file_extensions_Requires_moduleResolution_bundler_and_either_noEmit_or_emitDeclarationOnly_to_be_set, - defaultValueDescription: false + defaultValueDescription: false, + transpileOptionValue: void 0 }, { name: "resolvePackageJsonExports", @@ -43773,7 +43783,7 @@ ${lanes.join("\n")} } if (!isBindingPattern(node.name)) { const possibleVariableDecl = node.kind === 257 /* VariableDeclaration */ ? node : node.parent.parent; - if (isInJSFile(node) && getEmitModuleResolutionKind(options) !== 100 /* Bundler */ && isVariableDeclarationInitializedToBareOrAccessedRequire(possibleVariableDecl) && !getJSDocTypeTag(node) && !(getCombinedModifierFlags(node) & 1 /* Export */)) { + if (isInJSFile(node) && shouldResolveJsRequire(options) && isVariableDeclarationInitializedToBareOrAccessedRequire(possibleVariableDecl) && !getJSDocTypeTag(node) && !(getCombinedModifierFlags(node) & 1 /* Export */)) { declareSymbolAndAddToSymbolTable(node, 2097152 /* Alias */, 2097152 /* AliasExcludes */); } else if (isBlockOrCatchScoped(node)) { bindBlockScopedDeclaration(node, 2 /* BlockScopedVariable */, 111551 /* BlockScopedVariableExcludes */); @@ -47284,7 +47294,7 @@ ${lanes.join("\n")} const hasDefaultOnly = isOnlyImportedAsDefault(specifier); const hasSyntheticDefault = canHaveSyntheticDefault(file, moduleSymbol, dontResolveAlias, specifier); if (!exportDefaultSymbol && !hasSyntheticDefault && !hasDefaultOnly) { - if (hasExportAssignmentSymbol(moduleSymbol) && !(getAllowSyntheticDefaultImports(compilerOptions) || getESModuleInterop(compilerOptions))) { + if (hasExportAssignmentSymbol(moduleSymbol) && !allowSyntheticDefaultImports) { const compilerOptionName = moduleKind >= 5 /* ES2015 */ ? 
"allowSyntheticDefaultImports" : "esModuleInterop"; const exportEqualsSymbol = moduleSymbol.exports.get("export=" /* ExportEquals */); const exportAssignment = exportEqualsSymbol.valueDeclaration; @@ -47452,7 +47462,7 @@ ${lanes.join("\n")} if (!isIdentifier(name)) { return void 0; } - const suppressInteropError = name.escapedText === "default" /* Default */ && !!(compilerOptions.allowSyntheticDefaultImports || getESModuleInterop(compilerOptions)); + const suppressInteropError = name.escapedText === "default" /* Default */ && allowSyntheticDefaultImports; const targetSymbol = resolveESModuleSymbol( moduleSymbol, moduleSpecifier, @@ -52116,7 +52126,7 @@ ${lanes.join("\n")} return; } let verbatimTargetName = isShorthandAmbientModuleSymbol(target) && getSomeTargetNameFromDeclarations(symbol.declarations) || unescapeLeadingUnderscores(target.escapedName); - if (verbatimTargetName === "export=" /* ExportEquals */ && (getESModuleInterop(compilerOptions) || compilerOptions.allowSyntheticDefaultImports)) { + if (verbatimTargetName === "export=" /* ExportEquals */ && allowSyntheticDefaultImports) { verbatimTargetName = "default" /* Default */; } const targetName = getInternalSymbolName(target, verbatimTargetName); @@ -73215,7 +73225,7 @@ ${lanes.join("\n")} return anyType; } } - if (isInJSFile(node) && getEmitModuleResolutionKind(compilerOptions) !== 100 /* Bundler */ && isCommonJsRequire(node)) { + if (isInJSFile(node) && shouldResolveJsRequire(compilerOptions) && isCommonJsRequire(node)) { return resolveExternalModuleTypeByLiteral(node.arguments[0]); } const returnType = getReturnTypeOfSignature(signature); @@ -92253,11 +92263,12 @@ ${lanes.join("\n")} return visitEachChild(node, visitor, context); } function visitArrayAssignmentElement(node) { - Debug.assertNode(node, isArrayBindingOrAssignmentElement); - if (isSpreadElement(node)) - return visitAssignmentRestElement(node); - if (!isOmittedExpression(node)) - return visitAssignmentElement(node); + if 
(isArrayBindingOrAssignmentElement(node)) { + if (isSpreadElement(node)) + return visitAssignmentRestElement(node); + if (!isOmittedExpression(node)) + return visitAssignmentElement(node); + } return visitEachChild(node, visitor, context); } function visitAssignmentProperty(node) { @@ -117468,7 +117479,7 @@ ${lanes.join("\n")} false ); } - const shouldProcessRequires = isJavaScriptFile && getEmitModuleResolutionKind(options) !== 100 /* Bundler */; + const shouldProcessRequires = isJavaScriptFile && shouldResolveJsRequire(options); if (file.flags & 2097152 /* PossiblyContainsDynamicImport */ || shouldProcessRequires) { collectDynamicImportOrRequireCalls(file); } @@ -118396,9 +118407,6 @@ ${lanes.join("\n")} if (moduleKind === 2 /* AMD */ || moduleKind === 3 /* UMD */ || moduleKind === 4 /* System */) { createDiagnosticForOptionName(Diagnostics.Option_verbatimModuleSyntax_cannot_be_used_when_module_is_set_to_UMD_AMD_or_System, "verbatimModuleSyntax"); } - if (options.isolatedModules) { - createRedundantOptionDiagnostic("isolatedModules", "verbatimModuleSyntax"); - } if (options.preserveValueImports) { createRedundantOptionDiagnostic("preserveValueImports", "verbatimModuleSyntax"); } @@ -169808,6 +169816,7 @@ ${options.prefix}` : "\n" : options.prefix setValueDeclaration: () => setValueDeclaration, shouldAllowImportingTsExtension: () => shouldAllowImportingTsExtension, shouldPreserveConstEnums: () => shouldPreserveConstEnums, + shouldResolveJsRequire: () => shouldResolveJsRequire, shouldUseUriStyleNodeCoreModules: () => shouldUseUriStyleNodeCoreModules, showModuleSpecifier: () => showModuleSpecifier, signatureHasLiteralTypes: () => signatureHasLiteralTypes, diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts index b59f6dca81..66c0946972 100644 --- a/cli/tsc/compiler.d.ts +++ b/cli/tsc/compiler.d.ts @@ -46,6 +46,8 @@ declare global { encode(value: string): Uint8Array; // deno-lint-ignore no-explicit-any ops: Record any>; + // deno-lint-ignore no-explicit-any + 
asyncOps: Record any>; print(msg: string, stderr: boolean): void; registerErrorClass( name: string, diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index 1e9819309e..15aadff814 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -26,7 +26,6 @@ const UNSTABLE_DENO_PROPS: &[&str] = &[ "listen", "listenDatagram", "dlopen", - "ppid", "removeSignalListener", "shutdown", "umask", diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 4d41aea436..0247eda9c5 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -1488,6 +1488,12 @@ declare namespace Deno { * would resolve to `n` < `p.byteLength`. `write()` must not modify the * slice data, even temporarily. * + * This function is one of the lowest + * level APIs and most users should not work with this directly, but rather use + * [`writeAll()`](https://deno.land/std/streams/write_all.ts?s=writeAll) from + * [`std/streams/write_all.ts`](https://deno.land/std/streams/write_all.ts) + * instead. + * * Implementations should not retain a reference to `p`. */ write(p: Uint8Array): Promise; @@ -1559,7 +1565,7 @@ declare namespace Deno { * * It returns the updated offset. */ - seekSync(offset: number, whence: SeekMode): number; + seekSync(offset: number | bigint, whence: SeekMode): number; } /** @@ -1832,7 +1838,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(await Deno.seek(file.rid, 2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(await Deno.seek(file.rid, -2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(await Deno.seek(file.rid, -2, Deno.SeekMode.End)); // "9" (i.e. 
11-2) * file.close(); * ``` * @@ -1879,7 +1885,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(Deno.seekSync(file.rid, 2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(Deno.seekSync(file.rid, -2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(Deno.seekSync(file.rid, -2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * file.close(); * ``` * @@ -1887,7 +1893,7 @@ declare namespace Deno { */ export function seekSync( rid: number, - offset: number, + offset: number | bigint, whence: SeekMode, ): number; @@ -2200,7 +2206,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(await file.seek(2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(await file.seek(-2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(await file.seek(-2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * ``` */ seek(offset: number | bigint, whence: SeekMode): Promise; @@ -2238,7 +2244,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(file.seekSync(2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(file.seekSync(-2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(file.seekSync(-2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * file.close(); * ``` */ @@ -3116,6 +3122,22 @@ declare namespace Deno { * * _Linux/Mac OS only._ */ blocks: number | null; + /** True if this is info for a block device. + * + * _Linux/Mac OS only._ */ + isBlockDevice: boolean | null; + /** True if this is info for a char device. + * + * _Linux/Mac OS only._ */ + isCharDevice: boolean | null; + /** True if this is info for a fifo. + * + * _Linux/Mac OS only._ */ + isFifo: boolean | null; + /** True if this is info for a socket. 
+ * + * _Linux/Mac OS only._ */ + isSocket: boolean | null; } /** Resolves to the absolute normalized path, with symbolic links resolved. @@ -3691,7 +3713,10 @@ declare namespace Deno { options?: { recursive: boolean }, ): FsWatcher; - /** Options which can be used with {@linkcode Deno.run}. + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * Options which can be used with {@linkcode Deno.run}. * * @category Sub Process */ export interface RunOptions { @@ -3749,7 +3774,10 @@ declare namespace Deno { stdin?: "inherit" | "piped" | "null" | number; } - /** The status resolved from the `.status()` method of a + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * The status resolved from the `.status()` method of a * {@linkcode Deno.Process} instance. * * If `success` is `true`, then `code` will be `0`, but if `success` is @@ -3769,6 +3797,8 @@ declare namespace Deno { }; /** + * * @deprecated Use {@linkcode Deno.Command} instead. + * * Represents an instance of a sub process that is returned from * {@linkcode Deno.run} which can be used to manage the sub-process. * @@ -3925,7 +3955,10 @@ declare namespace Deno { handler: () => void, ): void; - /** Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`, + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`, * an array of program arguments, the first of which is the binary. * * ```ts @@ -3992,11 +4025,14 @@ declare namespace Deno { * "console.log('Hello World')", * ], * stdin: "piped", + * stdout: "piped", * }); * const child = command.spawn(); * * // open a file and pipe the subprocess output to it. 
- * child.stdout.pipeTo(Deno.openSync("output").writable); + * child.stdout.pipeTo( + * Deno.openSync("output", { write: true, create: true }).writable, + * ); * * // manually close stdin * child.stdin.close(); @@ -4033,6 +4069,7 @@ declare namespace Deno { * console.assert("world\n" === new TextDecoder().decode(stderr)); * ``` * + * @tags allow-run * @category Sub Process */ export class Command { @@ -4203,6 +4240,14 @@ declare namespace Deno { * * @default {4} */ depth?: number; + /** The maximum length for an inspection to take up a single line. + * + * @default {80} */ + breakLength?: number; + /** Whether or not to escape sequences. + * + * @default {true} */ + escapeSequences?: boolean; /** The maximum number of iterable entries to print. * * @default {100} */ diff --git a/cli/tsc/dts/lib.deno.shared_globals.d.ts b/cli/tsc/dts/lib.deno.shared_globals.d.ts index d0b44f58af..603cc78f2e 100644 --- a/cli/tsc/dts/lib.deno.shared_globals.d.ts +++ b/cli/tsc/dts/lib.deno.shared_globals.d.ts @@ -11,7 +11,6 @@ /// /// /// -/// /** @category WebAssembly */ declare namespace WebAssembly { @@ -349,7 +348,7 @@ declare namespace WebAssembly { export function validate(bytes: BufferSource): boolean; } -/** Sets a timer which executes a function once after the timer expires. Returns +/** Sets a timer which executes a function once after the delay (in milliseconds) elapses. Returns * an id which may be used to cancel the timeout. * * ```ts diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index 9c4bd5d2cf..27d3af4cd4 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -2,6 +2,7 @@ /// /// +/// declare namespace Deno { export {}; // stop default export type behavior @@ -97,6 +98,8 @@ declare namespace Deno { /** **UNSTABLE**: New API, yet to be vetted. * * The native struct type for interfacing with foreign functions. 
+ * + * @category FFI */ type NativeStructType = { readonly struct: readonly NativeType[] }; @@ -351,7 +354,9 @@ declare namespace Deno { : StaticForeignSymbol; }; + /** @category FFI */ const brand: unique symbol; + /** @category FFI */ type PointerObject = { [brand]: unknown }; /** **UNSTABLE**: New API, yet to be vetted. @@ -643,8 +648,11 @@ declare namespace Deno { /** * This magic code used to implement better type hints for {@linkcode Deno.dlopen} + * + * @category FFI */ type Cast = A extends B ? A : B; + /** @category FFI */ type Const = Cast< T, | (T extends string | number | bigint | boolean ? T : never) @@ -813,6 +821,22 @@ declare namespace Deno { certChain?: string; /** PEM formatted (RSA or PKCS8) private key of client certificate. */ privateKey?: string; + /** Sets the maximum numer of idle connections per host allowed in the pool. */ + poolMaxIdlePerHost?: number; + /** Set an optional timeout for idle sockets being kept-alive. + * Set to false to disable the timeout. */ + poolIdleTimeout?: number | false; + /** + * Whether HTTP/1.1 is allowed or not. + * + * @default {true} + */ + http1?: boolean; + /** Whether HTTP/2 is allowed or not. + * + * @default {true} + */ + http2?: boolean; } /** **UNSTABLE**: New API, yet to be vetted. @@ -1295,6 +1319,28 @@ declare namespace Deno { handler: ServeHandler; } + /** **UNSTABLE**: New API, yet to be vetted. + * + * @category HTTP Server + */ + export interface Server { + /** A promise that resolves once server finishes - eg. when aborted using + * the signal passed to {@linkcode ServeOptions.signal}. + */ + finished: Promise; + + /** + * Make the server block the event loop from finishing. + * + * Note: the server blocks the event loop from finishing by default. + * This method is only meaningful after `.unref()` is called. + */ + ref(): void; + + /** Make the server not block the event loop from finishing. */ + unref(): void; + } + /** **UNSTABLE**: New API, yet to be vetted. 
* * Serves HTTP requests with the given handler. @@ -1323,8 +1369,11 @@ declare namespace Deno { * ```ts * const ac = new AbortController(); * - * Deno.serve({ signal: ac.signal }, (_req) => new Response("Hello, world")) - * .then(() => console.log("Server closed")); + * const server = Deno.serve( + * { signal: ac.signal }, + * (_req) => new Response("Hello, world") + * ); + * server.finished.then(() => console.log("Server closed")); * * console.log("Closing server..."); * ac.abort(); @@ -1354,10 +1403,7 @@ declare namespace Deno { * * @category HTTP Server */ - export function serve( - handler: ServeHandler, - options?: ServeOptions | ServeTlsOptions, - ): Promise; + export function serve(handler: ServeHandler): Server; /** **UNSTABLE**: New API, yet to be vetted. * * Serves HTTP requests with the given handler. @@ -1386,8 +1432,11 @@ declare namespace Deno { * ```ts * const ac = new AbortController(); * - * Deno.serve({ signal: ac.signal }, (_req) => new Response("Hello, world")) - * .then(() => console.log("Server closed")); + * const server = Deno.serve( + * { signal: ac.signal }, + * (_req) => new Response("Hello, world") + * ); + * server.finished.then(() => console.log("Server closed")); * * console.log("Closing server..."); * ac.abort(); @@ -1420,7 +1469,7 @@ declare namespace Deno { export function serve( options: ServeOptions | ServeTlsOptions, handler: ServeHandler, - ): Promise; + ): Server; /** **UNSTABLE**: New API, yet to be vetted. * * Serves HTTP requests with the given handler. 
@@ -1449,8 +1498,11 @@ declare namespace Deno { * ```ts * const ac = new AbortController(); * - * Deno.serve({ signal: ac.signal }, (_req) => new Response("Hello, world")) - * .then(() => console.log("Server closed")); + * const server = Deno.serve( + * { signal: ac.signal }, + * (_req) => new Response("Hello, world") + * ); + * server.finished.then(() => console.log("Server closed")); * * console.log("Closing server..."); * ac.abort(); @@ -1482,7 +1534,7 @@ declare namespace Deno { */ export function serve( options: ServeInit & (ServeOptions | ServeTlsOptions), - ): Promise; + ): Server; /** **UNSTABLE**: New API, yet to be vetted. * @@ -1512,25 +1564,6 @@ declare namespace Deno { request: Request, ): Promise<[Deno.Conn, Uint8Array]>; - /** **UNSTABLE**: New API, yet to be vetted. - * - * Allows "hijacking" the connection that the request is associated with. - * This can be used to implement protocols that build on top of HTTP (eg. - * {@linkcode WebSocket}). - * - * Unlike {@linkcode Deno.upgradeHttp} this function does not require that you - * respond to the request with a {@linkcode Response} object. Instead this - * function returns the underlying connection and first packet received - * immediately, and then the caller is responsible for writing the response to - * the connection. - * - * This method can only be called on requests originating the - * {@linkcode Deno.serve} server. - * - * @category HTTP Server - */ - export function upgradeHttpRaw(request: Request): [Deno.Conn, Uint8Array]; - /** **UNSTABLE**: New API, yet to be vetted. * * Open a new {@linkcode Deno.Kv} connection to persist data. @@ -1560,6 +1593,10 @@ declare namespace Deno { * relative significance of the types can be found in documentation for the * {@linkcode Deno.KvKeyPart} type. * + * Keys have a maximum size of 2048 bytes serialized. If the size of the key + * exceeds this limit, an error will be thrown on the operation that this key + * was passed to. 
+ * * @category KV */ export type KvKey = readonly KvKeyPart[]; @@ -1642,7 +1679,8 @@ declare namespace Deno { * - `sum` - Adds the given value to the existing value of the key. Both the * value specified in the mutation, and any existing value must be of type * `Deno.KvU64`. If the key does not exist, the value is set to the given - * value (summed with 0). + * value (summed with 0). If the result of the sum overflows an unsigned + * 64-bit integer, the result is wrapped around. * - `max` - Sets the value of the key to the maximum of the existing value * and the given value. Both the value specified in the mutation, and any * existing value must be of type `Deno.KvU64`. If the key does not exist, @@ -1770,11 +1808,18 @@ declare namespace Deno { batchSize?: number; } + /** @category KV */ export interface KvCommitResult { + ok: true; /** The versionstamp of the value committed to KV. */ versionstamp: string; } + /** @category KV */ + export interface KvCommitError { + ok: false; + } + /** **UNSTABLE**: New API, yet to be vetted. * * A check to perform as part of a {@linkcode Deno.AtomicOperation}. The check @@ -1816,11 +1861,13 @@ declare namespace Deno { * * The `commit` method of an atomic operation returns a value indicating * whether checks passed and mutations were performed. If the operation failed - * because of a failed check, the return value will be `null`. If the + * because of a failed check, the return value will be a + * {@linkcode Deno.KvCommitError} with an `ok: false` property. If the * operation failed for any other reason (storage error, invalid value, etc.), * an exception will be thrown. If the operation succeeded, the return value - * will be a {@linkcode Deno.KvCommitResult} object containing the - * versionstamp of the value committed to KV. + * will be a {@linkcode Deno.KvCommitResult} object with a `ok: true` property + * and the versionstamp of the value committed to KV. 
+ * * @category KV */ @@ -1840,9 +1887,23 @@ declare namespace Deno { */ mutate(...mutations: KvMutation[]): this; /** - * Shortcut for creating a sum mutation. + * Shortcut for creating a `sum` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. */ sum(key: KvKey, n: bigint): this; + /** + * Shortcut for creating a `min` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. + */ + min(key: KvKey, n: bigint): this; + /** + * Shortcut for creating a `max` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. + */ + max(key: KvKey, n: bigint): this; /** * Add to the operation a mutation that sets the value of the specified key * to the specified value if all checks pass during the commit. @@ -1856,17 +1917,19 @@ declare namespace Deno { /** * Commit the operation to the KV store. Returns a value indicating whether * checks passed and mutations were performed. If the operation failed - * because of a failed check, the return value will be `null`. If the - * operation failed for any other reason (storage error, invalid value, - * etc.), an exception will be thrown. If the operation succeeded, the - * return value will be a {@linkcode Deno.KvCommitResult} object containing - * the versionstamp of the value committed to KV. + * because of a failed check, the return value will be a {@linkcode + * Deno.KvCommitError} with an `ok: false` property. If the operation failed + * for any other reason (storage error, invalid value, etc.), an exception + * will be thrown. If the operation succeeded, the return value will be a + * {@linkcode Deno.KvCommitResult} object with a `ok: true` property and the + * versionstamp of the value committed to KV. 
* - * If the commit returns `null`, one may create a new atomic operation with - * updated checks and mutations and attempt to commit it again. See the note - * on optimistic locking in the documentation for {@linkcode Deno.AtomicOperation}. + * If the commit returns `ok: false`, one may create a new atomic operation + * with updated checks and mutations and attempt to commit it again. See the + * note on optimistic locking in the documentation for + * {@linkcode Deno.AtomicOperation}. */ - commit(): Promise; + commit(): Promise; } /** **UNSTABLE**: New API, yet to be vetted. @@ -1900,7 +1963,8 @@ declare namespace Deno { * maximum length of 64 KiB after serialization. Serialization of both keys * and values is somewhat opaque, but one can usually assume that the * serialization of any value is about the same length as the resulting string - * of a JSON serialization of that same value. + * of a JSON serialization of that same value. If theses limits are exceeded, + * an exception will be thrown. * * @category KV */ @@ -2034,10 +2098,10 @@ declare namespace Deno { /** * Close the database connection. This will prevent any further operations - * from being performed on the database, but will wait for any in-flight - * operations to complete before closing the underlying database connection. + * from being performed on the database, and interrupt any in-flight + * operations immediately. */ - close(): Promise; + close(): void; } /** **UNSTABLE**: New API, yet to be vetted. diff --git a/cli/tsc/dts/lib.dom.extras.d.ts b/cli/tsc/dts/lib.dom.extras.d.ts index 2c593b2cbe..9116596a6a 100644 --- a/cli/tsc/dts/lib.dom.extras.d.ts +++ b/cli/tsc/dts/lib.dom.extras.d.ts @@ -23,7 +23,7 @@ declare type URLPatternInput = string | URLPatternInit; declare interface URLPatternComponentResult { input: string; - groups: Record; + groups: Record; } /** `URLPatternResult` is the object returned from `URLPattern.exec`. 
*/ diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 57a4a1be83..83fd84f9dc 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -4,8 +4,6 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; use crate::node; -use crate::node::CliNodeResolver; -use crate::node::NodeResolution; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; @@ -33,7 +31,10 @@ use deno_core::Snapshot; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; +use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; @@ -116,13 +117,7 @@ pub fn get_types_declaration_file_text(unstable: bool) -> String { } fn get_asset_texts_from_new_runtime() -> Result, AnyError> { - deno_core::extension!( - deno_cli_tsc, - ops_fn = deno_ops, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, - ); + deno_core::extension!(deno_cli_tsc, ops_fn = deno_ops); // the assets are stored within the typescript isolate, so take them out of there let mut runtime = JsRuntime::new(RuntimeOptions { @@ -305,7 +300,7 @@ pub struct Request { pub debug: bool, pub graph: Arc, pub hash_data: u64, - pub maybe_node_resolver: Option>, + pub maybe_node_resolver: Option>, pub maybe_tsbuildinfo: Option, /// A vector of strings that represent the root/entry point modules for the /// program. 
@@ -329,7 +324,7 @@ struct State { graph: Arc, maybe_tsbuildinfo: Option, maybe_response: Option, - maybe_node_resolver: Option>, + maybe_node_resolver: Option>, remapped_specifiers: HashMap, root_map: HashMap, current_dir: PathBuf, @@ -339,7 +334,7 @@ impl State { pub fn new( graph: Arc, hash_data: u64, - maybe_node_resolver: Option>, + maybe_node_resolver: Option>, maybe_tsbuildinfo: Option, root_map: HashMap, remapped_specifiers: HashMap, @@ -537,7 +532,7 @@ fn op_resolve( }; for specifier in args.specifiers { if let Some(module_name) = specifier.strip_prefix("node:") { - if crate::node::resolve_builtin_node_module(module_name).is_ok() { + if deno_node::is_builtin_node_module(module_name) { // return itself for node: specifiers because during type checking // we resolve to the ambient modules in the @types/node package // rather than deno_std/node @@ -638,7 +633,7 @@ fn resolve_graph_specifier_types( let maybe_resolution = node_resolver.resolve_npm_reference( &module.nv_reference, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), )?; Ok(Some(NodeResolution::into_specifier_and_media_type( maybe_resolution, @@ -678,7 +673,7 @@ fn resolve_non_graph_specifier_types( specifier, referrer, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), ) .ok() .flatten(), @@ -691,7 +686,7 @@ fn resolve_non_graph_specifier_types( let maybe_resolution = node_resolver.resolve_npm_req_reference( &npm_ref, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), )?; Ok(Some(NodeResolution::into_specifier_and_media_type( maybe_resolution, @@ -779,9 +774,6 @@ pub fn exec(request: Request) -> Result { .unwrap(), )); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); let startup_source = ascii_str!("globalThis.startup({ legacyFlag: false })"); @@ -847,6 +839,7 @@ mod tests { use 
crate::args::TsConfig; use deno_core::futures::future; use deno_core::OpState; + use deno_graph::GraphKind; use deno_graph::ModuleGraph; use std::fs; @@ -890,7 +883,7 @@ mod tests { let hash_data = maybe_hash_data.unwrap_or(0); let fixtures = test_util::testdata_path().join("tsc2"); let mut loader = MockLoader { fixtures }; - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::TypesOnly); graph .build(vec![specifier], &mut loader, Default::default()) .await; @@ -916,7 +909,7 @@ mod tests { let hash_data = 123; // something random let fixtures = test_util::testdata_path().join("tsc2"); let mut loader = MockLoader { fixtures }; - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::TypesOnly); graph .build(vec![specifier.clone()], &mut loader, Default::default()) .await; diff --git a/cli/util/draw_thread.rs b/cli/util/draw_thread.rs index 028b20d00e..2fd81a78ab 100644 --- a/cli/util/draw_thread.rs +++ b/cli/util/draw_thread.rs @@ -2,6 +2,7 @@ use console_static_text::ConsoleStaticText; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use deno_runtime::ops::tty::ConsoleSize; use once_cell::sync::Lazy; use std::sync::Arc; @@ -162,7 +163,7 @@ impl DrawThread { internal_state.has_draw_thread = true; let drawer_id = internal_state.drawer_id; - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut previous_size = console_size(); loop { let mut delay_ms = 120; diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index 05415f2a63..1ad5e9ba07 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -304,6 +304,13 @@ where } loop { + // We may need to give the runtime a tick to settle, as cancellations may need to propagate + // to tasks. We choose yielding 10 times to the runtime as a decent heuristic. If watch tests + // start to fail, this may need to be increased. 
+ for _ in 0..10 { + tokio::task::yield_now().await; + } + let mut watcher = new_watcher(watcher_sender.clone())?; consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 7cfd0ced79..658002e3b6 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -3,6 +3,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; pub use deno_core::normalize_path; +use deno_core::task::spawn_blocking; use deno_core::ModuleSpecifier; use deno_runtime::deno_crypto::rand; use deno_runtime::deno_node::PathClean; @@ -81,11 +82,7 @@ pub fn write_file_2>( /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { - let path = path.canonicalize()?; - #[cfg(windows)] - return Ok(strip_unc_prefix(path)); - #[cfg(not(windows))] - return Ok(path); + Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) } /// Canonicalizes a path which might be non-existent by going up the @@ -96,12 +93,19 @@ pub fn canonicalize_path(path: &Path) -> Result { /// subsequently be created along this path by some other code. 
pub fn canonicalize_path_maybe_not_exists( path: &Path, +) -> Result { + canonicalize_path_maybe_not_exists_with_fs(path, canonicalize_path) +} + +pub fn canonicalize_path_maybe_not_exists_with_fs( + path: &Path, + canonicalize: impl Fn(&Path) -> Result, ) -> Result { let path = path.to_path_buf().clean(); let mut path = path.as_path(); let mut names_stack = Vec::new(); loop { - match canonicalize_path(path) { + match canonicalize(path) { Ok(mut canonicalized_path) => { for name in names_stack.into_iter().rev() { canonicalized_path = canonicalized_path.join(name); @@ -117,47 +121,6 @@ pub fn canonicalize_path_maybe_not_exists( } } -#[cfg(windows)] -fn strip_unc_prefix(path: PathBuf) -> PathBuf { - use std::path::Component; - use std::path::Prefix; - - let mut components = path.components(); - match components.next() { - Some(Component::Prefix(prefix)) => { - match prefix.kind() { - // \\?\device - Prefix::Verbatim(device) => { - let mut path = PathBuf::new(); - path.push(format!(r"\\{}\", device.to_string_lossy())); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - // \\?\c:\path - Prefix::VerbatimDisk(_) => { - let mut path = PathBuf::new(); - path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); - path.extend(components); - path - } - // \\?\UNC\hostname\share_name\path - Prefix::VerbatimUNC(hostname, share_name) => { - let mut path = PathBuf::new(); - path.push(format!( - r"\\{}\{}\", - hostname.to_string_lossy(), - share_name.to_string_lossy() - )); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - _ => path, - } - } - _ => path, - } -} - pub fn resolve_from_cwd(path: &Path) -> Result { let resolved_path = if path.is_absolute() { path.to_owned() @@ -541,7 +504,7 @@ impl LaxSingleProcessFsFlag { // This uses a blocking task because we use a single threaded // runtime and this is time sensitive so we don't want it to update // at the whims of of whatever is occurring on the 
runtime thread. - tokio::task::spawn_blocking({ + spawn_blocking({ let token = token.clone(); let last_updated_path = last_updated_path.clone(); move || { @@ -921,41 +884,6 @@ mod tests { assert_eq!(result, expected); } - #[cfg(windows)] - #[test] - fn test_strip_unc_prefix() { - run_test(r"C:\", r"C:\"); - run_test(r"C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\?\C:\", r"C:\"); - run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\.\C:\", r"\\.\C:\"); - run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); - - run_test(r"\\?\UNC\localhost\", r"\\localhost"); - run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); - run_test( - r"\\?\UNC\localhost\c$\Windows\file.txt", - r"\\localhost\c$\Windows\file.txt", - ); - run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); - - run_test(r"\\?\server1", r"\\server1"); - run_test(r"\\?\server1\e$\", r"\\server1\e$\"); - run_test( - r"\\?\server1\e$\test\file.txt", - r"\\server1\e$\test\file.txt", - ); - - fn run_test(input: &str, expected: &str) { - assert_eq!( - strip_unc_prefix(PathBuf::from(input)), - PathBuf::from(expected) - ); - } - } - #[tokio::test] async fn lax_fs_lock() { let temp_dir = TempDir::new(); diff --git a/cli/util/v8.rs b/cli/util/v8.rs index 6afaf285e3..93a2ef83f7 100644 --- a/cli/util/v8.rs +++ b/cli/util/v8.rs @@ -10,17 +10,26 @@ pub fn get_v8_flags_from_env() -> Vec { #[inline(always)] pub fn construct_v8_flags( + default_v8_flags: &[String], v8_flags: &[String], env_v8_flags: Vec, ) -> Vec { std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned()) + .chain(default_v8_flags.iter().cloned()) .chain(env_v8_flags.into_iter()) .chain(v8_flags.iter().cloned()) .collect::>() } -pub fn init_v8_flags(v8_flags: &[String], env_v8_flags: Vec) { - if v8_flags.is_empty() && env_v8_flags.is_empty() { +pub fn init_v8_flags( + default_v8_flags: &[String], + v8_flags: &[String], + env_v8_flags: Vec, +) { + if default_v8_flags.is_empty() + && v8_flags.is_empty() + && 
env_v8_flags.is_empty() + { return; } @@ -29,7 +38,7 @@ pub fn init_v8_flags(v8_flags: &[String], env_v8_flags: Vec) { .chain(v8_flags) .any(|flag| flag == "-help" || flag == "--help"); // Keep in sync with `standalone.rs`. - let v8_flags = construct_v8_flags(v8_flags, env_v8_flags); + let v8_flags = construct_v8_flags(default_v8_flags, v8_flags, env_v8_flags); let unrecognized_v8_flags = deno_core::v8_set_flags(v8_flags) .into_iter() .skip(1) diff --git a/cli/watcher.rs b/cli/watcher.rs new file mode 100644 index 0000000000..f9c2c1b42d --- /dev/null +++ b/cli/watcher.rs @@ -0,0 +1,99 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::args::CliOptions; +use crate::cache::ParsedSourceCache; +use crate::graph_util::ModuleGraphContainer; +use crate::module_loader::CjsResolutionStore; + +use deno_core::parking_lot::Mutex; +use deno_core::ModuleSpecifier; + +use std::path::PathBuf; +use std::sync::Arc; + +pub struct FileWatcher { + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, +} + +impl FileWatcher { + pub fn new( + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, + ) -> Self { + Self { + cli_options, + cjs_resolutions, + parsed_source_cache, + graph_container, + maybe_reporter, + } + } + /// Reset all runtime state to its default. This should be used on file + /// watcher restarts. + pub fn reset(&self) { + self.cjs_resolutions.clear(); + self.parsed_source_cache.clear(); + self.graph_container.clear(); + + self.init_watcher(); + } + + // Add invariant files like the import map and explicit watch flag list to + // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. 
+ pub fn init_watcher(&self) { + let files_to_watch_sender = match &self.maybe_reporter { + Some(reporter) => &reporter.sender, + None => return, + }; + if let Some(watch_paths) = self.cli_options.watch_paths() { + files_to_watch_sender.send(watch_paths.clone()).unwrap(); + } + if let Ok(Some(import_map_path)) = self + .cli_options + .resolve_import_map_specifier() + .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) + { + files_to_watch_sender.send(vec![import_map_path]).unwrap(); + } + } +} + +#[derive(Clone, Debug)] +pub struct FileWatcherReporter { + sender: tokio::sync::mpsc::UnboundedSender>, + file_paths: Arc>>, +} + +impl FileWatcherReporter { + pub fn new(sender: tokio::sync::mpsc::UnboundedSender>) -> Self { + Self { + sender, + file_paths: Default::default(), + } + } +} + +impl deno_graph::source::Reporter for FileWatcherReporter { + fn on_load( + &self, + specifier: &ModuleSpecifier, + modules_done: usize, + modules_total: usize, + ) { + let mut file_paths = self.file_paths.lock(); + if specifier.scheme() == "file" { + file_paths.push(specifier.to_file_path().unwrap()); + } + + if modules_done == modules_total { + self.sender.send(file_paths.drain(..).collect()).unwrap(); + } + } +} diff --git a/cli/worker.rs b/cli/worker.rs index 7ee8fc8021..b8bb6e9416 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -5,15 +5,30 @@ use std::rc::Rc; use std::sync::Arc; use deno_ast::ModuleSpecifier; +use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::task::LocalFutureObj; use deno_core::futures::FutureExt; use deno_core::located_script_name; +use deno_core::parking_lot::Mutex; +use deno_core::url::Url; +use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::ModuleId; +use deno_core::ModuleLoader; +use deno_core::SharedArrayBufferStore; +use deno_core::SourceMapGetter; +use deno_lockfile::Lockfile; use deno_runtime::colors; +use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; +use 
deno_runtime::deno_fs; use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; +use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_tls::RootCertStoreProvider; +use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; +use deno_runtime::inspector_server::InspectorServer; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; use deno_runtime::permissions::PermissionsContainer; @@ -22,24 +37,89 @@ use deno_runtime::web_worker::WebWorkerOptions; use deno_runtime::worker::MainWorker; use deno_runtime::worker::WorkerOptions; use deno_runtime::BootstrapOptions; +use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; -use crate::args::DenoSubcommand; +use crate::args::StorageKeyResolver; use crate::errors; -use crate::module_loader::CliModuleLoader; -use crate::node; +use crate::npm::CliNpmResolver; use crate::ops; -use crate::proc_state::ProcState; use crate::tools; use crate::tools::coverage::CoverageCollector; use crate::util::checksum; use crate::version; +pub trait ModuleLoaderFactory: Send + Sync { + fn create_for_main( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc; + + fn create_for_worker( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc; + + fn create_source_map_getter(&self) -> Option>; +} + +// todo(dsherret): this is temporary and we should remove this +// once we no longer conditionally initialize the node runtime +pub trait HasNodeSpecifierChecker: Send + Sync { + fn has_node_specifier(&self) -> bool; +} + +#[derive(Clone)] +pub struct CliMainWorkerOptions { + pub argv: Vec, + pub log_level: WorkerLogLevel, + pub coverage_dir: Option, + pub enable_testing_features: bool, + pub has_node_modules_dir: bool, + pub inspect_brk: bool, + pub inspect_wait: bool, + pub is_inspecting: bool, + pub is_npm_main: bool, 
+ pub location: Option, + pub maybe_binary_npm_command_name: Option, + pub origin_data_folder_path: Option, + pub seed: Option, + pub unsafely_ignore_certificate_errors: Option>, + pub unstable: bool, +} + +struct SharedWorkerState { + options: CliMainWorkerOptions, + storage_key_resolver: StorageKeyResolver, + npm_resolver: Arc, + node_resolver: Arc, + has_node_specifier_checker: Box, + blob_store: BlobStore, + broadcast_channel: InMemoryBroadcastChannel, + shared_array_buffer_store: SharedArrayBufferStore, + compiled_wasm_module_store: CompiledWasmModuleStore, + module_loader_factory: Box, + root_cert_store_provider: Arc, + fs: Arc, + maybe_inspector_server: Option>, + maybe_lockfile: Option>>, +} + +impl SharedWorkerState { + pub fn should_initialize_node_runtime(&self) -> bool { + self.npm_resolver.has_packages() + || self.has_node_specifier_checker.has_node_specifier() + || self.options.is_npm_main + } +} + pub struct CliMainWorker { main_module: ModuleSpecifier, is_main_cjs: bool, worker: MainWorker, - ps: ProcState, + shared: Arc, } impl CliMainWorker { @@ -63,7 +143,7 @@ impl CliMainWorker { &mut self.worker.js_runtime, &self.main_module.to_file_path().unwrap().to_string_lossy(), true, - self.ps.options.inspect_brk().is_some(), + self.shared.options.inspect_brk, )?; } else { self.execute_main_module_possibly_with_npm().await?; @@ -184,33 +264,17 @@ impl CliMainWorker { &mut self, id: ModuleId, ) -> Result<(), AnyError> { - if self.ps.npm_resolver.has_packages() - || self.ps.graph_container.graph().has_node_specifier - { + if self.shared.should_initialize_node_runtime() { self.initialize_main_module_for_node()?; } self.worker.evaluate_module(id).await } fn initialize_main_module_for_node(&mut self) -> Result<(), AnyError> { - let mut maybe_binary_command_name = None; - - if let DenoSubcommand::Run(flags) = self.ps.options.sub_command() { - if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) { - // if the user ran a binary command, we'll need 
to set process.argv[0] - // to be the name of the binary command instead of deno - let binary_name = pkg_ref - .sub_path - .as_deref() - .unwrap_or(pkg_ref.req.name.as_str()); - maybe_binary_command_name = Some(binary_name.to_string()); - } - } - deno_node::initialize_runtime( &mut self.worker.js_runtime, - self.ps.options.has_node_modules_dir(), - maybe_binary_command_name, + self.shared.options.has_node_modules_dir, + self.shared.options.maybe_binary_npm_command_name.as_deref(), )?; Ok(()) @@ -219,7 +283,7 @@ impl CliMainWorker { pub async fn maybe_setup_coverage_collector( &mut self, ) -> Result, AnyError> { - if let Some(ref coverage_dir) = self.ps.options.coverage_dir() { + if let Some(coverage_dir) = &self.shared.options.coverage_dir { let session = self.worker.create_inspector_session().await; let coverage_dir = PathBuf::from(coverage_dir); @@ -236,143 +300,204 @@ impl CliMainWorker { } } -pub async fn create_main_worker( - ps: &ProcState, - main_module: ModuleSpecifier, - permissions: PermissionsContainer, -) -> Result { - create_custom_worker(ps, main_module, permissions, vec![], Default::default()) - .await +pub struct CliMainWorkerFactory { + shared: Arc, } -pub async fn create_custom_worker( - ps: &ProcState, - main_module: ModuleSpecifier, - permissions: PermissionsContainer, - mut custom_extensions: Vec, - stdio: deno_runtime::deno_io::Stdio, -) -> Result { - let (main_module, is_main_cjs) = if let Ok(package_ref) = - NpmPackageReqReference::from_specifier(&main_module) - { - ps.npm_resolver - .add_package_reqs(vec![package_ref.req.clone()]) - .await?; - let node_resolution = - ps.node_resolver.resolve_binary_export(&package_ref)?; - let is_main_cjs = - matches!(node_resolution, node::NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) - } else if ps.options.is_npm_main() { - let node_resolution = - ps.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = - matches!(node_resolution, 
node::NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) - } else { - (main_module, false) - }; +impl CliMainWorkerFactory { + #[allow(clippy::too_many_arguments)] + pub fn new( + storage_key_resolver: StorageKeyResolver, + npm_resolver: Arc, + node_resolver: Arc, + has_node_specifier_checker: Box, + blob_store: BlobStore, + module_loader_factory: Box, + root_cert_store_provider: Arc, + fs: Arc, + maybe_inspector_server: Option>, + maybe_lockfile: Option>>, + options: CliMainWorkerOptions, + ) -> Self { + Self { + shared: Arc::new(SharedWorkerState { + options, + storage_key_resolver, + npm_resolver, + node_resolver, + has_node_specifier_checker, + blob_store, + broadcast_channel: Default::default(), + shared_array_buffer_store: Default::default(), + compiled_wasm_module_store: Default::default(), + module_loader_factory, + root_cert_store_provider, + fs, + maybe_inspector_server, + maybe_lockfile, + }), + } + } - let module_loader = CliModuleLoader::new( - ps.clone(), - PermissionsContainer::allow_all(), - permissions.clone(), - ); + pub async fn create_main_worker( + &self, + main_module: ModuleSpecifier, + permissions: PermissionsContainer, + ) -> Result { + self + .create_custom_worker( + main_module, + permissions, + vec![], + Default::default(), + ) + .await + } - let maybe_inspector_server = ps.maybe_inspector_server.clone(); + pub async fn create_custom_worker( + &self, + main_module: ModuleSpecifier, + permissions: PermissionsContainer, + mut custom_extensions: Vec, + stdio: deno_runtime::deno_io::Stdio, + ) -> Result { + let shared = &self.shared; + let (main_module, is_main_cjs) = if let Ok(package_ref) = + NpmPackageReqReference::from_specifier(&main_module) + { + shared + .npm_resolver + .add_package_reqs(&[package_ref.req.clone()]) + .await?; + let node_resolution = + shared.node_resolver.resolve_binary_export(&package_ref)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - let create_web_worker_cb = 
- create_web_worker_callback(ps.clone(), stdio.clone()); - let web_worker_preload_module_cb = - create_web_worker_preload_module_callback(ps.clone()); - let web_worker_pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(ps.clone()); + if let Some(lockfile) = &shared.maybe_lockfile { + // For npm binary commands, ensure that the lockfile gets updated + // so that we can re-use the npm resolution the next time it runs + // for better performance + lockfile + .lock() + .write() + .context("Failed writing lockfile.")?; + } - let maybe_storage_key = ps.options.resolve_storage_key(&main_module); - let origin_storage_dir = maybe_storage_key.as_ref().map(|key| { - ps.dir - .origin_data_folder_path() - .join(checksum::gen(&[key.as_bytes()])) - }); - let cache_storage_dir = maybe_storage_key.map(|key| { - // TODO(@satyarohith): storage quota management - // Note: we currently use temp_dir() to avoid managing storage size. - std::env::temp_dir() - .join("deno_cache") - .join(checksum::gen(&[key.as_bytes()])) - }); + (node_resolution.into_url(), is_main_cjs) + } else if shared.options.is_npm_main { + let node_resolution = + shared.node_resolver.url_to_node_resolution(main_module)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); + (node_resolution.into_url(), is_main_cjs) + } else { + (main_module, false) + }; - let mut extensions = ops::cli_exts(ps.npm_resolver.clone()); - extensions.append(&mut custom_extensions); + let module_loader = shared + .module_loader_factory + .create_for_main(PermissionsContainer::allow_all(), permissions.clone()); + let maybe_source_map_getter = + shared.module_loader_factory.create_source_map_getter(); + let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let options = WorkerOptions { - bootstrap: BootstrapOptions { - args: ps.options.argv().clone(), - cpu_count: std::thread::available_parallelism() - .map(|p| p.get()) - .unwrap_or(1), - debug_flag: ps + let create_web_worker_cb = 
+ create_web_worker_callback(shared.clone(), stdio.clone()); + let web_worker_preload_module_cb = + create_web_worker_preload_module_callback(shared); + let web_worker_pre_execute_module_cb = + create_web_worker_pre_execute_module_callback(shared.clone()); + + let maybe_storage_key = shared + .storage_key_resolver + .resolve_storage_key(&main_module); + let origin_storage_dir = maybe_storage_key.as_ref().map(|key| { + shared .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - enable_testing_features: ps.options.enable_testing_features(), - locale: deno_core::v8::icu::get_language_tag(), - location: ps.options.location_flag().clone(), - no_color: !colors::use_color(), - is_tty: colors::is_tty(), - runtime_version: version::deno().to_string(), - ts_version: version::TYPESCRIPT.to_string(), - unstable: ps.options.unstable(), - user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), - }, - extensions, - startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: ps - .options - .unsafely_ignore_certificate_errors() - .clone(), - root_cert_store: Some(ps.root_cert_store.clone()), - seed: ps.options.seed(), - source_map_getter: Some(Box::new(module_loader.clone())), - format_js_error_fn: Some(Arc::new(format_js_error)), - create_web_worker_cb, - web_worker_preload_module_cb, - web_worker_pre_execute_module_cb, - maybe_inspector_server, - should_break_on_first_statement: ps.options.inspect_brk().is_some(), - should_wait_for_inspector_session: ps.options.inspect_wait().is_some(), - module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())), - get_error_class_fn: Some(&errors::get_error_class_name), - cache_storage_dir, - origin_storage_dir, - blob_store: ps.blob_store.clone(), - broadcast_channel: ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: 
Some(ps.compiled_wasm_module_store.clone()), - stdio, - }; + .origin_data_folder_path + .as_ref() + .unwrap() // must be set if storage key resolver returns a value + .join(checksum::gen(&[key.as_bytes()])) + }); + let cache_storage_dir = maybe_storage_key.map(|key| { + // TODO(@satyarohith): storage quota management + // Note: we currently use temp_dir() to avoid managing storage size. + std::env::temp_dir() + .join("deno_cache") + .join(checksum::gen(&[key.as_bytes()])) + }); - let worker = MainWorker::bootstrap_from_options( - main_module.clone(), - permissions, - options, - ); + let mut extensions = ops::cli_exts(shared.npm_resolver.clone()); + extensions.append(&mut custom_extensions); - Ok(CliMainWorker { - main_module, - is_main_cjs, - worker, - ps: ps.clone(), - }) + let options = WorkerOptions { + bootstrap: BootstrapOptions { + args: shared.options.argv.clone(), + cpu_count: std::thread::available_parallelism() + .map(|p| p.get()) + .unwrap_or(1), + log_level: shared.options.log_level, + enable_testing_features: shared.options.enable_testing_features, + locale: deno_core::v8::icu::get_language_tag(), + location: shared.options.location.clone(), + no_color: !colors::use_color(), + is_tty: colors::is_tty(), + runtime_version: version::deno().to_string(), + ts_version: version::TYPESCRIPT.to_string(), + unstable: shared.options.unstable, + user_agent: version::get_user_agent().to_string(), + inspect: shared.options.is_inspecting, + }, + extensions, + startup_snapshot: Some(crate::js::deno_isolate_init()), + create_params: None, + unsafely_ignore_certificate_errors: shared + .options + .unsafely_ignore_certificate_errors + .clone(), + root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), + seed: shared.options.seed, + source_map_getter: maybe_source_map_getter, + format_js_error_fn: Some(Arc::new(format_js_error)), + create_web_worker_cb, + web_worker_preload_module_cb, + web_worker_pre_execute_module_cb, + maybe_inspector_server, + 
should_break_on_first_statement: shared.options.inspect_brk, + should_wait_for_inspector_session: shared.options.inspect_wait, + module_loader, + fs: shared.fs.clone(), + npm_resolver: Some(shared.npm_resolver.clone()), + get_error_class_fn: Some(&errors::get_error_class_name), + cache_storage_dir, + origin_storage_dir, + blob_store: shared.blob_store.clone(), + broadcast_channel: shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some( + shared.compiled_wasm_module_store.clone(), + ), + stdio, + }; + + let worker = MainWorker::bootstrap_from_options( + main_module.clone(), + permissions, + options, + ); + + Ok(CliMainWorker { + main_module, + is_main_cjs, + worker, + shared: shared.clone(), + }) + } } // TODO(bartlomieju): this callback could have default value // and not be required fn create_web_worker_preload_module_callback( - _ps: ProcState, + _shared: &Arc, ) -> Arc { Arc::new(move |worker| { let fut = async move { Ok(worker) }; @@ -381,16 +506,16 @@ fn create_web_worker_preload_module_callback( } fn create_web_worker_pre_execute_module_callback( - ps: ProcState, + shared: Arc, ) -> Arc { Arc::new(move |mut worker| { - let ps = ps.clone(); + let shared = shared.clone(); let fut = async move { // this will be up to date after pre-load - if ps.npm_resolver.has_packages() { + if shared.should_initialize_node_runtime() { deno_node::initialize_runtime( &mut worker.js_runtime, - ps.options.has_node_modules_dir(), + shared.options.has_node_modules_dir, None, )?; } @@ -402,27 +527,29 @@ fn create_web_worker_pre_execute_module_callback( } fn create_web_worker_callback( - ps: ProcState, + shared: Arc, stdio: deno_runtime::deno_io::Stdio, ) -> Arc { Arc::new(move |args| { - let maybe_inspector_server = ps.maybe_inspector_server.clone(); + let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let module_loader = CliModuleLoader::new_for_worker( - ps.clone(), + let 
module_loader = shared.module_loader_factory.create_for_worker( args.parent_permissions.clone(), args.permissions.clone(), ); + let maybe_source_map_getter = + shared.module_loader_factory.create_source_map_getter(); let create_web_worker_cb = - create_web_worker_callback(ps.clone(), stdio.clone()); - let preload_module_cb = - create_web_worker_preload_module_callback(ps.clone()); + create_web_worker_callback(shared.clone(), stdio.clone()); + let preload_module_cb = create_web_worker_preload_module_callback(&shared); let pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(ps.clone()); + create_web_worker_pre_execute_module_callback(shared.clone()); - let extensions = ops::cli_exts(ps.npm_resolver.clone()); + let extensions = ops::cli_exts(shared.npm_resolver.clone()); - let maybe_storage_key = ps.options.resolve_storage_key(&args.main_module); + let maybe_storage_key = shared + .storage_key_resolver + .resolve_storage_key(&args.main_module); let cache_storage_dir = maybe_storage_key.map(|key| { // TODO(@satyarohith): storage quota management // Note: we currently use temp_dir() to avoid managing storage size. 
@@ -433,48 +560,47 @@ fn create_web_worker_callback( let options = WebWorkerOptions { bootstrap: BootstrapOptions { - args: ps.options.argv().clone(), + args: shared.options.argv.clone(), cpu_count: std::thread::available_parallelism() .map(|p| p.get()) .unwrap_or(1), - debug_flag: ps - .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - enable_testing_features: ps.options.enable_testing_features(), + log_level: shared.options.log_level, + enable_testing_features: shared.options.enable_testing_features, locale: deno_core::v8::icu::get_language_tag(), location: Some(args.main_module.clone()), no_color: !colors::use_color(), is_tty: colors::is_tty(), runtime_version: version::deno().to_string(), ts_version: version::TYPESCRIPT.to_string(), - unstable: ps.options.unstable(), + unstable: shared.options.unstable, user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), + inspect: shared.options.is_inspecting, }, extensions, startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: ps + unsafely_ignore_certificate_errors: shared .options - .unsafely_ignore_certificate_errors() + .unsafely_ignore_certificate_errors .clone(), - root_cert_store: Some(ps.root_cert_store.clone()), - seed: ps.options.seed(), + root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), + seed: shared.options.seed, create_web_worker_cb, preload_module_cb, pre_execute_module_cb, format_js_error_fn: Some(Arc::new(format_js_error)), - source_map_getter: Some(Box::new(module_loader.clone())), + source_map_getter: maybe_source_map_getter, module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())), + fs: shared.fs.clone(), + npm_resolver: Some(shared.npm_resolver.clone()), worker_type: args.worker_type, maybe_inspector_server, get_error_class_fn: Some(&errors::get_error_class_name), - blob_store: ps.blob_store.clone(), - broadcast_channel: 
ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()), + blob_store: shared.blob_store.clone(), + broadcast_channel: shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some( + shared.compiled_wasm_module_store.clone(), + ), stdio: stdio.clone(), cache_storage_dir, }; @@ -493,9 +619,6 @@ fn create_web_worker_callback( mod tests { use super::*; use deno_core::resolve_path; - use deno_core::FsModuleLoader; - use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; - use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; fn create_test_worker() -> MainWorker { @@ -504,30 +627,8 @@ mod tests { let permissions = PermissionsContainer::new(Permissions::default()); let options = WorkerOptions { - bootstrap: BootstrapOptions::default(), - extensions: vec![], startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: None, - root_cert_store: None, - seed: None, - format_js_error_fn: None, - source_map_getter: None, - web_worker_preload_module_cb: Arc::new(|_| unreachable!()), - web_worker_pre_execute_module_cb: Arc::new(|_| unreachable!()), - create_web_worker_cb: Arc::new(|_| unreachable!()), - maybe_inspector_server: None, - should_break_on_first_statement: false, - should_wait_for_inspector_session: false, - module_loader: Rc::new(FsModuleLoader), - npm_resolver: None, - get_error_class_fn: None, - cache_storage_dir: None, - origin_storage_dir: None, - blob_store: BlobStore::default(), - broadcast_channel: InMemoryBroadcastChannel::default(), - shared_array_buffer_store: None, - compiled_wasm_module_store: None, - stdio: Default::default(), + ..Default::default() }; MainWorker::bootstrap_from_options(main_module, permissions, options) diff --git a/core/00_primordials.js 
b/core/00_primordials.js index f49a11de4b..60474e649d 100644 --- a/core/00_primordials.js +++ b/core/00_primordials.js @@ -405,7 +405,11 @@ Map, class SafeMap extends Map { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -413,7 +417,11 @@ WeakMap, class SafeWeakMap extends WeakMap { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -422,7 +430,11 @@ Set, class SafeSet extends Set { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -430,7 +442,11 @@ WeakSet, class SafeWeakSet extends WeakSet { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); diff --git a/core/01_core.js b/core/01_core.js index a8bdeb2a86..13aa17c7ed 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -14,25 +14,31 @@ MapPrototypeHas, MapPrototypeSet, ObjectAssign, + ObjectDefineProperty, ObjectFreeze, ObjectFromEntries, + ObjectKeys, Promise, + PromiseReject, + PromiseResolve, PromisePrototypeThen, + Proxy, RangeError, ReferenceError, ReflectHas, + ReflectApply, SafeArrayIterator, SafeMap, SafePromisePrototypeFinally, - setQueueMicrotask, StringPrototypeSlice, StringPrototypeSplit, SymbolFor, SyntaxError, TypeError, URIError, + setQueueMicrotask, } = window.__bootstrap.primordials; - const { ops } = window.Deno.core; + const { ops, asyncOps } = window.Deno.core; const build = { target: "unknown", @@ -85,6 +91,17 @@ return opCallTracingEnabled; } + function movePromise(promiseId) { + const idx = promiseId % RING_SIZE; + // Move old promise from ring to map + const oldPromise = promiseRing[idx]; + if (oldPromise !== NO_PROMISE) { + const oldPromiseId = promiseId - RING_SIZE; + MapPrototypeSet(promiseMap, oldPromiseId, oldPromise); + } + return promiseRing[idx] = NO_PROMISE; + } + function setPromise(promiseId) { const idx = 
promiseId % RING_SIZE; // Move old promise from ring to map @@ -208,7 +225,29 @@ return error; } - function unwrapOpResult(res) { + function unwrapOpError(hideFunction) { + return (res) => { + // .$err_class_name is a special key that should only exist on errors + const className = res?.$err_class_name; + if (!className) { + return res; + } + + const errorBuilder = errorMap[className]; + const err = errorBuilder ? errorBuilder(res.message) : new Error( + `Unregistered error class: "${className}"\n ${res.message}\n Classes of errors returned from ops should be registered via Deno.core.registerErrorClass().`, + ); + // Set .code if error was a known OS error, see error_codes.rs + if (res.code) { + err.code = res.code; + } + // Strip unwrapOpResult() and errorBuilder() calls from stack trace + ErrorCaptureStackTrace(err, hideFunction); + throw err; + }; + } + + function unwrapOpResultNewPromise(id, res, hideFunction) { // .$err_class_name is a special key that should only exist on errors if (res?.$err_class_name) { const className = res.$err_class_name; @@ -221,59 +260,339 @@ err.code = res.code; } // Strip unwrapOpResult() and errorBuilder() calls from stack trace - ErrorCaptureStackTrace(err, unwrapOpResult); - throw err; + ErrorCaptureStackTrace(err, hideFunction); + return PromiseReject(err); } - return res; + const promise = PromiseResolve(res); + promise[promiseIdSymbol] = id; + return promise; } - function opAsync2(name, arg0, arg1) { - const id = nextPromiseId++; - let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult); - let maybeResult; - try { - maybeResult = ops[name](id, arg0, arg1); - } catch (err) { - // Cleanup the just-created promise - getPromise(id); - if (!ReflectHas(ops, name)) { - throw new TypeError(`${name} is not a registered op`); - } - // Rethrow the error - throw err; - } - promise = handleOpCallTracing(name, id, promise); - promise[promiseIdSymbol] = id; - if (typeof maybeResult !== "undefined") { - const promise = 
getPromise(id); - promise.resolve(maybeResult); - } + /* +Basic codegen. - return promise; +TODO(mmastrac): automate this (handlebars?) + +let s = ""; +const vars = "abcdefghijklm"; +for (let i = 0; i < 10; i++) { + let args = ""; + for (let j = 0; j < i; j++) { + args += `${vars[j]},`; + } + s += ` + case ${i}: + fn = function async_op_${i}(${args}) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, ${args}); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_${i}); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_${i}); + return PromiseReject(err); + } + let promise = PromisePrototypeThen(setPromise(id), unwrapOpError(eventLoopTick)); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + `; +} + */ + + // This function is called once per async stub + function asyncStub(opName, args) { + setUpAsyncStub(opName); + return ReflectApply(ops[opName], undefined, args); + } + + function setUpAsyncStub(opName) { + const originalOp = asyncOps[opName]; + let fn; + // The body of this switch statement can be generated using the script above. 
+ switch (originalOp.length - 1) { + case 0: + fn = function async_op_0() { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_0); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_0); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 1: + fn = function async_op_1(a) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_1); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_1); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 2: + fn = function async_op_2(a, b) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_2); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_2); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 3: + fn = function async_op_3(a, b, c) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_3); + 
} + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_3); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 4: + fn = function async_op_4(a, b, c, d) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_4); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_4); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 5: + fn = function async_op_5(a, b, c, d, e) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_5); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_5); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 6: + fn = function async_op_6(a, b, c, d, e, f) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_6); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_6); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise 
= handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 7: + fn = function async_op_7(a, b, c, d, e, f, g) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_7); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_7); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 8: + fn = function async_op_8(a, b, c, d, e, f, g, h) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g, h); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_8); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_8); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 9: + fn = function async_op_9(a, b, c, d, e, f, g, h, i) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g, h, i); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_9); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_9); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + default: + throw new Error( + `Too many arguments 
for async op codegen (length of ${opName} was ${ + originalOp.length - 1 + })`, + ); + } + ObjectDefineProperty(fn, "name", { + value: opName, + configurable: false, + writable: false, + }); + return (ops[opName] = fn); } function opAsync(name, ...args) { const id = nextPromiseId++; - let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult); - let maybeResult; try { - maybeResult = ops[name](id, ...new SafeArrayIterator(args)); - } catch (err) { - // Cleanup the just-created promise - getPromise(id); - if (!ReflectHas(ops, name)) { - throw new TypeError(`${name} is not a registered op`); + const maybeResult = asyncOps[name](id, ...new SafeArrayIterator(args)); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, opAsync); } - // Rethrow the error - throw err; + } catch (err) { + movePromise(id); + if (!ReflectHas(asyncOps, name)) { + return PromiseReject(new TypeError(`${name} is not a registered op`)); + } + ErrorCaptureStackTrace(err, opAsync); + return PromiseReject(err); } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); promise = handleOpCallTracing(name, id, promise); promise[promiseIdSymbol] = id; - if (typeof maybeResult !== "undefined") { - const promise = getPromise(id); - promise.resolve(maybeResult); - } - return promise; } @@ -439,10 +758,43 @@ ); } + // Eagerly initialize ops for snapshot purposes + for (const opName of new SafeArrayIterator(ObjectKeys(asyncOps))) { + setUpAsyncStub(opName); + } + + function ensureFastOps() { + return new Proxy({}, { + get(_target, opName) { + if (ops[opName] === undefined) { + throw new Error(`Unknown or disabled op '${opName}'`); + } + if (asyncOps[opName] !== undefined) { + return setUpAsyncStub(opName); + } else { + return ops[opName]; + } + }, + }); + } + + const { + op_close: close, + op_try_close: tryClose, + op_read: read, + op_read_all: readAll, + op_write: write, + op_write_all: writeAll, + op_read_sync: 
readSync, + op_write_sync: writeSync, + op_shutdown: shutdown, + } = ensureFastOps(); + // Extra Deno.core.* exports const core = ObjectAssign(globalThis.Deno.core, { + asyncStub, + ensureFastOps, opAsync, - opAsync2, resources, metrics, registerErrorBuilder, @@ -460,15 +812,15 @@ unrefOp, setReportExceptionCallback, setPromiseHooks, - close: (rid) => ops.op_close(rid), - tryClose: (rid) => ops.op_try_close(rid), - read: opAsync.bind(null, "op_read"), - readAll: opAsync.bind(null, "op_read_all"), - write: opAsync.bind(null, "op_write"), - writeAll: opAsync.bind(null, "op_write_all"), - readSync: (rid, buffer) => ops.op_read_sync(rid, buffer), - writeSync: (rid, buffer) => ops.op_write_sync(rid, buffer), - shutdown: opAsync.bind(null, "op_shutdown"), + close, + tryClose, + read, + readAll, + write, + writeAll, + readSync, + writeSync, + shutdown, print: (msg, isErr) => ops.op_print(msg, isErr), setMacrotaskCallback, setNextTickCallback, diff --git a/core/Cargo.toml b/core/Cargo.toml index f77ae7adc0..ac132f81a0 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.181.0" +version = "0.189.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -36,6 +36,7 @@ serde_json = { workspace = true, features = ["preserve_order"] } serde_v8.workspace = true smallvec.workspace = true sourcemap = "6.1" +tokio.workspace = true url.workspace = true v8.workspace = true @@ -46,4 +47,3 @@ path = "examples/http_bench_json_ops/main.rs" # These dependencies are only used for the 'http_bench_*_ops' examples. [dev-dependencies] deno_ast.workspace = true -tokio.workspace = true diff --git a/core/bindings.js b/core/bindings.js new file mode 100644 index 0000000000..21d27a2c32 --- /dev/null +++ b/core/bindings.js @@ -0,0 +1,51 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+if (!globalThis.Deno) { + globalThis.Deno = { + core: { + ops: {}, + asyncOps: {}, + }, + }; +} + +Deno.__op__console = function (callConsole, console) { + Deno.core.callConsole = callConsole; + Deno.core.console = console; +}; + +Deno.__op__registerOp = function (isAsync, op, opName) { + const core = Deno.core; + if (isAsync) { + if (core.ops[opName] !== undefined) { + return; + } + core.asyncOps[opName] = op; + const fn = function (...args) { + if (this !== core.ops) { + // deno-lint-ignore prefer-primordials + throw new Error( + "An async stub cannot be separated from Deno.core.ops. Use ???", + ); + } + return core.asyncStub(opName, args); + }; + fn.name = opName; + core.ops[opName] = fn; + } else { + core.ops[opName] = op; + } +}; + +Deno.__op__unregisterOp = function (isAsync, opName) { + if (isAsync) { + delete Deno.core.asyncOps[opName]; + } + delete Deno.core.ops[opName]; +}; + +Deno.__op__cleanup = function () { + delete Deno.__op__console; + delete Deno.__op__registerOp; + delete Deno.__op__unregisterOp; + delete Deno.__op__cleanup; +}; diff --git a/core/bindings.rs b/core/bindings.rs index 5650b78f36..2be9b35b65 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -1,22 +1,21 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use log::debug; +use std::fmt::Write; use std::option::Option; use std::os::raw::c_void; - -use log::debug; use v8::MapFnTo; use crate::error::is_instance_of_error; use crate::error::JsStackFrame; use crate::modules::get_asserted_module_type_from_assertions; use crate::modules::parse_import_assertions; -use crate::modules::resolve_helper; use crate::modules::validate_import_assertions; use crate::modules::ImportAssertionsKind; use crate::modules::ModuleMap; use crate::modules::ResolutionKind; use crate::ops::OpCtx; -use crate::snapshot_util::SnapshotOptions; +use crate::runtime::InitMode; use crate::JsRealm; use crate::JsRuntime; @@ -98,125 +97,116 @@ pub fn module_origin<'a>( ) } +fn get<'s, T>( + scope: &mut v8::HandleScope<'s>, + from: v8::Local, + key: &'static [u8], + path: &'static str, +) -> T +where + v8::Local<'s, v8::Value>: TryInto, +{ + let key = v8::String::new_external_onebyte_static(scope, key).unwrap(); + from + .get(scope, key.into()) + .unwrap_or_else(|| panic!("{path} exists")) + .try_into() + .unwrap_or_else(|_| panic!("unable to convert")) +} + pub(crate) fn initialize_context<'s>( - scope: &mut v8::HandleScope<'s, ()>, + scope: &mut v8::HandleScope<'s>, + context: v8::Local<'s, v8::Context>, op_ctxs: &[OpCtx], - snapshot_options: SnapshotOptions, + init_mode: InitMode, ) -> v8::Local<'s, v8::Context> { - let context = v8::Context::new(scope); let global = context.global(scope); - let scope = &mut v8::ContextScope::new(scope, context); + let mut codegen = String::with_capacity(op_ctxs.len() * 200); + codegen.push_str(include_str!("bindings.js")); + _ = writeln!( + codegen, + "Deno.__op__ = function(opFns, callConsole, console) {{" + ); + if init_mode == InitMode::New { + _ = writeln!(codegen, "Deno.__op__console(callConsole, console);"); + } + for op_ctx in op_ctxs { + if op_ctx.decl.enabled { + _ = writeln!( + codegen, + "Deno.__op__registerOp({}, opFns[{}], \"{}\");", + op_ctx.decl.is_async, op_ctx.id, op_ctx.decl.name + ); + } else { + _ = 
writeln!( + codegen, + "Deno.__op__unregisterOp({}, \"{}\");", + op_ctx.decl.is_async, op_ctx.decl.name + ); + } + } + codegen.push_str("Deno.__op__cleanup();"); + _ = writeln!(codegen, "}}"); - let deno_str = - v8::String::new_external_onebyte_static(scope, b"Deno").unwrap(); - let core_str = - v8::String::new_external_onebyte_static(scope, b"core").unwrap(); - let ops_str = v8::String::new_external_onebyte_static(scope, b"ops").unwrap(); + let script = v8::String::new_from_one_byte( + scope, + codegen.as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap(); + let script = v8::Script::compile(scope, script, None).unwrap(); + script.run(scope); - let ops_obj = if snapshot_options.loaded() { - // Snapshot already registered `Deno.core.ops` but - // extensions may provide ops that aren't part of the snapshot. - // Grab the Deno.core.ops object & init it - let deno_obj: v8::Local = global - .get(scope, deno_str.into()) - .unwrap() - .try_into() - .unwrap(); - let core_obj: v8::Local = deno_obj - .get(scope, core_str.into()) - .unwrap() - .try_into() - .unwrap(); - let ops_obj: v8::Local = core_obj - .get(scope, ops_str.into()) - .expect("Deno.core.ops to exist") - .try_into() - .unwrap(); - ops_obj + let deno = get(scope, global, b"Deno", "Deno"); + let op_fn: v8::Local = + get(scope, deno, b"__op__", "Deno.__op__"); + let recv = v8::undefined(scope); + let op_fns = v8::Array::new(scope, op_ctxs.len() as i32); + for op_ctx in op_ctxs { + let op_fn = op_ctx_function(scope, op_ctx); + op_fns.set_index(scope, op_ctx.id as u32, op_fn.into()); + } + if init_mode == InitMode::FromSnapshot { + op_fn.call(scope, recv.into(), &[op_fns.into()]); } else { - // globalThis.Deno = { core: { } }; - let deno_obj = v8::Object::new(scope); - global.set(scope, deno_str.into(), deno_obj.into()); - - let core_obj = v8::Object::new(scope); - deno_obj.set(scope, core_str.into(), core_obj.into()); - // Bind functions to Deno.core.* - set_func(scope, core_obj, "callConsole", call_console); + 
let call_console_fn = v8::Function::new(scope, call_console).unwrap(); // Bind v8 console object to Deno.core.console let extra_binding_obj = context.get_extras_binding_object(scope); - let console_str = - v8::String::new_external_onebyte_static(scope, b"console").unwrap(); - let console_obj = extra_binding_obj.get(scope, console_str.into()).unwrap(); - core_obj.set(scope, console_str.into(), console_obj); + let console_obj: v8::Local = get( + scope, + extra_binding_obj, + b"console", + "ExtrasBindingObject.console", + ); - // Bind functions to Deno.core.ops.* - let ops_obj = v8::Object::new(scope); - core_obj.set(scope, ops_str.into(), ops_obj.into()); - ops_obj - }; - - if matches!(snapshot_options, SnapshotOptions::Load) { - // Only register ops that have `force_registration` flag set to true, - // the remaining ones should already be in the snapshot. - for op_ctx in op_ctxs - .iter() - .filter(|op_ctx| op_ctx.decl.force_registration) - { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } - } else if matches!(snapshot_options, SnapshotOptions::CreateFromExisting) { - // Register all ops, probing for which ones are already registered. - for op_ctx in op_ctxs { - let key = v8::String::new_external_onebyte_static( - scope, - op_ctx.decl.name.as_bytes(), - ) - .unwrap(); - if ops_obj.get(scope, key.into()).is_some() { - continue; - } - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } - } else { - // In other cases register all ops unconditionally. 
- for op_ctx in op_ctxs { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } + op_fn.call( + scope, + recv.into(), + &[op_fns.into(), call_console_fn.into(), console_obj.into()], + ); } context } -fn set_func( - scope: &mut v8::HandleScope<'_>, - obj: v8::Local, - name: &'static str, - callback: impl v8::MapFnTo, -) { - let key = - v8::String::new_external_onebyte_static(scope, name.as_bytes()).unwrap(); - let val = v8::Function::new(scope, callback).unwrap(); - val.set_name(key); - obj.set(scope, key.into(), val.into()); -} - -fn add_op_to_deno_core_ops( - scope: &mut v8::HandleScope<'_>, - obj: v8::Local, +fn op_ctx_function<'s>( + scope: &mut v8::HandleScope<'s>, op_ctx: &OpCtx, -) { +) -> v8::Local<'s, v8::Function> { let op_ctx_ptr = op_ctx as *const OpCtx as *const c_void; - let key = + let external = v8::External::new(scope, op_ctx_ptr as *mut c_void); + let v8name = v8::String::new_external_onebyte_static(scope, op_ctx.decl.name.as_bytes()) .unwrap(); - let external = v8::External::new(scope, op_ctx_ptr as *mut c_void); - let builder = v8::FunctionTemplate::builder_raw(op_ctx.decl.v8_fn_ptr) - .data(external.into()); + let builder: v8::FunctionBuilder = + v8::FunctionTemplate::builder_raw(op_ctx.decl.v8_fn_ptr) + .data(external.into()) + .length(op_ctx.decl.arg_count as i32); - let templ = if let Some(fast_function) = &op_ctx.decl.fast_fn { + let template = if let Some(fast_function) = &op_ctx.decl.fast_fn { builder.build_fast( scope, fast_function, @@ -227,9 +217,10 @@ fn add_op_to_deno_core_ops( } else { builder.build(scope) }; - let val = templ.get_function(scope).unwrap(); - val.set_name(key); - obj.set(scope, key.into(), val.into()); + + let v8fn = template.get_function(scope).unwrap(); + v8fn.set_name(v8name); + v8fn } pub extern "C" fn wasm_async_resolve_promise_callback( @@ -265,46 +256,43 @@ pub fn host_import_module_dynamically_callback<'s>( .unwrap() .to_rust_string_lossy(scope); - let is_ext_module = specifier_str.starts_with("ext:"); let 
resolver = v8::PromiseResolver::new(scope).unwrap(); let promise = resolver.get_promise(scope); - if !is_ext_module { - let assertions = parse_import_assertions( - scope, - import_assertions, - ImportAssertionsKind::DynamicImport, + let assertions = parse_import_assertions( + scope, + import_assertions, + ImportAssertionsKind::DynamicImport, + ); + + { + let tc_scope = &mut v8::TryCatch::new(scope); + validate_import_assertions(tc_scope, &assertions); + if tc_scope.has_caught() { + let e = tc_scope.exception().unwrap(); + resolver.reject(tc_scope, e); + } + } + let asserted_module_type = + get_asserted_module_type_from_assertions(&assertions); + + let resolver_handle = v8::Global::new(scope, resolver); + { + let state_rc = JsRuntime::state_from(scope); + let module_map_rc = JsRuntime::module_map_from(scope); + + debug!( + "dyn_import specifier {} referrer {} ", + specifier_str, referrer_name_str ); - - { - let tc_scope = &mut v8::TryCatch::new(scope); - validate_import_assertions(tc_scope, &assertions); - if tc_scope.has_caught() { - let e = tc_scope.exception().unwrap(); - resolver.reject(tc_scope, e); - } - } - let asserted_module_type = - get_asserted_module_type_from_assertions(&assertions); - - let resolver_handle = v8::Global::new(scope, resolver); - { - let state_rc = JsRuntime::state(scope); - let module_map_rc = JsRuntime::module_map(scope); - - debug!( - "dyn_import specifier {} referrer {} ", - specifier_str, referrer_name_str - ); - ModuleMap::load_dynamic_import( - module_map_rc, - &specifier_str, - &referrer_name_str, - asserted_module_type, - resolver_handle, - ); - state_rc.borrow_mut().notify_new_dynamic_import(); - } + ModuleMap::load_dynamic_import( + module_map_rc, + &specifier_str, + &referrer_name_str, + asserted_module_type, + resolver_handle, + ); + state_rc.borrow_mut().notify_new_dynamic_import(); } // Map errors from module resolution (not JS errors from module execution) to // ones rethrown from this scope, so they include the call stack 
of the @@ -317,16 +305,6 @@ pub fn host_import_module_dynamically_callback<'s>( let promise = promise.catch(scope, map_err).unwrap(); - if is_ext_module { - let message = v8::String::new_external_onebyte_static( - scope, - b"Cannot load extension module from external code", - ) - .unwrap(); - let exception = v8::Exception::type_error(scope, message); - resolver.reject(scope, exception); - } - Some(promise) } @@ -337,7 +315,7 @@ pub extern "C" fn host_initialize_import_meta_object_callback( ) { // SAFETY: `CallbackScope` can be safely constructed from `Local` let scope = &mut unsafe { v8::CallbackScope::new(context) }; - let module_map_rc = JsRuntime::module_map(scope); + let module_map_rc = JsRuntime::module_map_from(scope); let module_map = module_map_rc.borrow(); let module_global = v8::Global::new(scope, module); @@ -380,14 +358,8 @@ fn import_meta_resolve( let url_prop = args.data(); url_prop.to_rust_string_lossy(scope) }; - let module_map_rc = JsRuntime::module_map(scope); - let (loader, snapshot_loaded_and_not_snapshotting) = { - let module_map = module_map_rc.borrow(); - ( - module_map.loader.clone(), - module_map.snapshot_loaded_and_not_snapshotting, - ) - }; + let module_map_rc = JsRuntime::module_map_from(scope); + let loader = module_map_rc.borrow().loader.clone(); let specifier_str = specifier.to_rust_string_lossy(scope); if specifier_str.starts_with("npm:") { @@ -395,13 +367,8 @@ fn import_meta_resolve( return; } - match resolve_helper( - snapshot_loaded_and_not_snapshotting, - loader, - &specifier_str, - &referrer, - ResolutionKind::DynamicImport, - ) { + match loader.resolve(&specifier_str, &referrer, ResolutionKind::DynamicImport) + { Ok(resolved) => { let resolved_val = serde_v8::to_v8(scope, resolved.as_str()).unwrap(); rv.set(resolved_val); @@ -511,7 +478,7 @@ pub extern "C" fn promise_reject_callback(message: v8::PromiseRejectMessage) { }; if has_unhandled_rejection_handler { - let state_rc = JsRuntime::state(tc_scope); + let state_rc = 
JsRuntime::state_from(tc_scope); let mut state = state_rc.borrow_mut(); if let Some(pending_mod_evaluate) = state.pending_mod_evaluate.as_mut() { if !pending_mod_evaluate.has_evaluated { @@ -530,12 +497,12 @@ pub extern "C" fn promise_reject_callback(message: v8::PromiseRejectMessage) { let error_global = v8::Global::new(scope, error); context_state .pending_promise_rejections - .insert(promise_global, error_global); + .push_back((promise_global, error_global)); } PromiseHandlerAddedAfterReject => { context_state .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } PromiseRejectAfterResolved => {} PromiseResolveAfterResolved => { @@ -609,7 +576,7 @@ pub fn module_resolve_callback<'s>( // SAFETY: `CallbackScope` can be safely constructed from `Local` let scope = &mut unsafe { v8::CallbackScope::new(context) }; - let module_map_rc = JsRuntime::module_map(scope); + let module_map_rc = JsRuntime::module_map_from(scope); let module_map = module_map_rc.borrow(); let referrer_global = v8::Global::new(scope, referrer); diff --git a/core/error.rs b/core/error.rs index 3d0b20b0a8..16f813b896 100644 --- a/core/error.rs +++ b/core/error.rs @@ -209,7 +209,7 @@ impl JsStackFrame { let l = message.get_line_number(scope)? as i64; // V8's column numbers are 0-based, we want 1-based. 
let c = message.get_start_column() as i64 + 1; - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let (getter, cache) = { let state = state_rc.borrow(); ( @@ -282,7 +282,7 @@ impl JsError { frames = vec![stack_frame]; } { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let (getter, cache) = { let state = state_rc.borrow(); ( @@ -414,7 +414,7 @@ impl JsError { } } { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let (getter, cache) = { let state = state_rc.borrow(); ( diff --git a/core/examples/http_bench_json_ops/http_bench_json_ops.js b/core/examples/http_bench_json_ops/http_bench_json_ops.js index 0c3b5be13e..a840e4e9f9 100644 --- a/core/examples/http_bench_json_ops/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops/http_bench_json_ops.js @@ -3,7 +3,13 @@ // then write this fixed 'responseBuf'. The point of this benchmark is to // exercise the event loop in a simple yet semi-realistic way. -const { ops, opAsync, opAsync2 } = Deno.core; +// deno-lint-ignore-file camelcase + +const { op_listen } = Deno.core.ops; +const { + op_accept, + op_read_socket, +} = Deno.core.ensureFastOps(); const requestBuf = new Uint8Array(64 * 1024); const responseBuf = new Uint8Array( @@ -12,24 +18,10 @@ const responseBuf = new Uint8Array( .map((c) => c.charCodeAt(0)), ); -/** Listens on 0.0.0.0:4570, returns rid. */ -function listen() { - return ops.op_listen(); -} - -/** Accepts a connection, returns rid. 
*/ -function accept(serverRid) { - return opAsync("op_accept", serverRid); -} - -function read(serverRid, buf) { - return opAsync2("op_read_socket", serverRid, buf); -} - async function serve(rid) { try { while (true) { - await read(rid, requestBuf); + await op_read_socket(rid, requestBuf); if (!ops.op_try_write(rid, responseBuf)) { await Deno.core.writeAll(rid, responseBuf); } @@ -41,11 +33,12 @@ async function serve(rid) { } async function main() { - const listenerRid = listen(); + /** Listens on 0.0.0.0:4570, returns rid. */ + const listenerRid = op_listen(); Deno.core.print(`http_bench_ops listening on http://127.0.0.1:4570/\n`); while (true) { - const rid = await accept(listenerRid); + const rid = await op_accept(listenerRid); serve(rid); } } diff --git a/core/examples/http_bench_json_ops/main.rs b/core/examples/http_bench_json_ops/main.rs index 7c15f7bf24..36c0996c3a 100644 --- a/core/examples/http_bench_json_ops/main.rs +++ b/core/examples/http_bench_json_ops/main.rs @@ -3,7 +3,7 @@ use deno_core::anyhow::Error; use deno_core::op; use deno_core::AsyncRefCell; use deno_core::AsyncResult; -use deno_core::JsRuntime; +use deno_core::JsRuntimeForSnapshot; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; @@ -93,7 +93,7 @@ impl From for TcpStream { } } -fn create_js_runtime() -> JsRuntime { +fn create_js_runtime() -> JsRuntimeForSnapshot { let ext = deno_core::Extension::builder("my_ext") .ops(vec![ op_listen::decl(), @@ -103,11 +103,13 @@ fn create_js_runtime() -> JsRuntime { ]) .build(); - JsRuntime::new(deno_core::RuntimeOptions { - extensions: vec![ext], - will_snapshot: false, - ..Default::default() - }) + JsRuntimeForSnapshot::new( + deno_core::RuntimeOptions { + extensions: vec![ext], + ..Default::default() + }, + Default::default(), + ) } #[op] diff --git a/core/extensions.rs b/core/extensions.rs index a0f99c92b0..fa6d7851e7 100644 --- a/core/extensions.rs +++ b/core/extensions.rs @@ -72,7 +72,7 @@ pub struct OpDecl { pub 
is_async: bool, pub is_unstable: bool, pub is_v8: bool, - pub force_registration: bool, + pub arg_count: u8, pub fast_fn: Option, } @@ -348,6 +348,7 @@ macro_rules! extension { #[derive(Default)] pub struct Extension { + pub(crate) name: &'static str, js_files: Option>, esm_files: Option>, esm_entry_point: Option<&'static str>, @@ -357,9 +358,7 @@ pub struct Extension { event_loop_middleware: Option>, initialized: bool, enabled: bool, - name: &'static str, deps: Option<&'static [&'static str]>, - force_op_registration: bool, pub(crate) is_core: bool, } @@ -430,7 +429,6 @@ impl Extension { let mut ops = self.ops.take()?; for op in ops.iter_mut() { op.enabled = self.enabled && op.enabled; - op.force_registration = self.force_op_registration; } Some(ops) } @@ -484,7 +482,6 @@ pub struct ExtensionBuilder { event_loop_middleware: Option>, name: &'static str, deps: &'static [&'static str], - force_op_registration: bool, is_core: bool, } @@ -533,15 +530,6 @@ impl ExtensionBuilder { self } - /// Mark that ops from this extension should be added to `Deno.core.ops` - /// unconditionally. This is useful is some ops are not available - /// during snapshotting, as ops are not registered by default when a - /// `JsRuntime` is created with an existing snapshot. - pub fn force_op_registration(&mut self) -> &mut Self { - self.force_op_registration = true; - self - } - /// Consume the [`ExtensionBuilder`] and return an [`Extension`]. 
pub fn take(self) -> Extension { let js_files = Some(self.js); @@ -559,7 +547,6 @@ impl ExtensionBuilder { initialized: false, enabled: true, name: self.name, - force_op_registration: self.force_op_registration, deps, is_core: self.is_core, } @@ -582,7 +569,6 @@ impl ExtensionBuilder { enabled: true, name: self.name, deps, - force_op_registration: self.force_op_registration, is_core: self.is_core, } } diff --git a/core/inspector.rs b/core/inspector.rs index c83784fe38..d7c84608f5 100644 --- a/core/inspector.rs +++ b/core/inspector.rs @@ -11,7 +11,6 @@ use crate::futures::channel::mpsc::UnboundedSender; use crate::futures::channel::oneshot; use crate::futures::future::select; use crate::futures::future::Either; -use crate::futures::future::Future; use crate::futures::prelude::*; use crate::futures::stream::SelectAll; use crate::futures::stream::StreamExt; @@ -82,6 +81,7 @@ pub struct JsRuntimeInspector { flags: RefCell, waker: Arc, deregister_tx: Option>, + is_dispatching_message: RefCell, } impl Drop for JsRuntimeInspector { @@ -141,30 +141,16 @@ impl v8::inspector::V8InspectorClientImpl for JsRuntimeInspector { } } -/// Polling `JsRuntimeInspector` allows inspector to accept new incoming -/// connections and "pump" messages in different sessions. -/// -/// It should be polled on tick of event loop, ie. in `JsRuntime::poll_event_loop` -/// function. -impl Future for JsRuntimeInspector { - type Output = (); - fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<()> { - self.poll_sessions(Some(cx)).unwrap() - } -} - impl JsRuntimeInspector { /// Currently Deno supports only a single context in `JsRuntime` /// and thus it's id is provided as an associated contant. 
const CONTEXT_GROUP_ID: i32 = 1; pub fn new( - isolate: &mut v8::OwnedIsolate, - context: v8::Global, + scope: &mut v8::HandleScope, + context: v8::Local, is_main: bool, ) -> Rc> { - let scope = &mut v8::HandleScope::new(isolate); - let (new_session_tx, new_session_rx) = mpsc::unbounded::(); @@ -182,6 +168,7 @@ impl JsRuntimeInspector { flags: Default::default(), waker, deregister_tx: None, + is_dispatching_message: Default::default(), })); let mut self_ = self__.borrow_mut(); self_.v8_inspector = Rc::new(RefCell::new( @@ -193,7 +180,6 @@ impl JsRuntimeInspector { )); // Tell the inspector about the global context. - let context = v8::Local::new(scope, context); let context_name = v8::inspector::StringView::from(&b"global context"[..]); // NOTE(bartlomieju): this is what Node.js does and it turns out some // debuggers (like VSCode) rely on this information to disconnect after @@ -224,6 +210,10 @@ impl JsRuntimeInspector { self__ } + pub fn is_dispatching_message(&self) -> bool { + *self.is_dispatching_message.borrow() + } + pub fn context_destroyed( &mut self, scope: &mut HandleScope, @@ -238,6 +228,35 @@ impl JsRuntimeInspector { .context_destroyed(context); } + pub fn exception_thrown( + &self, + scope: &mut HandleScope, + exception: v8::Local<'_, v8::Value>, + in_promise: bool, + ) { + let context = scope.get_current_context(); + let message = v8::Exception::create_message(scope, exception); + let stack_trace = message.get_stack_trace(scope).unwrap(); + let mut v8_inspector_ref = self.v8_inspector.borrow_mut(); + let v8_inspector = v8_inspector_ref.as_mut().unwrap(); + let stack_trace = v8_inspector.create_stack_trace(stack_trace); + v8_inspector.exception_thrown( + context, + if in_promise { + v8::inspector::StringView::from("Uncaught (in promise)".as_bytes()) + } else { + v8::inspector::StringView::from("Uncaught".as_bytes()) + }, + exception, + v8::inspector::StringView::from("".as_bytes()), + v8::inspector::StringView::from("".as_bytes()), + 0, + 0, + 
stack_trace, + 0, + ); + } + pub fn has_active_sessions(&self) -> bool { self.sessions.borrow().has_active_sessions() } @@ -246,7 +265,7 @@ impl JsRuntimeInspector { self.sessions.borrow().has_blocking_sessions() } - fn poll_sessions( + pub fn poll_sessions( &self, mut invoker_cx: Option<&mut Context>, ) -> Result, BorrowMutError> { @@ -304,7 +323,9 @@ impl JsRuntimeInspector { match sessions.established.poll_next_unpin(cx) { Poll::Ready(Some(session_stream_item)) => { let (v8_session_ptr, msg) = session_stream_item; + *self.is_dispatching_message.borrow_mut() = true; InspectorSession::dispatch_message(v8_session_ptr, msg); + *self.is_dispatching_message.borrow_mut() = false; continue; } Poll::Ready(None) => break, diff --git a/core/internal.d.ts b/core/internal.d.ts index c78310aeb6..b09d188d8f 100644 --- a/core/internal.d.ts +++ b/core/internal.d.ts @@ -637,7 +637,6 @@ declare namespace __bootstrap { export const Object: typeof globalThis.Object; export const ObjectLength: typeof Object.length; export const ObjectName: typeof Object.name; - export const ObjectPrototype: typeof Object.prototype; export const ObjectAssign: typeof Object.assign; export const ObjectGetOwnPropertyDescriptor: typeof Object.getOwnPropertyDescriptor; @@ -646,6 +645,7 @@ declare namespace __bootstrap { export const ObjectGetOwnPropertyNames: typeof Object.getOwnPropertyNames; export const ObjectGetOwnPropertySymbols: typeof Object.getOwnPropertySymbols; + export const ObjectHasOwn: typeof Object.hasOwn; export const ObjectIs: typeof Object.is; export const ObjectPreventExtensions: typeof Object.preventExtensions; export const ObjectSeal: typeof Object.seal; @@ -662,6 +662,7 @@ declare namespace __bootstrap { export const ObjectEntries: typeof Object.entries; export const ObjectFromEntries: typeof Object.fromEntries; export const ObjectValues: typeof Object.values; + export const ObjectPrototype: typeof Object.prototype; export const ObjectPrototype__defineGetter__: UncurryThis< typeof 
Object.prototype.__defineGetter__ >; diff --git a/core/io.rs b/core/io.rs index 103fe79c1f..567d50bd48 100644 --- a/core/io.rs +++ b/core/io.rs @@ -3,6 +3,7 @@ use std::ops::Deref; use std::ops::DerefMut; +use bytes::Buf; use serde_v8::ZeroCopyBuf; /// BufView is a wrapper around an underlying contiguous chunk of bytes. It can @@ -26,11 +27,11 @@ enum BufViewInner { } impl BufView { - fn from_inner(inner: BufViewInner) -> Self { + const fn from_inner(inner: BufViewInner) -> Self { Self { inner, cursor: 0 } } - pub fn empty() -> Self { + pub const fn empty() -> Self { Self::from_inner(BufViewInner::Empty) } @@ -65,6 +66,20 @@ impl BufView { } } +impl Buf for BufView { + fn remaining(&self) -> usize { + self.len() + } + + fn chunk(&self) -> &[u8] { + self.deref() + } + + fn advance(&mut self, cnt: usize) { + self.advance_cursor(cnt) + } +} + impl Deref for BufView { type Target = [u8]; @@ -210,6 +225,20 @@ impl BufMutView { } } +impl Buf for BufMutView { + fn remaining(&self) -> usize { + self.len() + } + + fn chunk(&self) -> &[u8] { + self.deref() + } + + fn advance(&mut self, cnt: usize) { + self.advance_cursor(cnt) + } +} + impl Deref for BufMutView { type Target = [u8]; diff --git a/core/lib.deno_core.d.ts b/core/lib.deno_core.d.ts index 7f3ea2a191..fc78658294 100644 --- a/core/lib.deno_core.d.ts +++ b/core/lib.deno_core.d.ts @@ -23,10 +23,16 @@ declare namespace Deno { /** * List of all registered ops, in the form of a map that maps op - * name to internal numerical op id. + * name to function. */ const ops: Record any>; + /** + * List of all registered async ops, in the form of a map that maps op + * name to function. + */ + const asyncOps: Record any>; + /** * Retrieve a list of all open resources, in the form of a map that maps * resource id to the resource name. 
diff --git a/core/lib.rs b/core/lib.rs index 70dadfc6a8..336d9c2b98 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -17,11 +17,13 @@ mod ops; mod ops_builtin; mod ops_builtin_v8; mod ops_metrics; +mod path; mod realm; mod resources; mod runtime; pub mod snapshot_util; mod source_map; +pub mod task; mod task_queue; // Re-exports @@ -77,7 +79,6 @@ pub use crate::module_specifier::resolve_url; pub use crate::module_specifier::resolve_url_or_path; pub use crate::module_specifier::ModuleResolutionError; pub use crate::module_specifier::ModuleSpecifier; -pub use crate::modules::ExtModuleLoader; pub use crate::modules::ExtModuleLoaderCb; pub use crate::modules::FsModuleLoader; pub use crate::modules::ModuleCode; @@ -89,11 +90,8 @@ pub use crate::modules::ModuleType; pub use crate::modules::NoopModuleLoader; pub use crate::modules::ResolutionKind; pub use crate::normalize_path::normalize_path; -pub use crate::ops::Op; -pub use crate::ops::OpAsyncFuture; pub use crate::ops::OpCall; pub use crate::ops::OpError; -pub use crate::ops::OpFn; pub use crate::ops::OpId; pub use crate::ops::OpResult; pub use crate::ops::OpState; @@ -104,6 +102,7 @@ pub use crate::ops_builtin::op_resources; pub use crate::ops_builtin::op_void_async; pub use crate::ops_builtin::op_void_sync; pub use crate::ops_metrics::OpsTracker; +pub use crate::path::strip_unc_prefix; pub use crate::realm::JsRealm; pub use crate::resources::AsyncResult; pub use crate::resources::Resource; @@ -114,6 +113,7 @@ pub use crate::runtime::CrossIsolateStore; pub use crate::runtime::GetErrorClassFn; pub use crate::runtime::JsErrorCreateFn; pub use crate::runtime::JsRuntime; +pub use crate::runtime::JsRuntimeForSnapshot; pub use crate::runtime::RuntimeOptions; pub use crate::runtime::SharedArrayBufferStore; pub use crate::runtime::Snapshot; @@ -135,6 +135,10 @@ pub mod _ops { pub use super::ops::to_op_result; pub use super::ops::OpCtx; pub use super::ops::OpResult; + pub use super::runtime::map_async_op1; + pub use 
super::runtime::map_async_op2; + pub use super::runtime::map_async_op3; + pub use super::runtime::map_async_op4; pub use super::runtime::queue_async_op; pub use super::runtime::queue_fast_async_op; pub use super::runtime::V8_WRAPPER_OBJECT_INDEX; diff --git a/core/modules.rs b/core/modules.rs index c63c4dd30b..4f1875ae59 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -9,10 +9,10 @@ use crate::module_specifier::ModuleSpecifier; use crate::resolve_import; use crate::resolve_url; use crate::snapshot_util::SnapshottedData; +use crate::Extension; use crate::JsRuntime; -use crate::OpState; +use anyhow::anyhow; use anyhow::Error; -use core::panic; use futures::future::FutureExt; use futures::stream::FuturesUnordered; use futures::stream::Stream; @@ -135,7 +135,7 @@ fn json_module_evaluation_steps<'a>( // SAFETY: `CallbackScope` can be safely constructed from `Local` let scope = &mut unsafe { v8::CallbackScope::new(context) }; let tc_scope = &mut v8::TryCatch::new(scope); - let module_map = JsRuntime::module_map(tc_scope); + let module_map = JsRuntime::module_map_from(tc_scope); let handle = v8::Global::::new(tc_scope, module); let value_handle = module_map @@ -339,7 +339,6 @@ pub trait ModuleLoader { /// It's not required to implement this method. 
fn prepare_load( &self, - _op_state: Rc>, _module_specifier: &ModuleSpecifier, _maybe_referrer: Option, _is_dyn_import: bool, @@ -379,87 +378,34 @@ impl ModuleLoader for NoopModuleLoader { } } -/// Helper function, that calls into `loader.resolve()`, but denies resolution -/// of `ext` scheme if we are running with a snapshot loaded and not -/// creating a snapshot -pub(crate) fn resolve_helper( - snapshot_loaded_and_not_snapshotting: bool, - loader: Rc, - specifier: &str, - referrer: &str, - kind: ResolutionKind, -) -> Result { - if snapshot_loaded_and_not_snapshotting && specifier.starts_with("ext:") { - return Err(generic_error( - "Cannot load extension module from external code", - )); - } - - loader.resolve(specifier, referrer, kind) -} - /// Function that can be passed to the `ExtModuleLoader` that allows to /// transpile sources before passing to V8. pub type ExtModuleLoaderCb = Box Result>; -pub struct ExtModuleLoader { - module_loader: Rc, - esm_sources: Vec, - used_esm_sources: RefCell>, - maybe_load_callback: Option, -} - -impl Default for ExtModuleLoader { - fn default() -> Self { - Self { - module_loader: Rc::new(NoopModuleLoader), - esm_sources: vec![], - used_esm_sources: RefCell::new(HashMap::default()), - maybe_load_callback: None, - } - } +pub(crate) struct ExtModuleLoader { + maybe_load_callback: Option>, + sources: RefCell>, + used_specifiers: RefCell>, } impl ExtModuleLoader { pub fn new( - module_loader: Option>, - esm_sources: Vec, - maybe_load_callback: Option, + extensions: &[Extension], + maybe_load_callback: Option>, ) -> Self { - let used_esm_sources: HashMap = esm_sources - .iter() - .map(|file_source| (file_source.specifier.to_string(), false)) - .collect(); - + let mut sources = HashMap::new(); + sources.extend( + extensions + .iter() + .flat_map(|e| e.get_esm_sources()) + .flatten() + .map(|s| (s.specifier.to_string(), s.clone())), + ); ExtModuleLoader { - module_loader: module_loader.unwrap_or_else(|| Rc::new(NoopModuleLoader)), - 
esm_sources, - used_esm_sources: RefCell::new(used_esm_sources), maybe_load_callback, - } - } -} - -impl Drop for ExtModuleLoader { - fn drop(&mut self) { - let used_esm_sources = self.used_esm_sources.get_mut(); - let unused_modules: Vec<_> = used_esm_sources - .iter() - .filter(|(_s, v)| !*v) - .map(|(s, _)| s) - .collect(); - - if !unused_modules.is_empty() { - let mut msg = - "Following modules were passed to ExtModuleLoader but never used:\n" - .to_string(); - for m in unused_modules { - msg.push_str(" - "); - msg.push_str(m); - msg.push('\n'); - } - panic!("{}", msg); + sources: RefCell::new(sources), + used_specifiers: Default::default(), } } } @@ -469,92 +415,70 @@ impl ModuleLoader for ExtModuleLoader { &self, specifier: &str, referrer: &str, - kind: ResolutionKind, + _kind: ResolutionKind, ) -> Result { - if let Ok(url_specifier) = ModuleSpecifier::parse(specifier) { - if url_specifier.scheme() == "ext" { - let referrer_specifier = ModuleSpecifier::parse(referrer).ok(); - if referrer == "." || referrer_specifier.unwrap().scheme() == "ext" { - return Ok(url_specifier); - } else { - return Err(generic_error( - "Cannot load extension module from external code", - )); - }; - } - } - - self.module_loader.resolve(specifier, referrer, kind) + Ok(resolve_import(specifier, referrer)?) 
} fn load( &self, - module_specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - is_dyn_import: bool, + specifier: &ModuleSpecifier, + _maybe_referrer: Option<&ModuleSpecifier>, + _is_dyn_import: bool, ) -> Pin> { - if module_specifier.scheme() != "ext" { - return self.module_loader.load( - module_specifier, - maybe_referrer, - is_dyn_import, - ); - } - - let specifier = module_specifier.to_string(); - let maybe_file_source = self - .esm_sources - .iter() - .find(|file_source| file_source.specifier == module_specifier.as_str()); - - if let Some(file_source) = maybe_file_source { - { - let mut used_esm_sources = self.used_esm_sources.borrow_mut(); - let used = used_esm_sources.get_mut(file_source.specifier).unwrap(); - *used = true; - } - - let result = if let Some(load_callback) = &self.maybe_load_callback { - load_callback(file_source) - } else { - file_source.load() - }; - - match result { - Ok(code) => { - let res = - ModuleSource::new(ModuleType::JavaScript, code, module_specifier); - return futures::future::ok(res).boxed_local(); - } - Err(err) => return futures::future::err(err).boxed_local(), + let sources = self.sources.borrow(); + let source = match sources.get(specifier.as_str()) { + Some(source) => source, + None => return futures::future::err(anyhow!("Specifier \"{}\" was not passed as an extension module and was not included in the snapshot.", specifier)).boxed_local(), + }; + self + .used_specifiers + .borrow_mut() + .insert(specifier.to_string()); + let result = if let Some(load_callback) = &self.maybe_load_callback { + load_callback(source) + } else { + source.load() + }; + match result { + Ok(code) => { + let res = ModuleSource::new(ModuleType::JavaScript, code, specifier); + return futures::future::ok(res).boxed_local(); } + Err(err) => return futures::future::err(err).boxed_local(), } - - async move { - Err(generic_error(format!( - "Cannot find extension module source for specifier {specifier}" - ))) - } - .boxed_local() } fn 
prepare_load( &self, - op_state: Rc>, - module_specifier: &ModuleSpecifier, - maybe_referrer: Option, - is_dyn_import: bool, + _specifier: &ModuleSpecifier, + _maybe_referrer: Option, + _is_dyn_import: bool, ) -> Pin>>> { - if module_specifier.scheme() == "ext" { - return async { Ok(()) }.boxed_local(); - } + async { Ok(()) }.boxed_local() + } +} - self.module_loader.prepare_load( - op_state, - module_specifier, - maybe_referrer, - is_dyn_import, - ) +impl Drop for ExtModuleLoader { + fn drop(&mut self) { + let sources = self.sources.get_mut(); + let used_specifiers = self.used_specifiers.get_mut(); + let unused_modules: Vec<_> = sources + .iter() + .filter(|(k, _)| !used_specifiers.contains(k.as_str())) + .collect(); + + if !unused_modules.is_empty() { + let mut msg = + "Following modules were passed to ExtModuleLoader but never used:\n" + .to_string(); + for m in unused_modules { + msg.push_str(" - "); + msg.push_str(m.0); + msg.push('\n'); + } + panic!("{}", msg); + } } } @@ -640,11 +564,9 @@ pub(crate) struct RecursiveModuleLoad { module_map_rc: Rc>, pending: FuturesUnordered>>, visited: HashSet, - // These three fields are copied from `module_map_rc`, but they are cloned + // The loader is copied from `module_map_rc`, but its reference is cloned // ahead of time to avoid already-borrowed errors. 
- op_state: Rc>, loader: Rc, - snapshot_loaded_and_not_snapshotting: bool, } impl RecursiveModuleLoad { @@ -686,7 +608,6 @@ impl RecursiveModuleLoad { module_map.next_load_id += 1; id }; - let op_state = module_map_rc.borrow().op_state.clone(); let loader = module_map_rc.borrow().loader.clone(); let asserted_module_type = match init { LoadInit::DynamicImport(_, _, module_type) => module_type, @@ -700,10 +621,6 @@ impl RecursiveModuleLoad { init, state: LoadState::Init, module_map_rc: module_map_rc.clone(), - snapshot_loaded_and_not_snapshotting: module_map_rc - .borrow() - .snapshot_loaded_and_not_snapshotting, - op_state, loader, pending: FuturesUnordered::new(), visited: HashSet::new(), @@ -731,60 +648,38 @@ impl RecursiveModuleLoad { fn resolve_root(&self) -> Result { match self.init { - LoadInit::Main(ref specifier) => resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::MainModule, - ), - LoadInit::Side(ref specifier) => resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::Import, - ), - LoadInit::DynamicImport(ref specifier, ref referrer, _) => { - resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - referrer, - ResolutionKind::DynamicImport, - ) + LoadInit::Main(ref specifier) => { + self + .loader + .resolve(specifier, ".", ResolutionKind::MainModule) } + LoadInit::Side(ref specifier) => { + self.loader.resolve(specifier, ".", ResolutionKind::Import) + } + LoadInit::DynamicImport(ref specifier, ref referrer, _) => self + .loader + .resolve(specifier, referrer, ResolutionKind::DynamicImport), } } async fn prepare(&self) -> Result<(), Error> { - let op_state = self.op_state.clone(); - let (module_specifier, maybe_referrer) = match self.init { LoadInit::Main(ref specifier) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - 
specifier, - ".", - ResolutionKind::MainModule, - )?; + let spec = + self + .loader + .resolve(specifier, ".", ResolutionKind::MainModule)?; (spec, None) } LoadInit::Side(ref specifier) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::Import, - )?; + let spec = + self + .loader + .resolve(specifier, ".", ResolutionKind::Import)?; (spec, None) } LoadInit::DynamicImport(ref specifier, ref referrer, _) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), + let spec = self.loader.resolve( specifier, referrer, ResolutionKind::DynamicImport, @@ -795,12 +690,7 @@ impl RecursiveModuleLoad { self .loader - .prepare_load( - op_state, - &module_specifier, - maybe_referrer, - self.is_dynamic_import(), - ) + .prepare_load(&module_specifier, maybe_referrer, self.is_dynamic_import()) .await } @@ -1094,7 +984,6 @@ pub(crate) struct ModuleMap { // Handling of futures for loading module sources pub loader: Rc, - op_state: Rc>, pub(crate) dynamic_import_map: HashMap>, pub(crate) preparing_dynamic_imports: @@ -1105,8 +994,6 @@ pub(crate) struct ModuleMap { // This store is used temporarly, to forward parsed JSON // value from `new_json_module` to `json_module_evaluation_steps` json_value_store: HashMap, v8::Global>, - - pub(crate) snapshot_loaded_and_not_snapshotting: bool, } impl ModuleMap { @@ -1128,6 +1015,29 @@ impl ModuleMap { output } + #[cfg(debug_assertions)] + pub(crate) fn assert_all_modules_evaluated( + &self, + scope: &mut v8::HandleScope, + ) { + let mut not_evaluated = vec![]; + + for (i, handle) in self.handles.iter().enumerate() { + let module = v8::Local::new(scope, handle); + if !matches!(module.get_status(), v8::ModuleStatus::Evaluated) { + not_evaluated.push(self.info[i].name.as_str().to_string()); + } + } + + if !not_evaluated.is_empty() { + let mut msg = "Following modules were not evaluated; make sure they are imported from 
other code:\n".to_string(); + for m in not_evaluated { + msg.push_str(&format!(" - {}\n", m)); + } + panic!("{}", msg); + } + } + pub fn serialize_for_snapshotting( &self, scope: &mut v8::HandleScope, @@ -1380,11 +1290,7 @@ impl ModuleMap { self.handles = snapshotted_data.module_handles; } - pub(crate) fn new( - loader: Rc, - op_state: Rc>, - snapshot_loaded_and_not_snapshotting: bool, - ) -> ModuleMap { + pub(crate) fn new(loader: Rc) -> ModuleMap { Self { handles: vec![], info: vec![], @@ -1392,18 +1298,16 @@ impl ModuleMap { by_name_json: HashMap::new(), next_load_id: 1, loader, - op_state, dynamic_import_map: HashMap::new(), preparing_dynamic_imports: FuturesUnordered::new(), pending_dynamic_imports: FuturesUnordered::new(), json_value_store: HashMap::new(), - snapshot_loaded_and_not_snapshotting, } } /// Get module id, following all aliases in case of module specifier /// that had been redirected. - fn get_id( + pub(crate) fn get_id( &self, name: impl AsRef, asserted_module_type: AssertedModuleType, @@ -1526,9 +1430,7 @@ impl ModuleMap { return Err(ModuleError::Exception(exception)); } - let module_specifier = match resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), + let module_specifier = match self.loader.resolve( &import_specifier, name.as_ref(), if is_dynamic_import { @@ -1572,6 +1474,29 @@ impl ModuleMap { Ok(id) } + pub(crate) fn clear(&mut self) { + *self = Self::new(self.loader.clone()) + } + + pub(crate) fn get_handle_by_name( + &self, + name: impl AsRef, + ) -> Option> { + let id = self + .get_id(name.as_ref(), AssertedModuleType::JavaScriptOrWasm) + .or_else(|| self.get_id(name.as_ref(), AssertedModuleType::Json))?; + self.get_handle(id) + } + + pub(crate) fn inject_handle( + &mut self, + name: ModuleName, + module_type: ModuleType, + handle: v8::Global, + ) { + self.create_module_info(name, module_type, handle, false, vec![]); + } + fn create_module_info( &mut self, name: FastString, @@ -1717,20 +1642,9 @@ impl 
ModuleMap { .dynamic_import_map .insert(load.id, resolver_handle); - let (loader, snapshot_loaded_and_not_snapshotting) = { - let module_map = module_map_rc.borrow(); - ( - module_map.loader.clone(), - module_map.snapshot_loaded_and_not_snapshotting, - ) - }; - let resolve_result = resolve_helper( - snapshot_loaded_and_not_snapshotting, - loader, - specifier, - referrer, - ResolutionKind::DynamicImport, - ); + let loader = module_map_rc.borrow().loader.clone(); + let resolve_result = + loader.resolve(specifier, referrer, ResolutionKind::DynamicImport); let fut = match resolve_result { Ok(module_specifier) => { if module_map_rc @@ -1764,14 +1678,10 @@ impl ModuleMap { referrer: &str, import_assertions: HashMap, ) -> Option> { - let resolved_specifier = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - referrer, - ResolutionKind::Import, - ) - .expect("Module should have been already resolved"); + let resolved_specifier = self + .loader + .resolve(specifier, referrer, ResolutionKind::Import) + .expect("Module should have been already resolved"); let module_type = get_asserted_module_type_from_assertions(&import_assertions); @@ -1786,14 +1696,22 @@ impl ModuleMap { } } +impl Default for ModuleMap { + fn default() -> Self { + Self::new(Rc::new(NoopModuleLoader)) + } +} + #[cfg(test)] mod tests { use super::*; use crate::ascii_str; use crate::JsRuntime; + use crate::JsRuntimeForSnapshot; use crate::RuntimeOptions; use crate::Snapshot; use deno_ops::op; + use futures::future::poll_fn; use futures::future::FutureExt; use parking_lot::Mutex; use std::fmt; @@ -1808,12 +1726,6 @@ mod tests { pub use crate::*; } - // TODO(ry) Sadly FuturesUnordered requires the current task to be set. So - // even though we are only using poll() in these tests and not Tokio, we must - // nevertheless run it in the tokio executor. Ideally run_in_task can be - // removed in the future. 
- use crate::runtime::tests::run_in_task; - #[derive(Default)] struct MockLoader { pub loads: Arc>>, @@ -1961,7 +1873,7 @@ import "/a.js"; } if inner.url == "file:///slow.js" && inner.counter < 2 { // TODO(ry) Hopefully in the future we can remove current task - // notification. See comment above run_in_task. + // notification. cx.waker().wake_by_ref(); return Poll::Pending; } @@ -2040,7 +1952,7 @@ import "/a.js"; ] ); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map(); let modules = module_map_rc.borrow(); assert_eq!( @@ -2152,7 +2064,7 @@ import "/a.js"; assert_eq!(DISPATCH_COUNT.load(Ordering::Relaxed), 0); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map().clone(); let (mod_a, mod_b) = { let scope = &mut runtime.handle_scope(); @@ -2264,7 +2176,7 @@ import "/a.js"; ) .unwrap(); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map().clone(); let (mod_a, mod_b) = { let scope = &mut runtime.handle_scope(); @@ -2317,8 +2229,8 @@ import "/a.js"; futures::executor::block_on(receiver).unwrap().unwrap(); } - #[test] - fn dyn_import_err() { + #[tokio::test] + async fn dyn_import_err() { #[derive(Clone, Default)] struct DynImportErrLoader { pub count: Arc, @@ -2356,7 +2268,7 @@ import "/a.js"; }); // Test an erroneous dynamic import where the specified module isn't found. 
- run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "file:///dyn_import2.js", @@ -2374,7 +2286,9 @@ import "/a.js"; unreachable!(); } assert_eq!(count.load(Ordering::Relaxed), 4); + Poll::Ready(()) }) + .await; } #[derive(Clone, Default)] @@ -2413,7 +2327,6 @@ import "/a.js"; fn prepare_load( &self, - _op_state: Rc>, _module_specifier: &ModuleSpecifier, _maybe_referrer: Option, _is_dyn_import: bool, @@ -2423,8 +2336,8 @@ import "/a.js"; } } - #[test] - fn dyn_import_ok() { + #[tokio::test] + async fn dyn_import_ok() { let loader = Rc::new(DynImportOkLoader::default()); let prepare_load_count = loader.prepare_load_count.clone(); let resolve_count = loader.resolve_count.clone(); @@ -2433,7 +2346,7 @@ import "/a.js"; module_loader: Some(loader), ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { // Dynamically import mod_b runtime .execute_script_static( @@ -2467,11 +2380,13 @@ import "/a.js"; )); assert_eq!(resolve_count.load(Ordering::Relaxed), 7); assert_eq!(load_count.load(Ordering::Relaxed), 1); + Poll::Ready(()) }) + .await; } - #[test] - fn dyn_import_borrow_mut_error() { + #[tokio::test] + async fn dyn_import_borrow_mut_error() { // https://github.com/denoland/deno/issues/6054 let loader = Rc::new(DynImportOkLoader::default()); let prepare_load_count = loader.prepare_load_count.clone(); @@ -2480,7 +2395,7 @@ import "/a.js"; ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "file:///dyn_import3.js", @@ -2499,7 +2414,9 @@ import "/a.js"; assert_eq!(prepare_load_count.load(Ordering::Relaxed), 1); // Second poll triggers error let _ = runtime.poll_event_loop(cx, false); + Poll::Ready(()) }) + .await; } // Regression test for https://github.com/denoland/deno/issues/3736. 
@@ -2592,7 +2509,7 @@ import "/a.js"; ] ); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map(); let modules = module_map_rc.borrow(); assert_eq!( @@ -2672,7 +2589,7 @@ import "/a.js"; ] ); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map(); let modules = module_map_rc.borrow(); assert_eq!( @@ -2725,8 +2642,8 @@ import "/a.js"; futures::executor::block_on(fut); } - #[test] - fn slow_never_ready_modules() { + #[tokio::test] + async fn slow_never_ready_modules() { let loader = MockLoader::new(); let loads = loader.loads.clone(); let mut runtime = JsRuntime::new(RuntimeOptions { @@ -2734,7 +2651,7 @@ import "/a.js"; ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { let spec = resolve_url("file:///main.js").unwrap(); let mut recursive_load = runtime.load_main_module(&spec, None).boxed_local(); @@ -2748,8 +2665,7 @@ import "/a.js"; // "file:///never_ready.js", // "file:///slow.js" // But due to current task notification in DelayedSourceCodeFuture they - // all get loaded in a single poll. Also see the comment above - // run_in_task. + // all get loaded in a single poll. 
for _ in 0..10 { let result = recursive_load.poll_unpin(cx); @@ -2768,30 +2684,26 @@ import "/a.js"; ] ); } + Poll::Ready(()) }) + .await; } - #[test] - fn loader_disappears_after_error() { + #[tokio::test] + async fn loader_disappears_after_error() { let loader = MockLoader::new(); let mut runtime = JsRuntime::new(RuntimeOptions { module_loader: Some(loader), ..Default::default() }); - run_in_task(move |cx| { - let spec = resolve_url("file:///bad_import.js").unwrap(); - let mut load_fut = runtime.load_main_module(&spec, None).boxed_local(); - let result = load_fut.poll_unpin(cx); - if let Poll::Ready(Err(err)) = result { - assert_eq!( - err.downcast_ref::().unwrap(), - &MockError::ResolveErr - ); - } else { - unreachable!(); - } - }) + let spec = resolve_url("file:///bad_import.js").unwrap(); + let result = runtime.load_main_module(&spec, None).await; + let err = result.unwrap_err(); + assert_eq!( + err.downcast_ref::().unwrap(), + &MockError::ResolveErr + ); } #[test] @@ -2832,7 +2744,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error(); vec!["file:///b.js", "file:///c.js", "file:///d.js"] ); - let module_map_rc = JsRuntime::module_map(runtime.v8_isolate()); + let module_map_rc = runtime.module_map(); let modules = module_map_rc.borrow(); assert_eq!( @@ -2965,11 +2877,13 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error(); ); let loader = MockLoader::new(); - let mut runtime = JsRuntime::new(RuntimeOptions { - module_loader: Some(loader), - will_snapshot: true, - ..Default::default() - }); + let mut runtime = JsRuntimeForSnapshot::new( + RuntimeOptions { + module_loader: Some(loader), + ..Default::default() + }, + Default::default(), + ); // In default resolution code should be empty. // Instead we explicitly pass in our own code. // The behavior should be very similar to /a.js. 
@@ -3007,11 +2921,13 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error(); ); let loader = MockLoader::new(); - let mut runtime = JsRuntime::new(RuntimeOptions { - module_loader: Some(loader), - will_snapshot: true, - ..Default::default() - }); + let mut runtime = JsRuntimeForSnapshot::new( + RuntimeOptions { + module_loader: Some(loader), + ..Default::default() + }, + Default::default(), + ); // In default resolution code should be empty. // Instead we explicitly pass in our own code. // The behavior should be very similar to /a.js. @@ -3040,51 +2956,4 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error(); ) .unwrap(); } - - #[test] - fn ext_module_loader() { - let loader = ExtModuleLoader::default(); - assert!(loader - .resolve("ext:foo", "ext:bar", ResolutionKind::Import) - .is_ok()); - assert_eq!( - loader - .resolve("ext:foo", "file://bar", ResolutionKind::Import) - .err() - .map(|e| e.to_string()), - Some("Cannot load extension module from external code".to_string()) - ); - assert_eq!( - loader - .resolve("file://foo", "file://bar", ResolutionKind::Import) - .err() - .map(|e| e.to_string()), - Some( - "Module loading is not supported; attempted to resolve: \"file://foo\" from \"file://bar\"" - .to_string() - ) - ); - assert_eq!( - loader - .resolve("file://foo", "ext:bar", ResolutionKind::Import) - .err() - .map(|e| e.to_string()), - Some( - "Module loading is not supported; attempted to resolve: \"file://foo\" from \"ext:bar\"" - .to_string() - ) - ); - assert_eq!( - resolve_helper( - true, - Rc::new(loader), - "ext:core.js", - "file://bar", - ResolutionKind::Import, - ) - .err() - .map(|e| e.to_string()), - Some("Cannot load extension module from external code".to_string()) - ); - } } diff --git a/core/ops.rs b/core/ops.rs index cceeb56547..b766eb60d2 100644 --- a/core/ops.rs +++ b/core/ops.rs @@ -2,18 +2,18 @@ use crate::error::AnyError; use crate::gotham_state::GothamState; +use crate::realm::ContextState; use 
crate::resources::ResourceTable; use crate::runtime::GetErrorClassFn; use crate::runtime::JsRuntimeState; use crate::OpDecl; use crate::OpsTracker; use anyhow::Error; -use futures::future::maybe_done; -use futures::future::FusedFuture; use futures::future::MaybeDone; -use futures::ready; -use futures::task::noop_waker; +use futures::task::AtomicWaker; use futures::Future; +use futures::FutureExt; +use pin_project::pin_project; use serde::Serialize; use std::cell::RefCell; use std::ops::Deref; @@ -22,91 +22,74 @@ use std::pin::Pin; use std::ptr::NonNull; use std::rc::Rc; use std::rc::Weak; -use std::task::Context; -use std::task::Poll; +use std::sync::Arc; use v8::fast_api::CFunctionInfo; use v8::fast_api::CTypeInfo; -/// Wrapper around a Future, which causes that Future to be polled immediately. -/// -/// Background: ops are stored in a `FuturesUnordered` structure which polls -/// them, but without the `OpCall` wrapper this doesn't happen until the next -/// turn of the event loop, which is too late for certain ops. -pub struct OpCall(MaybeDone>>>); +pub type PromiseId = i32; +pub type OpId = u16; -pub enum EagerPollResult { - Ready(T), - Pending(OpCall), +#[pin_project] +pub struct OpCall { + promise_id: PromiseId, + op_id: OpId, + /// Future is not necessarily Unpin, so we need to pin_project. + #[pin] + fut: MaybeDone>>>, } -impl OpCall { - /// Wraps a future, and polls the inner future immediately. - /// This should be the default choice for ops. 
- pub fn eager(fut: impl Future + 'static) -> EagerPollResult { - let boxed = Box::pin(fut) as Pin>>; - let mut inner = maybe_done(boxed); - let waker = noop_waker(); - let mut cx = Context::from_waker(&waker); - let mut pinned = Pin::new(&mut inner); - let poll = pinned.as_mut().poll(&mut cx); - match poll { - Poll::Ready(_) => EagerPollResult::Ready(pinned.take_output().unwrap()), - _ => EagerPollResult::Pending(Self(inner)), - } - } - +impl OpCall { /// Wraps a future; the inner future is polled the usual way (lazily). - pub fn lazy(fut: impl Future + 'static) -> Self { - let boxed = Box::pin(fut) as Pin>>; - let inner = maybe_done(boxed); - Self(inner) + pub fn pending( + op_ctx: &OpCtx, + promise_id: PromiseId, + fut: Pin + 'static>>, + ) -> Self { + Self { + op_id: op_ctx.id, + promise_id, + fut: MaybeDone::Future(fut), + } } /// Create a future by specifying its output. This is basically the same as /// `async { value }` or `futures::future::ready(value)`. - pub fn ready(value: T) -> Self { - Self(MaybeDone::Done(value)) + pub fn ready(op_ctx: &OpCtx, promise_id: PromiseId, value: OpResult) -> Self { + Self { + op_id: op_ctx.id, + promise_id, + fut: MaybeDone::Done(value), + } } } -impl Future for OpCall { - type Output = T; +impl Future for OpCall { + type Output = (PromiseId, OpId, OpResult); fn poll( self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll { - // TODO(piscisaureus): safety comment - #[allow(clippy::undocumented_unsafe_blocks)] - let inner = unsafe { &mut self.get_unchecked_mut().0 }; - let mut pinned = Pin::new(inner); - ready!(pinned.as_mut().poll(cx)); - Poll::Ready(pinned.as_mut().take_output().unwrap()) + let promise_id = self.promise_id; + let op_id = self.op_id; + let fut = &mut *self.project().fut; + match fut { + MaybeDone::Done(_) => { + // Let's avoid using take_output as it keeps our Pin::box + let res = std::mem::replace(fut, MaybeDone::Gone); + let MaybeDone::Done(res) = res + else { + 
unreachable!() + }; + std::task::Poll::Ready(res) + } + MaybeDone::Future(f) => f.poll_unpin(cx), + MaybeDone::Gone => std::task::Poll::Pending, + } + .map(move |res| (promise_id, op_id, res)) } } -impl FusedFuture for OpCall -where - F: Future, -{ - fn is_terminated(&self) -> bool { - self.0.is_terminated() - } -} - -pub type RealmIdx = usize; -pub type PromiseId = i32; -pub type OpAsyncFuture = OpCall<(PromiseId, OpId, OpResult)>; -pub type OpFn = - fn(&mut v8::HandleScope, v8::FunctionCallbackArguments, v8::ReturnValue); -pub type OpId = usize; - -pub enum Op { - Sync(OpResult), - Async(OpAsyncFuture), - NotFound, -} - pub enum OpResult { Ok(serde_v8::SerializablePkg), Err(OpError), @@ -160,14 +143,13 @@ pub struct OpCtx { pub decl: Rc, pub fast_fn_c_info: Option>, pub runtime_state: Weak>, - // Index of the current realm into `JsRuntimeState::known_realms`. - pub realm_idx: RealmIdx, + pub(crate) context_state: Rc>, } impl OpCtx { - pub fn new( + pub(crate) fn new( id: OpId, - realm_idx: RealmIdx, + context_state: Rc>, decl: Rc, state: Rc>, runtime_state: Weak>, @@ -191,7 +173,7 @@ impl OpCtx { state, runtime_state, decl, - realm_idx, + context_state, fast_fn_c_info, } } @@ -203,7 +185,8 @@ pub struct OpState { pub get_error_class_fn: GetErrorClassFn, pub tracker: OpsTracker, pub last_fast_op_error: Option, - gotham_state: GothamState, + pub(crate) gotham_state: GothamState, + pub waker: Arc, } impl OpState { @@ -214,8 +197,15 @@ impl OpState { gotham_state: Default::default(), last_fast_op_error: None, tracker: OpsTracker::new(ops_count), + waker: Arc::new(AtomicWaker::new()), } } + + /// Clear all user-provided resources and state. 
+ pub(crate) fn clear(&mut self) { + std::mem::take(&mut self.gotham_state); + std::mem::take(&mut self.resource_table); + } } impl Deref for OpState { diff --git a/core/ops_builtin.rs b/core/ops_builtin.rs index ea85b4f00c..70f478acd9 100644 --- a/core/ops_builtin.rs +++ b/core/ops_builtin.rs @@ -27,9 +27,12 @@ crate::extension!( op_wasm_streaming_feed, op_wasm_streaming_set_url, op_void_sync, + op_error_async, + op_error_async_deferred, op_void_async, op_void_async_deferred, op_add, + op_add_async, // TODO(@AaronO): track IO metrics for builtin streams op_read, op_read_all, @@ -58,6 +61,8 @@ crate::extension!( ops_builtin_v8::op_set_promise_hooks, ops_builtin_v8::op_get_promise_details, ops_builtin_v8::op_get_proxy_details, + ops_builtin_v8::op_get_non_index_property_names, + ops_builtin_v8::op_get_constructor_name, ops_builtin_v8::op_memory_usage, ops_builtin_v8::op_set_wasm_streaming_callback, ops_builtin_v8::op_abort_wasm_streaming, @@ -94,12 +99,27 @@ fn op_add(a: i32, b: i32) -> i32 { a + b } +#[op] +pub async fn op_add_async(a: i32, b: i32) -> i32 { + a + b +} + #[op(fast)] pub fn op_void_sync() {} #[op] pub async fn op_void_async() {} +#[op] +pub async fn op_error_async() -> Result<(), Error> { + Err(Error::msg("error")) +} + +#[op(deferred)] +pub async fn op_error_async_deferred() -> Result<(), Error> { + Err(Error::msg("error")) +} + #[op(deferred)] pub async fn op_void_async_deferred() {} diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index 6e8b2efda0..8416546cbc 100644 --- a/core/ops_builtin_v8.rs +++ b/core/ops_builtin_v8.rs @@ -72,14 +72,14 @@ fn op_run_microtasks(scope: &mut v8::HandleScope) { #[op(v8)] fn op_has_tick_scheduled(scope: &mut v8::HandleScope) -> bool { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow(); state.has_tick_scheduled } #[op(v8)] fn op_set_has_tick_scheduled(scope: &mut v8::HandleScope, v: bool) { - let state_rc = JsRuntime::state(scope); + let 
state_rc = JsRuntime::state_from(scope); state_rc.borrow_mut().has_tick_scheduled = v; } @@ -211,6 +211,7 @@ fn op_decode<'a>( struct SerializeDeserialize<'a> { host_objects: Option>, error_callback: Option>, + for_storage: bool, } impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { @@ -238,7 +239,10 @@ impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, shared_array_buffer: v8::Local<'s, v8::SharedArrayBuffer>, ) -> Option { - let state_rc = JsRuntime::state(scope); + if self.for_storage { + return None; + } + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); if let Some(shared_array_buffer_store) = &state.shared_array_buffer_store { let backing_store = shared_array_buffer.get_backing_store(); @@ -254,7 +258,12 @@ impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'_>, module: v8::Local, ) -> Option { - let state_rc = JsRuntime::state(scope); + if self.for_storage { + let message = v8::String::new(scope, "Wasm modules cannot be stored")?; + self.throw_data_clone_error(scope, message); + return None; + } + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); if let Some(compiled_wasm_module_store) = &state.compiled_wasm_module_store { @@ -293,7 +302,10 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, transfer_id: u32, ) -> Option> { - let state_rc = JsRuntime::state(scope); + if self.for_storage { + return None; + } + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); if let Some(shared_array_buffer_store) = &state.shared_array_buffer_store { let backing_store = shared_array_buffer_store.take(transfer_id)?; @@ -310,7 +322,10 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, clone_id: u32, ) -> Option> { - let state_rc = JsRuntime::state(scope); + if self.for_storage { + return 
None; + } + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); if let Some(compiled_wasm_module_store) = &state.compiled_wasm_module_store { @@ -337,7 +352,7 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { } } - let message = + let message: v8::Local = v8::String::new(scope, "Failed to deserialize host object").unwrap(); let error = v8::Exception::error(scope, message); scope.throw_exception(error); @@ -350,6 +365,8 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { struct SerializeDeserializeOptions<'a> { host_objects: Option>, transferred_array_buffers: Option>, + #[serde(default)] + for_storage: bool, } #[op(v8)] @@ -385,13 +402,14 @@ fn op_serialize( let serialize_deserialize = Box::new(SerializeDeserialize { host_objects, error_callback, + for_storage: options.for_storage, }); let mut value_serializer = v8::ValueSerializer::new(scope, serialize_deserialize); value_serializer.write_header(); if let Some(transferred_array_buffers) = transferred_array_buffers { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); for index in 0..transferred_array_buffers.length() { let i = v8::Number::new(scope, index as f64).into(); @@ -464,6 +482,7 @@ fn op_deserialize<'a>( let serialize_deserialize = Box::new(SerializeDeserialize { host_objects, error_callback: None, + for_storage: options.for_storage, }); let mut value_deserializer = v8::ValueDeserializer::new(scope, serialize_deserialize, &zero_copy); @@ -475,7 +494,7 @@ fn op_deserialize<'a>( } if let Some(transferred_array_buffers) = transferred_array_buffers { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow_mut(); if let Some(shared_array_buffer_store) = &state.shared_array_buffer_store { for i in 0..transferred_array_buffers.length() { @@ -595,6 +614,66 @@ fn op_get_proxy_details<'a>( Some((target.into(), 
handler.into())) } +#[op(v8)] +fn op_get_non_index_property_names<'a>( + scope: &mut v8::HandleScope<'a>, + obj: serde_v8::Value<'a>, + filter: u32, +) -> Option> { + let obj = match v8::Local::::try_from(obj.v8_value) { + Ok(proxy) => proxy, + Err(_) => return None, + }; + + let mut property_filter = v8::PropertyFilter::ALL_PROPERTIES; + if filter & 1 == 1 { + property_filter = property_filter | v8::PropertyFilter::ONLY_WRITABLE + } + if filter & 2 == 2 { + property_filter = property_filter | v8::PropertyFilter::ONLY_ENUMERABLE + } + if filter & 4 == 4 { + property_filter = property_filter | v8::PropertyFilter::ONLY_CONFIGURABLE + } + if filter & 8 == 8 { + property_filter = property_filter | v8::PropertyFilter::SKIP_STRINGS + } + if filter & 16 == 16 { + property_filter = property_filter | v8::PropertyFilter::SKIP_SYMBOLS + } + + let maybe_names = obj.get_property_names( + scope, + v8::GetPropertyNamesArgs { + mode: v8::KeyCollectionMode::OwnOnly, + property_filter, + index_filter: v8::IndexFilter::SkipIndices, + ..Default::default() + }, + ); + + if let Some(names) = maybe_names { + let names_val: v8::Local = names.into(); + Some(names_val.into()) + } else { + None + } +} + +#[op(v8)] +fn op_get_constructor_name<'a>( + scope: &mut v8::HandleScope<'a>, + obj: serde_v8::Value<'a>, +) -> Option { + let obj = match v8::Local::::try_from(obj.v8_value) { + Ok(proxy) => proxy, + Err(_) => return None, + }; + + let name = obj.get_constructor_name().to_rust_string_lossy(scope); + Some(name) +} + // HeapStats stores values from a isolate.get_heap_statistics() call #[derive(Serialize)] #[serde(rename_all = "camelCase")] @@ -645,7 +724,7 @@ fn op_set_wasm_streaming_callback( .as_ref() .unwrap() .clone(); - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let streaming_rid = state_rc .borrow() .op_state @@ -672,7 +751,7 @@ fn op_abort_wasm_streaming( error: serde_v8::Value, ) -> Result<(), Error> { let wasm_streaming = { - let state_rc = 
JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let state = state_rc.borrow(); let wsr = state .op_state @@ -711,24 +790,18 @@ fn op_dispatch_exception( scope: &mut v8::HandleScope, exception: serde_v8::Value, ) { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let mut state = state_rc.borrow_mut(); - state - .dispatched_exceptions - .push_front(v8::Global::new(scope, exception.v8_value)); - // Only terminate execution if there are no inspector sessions. - if state.inspector.is_none() { - scope.terminate_execution(); - return; - } - - // FIXME(bartlomieju): I'm not sure if this assumption is valid... Maybe when - // inspector is polling on pause? - if state.inspector().try_borrow().is_ok() { - scope.terminate_execution(); - } else { - // If the inspector is borrowed at this time, assume an inspector is active. + if let Some(inspector) = &state.inspector { + let inspector = inspector.borrow(); + inspector.exception_thrown(scope, exception.v8_value, false); + // This indicates that the op is being called from a REPL. Skip termination. 
+ if inspector.is_dispatching_message() { + return; + } } + state.dispatched_exception = Some(v8::Global::new(scope, exception.v8_value)); + scope.terminate_execution(); } #[op(v8)] @@ -755,7 +828,7 @@ fn op_apply_source_map( scope: &mut v8::HandleScope, location: Location, ) -> Result { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let (getter, cache) = { let state = state_rc.borrow(); ( @@ -821,7 +894,7 @@ fn op_store_pending_promise_rejection<'a>( let error_global = v8::Global::new(scope, reason.v8_value); context_state .pending_promise_rejections - .insert(promise_global, error_global); + .push_back((promise_global, error_global)); } #[op(v8)] @@ -836,7 +909,7 @@ fn op_remove_pending_promise_rejection<'a>( let promise_global = v8::Global::new(scope, promise_value); context_state .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } #[op(v8)] @@ -851,7 +924,8 @@ fn op_has_pending_promise_rejection<'a>( let promise_global = v8::Global::new(scope, promise_value); context_state .pending_promise_rejections - .contains_key(&promise_global) + .iter() + .any(|(key, _)| key == &promise_global) } #[op(v8)] diff --git a/core/ops_metrics.rs b/core/ops_metrics.rs index c0b8abb519..b25368bd01 100644 --- a/core/ops_metrics.rs +++ b/core/ops_metrics.rs @@ -63,7 +63,7 @@ impl OpsTracker { #[inline] fn metrics_mut(&self, id: OpId) -> RefMut { - RefMut::map(self.ops.borrow_mut(), |ops| &mut ops[id]) + RefMut::map(self.ops.borrow_mut(), |ops| &mut ops[id as usize]) } #[inline] diff --git a/core/path.rs b/core/path.rs new file mode 100644 index 0000000000..fd8b1a9b64 --- /dev/null +++ b/core/path.rs @@ -0,0 +1,91 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use std::path::PathBuf; + +#[cfg(not(windows))] +#[inline] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + path +} + +/// Strips the unc prefix (ex. \\?\) from Windows paths. 
+#[cfg(windows)] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + use std::path::Component; + use std::path::Prefix; + + let mut components = path.components(); + match components.next() { + Some(Component::Prefix(prefix)) => { + match prefix.kind() { + // \\?\device + Prefix::Verbatim(device) => { + let mut path = PathBuf::new(); + path.push(format!(r"\\{}\", device.to_string_lossy())); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + // \\?\c:\path + Prefix::VerbatimDisk(_) => { + let mut path = PathBuf::new(); + path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); + path.extend(components); + path + } + // \\?\UNC\hostname\share_name\path + Prefix::VerbatimUNC(hostname, share_name) => { + let mut path = PathBuf::new(); + path.push(format!( + r"\\{}\{}\", + hostname.to_string_lossy(), + share_name.to_string_lossy() + )); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + _ => path, + } + } + _ => path, + } +} + +#[cfg(test)] +mod test { + #[cfg(windows)] + #[test] + fn test_strip_unc_prefix() { + use std::path::PathBuf; + + run_test(r"C:\", r"C:\"); + run_test(r"C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\?\C:\", r"C:\"); + run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\.\C:\", r"\\.\C:\"); + run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); + + run_test(r"\\?\UNC\localhost\", r"\\localhost"); + run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); + run_test( + r"\\?\UNC\localhost\c$\Windows\file.txt", + r"\\localhost\c$\Windows\file.txt", + ); + run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); + + run_test(r"\\?\server1", r"\\server1"); + run_test(r"\\?\server1\e$\", r"\\server1\e$\"); + run_test( + r"\\?\server1\e$\test\file.txt", + r"\\server1\e$\test\file.txt", + ); + + fn run_test(input: &str, expected: &str) { + assert_eq!( + super::strip_unc_prefix(PathBuf::from(input)), + PathBuf::from(expected) + ); + } 
+ } +} diff --git a/core/realm.rs b/core/realm.rs index 8e2d932b5b..d18f41e662 100644 --- a/core/realm.rs +++ b/core/realm.rs @@ -4,22 +4,28 @@ use crate::bindings; use crate::modules::ModuleCode; use crate::ops::OpCtx; use crate::runtime::exception_to_err_result; +use crate::runtime::JsRuntimeState; +use crate::task::MaskResultAsSend; +use crate::JsRuntime; +use crate::OpId; +use crate::OpResult; +use crate::PromiseId; use anyhow::Error; use std::cell::RefCell; -use std::collections::HashMap; use std::collections::HashSet; +use std::collections::VecDeque; use std::hash::BuildHasherDefault; use std::hash::Hasher; -use std::marker::PhantomData; use std::option::Option; use std::rc::Rc; +use tokio::task::JoinSet; use v8::HandleScope; use v8::Local; // Hasher used for `unrefed_ops`. Since these are rolling i32, there's no // need to actually hash them. #[derive(Default)] -pub(crate) struct IdentityHasher(u64, PhantomData); +pub(crate) struct IdentityHasher(u64); impl Hasher for IdentityHasher { fn write_i32(&mut self, i: i32) { @@ -43,11 +49,14 @@ pub(crate) struct ContextState { pub(crate) js_format_exception_cb: Option>>, pub(crate) js_wasm_streaming_cb: Option>>, pub(crate) pending_promise_rejections: - HashMap, v8::Global>, + VecDeque<(v8::Global, v8::Global)>, pub(crate) unrefed_ops: HashSet>, + pub(crate) pending_ops: + JoinSet>, // We don't explicitly re-read this prop but need the slice to live alongside // the context pub(crate) op_ctxs: Box<[OpCtx]>, + pub(crate) isolate: Option<*mut v8::OwnedIsolate>, } /// A representation of a JavaScript realm tied to a [`JsRuntime`], that allows @@ -95,28 +104,110 @@ pub(crate) struct ContextState { /// keep the underlying V8 context alive even if it would have otherwise been /// garbage collected. 
#[derive(Clone)] -pub struct JsRealm(Rc>); -impl JsRealm { - pub fn new(context: v8::Global) -> Self { - JsRealm(Rc::new(context)) +#[repr(transparent)] +pub struct JsRealm(pub(crate) JsRealmInner); + +#[derive(Clone)] +pub(crate) struct JsRealmInner { + context_state: Rc>, + context: Rc>, + runtime_state: Rc>, + is_global: bool, +} + +impl JsRealmInner { + pub(crate) fn new( + context_state: Rc>, + context: v8::Global, + runtime_state: Rc>, + is_global: bool, + ) -> Self { + Self { + context_state, + context: context.into(), + runtime_state, + is_global, + } + } + + pub fn num_pending_ops(&self) -> usize { + self.context_state.borrow().pending_ops.len() + } + + pub fn num_unrefed_ops(&self) -> usize { + self.context_state.borrow().unrefed_ops.len() } #[inline(always)] pub fn context(&self) -> &v8::Global { - &self.0 + &self.context } #[inline(always)] - pub(crate) fn state( + pub(crate) fn state(&self) -> Rc> { + self.context_state.clone() + } + + /// For info on the [`v8::Isolate`] parameter, check [`JsRealm#panics`]. 
+ #[inline(always)] + pub fn handle_scope<'s>( &self, - isolate: &mut v8::Isolate, - ) -> Rc> { - self - .context() - .open(isolate) - .get_slot::>>(isolate) - .unwrap() - .clone() + isolate: &'s mut v8::Isolate, + ) -> v8::HandleScope<'s> { + v8::HandleScope::with_context(isolate, &*self.context) + } + + pub(crate) fn check_promise_rejections( + &self, + scope: &mut v8::HandleScope, + ) -> Result<(), Error> { + let Some((_, handle)) = self.context_state.borrow_mut().pending_promise_rejections.pop_front() else { + return Ok(()); + }; + + let exception = v8::Local::new(scope, handle); + let state_rc = JsRuntime::state_from(scope); + let state = state_rc.borrow(); + if let Some(inspector) = &state.inspector { + let inspector = inspector.borrow(); + inspector.exception_thrown(scope, exception, true); + if inspector.has_blocking_sessions() { + return Ok(()); + } + } + exception_to_err_result(scope, exception, true) + } + + pub(crate) fn is_same(&self, other: &Rc>) -> bool { + Rc::ptr_eq(&self.context, other) + } + + pub fn destroy(self) { + let state = self.state(); + let raw_ptr = self.state().borrow().isolate.unwrap(); + // SAFETY: We know the isolate outlives the realm + let isolate = unsafe { raw_ptr.as_mut().unwrap() }; + let mut realm_state = state.borrow_mut(); + // These globals will prevent snapshots from completing, take them + std::mem::take(&mut realm_state.js_event_loop_tick_cb); + std::mem::take(&mut realm_state.js_build_custom_error_cb); + std::mem::take(&mut realm_state.js_promise_reject_cb); + std::mem::take(&mut realm_state.js_format_exception_cb); + std::mem::take(&mut realm_state.js_wasm_streaming_cb); + // The OpCtx slice may contain a circular reference + std::mem::take(&mut realm_state.op_ctxs); + + self.context().open(isolate).clear_all_slots(isolate); + + // Expect that this context is dead (we only check this in debug mode) + // TODO(mmastrac): This check fails for some tests, will need to fix this + // 
debug_assert_eq!(Rc::strong_count(&self.context), 1, "Realm was still alive when we wanted to destory it. Not dropped?"); + } +} + +impl JsRealm { + pub(crate) fn new(inner: JsRealmInner) -> Self { + Self(inner) } #[inline(always)] @@ -130,13 +221,28 @@ impl JsRealm { .clone() } + #[inline(always)] + pub fn num_pending_ops(&self) -> usize { + self.0.num_pending_ops() + } + + #[inline(always)] + pub fn num_unrefed_ops(&self) -> usize { + self.0.num_unrefed_ops() + } + /// For info on the [`v8::Isolate`] parameter, check [`JsRealm#panics`]. #[inline(always)] pub fn handle_scope<'s>( &self, isolate: &'s mut v8::Isolate, ) -> v8::HandleScope<'s> { - v8::HandleScope::with_context(isolate, &*self.0) + self.0.handle_scope(isolate) + } + + #[inline(always)] + pub fn context(&self) -> &v8::Global { + self.0.context() } /// For info on the [`v8::Isolate`] parameter, check [`JsRealm#panics`]. @@ -144,8 +250,8 @@ impl JsRealm { &self, isolate: &'s mut v8::Isolate, ) -> v8::Local<'s, v8::Object> { - let scope = &mut self.handle_scope(isolate); - self.0.open(scope).global(scope) + let scope = &mut self.0.handle_scope(isolate); + self.0.context.open(scope).global(scope) } fn string_from_code<'a>( @@ -206,7 +312,7 @@ impl JsRealm { name: &'static str, source_code: ModuleCode, ) -> Result, Error> { - let scope = &mut self.handle_scope(isolate); + let scope = &mut self.0.handle_scope(isolate); let source = Self::string_from_code(scope, &source_code).unwrap(); debug_assert!(name.is_ascii()); @@ -240,55 +346,23 @@ impl JsRealm { // TODO(andreubotella): `mod_evaluate`, `load_main_module`, `load_side_module` } -pub struct JsRealmLocal<'s>(v8::Local<'s, v8::Context>); -impl<'s> JsRealmLocal<'s> { - pub fn new(context: v8::Local<'s, v8::Context>) -> Self { - JsRealmLocal(context) - } - - #[inline(always)] - pub fn context(&self) -> v8::Local { - self.0 - } - - #[inline(always)] - pub(crate) fn state( - &self, - isolate: &mut v8::Isolate, - ) -> Rc> { - self - .context() - 
.get_slot::>>(isolate) - .unwrap() - .clone() - } - - pub(crate) fn check_promise_rejections( - &self, - scope: &mut v8::HandleScope, - ) -> Result<(), Error> { - let context_state_rc = self.state(scope); - let mut context_state = context_state_rc.borrow_mut(); - - if context_state.pending_promise_rejections.is_empty() { - return Ok(()); +impl Drop for JsRealm { + fn drop(&mut self) { + // Don't do anything special with the global realm + if self.0.is_global { + return; } - let key = { - context_state - .pending_promise_rejections - .keys() - .next() - .unwrap() - .clone() - }; - let handle = context_state - .pending_promise_rejections - .remove(&key) - .unwrap(); - drop(context_state); - - let exception = v8::Local::new(scope, handle); - exception_to_err_result(scope, exception, true) + // There's us and there's the runtime + if Rc::strong_count(&self.0.context) == 2 { + self + .0 + .runtime_state + .borrow_mut() + .remove_realm(&self.0.context); + assert_eq!(Rc::strong_count(&self.0.context), 1); + self.0.clone().destroy(); + assert_eq!(Rc::strong_count(&self.0.context_state), 1); + } } } diff --git a/core/resources.rs b/core/resources.rs index 6ca86e10b6..94d2a2306a 100644 --- a/core/resources.rs +++ b/core/resources.rs @@ -155,13 +155,13 @@ pub trait Resource: Any + 'static { } /// The same as [`read_byob()`][Resource::read_byob], but synchronous. - fn read_byob_sync(&self, data: &mut [u8]) -> Result { + fn read_byob_sync(self: Rc, data: &mut [u8]) -> Result { _ = data; Err(not_supported()) } /// The same as [`write()`][Resource::write], but synchronous. - fn write_sync(&self, data: &[u8]) -> Result { + fn write_sync(self: Rc, data: &[u8]) -> Result { _ = data; Err(not_supported()) } @@ -187,6 +187,13 @@ pub trait Resource: Any + 'static { None } + /// Resources backed by a file descriptor can let ops know to allow for + /// low-level optimizations. 
+ #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + None + } + fn size_hint(&self) -> (u64, Option) { (0, None) } diff --git a/core/runtime.rs b/core/runtime.rs index 27fd824964..ecfd0bd571 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -8,6 +8,8 @@ use crate::extensions::OpDecl; use crate::extensions::OpEventLoopFn; use crate::inspector::JsRuntimeInspector; use crate::module_specifier::ModuleSpecifier; +use crate::modules::AssertedModuleType; +use crate::modules::ExtModuleLoader; use crate::modules::ExtModuleLoaderCb; use crate::modules::ModuleCode; use crate::modules::ModuleError; @@ -15,17 +17,16 @@ use crate::modules::ModuleId; use crate::modules::ModuleLoadId; use crate::modules::ModuleLoader; use crate::modules::ModuleMap; -use crate::op_void_async; -use crate::op_void_sync; +use crate::modules::ModuleName; use crate::ops::*; use crate::realm::ContextState; use crate::realm::JsRealm; -use crate::realm::JsRealmLocal; +use crate::realm::JsRealmInner; use crate::snapshot_util; use crate::source_map::SourceMapCache; use crate::source_map::SourceMapGetter; use crate::Extension; -use crate::ExtensionFileSource; +use crate::ModuleType; use crate::NoopModuleLoader; use crate::OpMiddlewareFn; use crate::OpResult; @@ -37,25 +38,30 @@ use futures::channel::oneshot; use futures::future::poll_fn; use futures::future::Future; use futures::future::FutureExt; -use futures::stream::FuturesUnordered; +use futures::future::MaybeDone; use futures::stream::StreamExt; -use futures::task::AtomicWaker; +use futures::task::noop_waker; use smallvec::SmallVec; use std::any::Any; use std::cell::RefCell; use std::collections::HashMap; -use std::collections::VecDeque; use std::ffi::c_void; +use std::mem::ManuallyDrop; +use std::ops::Deref; +use std::ops::DerefMut; use std::option::Option; +use std::pin::Pin; use std::rc::Rc; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering; use std::sync::Arc; use std::sync::Mutex; use std::sync::Once; use 
std::task::Context; use std::task::Poll; -use v8::OwnedIsolate; -type PendingOpFuture = OpCall<(RealmIdx, PromiseId, OpId, OpResult)>; +const STATE_DATA_OFFSET: u32 = 0; +const MODULE_MAP_DATA_OFFSET: u32 = 1; pub enum Snapshot { Static(&'static [u8]), @@ -74,30 +80,165 @@ struct IsolateAllocations { Option<(Box>, v8::NearHeapLimitCallback)>, } +/// ManuallyDrop> is clone, but it returns a ManuallyDrop> which is a massive +/// memory-leak footgun. +struct ManuallyDropRc(ManuallyDrop>); + +impl ManuallyDropRc { + pub fn clone(&self) -> Rc { + self.0.deref().clone() + } +} + +impl Deref for ManuallyDropRc { + type Target = Rc; + fn deref(&self) -> &Self::Target { + self.0.deref() + } +} + +impl DerefMut for ManuallyDropRc { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.deref_mut() + } +} + +/// This struct contains the [`JsRuntimeState`] and [`v8::OwnedIsolate`] that are required +/// to do an orderly shutdown of V8. We keep these in a separate struct to allow us to control +/// the destruction more closely, as snapshots require the isolate to be destroyed by the +/// snapshot process, not the destructor. +/// +/// The way rusty_v8 works w/snapshots is that the [`v8::OwnedIsolate`] gets consumed by a +/// [`v8::snapshot::SnapshotCreator`] that is stored in its annex. It's a bit awkward, because this +/// means we cannot let it drop (because we don't have it after a snapshot). On top of that, we have +/// to consume it in the snapshot creator because otherwise it panics. +/// +/// This inner struct allows us to let the outer JsRuntime drop normally without a Drop impl, while we +/// control dropping more closely here using ManuallyDrop. +struct InnerIsolateState { + will_snapshot: bool, + state: ManuallyDropRc>, + v8_isolate: ManuallyDrop, +} + +impl InnerIsolateState { + /// Clean out the opstate and take the inspector to prevent the inspector from getting destroyed + /// after we've torn down the contexts. 
If the inspector is not correctly torn down, random crashes + /// happen in tests (and possibly for users using the inspector). + pub fn prepare_for_cleanup(&mut self) { + let mut state = self.state.borrow_mut(); + let inspector = state.inspector.take(); + state.op_state.borrow_mut().clear(); + if let Some(inspector) = inspector { + assert_eq!( + Rc::strong_count(&inspector), + 1, + "The inspector must be dropped before the runtime" + ); + } + } + + pub fn cleanup(&mut self) { + self.prepare_for_cleanup(); + + let state_ptr = self.v8_isolate.get_data(STATE_DATA_OFFSET); + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. + _ = unsafe { Rc::from_raw(state_ptr as *const RefCell) }; + + let module_map_ptr = self.v8_isolate.get_data(MODULE_MAP_DATA_OFFSET); + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. + _ = unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; + + self.state.borrow_mut().destroy_all_realms(); + + debug_assert_eq!(Rc::strong_count(&self.state), 1); + } + + pub fn prepare_for_snapshot(mut self) -> v8::OwnedIsolate { + self.cleanup(); + // SAFETY: We're copying out of self and then immediately forgetting self + let (state, isolate) = unsafe { + ( + ManuallyDrop::take(&mut self.state.0), + ManuallyDrop::take(&mut self.v8_isolate), + ) + }; + std::mem::forget(self); + drop(state); + isolate + } +} + +impl Drop for InnerIsolateState { + fn drop(&mut self) { + self.cleanup(); + // SAFETY: We gotta drop these + unsafe { + ManuallyDrop::drop(&mut self.state.0); + if self.will_snapshot { + // Create the snapshot and just drop it. + eprintln!("WARNING: v8::OwnedIsolate for snapshot was leaked"); + } else { + ManuallyDrop::drop(&mut self.v8_isolate); + } + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub(crate) enum InitMode { + /// We have no snapshot -- this is a pristine context. 
+ New, + /// We are using a snapshot, thus certain initialization steps are skipped. + FromSnapshot, +} + +impl InitMode { + fn from_options(options: &RuntimeOptions) -> Self { + match options.startup_snapshot { + None => Self::New, + Some(_) => Self::FromSnapshot, + } + } +} + /// A single execution context of JavaScript. Corresponds roughly to the "Web -/// Worker" concept in the DOM. A JsRuntime is a Future that can be used with -/// an event loop (Tokio, async_std). +/// Worker" concept in the DOM. //// /// The JsRuntime future completes when there is an error or when all /// pending ops have completed. /// -/// Pending ops are created in JavaScript by calling Deno.core.opAsync(), and in Rust -/// by implementing an async function that takes a serde::Deserialize "control argument" -/// and an optional zero copy buffer, each async Op is tied to a Promise in JavaScript. +/// Use [`JsRuntimeForSnapshot`] to be able to create a snapshot. pub struct JsRuntime { - state: Rc>, - module_map: Option>>, - // This is an Option instead of just OwnedIsolate to workaround - // a safety issue with SnapshotCreator. See JsRuntime::drop. - v8_isolate: Option, - snapshot_options: snapshot_util::SnapshotOptions, + inner: InnerIsolateState, + module_map: Rc>, allocations: IsolateAllocations, extensions: Vec, event_loop_middlewares: Vec>, + init_mode: InitMode, // Marks if this is considered the top-level runtime. Used only be inspector. is_main: bool, } +/// The runtime type used for snapshot creation. +pub struct JsRuntimeForSnapshot(JsRuntime); + +impl Deref for JsRuntimeForSnapshot { + type Target = JsRuntime; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for JsRuntimeForSnapshot { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + pub(crate) struct DynImportModEvaluate { load_id: ModuleLoadId, module_id: ModuleId, @@ -158,7 +299,7 @@ pub type CompiledWasmModuleStore = CrossIsolateStore; /// embedder slots. 
pub struct JsRuntimeState { global_realm: Option, - known_realms: Vec>, + known_realms: Vec, pub(crate) has_tick_scheduled: bool, pub(crate) pending_dyn_mod_evaluate: Vec, pub(crate) pending_mod_evaluate: Option, @@ -167,8 +308,6 @@ pub struct JsRuntimeState { dyn_module_evaluate_idle_counter: u32, pub(crate) source_map_getter: Option>>, pub(crate) source_map_cache: Rc>, - pub(crate) pending_ops: FuturesUnordered, - pub(crate) have_unpolled_ops: bool, pub(crate) op_state: Rc>, pub(crate) shared_array_buffer_store: Option, pub(crate) compiled_wasm_module_store: Option, @@ -177,9 +316,26 @@ pub struct JsRuntimeState { /// instead of any other exceptions. // TODO(nayeemrmn): This is polled in `exception_to_err_result()` which is // flimsy. Try to poll it similarly to `pending_promise_rejections`. - pub(crate) dispatched_exceptions: VecDeque>, + pub(crate) dispatched_exception: Option>, pub(crate) inspector: Option>>, - waker: AtomicWaker, +} + +impl JsRuntimeState { + pub(crate) fn destroy_all_realms(&mut self) { + self.global_realm.take(); + for realm in self.known_realms.drain(..) { + realm.destroy() + } + } + + pub(crate) fn remove_realm( + &mut self, + realm_context: &Rc>, + ) { + self + .known_realms + .retain(|realm| !realm.is_same(realm_context)); + } } fn v8_init( @@ -198,7 +354,6 @@ fn v8_init( " --no-validate-asm", " --turbo_fast_api_calls", " --harmony-change-array-by-copy", - " --no-harmony-rab-gsab", ); if predictable { @@ -247,15 +402,6 @@ pub struct RuntimeOptions { /// V8 snapshot that should be loaded on startup. pub startup_snapshot: Option, - /// Prepare runtime to take snapshot of loaded code. - /// The snapshot is deterministic and uses predictable random numbers. - pub will_snapshot: bool, - - /// An optional callback that will be called for each module that is loaded - /// during snapshotting. This callback can be used to transpile source on the - /// fly, during snapshotting, eg. to transpile TypeScript to JavaScript. 
- pub snapshot_module_load_cb: Option, - /// Isolate creation parameters. pub create_params: Option, @@ -286,46 +432,96 @@ pub struct RuntimeOptions { pub is_main: bool, } -impl Drop for JsRuntime { - fn drop(&mut self) { - if let Some(v8_isolate) = self.v8_isolate.as_mut() { - Self::drop_state_and_module_map(v8_isolate); - } - } +#[derive(Default)] +pub struct RuntimeSnapshotOptions { + /// An optional callback that will be called for each module that is loaded + /// during snapshotting. This callback can be used to transpile source on the + /// fly, during snapshotting, eg. to transpile TypeScript to JavaScript. + pub snapshot_module_load_cb: Option, } impl JsRuntime { - const STATE_DATA_OFFSET: u32 = 0; - const MODULE_MAP_DATA_OFFSET: u32 = 1; - /// Only constructor, configuration is done through `options`. - pub fn new(mut options: RuntimeOptions) -> Self { - let v8_platform = options.v8_platform.take(); + pub fn new(mut options: RuntimeOptions) -> JsRuntime { + JsRuntime::init_v8(options.v8_platform.take(), cfg!(test)); + JsRuntime::new_inner(options, false, None) + } + pub(crate) fn state_from( + isolate: &v8::Isolate, + ) -> Rc> { + let state_ptr = isolate.get_data(STATE_DATA_OFFSET); + let state_rc = + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. + unsafe { Rc::from_raw(state_ptr as *const RefCell) }; + let state = state_rc.clone(); + std::mem::forget(state_rc); + state + } + + pub(crate) fn module_map_from( + isolate: &v8::Isolate, + ) -> Rc> { + let module_map_ptr = isolate.get_data(MODULE_MAP_DATA_OFFSET); + let module_map_rc = + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. 
+ unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; + let module_map = module_map_rc.clone(); + std::mem::forget(module_map_rc); + module_map + } + + pub(crate) fn event_loop_pending_state_from_scope( + scope: &mut v8::HandleScope, + ) -> EventLoopPendingState { + let state = JsRuntime::state_from(scope); + let module_map = JsRuntime::module_map_from(scope); + let state = EventLoopPendingState::new( + scope, + &mut state.borrow_mut(), + &module_map.borrow(), + ); + state + } + + fn init_v8( + v8_platform: Option>, + predictable: bool, + ) { static DENO_INIT: Once = Once::new(); - DENO_INIT.call_once(move || v8_init(v8_platform, options.will_snapshot)); + static DENO_PREDICTABLE: AtomicBool = AtomicBool::new(false); + static DENO_PREDICTABLE_SET: AtomicBool = AtomicBool::new(false); - // Add builtins extension - // TODO(bartlomieju): remove this in favor of `SnapshotOptions`. - let has_startup_snapshot = options.startup_snapshot.is_some(); - if !has_startup_snapshot { - options - .extensions - .insert(0, crate::ops_builtin::core::init_ops_and_esm()); - } else { - options - .extensions - .insert(0, crate::ops_builtin::core::init_ops()); + if DENO_PREDICTABLE_SET.load(Ordering::SeqCst) { + let current = DENO_PREDICTABLE.load(Ordering::SeqCst); + assert_eq!(current, predictable, "V8 may only be initialized once in either snapshotting or non-snapshotting mode. 
Either snapshotting or non-snapshotting mode may be used in a single process, not both."); + DENO_PREDICTABLE_SET.store(true, Ordering::SeqCst); + DENO_PREDICTABLE.store(predictable, Ordering::SeqCst); } - let ops = Self::collect_ops(&mut options.extensions); - let mut op_state = OpState::new(ops.len()); + DENO_INIT.call_once(move || v8_init(v8_platform, predictable)); + } - if let Some(get_error_class_fn) = options.get_error_class_fn { - op_state.get_error_class_fn = get_error_class_fn; - } + fn new_inner( + mut options: RuntimeOptions, + will_snapshot: bool, + maybe_load_callback: Option, + ) -> JsRuntime { + let init_mode = InitMode::from_options(&options); + let (op_state, ops) = Self::create_opstate(&mut options, init_mode); let op_state = Rc::new(RefCell::new(op_state)); + // Collect event-loop middleware + let mut event_loop_middlewares = + Vec::with_capacity(options.extensions.len()); + for extension in &mut options.extensions { + if let Some(middleware) = extension.init_event_loop_middleware() { + event_loop_middlewares.push(middleware); + } + } + let align = std::mem::align_of::(); let layout = std::alloc::Layout::from_size_align( std::mem::size_of::<*mut v8::OwnedIsolate>(), @@ -344,13 +540,10 @@ impl JsRuntime { has_tick_scheduled: false, source_map_getter: options.source_map_getter.map(Rc::new), source_map_cache: Default::default(), - pending_ops: FuturesUnordered::new(), shared_array_buffer_store: options.shared_array_buffer_store, compiled_wasm_module_store: options.compiled_wasm_module_store, op_state: op_state.clone(), - waker: AtomicWaker::new(), - have_unpolled_ops: false, - dispatched_exceptions: Default::default(), + dispatched_exception: None, // Some fields are initialized later after isolate is created inspector: None, global_realm: None, @@ -358,296 +551,52 @@ impl JsRuntime { })); let weak = Rc::downgrade(&state_rc); + let context_state = Rc::new(RefCell::new(ContextState::default())); let op_ctxs = ops .into_iter() .enumerate() .map(|(id, 
decl)| { - OpCtx::new(id, 0, Rc::new(decl), op_state.clone(), weak.clone()) + OpCtx::new( + id as u16, + context_state.clone(), + Rc::new(decl), + op_state.clone(), + weak.clone(), + ) }) .collect::>() .into_boxed_slice(); + context_state.borrow_mut().op_ctxs = op_ctxs; + context_state.borrow_mut().isolate = Some(isolate_ptr); - let snapshot_options = snapshot_util::SnapshotOptions::from_bools( - options.startup_snapshot.is_some(), - options.will_snapshot, - ); - let refs = bindings::external_references(&op_ctxs); + let refs = bindings::external_references(&context_state.borrow().op_ctxs); // V8 takes ownership of external_references. let refs: &'static v8::ExternalReferences = Box::leak(Box::new(refs)); - let global_context; - let mut maybe_snapshotted_data = None; - let (mut isolate, snapshot_options) = if snapshot_options.will_snapshot() { - let snapshot_creator = - snapshot_util::create_snapshot_creator(refs, options.startup_snapshot); - let mut isolate = JsRuntime::setup_isolate(snapshot_creator); - { - let scope = &mut v8::HandleScope::new(&mut isolate); - let context = - bindings::initialize_context(scope, &op_ctxs, snapshot_options); - - // Get module map data from the snapshot - if has_startup_snapshot { - maybe_snapshotted_data = - Some(snapshot_util::get_snapshotted_data(scope, context)); - } - - global_context = v8::Global::new(scope, context); - } - (isolate, snapshot_options) + let mut isolate = if will_snapshot { + snapshot_util::create_snapshot_creator( + refs, + options.startup_snapshot.take(), + ) } else { let mut params = options .create_params .take() - .unwrap_or_else(|| { - v8::CreateParams::default().embedder_wrapper_type_info_offsets( - V8_WRAPPER_TYPE_INDEX, - V8_WRAPPER_OBJECT_INDEX, - ) - }) + .unwrap_or_default() + .embedder_wrapper_type_info_offsets( + V8_WRAPPER_TYPE_INDEX, + V8_WRAPPER_OBJECT_INDEX, + ) .external_references(&**refs); - - if let Some(snapshot) = options.startup_snapshot { + if let Some(snapshot) = 
options.startup_snapshot.take() { params = match snapshot { Snapshot::Static(data) => params.snapshot_blob(data), Snapshot::JustCreated(data) => params.snapshot_blob(data), Snapshot::Boxed(data) => params.snapshot_blob(data), }; } - - let isolate = v8::Isolate::new(params); - let mut isolate = JsRuntime::setup_isolate(isolate); - { - let scope = &mut v8::HandleScope::new(&mut isolate); - let context = - bindings::initialize_context(scope, &op_ctxs, snapshot_options); - - // Get module map data from the snapshot - if has_startup_snapshot { - maybe_snapshotted_data = - Some(snapshot_util::get_snapshotted_data(scope, context)); - } - - global_context = v8::Global::new(scope, context); - } - - (isolate, snapshot_options) + v8::Isolate::new(params) }; - - // SAFETY: this is first use of `isolate_ptr` so we are sure we're - // not overwriting an existing pointer. - isolate = unsafe { - isolate_ptr.write(isolate); - isolate_ptr.read() - }; - - global_context.open(&mut isolate).set_slot( - &mut isolate, - Rc::new(RefCell::new(ContextState { - op_ctxs, - ..Default::default() - })), - ); - - op_state.borrow_mut().put(isolate_ptr); - let inspector = if options.inspector { - Some(JsRuntimeInspector::new( - &mut isolate, - global_context.clone(), - options.is_main, - )) - } else { - None - }; - - let loader = if snapshot_options != snapshot_util::SnapshotOptions::Load { - let esm_sources = options - .extensions - .iter() - .flat_map(|ext| match ext.get_esm_sources() { - Some(s) => s.to_owned(), - None => vec![], - }) - .collect::>(); - - #[cfg(feature = "include_js_files_for_snapshotting")] - if snapshot_options != snapshot_util::SnapshotOptions::None { - for source in &esm_sources { - use crate::ExtensionFileSourceCode; - if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = - &source.code - { - println!("cargo:rerun-if-changed={}", path.display()) - } - } - } - - Rc::new(crate::modules::ExtModuleLoader::new( - options.module_loader, - esm_sources, - 
options.snapshot_module_load_cb, - )) - } else { - options - .module_loader - .unwrap_or_else(|| Rc::new(NoopModuleLoader)) - }; - - { - let mut state = state_rc.borrow_mut(); - state.global_realm = Some(JsRealm::new(global_context.clone())); - state.inspector = inspector; - state - .known_realms - .push(v8::Weak::new(&mut isolate, &global_context)); - } - isolate.set_data( - Self::STATE_DATA_OFFSET, - Rc::into_raw(state_rc.clone()) as *mut c_void, - ); - - let module_map_rc = Rc::new(RefCell::new(ModuleMap::new( - loader, - op_state, - snapshot_options == snapshot_util::SnapshotOptions::Load, - ))); - if let Some(snapshotted_data) = maybe_snapshotted_data { - let scope = - &mut v8::HandleScope::with_context(&mut isolate, global_context); - let mut module_map = module_map_rc.borrow_mut(); - module_map.update_with_snapshotted_data(scope, snapshotted_data); - } - isolate.set_data( - Self::MODULE_MAP_DATA_OFFSET, - Rc::into_raw(module_map_rc.clone()) as *mut c_void, - ); - - let mut js_runtime = Self { - v8_isolate: Some(isolate), - snapshot_options, - allocations: IsolateAllocations::default(), - event_loop_middlewares: Vec::with_capacity(options.extensions.len()), - extensions: options.extensions, - state: state_rc, - module_map: Some(module_map_rc), - is_main: options.is_main, - }; - - // Init resources and ops before extensions to make sure they are - // available during the initialization process. - js_runtime.init_extension_ops().unwrap(); - let realm = js_runtime.global_realm(); - js_runtime.init_extension_js(&realm).unwrap(); - - js_runtime - } - - fn drop_state_and_module_map(v8_isolate: &mut OwnedIsolate) { - let state_ptr = v8_isolate.get_data(Self::STATE_DATA_OFFSET); - let state_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. 
- unsafe { Rc::from_raw(state_ptr as *const RefCell) }; - drop(state_rc); - - let module_map_ptr = v8_isolate.get_data(Self::MODULE_MAP_DATA_OFFSET); - let module_map_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. - unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; - drop(module_map_rc); - } - - #[inline] - fn get_module_map(&mut self) -> &Rc> { - self.module_map.as_ref().unwrap() - } - - #[inline] - pub fn global_context(&mut self) -> v8::Global { - let state = self.state.borrow(); - let global_realm = state.global_realm.as_ref().unwrap(); - global_realm.context().clone() - } - - #[inline] - pub fn v8_isolate(&mut self) -> &mut v8::OwnedIsolate { - self.v8_isolate.as_mut().unwrap() - } - - #[inline] - pub fn inspector(&mut self) -> Rc> { - self.state.borrow().inspector() - } - - #[inline] - pub fn global_realm(&mut self) -> JsRealm { - let state = self.state.borrow(); - state.global_realm.clone().unwrap() - } - - /// Creates a new realm (V8 context) in this JS execution context, - /// pre-initialized with all of the extensions that were passed in - /// [`RuntimeOptions::extensions`] when the [`JsRuntime`] was - /// constructed. - pub fn create_realm(&mut self) -> Result { - let realm = { - let realm_idx = self.state.borrow().known_realms.len(); - - let op_ctxs: Box<[OpCtx]> = self - .global_realm() - .state(self.v8_isolate()) - .borrow() - .op_ctxs - .iter() - .map(|op_ctx| { - OpCtx::new( - op_ctx.id, - realm_idx, - op_ctx.decl.clone(), - op_ctx.state.clone(), - op_ctx.runtime_state.clone(), - ) - }) - .collect(); - - // SAFETY: Having the scope tied to self's lifetime makes it impossible to - // reference JsRuntimeState::op_ctxs while the scope is alive. Here we - // turn it into an unbound lifetime, which is sound because 1. it only - // lives until the end of this block, and 2. the HandleScope only has - // access to the isolate, and nothing else we're accessing from self does. 
- let scope = &mut v8::HandleScope::new(unsafe { - &mut *(self.v8_isolate() as *mut v8::OwnedIsolate) - }); - let context = - bindings::initialize_context(scope, &op_ctxs, self.snapshot_options); - context.set_slot( - scope, - Rc::new(RefCell::new(ContextState { - op_ctxs, - ..Default::default() - })), - ); - - self - .state - .borrow_mut() - .known_realms - .push(v8::Weak::new(scope, context)); - - JsRealm::new(v8::Global::new(scope, context)) - }; - - self.init_extension_js(&realm)?; - Ok(realm) - } - - #[inline] - pub fn handle_scope(&mut self) -> v8::HandleScope { - self.global_realm().handle_scope(self.v8_isolate()) - } - - fn setup_isolate(mut isolate: v8::OwnedIsolate) -> v8::OwnedIsolate { isolate.set_capture_stack_trace_for_uncaught_exceptions(true, 10); isolate.set_promise_reject_callback(bindings::promise_reject_callback); isolate.set_host_initialize_import_meta_object_callback( @@ -659,72 +608,254 @@ impl JsRuntime { isolate.set_wasm_async_resolve_promise_callback( bindings::wasm_async_resolve_promise_callback, ); - isolate - } - pub(crate) fn state(isolate: &v8::Isolate) -> Rc> { - let state_ptr = isolate.get_data(Self::STATE_DATA_OFFSET); - let state_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. - unsafe { Rc::from_raw(state_ptr as *const RefCell) }; - let state = state_rc.clone(); - Rc::into_raw(state_rc); - state - } + let (global_context, snapshotted_data) = { + let scope = &mut v8::HandleScope::new(&mut isolate); + let context = v8::Context::new(scope); - pub(crate) fn module_map(isolate: &v8::Isolate) -> Rc> { - let module_map_ptr = isolate.get_data(Self::MODULE_MAP_DATA_OFFSET); - let module_map_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. 
- unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; - let module_map = module_map_rc.clone(); - Rc::into_raw(module_map_rc); - module_map - } + // Get module map data from the snapshot + let snapshotted_data = if init_mode == InitMode::FromSnapshot { + Some(snapshot_util::get_snapshotted_data(scope, context)) + } else { + None + }; - /// Initializes JS of provided Extensions in the given realm - fn init_extension_js(&mut self, realm: &JsRealm) -> Result<(), Error> { - fn load_and_evaluate_module( - runtime: &mut JsRuntime, - file_source: &ExtensionFileSource, - ) -> Result<(), Error> { - futures::executor::block_on(async { - let id = runtime - .load_side_module( - &ModuleSpecifier::parse(file_source.specifier)?, - None, - ) - .await?; - let receiver = runtime.mod_evaluate(id); - runtime.run_event_loop(false).await?; - receiver.await? - }) - .with_context(|| format!("Couldn't execute '{}'", file_source.specifier)) + (v8::Global::new(scope, context), snapshotted_data) + }; + + // SAFETY: this is first use of `isolate_ptr` so we are sure we're + // not overwriting an existing pointer. 
+ isolate = unsafe { + isolate_ptr.write(isolate); + isolate_ptr.read() + }; + + let mut context_scope: v8::HandleScope = + v8::HandleScope::with_context(&mut isolate, global_context.clone()); + let scope = &mut context_scope; + let context = v8::Local::new(scope, global_context.clone()); + + bindings::initialize_context( + scope, + context, + &context_state.borrow().op_ctxs, + init_mode, + ); + + context.set_slot(scope, context_state.clone()); + + op_state.borrow_mut().put(isolate_ptr); + let inspector = if options.inspector { + Some(JsRuntimeInspector::new(scope, context, options.is_main)) + } else { + None + }; + + let loader = options + .module_loader + .unwrap_or_else(|| Rc::new(NoopModuleLoader)); + + { + let global_realm = JsRealmInner::new( + context_state, + global_context, + state_rc.clone(), + true, + ); + let mut state = state_rc.borrow_mut(); + state.global_realm = Some(JsRealm::new(global_realm.clone())); + state.inspector = inspector; + state.known_realms.push(global_realm); } + scope.set_data( + STATE_DATA_OFFSET, + Rc::into_raw(state_rc.clone()) as *mut c_void, + ); + let module_map_rc = Rc::new(RefCell::new(ModuleMap::new(loader))); + if let Some(snapshotted_data) = snapshotted_data { + let mut module_map = module_map_rc.borrow_mut(); + module_map.update_with_snapshotted_data(scope, snapshotted_data); + } + scope.set_data( + MODULE_MAP_DATA_OFFSET, + Rc::into_raw(module_map_rc.clone()) as *mut c_void, + ); - // Take extensions to avoid double-borrow + drop(context_scope); + + let mut js_runtime = JsRuntime { + inner: InnerIsolateState { + will_snapshot, + state: ManuallyDropRc(ManuallyDrop::new(state_rc)), + v8_isolate: ManuallyDrop::new(isolate), + }, + init_mode, + allocations: IsolateAllocations::default(), + event_loop_middlewares, + extensions: options.extensions, + module_map: module_map_rc, + is_main: options.is_main, + }; + + let realm = js_runtime.global_realm(); + // TODO(mmastrac): We should thread errors back out of the runtime + 
js_runtime + .init_extension_js(&realm, maybe_load_callback) + .unwrap(); + js_runtime + } + + #[cfg(test)] + #[inline] + pub(crate) fn module_map(&self) -> &Rc> { + &self.module_map + } + + #[inline] + pub fn global_context(&self) -> v8::Global { + self + .inner + .state + .borrow() + .known_realms + .get(0) + .unwrap() + .context() + .clone() + } + + #[inline] + pub fn v8_isolate(&mut self) -> &mut v8::OwnedIsolate { + &mut self.inner.v8_isolate + } + + #[inline] + pub fn inspector(&mut self) -> Rc> { + self.inner.state.borrow().inspector() + } + + #[inline] + pub fn global_realm(&mut self) -> JsRealm { + let state = self.inner.state.borrow(); + state.global_realm.clone().unwrap() + } + + /// Returns the extensions that this runtime is using (including internal ones). + pub fn extensions(&self) -> &Vec { + &self.extensions + } + + /// Creates a new realm (V8 context) in this JS execution context, + /// pre-initialized with all of the extensions that were passed in + /// [`RuntimeOptions::extensions`] when the [`JsRuntime`] was + /// constructed. + pub fn create_realm(&mut self) -> Result { + let realm = { + let context_state = Rc::new(RefCell::new(ContextState::default())); + let op_ctxs: Box<[OpCtx]> = self + .global_realm() + .0 + .state() + .borrow() + .op_ctxs + .iter() + .map(|op_ctx| { + OpCtx::new( + op_ctx.id, + context_state.clone(), + op_ctx.decl.clone(), + op_ctx.state.clone(), + op_ctx.runtime_state.clone(), + ) + }) + .collect(); + context_state.borrow_mut().op_ctxs = op_ctxs; + context_state.borrow_mut().isolate = Some(self.v8_isolate() as _); + + let raw_ptr = self.v8_isolate() as *mut v8::OwnedIsolate; + // SAFETY: Having the scope tied to self's lifetime makes it impossible to + // reference JsRuntimeState::op_ctxs while the scope is alive. Here we + // turn it into an unbound lifetime, which is sound because 1. it only + // lives until the end of this block, and 2. 
the HandleScope only has + // access to the isolate, and nothing else we're accessing from self does. + let isolate = unsafe { raw_ptr.as_mut() }.unwrap(); + let scope = &mut v8::HandleScope::new(isolate); + let context = v8::Context::new(scope); + let scope = &mut v8::ContextScope::new(scope, context); + + let context = bindings::initialize_context( + scope, + context, + &context_state.borrow().op_ctxs, + self.init_mode, + ); + context.set_slot(scope, context_state.clone()); + let realm = JsRealmInner::new( + context_state, + v8::Global::new(scope, context), + self.inner.state.clone(), + false, + ); + let mut state = self.inner.state.borrow_mut(); + state.known_realms.push(realm.clone()); + JsRealm::new(realm) + }; + + self.init_extension_js(&realm, None)?; + Ok(realm) + } + + #[inline] + pub fn handle_scope(&mut self) -> v8::HandleScope { + self.global_realm().handle_scope(self.v8_isolate()) + } + + /// Initializes JS of provided Extensions in the given realm. + fn init_extension_js( + &mut self, + realm: &JsRealm, + maybe_load_callback: Option, + ) -> Result<(), Error> { + // Initialization of JS happens in phases: + // 1. Iterate through all extensions: + // a. Execute all extension "script" JS files + // b. Load all extension "module" JS files (but do not execute them yet) + // 2. Iterate through all extensions: + // a. If an extension has a `esm_entry_point`, execute it. + + // Take extensions temporarily so we can avoid have a mutable reference to self let extensions = std::mem::take(&mut self.extensions); - for ext in &extensions { - { - if let Some(esm_files) = ext.get_esm_sources() { - if let Some(entry_point) = ext.get_esm_entry_point() { - let file_source = esm_files - .iter() - .find(|file| file.specifier == entry_point) - .unwrap(); - load_and_evaluate_module(self, file_source)?; - } else { - for file_source in esm_files { - load_and_evaluate_module(self, file_source)?; - } + + // TODO(nayeemrmn): Module maps should be per-realm. 
+ let loader = self.module_map.borrow().loader.clone(); + let ext_loader = Rc::new(ExtModuleLoader::new( + &extensions, + maybe_load_callback.map(Rc::new), + )); + self.module_map.borrow_mut().loader = ext_loader; + + let mut esm_entrypoints = vec![]; + + futures::executor::block_on(async { + for extension in &extensions { + let maybe_esm_entry_point = extension.get_esm_entry_point(); + + if let Some(esm_files) = extension.get_esm_sources() { + for file_source in esm_files { + self + .load_side_module( + &ModuleSpecifier::parse(file_source.specifier)?, + None, + ) + .await?; } } - } - { - if let Some(js_files) = ext.get_js_sources() { + if let Some(entry_point) = maybe_esm_entry_point { + esm_entrypoints.push(entry_point); + } + + if let Some(js_files) = extension.get_js_sources() { for file_source in js_files { realm.execute_script( self.v8_isolate(), @@ -733,17 +864,42 @@ impl JsRuntime { )?; } } + + if extension.is_core { + self.init_cbs(realm); + } } - // TODO(bartlomieju): this not great that we need to have this conditional - // here, but I haven't found a better way to do it yet. - if ext.is_core { - self.init_cbs(realm); + for specifier in esm_entrypoints { + let mod_id = { + self + .module_map + .borrow() + .get_id(specifier, AssertedModuleType::JavaScriptOrWasm) + .unwrap_or_else(|| { + panic!("{} not present in the module map", specifier) + }) + }; + let receiver = self.mod_evaluate(mod_id); + self.run_event_loop(false).await?; + receiver + .await? 
+ .with_context(|| format!("Couldn't execute '{specifier}'"))?; } - } - // Restore extensions + + #[cfg(debug_assertions)] + { + let module_map_rc = self.module_map.clone(); + let mut scope = realm.handle_scope(self.v8_isolate()); + let module_map = module_map_rc.borrow(); + module_map.assert_all_modules_evaluated(&mut scope); + } + + Ok::<_, anyhow::Error>(()) + })?; + self.extensions = extensions; - + self.module_map.borrow_mut().loader = loader; Ok(()) } @@ -765,7 +921,7 @@ impl JsRuntime { let macroware = move |d| middleware.iter().fold(d, |d, m| m(d)); // Flatten ops, apply middlware & override disabled ops - exts + let ops: Vec<_> = exts .iter_mut() .filter_map(|e| e.init_ops()) .flatten() @@ -773,41 +929,70 @@ impl JsRuntime { name: d.name, ..macroware(d) }) - .map(|op| match op.enabled { - true => op, - false => OpDecl { - v8_fn_ptr: match op.is_async { - true => op_void_async::v8_fn_ptr as _, - false => op_void_sync::v8_fn_ptr as _, - }, - ..op - }, - }) - .collect() + .collect(); + + // In debug build verify there are no duplicate ops. 
+ #[cfg(debug_assertions)] + { + let mut count_by_name = HashMap::new(); + + for op in ops.iter() { + count_by_name + .entry(&op.name) + .or_insert(vec![]) + .push(op.name.to_string()); + } + + let mut duplicate_ops = vec![]; + for (op_name, _count) in + count_by_name.iter().filter(|(_k, v)| v.len() > 1) + { + duplicate_ops.push(op_name.to_string()); + } + if !duplicate_ops.is_empty() { + let mut msg = "Found ops with duplicate names:\n".to_string(); + for op_name in duplicate_ops { + msg.push_str(&format!(" - {}\n", op_name)); + } + msg.push_str("Op names need to be unique."); + panic!("{}", msg); + } + } + + ops } /// Initializes ops of provided Extensions - fn init_extension_ops(&mut self) -> Result<(), Error> { - let op_state = self.op_state(); - // Take extensions to avoid double-borrow - { - let mut extensions: Vec = std::mem::take(&mut self.extensions); - - // Setup state - for e in extensions.iter_mut() { - // ops are already registered during in bindings::initialize_context(); - e.init_state(&mut op_state.borrow_mut()); - - // Setup event-loop middleware - if let Some(middleware) = e.init_event_loop_middleware() { - self.event_loop_middlewares.push(middleware); - } - } - - // Restore extensions - self.extensions = extensions; + fn create_opstate( + options: &mut RuntimeOptions, + init_mode: InitMode, + ) -> (OpState, Vec) { + // Add built-in extension + if init_mode == InitMode::FromSnapshot { + options + .extensions + .insert(0, crate::ops_builtin::core::init_ops()); + } else { + options + .extensions + .insert(0, crate::ops_builtin::core::init_ops_and_esm()); } - Ok(()) + + let ops = Self::collect_ops(&mut options.extensions); + + let mut op_state = OpState::new(ops.len()); + + if let Some(get_error_class_fn) = options.get_error_class_fn { + op_state.get_error_class_fn = get_error_class_fn; + } + + // Setup state + for e in &mut options.extensions { + // ops are already registered during in bindings::initialize_context(); + e.init_state(&mut op_state); + 
} + + (op_state, ops) } pub fn eval<'s, T>( @@ -870,7 +1055,7 @@ impl JsRuntime { }; // Put global handles in the realm's ContextState - let state_rc = realm.state(self.v8_isolate()); + let state_rc = realm.0.state(); let mut state = state_rc.borrow_mut(); state .js_event_loop_tick_cb @@ -883,7 +1068,7 @@ impl JsRuntime { /// Returns the runtime's op state, which can be used to maintain ops /// and access resources between op calls. pub fn op_state(&mut self) -> Rc> { - let state = self.state.borrow(); + let state = self.inner.state.borrow(); state.op_state.clone() } @@ -937,70 +1122,25 @@ impl JsRuntime { ) } - /// Takes a snapshot. The isolate should have been created with will_snapshot - /// set to true. - /// - /// `Error` can usually be downcast to `JsError`. - pub fn snapshot(mut self) -> v8::StartupData { - self.state.borrow_mut().inspector.take(); - - // Set the context to be snapshot's default context - { - let context = self.global_context(); - let mut scope = self.handle_scope(); - let local_context = v8::Local::new(&mut scope, context); - scope.set_default_context(local_context); - } - - // Serialize the module map and store its data in the snapshot. 
- { - let snapshotted_data = { - let module_map_rc = self.module_map.take().unwrap(); - let module_map = module_map_rc.borrow(); - module_map.serialize_for_snapshotting(&mut self.handle_scope()) - }; - - let context = self.global_context(); - let mut scope = self.handle_scope(); - snapshot_util::set_snapshotted_data( - &mut scope, - context, - snapshotted_data, - ); - } - - // Drop existing ModuleMap to drop v8::Global handles - { - let v8_isolate = self.v8_isolate(); - Self::drop_state_and_module_map(v8_isolate); - } - - // Drop other v8::Global handles before snapshotting - { - for weak_context in &self.state.clone().borrow().known_realms { - let scope = &mut self.handle_scope(); - if let Some(context) = weak_context.to_local(scope) { - let realm = JsRealmLocal::new(context); - let realm_state_rc = realm.state(scope); - let mut realm_state = realm_state_rc.borrow_mut(); - std::mem::take(&mut realm_state.js_event_loop_tick_cb); - std::mem::take(&mut realm_state.js_build_custom_error_cb); - std::mem::take(&mut realm_state.js_promise_reject_cb); - std::mem::take(&mut realm_state.js_format_exception_cb); - std::mem::take(&mut realm_state.js_wasm_streaming_cb); - context.clear_all_slots(scope); - } + /// Call a function. If it returns a promise, run the event loop until that + /// promise is settled. If the promise rejects or there is an uncaught error + /// in the event loop, return `Err(error)`. Or return `Ok()`. 
+ pub async fn call_and_await( + &mut self, + function: &v8::Global, + ) -> Result, Error> { + let promise = { + let scope = &mut self.handle_scope(); + let cb = function.open(scope); + let this = v8::undefined(scope).into(); + let promise = cb.call(scope, this, &[]); + if promise.is_none() || scope.is_execution_terminating() { + let undefined = v8::undefined(scope).into(); + return exception_to_err_result(scope, undefined, false); } - - let mut state = self.state.borrow_mut(); - state.known_realms.clear(); - state.global_realm.take(); - } - - let snapshot_creator = self.v8_isolate.take().unwrap(); - snapshot_creator - .create_blob(v8::FunctionCodeHandling::Keep) - .unwrap() + v8::Global::new(scope, promise.unwrap()) + }; + self.resolve_value(promise).await } /// Returns the namespace object of a module. @@ -1011,9 +1151,8 @@ impl JsRuntime { &mut self, module_id: ModuleId, ) -> Result, Error> { - let module_map_rc = Self::module_map(self.v8_isolate()); - - let module_handle = module_map_rc + let module_handle = self + .module_map .borrow() .get_handle(module_id) .expect("ModuleInfo not found"); @@ -1094,17 +1233,20 @@ impl JsRuntime { } pub fn maybe_init_inspector(&mut self) { - if self.state.borrow().inspector.is_some() { + if self.inner.state.borrow().inspector.is_some() { return; } - let global_context = self.global_context(); - let mut state = self.state.borrow_mut(); - state.inspector = Some(JsRuntimeInspector::new( - self.v8_isolate.as_mut().unwrap(), - global_context, - self.is_main, - )); + let context = self.global_context(); + let scope = &mut v8::HandleScope::with_context( + self.inner.v8_isolate.as_mut(), + context.clone(), + ); + let context = v8::Local::new(scope, context); + + let mut state = self.inner.state.borrow_mut(); + state.inspector = + Some(JsRuntimeInspector::new(scope, context, self.is_main)); } pub fn poll_value( @@ -1179,14 +1321,14 @@ impl JsRuntime { let has_inspector: bool; { - let state = self.state.borrow(); + let state = 
self.inner.state.borrow(); has_inspector = state.inspector.is_some(); - state.waker.register(cx.waker()); + state.op_state.borrow().waker.register(cx.waker()); } if has_inspector { // We poll the inspector first. - let _ = self.inspector().borrow_mut().poll_unpin(cx); + let _ = self.inspector().borrow().poll_sessions(Some(cx)).unwrap(); } self.pump_v8_message_loop()?; @@ -1227,7 +1369,7 @@ impl JsRuntime { // Event loop middlewares let mut maybe_scheduling = false; { - let op_state = self.state.borrow().op_state.clone(); + let op_state = self.inner.state.borrow().op_state.clone(); for f in &self.event_loop_middlewares { if f(op_state.clone(), cx) { maybe_scheduling = true; @@ -1263,7 +1405,7 @@ impl JsRuntime { return Poll::Ready(Ok(())); } - let state = self.state.borrow(); + let state = self.inner.state.borrow(); // Check if more async ops have been dispatched // during this turn of event loop. @@ -1272,12 +1414,11 @@ impl JsRuntime { // TODO(andreubotella) The event loop will spin as long as there are pending // background tasks. We should look into having V8 notify us when a // background task is done. 
- if state.have_unpolled_ops - || pending_state.has_pending_background_tasks + if pending_state.has_pending_background_tasks || pending_state.has_tick_scheduled || maybe_scheduling { - state.waker.wake(); + state.op_state.borrow().waker.wake(); } drop(state); @@ -1312,7 +1453,8 @@ impl JsRuntime { || pending_state.has_tick_scheduled { // pass, will be polled again - } else if self.state.borrow().dyn_module_evaluate_idle_counter >= 1 { + } else if self.inner.state.borrow().dyn_module_evaluate_idle_counter >= 1 + { let scope = &mut self.handle_scope(); let messages = find_stalled_top_level_await(scope); // We are gonna print only a single message to provide a nice formatting @@ -1324,12 +1466,12 @@ impl JsRuntime { let js_error = JsError::from_v8_message(scope, msg); return Poll::Ready(Err(js_error.into())); } else { - let mut state = self.state.borrow_mut(); + let mut state = self.inner.state.borrow_mut(); // Delay the above error by one spin of the event loop. A dynamic import // evaluation may complete during this, in which case the counter will // reset. 
state.dyn_module_evaluate_idle_counter += 1; - state.waker.wake(); + state.op_state.borrow().waker.wake(); } } @@ -1337,26 +1479,69 @@ impl JsRuntime { } fn event_loop_pending_state(&mut self) -> EventLoopPendingState { - let isolate = self.v8_isolate.as_mut().unwrap(); - let mut scope = v8::HandleScope::new(isolate); + let mut scope = v8::HandleScope::new(self.inner.v8_isolate.as_mut()); EventLoopPendingState::new( &mut scope, - &mut self.state.borrow_mut(), - &self.module_map.as_ref().unwrap().borrow(), + &mut self.inner.state.borrow_mut(), + &self.module_map.borrow(), ) } +} - pub(crate) fn event_loop_pending_state_from_scope( - scope: &mut v8::HandleScope, - ) -> EventLoopPendingState { - let state = Self::state(scope); - let module_map = Self::module_map(scope); - let state = EventLoopPendingState::new( - scope, - &mut state.borrow_mut(), - &module_map.borrow(), - ); - state +impl JsRuntimeForSnapshot { + pub fn new( + mut options: RuntimeOptions, + runtime_snapshot_options: RuntimeSnapshotOptions, + ) -> JsRuntimeForSnapshot { + JsRuntime::init_v8(options.v8_platform.take(), true); + JsRuntimeForSnapshot(JsRuntime::new_inner( + options, + true, + runtime_snapshot_options.snapshot_module_load_cb, + )) + } + + /// Takes a snapshot and consumes the runtime. + /// + /// `Error` can usually be downcast to `JsError`. + pub fn snapshot(mut self) -> v8::StartupData { + // Ensure there are no live inspectors to prevent crashes. + self.inner.prepare_for_cleanup(); + + // Set the context to be snapshot's default context + { + let context = self.global_context(); + let mut scope = self.handle_scope(); + let local_context = v8::Local::new(&mut scope, context); + scope.set_default_context(local_context); + } + + // Serialize the module map and store its data in the snapshot. + { + let snapshotted_data = { + // `self.module_map` points directly to the v8 isolate data slot, which + // we must explicitly drop before destroying the isolate. 
We have to + // take and drop this `Rc` before that. + let module_map_rc = std::mem::take(&mut self.module_map); + let module_map = module_map_rc.borrow(); + module_map.serialize_for_snapshotting(&mut self.handle_scope()) + }; + + let context = self.global_context(); + let mut scope = self.handle_scope(); + snapshot_util::set_snapshotted_data( + &mut scope, + context, + snapshotted_data, + ); + } + + self + .0 + .inner + .prepare_for_snapshot() + .create_blob(v8::FunctionCodeHandling::Keep) + .unwrap() } } @@ -1364,7 +1549,7 @@ fn get_stalled_top_level_await_message_for_module( scope: &mut v8::HandleScope, module_id: ModuleId, ) -> Vec> { - let module_map = JsRuntime::module_map(scope); + let module_map = JsRuntime::module_map_from(scope); let module_map = module_map.borrow(); let module_handle = module_map.handles.get(module_id).unwrap(); @@ -1380,7 +1565,7 @@ fn get_stalled_top_level_await_message_for_module( fn find_stalled_top_level_await( scope: &mut v8::HandleScope, ) -> Vec> { - let module_map = JsRuntime::module_map(scope); + let module_map = JsRuntime::module_map_from(scope); let module_map = module_map.borrow(); // First check if that's root module @@ -1428,21 +1613,14 @@ impl EventLoopPendingState { module_map: &ModuleMap, ) -> EventLoopPendingState { let mut num_unrefed_ops = 0; - - if state.known_realms.len() == 1 { - let realm = state.global_realm.as_ref().unwrap(); - num_unrefed_ops += realm.state(scope).borrow().unrefed_ops.len(); - } else { - for weak_context in &state.known_realms { - if let Some(context) = weak_context.to_local(scope) { - let realm = JsRealmLocal::new(context); - num_unrefed_ops += realm.state(scope).borrow().unrefed_ops.len(); - } - } + let mut num_pending_ops = 0; + for realm in &state.known_realms { + num_unrefed_ops += realm.num_unrefed_ops(); + num_pending_ops += realm.num_pending_ops(); } EventLoopPendingState { - has_pending_refed_ops: state.pending_ops.len() > num_unrefed_ops, + has_pending_refed_ops: num_pending_ops > 
num_unrefed_ops, has_pending_dyn_imports: module_map.has_pending_dynamic_imports(), has_pending_dyn_module_evaluation: !state .pending_dyn_mod_evaluate @@ -1486,7 +1664,7 @@ impl JsRuntimeState { /// after initiating new dynamic import load. pub fn notify_new_dynamic_import(&mut self) { // Notify event loop to poll again soon. - self.waker.wake(); + self.op_state.borrow().waker.wake(); } } @@ -1495,9 +1673,13 @@ pub(crate) fn exception_to_err_result( exception: v8::Local, in_promise: bool, ) -> Result { - let state_rc = JsRuntime::state(scope); + let state_rc = JsRuntime::state_from(scope); let was_terminating_execution = scope.is_execution_terminating(); + // Disable running microtasks for a moment. When upgrading to V8 v11.4 + // we discovered that canceling termination here will cause the queued + // microtasks to run which breaks some tests. + scope.set_microtasks_policy(v8::MicrotasksPolicy::Explicit); // If TerminateExecution was called, cancel isolate termination so that the // exception can be created. Note that `scope.is_execution_terminating()` may // have returned false if TerminateExecution was indeed called but there was @@ -1509,19 +1691,14 @@ pub(crate) fn exception_to_err_result( // to use the exception that was passed to it rather than the exception that // was passed to this function. let state = state_rc.borrow(); - exception = state - .dispatched_exceptions - .back() - .map(|exception| v8::Local::new(scope, exception.clone())) - .unwrap_or_else(|| { - // Maybe make a new exception object. 
- if was_terminating_execution && exception.is_null_or_undefined() { - let message = v8::String::new(scope, "execution terminated").unwrap(); - v8::Exception::error(scope, message) - } else { - exception - } - }); + exception = if let Some(exception) = &state.dispatched_exception { + v8::Local::new(scope, exception.clone()) + } else if was_terminating_execution && exception.is_null_or_undefined() { + let message = v8::String::new(scope, "execution terminated").unwrap(); + v8::Exception::error(scope, message) + } else { + exception + }; } let mut js_error = JsError::from_v8_exception(scope, exception); @@ -1536,6 +1713,7 @@ pub(crate) fn exception_to_err_result( // Resume exception termination. scope.terminate_execution(); } + scope.set_microtasks_policy(v8::MicrotasksPolicy::Auto); Err(js_error.into()) } @@ -1546,7 +1724,7 @@ impl JsRuntime { &mut self, id: ModuleId, ) -> Result<(), v8::Global> { - let module_map_rc = Self::module_map(self.v8_isolate()); + let module_map_rc = self.module_map.clone(); let scope = &mut self.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); @@ -1579,9 +1757,8 @@ impl JsRuntime { load_id: ModuleLoadId, id: ModuleId, ) -> Result<(), Error> { - let module_map_rc = Self::module_map(self.v8_isolate()); - - let module_handle = module_map_rc + let module_handle = self + .module_map .borrow() .get_handle(id) .expect("ModuleInfo not found"); @@ -1608,9 +1785,9 @@ impl JsRuntime { // For more details see: // https://github.com/denoland/deno/issues/4908 // https://v8.dev/features/top-level-await#module-execution-order - let global_realm = self.state.borrow_mut().global_realm.clone().unwrap(); - let scope = - &mut global_realm.handle_scope(self.v8_isolate.as_mut().unwrap()); + let global_realm = + self.inner.state.borrow_mut().global_realm.clone().unwrap(); + let scope = &mut global_realm.handle_scope(&mut self.inner.v8_isolate); let tc_scope = &mut v8::TryCatch::new(scope); let module = v8::Local::new(tc_scope, &module_handle); let 
maybe_value = module.evaluate(tc_scope); @@ -1638,6 +1815,7 @@ impl JsRuntime { }; self + .inner .state .borrow_mut() .pending_dyn_mod_evaluate @@ -1669,8 +1847,8 @@ impl JsRuntime { id: ModuleId, ) -> oneshot::Receiver> { let global_realm = self.global_realm(); - let state_rc = self.state.clone(); - let module_map_rc = Self::module_map(self.v8_isolate()); + let state_rc = self.inner.state.clone(); + let module_map_rc = self.module_map.clone(); let scope = &mut self.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); @@ -1729,7 +1907,7 @@ impl JsRuntime { status = module.get_status(); let has_dispatched_exception = - !state_rc.borrow_mut().dispatched_exceptions.is_empty(); + state_rc.borrow_mut().dispatched_exception.is_some(); if has_dispatched_exception { // This will be overrided in `exception_to_err_result()`. let exception = v8::undefined(tc_scope).into(); @@ -1757,10 +1935,11 @@ impl JsRuntime { .contains(&promise_global); if !pending_rejection_was_already_handled { global_realm - .state(tc_scope) + .0 + .state() .borrow_mut() .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } } let promise_global = v8::Global::new(tc_scope, promise); @@ -1782,12 +1961,35 @@ impl JsRuntime { receiver } + /// Clear the module map, meant to be used after initializing extensions. + /// Optionally pass a list of exceptions `(old_name, new_name)` representing + /// specifiers which will be renamed and preserved in the module map. 
+ pub fn clear_module_map( + &self, + exceptions: impl Iterator, + ) { + let mut module_map = self.module_map.borrow_mut(); + let handles = exceptions + .map(|(old_name, new_name)| { + (module_map.get_handle_by_name(old_name).unwrap(), new_name) + }) + .collect::>(); + module_map.clear(); + for (handle, new_name) in handles { + module_map.inject_handle( + ModuleName::from_static(new_name), + ModuleType::JavaScript, + handle, + ) + } + } + fn dynamic_import_reject( &mut self, id: ModuleLoadId, exception: v8::Global, ) { - let module_map_rc = Self::module_map(self.v8_isolate()); + let module_map_rc = self.module_map.clone(); let scope = &mut self.handle_scope(); let resolver_handle = module_map_rc @@ -1807,8 +2009,8 @@ impl JsRuntime { } fn dynamic_import_resolve(&mut self, id: ModuleLoadId, mod_id: ModuleId) { - let state_rc = self.state.clone(); - let module_map_rc = Self::module_map(self.v8_isolate()); + let state_rc = self.inner.state.clone(); + let module_map_rc = self.module_map.clone(); let scope = &mut self.handle_scope(); let resolver_handle = module_map_rc @@ -1843,7 +2045,7 @@ impl JsRuntime { cx: &mut Context, ) -> Poll> { if self - .get_module_map() + .module_map .borrow() .preparing_dynamic_imports .is_empty() @@ -1851,10 +2053,9 @@ impl JsRuntime { return Poll::Ready(Ok(())); } - let module_map_rc = self.get_module_map().clone(); - loop { - let poll_result = module_map_rc + let poll_result = self + .module_map .borrow_mut() .preparing_dynamic_imports .poll_next_unpin(cx); @@ -1865,7 +2066,8 @@ impl JsRuntime { match prepare_result { Ok(load) => { - module_map_rc + self + .module_map .borrow_mut() .pending_dynamic_imports .push(load.into_future()); @@ -1885,19 +2087,13 @@ impl JsRuntime { } fn poll_dyn_imports(&mut self, cx: &mut Context) -> Poll> { - if self - .get_module_map() - .borrow() - .pending_dynamic_imports - .is_empty() - { + if self.module_map.borrow().pending_dynamic_imports.is_empty() { return Poll::Ready(Ok(())); } - let module_map_rc = 
self.get_module_map().clone(); - loop { - let poll_result = module_map_rc + let poll_result = self + .module_map .borrow_mut() .pending_dynamic_imports .poll_next_unpin(cx); @@ -1922,7 +2118,8 @@ impl JsRuntime { match register_result { Ok(()) => { // Keep importing until it's fully drained - module_map_rc + self + .module_map .borrow_mut() .pending_dynamic_imports .push(load.into_future()); @@ -1982,14 +2179,14 @@ impl JsRuntime { /// then another turn of event loop must be performed. fn evaluate_pending_module(&mut self) { let maybe_module_evaluation = - self.state.borrow_mut().pending_mod_evaluate.take(); + self.inner.state.borrow_mut().pending_mod_evaluate.take(); if maybe_module_evaluation.is_none() { return; } let mut module_evaluation = maybe_module_evaluation.unwrap(); - let state_rc = self.state.clone(); + let state_rc = self.inner.state.clone(); let scope = &mut self.handle_scope(); let promise_global = module_evaluation.promise.clone().unwrap(); @@ -2030,8 +2227,9 @@ impl JsRuntime { // Returns true if some dynamic import was resolved. 
fn evaluate_dyn_imports(&mut self) -> bool { - let pending = - std::mem::take(&mut self.state.borrow_mut().pending_dyn_mod_evaluate); + let pending = std::mem::take( + &mut self.inner.state.borrow_mut().pending_dyn_mod_evaluate, + ); if pending.is_empty() { return false; } @@ -2074,7 +2272,7 @@ impl JsRuntime { } } } - self.state.borrow_mut().pending_dyn_mod_evaluate = still_pending; + self.inner.state.borrow_mut().pending_dyn_mod_evaluate = still_pending; resolved_any } @@ -2090,7 +2288,7 @@ impl JsRuntime { specifier: &ModuleSpecifier, code: Option, ) -> Result { - let module_map_rc = Self::module_map(self.v8_isolate()); + let module_map_rc = self.module_map.clone(); if let Some(code) = code { let specifier = specifier.as_str().to_owned().into(); let scope = &mut self.handle_scope(); @@ -2145,7 +2343,7 @@ impl JsRuntime { specifier: &ModuleSpecifier, code: Option, ) -> Result { - let module_map_rc = Self::module_map(self.v8_isolate()); + let module_map_rc = self.module_map.clone(); if let Some(code) = code { let specifier = specifier.as_str().to_owned().into(); let scope = &mut self.handle_scope(); @@ -2189,55 +2387,25 @@ impl JsRuntime { } fn check_promise_rejections(&mut self) -> Result<(), Error> { - let state = self.state.clone(); + let state = self.inner.state.clone(); let scope = &mut self.handle_scope(); let state = state.borrow(); - for weak_context in &state.known_realms { - if let Some(context) = weak_context.to_local(scope) { - JsRealmLocal::new(context).check_promise_rejections(scope)?; - } + for realm in &state.known_realms { + realm.check_promise_rejections(scope)?; } Ok(()) } // Polls pending ops and then runs `Deno.core.eventLoopTick` callback. fn do_js_event_loop_tick(&mut self, cx: &mut Context) -> Result<(), Error> { - // We have a specialized implementation of this method for the common case - // where there is only one realm. 
- let num_realms = self.state.borrow().known_realms.len(); - if num_realms == 1 { - return self.do_single_realm_js_event_loop_tick(cx); - } - - // `responses_per_realm[idx]` is a vector containing the promise ID and - // response for all promises in realm `self.state.known_realms[idx]`. - let mut responses_per_realm: Vec> = - (0..num_realms).map(|_| vec![]).collect(); - - // Now handle actual ops. - { - let mut state = self.state.borrow_mut(); - state.have_unpolled_ops = false; - - while let Poll::Ready(Some(item)) = state.pending_ops.poll_next_unpin(cx) - { - let (realm_idx, promise_id, op_id, resp) = item; - state.op_state.borrow().tracker.track_async_completed(op_id); - responses_per_realm[realm_idx].push((promise_id, resp)); - } - } - // Handle responses for each realm. - let isolate = self.v8_isolate.as_mut().unwrap(); - for (realm_idx, responses) in responses_per_realm.into_iter().enumerate() { - let realm = { - let context = self.state.borrow().known_realms[realm_idx] - .to_global(isolate) - .unwrap(); - JsRealm::new(context) - }; - let context_state_rc = realm.state(isolate); - let mut context_state = context_state_rc.borrow_mut(); + let state = self.inner.state.clone(); + let isolate = &mut self.inner.v8_isolate; + let realm_count = state.borrow().known_realms.len(); + for realm_idx in 0..realm_count { + let realm = state.borrow().known_realms.get(realm_idx).unwrap().clone(); + let context_state = realm.state(); + let mut context_state = context_state.borrow_mut(); let scope = &mut realm.handle_scope(isolate); // We return async responses to JS in unbounded batches (may change), @@ -2251,9 +2419,23 @@ impl JsRuntime { // This can handle 15 promises futures in a single batch without heap // allocations. 
let mut args: SmallVec<[v8::Local; 32]> = - SmallVec::with_capacity(responses.len() * 2 + 2); + SmallVec::with_capacity(32); - for (promise_id, mut resp) in responses { + loop { + let item = { + let next = std::pin::pin!(context_state.pending_ops.join_next()); + let Poll::Ready(Some(item)) = next.poll(cx) else { + break; + }; + item + }; + let (promise_id, op_id, mut resp) = item.unwrap().into_inner(); + state + .borrow() + .op_state + .borrow() + .tracker + .track_async_completed(op_id); context_state.unrefed_ops.remove(&promise_id); args.push(v8::Integer::new(scope, promise_id).into()); args.push(match resp.to_v8(scope) { @@ -2265,7 +2447,7 @@ impl JsRuntime { } let has_tick_scheduled = - v8::Boolean::new(scope, self.state.borrow().has_tick_scheduled); + v8::Boolean::new(scope, self.inner.state.borrow().has_tick_scheduled); args.push(has_tick_scheduled.into()); let js_event_loop_tick_cb_handle = @@ -2289,150 +2471,147 @@ impl JsRuntime { Ok(()) } +} - fn do_single_realm_js_event_loop_tick( - &mut self, - cx: &mut Context, - ) -> Result<(), Error> { - let isolate = self.v8_isolate.as_mut().unwrap(); - let scope = &mut self - .state - .borrow() - .global_realm - .as_ref() - .unwrap() - .handle_scope(isolate); +#[inline] +pub fn queue_fast_async_op( + ctx: &OpCtx, + promise_id: PromiseId, + op: impl Future> + 'static, +) { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; + let fut = op + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(); + // SAFETY: this this is guaranteed to be running on a current-thread executor + ctx.context_state.borrow_mut().pending_ops.spawn(unsafe { + crate::task::MaskFutureAsSend::new(OpCall::pending(ctx, promise_id, fut)) + }); +} - // We return async responses to JS in unbounded batches (may change), - // each batch is a flat vector of tuples: - // `[promise_id1, op_result1, promise_id2, op_result2, ...]` - // promise_id is a 
simple integer, op_result is an ops::OpResult - // which contains a value OR an error, encoded as a tuple. - // This batch is received in JS via the special `arguments` variable - // and then each tuple is used to resolve or reject promises - // - // This can handle 15 promises futures in a single batch without heap - // allocations. - let mut args: SmallVec<[v8::Local; 32]> = SmallVec::new(); +#[inline] +pub fn map_async_op1( + ctx: &OpCtx, + op: impl Future> + 'static, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; - // Now handle actual ops. - { - let mut state = self.state.borrow_mut(); - state.have_unpolled_ops = false; + let fut = op + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(); + MaybeDone::Future(fut) +} - let realm_state_rc = state.global_realm.as_ref().unwrap().state(scope); - let mut realm_state = realm_state_rc.borrow_mut(); +#[inline] +pub fn map_async_op2( + ctx: &OpCtx, + op: impl Future + 'static, +) -> MaybeDone>>> { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); - while let Poll::Ready(Some(item)) = state.pending_ops.poll_next_unpin(cx) - { - let (realm_idx, promise_id, op_id, mut resp) = item; - debug_assert_eq!( - state.known_realms[realm_idx], - state.global_realm.as_ref().unwrap().context() - ); - realm_state.unrefed_ops.remove(&promise_id); - state.op_state.borrow().tracker.track_async_completed(op_id); - args.push(v8::Integer::new(scope, promise_id).into()); - args.push(match resp.to_v8(scope) { - Ok(v) => v, - Err(e) => OpResult::Err(OpError::new(&|_| "TypeError", e.into())) - .to_v8(scope) - .unwrap(), - }); - } - } + let fut = op.map(|result| OpResult::Ok(result.into())).boxed_local(); + MaybeDone::Future(fut) +} - let has_tick_scheduled = - v8::Boolean::new(scope, self.state.borrow().has_tick_scheduled); - args.push(has_tick_scheduled.into()); +#[inline] +pub fn 
map_async_op3( + ctx: &OpCtx, + op: Result> + 'static, Error>, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; - let js_event_loop_tick_cb_handle = { - let state = self.state.borrow_mut(); - let realm_state_rc = state.global_realm.as_ref().unwrap().state(scope); - let handle = realm_state_rc - .borrow() - .js_event_loop_tick_cb - .clone() - .unwrap(); - handle - }; - let tc_scope = &mut v8::TryCatch::new(scope); - let js_event_loop_tick_cb = js_event_loop_tick_cb_handle.open(tc_scope); - let this = v8::undefined(tc_scope).into(); - js_event_loop_tick_cb.call(tc_scope, this, args.as_slice()); - - if let Some(exception) = tc_scope.exception() { - return exception_to_err_result(tc_scope, exception, false); - } - - if tc_scope.has_terminated() || tc_scope.is_execution_terminating() { - return Ok(()); - } - - Ok(()) + match op { + Err(err) => MaybeDone::Done(OpResult::Err(OpError::new(get_class, err))), + Ok(fut) => MaybeDone::Future( + fut + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(), + ), } } #[inline] -pub fn queue_fast_async_op( +pub fn map_async_op4( ctx: &OpCtx, - op: impl Future + 'static, -) { - let runtime_state = match ctx.runtime_state.upgrade() { - Some(rc_state) => rc_state, - // atleast 1 Rc is held by the JsRuntime. 
- None => unreachable!(), + op: Result + 'static, Error>, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn }; - let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(OpCall::lazy(op)); - state.have_unpolled_ops = true; + match op { + Err(err) => MaybeDone::Done(OpResult::Err(OpError::new(get_class, err))), + Ok(fut) => MaybeDone::Future( + fut.map(|result| OpResult::Ok(result.into())).boxed_local(), + ), + } } -#[inline] pub fn queue_async_op<'s>( ctx: &OpCtx, scope: &'s mut v8::HandleScope, deferred: bool, - op: impl Future + 'static, + promise_id: PromiseId, + mut op: MaybeDone>>>, ) -> Option> { - let runtime_state = match ctx.runtime_state.upgrade() { - Some(rc_state) => rc_state, - // atleast 1 Rc is held by the JsRuntime. - None => unreachable!(), - }; - // An op's realm (as given by `OpCtx::realm_idx`) must match the realm in // which it is invoked. Otherwise, we might have cross-realm object exposure. // deno_core doesn't currently support such exposure, even though embedders // can cause them, so we panic in debug mode (since the check is expensive). - debug_assert_eq!( - runtime_state.borrow().known_realms[ctx.realm_idx].to_local(scope), - Some(scope.get_current_context()) - ); + // TODO(mmastrac): Restore this + // debug_assert_eq!( + // runtime_state.borrow().context(ctx.realm_idx as usize, scope), + // Some(scope.get_current_context()) + // ); - match OpCall::eager(op) { - // If the result is ready we'll just return it straight to the caller, so - // we don't have to invoke a JS callback to respond. // This works under the - // assumption that `()` return value is serialized as `null`. 
- EagerPollResult::Ready((_, _, op_id, mut resp)) if !deferred => { - let resp = resp.to_v8(scope).unwrap(); - ctx.state.borrow_mut().tracker.track_async_completed(op_id); - return Some(resp); - } - EagerPollResult::Ready(op) => { - let ready = OpCall::ready(op); - let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(ready); - state.have_unpolled_ops = true; - } - EagerPollResult::Pending(op) => { - let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(op); - state.have_unpolled_ops = true; - } - } + // All ops are polled immediately + let waker = noop_waker(); + let mut cx = Context::from_waker(&waker); + // Note that MaybeDone returns () from the future + let op_call = match op.poll_unpin(&mut cx) { + Poll::Pending => { + let MaybeDone::Future(fut) = op else { + unreachable!() + }; + OpCall::pending(ctx, promise_id, fut) + } + Poll::Ready(_) => { + let mut op_result = Pin::new(&mut op).take_output().unwrap(); + // If the op is ready and is not marked as deferred we can immediately return + // the result. + if !deferred { + ctx.state.borrow_mut().tracker.track_async_completed(ctx.id); + return Some(op_result.to_v8(scope).unwrap()); + } + + OpCall::ready(ctx, promise_id, op_result) + } + }; + + // Otherwise we will push it to the `pending_ops` and let it be polled again + // or resolved on the next tick of the event loop. 
+ ctx + .context_state + .borrow_mut() + .pending_ops + // SAFETY: this this is guaranteed to be running on a current-thread executor + .spawn(unsafe { crate::task::MaskFutureAsSend::new(op_call) }); None } @@ -2452,8 +2631,6 @@ pub mod tests { use crate::modules::SymbolicModule; use crate::ZeroCopyBuf; use deno_ops::op; - use futures::future::lazy; - use std::ops::FnOnce; use std::pin::Pin; use std::rc::Rc; use std::sync::atomic::AtomicUsize; @@ -2465,13 +2642,6 @@ pub mod tests { pub use crate::*; } - pub fn run_in_task(f: F) - where - F: FnOnce(&mut Context) + 'static, - { - futures::executor::block_on(lazy(move |cx| f(cx))); - } - #[derive(Copy, Clone)] pub enum Mode { Async, @@ -2556,14 +2726,14 @@ pub mod tests { (runtime, dispatch_count) } - #[test] - fn test_ref_unref_ops() { + #[tokio::test] + async fn test_ref_unref_ops() { let (mut runtime, _dispatch_count) = setup(Mode::AsyncDeferred); runtime .execute_script_static( "filename.js", r#" - + var promiseIdSymbol = Symbol.for("Deno.core.internalPromiseId"); var p1 = Deno.core.opAsync("op_test", 42); var p2 = Deno.core.opAsync("op_test", 42); @@ -2572,10 +2742,8 @@ pub mod tests { .unwrap(); { let realm = runtime.global_realm(); - let isolate = runtime.v8_isolate(); - let state_rc = JsRuntime::state(isolate); - assert_eq!(state_rc.borrow().pending_ops.len(), 2); - assert_eq!(realm.state(isolate).borrow().unrefed_ops.len(), 0); + assert_eq!(realm.num_pending_ops(), 2); + assert_eq!(realm.num_unrefed_ops(), 0); } runtime .execute_script_static( @@ -2588,10 +2756,8 @@ pub mod tests { .unwrap(); { let realm = runtime.global_realm(); - let isolate = runtime.v8_isolate(); - let state_rc = JsRuntime::state(isolate); - assert_eq!(state_rc.borrow().pending_ops.len(), 2); - assert_eq!(realm.state(isolate).borrow().unrefed_ops.len(), 2); + assert_eq!(realm.num_pending_ops(), 2); + assert_eq!(realm.num_unrefed_ops(), 2); } runtime .execute_script_static( @@ -2604,10 +2770,8 @@ pub mod tests { .unwrap(); { let realm = 
runtime.global_realm(); - let isolate = runtime.v8_isolate(); - let state_rc = JsRuntime::state(isolate); - assert_eq!(state_rc.borrow().pending_ops.len(), 2); - assert_eq!(realm.state(isolate).borrow().unrefed_ops.len(), 0); + assert_eq!(realm.num_pending_ops(), 2); + assert_eq!(realm.num_unrefed_ops(), 0); } } @@ -2619,7 +2783,7 @@ pub mod tests { "filename.js", r#" let control = 42; - + Deno.core.opAsync("op_test", control); async function main() { Deno.core.opAsync("op_test", control); @@ -2638,7 +2802,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + const p = Deno.core.opAsync("op_test", 42); if (p[Symbol.for("Deno.core.internalPromiseId")] == undefined) { throw new Error("missing id on returned promise"); @@ -2655,7 +2819,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + Deno.core.opAsync("op_test"); "#, ) @@ -2670,9 +2834,9 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + const { op_test } = Deno.core.ensureFastOps(); let zero_copy_a = new Uint8Array([0]); - Deno.core.opAsync2("op_test", null, zero_copy_a); + op_test(null, zero_copy_a); "#, ) .unwrap(); @@ -2706,7 +2870,7 @@ pub mod tests { #[tokio::test] async fn test_poll_value() { let mut runtime = JsRuntime::new(Default::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { let value_global = runtime .execute_script_static("a.js", "Promise.resolve(1 + 2)") .unwrap(); @@ -2745,7 +2909,8 @@ pub mod tests { .unwrap(); let v = runtime.poll_value(&value_global, cx); matches!(v, Poll::Ready(Err(e)) if e.to_string() == "Promise resolution is still pending but the event loop has already resolved."); - }); + Poll::Ready(()) + }).await; } #[tokio::test] @@ -2903,10 +3068,10 @@ pub mod tests { assert_eq!(frame.column_number, Some(12)); } - #[test] - fn test_encode_decode() { + #[tokio::test] + async fn test_encode_decode() { let (mut runtime, _dispatch_count) = setup(Mode::Async); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script( 
"encode_decode_test.js", @@ -2917,13 +3082,15 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } - }); + Poll::Ready(()) + }) + .await; } - #[test] - fn test_serialize_deserialize() { + #[tokio::test] + async fn test_serialize_deserialize() { let (mut runtime, _dispatch_count) = setup(Mode::Async); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script( "serialize_deserialize_test.js", @@ -2933,11 +3100,13 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } - }); + Poll::Ready(()) + }) + .await; } - #[test] - fn test_error_builder() { + #[tokio::test] + async fn test_error_builder() { #[op] fn op_err() -> Result<(), Error> { Err(custom_error("DOMExceptionOperationError", "abc")) @@ -2953,7 +3122,7 @@ pub mod tests { get_error_class_fn: Some(&get_error_class_name), ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "error_builder_test.js", @@ -2963,16 +3132,29 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } + Poll::Ready(()) + }) + .await; + } + + /// Ensure that putting the inspector into OpState doesn't cause crashes. The only valid place we currently allow + /// the inspector to be stashed without cleanup is the OpState, and this should not actually cause crashes. 
+ #[test] + fn inspector() { + let mut runtime = JsRuntime::new(RuntimeOptions { + inspector: true, + ..Default::default() }); + // This was causing a crash + runtime.op_state().borrow_mut().put(runtime.inspector()); + runtime.execute_script_static("check.js", "null").unwrap(); } #[test] fn will_snapshot() { let snapshot = { - let mut runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); + let mut runtime = + JsRuntimeForSnapshot::new(Default::default(), Default::default()); runtime.execute_script_static("a.js", "a = 1 + 2").unwrap(); runtime.snapshot() }; @@ -2990,10 +3172,8 @@ pub mod tests { #[test] fn will_snapshot2() { let startup_data = { - let mut runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); + let mut runtime = + JsRuntimeForSnapshot::new(Default::default(), Default::default()); runtime .execute_script_static("a.js", "let a = 1 + 2") .unwrap(); @@ -3001,11 +3181,13 @@ pub mod tests { }; let snapshot = Snapshot::JustCreated(startup_data); - let mut runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - startup_snapshot: Some(snapshot), - ..Default::default() - }); + let mut runtime = JsRuntimeForSnapshot::new( + RuntimeOptions { + startup_snapshot: Some(snapshot), + ..Default::default() + }, + Default::default(), + ); let startup_data = { runtime @@ -3033,10 +3215,8 @@ pub mod tests { #[test] fn test_snapshot_callbacks() { let snapshot = { - let mut runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); + let mut runtime = + JsRuntimeForSnapshot::new(Default::default(), Default::default()); runtime .execute_script_static( "a.js", @@ -3070,10 +3250,8 @@ pub mod tests { #[test] fn test_from_boxed_snapshot() { let snapshot = { - let mut runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); + let mut runtime = + JsRuntimeForSnapshot::new(Default::default(), Default::default()); 
runtime.execute_script_static("a.js", "a = 1 + 2").unwrap(); let snap: &[u8] = &runtime.snapshot(); Vec::from(snap).into_boxed_slice() @@ -3177,7 +3355,7 @@ pub mod tests { #[test] fn test_heap_limits() { let create_params = - v8::Isolate::create_params().heap_limits(0, 3 * 1024 * 1024); + v8::Isolate::create_params().heap_limits(0, 5 * 1024 * 1024); let mut runtime = JsRuntime::new(RuntimeOptions { create_params: Some(create_params), ..Default::default() @@ -3221,7 +3399,7 @@ pub mod tests { #[test] fn test_heap_limit_cb_multiple() { let create_params = - v8::Isolate::create_params().heap_limits(0, 3 * 1024 * 1024); + v8::Isolate::create_params().heap_limits(0, 5 * 1024 * 1024); let mut runtime = JsRuntime::new(RuntimeOptions { create_params: Some(create_params), ..Default::default() @@ -3333,8 +3511,7 @@ pub mod tests { } fn assert_module_map(runtime: &mut JsRuntime, modules: &Vec) { - let module_map_rc = runtime.get_module_map(); - let module_map = module_map_rc.borrow(); + let module_map = runtime.module_map.borrow(); assert_eq!(module_map.handles.len(), modules.len()); assert_eq!(module_map.info.len(), modules.len()); assert_eq!( @@ -3366,14 +3543,16 @@ pub mod tests { } let loader = Rc::new(ModsLoader::default()); - let mut runtime = JsRuntime::new(RuntimeOptions { - module_loader: Some(loader.clone()), - will_snapshot: true, - extensions: vec![Extension::builder("text_ext") - .ops(vec![op_test::decl()]) - .build()], - ..Default::default() - }); + let mut runtime = JsRuntimeForSnapshot::new( + RuntimeOptions { + module_loader: Some(loader.clone()), + extensions: vec![Extension::builder("text_ext") + .ops(vec![op_test::decl()]) + .build()], + ..Default::default() + }, + Default::default(), + ); let specifier = crate::resolve_url("file:///0.js").unwrap(); let source_code = @@ -3402,15 +3581,17 @@ pub mod tests { let snapshot = runtime.snapshot(); - let mut runtime2 = JsRuntime::new(RuntimeOptions { - module_loader: Some(loader.clone()), - will_snapshot: true, - 
startup_snapshot: Some(Snapshot::JustCreated(snapshot)), - extensions: vec![Extension::builder("text_ext") - .ops(vec![op_test::decl()]) - .build()], - ..Default::default() - }); + let mut runtime2 = JsRuntimeForSnapshot::new( + RuntimeOptions { + module_loader: Some(loader.clone()), + startup_snapshot: Some(Snapshot::JustCreated(snapshot)), + extensions: vec![Extension::builder("text_ext") + .ops(vec![op_test::decl()]) + .build()], + ..Default::default() + }, + Default::default(), + ); assert_module_map(&mut runtime2, &modules); @@ -3488,10 +3669,10 @@ main(); assert_eq!(result.unwrap_err().to_string(), expected_error); } - #[test] - fn test_error_async_stack() { + #[tokio::test] + async fn test_error_async_stack() { let mut runtime = JsRuntime::new(RuntimeOptions::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "error_async_stack.js", @@ -3522,11 +3703,13 @@ main(); } _ => panic!(), }; + Poll::Ready(()) }) + .await; } - #[test] - fn test_error_context() { + #[tokio::test] + async fn test_error_context() { use anyhow::anyhow; #[op] @@ -3545,7 +3728,7 @@ main(); ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "test_error_context_sync.js", @@ -3588,13 +3771,14 @@ if (errMessage !== "higher-level sync error: original sync error") { Poll::Ready(Err(err)) => panic!("{err:?}"), _ => panic!(), } - }) + Poll::Ready(()) + }).await; } - #[test] - fn test_pump_message_loop() { + #[tokio::test] + async fn test_pump_message_loop() { let mut runtime = JsRuntime::new(RuntimeOptions::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "pump_message_loop.js", @@ -3639,22 +3823,9 @@ assertEquals(1, notify_return_value); r#"assertEquals(globalThis.resolved, true);"#, ) .unwrap(); + Poll::Ready(()) }) - } - - #[test] - fn test_core_js_stack_frame() { - let mut runtime = JsRuntime::new(RuntimeOptions::default()); - // Call non-existent op so we 
get error from `core.js` - let error = runtime - .execute_script_static( - "core_js_stack_frame.js", - "Deno.core.opAsync('non_existent');", - ) - .unwrap_err(); - let error_string = error.to_string(); - // Test that the script specifier is a URL: `ext:`. - assert!(error_string.contains("ext:core/01_core.js")); + .await; } #[test] @@ -3832,7 +4003,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .execute_script_static( "macrotasks_and_nextticks.js", r#" - + (async function () { const results = []; Deno.core.setMacrotaskCallback(() => { @@ -3920,8 +4091,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { assert!(matches!(runtime.poll_event_loop(cx, false), Poll::Pending)); assert_eq!(awoken_times.swap(0, Ordering::Relaxed), 1); - let state_rc = JsRuntime::state(runtime.v8_isolate()); - state_rc.borrow_mut().has_tick_scheduled = false; + runtime.inner.state.borrow_mut().has_tick_scheduled = false; assert!(matches!( runtime.poll_event_loop(cx, false), Poll::Ready(Ok(())) @@ -3992,6 +4162,23 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .contains("JavaScript execution has been terminated")); } + #[tokio::test] + async fn test_unhandled_rejection_order() { + let mut runtime = JsRuntime::new(Default::default()); + runtime + .execute_script_static( + "", + r#" + for (let i = 0; i < 100; i++) { + Promise.reject(i); + } + "#, + ) + .unwrap(); + let err = runtime.run_event_loop(false).await.unwrap_err(); + assert_eq!(err.to_string(), "Uncaught (in promise) 0"); + } + #[tokio::test] async fn test_set_promise_reject_callback() { static PROMISE_REJECT: AtomicUsize = AtomicUsize::new(0); @@ -4070,7 +4257,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { "", format!( r#" - + globalThis.rejectValue = undefined; Deno.core.setPromiseRejectCallback((_type, _promise, reason) => {{ globalThis.rejectValue = `{realm_name}/${{reason}}`; @@ -4223,11 +4410,12 @@ 
Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { extensions: vec![test_ext::init_ops()], ..Default::default() }); - let r = runtime + let err = runtime .execute_script_static("test.js", "Deno.core.ops.op_foo()") - .unwrap(); - let scope = &mut runtime.handle_scope(); - assert!(r.open(scope).is_undefined()); + .unwrap_err(); + assert!(err + .to_string() + .contains("TypeError: Deno.core.ops.op_foo is not a function")); } #[test] @@ -4288,7 +4476,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { let sum = Deno.core.ops.op_sum_take(w32.subarray(0, 2)); return false; } catch(e) { - return e.message.includes('invalid type, expected: detachable'); + return e.message.includes('invalid type; expected: detachable'); } }); if (!assertWasmThrow()) { @@ -4327,7 +4515,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { if (Deno.core.ops.op_foo() !== 42) { throw new Error("Exptected op_foo() === 42"); } - if (Deno.core.ops.op_bar() !== undefined) { + if (typeof Deno.core.ops.op_bar !== "undefined") { throw new Error("Expected op_bar to be disabled") } "#, @@ -4384,10 +4572,8 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { #[test] fn js_realm_init_snapshot() { let snapshot = { - let runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); + let runtime = + JsRuntimeForSnapshot::new(Default::default(), Default::default()); let snap: &[u8] = &runtime.snapshot(); Vec::from(snap).into_boxed_slice() }; @@ -4397,13 +4583,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { Ok(String::from("Test")) } - deno_core::extension!( - test_ext, - ops = [op_test], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, - ); + deno_core::extension!(test_ext, ops = [op_test]); let mut runtime = JsRuntime::new(RuntimeOptions { startup_snapshot: Some(Snapshot::Boxed(snapshot)), extensions: 
vec![test_ext::init_ops()], @@ -4507,7 +4687,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + (async function () { const buf = await Deno.core.opAsync("op_test", false); let err; @@ -4537,6 +4717,71 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { } } + #[ignore] + #[tokio::test] + async fn js_realm_gc() { + static INVOKE_COUNT: AtomicUsize = AtomicUsize::new(0); + struct PendingFuture {} + + impl Future for PendingFuture { + type Output = (); + fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<()> { + Poll::Pending + } + } + + impl Drop for PendingFuture { + fn drop(&mut self) { + assert_eq!(INVOKE_COUNT.fetch_sub(1, Ordering::SeqCst), 1); + } + } + + // Never resolves. + #[op] + async fn op_pending() { + assert_eq!(INVOKE_COUNT.fetch_add(1, Ordering::SeqCst), 0); + PendingFuture {}.await + } + + deno_core::extension!(test_ext, ops = [op_pending]); + let mut runtime = JsRuntime::new(RuntimeOptions { + extensions: vec![test_ext::init_ops()], + ..Default::default() + }); + + // Detect a drop in OpState + let opstate_drop_detect = Rc::new(()); + runtime + .op_state() + .borrow_mut() + .put(opstate_drop_detect.clone()); + assert_eq!(Rc::strong_count(&opstate_drop_detect), 2); + + let other_realm = runtime.create_realm().unwrap(); + other_realm + .execute_script( + runtime.v8_isolate(), + "future", + ModuleCode::from_static("Deno.core.opAsync('op_pending')"), + ) + .unwrap(); + while INVOKE_COUNT.load(Ordering::SeqCst) == 0 { + poll_fn(|cx: &mut Context| runtime.poll_event_loop(cx, false)) + .await + .unwrap(); + } + drop(other_realm); + while INVOKE_COUNT.load(Ordering::SeqCst) == 1 { + poll_fn(|cx| runtime.poll_event_loop(cx, false)) + .await + .unwrap(); + } + drop(runtime); + + // Make sure the OpState was dropped properly when the runtime dropped + assert_eq!(Rc::strong_count(&opstate_drop_detect), 1); + } + #[tokio::test] async fn js_realm_ref_unref_ops() { // Never 
resolves. @@ -4551,7 +4796,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { let main_realm = runtime.global_realm(); let other_realm = runtime.create_realm().unwrap(); @@ -4560,7 +4805,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + var promise = Deno.core.opAsync("op_pending"); "#, ) @@ -4570,7 +4815,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + var promise = Deno.core.opAsync("op_pending"); "#, ) @@ -4603,7 +4848,9 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.poll_event_loop(cx, false), Poll::Ready(Ok(())) )); - }); + Poll::Ready(()) + }) + .await; } #[test] @@ -4625,98 +4872,63 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .is_ok()); } + #[cfg(debug_assertions)] #[test] - fn test_resizable_array_buffer() { - // Verify that "resizable ArrayBuffer" is disabled - let mut runtime = JsRuntime::new(Default::default()); - runtime - .execute_script_static( - "test_rab.js", - r#"const a = new ArrayBuffer(100, {maxByteLength: 200}); - if (a.byteLength !== 100) { - throw new Error('wrong byte length'); - } - if (a.maxByteLength !== undefined) { - throw new Error("ArrayBuffer shouldn't have maxByteLength"); - } - "#, - ) - .unwrap(); - } + #[should_panic(expected = "Found ops with duplicate names:")] + fn duplicate_op_names() { + mod a { + use super::*; - #[test] - fn test_non_existent_async_op_error() { - // Verify that "resizable ArrayBuffer" is disabled - let mut runtime = JsRuntime::new(Default::default()); - let err = runtime - .execute_script_static( - "test_rab.js", - r#"Deno.core.opAsync("this_op_doesnt_exist");"#, - ) - .unwrap_err(); - assert!(err - .to_string() - .contains("this_op_doesnt_exist is not a registered op")); - } - - #[tokio::test] - async fn 
cant_load_internal_module_when_snapshot_is_loaded_and_not_snapshotting( - ) { - #[derive(Default)] - struct ModsLoader; - - impl ModuleLoader for ModsLoader { - fn resolve( - &self, - specifier: &str, - referrer: &str, - _kind: ResolutionKind, - ) -> Result { - assert_eq!(specifier, "file:///main.js"); - assert_eq!(referrer, "."); - let s = crate::resolve_import(specifier, referrer).unwrap(); - Ok(s) - } - - fn load( - &self, - _module_specifier: &ModuleSpecifier, - _maybe_referrer: Option<&ModuleSpecifier>, - _is_dyn_import: bool, - ) -> Pin> { - let code = r#" - // This module doesn't really exist, just verifying that we'll get - // an error when specifier starts with "ext:". - import { core } from "ext:core.js"; - "#; - - async move { Ok(ModuleSource::for_test(code, "file:///main.js")) } - .boxed_local() + #[op] + fn op_test() -> Result { + Ok(String::from("Test")) } } - let snapshot = { - let runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); - let snap: &[u8] = &runtime.snapshot(); - Vec::from(snap).into_boxed_slice() - }; + #[op] + fn op_test() -> Result { + Ok(String::from("Test")) + } - let mut runtime2 = JsRuntime::new(RuntimeOptions { - module_loader: Some(Rc::new(ModsLoader)), - startup_snapshot: Some(Snapshot::Boxed(snapshot)), + deno_core::extension!(test_ext, ops = [a::op_test, op_test]); + JsRuntime::new(RuntimeOptions { + extensions: vec![test_ext::init_ops()], + ..Default::default() + }); + } + + #[test] + fn ops_in_js_have_proper_names() { + #[op] + fn op_test_sync() -> Result { + Ok(String::from("Test")) + } + + #[op] + async fn op_test_async() -> Result { + Ok(String::from("Test")) + } + + deno_core::extension!(test_ext, ops = [op_test_sync, op_test_async]); + let mut runtime = JsRuntime::new(RuntimeOptions { + extensions: vec![test_ext::init_ops()], ..Default::default() }); - let err = runtime2 - .load_main_module(&crate::resolve_url("file:///main.js").unwrap(), None) - .await - .unwrap_err(); - 
assert_eq!( - err.to_string(), - "Cannot load extension module from external code" - ); + let src = r#" + if (Deno.core.ops.op_test_sync.name !== "op_test_sync") { + throw new Error(); + } + + if (Deno.core.ops.op_test_async.name !== "op_test_async") { + throw new Error(); + } + + const { op_test_async } = Deno.core.ensureFastOps(); + if (op_test_async.name !== "op_test_async") { + throw new Error(); + } + "#; + runtime.execute_script_static("test", src).unwrap(); } } diff --git a/core/snapshot_util.rs b/core/snapshot_util.rs index 20019f5cc7..88c2731477 100644 --- a/core/snapshot_util.rs +++ b/core/snapshot_util.rs @@ -4,9 +4,10 @@ use std::path::Path; use std::path::PathBuf; use std::time::Instant; +use crate::runtime::RuntimeSnapshotOptions; use crate::ExtModuleLoaderCb; use crate::Extension; -use crate::JsRuntime; +use crate::JsRuntimeForSnapshot; use crate::RuntimeOptions; use crate::Snapshot; @@ -21,16 +22,28 @@ pub struct CreateSnapshotOptions { pub snapshot_module_load_cb: Option, } -pub fn create_snapshot(create_snapshot_options: CreateSnapshotOptions) { +pub struct CreateSnapshotOutput { + /// Any files marked as LoadedFromFsDuringSnapshot are collected here and should be + /// printed as 'cargo:rerun-if-changed' lines from your build script. 
+ pub files_loaded_during_snapshot: Vec, +} + +#[must_use = "The files listed by create_snapshot should be printed as 'cargo:rerun-if-changed' lines"] +pub fn create_snapshot( + create_snapshot_options: CreateSnapshotOptions, +) -> CreateSnapshotOutput { let mut mark = Instant::now(); - let js_runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - startup_snapshot: create_snapshot_options.startup_snapshot, - extensions: create_snapshot_options.extensions, - snapshot_module_load_cb: create_snapshot_options.snapshot_module_load_cb, - ..Default::default() - }); + let js_runtime = JsRuntimeForSnapshot::new( + RuntimeOptions { + startup_snapshot: create_snapshot_options.startup_snapshot, + extensions: create_snapshot_options.extensions, + ..Default::default() + }, + RuntimeSnapshotOptions { + snapshot_module_load_cb: create_snapshot_options.snapshot_module_load_cb, + }, + ); println!( "JsRuntime for snapshot prepared, took {:#?} ({})", Instant::now().saturating_duration_since(mark), @@ -38,6 +51,22 @@ pub fn create_snapshot(create_snapshot_options: CreateSnapshotOptions) { ); mark = Instant::now(); + let mut files_loaded_during_snapshot = vec![]; + for source in js_runtime + .extensions() + .iter() + .flat_map(|e| vec![e.get_esm_sources(), e.get_js_sources()]) + .flatten() + .flatten() + { + use crate::ExtensionFileSourceCode; + if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = + &source.code + { + files_loaded_during_snapshot.push(path.clone()); + } + } + let snapshot = js_runtime.snapshot(); let snapshot_slice: &[u8] = &snapshot; println!( @@ -83,6 +112,9 @@ pub fn create_snapshot(create_snapshot_options: CreateSnapshotOptions) { Instant::now().saturating_duration_since(mark), create_snapshot_options.snapshot_path.display(), ); + CreateSnapshotOutput { + files_loaded_during_snapshot, + } } pub type FilterFn = Box bool>; @@ -121,33 +153,6 @@ fn data_error_to_panic(err: v8::DataError) -> ! 
{ } } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub(crate) enum SnapshotOptions { - Load, - CreateFromExisting, - Create, - None, -} - -impl SnapshotOptions { - pub fn loaded(&self) -> bool { - matches!(self, Self::Load | Self::CreateFromExisting) - } - - pub fn will_snapshot(&self) -> bool { - matches!(self, Self::Create | Self::CreateFromExisting) - } - - pub fn from_bools(snapshot_loaded: bool, will_snapshot: bool) -> Self { - match (snapshot_loaded, will_snapshot) { - (true, true) => Self::CreateFromExisting, - (false, true) => Self::Create, - (true, false) => Self::Load, - (false, false) => Self::None, - } - } -} - pub(crate) struct SnapshottedData { pub module_map_data: v8::Global, pub module_handles: Vec>, diff --git a/core/task.rs b/core/task.rs new file mode 100644 index 0000000000..3e728a08fd --- /dev/null +++ b/core/task.rs @@ -0,0 +1,135 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use core::pin::Pin; +use core::task::Context; +use core::task::Poll; +use futures::Future; +use std::marker::PhantomData; +use tokio::runtime::Handle; +use tokio::runtime::RuntimeFlavor; + +/// Equivalent to [`tokio::task::JoinHandle`]. +#[repr(transparent)] +pub struct JoinHandle { + handle: tokio::task::JoinHandle>, + _r: PhantomData, +} + +impl JoinHandle { + /// Equivalent to [`tokio::task::JoinHandle::abort`]. 
+ pub fn abort(&self) { + self.handle.abort() + } +} + +impl Future for JoinHandle { + type Output = Result; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + // SAFETY: We are sure that handle is valid here + unsafe { + let me: &mut Self = Pin::into_inner_unchecked(self); + let handle = Pin::new_unchecked(&mut me.handle); + match handle.poll(cx) { + Poll::Pending => Poll::Pending, + Poll::Ready(Ok(r)) => Poll::Ready(Ok(r.into_inner())), + Poll::Ready(Err(e)) => Poll::Ready(Err(e)), + } + } + } +} + +/// Equivalent to [`tokio::task::spawn`], but does not require the future to be [`Send`]. Must only be +/// used on a [`RuntimeFlavor::CurrentThread`] executor, though this is only checked when running with +/// debug assertions. +#[inline(always)] +pub fn spawn + 'static, R: 'static>( + f: F, +) -> JoinHandle { + debug_assert!( + Handle::current().runtime_flavor() == RuntimeFlavor::CurrentThread + ); + // SAFETY: we know this is a current-thread executor + let future = unsafe { MaskFutureAsSend::new(f) }; + JoinHandle { + handle: tokio::task::spawn(future), + _r: Default::default(), + } +} + +/// Equivalent to [`tokio::task::spawn_blocking`]. Currently a thin wrapper around the tokio API, but this +/// may change in the future. 
+#[inline(always)] +pub fn spawn_blocking< + F: (FnOnce() -> R) + Send + 'static, + R: Send + 'static, +>( + f: F, +) -> JoinHandle { + let handle = tokio::task::spawn_blocking(|| MaskResultAsSend { result: f() }); + JoinHandle { + handle, + _r: Default::default(), + } +} + +#[repr(transparent)] +#[doc(hidden)] +pub struct MaskResultAsSend { + result: R, +} + +/// SAFETY: We ensure that Send bounds are only faked when tokio is running on a current-thread executor +unsafe impl Send for MaskResultAsSend {} + +impl MaskResultAsSend { + #[inline(always)] + pub fn into_inner(self) -> R { + self.result + } +} + +#[repr(transparent)] +pub struct MaskFutureAsSend { + future: F, +} + +impl MaskFutureAsSend { + /// Mark a non-`Send` future as `Send`. This is a trick to be able to use + /// `tokio::spawn()` (which requires `Send` futures) in a current thread + /// runtime. + /// + /// # Safety + /// + /// You must ensure that the future is actually used on the same + /// thread, ie. always use current thread runtime flavor from Tokio. + #[inline(always)] + pub unsafe fn new(future: F) -> Self { + Self { future } + } +} + +// SAFETY: we are cheating here - this struct is NOT really Send, +// but we need to mark it Send so that we can use `spawn()` in Tokio. 
+unsafe impl Send for MaskFutureAsSend {} + +impl Future for MaskFutureAsSend { + type Output = MaskResultAsSend; + + fn poll( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + // SAFETY: We are sure that future is valid here + unsafe { + let me: &mut MaskFutureAsSend = Pin::into_inner_unchecked(self); + let future = Pin::new_unchecked(&mut me.future); + match future.poll(cx) { + Poll::Pending => Poll::Pending, + Poll::Ready(result) => Poll::Ready(MaskResultAsSend { result }), + } + } + } +} diff --git a/core/task_queue.rs b/core/task_queue.rs index 36a169650c..adb25a4f62 100644 --- a/core/task_queue.rs +++ b/core/task_queue.rs @@ -127,7 +127,7 @@ mod tests { for i in 0..100 { let data = data.clone(); tasks.push(task_queue.queue(async move { - tokio::task::spawn_blocking(move || { + crate::task::spawn_blocking(move || { let mut data = data.lock(); if *data != i { panic!("Value was not equal."); diff --git a/ext/broadcast_channel/01_broadcast_channel.js b/ext/broadcast_channel/01_broadcast_channel.js index a9a7eb9912..d92aef0728 100644 --- a/ext/broadcast_channel/01_broadcast_channel.js +++ b/ext/broadcast_channel/01_broadcast_channel.js @@ -85,10 +85,7 @@ class BroadcastChannel extends EventTarget { const prefix = "Failed to construct 'BroadcastChannel'"; webidl.requiredArguments(arguments.length, 1, prefix); - this[_name] = webidl.converters["DOMString"](name, { - prefix, - context: "Argument 1", - }); + this[_name] = webidl.converters["DOMString"](name, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 005806aff9..2be134d310 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.93.0" +version = "0.101.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/01_cache.js b/ext/cache/01_cache.js index 
48651dfb69..9476420efa 100644 --- a/ext/cache/01_cache.js +++ b/ext/cache/01_cache.js @@ -4,9 +4,12 @@ const core = globalThis.Deno.core; import * as webidl from "ext:deno_webidl/00_webidl.js"; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypePush, + ObjectPrototypeIsPrototypeOf, + StringPrototypeSplit, + StringPrototypeTrim, Symbol, TypeError, - ObjectPrototypeIsPrototypeOf, } = primordials; import { Request, @@ -27,10 +30,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'open' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); const cacheId = await core.opAsync("op_cache_storage_open", cacheName); const cache = webidl.createBranded(Cache); cache[_id] = cacheId; @@ -41,10 +41,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'has' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); return await core.opAsync("op_cache_storage_has", cacheName); } @@ -52,10 +49,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'delete' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); return await core.opAsync("op_cache_storage_delete", cacheName); } } @@ -76,14 +70,12 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'put' on 
'Cache'"; webidl.requiredArguments(arguments.length, 2, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); - response = webidl.converters["Response"](response, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + response = webidl.converters["Response"](response, prefix, "Argument 2"); // Step 1. let innerRequest = null; // Step 2. @@ -112,10 +104,10 @@ class Cache { // Step 7. const varyHeader = getHeader(innerResponse.headerList, "vary"); if (varyHeader) { - const fieldValues = varyHeader.split(","); + const fieldValues = StringPrototypeSplit(varyHeader, ","); for (let i = 0; i < fieldValues.length; ++i) { const field = fieldValues[i]; - if (field.trim() === "*") { + if (StringPrototypeTrim(field) === "*") { throw new TypeError("Vary header must not contain '*'"); } } @@ -136,6 +128,7 @@ class Cache { "op_cache_put", { cacheId: this[_id], + // deno-lint-ignore prefer-primordials requestUrl: reqUrl.toString(), responseHeaders: innerResponse.headerList, requestHeaders: innerRequest.headerList, @@ -166,10 +159,11 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'match' on 'Cache'"; webidl.requiredArguments(arguments.length, 1, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); + "Argument 1", + ); const p = await this[_matchAll](request, options); if (p.length > 0) { return p[0]; @@ -183,10 +177,11 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'delete' on 'Cache'"; webidl.requiredArguments(arguments.length, 1, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); + "Argument 1", + 
); // Step 1. let r = null; // Step 2. @@ -249,6 +244,7 @@ class Cache { "op_cache_match", { cacheId: this[_id], + // deno-lint-ignore prefer-primordials requestUrl: url.toString(), requestHeaders: innerRequest.headerList, }, @@ -267,7 +263,7 @@ class Cache { statusText: meta.responseStatusText, }, ); - responses.push(response); + ArrayPrototypePush(responses, response); } } // Step 5.4-5.5: don't apply in this context. diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 57d025bba3..1796e0639e 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.31.0" +version = "0.39.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index 2853f793db..4eb9924c7a 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -10,6 +10,7 @@ use std::time::UNIX_EPOCH; use async_trait::async_trait; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -99,7 +100,7 @@ impl Cache for SqliteBackedCache { async fn storage_open(&self, cache_name: String) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); db.execute( "INSERT OR IGNORE INTO cache_storage (cache_name) VALUES (?1)", @@ -124,7 +125,7 @@ impl Cache for SqliteBackedCache { /// Note: this doesn't check the disk, it only checks the sqlite db. 
async fn storage_has(&self, cache_name: String) -> Result { let db = self.connection.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); let cache_exists = db.query_row( "SELECT count(id) FROM cache_storage WHERE cache_name = ?1", @@ -143,7 +144,7 @@ impl Cache for SqliteBackedCache { async fn storage_delete(&self, cache_name: String) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); let maybe_cache_id = db .query_row( @@ -210,7 +211,7 @@ impl Cache for SqliteBackedCache { > { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - let query_result = tokio::task::spawn_blocking(move || { + let query_result = spawn_blocking(move || { let db = db.lock(); let result = db.query_row( "SELECT response_body_key, response_headers, response_status, response_status_text, request_headers @@ -269,7 +270,7 @@ impl Cache for SqliteBackedCache { request: CacheDeleteRequest, ) -> Result { let db = self.connection.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { // TODO(@satyarohith): remove the response body from disk if one exists let db = db.lock(); let rows_effected = db.execute( @@ -287,7 +288,7 @@ async fn insert_cache_asset( put: CachePutRequest, response_body_key: Option, ) -> Result, deno_core::anyhow::Error> { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let maybe_response_body = { let db = db.lock(); db.query_row( diff --git a/ext/console/01_colors.js b/ext/console/01_colors.js deleted file mode 100644 index a598db9217..0000000000 --- a/ext/console/01_colors.js +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -/// - -const primordials = globalThis.__bootstrap.primordials; -const { - SafeRegExp, - StringPrototypeReplace, - ArrayPrototypeJoin, -} = primordials; - -let noColor = false; - -function setNoColor(value) { - noColor = value; -} - -function getNoColor() { - return noColor; -} - -function code(open, close) { - return { - open: `\x1b[${open}m`, - close: `\x1b[${close}m`, - regexp: new SafeRegExp(`\\x1b\\[${close}m`, "g"), - }; -} - -function run(str, code) { - return `${code.open}${ - StringPrototypeReplace(str, code.regexp, code.open) - }${code.close}`; -} - -function bold(str) { - return run(str, code(1, 22)); -} - -function italic(str) { - return run(str, code(3, 23)); -} - -function yellow(str) { - return run(str, code(33, 39)); -} - -function cyan(str) { - return run(str, code(36, 39)); -} - -function red(str) { - return run(str, code(31, 39)); -} - -function green(str) { - return run(str, code(32, 39)); -} - -function bgRed(str) { - return run(str, code(41, 49)); -} - -function white(str) { - return run(str, code(37, 39)); -} - -function gray(str) { - return run(str, code(90, 39)); -} - -function magenta(str) { - return run(str, code(35, 39)); -} - -// https://github.com/chalk/ansi-regex/blob/02fa893d619d3da85411acc8fd4e2eea0e95a9d9/index.js -const ANSI_PATTERN = new SafeRegExp( - ArrayPrototypeJoin([ - "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", - "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))", - ], "|"), - "g", -); - -function stripColor(string) { - return StringPrototypeReplace(string, ANSI_PATTERN, ""); -} - -function maybeColor(fn) { - return !noColor ? 
fn : (s) => s; -} - -export { - bgRed, - bold, - cyan, - getNoColor, - gray, - green, - italic, - magenta, - maybeColor, - red, - setNoColor, - stripColor, - white, - yellow, -}; diff --git a/ext/console/01_console.js b/ext/console/01_console.js new file mode 100644 index 0000000000..fbc36ca9c7 --- /dev/null +++ b/ext/console/01_console.js @@ -0,0 +1,3641 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +/// + +const core = globalThis.Deno.core; +const internals = globalThis.__bootstrap.internals; +const primordials = globalThis.__bootstrap.primordials; +const { + AggregateErrorPrototype, + Array, + ArrayBufferIsView, + ArrayBufferPrototypeGetByteLength, + ArrayIsArray, + ArrayPrototypeFill, + ArrayPrototypeFilter, + ArrayPrototypeFind, + ArrayPrototypeForEach, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeReduce, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeSort, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + BigIntPrototypeValueOf, + Boolean, + BooleanPrototypeValueOf, + DateNow, + DatePrototype, + DatePrototypeGetTime, + DatePrototypeToISOString, + Error, + ErrorCaptureStackTrace, + ErrorPrototype, + ErrorPrototypeToString, + FunctionPrototypeBind, + FunctionPrototypeCall, + FunctionPrototypeToString, + MapPrototype, + MapPrototypeDelete, + MapPrototypeEntries, + MapPrototypeForEach, + MapPrototypeGet, + MapPrototypeGetSize, + MapPrototypeHas, + MapPrototypeSet, + MathAbs, + MathFloor, + MathMax, + MathMin, + MathRound, + MathSqrt, + Number, + NumberIsInteger, + NumberParseInt, + NumberPrototypeToString, + NumberPrototypeValueOf, + ObjectAssign, + ObjectCreate, + ObjectDefineProperty, + ObjectFreeze, + ObjectFromEntries, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectGetOwnPropertySymbols, + ObjectGetPrototypeOf, + ObjectHasOwn, + ObjectIs, + ObjectKeys, + ObjectPrototype, + 
ObjectPrototypeIsPrototypeOf, + ObjectPrototypePropertyIsEnumerable, + ObjectPrototypeToString, + ObjectSetPrototypeOf, + ObjectValues, + Proxy, + ReflectGet, + ReflectGetOwnPropertyDescriptor, + ReflectGetPrototypeOf, + ReflectHas, + ReflectOwnKeys, + RegExpPrototypeExec, + RegExpPrototypeSymbolReplace, + RegExpPrototypeTest, + RegExpPrototypeToString, + SafeArrayIterator, + SafeMap, + SafeMapIterator, + SafeRegExp, + SafeSet, + SafeSetIterator, + SafeStringIterator, + SetPrototype, + SetPrototypeAdd, + SetPrototypeHas, + SetPrototypeGetSize, + SetPrototypeValues, + String, + StringPrototypeCharCodeAt, + StringPrototypeCodePointAt, + StringPrototypeEndsWith, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeLastIndexOf, + StringPrototypeMatch, + StringPrototypeNormalize, + StringPrototypePadEnd, + StringPrototypePadStart, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeReplaceAll, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeStartsWith, + StringPrototypeToLowerCase, + StringPrototypeTrim, + StringPrototypeValueOf, + Symbol, + SymbolFor, + SymbolHasInstance, + SymbolIterator, + SymbolPrototypeGetDescription, + SymbolPrototypeToString, + SymbolPrototypeValueOf, + SymbolToStringTag, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeGetSymbolToStringTag, + Uint8Array, + WeakMapPrototypeHas, + WeakSetPrototypeHas, + isNaN, +} = primordials; + +let noColor = false; + +function setNoColor(value) { + noColor = value; +} + +function getNoColor() { + return noColor; +} + +// Don't use 'blue' not visible on cmd.exe +const styles = { + special: "cyan", + number: "yellow", + bigint: "yellow", + boolean: "yellow", + undefined: "grey", + null: "bold", + string: "green", + symbol: "green", + date: "magenta", + // "name": intentionally not styling + // TODO(BridgeAR): Highlight regular expressions properly. 
+ regexp: "red", + module: "underline", + internalError: "red", +}; + +const defaultFG = 39; +const defaultBG = 49; + +// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics +// Each color consists of an array with the color code as first entry and the +// reset code as second entry. +const colors = { + reset: [0, 0], + bold: [1, 22], + dim: [2, 22], // Alias: faint + italic: [3, 23], + underline: [4, 24], + blink: [5, 25], + // Swap foreground and background colors + inverse: [7, 27], // Alias: swapcolors, swapColors + hidden: [8, 28], // Alias: conceal + strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut + doubleunderline: [21, 24], // Alias: doubleUnderline + black: [30, defaultFG], + red: [31, defaultFG], + green: [32, defaultFG], + yellow: [33, defaultFG], + blue: [34, defaultFG], + magenta: [35, defaultFG], + cyan: [36, defaultFG], + white: [37, defaultFG], + bgBlack: [40, defaultBG], + bgRed: [41, defaultBG], + bgGreen: [42, defaultBG], + bgYellow: [43, defaultBG], + bgBlue: [44, defaultBG], + bgMagenta: [45, defaultBG], + bgCyan: [46, defaultBG], + bgWhite: [47, defaultBG], + framed: [51, 54], + overlined: [53, 55], + gray: [90, defaultFG], // Alias: grey, blackBright + redBright: [91, defaultFG], + greenBright: [92, defaultFG], + yellowBright: [93, defaultFG], + blueBright: [94, defaultFG], + magentaBright: [95, defaultFG], + cyanBright: [96, defaultFG], + whiteBright: [97, defaultFG], + bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright + bgRedBright: [101, defaultBG], + bgGreenBright: [102, defaultBG], + bgYellowBright: [103, defaultBG], + bgBlueBright: [104, defaultBG], + bgMagentaBright: [105, defaultBG], + bgCyanBright: [106, defaultBG], + bgWhiteBright: [107, defaultBG], +}; + +function defineColorAlias(target, alias) { + ObjectDefineProperty(colors, alias, { + get() { + return this[target]; + }, + set(value) { + this[target] = value; + }, + configurable: true, + enumerable: false, + }); +} + 
+defineColorAlias("gray", "grey"); +defineColorAlias("gray", "blackBright"); +defineColorAlias("bgGray", "bgGrey"); +defineColorAlias("bgGray", "bgBlackBright"); +defineColorAlias("dim", "faint"); +defineColorAlias("strikethrough", "crossedout"); +defineColorAlias("strikethrough", "strikeThrough"); +defineColorAlias("strikethrough", "crossedOut"); +defineColorAlias("hidden", "conceal"); +defineColorAlias("inverse", "swapColors"); +defineColorAlias("inverse", "swapcolors"); +defineColorAlias("doubleunderline", "doubleUnderline"); + +// https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.bytelength +let _getSharedArrayBufferByteLength; + +function getSharedArrayBufferByteLength(value) { + // TODO(kt3k): add SharedArrayBuffer to primordials + _getSharedArrayBufferByteLength ??= ObjectGetOwnPropertyDescriptor( + // deno-lint-ignore prefer-primordials + SharedArrayBuffer.prototype, + "byteLength", + ).get; + + return FunctionPrototypeCall(_getSharedArrayBufferByteLength, value); +} + +function isObjectLike(value) { + return value !== null && typeof value === "object"; +} + +export function isAnyArrayBuffer(value) { + return isArrayBuffer(value) || isSharedArrayBuffer(value); +} + +export function isArgumentsObject(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object Arguments]" + ); +} + +export function isArrayBuffer(value) { + try { + ArrayBufferPrototypeGetByteLength(value); + return true; + } catch { + return false; + } +} + +export function isAsyncFunction(value) { + return ( + typeof value === "function" && + (value[SymbolToStringTag] === "AsyncFunction") + ); +} + +export function isAsyncGeneratorFunction(value) { + return ( + typeof value === "function" && + (value[SymbolToStringTag] === "AsyncGeneratorFunction") + ); +} + +export function isBooleanObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + BooleanPrototypeValueOf(value); + return true; + 
} catch { + return false; + } +} + +export function isBoxedPrimitive( + value, +) { + return ( + isBooleanObject(value) || + isStringObject(value) || + isNumberObject(value) || + isSymbolObject(value) || + isBigIntObject(value) + ); +} + +export function isDataView(value) { + return ( + ArrayBufferIsView(value) && + TypedArrayPrototypeGetSymbolToStringTag(value) === undefined + ); +} + +export function isTypedArray(value) { + return TypedArrayPrototypeGetSymbolToStringTag(value) !== undefined; +} + +export function isGeneratorFunction( + value, +) { + return ( + typeof value === "function" && + value[SymbolToStringTag] === "GeneratorFunction" + ); +} + +export function isMap(value) { + try { + MapPrototypeGetSize(value); + return true; + } catch { + return false; + } +} + +export function isMapIterator( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Map Iterator" + ); +} + +export function isModuleNamespaceObject( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Module" + ); +} + +export function isNativeError(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object Error]" + ); +} + +export function isNumberObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + NumberPrototypeValueOf(value); + return true; + } catch { + return false; + } +} + +export function isBigIntObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + BigIntPrototypeValueOf(value); + return true; + } catch { + return false; + } +} + +export function isPromise(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Promise" + ); +} +export function isRegExp(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object RegExp]" + ); +} + +export function isSet(value) { + try { + SetPrototypeGetSize(value); + return 
true; + } catch { + return false; + } +} + +export function isSetIterator( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Set Iterator" + ); +} + +export function isSharedArrayBuffer( + value, +) { + try { + getSharedArrayBufferByteLength(value); + return true; + } catch { + return false; + } +} + +export function isStringObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + StringPrototypeValueOf(value); + return true; + } catch { + return false; + } +} + +export function isSymbolObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + SymbolPrototypeValueOf(value); + return true; + } catch { + return false; + } +} + +export function isWeakMap( + value, +) { + try { + WeakMapPrototypeHas(value, null); + return true; + } catch { + return false; + } +} + +export function isWeakSet( + value, +) { + try { + WeakSetPrototypeHas(value, null); + return true; + } catch { + return false; + } +} + +const kObjectType = 0; +const kArrayType = 1; +const kArrayExtrasType = 2; + +const kMinLineLength = 16; + +// Constants to map the iterator state. +const kWeak = 0; +const kIterator = 1; +const kMapEntries = 2; + +// Escaped control characters (plus the single quote and the backslash). Use +// empty strings to fill up unused entries. 
+// deno-fmt-ignore +const meta = [ + '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 + '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F + '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 + '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F + '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F + '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F + '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 + '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F + '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 + '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F +]; + +// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot +const isUndetectableObject = (v) => typeof v === "undefined" && v !== undefined; + +const strEscapeSequencesReplacer = new SafeRegExp( + "[\x00-\x1f\x27\x5c\x7f-\x9f]", + "g", +); + +const keyStrRegExp = new SafeRegExp("^[a-zA-Z_][a-zA-Z_0-9]*$"); +const numberRegExp = new SafeRegExp("^(0|[1-9][0-9]*)$"); + +// TODO(wafuwafu13): Figure out +const escapeFn = (str) => meta[StringPrototypeCharCodeAt(str, 0)]; + +function stylizeNoColor(str) { + return str; +} + +// node custom inspect symbol +const nodeCustomInspectSymbol = SymbolFor("nodejs.util.inspect.custom"); + +// This non-unique symbol is used to support op_crates, ie. +// in extensions/web we don't want to depend on public +// Symbol.for("Deno.customInspect") symbol defined in the public API. +// Internal only, shouldn't be used by users. 
+const privateCustomInspect = SymbolFor("Deno.privateCustomInspect"); + +function getUserOptions(ctx, isCrossContext) { + const ret = { + stylize: ctx.stylize, + showHidden: ctx.showHidden, + depth: ctx.depth, + colors: ctx.colors, + customInspect: ctx.customInspect, + showProxy: ctx.showProxy, + maxArrayLength: ctx.maxArrayLength, + maxStringLength: ctx.maxStringLength, + breakLength: ctx.breakLength, + compact: ctx.compact, + sorted: ctx.sorted, + getters: ctx.getters, + numericSeparator: ctx.numericSeparator, + ...ctx.userOptions, + }; + + // Typically, the target value will be an instance of `Object`. If that is + // *not* the case, the object may come from another vm.Context, and we want + // to avoid passing it objects from this Context in that case, so we remove + // the prototype from the returned object itself + the `stylize()` function, + // and remove all other non-primitives, including non-primitive user options. + if (isCrossContext) { + ObjectSetPrototypeOf(ret, null); + for (const key of new SafeArrayIterator(ObjectKeys(ret))) { + if ( + (typeof ret[key] === "object" || typeof ret[key] === "function") && + ret[key] !== null + ) { + delete ret[key]; + } + } + ret.stylize = ObjectSetPrototypeOf((value, flavour) => { + let stylized; + try { + stylized = `${ctx.stylize(value, flavour)}`; + } catch { + // Continue regardless of error. + } + + if (typeof stylized !== "string") return value; + // `stylized` is a string as it should be, which is safe to pass along. + return stylized; + }, null); + } + + return ret; +} + +// Note: using `formatValue` directly requires the indentation level to be +// corrected by setting `ctx.indentationLvL += diff` and then to decrease the +// value afterwards again. +function formatValue( + ctx, + value, + recurseTimes, + typedArray, +) { + // Primitive types cannot have properties. 
+ if ( + typeof value !== "object" && + typeof value !== "function" && + !isUndetectableObject(value) + ) { + return formatPrimitive(ctx.stylize, value, ctx); + } + if (value === null) { + return ctx.stylize("null", "null"); + } + + // Memorize the context for custom inspection on proxies. + const context = value; + // Always check for proxies to prevent side effects and to prevent triggering + // any proxy handlers. + // TODO(wafuwafu13): Set Proxy + const proxyDetails = core.getProxyDetails(value); + // const proxy = getProxyDetails(value, !!ctx.showProxy); + // if (proxy !== undefined) { + // if (ctx.showProxy) { + // return formatProxy(ctx, proxy, recurseTimes); + // } + // value = proxy; + // } + + // Provide a hook for user-specified inspect functions. + // Check that value is an object with an inspect function on it. + if (ctx.customInspect) { + if ( + ReflectHas(value, customInspect) && + typeof value[customInspect] === "function" + ) { + return String(value[customInspect](inspect, ctx)); + } else if ( + ReflectHas(value, privateCustomInspect) && + typeof value[privateCustomInspect] === "function" + ) { + // TODO(nayeemrmn): `inspect` is passed as an argument because custom + // inspect implementations in `extensions` need it, but may not have access + // to the `Deno` namespace in web workers. Remove when the `Deno` + // namespace is always enabled. + return String(value[privateCustomInspect](inspect, ctx)); + } else if (ReflectHas(value, nodeCustomInspectSymbol)) { + const maybeCustom = value[nodeCustomInspectSymbol]; + if ( + typeof maybeCustom === "function" && + // Filter out the util module, its inspect function is special. + maybeCustom !== ctx.inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value) + ) { + // This makes sure the recurseTimes are reported as before while using + // a counter internally. + const depth = ctx.depth === null ? 
null : ctx.depth - recurseTimes; + // TODO(@crowlKats): proxy handling + const isCrossContext = !ObjectPrototypeIsPrototypeOf( + ObjectPrototype, + context, + ); + const ret = FunctionPrototypeCall( + maybeCustom, + context, + depth, + getUserOptions(ctx, isCrossContext), + ctx.inspect, + ); + // If the custom inspection method returned `this`, don't go into + // infinite recursion. + if (ret !== context) { + if (typeof ret !== "string") { + return formatValue(ctx, ret, recurseTimes); + } + return StringPrototypeReplaceAll( + ret, + "\n", + `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`, + ); + } + } + } + } + + // Using an array here is actually better for the average case than using + // a Set. `seen` will only check for the depth and will never grow too large. + if (ArrayPrototypeIncludes(ctx.seen, value)) { + let index = 1; + if (ctx.circular === undefined) { + ctx.circular = new SafeMap(); + MapPrototypeSet(ctx.circular, value, index); + } else { + index = ctx.circular.get(value); + if (index === undefined) { + index = ctx.circular.size + 1; + MapPrototypeSet(ctx.circular, value, index); + } + } + return ctx.stylize(`[Circular *${index}]`, "special"); + } + + return formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails); +} + +function getClassBase(value, constructor, tag) { + const hasName = ObjectHasOwn(value, "name"); + const name = (hasName && value.name) || "(anonymous)"; + let base = `class ${name}`; + if (constructor !== "Function" && constructor !== null) { + base += ` [${constructor}]`; + } + if (tag !== "" && constructor !== tag) { + base += ` [${tag}]`; + } + if (constructor !== null) { + const superName = ObjectGetPrototypeOf(value).name; + if (superName) { + base += ` extends ${superName}`; + } + } else { + base += " extends [null prototype]"; + } + return `[${base}]`; +} + +const stripCommentsRegExp = new SafeRegExp( + "(\\/\\/.*?\\n)|(\\/\\*(.|\\n)*?\\*\\/)", + "g", +); +const classRegExp = new SafeRegExp("^(\\s+[^(]*?)\\s*{"); + 
+function getFunctionBase(value, constructor, tag) { + const stringified = FunctionPrototypeToString(value); + if ( + StringPrototypeStartsWith(stringified, "class") && + StringPrototypeEndsWith(stringified, "}") + ) { + const slice = StringPrototypeSlice(stringified, 5, -1); + const bracketIndex = StringPrototypeIndexOf(slice, "{"); + if ( + bracketIndex !== -1 && + (!StringPrototypeIncludes( + StringPrototypeSlice(slice, 0, bracketIndex), + "(", + ) || + // Slow path to guarantee that it's indeed a class. + RegExpPrototypeExec( + classRegExp, + RegExpPrototypeSymbolReplace(stripCommentsRegExp, slice), + ) !== null) + ) { + return getClassBase(value, constructor, tag); + } + } + let type = "Function"; + if (isGeneratorFunction(value)) { + type = `Generator${type}`; + } + if (isAsyncFunction(value)) { + type = `Async${type}`; + } + if (isAsyncGeneratorFunction(value)) { + type = `AsyncGenerator${type}`; + } + let base = `[${type}`; + if (constructor === null) { + base += " (null prototype)"; + } + if (value.name === "") { + base += " (anonymous)"; + } else { + base += `: ${value.name}`; + } + base += "]"; + if (constructor !== type && constructor !== null) { + base += ` ${constructor}`; + } + if (tag !== "" && constructor !== tag) { + base += ` [${tag}]`; + } + return base; +} + +function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { + let keys; + let protoProps; + if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { + protoProps = []; + } + + const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); + // Reset the variable to check for this later on. + if (protoProps !== undefined && protoProps.length === 0) { + protoProps = undefined; + } + + let tag = value[SymbolToStringTag]; + // Only list the tag in case it's non-enumerable / not an own property. + // Otherwise we'd print this twice. + if ( + typeof tag !== "string" + // TODO(wafuwafu13): Implement + // (tag !== "" && + // (ctx.showHidden + // ? 
Object.prototype.hasOwnProperty + // : Object.prototype.propertyIsEnumerable)( + // value, + // Symbol.toStringTag, + // )) + ) { + tag = ""; + } + let base = ""; + let formatter = () => []; + let braces; + let noIterator = true; + let i = 0; + const filter = ctx.showHidden ? 0 : 2; + + let extrasType = kObjectType; + + if (proxyDetails != null && ctx.showProxy) { + return `Proxy ` + formatValue(ctx, proxyDetails, recurseTimes); + } else { + // Iterators and the rest are split to reduce checks. + // We have to check all values in case the constructor is set to null. + // Otherwise it would not possible to identify all types properly. + if (ReflectHas(value, SymbolIterator) || constructor === null) { + noIterator = false; + if (ArrayIsArray(value)) { + // Only set the constructor for non ordinary ("Array [...]") arrays. + const prefix = (constructor !== "Array" || tag !== "") + ? getPrefix(constructor, tag, "Array", `(${value.length})`) + : ""; + keys = core.ops.op_get_non_index_property_names(value, filter); + braces = [`${prefix}[`, "]"]; + if ( + value.length === 0 && keys.length === 0 && protoProps === undefined + ) { + return `${braces[0]}]`; + } + extrasType = kArrayExtrasType; + formatter = formatArray; + } else if (isSet(value)) { + const size = SetPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, "Set", `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null + ? FunctionPrototypeBind(formatSet, null, value) + : FunctionPrototypeBind(formatSet, null, SetPrototypeValues(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) { + return `${prefix}{}`; + } + braces = [`${prefix}{`, "}"]; + } else if (isMap(value)) { + const size = MapPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, "Map", `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null + ? 
FunctionPrototypeBind(formatMap, null, value) + : FunctionPrototypeBind(formatMap, null, MapPrototypeEntries(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) { + return `${prefix}{}`; + } + braces = [`${prefix}{`, "}"]; + } else if (isTypedArray(value)) { + keys = core.ops.op_get_non_index_property_names(value, filter); + const bound = value; + const fallback = ""; + if (constructor === null) { + // TODO(wafuwafu13): Implement + // fallback = TypedArrayPrototypeGetSymbolToStringTag(value); + // // Reconstruct the array information. + // bound = new primordials[fallback](value); + } + const size = TypedArrayPrototypeGetLength(value); + const prefix = getPrefix(constructor, tag, fallback, `(${size})`); + braces = [`${prefix}[`, "]"]; + if (value.length === 0 && keys.length === 0 && !ctx.showHidden) { + return `${braces[0]}]`; + } + // Special handle the value. The original value is required below. The + // bound function is required to reconstruct missing information. + formatter = FunctionPrototypeBind(formatTypedArray, null, bound, size); + extrasType = kArrayExtrasType; + } else if (isMapIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces("Map", tag); + // Add braces to the formatter parameters. + formatter = FunctionPrototypeBind(formatIterator, null, braces); + } else if (isSetIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces("Set", tag); + // Add braces to the formatter parameters. 
+ formatter = FunctionPrototypeBind(formatIterator, null, braces); + } else { + noIterator = true; + } + } + if (noIterator) { + keys = getKeys(value, ctx.showHidden); + braces = ["{", "}"]; + if (constructor === "Object") { + if (isArgumentsObject(value)) { + braces[0] = "[Arguments] {"; + } else if (tag !== "") { + braces[0] = `${getPrefix(constructor, tag, "Object")}{`; + } + if (keys.length === 0 && protoProps === undefined) { + return `${braces[0]}}`; + } + } else if (typeof value === "function") { + base = getFunctionBase(value, constructor, tag); + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, "special"); + } + } else if (isRegExp(value)) { + // Make RegExps say that they are RegExps + base = RegExpPrototypeToString( + constructor !== null ? value : new SafeRegExp(value), + ); + const prefix = getPrefix(constructor, tag, "RegExp"); + if (prefix !== "RegExp ") { + base = `${prefix}${base}`; + } + if ( + (keys.length === 0 && protoProps === undefined) || + (recurseTimes > ctx.depth && ctx.depth !== null) + ) { + return ctx.stylize(base, "regexp"); + } + } else if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { + const date = proxyDetails ? proxyDetails[0] : value; + if (isNaN(DatePrototypeGetTime(date))) { + return ctx.stylize("Invalid Date", "date"); + } else { + base = DatePrototypeToISOString(date); + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, "date"); + } + } + } else if (ObjectPrototypeIsPrototypeOf(ErrorPrototype, value)) { + base = inspectError(value, ctx); + if (keys.length === 0 && protoProps === undefined) { + return base; + } + } else if (isAnyArrayBuffer(value)) { + // Fast path for ArrayBuffer and SharedArrayBuffer. + // Can't do the same for DataView because it has a non-primitive + // .buffer property that we need to recurse for. + const arrayType = isArrayBuffer(value) + ? 
"ArrayBuffer" + : "SharedArrayBuffer"; + + const prefix = getPrefix(constructor, tag, arrayType); + if (typedArray === undefined) { + formatter = formatArrayBuffer; + } else if (keys.length === 0 && protoProps === undefined) { + return prefix + + `{ byteLength: ${ + formatNumber(ctx.stylize, TypedArrayPrototypeGetByteLength(value)) + } }`; + } + braces[0] = `${prefix}{`; + ArrayPrototypeUnshift(keys, "byteLength"); + } else if (isDataView(value)) { + braces[0] = `${getPrefix(constructor, tag, "DataView")}{`; + // .buffer goes last, it's not a primitive like the others. + ArrayPrototypeUnshift(keys, "byteLength", "byteOffset", "buffer"); + } else if (isPromise(value)) { + braces[0] = `${getPrefix(constructor, tag, "Promise")}{`; + formatter = formatPromise; + } else if (isWeakSet(value)) { + braces[0] = `${getPrefix(constructor, tag, "WeakSet")}{`; + formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection; + } else if (isWeakMap(value)) { + braces[0] = `${getPrefix(constructor, tag, "WeakMap")}{`; + formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection; + } else if (isModuleNamespaceObject(value)) { + braces[0] = `${getPrefix(constructor, tag, "Module")}{`; + // Special handle keys for namespace objects. 
+ formatter = FunctionPrototypeBind(formatNamespaceObject, null, keys); + } else if (isBoxedPrimitive(value)) { + base = getBoxedBase(value, ctx, keys, constructor, tag); + if (keys.length === 0 && protoProps === undefined) { + return base; + } + } else { + if (keys.length === 0 && protoProps === undefined) { + // TODO(wafuwafu13): Implement + // if (isExternal(value)) { + // const address = getExternalValue(value).toString(16); + // return ctx.stylize(`[External: ${address}]`, 'special'); + // } + return `${getCtxStyle(value, constructor, tag)}{}`; + } + braces[0] = `${getCtxStyle(value, constructor, tag)}{`; + } + } + } + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + let constructorName = StringPrototypeSlice( + getCtxStyle(value, constructor, tag), + 0, + -1, + ); + if (constructor !== null) { + constructorName = `[${constructorName}]`; + } + return ctx.stylize(constructorName, "special"); + } + recurseTimes += 1; + + ArrayPrototypePush(ctx.seen, value); + ctx.currentDepth = recurseTimes; + let output; + try { + output = formatter(ctx, value, recurseTimes); + for (i = 0; i < keys.length; i++) { + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, keys[i], extrasType), + ); + } + if (protoProps !== undefined) { + ArrayPrototypePushApply(output, protoProps); + } + } catch (error) { + // TODO(wafuwafu13): Implement stack overflow check + return ctx.stylize( + `[Internal Formatting Error] ${error.stack}`, + "internalError", + ); + } + + if (ctx.circular !== undefined) { + const index = ctx.circular.get(value); + if (index !== undefined) { + const reference = ctx.stylize(``, "special"); + // Add reference always to the very beginning of the output. + if (ctx.compact !== true) { + base = base === "" ? reference : `${reference} ${base}`; + } else { + braces[0] = `${reference} ${braces[0]}`; + } + } + } + ArrayPrototypePop(ctx.seen); + + if (ctx.sorted) { + const comparator = ctx.sorted === true ? 
undefined : ctx.sorted; + if (extrasType === kObjectType) { + output = ArrayPrototypeSort(output, comparator); + } else if (keys.length > 1) { + const sorted = ArrayPrototypeSort( + ArrayPrototypeSlice(output, output.length - keys.length), + comparator, + ); + ArrayPrototypeSplice( + output, + output.length - keys.length, + keys.length, + ...new SafeArrayIterator(sorted), + ); + } + } + + const res = reduceToSingleString( + ctx, + output, + base, + braces, + extrasType, + recurseTimes, + value, + ); + const budget = ctx.budget[ctx.indentationLvl] || 0; + const newLength = budget + res.length; + ctx.budget[ctx.indentationLvl] = newLength; + // If any indentationLvl exceeds this limit, limit further inspecting to the + // minimum. Otherwise the recursive algorithm might continue inspecting the + // object even though the maximum string size (~2 ** 28 on 32 bit systems and + // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at + // exactly 2 ** 27 but a bit higher. This depends on the object shape. + // This limit also makes sure that huge objects don't block the event loop + // significantly. + if (newLength > 2 ** 27) { + ctx.depth = -1; + } + return res; +} + +const builtInObjectsRegExp = new SafeRegExp("^[A-Z][a-zA-Z0-9]+$"); +const builtInObjects = new SafeSet( + ArrayPrototypeFilter( + ObjectGetOwnPropertyNames(globalThis), + (e) => RegExpPrototypeTest(builtInObjectsRegExp, e), + ), +); + +function addPrototypeProperties( + ctx, + main, + obj, + recurseTimes, + output, +) { + let depth = 0; + let keys; + let keySet; + do { + if (depth !== 0 || main === obj) { + obj = ObjectGetPrototypeOf(obj); + // Stop as soon as a null prototype is encountered. + if (obj === null) { + return; + } + // Stop as soon as a built-in object type is detected. 
+ const descriptor = ObjectGetOwnPropertyDescriptor(obj, "constructor"); + if ( + descriptor !== undefined && + typeof descriptor.value === "function" && + SetPrototypeHas(builtInObjects, descriptor.value.name) + ) { + return; + } + } + + if (depth === 0) { + keySet = new SafeSet(); + } else { + ArrayPrototypeForEach(keys, (key) => SetPrototypeAdd(keySet, key)); + } + // Get all own property names and symbols. + keys = ReflectOwnKeys(obj); + ArrayPrototypePush(ctx.seen, main); + for (const key of new SafeArrayIterator(keys)) { + // Ignore the `constructor` property and keys that exist on layers above. + if ( + key === "constructor" || + ObjectHasOwn(main, key) || + (depth !== 0 && SetPrototypeHas(keySet, key)) + ) { + continue; + } + const desc = ObjectGetOwnPropertyDescriptor(obj, key); + if (typeof desc.value === "function") { + continue; + } + const value = formatProperty( + ctx, + obj, + recurseTimes, + key, + kObjectType, + desc, + main, + ); + if (ctx.colors) { + // Faint! + ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`); + } else { + ArrayPrototypePush(output, value); + } + } + ArrayPrototypePop(ctx.seen); + // Limit the inspection to up to three prototype layers. Using `recurseTimes` + // is not a good choice here, because it's as if the properties are declared + // on the current object from the users perspective. 
+ } while (++depth !== 3); +} + +function isInstanceof(proto, object) { + try { + return ObjectPrototypeIsPrototypeOf(proto, object); + } catch { + return false; + } +} + +function getConstructorName(obj, ctx, recurseTimes, protoProps) { + let firstProto; + const tmp = obj; + while (obj || isUndetectableObject(obj)) { + const descriptor = ObjectGetOwnPropertyDescriptor(obj, "constructor"); + if ( + descriptor !== undefined && + typeof descriptor.value === "function" && + descriptor.value.name !== "" && + isInstanceof(descriptor.value.prototype, tmp) + ) { + if ( + protoProps !== undefined && + (firstProto !== obj || + !SetPrototypeHas(builtInObjects, descriptor.value.name)) + ) { + addPrototypeProperties( + ctx, + tmp, + firstProto || tmp, + recurseTimes, + protoProps, + ); + } + return String(descriptor.value.name); + } + + obj = ObjectGetPrototypeOf(obj); + if (firstProto === undefined) { + firstProto = obj; + } + } + + if (firstProto === null) { + return null; + } + + const res = core.ops.op_get_constructor_name(tmp); + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + return `${res} `; + } + + const protoConstr = getConstructorName( + firstProto, + ctx, + recurseTimes + 1, + protoProps, + ); + + if (protoConstr === null) { + return `${res} <${ + inspect(firstProto, { + ...ctx, + customInspect: false, + depth: -1, + }) + }>`; + } + + return `${res} <${protoConstr}>`; +} + +const formatPrimitiveRegExp = new SafeRegExp("(?<=\n)"); +function formatPrimitive(fn, value, ctx) { + if (typeof value === "string") { + let trailer = ""; + if (value.length > ctx.maxStringLength) { + const remaining = value.length - ctx.maxStringLength; + value = StringPrototypeSlice(value, 0, ctx.maxStringLength); + trailer = `... ${remaining} more character${remaining > 1 ? "s" : ""}`; + } + if ( + ctx.compact !== true && + // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth + // function. 
+ value.length > kMinLineLength && + value.length > ctx.breakLength - ctx.indentationLvl - 4 + ) { + return ArrayPrototypeJoin( + ArrayPrototypeMap( + StringPrototypeSplit(value, formatPrimitiveRegExp), + (line) => fn(quoteString(line, ctx), "string"), + ), + ` +\n${StringPrototypeRepeat(" ", ctx.indentationLvl + 2)}`, + ) + trailer; + } + return fn(quoteString(value, ctx), "string") + trailer; + } + if (typeof value === "number") { + return formatNumber(fn, value); + } + if (typeof value === "bigint") { + return formatBigInt(fn, value); + } + if (typeof value === "boolean") { + return fn(`${value}`, "boolean"); + } + if (typeof value === "undefined") { + return fn("undefined", "undefined"); + } + // es6 symbol primitive + return fn(maybeQuoteSymbol(value, ctx), "symbol"); +} + +function getPrefix(constructor, tag, fallback, size = "") { + if (constructor === null) { + if (tag !== "" && fallback !== tag) { + return `[${fallback}${size}: null prototype] [${tag}] `; + } + return `[${fallback}${size}: null prototype] `; + } + + if (tag !== "" && constructor !== tag) { + return `${constructor}${size} [${tag}] `; + } + return `${constructor}${size} `; +} + +function formatArray(ctx, value, recurseTimes) { + const valLen = value.length; + const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + // Special handle sparse arrays. + if (!ObjectHasOwn(value, i)) { + return formatSpecialArray(ctx, value, recurseTimes, len, output, i); + } + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, i, kArrayType), + ); + } + if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, + ); + } + return output; +} + +function getCtxStyle(value, constructor, tag) { + let fallback = ""; + if (constructor === null) { + fallback = core.ops.op_get_constructor_name(value); + if (fallback === tag) { + fallback = "Object"; + } + } + return getPrefix(constructor, tag, fallback); +} + +// Look up the keys of the object. +function getKeys(value, showHidden) { + let keys; + const symbols = ObjectGetOwnPropertySymbols(value); + if (showHidden) { + keys = ObjectGetOwnPropertyNames(value); + if (symbols.length !== 0) { + ArrayPrototypePushApply(keys, symbols); + } + } else { + // This might throw if `value` is a Module Namespace Object from an + // unevaluated module, but we don't want to perform the actual type + // check because it's expensive. + // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 + // and modify this logic as needed. + try { + keys = ObjectKeys(value); + } catch (err) { + assert( + isNativeError(err) && err.name === "ReferenceError" && + isModuleNamespaceObject(value), + ); + keys = ObjectGetOwnPropertyNames(value); + } + if (symbols.length !== 0) { + const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); + ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter)); + } + } + return keys; +} + +function formatSet(value, ctx, _ignored, recurseTimes) { + ctx.indentationLvl += 2; + + const values = [...new SafeSetIterator(value)]; + const valLen = SetPrototypeGetSize(value); + const len = MathMin(MathMax(0, ctx.iterableLimit), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + ArrayPrototypePush(output, formatValue(ctx, values[i], recurseTimes)); + } + if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, + ); + } + + ctx.indentationLvl -= 2; + return output; +} + +function formatMap(value, ctx, _gnored, recurseTimes) { + ctx.indentationLvl += 2; + + const values = [...new SafeMapIterator(value)]; + const valLen = MapPrototypeGetSize(value); + const len = MathMin(MathMax(0, ctx.iterableLimit), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + ArrayPrototypePush( + output, + `${formatValue(ctx, values[i][0], recurseTimes)} => ${ + formatValue(ctx, values[i][1], recurseTimes) + }`, + ); + } + if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); + } + + ctx.indentationLvl -= 2; + return output; +} + +function formatTypedArray( + value, + length, + ctx, + _ignored, + recurseTimes, +) { + const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); + const remaining = value.length - maxLength; + const output = new Array(maxLength); + const elementFormatter = value.length > 0 && typeof value[0] === "number" + ? formatNumber + : formatBigInt; + for (let i = 0; i < maxLength; ++i) { + output[i] = elementFormatter(ctx.stylize, value[i]); + } + if (remaining > 0) { + output[maxLength] = `... ${remaining} more item${remaining > 1 ? "s" : ""}`; + } + if (ctx.showHidden) { + // .buffer goes last, it's not a primitive like the others. + // All besides `BYTES_PER_ELEMENT` are actually getters. 
+ ctx.indentationLvl += 2; + for ( + const key of new SafeArrayIterator([ + "BYTES_PER_ELEMENT", + "length", + "byteLength", + "byteOffset", + "buffer", + ]) + ) { + const str = formatValue(ctx, value[key], recurseTimes, true); + ArrayPrototypePush(output, `[${key}]: ${str}`); + } + ctx.indentationLvl -= 2; + } + return output; +} + +function getIteratorBraces(type, tag) { + if (tag !== `${type} Iterator`) { + if (tag !== "") { + tag += "] ["; + } + tag += `${type} Iterator`; + } + return [`[${tag}] {`, "}"]; +} + +const iteratorRegExp = new SafeRegExp(" Iterator] {$"); +function formatIterator(braces, ctx, value, recurseTimes) { + // TODO(wafuwafu13): Implement + // const { 0: entries, 1: isKeyValue } = previewEntries(value, true); + const { 0: entries, 1: isKeyValue } = value; + if (isKeyValue) { + // Mark entry iterators as such. + braces[0] = StringPrototypeReplace( + braces[0], + iteratorRegExp, + " Entries] {", + ); + return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); + } + + return formatSetIterInner(ctx, recurseTimes, entries, kIterator); +} + +function handleCircular(value, ctx) { + let index = 1; + if (ctx.circular === undefined) { + ctx.circular = new SafeMap(); + MapPrototypeSet(ctx.circular, value, index); + } else { + index = MapPrototypeGet(ctx.circular, value); + if (index === undefined) { + index = MapPrototypeGetSize(ctx.circular) + 1; + MapPrototypeSet(ctx.circular, value, index); + } + } + // Circular string is cyan + return ctx.stylize(`[Circular *${index}]`, "special"); +} + +const AGGREGATE_ERROR_HAS_AT_PATTERN = new SafeRegExp(/\s+at/); +const AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN = new SafeRegExp(/^(?!\s*$)/gm); + +function inspectError(value, ctx) { + const causes = [value]; + + let err = value; + while (err.cause) { + if (ArrayPrototypeIncludes(causes, err.cause)) { + ArrayPrototypePush(causes, handleCircular(err.cause, ctx)); + break; + } else { + ArrayPrototypePush(causes, err.cause); + err = err.cause; + } + } + + 
const refMap = new SafeMap(); + for (let i = 0; i < causes.length; ++i) { + const cause = causes[i]; + if (ctx.circular !== undefined) { + const index = MapPrototypeGet(ctx.circular, cause); + if (index !== undefined) { + MapPrototypeSet( + refMap, + cause, + ctx.stylize(` `, "special"), + ); + } + } + } + ArrayPrototypeShift(causes); + + let finalMessage = MapPrototypeGet(refMap, value) ?? ""; + + if (ObjectPrototypeIsPrototypeOf(AggregateErrorPrototype, value)) { + const stackLines = StringPrototypeSplit(value.stack, "\n"); + while (true) { + const line = ArrayPrototypeShift(stackLines); + if (RegExpPrototypeTest(AGGREGATE_ERROR_HAS_AT_PATTERN, line)) { + ArrayPrototypeUnshift(stackLines, line); + break; + } else if (typeof line === "undefined") { + break; + } + + finalMessage += line; + finalMessage += "\n"; + } + const aggregateMessage = ArrayPrototypeJoin( + ArrayPrototypeMap( + value.errors, + (error) => + StringPrototypeReplace( + inspectArgs([error]), + AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN, + StringPrototypeRepeat(" ", 4), + ), + ), + "\n", + ); + finalMessage += aggregateMessage; + finalMessage += "\n"; + finalMessage += ArrayPrototypeJoin(stackLines, "\n"); + } else { + const stack = value.stack; + if (stack?.includes("\n at")) { + finalMessage += stack; + } else { + finalMessage += `[${stack || ErrorPrototypeToString(value)}]`; + } + } + finalMessage += ArrayPrototypeJoin( + ArrayPrototypeMap( + causes, + (cause) => + "\nCaused by " + (MapPrototypeGet(refMap, cause) ?? "") + + (cause?.stack ?? 
cause), + ), + "", + ); + + return finalMessage; +} + +const hexSliceLookupTable = function () { + const alphabet = "0123456789abcdef"; + const table = new Array(256); + for (let i = 0; i < 16; ++i) { + const i16 = i * 16; + for (let j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j]; + } + } + return table; +}(); + +function hexSlice(buf, start, end) { + const len = TypedArrayPrototypeGetLength(buf); + if (!start || start < 0) { + start = 0; + } + if (!end || end < 0 || end > len) { + end = len; + } + let out = ""; + for (let i = start; i < end; ++i) { + out += hexSliceLookupTable[buf[i]]; + } + return out; +} + +const arrayBufferRegExp = new SafeRegExp("(.{2})", "g"); +function formatArrayBuffer(ctx, value) { + let valLen; + try { + valLen = ArrayBufferPrototypeGetByteLength(value); + } catch { + valLen = getSharedArrayBufferByteLength(value); + } + const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); + let buffer; + try { + buffer = new Uint8Array(value, 0, len); + } catch { + return [ctx.stylize("(detached)", "special")]; + } + let str = StringPrototypeTrim( + StringPrototypeReplace(hexSlice(buffer), arrayBufferRegExp, "$1 "), + ); + + const remaining = valLen - len; + if (remaining > 0) { + str += ` ... ${remaining} more byte${remaining > 1 ? "s" : ""}`; + } + return [`${ctx.stylize("[Uint8Contents]", "special")}: <${str}>`]; +} + +function formatNumber(fn, value) { + // Format -0 as '-0'. Checking `value === -0` won't distinguish 0 from -0. + return fn(ObjectIs(value, -0) ? "-0" : `${value}`, "number"); +} + +const PromiseState = { + Pending: 0, + Fulfilled: 1, + Rejected: 2, +}; + +function formatPromise(ctx, value, recurseTimes) { + let output; + let opResult; + // This op will fail for non-promises, but we get here for some promise-likes. 
+ try { + opResult = core.getPromiseDetails(value); + } catch { + return [ctx.stylize("", "special")]; + } + const { 0: state, 1: result } = opResult; + if (state === PromiseState.Pending) { + output = [ctx.stylize("", "special")]; + } else { + ctx.indentationLvl += 2; + const str = formatValue(ctx, result, recurseTimes); + ctx.indentationLvl -= 2; + output = [ + state === PromiseState.Rejected + ? `${ctx.stylize("", "special")} ${str}` + : str, + ]; + } + return output; +} + +function formatWeakCollection(ctx) { + return [ctx.stylize("", "special")]; +} + +function formatWeakSet(ctx, value, recurseTimes) { + // TODO(wafuwafu13): Implement + // const entries = previewEntries(value); + const entries = value; + return formatSetIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatWeakMap(ctx, value, recurseTimes) { + // TODO(wafuwafu13): Implement + // const entries = previewEntries(value); + const entries = value; + return formatMapIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatProperty( + ctx, + value, + recurseTimes, + key, + type, + desc, + original = value, +) { + let name, str; + let extra = " "; + desc = desc || ObjectGetOwnPropertyDescriptor(value, key) || + { value: value[key], enumerable: true }; + if (desc.value !== undefined) { + const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; + ctx.indentationLvl += diff; + str = formatValue(ctx, desc.value, recurseTimes); + if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { + extra = `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`; + } + ctx.indentationLvl -= diff; + } else if (desc.get !== undefined) { + const label = desc.set !== undefined ? 
"Getter/Setter" : "Getter"; + const s = ctx.stylize; + const sp = "special"; + if ( + ctx.getters && (ctx.getters === true || + (ctx.getters === "get" && desc.set === undefined) || + (ctx.getters === "set" && desc.set !== undefined)) + ) { + try { + const tmp = FunctionPrototypeCall(desc.get, original); + ctx.indentationLvl += 2; + if (tmp === null) { + str = `${s(`[${label}:`, sp)} ${s("null", "null")}${s("]", sp)}`; + } else if (typeof tmp === "object") { + str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; + } else { + const primitive = formatPrimitive(s, tmp, ctx); + str = `${s(`[${label}:`, sp)} ${primitive}${s("]", sp)}`; + } + ctx.indentationLvl -= 2; + } catch (err) { + const message = ``; + str = `${s(`[${label}:`, sp)} ${message}${s("]", sp)}`; + } + } else { + str = ctx.stylize(`[${label}]`, sp); + } + } else if (desc.set !== undefined) { + str = ctx.stylize("[Setter]", "special"); + } else { + str = ctx.stylize("undefined", "undefined"); + } + if (type === kArrayType) { + return str; + } + if (typeof key === "symbol") { + name = `[${ctx.stylize(maybeQuoteSymbol(key, ctx), "symbol")}]`; + } else if (key === "__proto__") { + name = "['__proto__']"; + } else if (desc.enumerable === false) { + const tmp = StringPrototypeReplace( + key, + strEscapeSequencesReplacer, + escapeFn, + ); + + name = `[${tmp}]`; + } else if (keyStrRegExp.test(key)) { + name = ctx.stylize(key, "name"); + } else { + name = ctx.stylize(quoteString(key, ctx), "string"); + } + return `${name}:${extra}${str}`; +} + +const colorRegExp = new SafeRegExp("\u001b\\[\\d\\d?m", "g"); +function removeColors(str) { + return StringPrototypeReplace(str, colorRegExp, ""); +} + +function isBelowBreakLength(ctx, output, start, base) { + // Each entry is separated by at least a comma. Thus, we start with a total + // length of at least `output.length`. In addition, some cases have a + // whitespace in-between each other that is added to the total as well. 
+ // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth + // function. Check the performance overhead and make it an opt-in in case it's + // significant. + let totalLength = output.length + start; + if (totalLength + output.length > ctx.breakLength) { + return false; + } + for (let i = 0; i < output.length; i++) { + if (ctx.colors) { + totalLength += removeColors(output[i]).length; + } else { + totalLength += output[i].length; + } + if (totalLength > ctx.breakLength) { + return false; + } + } + // Do not line up properties on the same line if `base` contains line breaks. + return base === "" || !StringPrototypeIncludes(base, "\n"); +} + +function formatBigInt(fn, value) { + return fn(`${value}n`, "bigint"); +} + +function formatNamespaceObject( + keys, + ctx, + value, + recurseTimes, +) { + const output = new Array(keys.length); + for (let i = 0; i < keys.length; i++) { + try { + output[i] = formatProperty( + ctx, + value, + recurseTimes, + keys[i], + kObjectType, + ); + } catch (_err) { + // TODO(wafuwfu13): Implement + // assert(isNativeError(err) && err.name === 'ReferenceError'); + // Use the existing functionality. This makes sure the indentation and + // line breaks are always correct. Otherwise it is very difficult to keep + // this aligned, even though this is a hacky way of dealing with this. + const tmp = { [keys[i]]: "" }; + output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); + const pos = StringPrototypeLastIndexOf(output[i], " "); + // We have to find the last whitespace and have to replace that value as + // it will be visualized as a regular string. + output[i] = StringPrototypeSlice(output[i], 0, pos + 1) + + ctx.stylize("", "special"); + } + } + // Reset the keys to an empty array. This prevents duplicated inspection. 
+ keys.length = 0; + return output; +} + +// The array is sparse and/or has extra keys +function formatSpecialArray( + ctx, + value, + recurseTimes, + maxLength, + output, + i, +) { + const keys = ObjectKeys(value); + let index = i; + for (; i < keys.length && output.length < maxLength; i++) { + const key = keys[i]; + const tmp = +key; + // Arrays can only have up to 2^32 - 1 entries + if (tmp > 2 ** 32 - 2) { + break; + } + if (`${index}` !== key) { + if (!numberRegExp.test(key)) { + break; + } + const emptyItems = tmp - index; + const ending = emptyItems > 1 ? "s" : ""; + const message = `<${emptyItems} empty item${ending}>`; + ArrayPrototypePush(output, ctx.stylize(message, "undefined")); + index = tmp; + if (output.length === maxLength) { + break; + } + } + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, key, kArrayType), + ); + index++; + } + const remaining = value.length - index; + if (output.length !== maxLength) { + if (remaining > 0) { + const ending = remaining > 1 ? "s" : ""; + const message = `<${remaining} empty item${ending}>`; + ArrayPrototypePush(output, ctx.stylize(message, "undefined")); + } + } else if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, + ); + } + return output; +} + +function getBoxedBase( + value, + ctx, + keys, + constructor, + tag, +) { + let type, primitive; + if (isNumberObject(value)) { + type = "Number"; + primitive = NumberPrototypeValueOf(value); + } else if (isStringObject(value)) { + type = "String"; + primitive = StringPrototypeValueOf(value); + // For boxed Strings, we have to remove the 0-n indexed entries, + // since they just noisy up the output and are redundant + // Make boxed primitive Strings look like such + ArrayPrototypeSplice(keys, 0, value.length); + } else if (isBooleanObject(value)) { + type = "Boolean"; + primitive = BooleanPrototypeValueOf(value); + } else if (isBigIntObject(value)) { + type = "BigInt"; + primitive = BigIntPrototypeValueOf(value); + } else { + type = "Symbol"; + primitive = SymbolPrototypeValueOf(value); + } + + let base = `[${type}`; + if (type !== constructor) { + if (constructor === null) { + base += " (null prototype)"; + } else { + base += ` (${constructor})`; + } + } + base += `: ${formatPrimitive(stylizeNoColor, primitive, ctx)}]`; + if (tag !== "" && tag !== constructor) { + base += ` [${tag}]`; + } + if (keys.length !== 0 || ctx.stylize === stylizeNoColor) { + return base; + } + return ctx.stylize(base, StringPrototypeToLowerCase(type)); +} + +function reduceToSingleString( + ctx, + output, + base, + braces, + extrasType, + recurseTimes, + value, +) { + if (ctx.compact !== true) { + if (typeof ctx.compact === "number" && ctx.compact >= 1) { + // Memorize the original output length. In case the output is grouped, + // prevent lining up the entries on a single line. + const entries = output.length; + // Group array elements together if the array contains at least six + // separate entries. 
+ if (extrasType === kArrayExtrasType && entries > 6) { + output = groupArrayElements(ctx, output, value); + } + // `ctx.currentDepth` is set to the most inner depth of the currently + // inspected object part while `recurseTimes` is the actual current depth + // that is inspected. + // + // Example: + // + // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } + // + // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max + // depth of 1. + // + // Consolidate all entries of the local most inner depth up to + // `ctx.compact`, as long as the properties are smaller than + // `ctx.breakLength`. + if ( + ctx.currentDepth - recurseTimes < ctx.compact && + entries === output.length + ) { + // Line up all entries on a single line in case the entries do not + // exceed `breakLength`. Add 10 as constant to start next to all other + // factors that may reduce `breakLength`. + const start = output.length + ctx.indentationLvl + + braces[0].length + base.length + 10; + if (isBelowBreakLength(ctx, output, start, base)) { + const joinedOutput = ArrayPrototypeJoin(output, ", "); + if (!StringPrototypeIncludes(joinedOutput, "\n")) { + return `${base ? `${base} ` : ""}${braces[0]} ${joinedOutput}` + + ` ${braces[1]}`; + } + } + } + } + // Line up each entry on an individual line. + const indentation = `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`; + return `${base ? `${base} ` : ""}${braces[0]}${indentation} ` + + `${ArrayPrototypeJoin(output, `,${indentation} `)}${ + ctx.trailingComma ? "," : "" + }${indentation}${braces[1]}`; + } + // Line up all entries on a single line in case the entries do not exceed + // `breakLength`. + if (isBelowBreakLength(ctx, output, 0, base)) { + return `${braces[0]}${base ? 
` ${base}` : ""} ${ + ArrayPrototypeJoin(output, ", ") + } ` + + braces[1]; + } + const indentation = StringPrototypeRepeat(" ", ctx.indentationLvl); + // If the opening "brace" is too large, like in the case of "Set {", + // we need to force the first item to be on the next line or the + // items will not line up correctly. + const ln = base === "" && braces[0].length === 1 + ? " " + : `${base ? ` ${base}` : ""}\n${indentation} `; + // Line up each entry on an individual line. + return `${braces[0]}${ln}${ + ArrayPrototypeJoin(output, `,\n${indentation} `) + } ${braces[1]}`; +} + +function groupArrayElements(ctx, output, value) { + let totalLength = 0; + let maxLength = 0; + let i = 0; + let outputLength = output.length; + if (ctx.maxArrayLength < output.length) { + // This makes sure the "... n more items" part is not taken into account. + outputLength--; + } + const separatorSpace = 2; // Add 1 for the space and 1 for the separator. + const dataLen = new Array(outputLength); + // Calculate the total length of all output entries and the individual max + // entries length of all output entries. We have to remove colors first, + // otherwise the length would not be calculated properly. + for (; i < outputLength; i++) { + const len = getStringWidth(output[i], ctx.colors); + dataLen[i] = len; + totalLength += len + separatorSpace; + if (maxLength < len) { + maxLength = len; + } + } + // Add two to `maxLength` as we add a single whitespace character plus a comma + // in-between two entries. + const actualMax = maxLength + separatorSpace; + // Check if at least three entries fit next to each other and prevent grouping + // of arrays that contains entries of very different length (i.e., if a single + // entry is longer than 1/5 of all other entries combined). Otherwise the + // space in-between small entries would be enormous. 
+ if ( + actualMax * 3 + ctx.indentationLvl < ctx.breakLength && + (totalLength / actualMax > 5 || maxLength <= 6) + ) { + const approxCharHeights = 2.5; + const averageBias = MathSqrt(actualMax - totalLength / output.length); + const biasedMax = MathMax(actualMax - 3 - averageBias, 1); + // Dynamically check how many columns seem possible. + const columns = MathMin( + // Ideally a square should be drawn. We expect a character to be about 2.5 + // times as high as wide. This is the area formula to calculate a square + // which contains n rectangles of size `actualMax * approxCharHeights`. + // Divide that by `actualMax` to receive the correct number of columns. + // The added bias increases the columns for short entries. + MathRound( + MathSqrt( + approxCharHeights * biasedMax * outputLength, + ) / biasedMax, + ), + // Do not exceed the breakLength. + MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), + // Limit array grouping for small `compact` modes as the user requested + // minimal grouping. + ctx.compact * 4, + // Limit the columns to a maximum of fifteen. + 15, + ); + // Return with the original output if no grouping should happen. + if (columns <= 1) { + return output; + } + const tmp = []; + const maxLineLength = []; + for (let i = 0; i < columns; i++) { + let lineMaxLength = 0; + for (let j = i; j < output.length; j += columns) { + if (dataLen[j] > lineMaxLength) { + lineMaxLength = dataLen[j]; + } + } + lineMaxLength += separatorSpace; + maxLineLength[i] = lineMaxLength; + } + let order = StringPrototypePadStart; + if (value !== undefined) { + for (let i = 0; i < output.length; i++) { + if (typeof value[i] !== "number" && typeof value[i] !== "bigint") { + order = StringPrototypePadEnd; + break; + } + } + } + // Each iteration creates a single line of grouped entries. + for (let i = 0; i < outputLength; i += columns) { + // The last lines may contain less entries than columns. 
+ const max = MathMin(i + columns, outputLength); + let str = ""; + let j = i; + for (; j < max - 1; j++) { + // Calculate extra color padding in case it's active. This has to be + // done line by line as some lines might contain more colors than + // others. + const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; + str += order(`${output[j]}, `, padding, " "); + } + if (order === StringPrototypePadStart) { + const padding = maxLineLength[j - i] + + output[j].length - + dataLen[j] - + separatorSpace; + str += StringPrototypePadStart(output[j], padding, " "); + } else { + str += output[j]; + } + ArrayPrototypePush(tmp, str); + } + if (ctx.maxArrayLength < output.length) { + ArrayPrototypePush(tmp, output[outputLength]); + } + output = tmp; + } + return output; +} + +function formatMapIterInner( + ctx, + recurseTimes, + entries, + state, +) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + // Entries exist as [key1, val1, key2, val2, ...] + const len = entries.length / 2; + const remaining = len - maxArrayLength; + const maxLength = MathMin(maxArrayLength, len); + const output = new Array(maxLength); + let i = 0; + ctx.indentationLvl += 2; + if (state === kWeak) { + for (; i < maxLength; i++) { + const pos = i * 2; + output[i] = `${formatValue(ctx, entries[pos], recurseTimes)} => ${ + formatValue(ctx, entries[pos + 1], recurseTimes) + }`; + } + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. 
+ if (!ctx.sorted) { + ArrayPrototypeSort(output); + } + } else { + for (; i < maxLength; i++) { + const pos = i * 2; + const res = [ + formatValue(ctx, entries[pos], recurseTimes), + formatValue(ctx, entries[pos + 1], recurseTimes), + ]; + output[i] = reduceToSingleString( + ctx, + res, + "", + ["[", "]"], + kArrayExtrasType, + recurseTimes, + ); + } + } + ctx.indentationLvl -= 2; + if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); + } + return output; +} + +function formatSetIterInner( + ctx, + recurseTimes, + entries, + state, +) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + const maxLength = MathMin(maxArrayLength, entries.length); + const output = new Array(maxLength); + ctx.indentationLvl += 2; + for (let i = 0; i < maxLength; i++) { + output[i] = formatValue(ctx, entries[i], recurseTimes); + } + ctx.indentationLvl -= 2; + if (state === kWeak && !ctx.sorted) { + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. + ArrayPrototypeSort(output); + } + const remaining = entries.length - maxLength; + if (remaining > 0) { + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); + } + return output; +} + +// Regex used for ansi escape code splitting +// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js +// License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore +// Matches all ansi escape code sequences in a string +const ansiPattern = "[\\u001B\\u009B][[\\]()#;?]*" + + "(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*" + + "|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)" + + "|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))"; +const ansi = new SafeRegExp(ansiPattern, "g"); + +/** + * Returns the number of columns required to display the given string. 
+ */ +export function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) { + str = stripVTControlCharacters(str); + } + str = StringPrototypeNormalize(str, "NFC"); + for (const char of new SafeStringIterator(str)) { + const code = StringPrototypeCodePointAt(char, 0); + if (isFullWidthCodePoint(code)) { + width += 2; + } else if (!isZeroWidthCodePoint(code)) { + width++; + } + } + + return width; +} + +const isZeroWidthCodePoint = (code) => { + return code <= 0x1F || // C0 control codes + (code >= 0x7F && code <= 0x9F) || // C1 control codes + (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks + (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters + // Combining Diacritical Marks for Symbols + (code >= 0x20D0 && code <= 0x20FF) || + (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors + (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks + (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors +}; + +/** + * Remove all VT control characters. Use to estimate displayed string width. + */ +export function stripVTControlCharacters(str) { + return StringPrototypeReplace(str, ansi, ""); +} + +function hasOwnProperty(obj, v) { + if (obj == null) { + return false; + } + return ObjectHasOwn(obj, v); +} + +// Copyright Joyent, Inc. and other Node contributors. MIT license. 
+// Forked from Node's lib/internal/cli_table.js + +const tableChars = { + middleMiddle: "\u2500", + rowMiddle: "\u253c", + topRight: "\u2510", + topLeft: "\u250c", + leftMiddle: "\u251c", + topMiddle: "\u252c", + bottomRight: "\u2518", + bottomLeft: "\u2514", + bottomMiddle: "\u2534", + rightMiddle: "\u2524", + left: "\u2502 ", + right: " \u2502", + middle: " \u2502 ", +}; + +function isFullWidthCodePoint(code) { + // Code points are partially derived from: + // http://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt + return ( + code >= 0x1100 && + (code <= 0x115f || // Hangul Jamo + code === 0x2329 || // LEFT-POINTING ANGLE BRACKET + code === 0x232a || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (code >= 0x3250 && code <= 0x4dbf) || + // CJK Unified Ideographs .. Yi Radicals + (code >= 0x4e00 && code <= 0xa4c6) || + // Hangul Jamo Extended-A + (code >= 0xa960 && code <= 0xa97c) || + // Hangul Syllables + (code >= 0xac00 && code <= 0xd7a3) || + // CJK Compatibility Ideographs + (code >= 0xf900 && code <= 0xfaff) || + // Vertical Forms + (code >= 0xfe10 && code <= 0xfe19) || + // CJK Compatibility Forms .. Small Form Variants + (code >= 0xfe30 && code <= 0xfe6b) || + // Halfwidth and Fullwidth Forms + (code >= 0xff01 && code <= 0xff60) || + (code >= 0xffe0 && code <= 0xffe6) || + // Kana Supplement + (code >= 0x1b000 && code <= 0x1b001) || + // Enclosed Ideographic Supplement + (code >= 0x1f200 && code <= 0x1f251) || + // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff + // Emoticons 0x1f600 - 0x1f64f + (code >= 0x1f300 && code <= 0x1f64f) || + // CJK Unified Ideographs Extension B .. 
Tertiary Ideographic Plane + (code >= 0x20000 && code <= 0x3fffd)) + ); +} + +function renderRow(row, columnWidths, columnRightAlign) { + let out = tableChars.left; + for (let i = 0; i < row.length; i++) { + const cell = row[i]; + const len = getStringWidth(cell); + const padding = StringPrototypeRepeat(" ", columnWidths[i] - len); + if (columnRightAlign?.[i]) { + out += `${padding}${cell}`; + } else { + out += `${cell}${padding}`; + } + if (i !== row.length - 1) { + out += tableChars.middle; + } + } + out += tableChars.right; + return out; +} + +function cliTable(head, columns) { + const rows = []; + const columnWidths = ArrayPrototypeMap(head, (h) => getStringWidth(h)); + const longestColumn = ArrayPrototypeReduce( + columns, + (n, a) => MathMax(n, a.length), + 0, + ); + const columnRightAlign = ArrayPrototypeFill( + new Array(columnWidths.length), + true, + ); + + for (let i = 0; i < head.length; i++) { + const column = columns[i]; + for (let j = 0; j < longestColumn; j++) { + if (rows[j] === undefined) { + rows[j] = []; + } + const value = (rows[j][i] = hasOwnProperty(column, j) ? 
column[j] : ""); + const width = columnWidths[i] || 0; + const counted = getStringWidth(value); + columnWidths[i] = MathMax(width, counted); + columnRightAlign[i] &= NumberIsInteger(+value); + } + } + + const divider = ArrayPrototypeMap( + columnWidths, + (i) => StringPrototypeRepeat(tableChars.middleMiddle, i + 2), + ); + + let result = + `${tableChars.topLeft}${ + ArrayPrototypeJoin(divider, tableChars.topMiddle) + }` + + `${tableChars.topRight}\n${renderRow(head, columnWidths)}\n` + + `${tableChars.leftMiddle}${ + ArrayPrototypeJoin(divider, tableChars.rowMiddle) + }` + + `${tableChars.rightMiddle}\n`; + + for (let i = 0; i < rows.length; ++i) { + const row = rows[i]; + result += `${renderRow(row, columnWidths, columnRightAlign)}\n`; + } + + result += + `${tableChars.bottomLeft}${ + ArrayPrototypeJoin(divider, tableChars.bottomMiddle) + }` + + tableChars.bottomRight; + + return result; +} +/* End of forked part */ + +// We can match Node's quoting behavior exactly by swapping the double quote and +// single quote in this array. That would give preference to single quotes. +// However, we prefer double quotes as the default. + +const denoInspectDefaultOptions = { + indentationLvl: 0, + currentDepth: 0, + stylize: stylizeNoColor, + + showHidden: false, + depth: 4, + colors: false, + showProxy: false, + breakLength: 80, + escapeSequences: true, + compact: 3, + sorted: false, + getters: false, + + // node only + maxArrayLength: 100, + maxStringLength: 100, // deno: strAbbreviateSize: 100 + customInspect: true, + + // deno only + /** You can override the quotes preference in inspectString. + * Used by util.inspect() */ + // TODO(kt3k): Consider using symbol as a key to hide this from the public + // API. 
+ quotes: ['"', "'", "`"], + iterableLimit: 100, // similar to node's maxArrayLength, but doesn't only apply to arrays + trailingComma: false, + + inspect, + + // TODO(@crowlKats): merge into indentationLvl + indentLevel: 0, +}; + +function getDefaultInspectOptions() { + return { + budget: {}, + seen: [], + ...denoInspectDefaultOptions, + }; +} + +const DEFAULT_INDENT = " "; // Default indent string + +const STR_ABBREVIATE_SIZE = 100; + +class CSI { + static kClear = "\x1b[1;1H"; + static kClearScreenDown = "\x1b[0J"; +} + +const QUOTE_SYMBOL_REG = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_.0-9]*$/); + +function maybeQuoteSymbol(symbol, ctx) { + const description = SymbolPrototypeGetDescription(symbol); + + if (description === undefined) { + return SymbolPrototypeToString(symbol); + } + + if (RegExpPrototypeTest(QUOTE_SYMBOL_REG, description)) { + return SymbolPrototypeToString(symbol); + } + + return `Symbol(${quoteString(description, ctx)})`; +} + +/** Surround the string in quotes. + * + * The quote symbol is chosen by taking the first of the `QUOTES` array which + * does not occur in the string. If they all occur, settle with `QUOTES[0]`. + * + * Insert a backslash before any occurrence of the chosen quote symbol and + * before any backslash. + */ +function quoteString(string, ctx) { + const quote = ArrayPrototypeFind( + ctx.quotes, + (c) => !StringPrototypeIncludes(string, c), + ) ?? + ctx.quotes[0]; + const escapePattern = new SafeRegExp(`(?=[${quote}\\\\])`, "g"); + string = StringPrototypeReplace(string, escapePattern, "\\"); + if (ctx.escapeSequences) { + string = replaceEscapeSequences(string); + } + return `${quote}${string}${quote}`; +} + +const ESCAPE_PATTERN = new SafeRegExp(/([\b\f\n\r\t\v])/g); +const ESCAPE_MAP = ObjectFreeze({ + "\b": "\\b", + "\f": "\\f", + "\n": "\\n", + "\r": "\\r", + "\t": "\\t", + "\v": "\\v", +}); + +const ESCAPE_PATTERN2 = new SafeRegExp("[\x00-\x1f\x7f-\x9f]", "g"); + +// Replace escape sequences that can modify output. 
+function replaceEscapeSequences(string) { + return StringPrototypeReplace( + StringPrototypeReplace( + string, + ESCAPE_PATTERN, + (c) => ESCAPE_MAP[c], + ), + new SafeRegExp(ESCAPE_PATTERN2), + (c) => + "\\x" + + StringPrototypePadStart( + NumberPrototypeToString(StringPrototypeCharCodeAt(c, 0), 16), + 2, + "0", + ), + ); +} + +// Print strings when they are inside of arrays or objects with quotes +function inspectValueWithQuotes( + value, + ctx, +) { + const abbreviateSize = typeof ctx.strAbbreviateSize === "undefined" + ? STR_ABBREVIATE_SIZE + : ctx.strAbbreviateSize; + switch (typeof value) { + case "string": { + const trunc = value.length > abbreviateSize + ? StringPrototypeSlice(value, 0, abbreviateSize) + "..." + : value; + return ctx.stylize(quoteString(trunc, ctx), "string"); // Quoted strings are green + } + default: + return formatValue(ctx, value, 0); + } +} + +const colorKeywords = new SafeMap([ + ["black", "#000000"], + ["silver", "#c0c0c0"], + ["gray", "#808080"], + ["white", "#ffffff"], + ["maroon", "#800000"], + ["red", "#ff0000"], + ["purple", "#800080"], + ["fuchsia", "#ff00ff"], + ["green", "#008000"], + ["lime", "#00ff00"], + ["olive", "#808000"], + ["yellow", "#ffff00"], + ["navy", "#000080"], + ["blue", "#0000ff"], + ["teal", "#008080"], + ["aqua", "#00ffff"], + ["orange", "#ffa500"], + ["aliceblue", "#f0f8ff"], + ["antiquewhite", "#faebd7"], + ["aquamarine", "#7fffd4"], + ["azure", "#f0ffff"], + ["beige", "#f5f5dc"], + ["bisque", "#ffe4c4"], + ["blanchedalmond", "#ffebcd"], + ["blueviolet", "#8a2be2"], + ["brown", "#a52a2a"], + ["burlywood", "#deb887"], + ["cadetblue", "#5f9ea0"], + ["chartreuse", "#7fff00"], + ["chocolate", "#d2691e"], + ["coral", "#ff7f50"], + ["cornflowerblue", "#6495ed"], + ["cornsilk", "#fff8dc"], + ["crimson", "#dc143c"], + ["cyan", "#00ffff"], + ["darkblue", "#00008b"], + ["darkcyan", "#008b8b"], + ["darkgoldenrod", "#b8860b"], + ["darkgray", "#a9a9a9"], + ["darkgreen", "#006400"], + ["darkgrey", "#a9a9a9"], + 
["darkkhaki", "#bdb76b"], + ["darkmagenta", "#8b008b"], + ["darkolivegreen", "#556b2f"], + ["darkorange", "#ff8c00"], + ["darkorchid", "#9932cc"], + ["darkred", "#8b0000"], + ["darksalmon", "#e9967a"], + ["darkseagreen", "#8fbc8f"], + ["darkslateblue", "#483d8b"], + ["darkslategray", "#2f4f4f"], + ["darkslategrey", "#2f4f4f"], + ["darkturquoise", "#00ced1"], + ["darkviolet", "#9400d3"], + ["deeppink", "#ff1493"], + ["deepskyblue", "#00bfff"], + ["dimgray", "#696969"], + ["dimgrey", "#696969"], + ["dodgerblue", "#1e90ff"], + ["firebrick", "#b22222"], + ["floralwhite", "#fffaf0"], + ["forestgreen", "#228b22"], + ["gainsboro", "#dcdcdc"], + ["ghostwhite", "#f8f8ff"], + ["gold", "#ffd700"], + ["goldenrod", "#daa520"], + ["greenyellow", "#adff2f"], + ["grey", "#808080"], + ["honeydew", "#f0fff0"], + ["hotpink", "#ff69b4"], + ["indianred", "#cd5c5c"], + ["indigo", "#4b0082"], + ["ivory", "#fffff0"], + ["khaki", "#f0e68c"], + ["lavender", "#e6e6fa"], + ["lavenderblush", "#fff0f5"], + ["lawngreen", "#7cfc00"], + ["lemonchiffon", "#fffacd"], + ["lightblue", "#add8e6"], + ["lightcoral", "#f08080"], + ["lightcyan", "#e0ffff"], + ["lightgoldenrodyellow", "#fafad2"], + ["lightgray", "#d3d3d3"], + ["lightgreen", "#90ee90"], + ["lightgrey", "#d3d3d3"], + ["lightpink", "#ffb6c1"], + ["lightsalmon", "#ffa07a"], + ["lightseagreen", "#20b2aa"], + ["lightskyblue", "#87cefa"], + ["lightslategray", "#778899"], + ["lightslategrey", "#778899"], + ["lightsteelblue", "#b0c4de"], + ["lightyellow", "#ffffe0"], + ["limegreen", "#32cd32"], + ["linen", "#faf0e6"], + ["magenta", "#ff00ff"], + ["mediumaquamarine", "#66cdaa"], + ["mediumblue", "#0000cd"], + ["mediumorchid", "#ba55d3"], + ["mediumpurple", "#9370db"], + ["mediumseagreen", "#3cb371"], + ["mediumslateblue", "#7b68ee"], + ["mediumspringgreen", "#00fa9a"], + ["mediumturquoise", "#48d1cc"], + ["mediumvioletred", "#c71585"], + ["midnightblue", "#191970"], + ["mintcream", "#f5fffa"], + ["mistyrose", "#ffe4e1"], + ["moccasin", "#ffe4b5"], + 
["navajowhite", "#ffdead"], + ["oldlace", "#fdf5e6"], + ["olivedrab", "#6b8e23"], + ["orangered", "#ff4500"], + ["orchid", "#da70d6"], + ["palegoldenrod", "#eee8aa"], + ["palegreen", "#98fb98"], + ["paleturquoise", "#afeeee"], + ["palevioletred", "#db7093"], + ["papayawhip", "#ffefd5"], + ["peachpuff", "#ffdab9"], + ["peru", "#cd853f"], + ["pink", "#ffc0cb"], + ["plum", "#dda0dd"], + ["powderblue", "#b0e0e6"], + ["rosybrown", "#bc8f8f"], + ["royalblue", "#4169e1"], + ["saddlebrown", "#8b4513"], + ["salmon", "#fa8072"], + ["sandybrown", "#f4a460"], + ["seagreen", "#2e8b57"], + ["seashell", "#fff5ee"], + ["sienna", "#a0522d"], + ["skyblue", "#87ceeb"], + ["slateblue", "#6a5acd"], + ["slategray", "#708090"], + ["slategrey", "#708090"], + ["snow", "#fffafa"], + ["springgreen", "#00ff7f"], + ["steelblue", "#4682b4"], + ["tan", "#d2b48c"], + ["thistle", "#d8bfd8"], + ["tomato", "#ff6347"], + ["turquoise", "#40e0d0"], + ["violet", "#ee82ee"], + ["wheat", "#f5deb3"], + ["whitesmoke", "#f5f5f5"], + ["yellowgreen", "#9acd32"], + ["rebeccapurple", "#663399"], +]); + +const HASH_PATTERN = new SafeRegExp( + /^#([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})?$/, +); +const SMALL_HASH_PATTERN = new SafeRegExp( + /^#([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])?$/, +); +const RGB_PATTERN = new SafeRegExp( + /^rgba?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/, +); +const HSL_PATTERN = new SafeRegExp( + /^hsla?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)%\s*,\s*([+\-]?\d*\.?\d+)%\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/, +); + +function parseCssColor(colorString) { + if (MapPrototypeHas(colorKeywords, colorString)) { + colorString = MapPrototypeGet(colorKeywords, colorString); + } + // deno-fmt-ignore + const hashMatch = StringPrototypeMatch(colorString, HASH_PATTERN); + if (hashMatch != null) { + return [ + Number(`0x${hashMatch[1]}`), + Number(`0x${hashMatch[2]}`), + Number(`0x${hashMatch[3]}`), + ]; + } + // 
deno-fmt-ignore + const smallHashMatch = StringPrototypeMatch(colorString, SMALL_HASH_PATTERN); + if (smallHashMatch != null) { + return [ + Number(`0x${smallHashMatch[1]}0`), + Number(`0x${smallHashMatch[2]}0`), + Number(`0x${smallHashMatch[3]}0`), + ]; + } + // deno-fmt-ignore + const rgbMatch = StringPrototypeMatch(colorString, RGB_PATTERN); + if (rgbMatch != null) { + return [ + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[1])))), + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[2])))), + MathRound(MathMax(0, MathMin(255, Number(rgbMatch[3])))), + ]; + } + // deno-fmt-ignore + const hslMatch = StringPrototypeMatch(colorString, HSL_PATTERN); + if (hslMatch != null) { + // https://www.rapidtables.com/convert/color/hsl-to-rgb.html + let h = Number(hslMatch[1]) % 360; + if (h < 0) { + h += 360; + } + const s = MathMax(0, MathMin(100, Number(hslMatch[2]))) / 100; + const l = MathMax(0, MathMin(100, Number(hslMatch[3]))) / 100; + const c = (1 - MathAbs(2 * l - 1)) * s; + const x = c * (1 - MathAbs((h / 60) % 2 - 1)); + const m = l - c / 2; + let r_; + let g_; + let b_; + if (h < 60) { + ({ 0: r_, 1: g_, 2: b_ } = [c, x, 0]); + } else if (h < 120) { + ({ 0: r_, 1: g_, 2: b_ } = [x, c, 0]); + } else if (h < 180) { + ({ 0: r_, 1: g_, 2: b_ } = [0, c, x]); + } else if (h < 240) { + ({ 0: r_, 1: g_, 2: b_ } = [0, x, c]); + } else if (h < 300) { + ({ 0: r_, 1: g_, 2: b_ } = [x, 0, c]); + } else { + ({ 0: r_, 1: g_, 2: b_ } = [c, 0, x]); + } + return [ + MathRound((r_ + m) * 255), + MathRound((g_ + m) * 255), + MathRound((b_ + m) * 255), + ]; + } + return null; +} + +function getDefaultCss() { + return { + backgroundColor: null, + color: null, + fontWeight: null, + fontStyle: null, + textDecorationColor: null, + textDecorationLine: [], + }; +} + +const SPACE_PATTERN = new SafeRegExp(/\s+/g); + +function parseCss(cssString) { + const css = getDefaultCss(); + + const rawEntries = []; + let inValue = false; + let currentKey = null; + let parenthesesDepth = 0; + let 
currentPart = ""; + for (let i = 0; i < cssString.length; i++) { + const c = cssString[i]; + if (c == "(") { + parenthesesDepth++; + } else if (parenthesesDepth > 0) { + if (c == ")") { + parenthesesDepth--; + } + } else if (inValue) { + if (c == ";") { + const value = StringPrototypeTrim(currentPart); + if (value != "") { + ArrayPrototypePush(rawEntries, [currentKey, value]); + } + currentKey = null; + currentPart = ""; + inValue = false; + continue; + } + } else if (c == ":") { + currentKey = StringPrototypeTrim(currentPart); + currentPart = ""; + inValue = true; + continue; + } + currentPart += c; + } + if (inValue && parenthesesDepth == 0) { + const value = StringPrototypeTrim(currentPart); + if (value != "") { + ArrayPrototypePush(rawEntries, [currentKey, value]); + } + currentKey = null; + currentPart = ""; + } + + for (let i = 0; i < rawEntries.length; ++i) { + const { 0: key, 1: value } = rawEntries[i]; + if (key == "background-color") { + if (value != null) { + css.backgroundColor = value; + } + } else if (key == "color") { + if (value != null) { + css.color = value; + } + } else if (key == "font-weight") { + if (value == "bold") { + css.fontWeight = value; + } + } else if (key == "font-style") { + if ( + ArrayPrototypeIncludes(["italic", "oblique", "oblique 14deg"], value) + ) { + css.fontStyle = "italic"; + } + } else if (key == "text-decoration-line") { + css.textDecorationLine = []; + const lineTypes = StringPrototypeSplit(value, SPACE_PATTERN); + for (let i = 0; i < lineTypes.length; ++i) { + const lineType = lineTypes[i]; + if ( + ArrayPrototypeIncludes( + ["line-through", "overline", "underline"], + lineType, + ) + ) { + ArrayPrototypePush(css.textDecorationLine, lineType); + } + } + } else if (key == "text-decoration-color") { + const color = parseCssColor(value); + if (color != null) { + css.textDecorationColor = color; + } + } else if (key == "text-decoration") { + css.textDecorationColor = null; + css.textDecorationLine = []; + const args = 
StringPrototypeSplit(value, SPACE_PATTERN); + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + const maybeColor = parseCssColor(arg); + if (maybeColor != null) { + css.textDecorationColor = maybeColor; + } else if ( + ArrayPrototypeIncludes( + ["line-through", "overline", "underline"], + arg, + ) + ) { + ArrayPrototypePush(css.textDecorationLine, arg); + } + } + } + } + + return css; +} + +function colorEquals(color1, color2) { + return color1?.[0] == color2?.[0] && color1?.[1] == color2?.[1] && + color1?.[2] == color2?.[2]; +} + +function cssToAnsi(css, prevCss = null) { + prevCss = prevCss ?? getDefaultCss(); + let ansi = ""; + if (!colorEquals(css.backgroundColor, prevCss.backgroundColor)) { + if (css.backgroundColor == null) { + ansi += "\x1b[49m"; + } else if (css.backgroundColor == "black") { + ansi += `\x1b[40m`; + } else if (css.backgroundColor == "red") { + ansi += `\x1b[41m`; + } else if (css.backgroundColor == "green") { + ansi += `\x1b[42m`; + } else if (css.backgroundColor == "yellow") { + ansi += `\x1b[43m`; + } else if (css.backgroundColor == "blue") { + ansi += `\x1b[44m`; + } else if (css.backgroundColor == "magenta") { + ansi += `\x1b[45m`; + } else if (css.backgroundColor == "cyan") { + ansi += `\x1b[46m`; + } else if (css.backgroundColor == "white") { + ansi += `\x1b[47m`; + } else { + if (ArrayIsArray(css.backgroundColor)) { + const { 0: r, 1: g, 2: b } = css.backgroundColor; + ansi += `\x1b[48;2;${r};${g};${b}m`; + } else { + const parsed = parseCssColor(css.backgroundColor); + if (parsed !== null) { + const { 0: r, 1: g, 2: b } = parsed; + ansi += `\x1b[48;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[49m"; + } + } + } + } + if (!colorEquals(css.color, prevCss.color)) { + if (css.color == null) { + ansi += "\x1b[39m"; + } else if (css.color == "black") { + ansi += `\x1b[30m`; + } else if (css.color == "red") { + ansi += `\x1b[31m`; + } else if (css.color == "green") { + ansi += `\x1b[32m`; + } else if (css.color == "yellow") 
{ + ansi += `\x1b[33m`; + } else if (css.color == "blue") { + ansi += `\x1b[34m`; + } else if (css.color == "magenta") { + ansi += `\x1b[35m`; + } else if (css.color == "cyan") { + ansi += `\x1b[36m`; + } else if (css.color == "white") { + ansi += `\x1b[37m`; + } else { + if (ArrayIsArray(css.color)) { + const { 0: r, 1: g, 2: b } = css.color; + ansi += `\x1b[38;2;${r};${g};${b}m`; + } else { + const parsed = parseCssColor(css.color); + if (parsed !== null) { + const { 0: r, 1: g, 2: b } = parsed; + ansi += `\x1b[38;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[39m"; + } + } + } + } + if (css.fontWeight != prevCss.fontWeight) { + if (css.fontWeight == "bold") { + ansi += `\x1b[1m`; + } else { + ansi += "\x1b[22m"; + } + } + if (css.fontStyle != prevCss.fontStyle) { + if (css.fontStyle == "italic") { + ansi += `\x1b[3m`; + } else { + ansi += "\x1b[23m"; + } + } + if (!colorEquals(css.textDecorationColor, prevCss.textDecorationColor)) { + if (css.textDecorationColor != null) { + const { 0: r, 1: g, 2: b } = css.textDecorationColor; + ansi += `\x1b[58;2;${r};${g};${b}m`; + } else { + ansi += "\x1b[59m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "line-through") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "line-through") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "line-through")) { + ansi += "\x1b[9m"; + } else { + ansi += "\x1b[29m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "overline") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "overline") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "overline")) { + ansi += "\x1b[53m"; + } else { + ansi += "\x1b[55m"; + } + } + if ( + ArrayPrototypeIncludes(css.textDecorationLine, "underline") != + ArrayPrototypeIncludes(prevCss.textDecorationLine, "underline") + ) { + if (ArrayPrototypeIncludes(css.textDecorationLine, "underline")) { + ansi += "\x1b[4m"; + } else { + ansi += "\x1b[24m"; + } + } + return ansi; +} + +function 
inspectArgs(args, inspectOptions = {}) { + const ctx = { + ...getDefaultInspectOptions(), + ...inspectOptions, + }; + if (inspectOptions.iterableLimit !== undefined) { + ctx.maxArrayLength = inspectOptions.iterableLimit; + } + if (inspectOptions.strAbbreviateSize !== undefined) { + ctx.maxStringLength = inspectOptions.strAbbreviateSize; + } + if (ctx.colors) ctx.stylize = createStylizeWithColor(styles, colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + + const noColor = getNoColor(); + const first = args[0]; + let a = 0; + let string = ""; + + if (typeof first == "string" && args.length > 1) { + a++; + // Index of the first not-yet-appended character. Use this so we only + // have to append to `string` when a substitution occurs / at the end. + let appendedChars = 0; + let usedStyle = false; + let prevCss = null; + for (let i = 0; i < first.length - 1; i++) { + if (first[i] == "%") { + const char = first[++i]; + if (a < args.length) { + let formattedArg = null; + if (char == "s") { + // Format as a string. + formattedArg = String(args[a++]); + } else if (ArrayPrototypeIncludes(["d", "i"], char)) { + // Format as an integer. + const value = args[a++]; + if (typeof value == "bigint") { + formattedArg = `${value}n`; + } else if (typeof value == "number") { + formattedArg = `${NumberParseInt(String(value))}`; + } else { + formattedArg = "NaN"; + } + } else if (char == "f") { + // Format as a floating point value. + const value = args[a++]; + if (typeof value == "number") { + formattedArg = `${value}`; + } else { + formattedArg = "NaN"; + } + } else if (ArrayPrototypeIncludes(["O", "o"], char)) { + // Format as an object. 
+ formattedArg = formatValue(ctx, args[a++], 0); + } else if (char == "c") { + const value = args[a++]; + if (!noColor) { + const css = parseCss(value); + formattedArg = cssToAnsi(css, prevCss); + if (formattedArg != "") { + usedStyle = true; + prevCss = css; + } + } else { + formattedArg = ""; + } + } + + if (formattedArg != null) { + string += StringPrototypeSlice(first, appendedChars, i - 1) + + formattedArg; + appendedChars = i + 1; + } + } + if (char == "%") { + string += StringPrototypeSlice(first, appendedChars, i - 1) + "%"; + appendedChars = i + 1; + } + } + } + string += StringPrototypeSlice(first, appendedChars); + if (usedStyle) { + string += "\x1b[0m"; + } + } + + for (; a < args.length; a++) { + if (a > 0) { + string += " "; + } + if (typeof args[a] == "string") { + string += args[a]; + } else { + // Use default maximum depth for null or undefined arguments. + string += formatValue(ctx, args[a], 0); + } + } + + if (ctx.indentLevel > 0) { + const groupIndent = StringPrototypeRepeat( + DEFAULT_INDENT, + ctx.indentLevel, + ); + string = groupIndent + + StringPrototypeReplaceAll(string, "\n", `\n${groupIndent}`); + } + + return string; +} + +function createStylizeWithColor(styles, colors) { + return function stylizeWithColor(str, styleType) { + const style = styles[styleType]; + if (style !== undefined) { + const color = colors[style]; + if (color !== undefined) { + return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; + } + } + return str; + }; +} + +const countMap = new SafeMap(); +const timerMap = new SafeMap(); +const isConsoleInstance = Symbol("isConsoleInstance"); + +function getConsoleInspectOptions() { + const color = !getNoColor(); + return { + ...getDefaultInspectOptions(), + colors: color, + stylize: color ? 
createStylizeWithColor(styles, colors) : stylizeNoColor, + }; +} + +class Console { + #printFunc = null; + [isConsoleInstance] = false; + + constructor(printFunc) { + this.#printFunc = printFunc; + this.indentLevel = 0; + this[isConsoleInstance] = true; + + // ref https://console.spec.whatwg.org/#console-namespace + // For historical web-compatibility reasons, the namespace object for + // console must have as its [[Prototype]] an empty object, created as if + // by ObjectCreate(%ObjectPrototype%), instead of %ObjectPrototype%. + const console = ObjectCreate({}, { + [SymbolToStringTag]: { + enumerable: false, + writable: false, + configurable: true, + value: "console", + }, + }); + ObjectAssign(console, this); + return console; + } + + log = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 1, + ); + }; + + debug = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 0, + ); + }; + + info = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 1, + ); + }; + + dir = (obj = undefined, options = {}) => { + this.#printFunc( + inspectArgs([obj], { ...getConsoleInspectOptions(), ...options }) + + "\n", + 1, + ); + }; + + dirxml = this.dir; + + warn = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 2, + ); + }; + + error = (...args) => { + this.#printFunc( + inspectArgs(args, { + ...getConsoleInspectOptions(), + indentLevel: this.indentLevel, + }) + "\n", + 3, + ); + }; + + assert = (condition = false, ...args) => { + if (condition) { + return; + } + + if (args.length === 0) { + this.error("Assertion failed"); + return; + } + + const [first, ...rest] = new SafeArrayIterator(args); + + if (typeof first === "string") { + this.error( + 
`Assertion failed: ${first}`, + ...new SafeArrayIterator(rest), + ); + return; + } + + this.error(`Assertion failed:`, ...new SafeArrayIterator(args)); + }; + + count = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(countMap, label)) { + const current = MapPrototypeGet(countMap, label) || 0; + MapPrototypeSet(countMap, label, current + 1); + } else { + MapPrototypeSet(countMap, label, 1); + } + + this.info(`${label}: ${MapPrototypeGet(countMap, label)}`); + }; + + countReset = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(countMap, label)) { + MapPrototypeSet(countMap, label, 0); + } else { + this.warn(`Count for '${label}' does not exist`); + } + }; + + table = (data = undefined, properties = undefined) => { + if (properties !== undefined && !ArrayIsArray(properties)) { + throw new Error( + "The 'properties' argument must be of type Array. " + + "Received type string", + ); + } + + if (data === null || typeof data !== "object") { + return this.log(data); + } + + const stringifyValue = (value) => + inspectValueWithQuotes(value, { + ...getDefaultInspectOptions(), + depth: 1, + compact: true, + }); + const toTable = (header, body) => this.log(cliTable(header, body)); + + let resultData; + const isSet = ObjectPrototypeIsPrototypeOf(SetPrototype, data); + const isMap = ObjectPrototypeIsPrototypeOf(MapPrototype, data); + const valuesKey = "Values"; + const indexKey = isSet || isMap ? "(iter idx)" : "(idx)"; + + if (isSet) { + resultData = [...new SafeSetIterator(data)]; + } else if (isMap) { + let idx = 0; + resultData = {}; + + MapPrototypeForEach(data, (v, k) => { + resultData[idx] = { Key: k, Values: v }; + idx++; + }); + } else { + resultData = data; + } + + const keys = ObjectKeys(resultData); + const numRows = keys.length; + + const objectValues = properties + ? 
ObjectFromEntries( + ArrayPrototypeMap( + properties, + (name) => [name, ArrayPrototypeFill(new Array(numRows), "")], + ), + ) + : {}; + const indexKeys = []; + const values = []; + + let hasPrimitives = false; + ArrayPrototypeForEach(keys, (k, idx) => { + const value = resultData[k]; + const primitive = value === null || + (typeof value !== "function" && typeof value !== "object"); + if (properties === undefined && primitive) { + hasPrimitives = true; + ArrayPrototypePush(values, stringifyValue(value)); + } else { + const valueObj = value || {}; + const keys = properties || ObjectKeys(valueObj); + for (let i = 0; i < keys.length; ++i) { + const k = keys[i]; + if (!primitive && ReflectHas(valueObj, k)) { + if (!(ReflectHas(objectValues, k))) { + objectValues[k] = ArrayPrototypeFill(new Array(numRows), ""); + } + objectValues[k][idx] = stringifyValue(valueObj[k]); + } + } + ArrayPrototypePush(values, ""); + } + + ArrayPrototypePush(indexKeys, k); + }); + + const headerKeys = ObjectKeys(objectValues); + const bodyValues = ObjectValues(objectValues); + const headerProps = properties || + [ + ...new SafeArrayIterator(headerKeys), + !isMap && hasPrimitives && valuesKey, + ]; + const header = ArrayPrototypeFilter([ + indexKey, + ...new SafeArrayIterator(headerProps), + ], Boolean); + const body = [indexKeys, ...new SafeArrayIterator(bodyValues), values]; + + toTable(header, body); + }; + + time = (label = "default") => { + label = String(label); + + if (MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' already exists`); + return; + } + + MapPrototypeSet(timerMap, label, DateNow()); + }; + + timeLog = (label = "default", ...args) => { + label = String(label); + + if (!MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' does not exists`); + return; + } + + const startTime = MapPrototypeGet(timerMap, label); + const duration = DateNow() - startTime; + + this.info(`${label}: ${duration}ms`, ...new SafeArrayIterator(args)); + }; + + timeEnd = 
(label = "default") => { + label = String(label); + + if (!MapPrototypeHas(timerMap, label)) { + this.warn(`Timer '${label}' does not exist`); + return; + } + + const startTime = MapPrototypeGet(timerMap, label); + MapPrototypeDelete(timerMap, label); + const duration = DateNow() - startTime; + + this.info(`${label}: ${duration}ms`); + }; + + group = (...label) => { + if (label.length > 0) { + this.log(...new SafeArrayIterator(label)); + } + this.indentLevel += 2; + }; + + groupCollapsed = this.group; + + groupEnd = () => { + if (this.indentLevel > 0) { + this.indentLevel -= 2; + } + }; + + clear = () => { + this.indentLevel = 0; + this.#printFunc(CSI.kClear, 1); + this.#printFunc(CSI.kClearScreenDown, 1); + }; + + trace = (...args) => { + const message = inspectArgs( + args, + { ...getConsoleInspectOptions(), indentLevel: 0 }, + ); + const err = { + name: "Trace", + message, + }; + ErrorCaptureStackTrace(err, this.trace); + this.error(err.stack); + }; + + // These methods are noops, but when the inspector is connected, they + // call into V8. 
+ profile = (_label) => {}; + profileEnd = (_label) => {}; + timeStamp = (_label) => {}; + + static [SymbolHasInstance](instance) { + return instance[isConsoleInstance]; + } +} + +const customInspect = SymbolFor("Deno.customInspect"); + +function inspect( + value, + inspectOptions = {}, +) { + // Default options + const ctx = { + ...getDefaultInspectOptions(), + ...inspectOptions, + }; + if (inspectOptions.iterableLimit !== undefined) { + ctx.maxArrayLength = inspectOptions.iterableLimit; + } + if (inspectOptions.strAbbreviateSize !== undefined) { + ctx.maxStringLength = inspectOptions.strAbbreviateSize; + } + + if (ctx.colors) ctx.stylize = createStylizeWithColor(styles, colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); +} + +/** Creates a proxy that represents a subset of the properties + * of the original object optionally without evaluating the properties + * in order to get the values. */ +function createFilteredInspectProxy({ object, keys, evaluate }) { + const obj = class {}; + if (object.constructor?.name) { + ObjectDefineProperty(obj, "name", { value: object.constructor.name }); + } + + return new Proxy(new obj(), { + get(_target, key) { + if (key === SymbolToStringTag) { + return object.constructor?.name; + } else if (ArrayPrototypeIncludes(keys, key)) { + return ReflectGet(object, key); + } else { + return undefined; + } + }, + getOwnPropertyDescriptor(_target, key) { + if (!ArrayPrototypeIncludes(keys, key)) { + return undefined; + } else if (evaluate) { + return getEvaluatedDescriptor(object, key); + } else { + return getDescendantPropertyDescriptor(object, key) ?? 
+ getEvaluatedDescriptor(object, key); + } + }, + has(_target, key) { + return ArrayPrototypeIncludes(keys, key); + }, + ownKeys() { + return keys; + }, + }); + + function getDescendantPropertyDescriptor(object, key) { + let propertyDescriptor = ReflectGetOwnPropertyDescriptor(object, key); + if (!propertyDescriptor) { + const prototype = ReflectGetPrototypeOf(object); + if (prototype) { + propertyDescriptor = getDescendantPropertyDescriptor(prototype, key); + } + } + return propertyDescriptor; + } + + function getEvaluatedDescriptor(object, key) { + return { + configurable: true, + enumerable: true, + value: object[key], + }; + } +} + +// A helper function that will bind our own console implementation +// with default implementation of Console from V8. This will cause +// console messages to be piped to inspector console. +// +// We are using `Deno.core.callConsole` binding to preserve proper stack +// frames in inspector console. This has to be done because V8 considers +// the last JS stack frame as gospel for the inspector. In our case we +// specifically want the latest user stack frame to be the one that matters +// though. +// +// Inspired by: +// https://github.com/nodejs/node/blob/1317252dfe8824fd9cfee125d2aaa94004db2f3b/lib/internal/util/inspector.js#L39-L61 +function wrapConsole(consoleFromDeno, consoleFromV8) { + const callConsole = core.callConsole; + + const keys = ObjectKeys(consoleFromV8); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (ObjectHasOwn(consoleFromDeno, key)) { + consoleFromDeno[key] = FunctionPrototypeBind( + callConsole, + consoleFromDeno, + consoleFromV8[key], + consoleFromDeno[key], + ); + } else { + // Add additional console APIs from the inspector + consoleFromDeno[key] = consoleFromV8[key]; + } + } +} + +// Expose these fields to internalObject for tests. 
+internals.Console = Console; +internals.cssToAnsi = cssToAnsi; +internals.inspectArgs = inspectArgs; +internals.parseCss = parseCss; +internals.parseCssColor = parseCssColor; + +export { + colors, + Console, + createFilteredInspectProxy, + createStylizeWithColor, + CSI, + customInspect, + formatBigInt, + formatNumber, + formatValue, + getDefaultInspectOptions, + getNoColor, + inspect, + inspectArgs, + quoteString, + setNoColor, + styles, + wrapConsole, +}; diff --git a/ext/console/02_console.js b/ext/console/02_console.js deleted file mode 100644 index 3e55efb749..0000000000 --- a/ext/console/02_console.js +++ /dev/null @@ -1,2428 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -/// - -const core = globalThis.Deno.core; -const internals = globalThis.__bootstrap.internals; -const primordials = globalThis.__bootstrap.primordials; -const { - AggregateErrorPrototype, - ArrayPrototypeUnshift, - isNaN, - DatePrototype, - DateNow, - DatePrototypeGetTime, - DatePrototypeToISOString, - Boolean, - BooleanPrototype, - BooleanPrototypeToString, - ObjectKeys, - ObjectAssign, - ObjectCreate, - ObjectFreeze, - ObjectIs, - ObjectValues, - ObjectFromEntries, - ObjectGetPrototypeOf, - ObjectGetOwnPropertyDescriptor, - ObjectGetOwnPropertySymbols, - ObjectPrototypeHasOwnProperty, - ObjectPrototypeIsPrototypeOf, - ObjectPrototypePropertyIsEnumerable, - PromisePrototype, - String, - StringPrototype, - StringPrototypeRepeat, - StringPrototypeReplace, - StringPrototypeReplaceAll, - StringPrototypeSplit, - StringPrototypeSlice, - StringPrototypeCodePointAt, - StringPrototypeCharCodeAt, - StringPrototypeNormalize, - StringPrototypeMatch, - StringPrototypePadStart, - StringPrototypeLocaleCompare, - StringPrototypeToString, - StringPrototypeTrim, - StringPrototypeIncludes, - StringPrototypeStartsWith, - TypeError, - NumberIsInteger, - NumberParseInt, - RegExpPrototype, - RegExpPrototypeTest, - RegExpPrototypeToString, - SafeArrayIterator, - SafeMap, - 
SafeStringIterator, - SafeSet, - SafeRegExp, - SetPrototype, - SetPrototypeEntries, - SetPrototypeGetSize, - Symbol, - SymbolPrototype, - SymbolPrototypeToString, - SymbolPrototypeValueOf, - SymbolPrototypeGetDescription, - SymbolToStringTag, - SymbolHasInstance, - SymbolFor, - Array, - ArrayIsArray, - ArrayPrototypeJoin, - ArrayPrototypeMap, - ArrayPrototypeReduce, - ArrayPrototypeEntries, - ArrayPrototypePush, - ArrayPrototypePop, - ArrayPrototypeSort, - ArrayPrototypeSlice, - ArrayPrototypeShift, - ArrayPrototypeIncludes, - ArrayPrototypeFill, - ArrayPrototypeFilter, - ArrayPrototypeFind, - FunctionPrototypeBind, - FunctionPrototypeToString, - MapPrototype, - MapPrototypeHas, - MapPrototypeGet, - MapPrototypeSet, - MapPrototypeDelete, - MapPrototypeEntries, - MapPrototypeForEach, - MapPrototypeGetSize, - Error, - ErrorPrototype, - ErrorCaptureStackTrace, - MathAbs, - MathMax, - MathMin, - MathSqrt, - MathRound, - MathFloor, - Number, - NumberPrototype, - NumberPrototypeToString, - NumberPrototypeValueOf, - BigIntPrototype, - BigIntPrototypeToString, - Proxy, - ReflectGet, - ReflectGetOwnPropertyDescriptor, - ReflectGetPrototypeOf, - ReflectHas, - TypedArrayPrototypeGetLength, - TypedArrayPrototypeGetSymbolToStringTag, - WeakMapPrototype, - WeakSetPrototype, -} = primordials; -import * as colors from "ext:deno_console/01_colors.js"; - -function isInvalidDate(x) { - return isNaN(DatePrototypeGetTime(x)); -} - -function hasOwnProperty(obj, v) { - if (obj == null) { - return false; - } - return ObjectPrototypeHasOwnProperty(obj, v); -} - -function propertyIsEnumerable(obj, prop) { - if ( - obj == null || - typeof obj.propertyIsEnumerable !== "function" - ) { - return false; - } - - return ObjectPrototypePropertyIsEnumerable(obj, prop); -} - -// Copyright Joyent, Inc. and other Node contributors. MIT license. 
-// Forked from Node's lib/internal/cli_table.js - -function isTypedArray(x) { - return TypedArrayPrototypeGetSymbolToStringTag(x) !== undefined; -} - -const tableChars = { - middleMiddle: "\u2500", - rowMiddle: "\u253c", - topRight: "\u2510", - topLeft: "\u250c", - leftMiddle: "\u251c", - topMiddle: "\u252c", - bottomRight: "\u2518", - bottomLeft: "\u2514", - bottomMiddle: "\u2534", - rightMiddle: "\u2524", - left: "\u2502 ", - right: " \u2502", - middle: " \u2502 ", -}; - -function isFullWidthCodePoint(code) { - // Code points are partially derived from: - // http://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt - return ( - code >= 0x1100 && - (code <= 0x115f || // Hangul Jamo - code === 0x2329 || // LEFT-POINTING ANGLE BRACKET - code === 0x232a || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - (code >= 0x3250 && code <= 0x4dbf) || - // CJK Unified Ideographs .. Yi Radicals - (code >= 0x4e00 && code <= 0xa4c6) || - // Hangul Jamo Extended-A - (code >= 0xa960 && code <= 0xa97c) || - // Hangul Syllables - (code >= 0xac00 && code <= 0xd7a3) || - // CJK Compatibility Ideographs - (code >= 0xf900 && code <= 0xfaff) || - // Vertical Forms - (code >= 0xfe10 && code <= 0xfe19) || - // CJK Compatibility Forms .. Small Form Variants - (code >= 0xfe30 && code <= 0xfe6b) || - // Halfwidth and Fullwidth Forms - (code >= 0xff01 && code <= 0xff60) || - (code >= 0xffe0 && code <= 0xffe6) || - // Kana Supplement - (code >= 0x1b000 && code <= 0x1b001) || - // Enclosed Ideographic Supplement - (code >= 0x1f200 && code <= 0x1f251) || - // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff - // Emoticons 0x1f600 - 0x1f64f - (code >= 0x1f300 && code <= 0x1f64f) || - // CJK Unified Ideographs Extension B .. 
Tertiary Ideographic Plane - (code >= 0x20000 && code <= 0x3fffd)) - ); -} - -function getStringWidth(str) { - str = StringPrototypeNormalize(colors.stripColor(str), "NFC"); - let width = 0; - - for (const ch of new SafeStringIterator(str)) { - width += isFullWidthCodePoint(StringPrototypeCodePointAt(ch, 0)) ? 2 : 1; - } - - return width; -} - -function renderRow(row, columnWidths, columnRightAlign) { - let out = tableChars.left; - for (let i = 0; i < row.length; i++) { - const cell = row[i]; - const len = getStringWidth(cell); - const padding = StringPrototypeRepeat(" ", columnWidths[i] - len); - if (columnRightAlign?.[i]) { - out += `${padding}${cell}`; - } else { - out += `${cell}${padding}`; - } - if (i !== row.length - 1) { - out += tableChars.middle; - } - } - out += tableChars.right; - return out; -} - -function cliTable(head, columns) { - const rows = []; - const columnWidths = ArrayPrototypeMap(head, (h) => getStringWidth(h)); - const longestColumn = ArrayPrototypeReduce( - columns, - (n, a) => MathMax(n, a.length), - 0, - ); - const columnRightAlign = new Array(columnWidths.length).fill(true); - - for (let i = 0; i < head.length; i++) { - const column = columns[i]; - for (let j = 0; j < longestColumn; j++) { - if (rows[j] === undefined) { - rows[j] = []; - } - const value = (rows[j][i] = hasOwnProperty(column, j) ? 
column[j] : ""); - const width = columnWidths[i] || 0; - const counted = getStringWidth(value); - columnWidths[i] = MathMax(width, counted); - columnRightAlign[i] &= NumberIsInteger(+value); - } - } - - const divider = ArrayPrototypeMap( - columnWidths, - (i) => StringPrototypeRepeat(tableChars.middleMiddle, i + 2), - ); - - let result = - `${tableChars.topLeft}${ - ArrayPrototypeJoin(divider, tableChars.topMiddle) - }` + - `${tableChars.topRight}\n${renderRow(head, columnWidths)}\n` + - `${tableChars.leftMiddle}${ - ArrayPrototypeJoin(divider, tableChars.rowMiddle) - }` + - `${tableChars.rightMiddle}\n`; - - for (let i = 0; i < rows.length; ++i) { - const row = rows[i]; - result += `${renderRow(row, columnWidths, columnRightAlign)}\n`; - } - - result += - `${tableChars.bottomLeft}${ - ArrayPrototypeJoin(divider, tableChars.bottomMiddle) - }` + - tableChars.bottomRight; - - return result; -} -/* End of forked part */ - -// We can match Node's quoting behavior exactly by swapping the double quote and -// single quote in this array. That would give preference to single quotes. -// However, we prefer double quotes as the default. -const QUOTES = ['"', "'", "`"]; - -const DEFAULT_INSPECT_OPTIONS = { - depth: 4, - indentLevel: 0, - sorted: false, - trailingComma: false, - compact: true, - iterableLimit: 100, - showProxy: false, - colors: false, - getters: false, - showHidden: false, - strAbbreviateSize: 100, - /** You can override the quotes preference in inspectString. - * Used by util.inspect() */ - // TODO(kt3k): Consider using symbol as a key to hide this from the public - // API. 
- quotes: QUOTES, -}; - -const DEFAULT_INDENT = " "; // Default indent string - -const LINE_BREAKING_LENGTH = 80; -const MIN_GROUP_LENGTH = 6; -const STR_ABBREVIATE_SIZE = 100; - -const PROMISE_STRING_BASE_LENGTH = 12; - -class CSI { - static kClear = "\x1b[1;1H"; - static kClearScreenDown = "\x1b[0J"; -} - -function getClassInstanceName(instance) { - if (typeof instance != "object") { - return ""; - } - const constructor = instance?.constructor; - if (typeof constructor == "function") { - return constructor.name ?? ""; - } - return ""; -} - -function maybeColor(fn, inspectOptions) { - return inspectOptions.colors ? fn : (s) => s; -} - -function inspectFunction(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - if ( - ReflectHas(value, customInspect) && - typeof value[customInspect] === "function" - ) { - return String(value[customInspect](inspect, inspectOptions)); - } - // Might be Function/AsyncFunction/GeneratorFunction/AsyncGeneratorFunction - let cstrName = ObjectGetPrototypeOf(value)?.constructor?.name; - if (!cstrName) { - // If prototype is removed or broken, - // use generic 'Function' instead. - cstrName = "Function"; - } - const stringValue = FunctionPrototypeToString(value); - // Might be Class - if (StringPrototypeStartsWith(stringValue, "class")) { - cstrName = "Class"; - } - - // Our function may have properties, so we want to format those - // as if our function was an object - // If we didn't find any properties, we will just append an - // empty suffix. - let suffix = ``; - let refStr = ""; - if ( - ObjectKeys(value).length > 0 || - ObjectGetOwnPropertySymbols(value).length > 0 - ) { - const { 0: propString, 1: refIndex } = inspectRawObject( - value, - inspectOptions, - ); - refStr = refIndex; - // Filter out the empty string for the case we only have - // non-enumerable symbols. 
- if ( - propString.length > 0 && - propString !== "{}" - ) { - suffix = ` ${propString}`; - } - } - - if (value.name && value.name !== "anonymous") { - // from MDN spec - return cyan(`${refStr}[${cstrName}: ${value.name}]`) + suffix; - } - return cyan(`${refStr}[${cstrName} (anonymous)]`) + suffix; -} - -function inspectIterable( - value, - options, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - if (inspectOptions.indentLevel >= inspectOptions.depth) { - return cyan(`[${options.typeName}]`); - } - - const entries = []; - let iter; - let valueIsTypedArray = false; - let entriesLength; - - switch (options.typeName) { - case "Map": - iter = MapPrototypeEntries(value); - entriesLength = MapPrototypeGetSize(value); - break; - case "Set": - iter = SetPrototypeEntries(value); - entriesLength = SetPrototypeGetSize(value); - break; - case "Array": - entriesLength = value.length; - break; - default: - if (isTypedArray(value)) { - entriesLength = TypedArrayPrototypeGetLength(value); - iter = ArrayPrototypeEntries(value); - valueIsTypedArray = true; - } else { - throw new TypeError("unreachable"); - } - } - - let entriesLengthWithoutEmptyItems = entriesLength; - if (options.typeName === "Array") { - for ( - let i = 0, j = 0; - i < entriesLength && j < inspectOptions.iterableLimit; - i++, j++ - ) { - inspectOptions.indentLevel++; - const { entry, skipTo } = options.entryHandler( - [i, value[i]], - inspectOptions, - ); - ArrayPrototypePush(entries, entry); - inspectOptions.indentLevel--; - - if (skipTo) { - // subtract skipped (empty) items - entriesLengthWithoutEmptyItems -= skipTo - i; - i = skipTo; - } - } - } else { - let i = 0; - while (true) { - let el; - try { - const res = iter.next(); - if (res.done) { - break; - } - el = res.value; - } catch (err) { - if (valueIsTypedArray) { - // TypedArray.prototype.entries doesn't throw, unless the ArrayBuffer - // is detached. 
We don't want to show the exception in that case, so - // we catch it here and pretend the ArrayBuffer has no entries (like - // Chrome DevTools does). - break; - } - throw err; - } - if (i < inspectOptions.iterableLimit) { - inspectOptions.indentLevel++; - ArrayPrototypePush( - entries, - options.entryHandler( - el, - inspectOptions, - ), - ); - inspectOptions.indentLevel--; - } else { - break; - } - i++; - } - } - - if (options.sort) { - ArrayPrototypeSort(entries); - } - - if (entriesLengthWithoutEmptyItems > inspectOptions.iterableLimit) { - const nmore = entriesLengthWithoutEmptyItems - - inspectOptions.iterableLimit; - ArrayPrototypePush(entries, `... ${nmore} more items`); - } - - const iPrefix = `${options.displayName ? options.displayName + " " : ""}`; - - const level = inspectOptions.indentLevel; - const initIndentation = `\n${ - StringPrototypeRepeat(DEFAULT_INDENT, level + 1) - }`; - const entryIndentation = `,\n${ - StringPrototypeRepeat(DEFAULT_INDENT, level + 1) - }`; - const closingDelimIndentation = StringPrototypeRepeat( - DEFAULT_INDENT, - level, - ); - const closingIndentation = `${ - inspectOptions.trailingComma ? "," : "" - }\n${closingDelimIndentation}`; - - let iContent; - if (entries.length === 0 && !inspectOptions.compact) { - iContent = `\n${closingDelimIndentation}`; - } else if (options.group && entries.length > MIN_GROUP_LENGTH) { - const groups = groupEntries(entries, level, value); - iContent = `${initIndentation}${ - ArrayPrototypeJoin(groups, entryIndentation) - }${closingIndentation}`; - } else { - iContent = entries.length === 0 - ? 
"" - : ` ${ArrayPrototypeJoin(entries, ", ")} `; - if ( - colors.stripColor(iContent).length > LINE_BREAKING_LENGTH || - !inspectOptions.compact - ) { - iContent = `${initIndentation}${ - ArrayPrototypeJoin(entries, entryIndentation) - }${closingIndentation}`; - } - } - - return `${iPrefix}${options.delims[0]}${iContent}${options.delims[1]}`; -} - -// Ported from Node.js -// Copyright Node.js contributors. All rights reserved. -function groupEntries( - entries, - level, - value, - iterableLimit = 100, -) { - let totalLength = 0; - let maxLength = 0; - let entriesLength = entries.length; - if (iterableLimit < entriesLength) { - // This makes sure the "... n more items" part is not taken into account. - entriesLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(entriesLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. - // IN PROGRESS: Colors are being taken into account. - for (let i = 0; i < entriesLength; i++) { - // Taking colors into account: removing the ANSI color - // codes from the string before measuring its length - const len = colors.stripColor(entries[i]).length; - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) maxLength = len; - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. - const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. 
- if ( - actualMax * 3 + (level + 1) < LINE_BREAKING_LENGTH && - (totalLength / actualMax > 5 || maxLength <= 6) - ) { - const approxCharHeights = 2.5; - const averageBias = MathSqrt(actualMax - totalLength / entries.length); - const biasedMax = MathMax(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = MathMin( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - MathRound( - MathSqrt(approxCharHeights * biasedMax * entriesLength) / biasedMax, - ), - // Do not exceed the breakLength. - MathFloor((LINE_BREAKING_LENGTH - (level + 1)) / actualMax), - // Limit the columns to a maximum of fifteen. - 15, - ); - // Return with the original output if no grouping should happen. - if (columns <= 1) { - return entries; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < entries.length; j += columns) { - if (dataLen[j] > lineMaxLength) lineMaxLength = dataLen[j]; - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = "padStart"; - if (value !== undefined) { - for (let i = 0; i < entries.length; i++) { - if ( - typeof value[i] !== "number" && - typeof value[i] !== "bigint" - ) { - order = "padEnd"; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < entriesLength; i += columns) { - // The last lines may contain less entries than columns. 
- const max = MathMin(i + columns, entriesLength); - let str = ""; - let j = i; - for (; j < max - 1; j++) { - const lengthOfColorCodes = entries[j].length - dataLen[j]; - const padding = maxLineLength[j - i] + lengthOfColorCodes; - str += `${entries[j]}, `[order](padding, " "); - } - if (order === "padStart") { - const lengthOfColorCodes = entries[j].length - dataLen[j]; - const padding = maxLineLength[j - i] + - lengthOfColorCodes - - separatorSpace; - str += StringPrototypePadStart(entries[j], padding, " "); - } else { - str += entries[j]; - } - ArrayPrototypePush(tmp, str); - } - if (iterableLimit < entries.length) { - ArrayPrototypePush(tmp, entries[entriesLength]); - } - entries = tmp; - } - return entries; -} - -let circular; -function handleCircular(value, cyan) { - let index = 1; - if (circular === undefined) { - circular = new SafeMap(); - MapPrototypeSet(circular, value, index); - } else { - index = MapPrototypeGet(circular, value); - if (index === undefined) { - index = MapPrototypeGetSize(circular) + 1; - MapPrototypeSet(circular, value, index); - } - } - // Circular string is cyan - return cyan(`[Circular *${index}]`); -} - -function _inspectValue( - value, - inspectOptions, -) { - const proxyDetails = core.getProxyDetails(value); - if (proxyDetails != null && inspectOptions.showProxy) { - return inspectProxy(proxyDetails, inspectOptions); - } - - const green = maybeColor(colors.green, inspectOptions); - const yellow = maybeColor(colors.yellow, inspectOptions); - const gray = maybeColor(colors.gray, inspectOptions); - const cyan = maybeColor(colors.cyan, inspectOptions); - const bold = maybeColor(colors.bold, inspectOptions); - const red = maybeColor(colors.red, inspectOptions); - - switch (typeof value) { - case "string": - return green(quoteString(value, inspectOptions)); - case "number": // Numbers are yellow - // Special handling of -0 - return yellow(ObjectIs(value, -0) ? 
"-0" : `${value}`); - case "boolean": // booleans are yellow - return yellow(String(value)); - case "undefined": // undefined is gray - return gray(String(value)); - case "symbol": // Symbols are green - return green(maybeQuoteSymbol(value, inspectOptions)); - case "bigint": // Bigints are yellow - return yellow(`${value}n`); - case "function": // Function string is cyan - if (ctxHas(value)) { - // Circular string is cyan - return handleCircular(value, cyan); - } - - return inspectFunction(value, inspectOptions); - case "object": // null is bold - if (value === null) { - return bold("null"); - } - - if (ctxHas(value)) { - return handleCircular(value, cyan); - } - - return inspectObject( - value, - inspectOptions, - proxyDetails, - ); - default: - // Not implemented is red - return red("[Not Implemented]"); - } -} - -function inspectValue( - value, - inspectOptions, -) { - ArrayPrototypePush(CTX_STACK, value); - let x; - try { - x = _inspectValue(value, inspectOptions); - } finally { - ArrayPrototypePop(CTX_STACK); - } - return x; -} - -/** Surround the string in quotes. - * - * The quote symbol is chosen by taking the first of the `QUOTES` array which - * does not occur in the string. If they all occur, settle with `QUOTES[0]`. - * - * Insert a backslash before any occurrence of the chosen quote symbol and - * before any backslash. - */ -function quoteString(string, inspectOptions = DEFAULT_INSPECT_OPTIONS) { - const quotes = inspectOptions.quotes; - const quote = - ArrayPrototypeFind(quotes, (c) => !StringPrototypeIncludes(string, c)) ?? 
- quotes[0]; - const escapePattern = new SafeRegExp(`(?=[${quote}\\\\])`, "g"); - string = StringPrototypeReplace(string, escapePattern, "\\"); - string = replaceEscapeSequences(string); - return `${quote}${string}${quote}`; -} - -const ESCAPE_PATTERN = new SafeRegExp(/([\b\f\n\r\t\v])/g); -const ESCAPE_MAP = ObjectFreeze({ - "\b": "\\b", - "\f": "\\f", - "\n": "\\n", - "\r": "\\r", - "\t": "\\t", - "\v": "\\v", -}); - -// deno-lint-ignore no-control-regex -const ESCAPE_PATTERN2 = new SafeRegExp(/[\x00-\x1f\x7f-\x9f]/g); - -// Replace escape sequences that can modify output. -function replaceEscapeSequences(string) { - return StringPrototypeReplace( - StringPrototypeReplace( - string, - ESCAPE_PATTERN, - (c) => ESCAPE_MAP[c], - ), - new SafeRegExp(ESCAPE_PATTERN2), - (c) => - "\\x" + - StringPrototypePadStart( - NumberPrototypeToString(StringPrototypeCharCodeAt(c, 0), 16), - 2, - "0", - ), - ); -} - -const QUOTE_STRING_PATTERN = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_0-9]*$/); - -// Surround a string with quotes when it is required (e.g the string not a valid identifier). -function maybeQuoteString(string, inspectOptions) { - if ( - RegExpPrototypeTest(QUOTE_STRING_PATTERN, string) - ) { - return replaceEscapeSequences(string); - } - - return quoteString(string, inspectOptions); -} - -const QUOTE_SYMBOL_REG = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_.0-9]*$/); - -// Surround a symbol's description in quotes when it is required (e.g the description has non printable characters). 
-function maybeQuoteSymbol(symbol, inspectOptions) { - const description = SymbolPrototypeGetDescription(symbol); - - if (description === undefined) { - return SymbolPrototypeToString(symbol); - } - - if (RegExpPrototypeTest(QUOTE_SYMBOL_REG, description)) { - return SymbolPrototypeToString(symbol); - } - - return `Symbol(${quoteString(description, inspectOptions)})`; -} - -const CTX_STACK = []; -function ctxHas(x) { - // Only check parent contexts - return ArrayPrototypeIncludes( - ArrayPrototypeSlice(CTX_STACK, 0, CTX_STACK.length - 1), - x, - ); -} - -// Print strings when they are inside of arrays or objects with quotes -function inspectValueWithQuotes( - value, - inspectOptions, -) { - const abbreviateSize = typeof inspectOptions.strAbbreviateSize === "undefined" - ? STR_ABBREVIATE_SIZE - : inspectOptions.strAbbreviateSize; - const green = maybeColor(colors.green, inspectOptions); - switch (typeof value) { - case "string": { - const trunc = value.length > abbreviateSize - ? StringPrototypeSlice(value, 0, abbreviateSize) + "..." 
- : value; - return green(quoteString(trunc, inspectOptions)); // Quoted strings are green - } - default: - return inspectValue(value, inspectOptions); - } -} - -function inspectArray( - value, - inspectOptions, -) { - const gray = maybeColor(colors.gray, inspectOptions); - let lastValidIndex = 0; - let keys; - const options = { - typeName: "Array", - displayName: "", - delims: ["[", "]"], - entryHandler: (entry, inspectOptions) => { - const { 0: index, 1: val } = entry; - let i = index; - lastValidIndex = index; - if (!ObjectPrototypeHasOwnProperty(value, i)) { - let skipTo; - keys = keys || ObjectKeys(value); - i = value.length; - if (keys.length === 0) { - // fast path, all items are empty - skipTo = i; - } else { - // Not all indexes are empty or there's a non-index property - // Find first non-empty array index - while (keys.length) { - const key = ArrayPrototypeShift(keys); - // check if it's a valid array index - if (key > lastValidIndex && key < 2 ** 32 - 1) { - i = Number(key); - break; - } - } - - skipTo = i - 1; - } - const emptyItems = i - index; - const ending = emptyItems > 1 ? 
"s" : ""; - return { - entry: gray(`<${emptyItems} empty item${ending}>`), - skipTo, - }; - } else { - return { entry: inspectValueWithQuotes(val, inspectOptions) }; - } - }, - group: inspectOptions.compact, - sort: false, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectTypedArray( - typedArrayName, - value, - inspectOptions, -) { - const valueLength = value.length; - const options = { - typeName: typedArrayName, - displayName: `${typedArrayName}(${valueLength})`, - delims: ["[", "]"], - entryHandler: (entry, inspectOptions) => { - const val = entry[1]; - inspectOptions.indentLevel++; - const inspectedValue = inspectValueWithQuotes(val, inspectOptions); - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: inspectOptions.compact, - sort: false, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectSet( - value, - inspectOptions, -) { - const options = { - typeName: "Set", - displayName: "Set", - delims: ["{", "}"], - entryHandler: (entry, inspectOptions) => { - const val = entry[1]; - inspectOptions.indentLevel++; - const inspectedValue = inspectValueWithQuotes(val, inspectOptions); - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: false, - sort: inspectOptions.sorted, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectMap( - value, - inspectOptions, -) { - const options = { - typeName: "Map", - displayName: "Map", - delims: ["{", "}"], - entryHandler: (entry, inspectOptions) => { - const { 0: key, 1: val } = entry; - inspectOptions.indentLevel++; - const inspectedValue = `${ - inspectValueWithQuotes(key, inspectOptions) - } => ${inspectValueWithQuotes(val, inspectOptions)}`; - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: false, - sort: inspectOptions.sorted, - }; - return inspectIterable( - value, - options, - inspectOptions, - ); -} - -function inspectWeakSet(inspectOptions) { - const cyan = 
maybeColor(colors.cyan, inspectOptions); - return `WeakSet { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color -} - -function inspectWeakMap(inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return `WeakMap { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color -} - -function inspectDate(value, inspectOptions) { - // without quotes, ISO format, in magenta like before - const magenta = maybeColor(colors.magenta, inspectOptions); - return magenta( - isInvalidDate(value) ? "Invalid Date" : DatePrototypeToISOString(value), - ); -} - -function inspectRegExp(value, inspectOptions) { - const red = maybeColor(colors.red, inspectOptions); - return red(RegExpPrototypeToString(value)); // RegExps are red -} - -const AGGREGATE_ERROR_HAS_AT_PATTERN = new SafeRegExp(/\s+at/); -const AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN = new SafeRegExp(/^(?!\s*$)/gm); - -function inspectError(value, cyan) { - const causes = [value]; - - let err = value; - while (err.cause) { - if (ArrayPrototypeIncludes(causes, err.cause)) { - ArrayPrototypePush(causes, handleCircular(err.cause, cyan)); - break; - } else { - ArrayPrototypePush(causes, err.cause); - err = err.cause; - } - } - - const refMap = new SafeMap(); - for (let i = 0; i < causes.length; ++i) { - const cause = causes[i]; - if (circular !== undefined) { - const index = MapPrototypeGet(circular, cause); - if (index !== undefined) { - MapPrototypeSet(refMap, cause, cyan(` `)); - } - } - } - ArrayPrototypeShift(causes); - - let finalMessage = MapPrototypeGet(refMap, value) ?? 
""; - - if (ObjectPrototypeIsPrototypeOf(AggregateErrorPrototype, value)) { - const stackLines = StringPrototypeSplit(value.stack, "\n"); - while (true) { - const line = ArrayPrototypeShift(stackLines); - if (RegExpPrototypeTest(AGGREGATE_ERROR_HAS_AT_PATTERN, line)) { - ArrayPrototypeUnshift(stackLines, line); - break; - } else if (typeof line === "undefined") { - break; - } - - finalMessage += line; - finalMessage += "\n"; - } - const aggregateMessage = ArrayPrototypeJoin( - ArrayPrototypeMap( - value.errors, - (error) => - StringPrototypeReplace( - inspectArgs([error]), - AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN, - StringPrototypeRepeat(" ", 4), - ), - ), - "\n", - ); - finalMessage += aggregateMessage; - finalMessage += "\n"; - finalMessage += ArrayPrototypeJoin(stackLines, "\n"); - } else { - finalMessage += value.stack; - } - - finalMessage += ArrayPrototypeJoin( - ArrayPrototypeMap( - causes, - (cause) => - "\nCaused by " + (MapPrototypeGet(refMap, cause) ?? "") + - (cause?.stack ?? cause), - ), - "", - ); - - return finalMessage; -} - -function inspectStringObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[String: "${StringPrototypeToString(value)}"]`); // wrappers are in cyan -} - -function inspectBooleanObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[Boolean: ${BooleanPrototypeToString(value)}]`); // wrappers are in cyan -} - -function inspectNumberObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - // Special handling of -0 - return cyan( - `[Number: ${ - ObjectIs(NumberPrototypeValueOf(value), -0) - ? 
"-0" - : NumberPrototypeToString(value) - }]`, - ); // wrappers are in cyan -} - -function inspectBigIntObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[BigInt: ${BigIntPrototypeToString(value)}n]`); // wrappers are in cyan -} - -function inspectSymbolObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan( - `[Symbol: ${ - maybeQuoteSymbol(SymbolPrototypeValueOf(value), inspectOptions) - }]`, - ); // wrappers are in cyan -} - -const PromiseState = { - Pending: 0, - Fulfilled: 1, - Rejected: 2, -}; - -function inspectPromise( - value, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - const red = maybeColor(colors.red, inspectOptions); - - const { 0: state, 1: result } = core.getPromiseDetails(value); - - if (state === PromiseState.Pending) { - return `Promise { ${cyan("")} }`; - } - - const prefix = state === PromiseState.Fulfilled - ? "" - : `${red("")} `; - - inspectOptions.indentLevel++; - const str = `${prefix}${inspectValueWithQuotes(result, inspectOptions)}`; - inspectOptions.indentLevel--; - - if (str.length + PROMISE_STRING_BASE_LENGTH > LINE_BREAKING_LENGTH) { - return `Promise {\n${ - StringPrototypeRepeat(DEFAULT_INDENT, inspectOptions.indentLevel + 1) - }${str}\n}`; - } - - return `Promise { ${str} }`; -} - -function inspectProxy( - targetAndHandler, - inspectOptions, -) { - return `Proxy ${inspectArray(targetAndHandler, inspectOptions)}`; -} - -function inspectRawObject( - value, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - - if (inspectOptions.indentLevel >= inspectOptions.depth) { - return [cyan("[Object]"), ""]; // wrappers are in cyan - } - - let baseString; - - let shouldShowDisplayName = false; - let displayName = value[ - SymbolToStringTag - ]; - if (!displayName) { - displayName = getClassInstanceName(value); - } - if ( - displayName && displayName !== "Object" && displayName !== 
"anonymous" - ) { - shouldShowDisplayName = true; - } - - const entries = []; - const stringKeys = ObjectKeys(value); - const symbolKeys = ObjectGetOwnPropertySymbols(value); - if (inspectOptions.sorted) { - ArrayPrototypeSort(stringKeys); - ArrayPrototypeSort( - symbolKeys, - (s1, s2) => - StringPrototypeLocaleCompare( - SymbolPrototypeGetDescription(s1) ?? "", - SymbolPrototypeGetDescription(s2) ?? "", - ), - ); - } - - const red = maybeColor(colors.red, inspectOptions); - - inspectOptions.indentLevel++; - - for (let i = 0; i < stringKeys.length; ++i) { - const key = stringKeys[i]; - if (inspectOptions.getters) { - let propertyValue; - let error = null; - try { - propertyValue = value[key]; - } catch (error_) { - error = error_; - } - const inspectedValue = error == null - ? inspectValueWithQuotes(propertyValue, inspectOptions) - : red(`[Thrown ${error.name}: ${error.message}]`); - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: ${inspectedValue}`, - ); - } else { - const descriptor = ObjectGetOwnPropertyDescriptor(value, key); - if (descriptor.get !== undefined && descriptor.set !== undefined) { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: [Getter/Setter]`, - ); - } else if (descriptor.get !== undefined) { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: [Getter]`, - ); - } else { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: ${ - inspectValueWithQuotes(value[key], inspectOptions) - }`, - ); - } - } - } - - for (let i = 0; i < symbolKeys.length; ++i) { - const key = symbolKeys[i]; - if ( - !inspectOptions.showHidden && - !propertyIsEnumerable(value, key) - ) { - continue; - } - - if (inspectOptions.getters) { - let propertyValue; - let error; - try { - propertyValue = value[key]; - } catch (error_) { - error = error_; - } - const inspectedValue = error == null - ? 
inspectValueWithQuotes(propertyValue, inspectOptions) - : red(`Thrown ${error.name}: ${error.message}`); - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: ${inspectedValue}`, - ); - } else { - const descriptor = ObjectGetOwnPropertyDescriptor(value, key); - if (descriptor.get !== undefined && descriptor.set !== undefined) { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: [Getter/Setter]`, - ); - } else if (descriptor.get !== undefined) { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: [Getter]`, - ); - } else { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: ${ - inspectValueWithQuotes(value[key], inspectOptions) - }`, - ); - } - } - } - - inspectOptions.indentLevel--; - - // Making sure color codes are ignored when calculating the total length - const entriesText = colors.stripColor(ArrayPrototypeJoin(entries, "")); - const totalLength = entries.length + inspectOptions.indentLevel + - entriesText.length; - - if (entries.length === 0) { - baseString = "{}"; - } else if ( - totalLength > LINE_BREAKING_LENGTH || - !inspectOptions.compact || - StringPrototypeIncludes(entriesText, "\n") - ) { - const entryIndent = StringPrototypeRepeat( - DEFAULT_INDENT, - inspectOptions.indentLevel + 1, - ); - const closingIndent = StringPrototypeRepeat( - DEFAULT_INDENT, - inspectOptions.indentLevel, - ); - baseString = `{\n${entryIndent}${ - ArrayPrototypeJoin(entries, `,\n${entryIndent}`) - }${inspectOptions.trailingComma ? 
"," : ""}\n${closingIndent}}`; - } else { - baseString = `{ ${ArrayPrototypeJoin(entries, ", ")} }`; - } - - if (shouldShowDisplayName) { - baseString = `${displayName} ${baseString}`; - } - - let refIndex = ""; - if (circular !== undefined) { - const index = MapPrototypeGet(circular, value); - if (index !== undefined) { - refIndex = cyan(` `); - } - } - - return [baseString, refIndex]; -} - -function inspectObject(value, inspectOptions, proxyDetails) { - if ( - ReflectHas(value, customInspect) && - typeof value[customInspect] === "function" - ) { - return String(value[customInspect](inspect, inspectOptions)); - } - // This non-unique symbol is used to support op_crates, ie. - // in extensions/web we don't want to depend on public - // Symbol.for("Deno.customInspect") symbol defined in the public API. - // Internal only, shouldn't be used by users. - const privateCustomInspect = SymbolFor("Deno.privateCustomInspect"); - if ( - ReflectHas(value, privateCustomInspect) && - typeof value[privateCustomInspect] === "function" - ) { - // TODO(nayeemrmn): `inspect` is passed as an argument because custom - // inspect implementations in `extensions` need it, but may not have access - // to the `Deno` namespace in web workers. Remove when the `Deno` - // namespace is always enabled. 
- return String( - value[privateCustomInspect](inspect, inspectOptions), - ); - } - if (ObjectPrototypeIsPrototypeOf(ErrorPrototype, value)) { - return inspectError(value, maybeColor(colors.cyan, inspectOptions)); - } else if (ArrayIsArray(value)) { - return inspectArray(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(NumberPrototype, value)) { - return inspectNumberObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(BigIntPrototype, value)) { - return inspectBigIntObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(BooleanPrototype, value)) { - return inspectBooleanObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(StringPrototype, value)) { - return inspectStringObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(SymbolPrototype, value)) { - return inspectSymbolObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(PromisePrototype, value)) { - return inspectPromise(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(RegExpPrototype, value)) { - return inspectRegExp(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { - return inspectDate( - proxyDetails ? proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(SetPrototype, value)) { - return inspectSet( - proxyDetails ? proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(MapPrototype, value)) { - return inspectMap( - proxyDetails ? 
proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(WeakSetPrototype, value)) { - return inspectWeakSet(inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(WeakMapPrototype, value)) { - return inspectWeakMap(inspectOptions); - } else if (isTypedArray(value)) { - return inspectTypedArray( - ObjectGetPrototypeOf(value).constructor.name, - value, - inspectOptions, - ); - } else { - // Otherwise, default object formatting - let { 0: insp, 1: refIndex } = inspectRawObject(value, inspectOptions); - insp = refIndex + insp; - return insp; - } -} - -const colorKeywords = new SafeMap([ - ["black", "#000000"], - ["silver", "#c0c0c0"], - ["gray", "#808080"], - ["white", "#ffffff"], - ["maroon", "#800000"], - ["red", "#ff0000"], - ["purple", "#800080"], - ["fuchsia", "#ff00ff"], - ["green", "#008000"], - ["lime", "#00ff00"], - ["olive", "#808000"], - ["yellow", "#ffff00"], - ["navy", "#000080"], - ["blue", "#0000ff"], - ["teal", "#008080"], - ["aqua", "#00ffff"], - ["orange", "#ffa500"], - ["aliceblue", "#f0f8ff"], - ["antiquewhite", "#faebd7"], - ["aquamarine", "#7fffd4"], - ["azure", "#f0ffff"], - ["beige", "#f5f5dc"], - ["bisque", "#ffe4c4"], - ["blanchedalmond", "#ffebcd"], - ["blueviolet", "#8a2be2"], - ["brown", "#a52a2a"], - ["burlywood", "#deb887"], - ["cadetblue", "#5f9ea0"], - ["chartreuse", "#7fff00"], - ["chocolate", "#d2691e"], - ["coral", "#ff7f50"], - ["cornflowerblue", "#6495ed"], - ["cornsilk", "#fff8dc"], - ["crimson", "#dc143c"], - ["cyan", "#00ffff"], - ["darkblue", "#00008b"], - ["darkcyan", "#008b8b"], - ["darkgoldenrod", "#b8860b"], - ["darkgray", "#a9a9a9"], - ["darkgreen", "#006400"], - ["darkgrey", "#a9a9a9"], - ["darkkhaki", "#bdb76b"], - ["darkmagenta", "#8b008b"], - ["darkolivegreen", "#556b2f"], - ["darkorange", "#ff8c00"], - ["darkorchid", "#9932cc"], - ["darkred", "#8b0000"], - ["darksalmon", "#e9967a"], - ["darkseagreen", "#8fbc8f"], - ["darkslateblue", "#483d8b"], - ["darkslategray", "#2f4f4f"], - 
["darkslategrey", "#2f4f4f"], - ["darkturquoise", "#00ced1"], - ["darkviolet", "#9400d3"], - ["deeppink", "#ff1493"], - ["deepskyblue", "#00bfff"], - ["dimgray", "#696969"], - ["dimgrey", "#696969"], - ["dodgerblue", "#1e90ff"], - ["firebrick", "#b22222"], - ["floralwhite", "#fffaf0"], - ["forestgreen", "#228b22"], - ["gainsboro", "#dcdcdc"], - ["ghostwhite", "#f8f8ff"], - ["gold", "#ffd700"], - ["goldenrod", "#daa520"], - ["greenyellow", "#adff2f"], - ["grey", "#808080"], - ["honeydew", "#f0fff0"], - ["hotpink", "#ff69b4"], - ["indianred", "#cd5c5c"], - ["indigo", "#4b0082"], - ["ivory", "#fffff0"], - ["khaki", "#f0e68c"], - ["lavender", "#e6e6fa"], - ["lavenderblush", "#fff0f5"], - ["lawngreen", "#7cfc00"], - ["lemonchiffon", "#fffacd"], - ["lightblue", "#add8e6"], - ["lightcoral", "#f08080"], - ["lightcyan", "#e0ffff"], - ["lightgoldenrodyellow", "#fafad2"], - ["lightgray", "#d3d3d3"], - ["lightgreen", "#90ee90"], - ["lightgrey", "#d3d3d3"], - ["lightpink", "#ffb6c1"], - ["lightsalmon", "#ffa07a"], - ["lightseagreen", "#20b2aa"], - ["lightskyblue", "#87cefa"], - ["lightslategray", "#778899"], - ["lightslategrey", "#778899"], - ["lightsteelblue", "#b0c4de"], - ["lightyellow", "#ffffe0"], - ["limegreen", "#32cd32"], - ["linen", "#faf0e6"], - ["magenta", "#ff00ff"], - ["mediumaquamarine", "#66cdaa"], - ["mediumblue", "#0000cd"], - ["mediumorchid", "#ba55d3"], - ["mediumpurple", "#9370db"], - ["mediumseagreen", "#3cb371"], - ["mediumslateblue", "#7b68ee"], - ["mediumspringgreen", "#00fa9a"], - ["mediumturquoise", "#48d1cc"], - ["mediumvioletred", "#c71585"], - ["midnightblue", "#191970"], - ["mintcream", "#f5fffa"], - ["mistyrose", "#ffe4e1"], - ["moccasin", "#ffe4b5"], - ["navajowhite", "#ffdead"], - ["oldlace", "#fdf5e6"], - ["olivedrab", "#6b8e23"], - ["orangered", "#ff4500"], - ["orchid", "#da70d6"], - ["palegoldenrod", "#eee8aa"], - ["palegreen", "#98fb98"], - ["paleturquoise", "#afeeee"], - ["palevioletred", "#db7093"], - ["papayawhip", "#ffefd5"], - 
["peachpuff", "#ffdab9"], - ["peru", "#cd853f"], - ["pink", "#ffc0cb"], - ["plum", "#dda0dd"], - ["powderblue", "#b0e0e6"], - ["rosybrown", "#bc8f8f"], - ["royalblue", "#4169e1"], - ["saddlebrown", "#8b4513"], - ["salmon", "#fa8072"], - ["sandybrown", "#f4a460"], - ["seagreen", "#2e8b57"], - ["seashell", "#fff5ee"], - ["sienna", "#a0522d"], - ["skyblue", "#87ceeb"], - ["slateblue", "#6a5acd"], - ["slategray", "#708090"], - ["slategrey", "#708090"], - ["snow", "#fffafa"], - ["springgreen", "#00ff7f"], - ["steelblue", "#4682b4"], - ["tan", "#d2b48c"], - ["thistle", "#d8bfd8"], - ["tomato", "#ff6347"], - ["turquoise", "#40e0d0"], - ["violet", "#ee82ee"], - ["wheat", "#f5deb3"], - ["whitesmoke", "#f5f5f5"], - ["yellowgreen", "#9acd32"], - ["rebeccapurple", "#663399"], -]); - -const HASH_PATTERN = new SafeRegExp( - /^#([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})([\dA-Fa-f]{2})?$/, -); -const SMALL_HASH_PATTERN = new SafeRegExp( - /^#([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])([\dA-Fa-f])?$/, -); -const RGB_PATTERN = new SafeRegExp( - /^rgba?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/, -); -const HSL_PATTERN = new SafeRegExp( - /^hsla?\(\s*([+\-]?\d*\.?\d+)\s*,\s*([+\-]?\d*\.?\d+)%\s*,\s*([+\-]?\d*\.?\d+)%\s*(,\s*([+\-]?\d*\.?\d+)\s*)?\)$/, -); - -function parseCssColor(colorString) { - if (MapPrototypeHas(colorKeywords, colorString)) { - colorString = MapPrototypeGet(colorKeywords, colorString); - } - // deno-fmt-ignore - const hashMatch = StringPrototypeMatch(colorString, HASH_PATTERN); - if (hashMatch != null) { - return [ - Number(`0x${hashMatch[1]}`), - Number(`0x${hashMatch[2]}`), - Number(`0x${hashMatch[3]}`), - ]; - } - // deno-fmt-ignore - const smallHashMatch = StringPrototypeMatch(colorString, SMALL_HASH_PATTERN); - if (smallHashMatch != null) { - return [ - Number(`0x${smallHashMatch[1]}0`), - Number(`0x${smallHashMatch[2]}0`), - Number(`0x${smallHashMatch[3]}0`), - ]; - } - // deno-fmt-ignore - const 
rgbMatch = StringPrototypeMatch(colorString, RGB_PATTERN); - if (rgbMatch != null) { - return [ - MathRound(MathMax(0, MathMin(255, Number(rgbMatch[1])))), - MathRound(MathMax(0, MathMin(255, Number(rgbMatch[2])))), - MathRound(MathMax(0, MathMin(255, Number(rgbMatch[3])))), - ]; - } - // deno-fmt-ignore - const hslMatch = StringPrototypeMatch(colorString, HSL_PATTERN); - if (hslMatch != null) { - // https://www.rapidtables.com/convert/color/hsl-to-rgb.html - let h = Number(hslMatch[1]) % 360; - if (h < 0) { - h += 360; - } - const s = MathMax(0, MathMin(100, Number(hslMatch[2]))) / 100; - const l = MathMax(0, MathMin(100, Number(hslMatch[3]))) / 100; - const c = (1 - MathAbs(2 * l - 1)) * s; - const x = c * (1 - MathAbs((h / 60) % 2 - 1)); - const m = l - c / 2; - let r_; - let g_; - let b_; - if (h < 60) { - ({ 0: r_, 1: g_, 2: b_ } = [c, x, 0]); - } else if (h < 120) { - ({ 0: r_, 1: g_, 2: b_ } = [x, c, 0]); - } else if (h < 180) { - ({ 0: r_, 1: g_, 2: b_ } = [0, c, x]); - } else if (h < 240) { - ({ 0: r_, 1: g_, 2: b_ } = [0, x, c]); - } else if (h < 300) { - ({ 0: r_, 1: g_, 2: b_ } = [x, 0, c]); - } else { - ({ 0: r_, 1: g_, 2: b_ } = [c, 0, x]); - } - return [ - MathRound((r_ + m) * 255), - MathRound((g_ + m) * 255), - MathRound((b_ + m) * 255), - ]; - } - return null; -} - -function getDefaultCss() { - return { - backgroundColor: null, - color: null, - fontWeight: null, - fontStyle: null, - textDecorationColor: null, - textDecorationLine: [], - }; -} - -const SPACE_PATTERN = new SafeRegExp(/\s+/g); - -function parseCss(cssString) { - const css = getDefaultCss(); - - const rawEntries = []; - let inValue = false; - let currentKey = null; - let parenthesesDepth = 0; - let currentPart = ""; - for (let i = 0; i < cssString.length; i++) { - const c = cssString[i]; - if (c == "(") { - parenthesesDepth++; - } else if (parenthesesDepth > 0) { - if (c == ")") { - parenthesesDepth--; - } - } else if (inValue) { - if (c == ";") { - const value = 
StringPrototypeTrim(currentPart); - if (value != "") { - ArrayPrototypePush(rawEntries, [currentKey, value]); - } - currentKey = null; - currentPart = ""; - inValue = false; - continue; - } - } else if (c == ":") { - currentKey = StringPrototypeTrim(currentPart); - currentPart = ""; - inValue = true; - continue; - } - currentPart += c; - } - if (inValue && parenthesesDepth == 0) { - const value = StringPrototypeTrim(currentPart); - if (value != "") { - ArrayPrototypePush(rawEntries, [currentKey, value]); - } - currentKey = null; - currentPart = ""; - } - - for (let i = 0; i < rawEntries.length; ++i) { - const { 0: key, 1: value } = rawEntries[i]; - if (key == "background-color") { - if (value != null) { - css.backgroundColor = value; - } - } else if (key == "color") { - if (value != null) { - css.color = value; - } - } else if (key == "font-weight") { - if (value == "bold") { - css.fontWeight = value; - } - } else if (key == "font-style") { - if ( - ArrayPrototypeIncludes(["italic", "oblique", "oblique 14deg"], value) - ) { - css.fontStyle = "italic"; - } - } else if (key == "text-decoration-line") { - css.textDecorationLine = []; - const lineTypes = StringPrototypeSplit(value, SPACE_PATTERN); - for (let i = 0; i < lineTypes.length; ++i) { - const lineType = lineTypes[i]; - if ( - ArrayPrototypeIncludes( - ["line-through", "overline", "underline"], - lineType, - ) - ) { - ArrayPrototypePush(css.textDecorationLine, lineType); - } - } - } else if (key == "text-decoration-color") { - const color = parseCssColor(value); - if (color != null) { - css.textDecorationColor = color; - } - } else if (key == "text-decoration") { - css.textDecorationColor = null; - css.textDecorationLine = []; - const args = StringPrototypeSplit(value, SPACE_PATTERN); - for (let i = 0; i < args.length; ++i) { - const arg = args[i]; - const maybeColor = parseCssColor(arg); - if (maybeColor != null) { - css.textDecorationColor = maybeColor; - } else if ( - ArrayPrototypeIncludes( - 
["line-through", "overline", "underline"], - arg, - ) - ) { - ArrayPrototypePush(css.textDecorationLine, arg); - } - } - } - } - - return css; -} - -function colorEquals(color1, color2) { - return color1?.[0] == color2?.[0] && color1?.[1] == color2?.[1] && - color1?.[2] == color2?.[2]; -} - -function cssToAnsi(css, prevCss = null) { - prevCss = prevCss ?? getDefaultCss(); - let ansi = ""; - if (!colorEquals(css.backgroundColor, prevCss.backgroundColor)) { - if (css.backgroundColor == null) { - ansi += "\x1b[49m"; - } else if (css.backgroundColor == "black") { - ansi += `\x1b[40m`; - } else if (css.backgroundColor == "red") { - ansi += `\x1b[41m`; - } else if (css.backgroundColor == "green") { - ansi += `\x1b[42m`; - } else if (css.backgroundColor == "yellow") { - ansi += `\x1b[43m`; - } else if (css.backgroundColor == "blue") { - ansi += `\x1b[44m`; - } else if (css.backgroundColor == "magenta") { - ansi += `\x1b[45m`; - } else if (css.backgroundColor == "cyan") { - ansi += `\x1b[46m`; - } else if (css.backgroundColor == "white") { - ansi += `\x1b[47m`; - } else { - if (ArrayIsArray(css.backgroundColor)) { - const { 0: r, 1: g, 2: b } = css.backgroundColor; - ansi += `\x1b[48;2;${r};${g};${b}m`; - } else { - const parsed = parseCssColor(css.backgroundColor); - if (parsed !== null) { - const { 0: r, 1: g, 2: b } = parsed; - ansi += `\x1b[48;2;${r};${g};${b}m`; - } else { - ansi += "\x1b[49m"; - } - } - } - } - if (!colorEquals(css.color, prevCss.color)) { - if (css.color == null) { - ansi += "\x1b[39m"; - } else if (css.color == "black") { - ansi += `\x1b[30m`; - } else if (css.color == "red") { - ansi += `\x1b[31m`; - } else if (css.color == "green") { - ansi += `\x1b[32m`; - } else if (css.color == "yellow") { - ansi += `\x1b[33m`; - } else if (css.color == "blue") { - ansi += `\x1b[34m`; - } else if (css.color == "magenta") { - ansi += `\x1b[35m`; - } else if (css.color == "cyan") { - ansi += `\x1b[36m`; - } else if (css.color == "white") { - ansi += `\x1b[37m`; 
- } else { - if (ArrayIsArray(css.color)) { - const { 0: r, 1: g, 2: b } = css.color; - ansi += `\x1b[38;2;${r};${g};${b}m`; - } else { - const parsed = parseCssColor(css.color); - if (parsed !== null) { - const { 0: r, 1: g, 2: b } = parsed; - ansi += `\x1b[38;2;${r};${g};${b}m`; - } else { - ansi += "\x1b[39m"; - } - } - } - } - if (css.fontWeight != prevCss.fontWeight) { - if (css.fontWeight == "bold") { - ansi += `\x1b[1m`; - } else { - ansi += "\x1b[22m"; - } - } - if (css.fontStyle != prevCss.fontStyle) { - if (css.fontStyle == "italic") { - ansi += `\x1b[3m`; - } else { - ansi += "\x1b[23m"; - } - } - if (!colorEquals(css.textDecorationColor, prevCss.textDecorationColor)) { - if (css.textDecorationColor != null) { - const { 0: r, 1: g, 2: b } = css.textDecorationColor; - ansi += `\x1b[58;2;${r};${g};${b}m`; - } else { - ansi += "\x1b[59m"; - } - } - if ( - ArrayPrototypeIncludes(css.textDecorationLine, "line-through") != - ArrayPrototypeIncludes(prevCss.textDecorationLine, "line-through") - ) { - if (ArrayPrototypeIncludes(css.textDecorationLine, "line-through")) { - ansi += "\x1b[9m"; - } else { - ansi += "\x1b[29m"; - } - } - if ( - ArrayPrototypeIncludes(css.textDecorationLine, "overline") != - ArrayPrototypeIncludes(prevCss.textDecorationLine, "overline") - ) { - if (ArrayPrototypeIncludes(css.textDecorationLine, "overline")) { - ansi += "\x1b[53m"; - } else { - ansi += "\x1b[55m"; - } - } - if ( - ArrayPrototypeIncludes(css.textDecorationLine, "underline") != - ArrayPrototypeIncludes(prevCss.textDecorationLine, "underline") - ) { - if (ArrayPrototypeIncludes(css.textDecorationLine, "underline")) { - ansi += "\x1b[4m"; - } else { - ansi += "\x1b[24m"; - } - } - return ansi; -} - -function inspectArgs(args, inspectOptions = {}) { - circular = undefined; - - const noColor = colors.getNoColor(); - const rInspectOptions = { ...DEFAULT_INSPECT_OPTIONS, ...inspectOptions }; - const first = args[0]; - let a = 0; - let string = ""; - - if (typeof first == 
"string" && args.length > 1) { - a++; - // Index of the first not-yet-appended character. Use this so we only - // have to append to `string` when a substitution occurs / at the end. - let appendedChars = 0; - let usedStyle = false; - let prevCss = null; - for (let i = 0; i < first.length - 1; i++) { - if (first[i] == "%") { - const char = first[++i]; - if (a < args.length) { - let formattedArg = null; - if (char == "s") { - // Format as a string. - formattedArg = String(args[a++]); - } else if (ArrayPrototypeIncludes(["d", "i"], char)) { - // Format as an integer. - const value = args[a++]; - if (typeof value == "bigint") { - formattedArg = `${value}n`; - } else if (typeof value == "number") { - formattedArg = `${NumberParseInt(String(value))}`; - } else { - formattedArg = "NaN"; - } - } else if (char == "f") { - // Format as a floating point value. - const value = args[a++]; - if (typeof value == "number") { - formattedArg = `${value}`; - } else { - formattedArg = "NaN"; - } - } else if (ArrayPrototypeIncludes(["O", "o"], char)) { - // Format as an object. - formattedArg = inspectValue(args[a++], rInspectOptions); - } else if (char == "c") { - const value = args[a++]; - if (!noColor) { - const css = parseCss(value); - formattedArg = cssToAnsi(css, prevCss); - if (formattedArg != "") { - usedStyle = true; - prevCss = css; - } - } else { - formattedArg = ""; - } - } - - if (formattedArg != null) { - string += StringPrototypeSlice(first, appendedChars, i - 1) + - formattedArg; - appendedChars = i + 1; - } - } - if (char == "%") { - string += StringPrototypeSlice(first, appendedChars, i - 1) + "%"; - appendedChars = i + 1; - } - } - } - string += StringPrototypeSlice(first, appendedChars); - if (usedStyle) { - string += "\x1b[0m"; - } - } - - for (; a < args.length; a++) { - if (a > 0) { - string += " "; - } - if (typeof args[a] == "string") { - string += args[a]; - } else { - // Use default maximum depth for null or undefined arguments. 
- string += inspectValue(args[a], rInspectOptions); - } - } - - if (rInspectOptions.indentLevel > 0) { - const groupIndent = StringPrototypeRepeat( - DEFAULT_INDENT, - rInspectOptions.indentLevel, - ); - string = groupIndent + - StringPrototypeReplaceAll(string, "\n", `\n${groupIndent}`); - } - - return string; -} - -const countMap = new SafeMap(); -const timerMap = new SafeMap(); -const isConsoleInstance = Symbol("isConsoleInstance"); - -function getConsoleInspectOptions() { - return { - ...DEFAULT_INSPECT_OPTIONS, - colors: !colors.getNoColor(), - }; -} - -class Console { - #printFunc = null; - [isConsoleInstance] = false; - - constructor(printFunc) { - this.#printFunc = printFunc; - this.indentLevel = 0; - this[isConsoleInstance] = true; - - // ref https://console.spec.whatwg.org/#console-namespace - // For historical web-compatibility reasons, the namespace object for - // console must have as its [[Prototype]] an empty object, created as if - // by ObjectCreate(%ObjectPrototype%), instead of %ObjectPrototype%. 
- const console = ObjectCreate({}, { - [SymbolToStringTag]: { - enumerable: false, - writable: false, - configurable: true, - value: "console", - }, - }); - ObjectAssign(console, this); - return console; - } - - log = (...args) => { - this.#printFunc( - inspectArgs(args, { - ...getConsoleInspectOptions(), - indentLevel: this.indentLevel, - }) + "\n", - 1, - ); - }; - - debug = (...args) => { - this.#printFunc( - inspectArgs(args, { - ...getConsoleInspectOptions(), - indentLevel: this.indentLevel, - }) + "\n", - 0, - ); - }; - - info = (...args) => { - this.#printFunc( - inspectArgs(args, { - ...getConsoleInspectOptions(), - indentLevel: this.indentLevel, - }) + "\n", - 1, - ); - }; - - dir = (obj = undefined, options = {}) => { - this.#printFunc( - inspectArgs([obj], { ...getConsoleInspectOptions(), ...options }) + - "\n", - 1, - ); - }; - - dirxml = this.dir; - - warn = (...args) => { - this.#printFunc( - inspectArgs(args, { - ...getConsoleInspectOptions(), - indentLevel: this.indentLevel, - }) + "\n", - 2, - ); - }; - - error = (...args) => { - this.#printFunc( - inspectArgs(args, { - ...getConsoleInspectOptions(), - indentLevel: this.indentLevel, - }) + "\n", - 3, - ); - }; - - assert = (condition = false, ...args) => { - if (condition) { - return; - } - - if (args.length === 0) { - this.error("Assertion failed"); - return; - } - - const [first, ...rest] = new SafeArrayIterator(args); - - if (typeof first === "string") { - this.error( - `Assertion failed: ${first}`, - ...new SafeArrayIterator(rest), - ); - return; - } - - this.error(`Assertion failed:`, ...new SafeArrayIterator(args)); - }; - - count = (label = "default") => { - label = String(label); - - if (MapPrototypeHas(countMap, label)) { - const current = MapPrototypeGet(countMap, label) || 0; - MapPrototypeSet(countMap, label, current + 1); - } else { - MapPrototypeSet(countMap, label, 1); - } - - this.info(`${label}: ${MapPrototypeGet(countMap, label)}`); - }; - - countReset = (label = "default") => { - 
label = String(label); - - if (MapPrototypeHas(countMap, label)) { - MapPrototypeSet(countMap, label, 0); - } else { - this.warn(`Count for '${label}' does not exist`); - } - }; - - table = (data = undefined, properties = undefined) => { - if (properties !== undefined && !ArrayIsArray(properties)) { - throw new Error( - "The 'properties' argument must be of type Array. " + - "Received type string", - ); - } - - if (data === null || typeof data !== "object") { - return this.log(data); - } - - const stringifyValue = (value) => - inspectValueWithQuotes(value, { - ...DEFAULT_INSPECT_OPTIONS, - depth: 1, - }); - const toTable = (header, body) => this.log(cliTable(header, body)); - - let resultData; - const isSet = ObjectPrototypeIsPrototypeOf(SetPrototype, data); - const isMap = ObjectPrototypeIsPrototypeOf(MapPrototype, data); - const valuesKey = "Values"; - const indexKey = isSet || isMap ? "(iter idx)" : "(idx)"; - - if (isSet) { - resultData = [...new SafeSet(data)]; - } else if (isMap) { - let idx = 0; - resultData = {}; - - MapPrototypeForEach(data, (v, k) => { - resultData[idx] = { Key: k, Values: v }; - idx++; - }); - } else { - resultData = data; - } - - const keys = ObjectKeys(resultData); - const numRows = keys.length; - - const objectValues = properties - ? 
ObjectFromEntries( - ArrayPrototypeMap( - properties, - (name) => [name, ArrayPrototypeFill(new Array(numRows), "")], - ), - ) - : {}; - const indexKeys = []; - const values = []; - - let hasPrimitives = false; - keys.forEach((k, idx) => { - const value = resultData[k]; - const primitive = value === null || - (typeof value !== "function" && typeof value !== "object"); - if (properties === undefined && primitive) { - hasPrimitives = true; - ArrayPrototypePush(values, stringifyValue(value)); - } else { - const valueObj = value || {}; - const keys = properties || ObjectKeys(valueObj); - for (let i = 0; i < keys.length; ++i) { - const k = keys[i]; - if (!primitive && ReflectHas(valueObj, k)) { - if (!(ReflectHas(objectValues, k))) { - objectValues[k] = ArrayPrototypeFill(new Array(numRows), ""); - } - objectValues[k][idx] = stringifyValue(valueObj[k]); - } - } - ArrayPrototypePush(values, ""); - } - - ArrayPrototypePush(indexKeys, k); - }); - - const headerKeys = ObjectKeys(objectValues); - const bodyValues = ObjectValues(objectValues); - const headerProps = properties || - [ - ...new SafeArrayIterator(headerKeys), - !isMap && hasPrimitives && valuesKey, - ]; - const header = ArrayPrototypeFilter([ - indexKey, - ...new SafeArrayIterator(headerProps), - ], Boolean); - const body = [indexKeys, ...new SafeArrayIterator(bodyValues), values]; - - toTable(header, body); - }; - - time = (label = "default") => { - label = String(label); - - if (MapPrototypeHas(timerMap, label)) { - this.warn(`Timer '${label}' already exists`); - return; - } - - MapPrototypeSet(timerMap, label, DateNow()); - }; - - timeLog = (label = "default", ...args) => { - label = String(label); - - if (!MapPrototypeHas(timerMap, label)) { - this.warn(`Timer '${label}' does not exists`); - return; - } - - const startTime = MapPrototypeGet(timerMap, label); - const duration = DateNow() - startTime; - - this.info(`${label}: ${duration}ms`, ...new SafeArrayIterator(args)); - }; - - timeEnd = (label = 
"default") => { - label = String(label); - - if (!MapPrototypeHas(timerMap, label)) { - this.warn(`Timer '${label}' does not exist`); - return; - } - - const startTime = MapPrototypeGet(timerMap, label); - MapPrototypeDelete(timerMap, label); - const duration = DateNow() - startTime; - - this.info(`${label}: ${duration}ms`); - }; - - group = (...label) => { - if (label.length > 0) { - this.log(...new SafeArrayIterator(label)); - } - this.indentLevel += 2; - }; - - groupCollapsed = this.group; - - groupEnd = () => { - if (this.indentLevel > 0) { - this.indentLevel -= 2; - } - }; - - clear = () => { - this.indentLevel = 0; - this.#printFunc(CSI.kClear, 1); - this.#printFunc(CSI.kClearScreenDown, 1); - }; - - trace = (...args) => { - const message = inspectArgs( - args, - { ...getConsoleInspectOptions(), indentLevel: 0 }, - ); - const err = { - name: "Trace", - message, - }; - ErrorCaptureStackTrace(err, this.trace); - this.error(err.stack); - }; - - // These methods are noops, but when the inspector is connected, they - // call into V8. - profile = (_label) => {}; - profileEnd = (_label) => {}; - timeStamp = (_label) => {}; - - static [SymbolHasInstance](instance) { - return instance[isConsoleInstance]; - } -} - -const customInspect = SymbolFor("Deno.customInspect"); - -function inspect( - value, - inspectOptions = {}, -) { - circular = undefined; - return inspectValue(value, { - ...DEFAULT_INSPECT_OPTIONS, - ...inspectOptions, - }); -} - -/** Creates a proxy that represents a subset of the properties - * of the original object optionally without evaluating the properties - * in order to get the values. 
*/ -function createFilteredInspectProxy({ object, keys, evaluate }) { - return new Proxy({}, { - get(_target, key) { - if (key === SymbolToStringTag) { - return object.constructor?.name; - } else if (ArrayPrototypeIncludes(keys, key)) { - return ReflectGet(object, key); - } else { - return undefined; - } - }, - getOwnPropertyDescriptor(_target, key) { - if (!ArrayPrototypeIncludes(keys, key)) { - return undefined; - } else if (evaluate) { - return getEvaluatedDescriptor(object, key); - } else { - return getDescendantPropertyDescriptor(object, key) ?? - getEvaluatedDescriptor(object, key); - } - }, - has(_target, key) { - return ArrayPrototypeIncludes(keys, key); - }, - ownKeys() { - return keys; - }, - }); - - function getDescendantPropertyDescriptor(object, key) { - let propertyDescriptor = ReflectGetOwnPropertyDescriptor(object, key); - if (!propertyDescriptor) { - const prototype = ReflectGetPrototypeOf(object); - if (prototype) { - propertyDescriptor = getDescendantPropertyDescriptor(prototype, key); - } - } - return propertyDescriptor; - } - - function getEvaluatedDescriptor(object, key) { - return { - configurable: true, - enumerable: true, - value: object[key], - }; - } -} - -// A helper function that will bind our own console implementation -// with default implementation of Console from V8. This will cause -// console messages to be piped to inspector console. -// -// We are using `Deno.core.callConsole` binding to preserve proper stack -// frames in inspector console. This has to be done because V8 considers -// the last JS stack frame as gospel for the inspector. In our case we -// specifically want the latest user stack frame to be the one that matters -// though. 
-// -// Inspired by: -// https://github.com/nodejs/node/blob/1317252dfe8824fd9cfee125d2aaa94004db2f3b/lib/internal/util/inspector.js#L39-L61 -function wrapConsole(consoleFromDeno, consoleFromV8) { - const callConsole = core.callConsole; - - const keys = ObjectKeys(consoleFromV8); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (ObjectPrototypeHasOwnProperty(consoleFromDeno, key)) { - consoleFromDeno[key] = FunctionPrototypeBind( - callConsole, - consoleFromDeno, - consoleFromV8[key], - consoleFromDeno[key], - ); - } else { - // Add additional console APIs from the inspector - consoleFromDeno[key] = consoleFromV8[key]; - } - } -} - -// Expose these fields to internalObject for tests. -internals.Console = Console; -internals.cssToAnsi = cssToAnsi; -internals.inspectArgs = inspectArgs; -internals.parseCss = parseCss; -internals.parseCssColor = parseCssColor; - -export { - Console, - createFilteredInspectProxy, - CSI, - customInspect, - inspect, - inspectArgs, - quoteString, - wrapConsole, -}; diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index be3bd94698..ceb9c0c200 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.99.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/internal.d.ts b/ext/console/internal.d.ts index d344f3a777..1fbc893785 100644 --- a/ext/console/internal.d.ts +++ b/ext/console/internal.d.ts @@ -3,7 +3,7 @@ /// /// -declare module "ext:deno_console/02_console.js" { +declare module "ext:deno_console/01_console.js" { function createFilteredInspectProxy(params: { object: TObject; keys: (keyof TObject)[]; diff --git a/ext/console/lib.rs b/ext/console/lib.rs index a45b856cd9..a31470e080 100644 --- a/ext/console/lib.rs +++ b/ext/console/lib.rs @@ -1,7 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
use std::path::PathBuf; -deno_core::extension!(deno_console, esm = ["01_colors.js", "02_console.js"],); +deno_core::extension!(deno_console, esm = ["01_console.js"],); pub fn get_declaration() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_console.d.ts") diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 4d54b52a9d..d88aef219d 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -12,11 +12,12 @@ const primordials = globalThis.__bootstrap.primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; const { - ArrayBufferPrototype, - ArrayBufferPrototypeSlice, - ArrayBufferPrototypeGetByteLength, ArrayBufferIsView, + ArrayBufferPrototype, + ArrayBufferPrototypeGetByteLength, + ArrayBufferPrototypeSlice, ArrayPrototypeEvery, + ArrayPrototypeFilter, ArrayPrototypeFind, ArrayPrototypeIncludes, DataViewPrototypeGetBuffer, @@ -26,23 +27,23 @@ const { JSONStringify, MathCeil, ObjectAssign, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, - StringPrototypeToLowerCase, - StringPrototypeToUpperCase, - StringPrototypeCharCodeAt, - StringFromCharCode, SafeArrayIterator, SafeWeakMap, + StringFromCharCode, + StringPrototypeCharCodeAt, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, Symbol, SymbolFor, SyntaxError, - TypedArrayPrototypeSlice, + TypeError, TypedArrayPrototypeGetBuffer, TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetByteOffset, TypedArrayPrototypeGetSymbolToStringTag, - TypeError, + TypedArrayPrototypeSlice, Uint8Array, WeakMapPrototypeGet, WeakMapPrototypeSet, @@ -199,17 +200,18 @@ function normalizeAlgorithm(algorithm, op) { // 1. const registeredAlgorithms = supportedAlgorithms[op]; // 2. 3. 
- const initialAlg = webidl.converters.Algorithm(algorithm, { - prefix: "Failed to normalize algorithm", - context: "passed algorithm", - }); + const initialAlg = webidl.converters.Algorithm( + algorithm, + "Failed to normalize algorithm", + "passed algorithm", + ); // 4. let algName = initialAlg.name; // 5. let desiredType = undefined; for (const key in registeredAlgorithms) { - if (!ObjectPrototypeHasOwnProperty(registeredAlgorithms, key)) { + if (!ObjectHasOwn(registeredAlgorithms, key)) { continue; } if ( @@ -232,10 +234,11 @@ function normalizeAlgorithm(algorithm, op) { } // 6. - const normalizedAlgorithm = webidl.converters[desiredType](algorithm, { - prefix: "Failed to normalize algorithm", - context: "passed algorithm", - }); + const normalizedAlgorithm = webidl.converters[desiredType]( + algorithm, + "Failed to normalize algorithm", + "passed algorithm", + ); // 7. normalizedAlgorithm.name = algName; @@ -243,7 +246,7 @@ function normalizeAlgorithm(algorithm, op) { const dict = simpleAlgorithmDictionaries[desiredType]; // 10. 
for (const member in dict) { - if (!ObjectPrototypeHasOwnProperty(dict, member)) { + if (!ObjectHasOwn(dict, member)) { continue; } const idlType = dict[member]; @@ -386,7 +389,10 @@ function constructKey(type, extractable, usages, algorithm, handle) { * @returns */ function usageIntersection(a, b) { - return a.filter((i) => b.includes(i)); + return ArrayPrototypeFilter( + a, + (i) => ArrayPrototypeIncludes(b, i), + ); } // TODO(lucacasonato): this should be moved to rust @@ -469,14 +475,12 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'digest' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 2, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + data = webidl.converters.BufferSource(data, prefix, "Argument 2"); data = copyBuffer(data); @@ -501,18 +505,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'encrypt' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 2. 
data = copyBuffer(data); @@ -549,18 +548,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'decrypt' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 2. data = copyBuffer(data); @@ -757,18 +751,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'sign' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 1. 
data = copyBuffer(data); @@ -895,7 +884,7 @@ class SubtleCrypto { // https://briansmith.org/rustdoc/src/ring/ec/curve25519/ed25519/signing.rs.html#260 const SIGNATURE_LEN = 32 * 2; // ELEM_LEN + SCALAR_LEN const signature = new Uint8Array(SIGNATURE_LEN); - if (!ops.op_sign_ed25519(keyData, data, signature)) { + if (!ops.op_crypto_sign_ed25519(keyData, data, signature)) { throw new DOMException( "Failed to sign", "OperationError", @@ -921,26 +910,23 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'importKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + keyData = webidl.converters["BufferSource or JsonWebKey"]( + keyData, prefix, - context: "Argument 1", - }); - keyData = webidl.converters["BufferSource or JsonWebKey"](keyData, { + "Argument 2", + ); + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 2", - }); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + "Argument 3", + ); + extractable = webidl.converters.boolean(extractable, prefix, "Argument 4"); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 3", - }); - extractable = webidl.converters.boolean(extractable, { - prefix, - context: "Argument 4", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 5", - }); + "Argument 5", + ); // 2. 
if (format !== "jwk") { @@ -1055,14 +1041,8 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'exportKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 2, prefix); - format = webidl.converters.KeyFormat(format, { - prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); const handle = key[_handle]; // 2. @@ -1127,19 +1107,14 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'deriveBits' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - baseKey = webidl.converters.CryptoKey(baseKey, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + baseKey = webidl.converters.CryptoKey(baseKey, prefix, "Argument 2"); if (length !== null) { - length = webidl.converters["unsigned long"](length, { - prefix, - context: "Argument 3", - }); + length = webidl.converters["unsigned long"](length, prefix, "Argument 3"); } // 2. 
@@ -1177,26 +1152,27 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'deriveKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 5, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - baseKey = webidl.converters.CryptoKey(baseKey, { + "Argument 1", + ); + baseKey = webidl.converters.CryptoKey(baseKey, prefix, "Argument 2"); + derivedKeyType = webidl.converters.AlgorithmIdentifier( + derivedKeyType, prefix, - context: "Argument 2", - }); - derivedKeyType = webidl.converters.AlgorithmIdentifier(derivedKeyType, { + "Argument 3", + ); + extractable = webidl.converters["boolean"]( + extractable, prefix, - context: "Argument 3", - }); - extractable = webidl.converters["boolean"](extractable, { + "Argument 4", + ); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 4", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 5", - }); + "Argument 5", + ); // 2-3. 
const normalizedAlgorithm = normalizeAlgorithm(algorithm, "deriveBits"); @@ -1272,22 +1248,14 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'verify' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - signature = webidl.converters.BufferSource(signature, { - prefix, - context: "Argument 3", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 4", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + signature = webidl.converters.BufferSource(signature, prefix, "Argument 3"); + data = webidl.converters.BufferSource(data, prefix, "Argument 4"); // 2. signature = copyBuffer(signature); @@ -1395,7 +1363,7 @@ class SubtleCrypto { ); } - return ops.op_verify_ed25519(keyData, data, signature); + return ops.op_crypto_verify_ed25519(keyData, data, signature); } } @@ -1412,22 +1380,18 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'wrapKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + wrappingKey = webidl.converters.CryptoKey( + wrappingKey, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { + "Argument 3", + ); + wrapAlgorithm = webidl.converters.AlgorithmIdentifier( + wrapAlgorithm, prefix, - context: "Argument 2", - }); - wrappingKey = webidl.converters.CryptoKey(wrappingKey, { - prefix, - context: "Argument 3", - }); - wrapAlgorithm = 
webidl.converters.AlgorithmIdentifier(wrapAlgorithm, { - prefix, - context: "Argument 4", - }); + "Argument 4", + ); let normalizedAlgorithm; @@ -1548,37 +1512,33 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'unwrapKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 7, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + wrappedKey = webidl.converters.BufferSource( + wrappedKey, prefix, - context: "Argument 1", - }); - wrappedKey = webidl.converters.BufferSource(wrappedKey, { + "Argument 2", + ); + unwrappingKey = webidl.converters.CryptoKey( + unwrappingKey, prefix, - context: "Argument 2", - }); - unwrappingKey = webidl.converters.CryptoKey(unwrappingKey, { + "Argument 3", + ); + unwrapAlgorithm = webidl.converters.AlgorithmIdentifier( + unwrapAlgorithm, prefix, - context: "Argument 3", - }); - unwrapAlgorithm = webidl.converters.AlgorithmIdentifier(unwrapAlgorithm, { - prefix, - context: "Argument 4", - }); + "Argument 4", + ); unwrappedKeyAlgorithm = webidl.converters.AlgorithmIdentifier( unwrappedKeyAlgorithm, - { - prefix, - context: "Argument 5", - }, + prefix, + "Argument 5", ); - extractable = webidl.converters.boolean(extractable, { + extractable = webidl.converters.boolean(extractable, prefix, "Argument 6"); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 6", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 7", - }); + "Argument 7", + ); // 2. 
wrappedKey = copyBuffer(wrappedKey); @@ -1709,18 +1669,21 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'generateKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - extractable = webidl.converters["boolean"](extractable, { + "Argument 1", + ); + extractable = webidl.converters["boolean"]( + extractable, prefix, - context: "Argument 2", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { + "Argument 2", + ); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 3", - }); + "Argument 3", + ); const usages = keyUsages; @@ -2034,7 +1997,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) { } const privateKeyData = new Uint8Array(32); const publicKeyData = new Uint8Array(32); - ops.op_generate_x25519_keypair(privateKeyData, publicKeyData); + ops.op_crypto_generate_x25519_keypair(privateKeyData, publicKeyData); const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData); @@ -2079,7 +2042,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) { const privateKeyData = new Uint8Array(ED25519_SEED_LEN); const publicKeyData = new Uint8Array(ED25519_PUBLIC_KEY_LEN); if ( - !ops.op_generate_ed25519_keypair(privateKeyData, publicKeyData) + !ops.op_crypto_generate_ed25519_keypair(privateKeyData, publicKeyData) ) { throw new DOMException("Failed to generate key", "OperationError"); } @@ -2216,7 +2179,7 @@ function importKeyEd25519( } const publicKeyData = new Uint8Array(32); - if (!ops.op_import_spki_ed25519(keyData, publicKeyData)) { + if (!ops.op_crypto_import_spki_ed25519(keyData, publicKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2247,7 +2210,7 @@ function importKeyEd25519( } const 
privateKeyData = new Uint8Array(32); - if (!ops.op_import_pkcs8_ed25519(keyData, privateKeyData)) { + if (!ops.op_crypto_import_pkcs8_ed25519(keyData, privateKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2356,7 +2319,12 @@ function importKeyEd25519( // 9. if (jwk.d !== undefined) { // https://www.rfc-editor.org/rfc/rfc8037#section-2 - const privateKeyData = ops.op_crypto_base64url_decode(jwk.d); + let privateKeyData; + try { + privateKeyData = ops.op_crypto_base64url_decode(jwk.d); + } catch (_) { + throw new DOMException("invalid private key data", "DataError"); + } const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData); @@ -2374,7 +2342,12 @@ function importKeyEd25519( ); } else { // https://www.rfc-editor.org/rfc/rfc8037#section-2 - const publicKeyData = ops.op_crypto_base64url_decode(jwk.x); + let publicKeyData; + try { + publicKeyData = ops.op_crypto_base64url_decode(jwk.x); + } catch (_) { + throw new DOMException("invalid public key data", "DataError"); + } const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, publicKeyData); @@ -2434,7 +2407,7 @@ function importKeyX25519( } const publicKeyData = new Uint8Array(32); - if (!ops.op_import_spki_x25519(keyData, publicKeyData)) { + if (!ops.op_crypto_import_spki_x25519(keyData, publicKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2465,7 +2438,7 @@ function importKeyX25519( } const privateKeyData = new Uint8Array(32); - if (!ops.op_import_pkcs8_x25519(keyData, privateKeyData)) { + if (!ops.op_crypto_import_pkcs8_x25519(keyData, privateKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -4092,7 +4065,7 @@ function exportKeyEd25519(format, key, innerKey) { ); } - const spkiDer = ops.op_export_spki_ed25519(innerKey); + const spkiDer = ops.op_crypto_export_spki_ed25519(innerKey); return TypedArrayPrototypeGetBuffer(spkiDer); } case "pkcs8": { @@ -4104,7 +4077,7 @@ function exportKeyEd25519(format, key, innerKey) { ); 
} - const pkcs8Der = ops.op_export_pkcs8_ed25519( + const pkcs8Der = ops.op_crypto_export_pkcs8_ed25519( new Uint8Array([0x04, 0x22, ...new SafeArrayIterator(innerKey)]), ); pkcs8Der[15] = 0x20; @@ -4112,7 +4085,7 @@ function exportKeyEd25519(format, key, innerKey) { } case "jwk": { const x = key[_type] === "private" - ? ops.op_jwk_x_ed25519(innerKey) + ? ops.op_crypto_jwk_x_ed25519(innerKey) : ops.op_crypto_base64url_encode(innerKey); const jwk = { kty: "OKP", @@ -4155,7 +4128,7 @@ function exportKeyX25519(format, key, innerKey) { ); } - const spkiDer = ops.op_export_spki_x25519(innerKey); + const spkiDer = ops.op_crypto_export_spki_x25519(innerKey); return TypedArrayPrototypeGetBuffer(spkiDer); } case "pkcs8": { @@ -4167,7 +4140,7 @@ function exportKeyX25519(format, key, innerKey) { ); } - const pkcs8Der = ops.op_export_pkcs8_x25519( + const pkcs8Der = ops.op_crypto_export_pkcs8_x25519( new Uint8Array([0x04, 0x22, ...new SafeArrayIterator(innerKey)]), ); pkcs8Der[15] = 0x20; @@ -4513,7 +4486,7 @@ async function deriveBits(normalizedAlgorithm, baseKey, length) { const u = WeakMapPrototypeGet(KEY_STORE, uHandle); const secret = new Uint8Array(32); - const isIdentity = ops.op_derive_bits_x25519(k, u, secret); + const isIdentity = ops.op_crypto_derive_bits_x25519(k, u, secret); // 6. 
if (isIdentity) { @@ -4722,10 +4695,11 @@ class Crypto { ops.op_crypto_get_random_values(typedArray); return typedArray; } - typedArray = webidl.converters.ArrayBufferView(typedArray, { + typedArray = webidl.converters.ArrayBufferView( + typedArray, prefix, - context: "Argument 1", - }); + "Argument 1", + ); switch (tag) { case "Int8Array": case "Uint8ClampedArray": @@ -4770,4 +4744,506 @@ webidl.configurePrototype(Crypto); const CryptoPrototype = Crypto.prototype; const crypto = webidl.createBranded(Crypto); + +webidl.converters.AlgorithmIdentifier = (V, prefix, context, opts) => { + // Union for (object or DOMString) + if (webidl.type(V) == "Object") { + return webidl.converters.object(V, prefix, context, opts); + } + return webidl.converters.DOMString(V, prefix, context, opts); +}; + +webidl.converters["BufferSource or JsonWebKey"] = ( + V, + prefix, + context, + opts, +) => { + // Union for (BufferSource or JsonWebKey) + if ( + ArrayBufferIsView(V) || + ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) + ) { + return webidl.converters.BufferSource(V, prefix, context, opts); + } + return webidl.converters.JsonWebKey(V, prefix, context, opts); +}; + +webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ + "public", + "private", + "secret", +]); + +webidl.converters.KeyFormat = webidl.createEnumConverter("KeyFormat", [ + "raw", + "pkcs8", + "spki", + "jwk", +]); + +webidl.converters.KeyUsage = webidl.createEnumConverter("KeyUsage", [ + "encrypt", + "decrypt", + "sign", + "verify", + "deriveKey", + "deriveBits", + "wrapKey", + "unwrapKey", +]); + +webidl.converters["sequence"] = webidl.createSequenceConverter( + webidl.converters.KeyUsage, +); + +webidl.converters.HashAlgorithmIdentifier = + webidl.converters.AlgorithmIdentifier; + +/** @type {webidl.Dictionary} */ +const dictAlgorithm = [{ + key: "name", + converter: webidl.converters.DOMString, + required: true, +}]; + +webidl.converters.Algorithm = webidl + 
.createDictionaryConverter("Algorithm", dictAlgorithm); + +webidl.converters.BigInteger = webidl.converters.Uint8Array; + +/** @type {webidl.Dictionary} */ +const dictRsaKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "modulusLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, + { + key: "publicExponent", + converter: webidl.converters.BigInteger, + required: true, + }, +]; + +webidl.converters.RsaKeyGenParams = webidl + .createDictionaryConverter("RsaKeyGenParams", dictRsaKeyGenParams); + +const dictRsaHashedKeyGenParams = [ + ...new SafeArrayIterator(dictRsaKeyGenParams), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters.RsaHashedKeyGenParams = webidl.createDictionaryConverter( + "RsaHashedKeyGenParams", + dictRsaHashedKeyGenParams, +); + +const dictRsaHashedImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters.RsaHashedImportParams = webidl.createDictionaryConverter( + "RsaHashedImportParams", + dictRsaHashedImportParams, +); + +webidl.converters.NamedCurve = webidl.converters.DOMString; + +const dictEcKeyImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "namedCurve", + converter: webidl.converters.NamedCurve, + required: true, + }, +]; + +webidl.converters.EcKeyImportParams = webidl.createDictionaryConverter( + "EcKeyImportParams", + dictEcKeyImportParams, +); + +const dictEcKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "namedCurve", + converter: webidl.converters.NamedCurve, + required: true, + }, +]; + +webidl.converters.EcKeyGenParams = webidl + .createDictionaryConverter("EcKeyGenParams", dictEcKeyGenParams); + +const dictAesKeyGenParams = [ + ...new 
SafeArrayIterator(dictAlgorithm), + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.AesKeyGenParams = webidl + .createDictionaryConverter("AesKeyGenParams", dictAesKeyGenParams); + +const dictHmacKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + }, +]; + +webidl.converters.HmacKeyGenParams = webidl + .createDictionaryConverter("HmacKeyGenParams", dictHmacKeyGenParams); + +const dictRsaPssParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "saltLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.RsaPssParams = webidl + .createDictionaryConverter("RsaPssParams", dictRsaPssParams); + +const dictRsaOaepParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "label", + converter: webidl.converters["BufferSource"], + }, +]; + +webidl.converters.RsaOaepParams = webidl + .createDictionaryConverter("RsaOaepParams", dictRsaOaepParams); + +const dictEcdsaParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters["EcdsaParams"] = webidl + .createDictionaryConverter("EcdsaParams", dictEcdsaParams); + +const dictHmacImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, 
prefix, context, { + ...opts, + enforceRange: true, + }), + }, +]; + +webidl.converters.HmacImportParams = webidl + .createDictionaryConverter("HmacImportParams", dictHmacImportParams); + +const dictRsaOtherPrimesInfo = [ + { + key: "r", + converter: webidl.converters["DOMString"], + }, + { + key: "d", + converter: webidl.converters["DOMString"], + }, + { + key: "t", + converter: webidl.converters["DOMString"], + }, +]; + +webidl.converters.RsaOtherPrimesInfo = webidl.createDictionaryConverter( + "RsaOtherPrimesInfo", + dictRsaOtherPrimesInfo, +); +webidl.converters["sequence"] = webidl + .createSequenceConverter( + webidl.converters.RsaOtherPrimesInfo, + ); + +const dictJsonWebKey = [ + // Sections 4.2 and 4.3 of RFC7517. + // https://datatracker.ietf.org/doc/html/rfc7517#section-4 + { + key: "kty", + converter: webidl.converters["DOMString"], + }, + { + key: "use", + converter: webidl.converters["DOMString"], + }, + { + key: "key_ops", + converter: webidl.converters["sequence"], + }, + { + key: "alg", + converter: webidl.converters["DOMString"], + }, + // JSON Web Key Parameters Registration + { + key: "ext", + converter: webidl.converters["boolean"], + }, + // Section 6 of RFC7518 JSON Web Algorithms + // https://datatracker.ietf.org/doc/html/rfc7518#section-6 + { + key: "crv", + converter: webidl.converters["DOMString"], + }, + { + key: "x", + converter: webidl.converters["DOMString"], + }, + { + key: "y", + converter: webidl.converters["DOMString"], + }, + { + key: "d", + converter: webidl.converters["DOMString"], + }, + { + key: "n", + converter: webidl.converters["DOMString"], + }, + { + key: "e", + converter: webidl.converters["DOMString"], + }, + { + key: "p", + converter: webidl.converters["DOMString"], + }, + { + key: "q", + converter: webidl.converters["DOMString"], + }, + { + key: "dp", + converter: webidl.converters["DOMString"], + }, + { + key: "dq", + converter: webidl.converters["DOMString"], + }, + { + key: "qi", + converter: 
webidl.converters["DOMString"], + }, + { + key: "oth", + converter: webidl.converters["sequence"], + }, + { + key: "k", + converter: webidl.converters["DOMString"], + }, +]; + +webidl.converters.JsonWebKey = webidl.createDictionaryConverter( + "JsonWebKey", + dictJsonWebKey, +); + +const dictHkdfParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "salt", + converter: webidl.converters["BufferSource"], + required: true, + }, + { + key: "info", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +webidl.converters.HkdfParams = webidl + .createDictionaryConverter("HkdfParams", dictHkdfParams); + +const dictPbkdf2Params = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "iterations", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, + { + key: "salt", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +webidl.converters.Pbkdf2Params = webidl + .createDictionaryConverter("Pbkdf2Params", dictPbkdf2Params); + +const dictAesDerivedKeyParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +const dictAesCbcParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "iv", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +const dictAesGcmParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "iv", + converter: webidl.converters["BufferSource"], + required: true, + }, + { + key: "tagLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, 
prefix, context, { + ...opts, + enforceRange: true, + }), + }, + { + key: "additionalData", + converter: webidl.converters["BufferSource"], + }, +]; + +const dictAesCtrParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "counter", + converter: webidl.converters["BufferSource"], + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.AesDerivedKeyParams = webidl + .createDictionaryConverter("AesDerivedKeyParams", dictAesDerivedKeyParams); + +webidl.converters.AesCbcParams = webidl + .createDictionaryConverter("AesCbcParams", dictAesCbcParams); + +webidl.converters.AesGcmParams = webidl + .createDictionaryConverter("AesGcmParams", dictAesGcmParams); + +webidl.converters.AesCtrParams = webidl + .createDictionaryConverter("AesCtrParams", dictAesCtrParams); + +webidl.converters.CryptoKey = webidl.createInterfaceConverter( + "CryptoKey", + CryptoKey.prototype, +); + +const dictCryptoKeyPair = [ + { + key: "publicKey", + converter: webidl.converters.CryptoKey, + }, + { + key: "privateKey", + converter: webidl.converters.CryptoKey, + }, +]; + +webidl.converters.CryptoKeyPair = webidl + .createDictionaryConverter("CryptoKeyPair", dictCryptoKeyPair); + +const dictEcdhKeyDeriveParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "public", + converter: webidl.converters.CryptoKey, + required: true, + }, +]; + +webidl.converters.EcdhKeyDeriveParams = webidl + .createDictionaryConverter("EcdhKeyDeriveParams", dictEcdhKeyDeriveParams); + export { Crypto, crypto, CryptoKey, SubtleCrypto }; diff --git a/ext/crypto/01_webidl.js b/ext/crypto/01_webidl.js deleted file mode 100644 index 715e5a9773..0000000000 --- a/ext/crypto/01_webidl.js +++ /dev/null @@ -1,484 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -// @ts-check -/// -/// - -const primordials = globalThis.__bootstrap.primordials; -import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { CryptoKey } from "ext:deno_crypto/00_crypto.js"; -const { - ArrayBufferIsView, - ArrayBufferPrototype, - ObjectPrototypeIsPrototypeOf, - SafeArrayIterator, -} = primordials; - -webidl.converters.AlgorithmIdentifier = (V, opts) => { - // Union for (object or DOMString) - if (webidl.type(V) == "Object") { - return webidl.converters.object(V, opts); - } - return webidl.converters.DOMString(V, opts); -}; - -webidl.converters["BufferSource or JsonWebKey"] = (V, opts) => { - // Union for (BufferSource or JsonWebKey) - if ( - ArrayBufferIsView(V) || - ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) - ) { - return webidl.converters.BufferSource(V, opts); - } - return webidl.converters.JsonWebKey(V, opts); -}; - -webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ - "public", - "private", - "secret", -]); - -webidl.converters.KeyFormat = webidl.createEnumConverter("KeyFormat", [ - "raw", - "pkcs8", - "spki", - "jwk", -]); - -webidl.converters.KeyUsage = webidl.createEnumConverter("KeyUsage", [ - "encrypt", - "decrypt", - "sign", - "verify", - "deriveKey", - "deriveBits", - "wrapKey", - "unwrapKey", -]); - -webidl.converters["sequence"] = webidl.createSequenceConverter( - webidl.converters.KeyUsage, -); - -webidl.converters.HashAlgorithmIdentifier = - webidl.converters.AlgorithmIdentifier; - -/** @type {webidl.Dictionary} */ -const dictAlgorithm = [{ - key: "name", - converter: webidl.converters.DOMString, - required: true, -}]; - -webidl.converters.Algorithm = webidl - .createDictionaryConverter("Algorithm", dictAlgorithm); - -webidl.converters.BigInteger = webidl.converters.Uint8Array; - -/** @type {webidl.Dictionary} */ -const dictRsaKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "modulusLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, 
enforceRange: true }), - required: true, - }, - { - key: "publicExponent", - converter: webidl.converters.BigInteger, - required: true, - }, -]; - -webidl.converters.RsaKeyGenParams = webidl - .createDictionaryConverter("RsaKeyGenParams", dictRsaKeyGenParams); - -const dictRsaHashedKeyGenParams = [ - ...new SafeArrayIterator(dictRsaKeyGenParams), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters.RsaHashedKeyGenParams = webidl.createDictionaryConverter( - "RsaHashedKeyGenParams", - dictRsaHashedKeyGenParams, -); - -const dictRsaHashedImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters.RsaHashedImportParams = webidl.createDictionaryConverter( - "RsaHashedImportParams", - dictRsaHashedImportParams, -); - -webidl.converters.NamedCurve = webidl.converters.DOMString; - -const dictEcKeyImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "namedCurve", - converter: webidl.converters.NamedCurve, - required: true, - }, -]; - -webidl.converters.EcKeyImportParams = webidl.createDictionaryConverter( - "EcKeyImportParams", - dictEcKeyImportParams, -); - -const dictEcKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "namedCurve", - converter: webidl.converters.NamedCurve, - required: true, - }, -]; - -webidl.converters.EcKeyGenParams = webidl - .createDictionaryConverter("EcKeyGenParams", dictEcKeyGenParams); - -const dictAesKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "length", - converter: (V, opts) => - webidl.converters["unsigned short"](V, { ...opts, enforceRange: true }), - required: true, - }, -]; - -webidl.converters.AesKeyGenParams = webidl - .createDictionaryConverter("AesKeyGenParams", dictAesKeyGenParams); - -const dictHmacKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - 
converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - }, -]; - -webidl.converters.HmacKeyGenParams = webidl - .createDictionaryConverter("HmacKeyGenParams", dictHmacKeyGenParams); - -const dictRsaPssParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "saltLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - required: true, - }, -]; - -webidl.converters.RsaPssParams = webidl - .createDictionaryConverter("RsaPssParams", dictRsaPssParams); - -const dictRsaOaepParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "label", - converter: webidl.converters["BufferSource"], - }, -]; - -webidl.converters.RsaOaepParams = webidl - .createDictionaryConverter("RsaOaepParams", dictRsaOaepParams); - -const dictEcdsaParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters["EcdsaParams"] = webidl - .createDictionaryConverter("EcdsaParams", dictEcdsaParams); - -const dictHmacImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - }, -]; - -webidl.converters.HmacImportParams = webidl - .createDictionaryConverter("HmacImportParams", dictHmacImportParams); - -const dictRsaOtherPrimesInfo = [ - { - key: "r", - converter: webidl.converters["DOMString"], - }, - { - key: "d", - converter: webidl.converters["DOMString"], - }, - { - key: "t", - converter: webidl.converters["DOMString"], - }, -]; - -webidl.converters.RsaOtherPrimesInfo = webidl.createDictionaryConverter( - "RsaOtherPrimesInfo", - dictRsaOtherPrimesInfo, -); 
-webidl.converters["sequence"] = webidl - .createSequenceConverter( - webidl.converters.RsaOtherPrimesInfo, - ); - -const dictJsonWebKey = [ - // Sections 4.2 and 4.3 of RFC7517. - // https://datatracker.ietf.org/doc/html/rfc7517#section-4 - { - key: "kty", - converter: webidl.converters["DOMString"], - }, - { - key: "use", - converter: webidl.converters["DOMString"], - }, - { - key: "key_ops", - converter: webidl.converters["sequence"], - }, - { - key: "alg", - converter: webidl.converters["DOMString"], - }, - // JSON Web Key Parameters Registration - { - key: "ext", - converter: webidl.converters["boolean"], - }, - // Section 6 of RFC7518 JSON Web Algorithms - // https://datatracker.ietf.org/doc/html/rfc7518#section-6 - { - key: "crv", - converter: webidl.converters["DOMString"], - }, - { - key: "x", - converter: webidl.converters["DOMString"], - }, - { - key: "y", - converter: webidl.converters["DOMString"], - }, - { - key: "d", - converter: webidl.converters["DOMString"], - }, - { - key: "n", - converter: webidl.converters["DOMString"], - }, - { - key: "e", - converter: webidl.converters["DOMString"], - }, - { - key: "p", - converter: webidl.converters["DOMString"], - }, - { - key: "q", - converter: webidl.converters["DOMString"], - }, - { - key: "dp", - converter: webidl.converters["DOMString"], - }, - { - key: "dq", - converter: webidl.converters["DOMString"], - }, - { - key: "qi", - converter: webidl.converters["DOMString"], - }, - { - key: "oth", - converter: webidl.converters["sequence"], - }, - { - key: "k", - converter: webidl.converters["DOMString"], - }, -]; - -webidl.converters.JsonWebKey = webidl.createDictionaryConverter( - "JsonWebKey", - dictJsonWebKey, -); - -const dictHkdfParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "salt", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "info", - converter: 
webidl.converters["BufferSource"], - required: true, - }, -]; - -webidl.converters.HkdfParams = webidl - .createDictionaryConverter("HkdfParams", dictHkdfParams); - -const dictPbkdf2Params = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "iterations", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - required: true, - }, - { - key: "salt", - converter: webidl.converters["BufferSource"], - required: true, - }, -]; - -webidl.converters.Pbkdf2Params = webidl - .createDictionaryConverter("Pbkdf2Params", dictPbkdf2Params); - -const dictAesDerivedKeyParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - required: true, - }, -]; - -const dictAesCbcParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "iv", - converter: webidl.converters["BufferSource"], - required: true, - }, -]; - -const dictAesGcmParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "iv", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "tagLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), - }, - { - key: "additionalData", - converter: webidl.converters["BufferSource"], - }, -]; - -const dictAesCtrParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "counter", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "length", - converter: (V, opts) => - webidl.converters["unsigned short"](V, { ...opts, enforceRange: true }), - required: true, - }, -]; - -webidl.converters.AesDerivedKeyParams = webidl - .createDictionaryConverter("AesDerivedKeyParams", dictAesDerivedKeyParams); - -webidl.converters.AesCbcParams = webidl - .createDictionaryConverter("AesCbcParams", 
dictAesCbcParams); - -webidl.converters.AesGcmParams = webidl - .createDictionaryConverter("AesGcmParams", dictAesGcmParams); - -webidl.converters.AesCtrParams = webidl - .createDictionaryConverter("AesCtrParams", dictAesCtrParams); - -webidl.converters.CryptoKey = webidl.createInterfaceConverter( - "CryptoKey", - CryptoKey.prototype, -); - -const dictCryptoKeyPair = [ - { - key: "publicKey", - converter: webidl.converters.CryptoKey, - }, - { - key: "privateKey", - converter: webidl.converters.CryptoKey, - }, -]; - -webidl.converters.CryptoKeyPair = webidl - .createDictionaryConverter("CryptoKeyPair", dictCryptoKeyPair); - -const dictEcdhKeyDeriveParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "public", - converter: webidl.converters.CryptoKey, - required: true, - }, -]; - -webidl.converters.EcdhKeyDeriveParams = webidl - .createDictionaryConverter("EcdhKeyDeriveParams", dictEcdhKeyDeriveParams); diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 7f0d1fdf0e..71e0f31dfe 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.113.0" +version = "0.121.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/decrypt.rs b/ext/crypto/decrypt.rs index 6c4d5b6ba5..fc54fe8182 100644 --- a/ext/crypto/decrypt.rs +++ b/ext/crypto/decrypt.rs @@ -20,6 +20,7 @@ use deno_core::error::custom_error; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use rsa::pkcs1::DecodeRsaPrivateKey; use rsa::PaddingScheme; @@ -98,7 +99,7 @@ pub async fn op_crypto_decrypt( tag_length, } => decrypt_aes_gcm(key, length, tag_length, iv, additional_data, &data), }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/ed25519.rs b/ext/crypto/ed25519.rs index 
898366bbc1..784583c6b8 100644 --- a/ext/crypto/ed25519.rs +++ b/ext/crypto/ed25519.rs @@ -12,7 +12,10 @@ use spki::der::Decode; use spki::der::Encode; #[op(fast)] -pub fn op_generate_ed25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) -> bool { +pub fn op_crypto_generate_ed25519_keypair( + pkey: &mut [u8], + pubkey: &mut [u8], +) -> bool { let mut rng = OsRng; rng.fill_bytes(pkey); @@ -25,7 +28,11 @@ pub fn op_generate_ed25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_sign_ed25519(key: &[u8], data: &[u8], signature: &mut [u8]) -> bool { +pub fn op_crypto_sign_ed25519( + key: &[u8], + data: &[u8], + signature: &mut [u8], +) -> bool { let pair = match Ed25519KeyPair::from_seed_unchecked(key) { Ok(p) => p, Err(_) => return false, @@ -35,7 +42,11 @@ pub fn op_sign_ed25519(key: &[u8], data: &[u8], signature: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_verify_ed25519(pubkey: &[u8], data: &[u8], signature: &[u8]) -> bool { +pub fn op_crypto_verify_ed25519( + pubkey: &[u8], + data: &[u8], + signature: &[u8], +) -> bool { ring::signature::UnparsedPublicKey::new(&ring::signature::ED25519, pubkey) .verify(data, signature) .is_ok() @@ -46,7 +57,7 @@ pub const ED25519_OID: const_oid::ObjectIdentifier = const_oid::ObjectIdentifier::new_unwrap("1.3.101.112"); #[op(fast)] -pub fn op_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. let pk_info = match spki::SubjectPublicKeyInfo::from_der(key_data) { Ok(pk_info) => pk_info, @@ -66,7 +77,7 @@ pub fn op_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. 
// This should probably use OneAsymmetricKey instead let pk_info = match PrivateKeyInfo::from_der(key_data) { @@ -92,7 +103,9 @@ pub fn op_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op] -pub fn op_export_spki_ed25519(pubkey: &[u8]) -> Result { +pub fn op_crypto_export_spki_ed25519( + pubkey: &[u8], +) -> Result { let key_info = spki::SubjectPublicKeyInfo { algorithm: spki::AlgorithmIdentifier { // id-Ed25519 @@ -105,7 +118,9 @@ pub fn op_export_spki_ed25519(pubkey: &[u8]) -> Result { } #[op] -pub fn op_export_pkcs8_ed25519(pkey: &[u8]) -> Result { +pub fn op_crypto_export_pkcs8_ed25519( + pkey: &[u8], +) -> Result { // This should probably use OneAsymmetricKey instead let pk_info = rsa::pkcs8::PrivateKeyInfo { public_key: None, @@ -123,7 +138,7 @@ pub fn op_export_pkcs8_ed25519(pkey: &[u8]) -> Result { // 'x' from Section 2 of RFC 8037 // https://www.rfc-editor.org/rfc/rfc8037#section-2 #[op] -pub fn op_jwk_x_ed25519(pkey: &[u8]) -> Result { +pub fn op_crypto_jwk_x_ed25519(pkey: &[u8]) -> Result { let pair = Ed25519KeyPair::from_seed_unchecked(pkey)?; Ok(base64::encode_config( pair.public_key().as_ref(), diff --git a/ext/crypto/encrypt.rs b/ext/crypto/encrypt.rs index f34e0cbc6b..2831ca0f4a 100644 --- a/ext/crypto/encrypt.rs +++ b/ext/crypto/encrypt.rs @@ -19,6 +19,7 @@ use ctr::Ctr64BE; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use rand::rngs::OsRng; use rsa::pkcs1::DecodeRsaPublicKey; @@ -99,7 +100,7 @@ pub async fn op_crypto_encrypt( key_length, } => encrypt_aes_ctr(key, key_length, &counter, ctr_length, &data), }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/generate_key.rs b/ext/crypto/generate_key.rs index 2a9452c433..426c61376e 100644 --- a/ext/crypto/generate_key.rs +++ b/ext/crypto/generate_key.rs @@ -2,6 +2,7 @@ use 
deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use elliptic_curve::rand_core::OsRng; use num_traits::FromPrimitive; @@ -56,7 +57,7 @@ pub async fn op_crypto_generate_key( generate_key_hmac(hash, length) } }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index 6056b02a45..dc5faf5e7e 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -10,6 +10,7 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::OpState; use deno_core::ZeroCopyBuf; use serde::Deserialize; @@ -88,22 +89,22 @@ deno_core::extension!(deno_crypto, op_crypto_unwrap_key, op_crypto_base64url_decode, op_crypto_base64url_encode, - x25519::op_generate_x25519_keypair, - x25519::op_derive_bits_x25519, - x25519::op_import_spki_x25519, - x25519::op_import_pkcs8_x25519, - ed25519::op_generate_ed25519_keypair, - ed25519::op_import_spki_ed25519, - ed25519::op_import_pkcs8_ed25519, - ed25519::op_sign_ed25519, - ed25519::op_verify_ed25519, - ed25519::op_export_spki_ed25519, - ed25519::op_export_pkcs8_ed25519, - ed25519::op_jwk_x_ed25519, - x25519::op_export_spki_x25519, - x25519::op_export_pkcs8_x25519, + x25519::op_crypto_generate_x25519_keypair, + x25519::op_crypto_derive_bits_x25519, + x25519::op_crypto_import_spki_x25519, + x25519::op_crypto_import_pkcs8_x25519, + ed25519::op_crypto_generate_ed25519_keypair, + ed25519::op_crypto_import_spki_ed25519, + ed25519::op_crypto_import_pkcs8_ed25519, + ed25519::op_crypto_sign_ed25519, + ed25519::op_crypto_verify_ed25519, + ed25519::op_crypto_export_spki_ed25519, + ed25519::op_crypto_export_pkcs8_ed25519, + ed25519::op_crypto_jwk_x_ed25519, + x25519::op_crypto_export_spki_x25519, + x25519::op_crypto_export_pkcs8_x25519, ], - esm = [ "00_crypto.js", "01_webidl.js" ], + esm = [ 
"00_crypto.js" ], options = { maybe_seed: Option, }, @@ -115,10 +116,11 @@ deno_core::extension!(deno_crypto, ); #[op] -pub fn op_crypto_base64url_decode(data: String) -> ZeroCopyBuf { - let data: Vec = - base64::decode_config(data, base64::URL_SAFE_NO_PAD).unwrap(); - data.into() +pub fn op_crypto_base64url_decode( + data: String, +) -> Result { + let data: Vec = base64::decode_config(data, base64::URL_SAFE_NO_PAD)?; + Ok(data.into()) } #[op] @@ -601,7 +603,7 @@ pub async fn op_crypto_subtle_digest( algorithm: CryptoHash, data: ZeroCopyBuf, ) -> Result { - let output = tokio::task::spawn_blocking(move || { + let output = spawn_blocking(move || { digest::digest(algorithm.into(), &data) .as_ref() .to_vec() diff --git a/ext/crypto/x25519.rs b/ext/crypto/x25519.rs index 0ecdf4ddc0..99914e14e5 100644 --- a/ext/crypto/x25519.rs +++ b/ext/crypto/x25519.rs @@ -12,7 +12,7 @@ use spki::der::Decode; use spki::der::Encode; #[op(fast)] -pub fn op_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { +pub fn op_crypto_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { // u-coordinate of the base point. 
const X25519_BASEPOINT_BYTES: [u8; 32] = [ 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -32,7 +32,11 @@ pub fn op_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { const MONTGOMERY_IDENTITY: MontgomeryPoint = MontgomeryPoint([0; 32]); #[op(fast)] -pub fn op_derive_bits_x25519(k: &[u8], u: &[u8], secret: &mut [u8]) -> bool { +pub fn op_crypto_derive_bits_x25519( + k: &[u8], + u: &[u8], + secret: &mut [u8], +) -> bool { let k: [u8; 32] = k.try_into().expect("Expected byteLength 32"); let u: [u8; 32] = u.try_into().expect("Expected byteLength 32"); let sh_sec = x25519_dalek::x25519(k, u); @@ -49,7 +53,7 @@ pub const X25519_OID: const_oid::ObjectIdentifier = const_oid::ObjectIdentifier::new_unwrap("1.3.101.110"); #[op(fast)] -pub fn op_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. let pk_info = match spki::SubjectPublicKeyInfo::from_der(key_data) { Ok(pk_info) => pk_info, @@ -69,7 +73,7 @@ pub fn op_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. 
// This should probably use OneAsymmetricKey instead let pk_info = match PrivateKeyInfo::from_der(key_data) { @@ -95,7 +99,9 @@ pub fn op_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op] -pub fn op_export_spki_x25519(pubkey: &[u8]) -> Result { +pub fn op_crypto_export_spki_x25519( + pubkey: &[u8], +) -> Result { let key_info = spki::SubjectPublicKeyInfo { algorithm: spki::AlgorithmIdentifier { // id-X25519 @@ -108,7 +114,9 @@ pub fn op_export_spki_x25519(pubkey: &[u8]) -> Result { } #[op] -pub fn op_export_pkcs8_x25519(pkey: &[u8]) -> Result { +pub fn op_crypto_export_pkcs8_x25519( + pkey: &[u8], +) -> Result { // This should probably use OneAsymmetricKey instead let pk_info = rsa::pkcs8::PrivateKeyInfo { public_key: None, diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index a96d0da3b8..6d934a7c1c 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -28,9 +28,9 @@ const { ArrayPrototypeJoin, ArrayPrototypeSplice, ArrayPrototypeFilter, - ObjectPrototypeHasOwnProperty, ObjectEntries, - RegExpPrototypeTest, + ObjectHasOwn, + RegExpPrototypeExec, SafeArrayIterator, SafeRegExp, Symbol, @@ -79,7 +79,7 @@ function fillHeaders(headers, object) { } } else { for (const key in object) { - if (!ObjectPrototypeHasOwnProperty(object, key)) { + if (!ObjectHasOwn(object, key)) { continue; } appendHeader(headers, key, object[key]); @@ -102,10 +102,10 @@ function appendHeader(headers, name, value) { value = normalizeHeaderValue(value); // 2. 
- if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } - if (RegExpPrototypeTest(ILLEGAL_VALUE_CHARS, value)) { + if (RegExpPrototypeExec(ILLEGAL_VALUE_CHARS, value) !== null) { throw new TypeError("Header value is not valid."); } @@ -251,10 +251,7 @@ class Headers { constructor(init = undefined) { const prefix = "Failed to construct 'Headers'"; if (init !== undefined) { - init = webidl.converters["HeadersInit"](init, { - prefix, - context: "Argument 1", - }); + init = webidl.converters["HeadersInit"](init, prefix, "Argument 1"); } this[webidl.brand] = webidl.brand; @@ -272,14 +269,8 @@ class Headers { webidl.assertBranded(this, HeadersPrototype); const prefix = "Failed to execute 'append' on 'Headers'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters["ByteString"](value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); + value = webidl.converters["ByteString"](value, prefix, "Argument 2"); appendHeader(this, name, value); } @@ -289,12 +280,9 @@ class Headers { delete(name) { const prefix = "Failed to execute 'delete' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } if (this[_guard] == "immutable") { @@ -317,12 +305,9 @@ class Headers { get(name) { const prefix = "Failed to execute 'get' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = 
webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } @@ -336,12 +321,9 @@ class Headers { has(name) { const prefix = "Failed to execute 'has' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } @@ -363,22 +345,16 @@ class Headers { webidl.assertBranded(this, HeadersPrototype); const prefix = "Failed to execute 'set' on 'Headers'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters["ByteString"](value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); + value = webidl.converters["ByteString"](value, prefix, "Argument 2"); value = normalizeHeaderValue(value); // 2. 
- if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } - if (RegExpPrototypeTest(ILLEGAL_VALUE_CHARS, value)) { + if (RegExpPrototypeExec(ILLEGAL_VALUE_CHARS, value) !== null) { throw new TypeError("Header value is not valid."); } @@ -420,18 +396,29 @@ webidl.mixinPairIterable("Headers", Headers, _iterableHeaders, 0, 1); webidl.configurePrototype(Headers); const HeadersPrototype = Headers.prototype; -webidl.converters["HeadersInit"] = (V, opts) => { +webidl.converters["HeadersInit"] = (V, prefix, context, opts) => { // Union for (sequence> or record) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence>"](V, opts); + return webidl.converters["sequence>"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters["record"](V, opts); + return webidl.converters["record"]( + V, + prefix, + context, + opts, + ); } throw webidl.makeException( TypeError, "The provided value is not of type '(sequence> or record)'", - opts, + prefix, + context, ); }; webidl.converters["Headers"] = webidl.createInterfaceConverter( diff --git a/ext/fetch/21_formdata.js b/ext/fetch/21_formdata.js index 1961643d2e..1f0f00088f 100644 --- a/ext/fetch/21_formdata.js +++ b/ext/fetch/21_formdata.js @@ -31,6 +31,7 @@ const { SafeRegExp, Symbol, StringFromCharCode, + StringPrototypeCharCodeAt, StringPrototypeTrim, StringPrototypeSlice, StringPrototypeSplit, @@ -41,6 +42,7 @@ const { StringPrototypeReplaceAll, TypeError, TypedArrayPrototypeSubarray, + Uint8Array, } = primordials; const entryList = Symbol("entry list"); @@ -103,26 +105,26 @@ class FormData { const prefix = "Failed to execute 'append' on 'FormData'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = 
webidl.converters["USVString"](name, prefix, "Argument 1"); if (ObjectPrototypeIsPrototypeOf(BlobPrototype, valueOrBlobValue)) { - valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["Blob"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (filename !== undefined) { - filename = webidl.converters["USVString"](filename, { + filename = webidl.converters["USVString"]( + filename, prefix, - context: "Argument 3", - }); + "Argument 3", + ); } } else { - valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["USVString"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } const entry = createEntry(name, valueOrBlobValue, filename); @@ -139,10 +141,7 @@ class FormData { const prefix = "Failed to execute 'name' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const list = this[entryList]; for (let i = 0; i < list.length; i++) { @@ -162,10 +161,7 @@ class FormData { const prefix = "Failed to execute 'get' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const entries = this[entryList]; for (let i = 0; i < entries.length; ++i) { @@ -184,10 +180,7 @@ class FormData { const prefix = "Failed to execute 'getAll' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const returnList = []; const entries = this[entryList]; @@ -207,10 +200,7 @@ class FormData { const prefix = "Failed to 
execute 'has' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const entries = this[entryList]; for (let i = 0; i < entries.length; ++i) { @@ -231,26 +221,26 @@ class FormData { const prefix = "Failed to execute 'set' on 'FormData'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); if (ObjectPrototypeIsPrototypeOf(BlobPrototype, valueOrBlobValue)) { - valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["Blob"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (filename !== undefined) { - filename = webidl.converters["USVString"](filename, { + filename = webidl.converters["USVString"]( + filename, prefix, - context: "Argument 3", - }); + "Argument 3", + ); } } else { - valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["USVString"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } const entry = createEntry(name, valueOrBlobValue, filename); @@ -370,6 +360,20 @@ function parseContentDisposition(value) { return params; } +/** + * Decodes a string containing UTF-8 mistakenly decoded as Latin-1 and + * decodes it correctly. 
+ * @param {string} latin1String + * @returns {string} + */ +function decodeLatin1StringAsUtf8(latin1String) { + const buffer = new Uint8Array(latin1String.length); + for (let i = 0; i < latin1String.length; i++) { + buffer[i] = StringPrototypeCharCodeAt(latin1String, i); + } + return core.decode(buffer); +} + const CRLF = "\r\n"; const LF = StringPrototypeCodePointAt(CRLF, 1); const CR = StringPrototypeCodePointAt(CRLF, 0); @@ -477,23 +481,31 @@ class MultipartParser { i - boundaryIndex - 1, ); // https://fetch.spec.whatwg.org/#ref-for-dom-body-formdata - const filename = MapPrototypeGet(disposition, "filename"); - const name = MapPrototypeGet(disposition, "name"); + // These are UTF-8 decoded as if it was Latin-1. + // TODO(@andreubotella): Maybe we shouldn't be parsing entry headers + // as Latin-1. + const latin1Filename = MapPrototypeGet(disposition, "filename"); + const latin1Name = MapPrototypeGet(disposition, "name"); state = 5; // Reset boundaryIndex = 0; headerText = ""; - if (!name) { + if (!latin1Name) { continue; // Skip, unknown name } - if (filename) { + const name = decodeLatin1StringAsUtf8(latin1Name); + if (latin1Filename) { const blob = new Blob([content], { type: headers.get("Content-Type") || "application/octet-stream", }); - formData.append(name, blob, filename); + formData.append( + name, + blob, + decodeLatin1StringAsUtf8(latin1Filename), + ); } else { formData.append(name, core.decode(content)); } diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 9dbd58fa4b..9fe00b1445 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -38,7 +38,6 @@ import { const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, - ArrayBufferPrototypeGetByteLength, ArrayBufferIsView, ArrayPrototypeMap, DataViewPrototypeGetBuffer, @@ -394,44 +393,27 @@ function extractBody(object) { } } else if (ArrayBufferIsView(object)) { const tag = TypedArrayPrototypeGetSymbolToStringTag(object); - if (tag === "Uint8Array") { - // 
Fast(er) path for common case of Uint8Array - const copy = TypedArrayPrototypeSlice( - object, - TypedArrayPrototypeGetByteOffset(/** @type {Uint8Array} */ (object)), - TypedArrayPrototypeGetByteLength(/** @type {Uint8Array} */ (object)), - ); - source = copy; - } else if (tag !== undefined) { + if (tag !== undefined) { // TypedArray - const copy = TypedArrayPrototypeSlice( - new Uint8Array( + if (tag !== "Uint8Array") { + // TypedArray, unless it's Uint8Array + object = new Uint8Array( TypedArrayPrototypeGetBuffer(/** @type {Uint8Array} */ (object)), TypedArrayPrototypeGetByteOffset(/** @type {Uint8Array} */ (object)), TypedArrayPrototypeGetByteLength(/** @type {Uint8Array} */ (object)), - ), - ); - source = copy; + ); + } } else { // DataView - const copy = TypedArrayPrototypeSlice( - new Uint8Array( - DataViewPrototypeGetBuffer(/** @type {DataView} */ (object)), - DataViewPrototypeGetByteOffset(/** @type {DataView} */ (object)), - DataViewPrototypeGetByteLength(/** @type {DataView} */ (object)), - ), + object = new Uint8Array( + DataViewPrototypeGetBuffer(/** @type {DataView} */ (object)), + DataViewPrototypeGetByteOffset(/** @type {DataView} */ (object)), + DataViewPrototypeGetByteLength(/** @type {DataView} */ (object)), ); - source = copy; } + source = TypedArrayPrototypeSlice(object); } else if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, object)) { - const copy = TypedArrayPrototypeSlice( - new Uint8Array( - object, - 0, - ArrayBufferPrototypeGetByteLength(object), - ), - ); - source = copy; + source = TypedArrayPrototypeSlice(new Uint8Array(object)); } else if (ObjectPrototypeIsPrototypeOf(FormDataPrototype, object)) { const res = formDataToBlob(object); stream = res.stream(); @@ -442,6 +424,7 @@ function extractBody(object) { ObjectPrototypeIsPrototypeOf(URLSearchParamsPrototype, object) ) { // TODO(@satyarohith): not sure what primordial here. 
+ // deno-lint-ignore prefer-primordials source = object.toString(); contentType = "application/x-www-form-urlencoded;charset=UTF-8"; } else if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, object)) { @@ -466,16 +449,16 @@ function extractBody(object) { return { body, contentType }; } -webidl.converters["BodyInit_DOMString"] = (V, opts) => { +webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => { // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString) if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, V)) { - return webidl.converters["ReadableStream"](V, opts); + return webidl.converters["ReadableStream"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, opts); + return webidl.converters["Blob"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(FormDataPrototype, V)) { - return webidl.converters["FormData"](V, opts); + return webidl.converters["FormData"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(URLSearchParamsPrototype, V)) { - return webidl.converters["URLSearchParams"](V, opts); + return webidl.converters["URLSearchParams"](V, prefix, context, opts); } if (typeof V === "object") { if ( @@ -483,16 +466,16 @@ webidl.converters["BodyInit_DOMString"] = (V, opts) => { // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } // BodyInit conversion is passed to extractBody(), which calls core.encode(). // core.encode() will UTF-8 encode strings with replacement, being equivalent to the USV normalization. 
// Therefore we can convert to DOMString instead of USVString and avoid a costly redundant conversion. - return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["BodyInit_DOMString?"] = webidl.createNullableConverter( webidl.converters["BodyInit_DOMString"], diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 798346ae62..daf77a834e 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -10,7 +10,7 @@ /// import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { byteUpperCase, HTTP_TOKEN_CODE_POINT_RE, @@ -36,7 +36,8 @@ const { ArrayPrototypeSplice, ObjectKeys, ObjectPrototypeIsPrototypeOf, - RegExpPrototypeTest, + RegExpPrototypeExec, + StringPrototypeStartsWith, Symbol, SymbolFor, TypeError, @@ -90,7 +91,11 @@ function processUrlList(urlList, urlListProcessed) { */ function newInnerRequest(method, url, headerList, body, maybeBlob) { let blobUrlEntry = null; - if (maybeBlob && typeof url === "string" && url.startsWith("blob:")) { + if ( + maybeBlob && + typeof url === "string" && + StringPrototypeStartsWith(url, "blob:") + ) { blobUrlEntry = blobFromObjectUrl(url); } return { @@ -222,7 +227,7 @@ function validateAndNormalizeMethod(m) { } // Regular path - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, m)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, m) === null) { throw new TypeError("Method is not valid."); } const upperCase = byteUpperCase(m); @@ -274,14 +279,12 @@ class Request { constructor(input, init = {}) { const prefix = "Failed to construct 'Request'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters["RequestInfo_DOMString"](input, { + input = webidl.converters["RequestInfo_DOMString"]( + input, prefix, - context: "Argument 1", - }); - init 
= webidl.converters["RequestInit"](init, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + init = webidl.converters["RequestInit"](init, prefix, "Argument 2"); this[webidl.brand] = webidl.brand; @@ -341,7 +344,8 @@ class Request { throw webidl.makeException( TypeError, "`client` must be a Deno.HttpClient", - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); } request.clientRid = init.client?.rid ?? null; @@ -500,15 +504,15 @@ webidl.converters["Request"] = webidl.createInterfaceConverter( "Request", RequestPrototype, ); -webidl.converters["RequestInfo_DOMString"] = (V, opts) => { +webidl.converters["RequestInfo_DOMString"] = (V, prefix, context, opts) => { // Union for (Request or USVString) if (typeof V == "object") { if (ObjectPrototypeIsPrototypeOf(RequestPrototype, V)) { - return webidl.converters["Request"](V, opts); + return webidl.converters["Request"](V, prefix, context, opts); } } // Passed to new URL(...) which implicitly converts DOMString -> USVString - return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["RequestRedirect"] = webidl.createEnumConverter( "RequestRedirect", diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index ffbfe49360..dc4e754342 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -12,7 +12,7 @@ const core = globalThis.Deno.core; import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { byteLowerCase, HTTP_TAB_OR_SPACE, @@ -37,7 +37,7 @@ const { ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, RangeError, - RegExpPrototypeTest, + RegExpPrototypeExec, SafeArrayIterator, SafeRegExp, Symbol, @@ -179,7 +179,7 @@ function initializeAResponse(response, init, bodyWithType) { // 2. 
if ( init.statusText && - !RegExpPrototypeTest(REASON_PHRASE_RE, init.statusText) + RegExpPrototypeExec(REASON_PHRASE_RE, init.statusText) === null ) { throw new TypeError("Status text is not valid."); } @@ -257,14 +257,8 @@ class Response { */ static redirect(url, status = 302) { const prefix = "Failed to call 'Response.redirect'"; - url = webidl.converters["USVString"](url, { - prefix, - context: "Argument 1", - }); - status = webidl.converters["unsigned short"](status, { - prefix, - context: "Argument 2", - }); + url = webidl.converters["USVString"](url, prefix, "Argument 1"); + status = webidl.converters["unsigned short"](status, prefix, "Argument 2"); const baseURL = getLocationHref(); const parsedURL = new URL(url, baseURL); @@ -291,10 +285,7 @@ class Response { static json(data = undefined, init = {}) { const prefix = "Failed to call 'Response.json'"; data = webidl.converters.any(data); - init = webidl.converters["ResponseInit_fast"](init, { - prefix, - context: "Argument 2", - }); + init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); const str = serializeJSValueToJSONString(data); const res = extractBody(str); @@ -315,14 +306,8 @@ class Response { */ constructor(body = null, init = undefined) { const prefix = "Failed to construct 'Response'"; - body = webidl.converters["BodyInit_DOMString?"](body, { - prefix, - context: "Argument 1", - }); - init = webidl.converters["ResponseInit_fast"](init, { - prefix, - context: "Argument 2", - }); + body = webidl.converters["BodyInit_DOMString?"](body, prefix, "Argument 1"); + init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); this[_response] = newInnerResponse(); this[_headers] = headersFromHeaderList( @@ -463,7 +448,12 @@ webidl.converters["ResponseInit"] = webidl.createDictionaryConverter( converter: webidl.converters["HeadersInit"], }], ); -webidl.converters["ResponseInit_fast"] = function (init, opts) { +webidl.converters["ResponseInit_fast"] = function ( + init, + 
prefix, + context, + opts, +) { if (init === undefined || init === null) { return { status: 200, statusText: "", headers: undefined }; } @@ -482,7 +472,7 @@ webidl.converters["ResponseInit_fast"] = function (init, opts) { return { status, statusText, headers }; } // Slow default path - return webidl.converters["ResponseInit"](init, opts); + return webidl.converters["ResponseInit"](init, prefix, context, opts); }; /** diff --git a/ext/fetch/26_fetch.js b/ext/fetch/26_fetch.js index 42e1ae962a..5084fab343 100644 --- a/ext/fetch/26_fetch.js +++ b/ext/fetch/26_fetch.js @@ -523,10 +523,11 @@ function handleWasmStreaming(source, rid) { // This implements part of // https://webassembly.github.io/spec/web-api/#compile-a-potential-webassembly-response try { - const res = webidl.converters["Response"](source, { - prefix: "Failed to call 'WebAssembly.compileStreaming'", - context: "Argument 1", - }); + const res = webidl.converters["Response"]( + source, + "Failed to call 'WebAssembly.compileStreaming'", + "Argument 1", + ); // 2.3. 
// The spec is ambiguous here, see diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 754a8d3325..0ec87fe3e1 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.123.0" +version = "0.131.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 17f30d8ed3..a36512c774 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -3,7 +3,16 @@ mod byte_stream; mod fs_fetch_handler; -use data_url::DataUrl; +use std::borrow::Cow; +use std::cell::RefCell; +use std::cmp::min; +use std::convert::From; +use std::path::Path; +use std::path::PathBuf; +use std::pin::Pin; +use std::rc::Rc; +use std::sync::Arc; + use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::stream::Peekable; @@ -29,6 +38,9 @@ use deno_core::ResourceId; use deno_core::ZeroCopyBuf; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; +use deno_tls::RootCertStoreProvider; + +use data_url::DataUrl; use http::header::CONTENT_LENGTH; use http::Uri; use reqwest::header::HeaderMap; @@ -46,14 +58,6 @@ use reqwest::RequestBuilder; use reqwest::Response; use serde::Deserialize; use serde::Serialize; -use std::borrow::Cow; -use std::cell::RefCell; -use std::cmp::min; -use std::convert::From; -use std::path::Path; -use std::path::PathBuf; -use std::pin::Pin; -use std::rc::Rc; use tokio::sync::mpsc; // Re-export reqwest and data_url @@ -62,12 +66,12 @@ pub use reqwest; pub use fs_fetch_handler::FsFetchHandler; -use crate::byte_stream::MpscByteStream; +pub use crate::byte_stream::MpscByteStream; #[derive(Clone)] pub struct Options { pub user_agent: String, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub proxy: Option, pub request_builder_hook: Option Result>, @@ -76,11 +80,20 @@ pub struct Options { pub file_fetch_handler: Rc, } +impl Options { + pub fn root_cert_store(&self) -> Result, AnyError> { + 
Ok(match &self.root_cert_store_provider { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } +} + impl Default for Options { fn default() -> Self { Self { user_agent: "".to_string(), - root_cert_store: None, + root_cert_store_provider: None, proxy: None, request_builder_hook: None, unsafely_ignore_certificate_errors: None, @@ -111,18 +124,7 @@ deno_core::extension!(deno_fetch, options: Options, }, state = |state, options| { - state.put::(options.options.clone()); - state.put::({ - create_http_client( - &options.options.user_agent, - options.options.root_cert_store, - vec![], - options.options.proxy, - options.options.unsafely_ignore_certificate_errors, - options.options.client_cert_chain_and_key - ) - .unwrap() - }); + state.put::(options.options); }, ); @@ -184,9 +186,37 @@ pub fn get_declaration() -> PathBuf { #[derive(Serialize)] #[serde(rename_all = "camelCase")] pub struct FetchReturn { - request_rid: ResourceId, - request_body_rid: Option, - cancel_handle_rid: Option, + pub request_rid: ResourceId, + pub request_body_rid: Option, + pub cancel_handle_rid: Option, +} + +pub fn get_or_create_client_from_state( + state: &mut OpState, +) -> Result { + if let Some(client) = state.try_borrow::() { + Ok(client.clone()) + } else { + let options = state.borrow::(); + let client = create_http_client( + &options.user_agent, + CreateHttpClientOptions { + root_cert_store: options.root_cert_store()?, + ca_certs: vec![], + proxy: options.proxy.clone(), + unsafely_ignore_certificate_errors: options + .unsafely_ignore_certificate_errors + .clone(), + client_cert_chain_and_key: options.client_cert_chain_and_key.clone(), + pool_max_idle_per_host: None, + pool_idle_timeout: None, + http1: true, + http2: true, + }, + )?; + state.put::(client.clone()); + Ok(client) + } } #[op] @@ -207,8 +237,7 @@ where let r = state.resource_table.get::(rid)?; r.client.clone() } else { - let client = state.borrow::(); - client.clone() + 
get_or_create_client_from_state(state)? }; let method = Method::from_bytes(&method)?; @@ -281,7 +310,7 @@ where } Some(data) => { // If a body is passed, we use it, and don't return a body for streaming. - request = request.body(Vec::from(&*data)); + request = request.body(data.to_vec()); None } } @@ -379,12 +408,12 @@ where #[derive(Serialize)] #[serde(rename_all = "camelCase")] pub struct FetchResponse { - status: u16, - status_text: String, - headers: Vec<(ByteString, ByteString)>, - url: String, - response_rid: ResourceId, - content_length: Option, + pub status: u16, + pub status_text: String, + pub headers: Vec<(ByteString, ByteString)>, + pub url: String, + pub response_rid: ResourceId, + pub content_length: Option, } #[op] @@ -441,8 +470,8 @@ pub async fn op_fetch_send( type CancelableResponseResult = Result, Canceled>; -struct FetchRequestResource( - Pin>>, +pub struct FetchRequestResource( + pub Pin>>, ); impl Resource for FetchRequestResource { @@ -451,7 +480,7 @@ impl Resource for FetchRequestResource { } } -struct FetchCancelHandle(Rc); +pub struct FetchCancelHandle(pub Rc); impl Resource for FetchCancelHandle { fn name(&self) -> Cow { @@ -464,8 +493,8 @@ impl Resource for FetchCancelHandle { } pub struct FetchRequestBodyResource { - body: AsyncRefCell>>, - cancel: CancelHandle, + pub body: AsyncRefCell>>, + pub cancel: CancelHandle, } impl Resource for FetchRequestBodyResource { @@ -516,10 +545,10 @@ impl Resource for FetchRequestBodyResource { type BytesStream = Pin> + Unpin>>; -struct FetchResponseBodyResource { - reader: AsyncRefCell>, - cancel: CancelHandle, - size: Option, +pub struct FetchResponseBodyResource { + pub reader: AsyncRefCell>, + pub cancel: CancelHandle, + pub size: Option, } impl Resource for FetchResponseBodyResource { @@ -569,8 +598,8 @@ impl Resource for FetchResponseBodyResource { } } -struct HttpClientResource { - client: Client, +pub struct HttpClientResource { + pub client: Client, } impl Resource for HttpClientResource { @@ 
-585,19 +614,36 @@ impl HttpClientResource { } } +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub enum PoolIdleTimeout { + State(bool), + Specify(u64), +} + #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] -pub struct CreateHttpClientOptions { +pub struct CreateHttpClientArgs { ca_certs: Vec, proxy: Option, cert_chain: Option, private_key: Option, + pool_max_idle_per_host: Option, + pool_idle_timeout: Option, + #[serde(default = "default_true")] + http1: bool, + #[serde(default = "default_true")] + http2: bool, +} + +fn default_true() -> bool { + true } #[op] pub fn op_fetch_custom_client( state: &mut OpState, - args: CreateHttpClientOptions, + args: CreateHttpClientArgs, ) -> Result where FP: FetchPermissions + 'static, @@ -632,35 +678,81 @@ where let client = create_http_client( &options.user_agent, - options.root_cert_store.clone(), - ca_certs, - args.proxy, - options.unsafely_ignore_certificate_errors.clone(), - client_cert_chain_and_key, + CreateHttpClientOptions { + root_cert_store: options.root_cert_store()?, + ca_certs, + proxy: args.proxy, + unsafely_ignore_certificate_errors: options + .unsafely_ignore_certificate_errors + .clone(), + client_cert_chain_and_key, + pool_max_idle_per_host: args.pool_max_idle_per_host, + pool_idle_timeout: args.pool_idle_timeout.and_then( + |timeout| match timeout { + PoolIdleTimeout::State(true) => None, + PoolIdleTimeout::State(false) => Some(None), + PoolIdleTimeout::Specify(specify) => Some(Some(specify)), + }, + ), + http1: args.http1, + http2: args.http2, + }, )?; let rid = state.resource_table.add(HttpClientResource::new(client)); Ok(rid) } +#[derive(Debug, Clone)] +pub struct CreateHttpClientOptions { + pub root_cert_store: Option, + pub ca_certs: Vec>, + pub proxy: Option, + pub unsafely_ignore_certificate_errors: Option>, + pub client_cert_chain_and_key: Option<(String, String)>, + pub pool_max_idle_per_host: Option, + pub pool_idle_timeout: Option>, + pub http1: bool, + 
pub http2: bool, +} + +impl Default for CreateHttpClientOptions { + fn default() -> Self { + CreateHttpClientOptions { + root_cert_store: None, + ca_certs: vec![], + proxy: None, + unsafely_ignore_certificate_errors: None, + client_cert_chain_and_key: None, + pool_max_idle_per_host: None, + pool_idle_timeout: None, + http1: true, + http2: true, + } + } +} + /// Create new instance of async reqwest::Client. This client supports /// proxies and doesn't follow redirects. pub fn create_http_client( user_agent: &str, - root_cert_store: Option, - ca_certs: Vec>, - proxy: Option, - unsafely_ignore_certificate_errors: Option>, - client_cert_chain_and_key: Option<(String, String)>, + options: CreateHttpClientOptions, ) -> Result { let mut tls_config = deno_tls::create_client_config( - root_cert_store, - ca_certs, - unsafely_ignore_certificate_errors, - client_cert_chain_and_key, + options.root_cert_store, + options.ca_certs, + options.unsafely_ignore_certificate_errors, + options.client_cert_chain_and_key, )?; - tls_config.alpn_protocols = vec!["h2".into(), "http/1.1".into()]; + let mut alpn_protocols = vec![]; + if options.http2 { + alpn_protocols.push("h2".into()); + } + if options.http1 { + alpn_protocols.push("http/1.1".into()); + } + tls_config.alpn_protocols = alpn_protocols; let mut headers = HeaderMap::new(); headers.insert(USER_AGENT, user_agent.parse().unwrap()); @@ -669,7 +761,7 @@ pub fn create_http_client( .default_headers(headers) .use_preconfigured_tls(tls_config); - if let Some(proxy) = proxy { + if let Some(proxy) = options.proxy { let mut reqwest_proxy = reqwest::Proxy::all(&proxy.url)?; if let Some(basic_auth) = &proxy.basic_auth { reqwest_proxy = @@ -678,6 +770,24 @@ pub fn create_http_client( builder = builder.proxy(reqwest_proxy); } - // unwrap here because it can only fail when native TLS is used. 
- Ok(builder.build().unwrap()) + if let Some(pool_max_idle_per_host) = options.pool_max_idle_per_host { + builder = builder.pool_max_idle_per_host(pool_max_idle_per_host); + } + + if let Some(pool_idle_timeout) = options.pool_idle_timeout { + builder = builder.pool_idle_timeout( + pool_idle_timeout.map(std::time::Duration::from_millis), + ); + } + + match (options.http1, options.http2) { + (true, false) => builder = builder.http1_only(), + (false, true) => builder = builder.http2_prior_knowledge(), + (true, true) => {} + (false, false) => { + return Err(type_error("Either `http1` or `http2` needs to be true")) + } + } + + builder.build().map_err(|e| e.into()) } diff --git a/ext/ffi/00_ffi.js b/ext/ffi/00_ffi.js index f36690226e..67cb13ab6d 100644 --- a/ext/ffi/00_ffi.js +++ b/ext/ffi/00_ffi.js @@ -10,7 +10,7 @@ const { ArrayPrototypeJoin, DataViewPrototypeGetByteLength, ObjectDefineProperty, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, Number, NumberIsSafeInteger, @@ -426,7 +426,7 @@ class UnsafeCallback { close() { this.#refcount = 0; - core.close(this.#rid); + ops.op_ffi_unsafe_callback_close(this.#rid); } } @@ -439,7 +439,7 @@ class DynamicLibrary { constructor(path, symbols) { ({ 0: this.#rid, 1: this.symbols } = ops.op_ffi_load({ path, symbols })); for (const symbol in symbols) { - if (!ObjectPrototypeHasOwnProperty(symbols, symbol)) { + if (!ObjectHasOwn(symbols, symbol)) { continue; } diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 8486754f45..a1d2a68c34 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.86.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -17,7 +17,8 @@ path = "lib.rs" deno_core.workspace = true dlopen.workspace = true dynasmrt = "1.2.3" -libffi = "3.1.0" +libffi = "=3.1.0" +libffi-sys = "=2.1.0" # temporary pin for downgrade to Rust 1.69 serde.workspace = true serde-value = "0.7" 
serde_json = "1.0" diff --git a/ext/ffi/call.rs b/ext/ffi/call.rs index 98186936cf..21358d851e 100644 --- a/ext/ffi/call.rs +++ b/ext/ffi/call.rs @@ -15,6 +15,7 @@ use deno_core::op; use deno_core::serde_json::Value; use deno_core::serde_v8; use deno_core::serde_v8::ExternalPointer; +use deno_core::task::spawn_blocking; use deno_core::v8; use deno_core::OpState; use deno_core::ResourceId; @@ -298,7 +299,7 @@ where .map(|v| v8::Local::::try_from(v.v8_value).unwrap()); let out_buffer_ptr = out_buffer_as_ptr(scope, out_buffer); - let join_handle = tokio::task::spawn_blocking(move || { + let join_handle = spawn_blocking(move || { let PtrSymbol { cif, ptr } = symbol.clone(); ffi_call( call_args, @@ -345,7 +346,7 @@ pub fn op_ffi_call_nonblocking<'scope>( .map(|v| v8::Local::::try_from(v.v8_value).unwrap()); let out_buffer_ptr = out_buffer_as_ptr(scope, out_buffer); - let join_handle = tokio::task::spawn_blocking(move || { + let join_handle = spawn_blocking(move || { let Symbol { cif, ptr, diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index bd4d6a5454..78a21ab8f4 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -6,11 +6,11 @@ use crate::FfiPermissions; use crate::FfiState; use crate::ForeignFunction; use crate::PendingFfiAsyncWork; -use crate::LOCAL_ISOLATE_POINTER; use crate::MAX_SAFE_INTEGER; use crate::MIN_SAFE_INTEGER; use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; +use deno_core::futures::task::AtomicWaker; use deno_core::op; use deno_core::serde_v8; use deno_core::v8; @@ -30,9 +30,18 @@ use std::pin::Pin; use std::ptr; use std::ptr::NonNull; use std::rc::Rc; +use std::sync::atomic; +use std::sync::atomic::AtomicU32; use std::sync::mpsc::sync_channel; +use std::sync::Arc; use std::task::Poll; -use std::task::Waker; + +static THREAD_ID_COUNTER: AtomicU32 = AtomicU32::new(1); + +thread_local! 
{ + static LOCAL_THREAD_ID: RefCell = RefCell::new(0); +} + #[derive(Clone)] pub struct PtrSymbol { pub cif: libffi::middle::Cif, @@ -81,49 +90,38 @@ impl Resource for UnsafeCallbackResource { fn close(self: Rc) { self.cancel.cancel(); - // SAFETY: This drops the closure and the callback info associated with it. - // Any retained function pointers to the closure become dangling pointers. - // It is up to the user to know that it is safe to call the `close()` on the - // UnsafeCallback instance. - unsafe { - let info = Box::from_raw(self.info); - let isolate = info.isolate.as_mut().unwrap(); - let _ = v8::Global::from_raw(isolate, info.callback); - let _ = v8::Global::from_raw(isolate, info.context); - } } } struct CallbackInfo { - pub parameters: Vec, - pub result: NativeType, pub async_work_sender: mpsc::UnboundedSender, pub callback: NonNull, pub context: NonNull, - pub isolate: *mut v8::Isolate, - pub waker: Option, + pub parameters: Box<[NativeType]>, + pub result: NativeType, + pub thread_id: u32, + pub waker: Arc, } impl Future for CallbackInfo { type Output = (); fn poll( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, + self: Pin<&mut Self>, + _cx: &mut std::task::Context<'_>, ) -> std::task::Poll { - // Always replace the waker to make sure it's bound to the proper Future. - self.waker.replace(cx.waker().clone()); // The future for the CallbackInfo never resolves: It can only be canceled. 
Poll::Pending } } + unsafe extern "C" fn deno_ffi_callback( cif: &libffi::low::ffi_cif, result: &mut c_void, args: *const *const c_void, info: &CallbackInfo, ) { - LOCAL_ISOLATE_POINTER.with(|s| { - if ptr::eq(*s.borrow(), info.isolate) { + LOCAL_THREAD_ID.with(|s| { + if *s.borrow() == info.thread_id { // Own isolate thread, okay to call directly do_ffi_callback(cif, info, result, args); } else { @@ -138,10 +136,8 @@ unsafe extern "C" fn deno_ffi_callback( response_sender.send(()).unwrap(); }); async_work_sender.unbounded_send(fut).unwrap(); - if let Some(waker) = info.waker.as_ref() { - // Make sure event loop wakes up to receive our message before we start waiting for a response. - waker.wake_by_ref(); - } + // Make sure event loop wakes up to receive our message before we start waiting for a response. + info.waker.wake(); response_receiver.recv().unwrap(); } }); @@ -155,9 +151,6 @@ unsafe fn do_ffi_callback( ) { let callback: NonNull = info.callback; let context: NonNull = info.context; - let isolate: *mut v8::Isolate = info.isolate; - let isolate = &mut *isolate; - let callback = v8::Global::from_raw(isolate, callback); let context = std::mem::transmute::< NonNull, v8::Local, @@ -174,7 +167,10 @@ unsafe fn do_ffi_callback( // refer the same `let bool_value`. let mut cb_scope = v8::CallbackScope::new(context); let scope = &mut v8::HandleScope::new(&mut cb_scope); - let func = callback.open(scope); + let func = std::mem::transmute::< + NonNull, + v8::Local, + >(callback); let result = result as *mut c_void; let vals: &[*const c_void] = std::slice::from_raw_parts(args, info.parameters.len()); @@ -267,7 +263,6 @@ unsafe fn do_ffi_callback( let recv = v8::undefined(scope); let call_result = func.call(scope, recv.into(), ¶ms); - std::mem::forget(callback); if call_result.is_none() { // JS function threw an exception. Set the return value to zero and return. 
@@ -323,17 +318,6 @@ unsafe fn do_ffi_callback( }; *(result as *mut bool) = value; } - NativeType::I32 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { - value.value() as i32 - } else { - // Fallthrough, probably UB. - value - .int32_value(scope) - .expect("Unable to deserialize result parameter.") - }; - *(result as *mut i32) = value; - } NativeType::F32 => { let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as f32 @@ -395,7 +379,7 @@ unsafe fn do_ffi_callback( *(result as *mut *mut c_void) = pointer; } NativeType::I8 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as i8 } else { // Fallthrough, essentially UB. @@ -406,7 +390,7 @@ unsafe fn do_ffi_callback( *(result as *mut i8) = value; } NativeType::U8 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as u8 } else { // Fallthrough, essentially UB. @@ -417,7 +401,7 @@ unsafe fn do_ffi_callback( *(result as *mut u8) = value; } NativeType::I16 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as i16 } else { // Fallthrough, essentially UB. @@ -428,7 +412,7 @@ unsafe fn do_ffi_callback( *(result as *mut i16) = value; } NativeType::U16 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as u16 } else { // Fallthrough, essentially UB. @@ -438,9 +422,20 @@ unsafe fn do_ffi_callback( }; *(result as *mut u16) = value; } + NativeType::I32 => { + let value = if let Ok(value) = v8::Local::::try_from(value) { + value.value() + } else { + // Fallthrough, essentially UB. 
+ value + .int32_value(scope) + .expect("Unable to deserialize result parameter.") + }; + *(result as *mut i32) = value; + } NativeType::U32 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { - value.value() as u32 + let value = if let Ok(value) = v8::Local::::try_from(value) { + value.value() } else { // Fallthrough, essentially UB. value @@ -449,21 +444,25 @@ unsafe fn do_ffi_callback( }; *(result as *mut u32) = value; } - NativeType::I64 => { + NativeType::I64 | NativeType::ISize => { if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut i64) = value.i64_value().0; - } else if let Ok(value) = v8::Local::::try_from(value) { - *(result as *mut i64) = value.value(); + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut i64) = value.value() as i64; + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut i64) = value.value() as i64; } else { *(result as *mut i64) = value .integer_value(scope) .expect("Unable to deserialize result parameter."); } } - NativeType::U64 => { + NativeType::U64 | NativeType::USize => { if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut u64) = value.u64_value().0; - } else if let Ok(value) = v8::Local::::try_from(value) { + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut u64) = value.value() as u64; + } else if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut u64) = value.value() as u64; } else { *(result as *mut u64) = value @@ -504,9 +503,6 @@ unsafe fn do_ffi_callback( NativeType::Void => { // nop } - _ => { - unreachable!(); - } }; } @@ -555,27 +551,36 @@ where let v8_value = cb.v8_value; let cb = v8::Local::::try_from(v8_value)?; - let isolate: *mut v8::Isolate = &mut *scope as &mut v8::Isolate; - LOCAL_ISOLATE_POINTER.with(|s| { - if s.borrow().is_null() { - s.replace(isolate); + let thread_id: u32 = LOCAL_THREAD_ID.with(|s| { + let value = *s.borrow(); + if value == 0 { + let res = 
THREAD_ID_COUNTER.fetch_add(1, atomic::Ordering::SeqCst); + s.replace(res); + res + } else { + value } }); + if thread_id == 0 { + panic!("Isolate ID counter overflowed u32"); + } + let async_work_sender = state.borrow_mut::().async_work_sender.clone(); let callback = v8::Global::new(scope, cb).into_raw(); let current_context = scope.get_current_context(); let context = v8::Global::new(scope, current_context).into_raw(); + let waker = state.waker.clone(); let info: *mut CallbackInfo = Box::leak(Box::new(CallbackInfo { - parameters: args.parameters.clone(), - result: args.result.clone(), async_work_sender, callback, context, - isolate, - waker: None, + parameters: args.parameters.clone().into(), + result: args.result.clone(), + thread_id, + waker, })); let cif = Cif::new( args @@ -607,3 +612,24 @@ where Ok(array_value.into()) } + +#[op(v8)] +pub fn op_ffi_unsafe_callback_close( + state: &mut OpState, + scope: &mut v8::HandleScope, + rid: ResourceId, +) -> Result<(), AnyError> { + // SAFETY: This drops the closure and the callback info associated with it. + // Any retained function pointers to the closure become dangling pointers. + // It is up to the user to know that it is safe to call the `close()` on the + // UnsafeCallback instance. 
+ unsafe { + let callback_resource = + state.resource_table.take::(rid)?; + let info = Box::from_raw(callback_resource.info); + let _ = v8::Global::from_raw(scope, info.callback); + let _ = v8::Global::from_raw(scope, info.context); + callback_resource.close(); + } + Ok(()) +} diff --git a/ext/ffi/lib.rs b/ext/ffi/lib.rs index c11f08dd8e..ccad69d738 100644 --- a/ext/ffi/lib.rs +++ b/ext/ffi/lib.rs @@ -2,7 +2,6 @@ use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; -use deno_core::v8; use deno_core::OpState; use std::cell::RefCell; @@ -10,7 +9,6 @@ use std::mem::size_of; use std::os::raw::c_char; use std::os::raw::c_short; use std::path::Path; -use std::ptr; use std::rc::Rc; mod call; @@ -25,6 +23,7 @@ mod turbocall; use call::op_ffi_call_nonblocking; use call::op_ffi_call_ptr; use call::op_ffi_call_ptr_nonblocking; +use callback::op_ffi_unsafe_callback_close; use callback::op_ffi_unsafe_callback_create; use callback::op_ffi_unsafe_callback_ref; use dlfcn::op_ffi_load; @@ -43,10 +42,6 @@ const _: () = { assert!(size_of::<*const ()>() == 8); }; -thread_local! { - static LOCAL_ISOLATE_POINTER: RefCell<*const v8::Isolate> = RefCell::new(ptr::null()); -} - pub(crate) const MAX_SAFE_INTEGER: isize = 9007199254740991; pub(crate) const MIN_SAFE_INTEGER: isize = -9007199254740991; @@ -109,6 +104,7 @@ deno_core::extension!(deno_ffi, op_ffi_read_f64

, op_ffi_read_ptr

, op_ffi_unsafe_callback_create

, + op_ffi_unsafe_callback_close, op_ffi_unsafe_callback_ref, ], esm = [ "00_ffi.js" ], diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index bddafb09ee..f7c07f26a7 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -1,18 +1,30 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const ops = core.ops; +const { + op_fs_chmod_async, + op_fs_ftruncate_async, + op_fs_truncate_async, + op_fs_link_async, + op_fs_flock_async, +} = Deno.core.ensureFastOps(); const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeFilter, Date, DatePrototype, + DatePrototypeGetTime, Error, Function, MathTrunc, ObjectEntries, ObjectPrototypeIsPrototypeOf, ObjectValues, + StringPrototypeSlice, + StringPrototypeStartsWith, SymbolAsyncIterator, SymbolIterator, Uint32Array, @@ -27,11 +39,11 @@ import { import { pathFromURL } from "ext:deno_web/00_infra.js"; function chmodSync(path, mode) { - ops.op_chmod_sync(pathFromURL(path), mode); + ops.op_fs_chmod_sync(pathFromURL(path), mode); } async function chmod(path, mode) { - await core.opAsync2("op_chmod_async", pathFromURL(path), mode); + await op_fs_chmod_async(pathFromURL(path), mode); } function chownSync( @@ -39,7 +51,7 @@ function chownSync( uid, gid, ) { - ops.op_chown_sync(pathFromURL(path), uid, gid); + ops.op_fs_chown_sync(pathFromURL(path), uid, gid); } async function chown( @@ -48,7 +60,7 @@ async function chown( gid, ) { await core.opAsync( - "op_chown_async", + "op_fs_chown_async", pathFromURL(path), uid, gid, @@ -59,7 +71,7 @@ function copyFileSync( fromPath, toPath, ) { - ops.op_copy_file_sync( + ops.op_fs_copy_file_sync( pathFromURL(fromPath), pathFromURL(toPath), ); @@ -70,27 +82,31 @@ async function copyFile( toPath, ) { await core.opAsync( - "op_copy_file_async", + "op_fs_copy_file_async", pathFromURL(fromPath), pathFromURL(toPath), ); } function cwd() { - return ops.op_cwd(); + return ops.op_fs_cwd(); } 
function chdir(directory) { - ops.op_chdir(pathFromURL(directory)); + ops.op_fs_chdir(pathFromURL(directory)); } function makeTempDirSync(options = {}) { - return ops.op_make_temp_dir_sync(options.dir, options.prefix, options.suffix); + return ops.op_fs_make_temp_dir_sync( + options.dir, + options.prefix, + options.suffix, + ); } function makeTempDir(options = {}) { return core.opAsync( - "op_make_temp_dir_async", + "op_fs_make_temp_dir_async", options.dir, options.prefix, options.suffix, @@ -98,7 +114,7 @@ function makeTempDir(options = {}) { } function makeTempFileSync(options = {}) { - return ops.op_make_temp_file_sync( + return ops.op_fs_make_temp_file_sync( options.dir, options.prefix, options.suffix, @@ -107,7 +123,7 @@ function makeTempFileSync(options = {}) { function makeTempFile(options = {}) { return core.opAsync( - "op_make_temp_file_async", + "op_fs_make_temp_file_async", options.dir, options.prefix, options.suffix, @@ -115,7 +131,7 @@ function makeTempFile(options = {}) { } function mkdirSync(path, options) { - ops.op_mkdir_sync( + ops.op_fs_mkdir_sync( pathFromURL(path), options?.recursive ?? false, options?.mode, @@ -124,7 +140,7 @@ function mkdirSync(path, options) { async function mkdir(path, options) { await core.opAsync( - "op_mkdir_async", + "op_fs_mkdir_async", pathFromURL(path), options?.recursive ?? 
false, options?.mode, @@ -132,14 +148,14 @@ async function mkdir(path, options) { } function readDirSync(path) { - return ops.op_read_dir_sync(pathFromURL(path))[ + return ops.op_fs_read_dir_sync(pathFromURL(path))[ SymbolIterator ](); } function readDir(path) { const array = core.opAsync( - "op_read_dir_async", + "op_fs_read_dir_async", pathFromURL(path), ); return { @@ -153,26 +169,26 @@ function readDir(path) { } function readLinkSync(path) { - return ops.op_read_link_sync(pathFromURL(path)); + return ops.op_fs_read_link_sync(pathFromURL(path)); } function readLink(path) { - return core.opAsync("op_read_link_async", pathFromURL(path)); + return core.opAsync("op_fs_read_link_async", pathFromURL(path)); } function realPathSync(path) { - return ops.op_realpath_sync(pathFromURL(path)); + return ops.op_fs_realpath_sync(pathFromURL(path)); } function realPath(path) { - return core.opAsync("op_realpath_async", pathFromURL(path)); + return core.opAsync("op_fs_realpath_async", pathFromURL(path)); } function removeSync( path, options = {}, ) { - ops.op_remove_sync( + ops.op_fs_remove_sync( pathFromURL(path), !!options.recursive, ); @@ -183,14 +199,14 @@ async function remove( options = {}, ) { await core.opAsync( - "op_remove_async", + "op_fs_remove_async", pathFromURL(path), !!options.recursive, ); } function renameSync(oldpath, newpath) { - ops.op_rename_sync( + ops.op_fs_rename_sync( pathFromURL(oldpath), pathFromURL(newpath), ); @@ -198,7 +214,7 @@ function renameSync(oldpath, newpath) { async function rename(oldpath, newpath) { await core.opAsync( - "op_rename_async", + "op_fs_rename_async", pathFromURL(oldpath), pathFromURL(newpath), ); @@ -223,6 +239,7 @@ async function rename(oldpath, newpath) { // high u32 | low u32 // // 4. ?u64 converts a zero u64 value to JS null on Windows. +// ?bool converts a false bool value to JS null on Windows. function createByteStruct(types) { // types can be "date", "bool" or "u64". 
let offset = 0; @@ -232,8 +249,8 @@ function createByteStruct(types) { for (let i = 0; i < typeEntries.length; ++i) { let { 0: name, 1: type } = typeEntries[i]; - const optional = type.startsWith("?"); - if (optional) type = type.slice(1); + const optional = StringPrototypeStartsWith(type, "?"); + if (optional) type = StringPrototypeSlice(type, 1); if (type == "u64") { if (!optional) { @@ -251,7 +268,15 @@ function createByteStruct(types) { }] + view[${offset + 3}] * 2**32),`; offset += 2; } else { - str += `${name}: !!(view[${offset}] + view[${offset + 1}] * 2**32),`; + if (!optional) { + str += `${name}: !!(view[${offset}] + view[${offset + 1}] * 2**32),`; + } else { + str += `${name}: (unix ? !!((view[${offset}] + view[${ + offset + 1 + }] * 2**32)) : !!((view[${offset}] + view[${ + offset + 1 + }] * 2**32)) || null),`; + } } offset += 2; } @@ -277,6 +302,10 @@ const { 0: statStruct, 1: statBuf } = createByteStruct({ rdev: "?u64", blksize: "?u64", blocks: "?u64", + isBlockDevice: "?bool", + isCharDevice: "?bool", + isFifo: "?bool", + isSocket: "?bool", }); function parseFileInfo(response) { @@ -300,35 +329,39 @@ function parseFileInfo(response) { rdev: unix ? response.rdev : null, blksize: unix ? response.blksize : null, blocks: unix ? response.blocks : null, + isBlockDevice: unix ? response.isBlockDevice : null, + isCharDevice: unix ? response.isCharDevice : null, + isFifo: unix ? response.isFifo : null, + isSocket: unix ? 
response.isSocket : null, }; } function fstatSync(rid) { - ops.op_fstat_sync(rid, statBuf); + ops.op_fs_fstat_sync(rid, statBuf); return statStruct(statBuf); } async function fstat(rid) { - return parseFileInfo(await core.opAsync("op_fstat_async", rid)); + return parseFileInfo(await core.opAsync("op_fs_fstat_async", rid)); } async function lstat(path) { - const res = await core.opAsync("op_lstat_async", pathFromURL(path)); + const res = await core.opAsync("op_fs_lstat_async", pathFromURL(path)); return parseFileInfo(res); } function lstatSync(path) { - ops.op_lstat_sync(pathFromURL(path), statBuf); + ops.op_fs_lstat_sync(pathFromURL(path), statBuf); return statStruct(statBuf); } async function stat(path) { - const res = await core.opAsync("op_stat_async", pathFromURL(path)); + const res = await core.opAsync("op_fs_stat_async", pathFromURL(path)); return parseFileInfo(res); } function statSync(path) { - ops.op_stat_sync(pathFromURL(path), statBuf); + ops.op_fs_stat_sync(pathFromURL(path), statBuf); return statStruct(statBuf); } @@ -340,36 +373,36 @@ function coerceLen(len) { } function ftruncateSync(rid, len) { - ops.op_ftruncate_sync(rid, coerceLen(len)); + ops.op_fs_ftruncate_sync(rid, coerceLen(len)); } async function ftruncate(rid, len) { - await core.opAsync2("op_ftruncate_async", rid, coerceLen(len)); + await op_fs_ftruncate_async(rid, coerceLen(len)); } function truncateSync(path, len) { - ops.op_truncate_sync(path, coerceLen(len)); + ops.op_fs_truncate_sync(path, coerceLen(len)); } async function truncate(path, len) { - await core.opAsync2("op_truncate_async", path, coerceLen(len)); + await op_fs_truncate_async(path, coerceLen(len)); } function umask(mask) { - return ops.op_umask(mask); + return ops.op_fs_umask(mask); } function linkSync(oldpath, newpath) { - ops.op_link_sync(oldpath, newpath); + ops.op_fs_link_sync(oldpath, newpath); } async function link(oldpath, newpath) { - await core.opAsync2("op_link_async", oldpath, newpath); + await 
op_fs_link_async(oldpath, newpath); } function toUnixTimeFromEpoch(value) { if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { - const time = value.valueOf(); + const time = DatePrototypeGetTime(value); const seconds = MathTrunc(time / 1e3); const nanoseconds = MathTrunc(time - (seconds * 1e3)) * 1e6; @@ -395,7 +428,7 @@ function futimeSync( ) { const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); - ops.op_futime_sync(rid, atimeSec, atimeNsec, mtimeSec, mtimeNsec); + ops.op_fs_futime_sync(rid, atimeSec, atimeNsec, mtimeSec, mtimeNsec); } async function futime( @@ -406,7 +439,7 @@ async function futime( const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); await core.opAsync( - "op_futime_async", + "op_fs_futime_async", rid, atimeSec, atimeNsec, @@ -422,7 +455,7 @@ function utimeSync( ) { const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); - ops.op_utime_sync( + ops.op_fs_utime_sync( pathFromURL(path), atimeSec, atimeNsec, @@ -439,7 +472,7 @@ async function utime( const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); await core.opAsync( - "op_utime_async", + "op_fs_utime_async", pathFromURL(path), atimeSec, atimeNsec, @@ -453,7 +486,7 @@ function symlinkSync( newpath, options, ) { - ops.op_symlink_sync( + ops.op_fs_symlink_sync( pathFromURL(oldpath), pathFromURL(newpath), options?.type, @@ -466,7 +499,7 @@ async function symlink( options, ) { await core.opAsync( - "op_symlink_async", + "op_fs_symlink_async", pathFromURL(oldpath), pathFromURL(newpath), options?.type, @@ -474,35 +507,35 @@ async function symlink( } function fdatasyncSync(rid) { - ops.op_fdatasync_sync(rid); + ops.op_fs_fdatasync_sync(rid); } async function fdatasync(rid) { - await 
core.opAsync("op_fdatasync_async", rid); + await core.opAsync("op_fs_fdatasync_async", rid); } function fsyncSync(rid) { - ops.op_fsync_sync(rid); + ops.op_fs_fsync_sync(rid); } async function fsync(rid) { - await core.opAsync("op_fsync_async", rid); + await core.opAsync("op_fs_fsync_async", rid); } function flockSync(rid, exclusive) { - ops.op_flock_sync(rid, exclusive === true); + ops.op_fs_flock_sync(rid, exclusive === true); } async function flock(rid, exclusive) { - await core.opAsync2("op_flock_async", rid, exclusive === true); + await op_fs_flock_async(rid, exclusive === true); } function funlockSync(rid) { - ops.op_funlock_sync(rid); + ops.op_fs_funlock_sync(rid); } async function funlock(rid) { - await core.opAsync("op_funlock_async", rid); + await core.opAsync("op_fs_funlock_async", rid); } function seekSync( @@ -510,7 +543,7 @@ function seekSync( offset, whence, ) { - return ops.op_seek_sync(rid, offset, whence); + return ops.op_fs_seek_sync(rid, offset, whence); } function seek( @@ -518,7 +551,7 @@ function seek( offset, whence, ) { - return core.opAsync("op_seek_async", rid, offset, whence); + return core.opAsync("op_fs_seek_async", rid, offset, whence); } function openSync( @@ -526,7 +559,7 @@ function openSync( options, ) { if (options) checkOpenOptions(options); - const rid = ops.op_open_sync( + const rid = ops.op_fs_open_sync( pathFromURL(path), options, ); @@ -540,7 +573,7 @@ async function open( ) { if (options) checkOpenOptions(options); const rid = await core.opAsync( - "op_open_async", + "op_fs_open_async", pathFromURL(path), options, ); @@ -667,7 +700,7 @@ function checkOpenOptions(options) { const File = FsFile; function readFileSync(path) { - return ops.op_read_file_sync(pathFromURL(path)); + return ops.op_fs_read_file_sync(pathFromURL(path)); } async function readFile(path, options) { @@ -682,7 +715,7 @@ async function readFile(path, options) { try { const read = await core.opAsync( - "op_read_file_async", + "op_fs_read_file_async", 
pathFromURL(path), cancelRid, ); @@ -698,7 +731,7 @@ async function readFile(path, options) { } function readTextFileSync(path) { - return ops.op_read_file_text_sync(pathFromURL(path)); + return ops.op_fs_read_file_text_sync(pathFromURL(path)); } async function readTextFile(path, options) { @@ -713,7 +746,7 @@ async function readTextFile(path, options) { try { const read = await core.opAsync( - "op_read_file_text_async", + "op_fs_read_file_text_async", pathFromURL(path), cancelRid, ); @@ -734,7 +767,7 @@ function writeFileSync( options = {}, ) { options.signal?.throwIfAborted(); - ops.op_write_file_sync( + ops.op_fs_write_file_sync( pathFromURL(path), options.mode, options.append ?? false, @@ -771,7 +804,7 @@ async function writeFile( }); } else { await core.opAsync( - "op_write_file_async", + "op_fs_write_file_async", pathFromURL(path), options.mode, options.append ?? false, diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index d6976d3744..10f297b48e 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.9.0" +version = "0.17.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -13,12 +13,15 @@ description = "Ops for interacting with the file system" [lib] path = "lib.rs" +[features] +sync_fs = [] + [dependencies] async-trait.workspace = true deno_core.workspace = true deno_io.workspace = true -filetime = "0.2.16" -fs3 = "0.5.0" +filetime.workspace = true +fs3.workspace = true libc.workspace = true log.workspace = true rand.workspace = true diff --git a/ext/fs/clippy.toml b/ext/fs/clippy.toml index 53676a90e6..023769214b 100644 --- a/ext/fs/clippy.toml +++ b/ext/fs/clippy.toml @@ -43,3 +43,6 @@ disallowed-methods = [ { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, ] +disallowed-types = [ + { path = 
"std::sync::Arc", reason = "use crate::sync::MaybeArc instead" }, +] diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 184cb8096f..7624535c92 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -1,6 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use std::io; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; @@ -8,6 +7,13 @@ use std::rc::Rc; use serde::Deserialize; use serde::Serialize; +use deno_io::fs::File; +use deno_io::fs::FsResult; +use deno_io::fs::FsStat; + +use crate::sync::MaybeSend; +use crate::sync::MaybeSync; + #[derive(Deserialize, Default, Debug, Clone, Copy)] #[serde(rename_all = "camelCase")] #[serde(default)] @@ -52,27 +58,6 @@ impl OpenOptions { } } -pub struct FsStat { - pub is_file: bool, - pub is_directory: bool, - pub is_symlink: bool, - pub size: u64, - - pub mtime: Option, - pub atime: Option, - pub birthtime: Option, - - pub dev: u64, - pub ino: u64, - pub mode: u32, - pub nlink: u64, - pub uid: u32, - pub gid: u32, - pub rdev: u64, - pub blksize: u64, - pub blocks: u64, -} - #[derive(Deserialize)] pub enum FsFileType { #[serde(rename = "file")] @@ -90,106 +75,41 @@ pub struct FsDirEntry { pub is_symlink: bool, } -pub enum FsError { - Io(io::Error), - FileBusy, - NotSupported, -} - -impl From for FsError { - fn from(err: io::Error) -> Self { - Self::Io(err) - } -} - -pub type FsResult = Result; +#[allow(clippy::disallowed_types)] +pub type FileSystemRc = crate::sync::MaybeArc; #[async_trait::async_trait(?Send)] -pub trait File { - fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()>; - async fn write_all_async(self: Rc, buf: Vec) -> FsResult<()>; - - fn read_all_sync(self: Rc) -> FsResult>; - async fn read_all_async(self: Rc) -> FsResult>; - - fn chmod_sync(self: Rc, pathmode: u32) -> FsResult<()>; - async fn chmod_async(self: Rc, mode: u32) -> FsResult<()>; - - fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult; - async fn seek_async(self: Rc, pos: io::SeekFrom) -> 
FsResult; - - fn datasync_sync(self: Rc) -> FsResult<()>; - async fn datasync_async(self: Rc) -> FsResult<()>; - - fn sync_sync(self: Rc) -> FsResult<()>; - async fn sync_async(self: Rc) -> FsResult<()>; - - fn stat_sync(self: Rc) -> FsResult; - async fn stat_async(self: Rc) -> FsResult; - - fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()>; - async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()>; - fn unlock_sync(self: Rc) -> FsResult<()>; - async fn unlock_async(self: Rc) -> FsResult<()>; - - fn truncate_sync(self: Rc, len: u64) -> FsResult<()>; - async fn truncate_async(self: Rc, len: u64) -> FsResult<()>; - - fn utime_sync( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()>; - async fn utime_async( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()>; -} - -#[async_trait::async_trait(?Send)] -pub trait FileSystem: Clone { - type File: File; - +pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync { fn cwd(&self) -> FsResult; fn tmp_dir(&self) -> FsResult; - fn chdir(&self, path: impl AsRef) -> FsResult<()>; + fn chdir(&self, path: &Path) -> FsResult<()>; fn umask(&self, mask: Option) -> FsResult; fn open_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, - ) -> FsResult; + ) -> FsResult>; async fn open_async( &self, path: PathBuf, options: OpenOptions, - ) -> FsResult; + ) -> FsResult>; - fn mkdir_sync( - &self, - path: impl AsRef, - recusive: bool, - mode: u32, - ) -> FsResult<()>; + fn mkdir_sync(&self, path: &Path, recusive: bool, mode: u32) -> FsResult<()>; async fn mkdir_async( &self, path: PathBuf, - recusive: bool, + recursive: bool, mode: u32, ) -> FsResult<()>; - fn chmod_sync(&self, path: impl AsRef, mode: u32) -> FsResult<()>; + fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()>; async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()>; fn chown_sync( &self, - path: impl 
AsRef, + path: &Path, uid: Option, gid: Option, ) -> FsResult<()>; @@ -200,52 +120,36 @@ pub trait FileSystem: Clone { gid: Option, ) -> FsResult<()>; - fn remove_sync( - &self, - path: impl AsRef, - recursive: bool, - ) -> FsResult<()>; + fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()>; async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()>; - fn copy_file_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()>; async fn copy_file_async( &self, oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()>; - fn stat_sync(&self, path: impl AsRef) -> FsResult; + fn stat_sync(&self, path: &Path) -> FsResult; async fn stat_async(&self, path: PathBuf) -> FsResult; - fn lstat_sync(&self, path: impl AsRef) -> FsResult; + fn lstat_sync(&self, path: &Path) -> FsResult; async fn lstat_async(&self, path: PathBuf) -> FsResult; - fn realpath_sync(&self, path: impl AsRef) -> FsResult; + fn realpath_sync(&self, path: &Path) -> FsResult; async fn realpath_async(&self, path: PathBuf) -> FsResult; - fn read_dir_sync(&self, path: impl AsRef) -> FsResult>; + fn read_dir_sync(&self, path: &Path) -> FsResult>; async fn read_dir_async(&self, path: PathBuf) -> FsResult>; - fn rename_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()>; async fn rename_async( &self, oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()>; - fn link_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()>; async fn link_async( &self, oldpath: PathBuf, @@ -254,8 +158,8 @@ pub trait FileSystem: Clone { fn symlink_sync( &self, - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()>; async fn symlink_async( @@ -265,15 +169,15 
@@ pub trait FileSystem: Clone { file_type: Option, ) -> FsResult<()>; - fn read_link_sync(&self, path: impl AsRef) -> FsResult; + fn read_link_sync(&self, path: &Path) -> FsResult; async fn read_link_async(&self, path: PathBuf) -> FsResult; - fn truncate_sync(&self, path: impl AsRef, len: u64) -> FsResult<()>; + fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()>; async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()>; fn utime_sync( &self, - path: impl AsRef, + path: &Path, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, @@ -290,12 +194,11 @@ pub trait FileSystem: Clone { fn write_file_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, data: &[u8], ) -> FsResult<()> { let file = self.open_sync(path, options)?; - let file = Rc::new(file); if let Some(mode) = options.mode { file.clone().chmod_sync(mode)?; } @@ -309,26 +212,45 @@ pub trait FileSystem: Clone { data: Vec, ) -> FsResult<()> { let file = self.open_async(path, options).await?; - let file = Rc::new(file); if let Some(mode) = options.mode { file.clone().chmod_async(mode).await?; } - file.write_all_async(data).await?; + file.write_all(data.into()).await?; Ok(()) } - fn read_file_sync(&self, path: impl AsRef) -> FsResult> { + fn read_file_sync(&self, path: &Path) -> FsResult> { let options = OpenOptions::read(); let file = self.open_sync(path, options)?; - let file = Rc::new(file); let buf = file.read_all_sync()?; Ok(buf) } async fn read_file_async(&self, path: PathBuf) -> FsResult> { let options = OpenOptions::read(); - let file = self.clone().open_async(path, options).await?; - let file = Rc::new(file); + let file = self.open_async(path, options).await?; let buf = file.read_all_async().await?; Ok(buf) } + + fn is_file(&self, path: &Path) -> bool { + self.stat_sync(path).map(|m| m.is_file).unwrap_or(false) + } + + fn is_dir(&self, path: &Path) -> bool { + self + .stat_sync(path) + .map(|m| m.is_directory) + .unwrap_or(false) + } + + fn exists(&self, path: 
&Path) -> bool { + self.stat_sync(path).is_ok() + } + + fn read_to_string(&self, path: &Path) -> FsResult { + let buf = self.read_file_sync(path)?; + String::from_utf8(buf).map_err(|err| { + std::io::Error::new(std::io::ErrorKind::InvalidData, err).into() + }) + } } diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index 464d84adeb..7ba6cd7cac 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -3,22 +3,21 @@ mod interface; mod ops; mod std_fs; +pub mod sync; -pub use crate::interface::File; pub use crate::interface::FileSystem; +pub use crate::interface::FileSystemRc; pub use crate::interface::FsDirEntry; -pub use crate::interface::FsError; pub use crate::interface::FsFileType; -pub use crate::interface::FsResult; -pub use crate::interface::FsStat; pub use crate::interface::OpenOptions; -use crate::ops::*; +pub use crate::std_fs::RealFs; +pub use crate::sync::MaybeSend; +pub use crate::sync::MaybeSync; -pub use crate::std_fs::StdFs; +use crate::ops::*; use deno_core::error::AnyError; use deno_core::OpState; -use deno_core::Resource; use std::cell::RefCell; use std::convert::From; use std::path::Path; @@ -87,78 +86,77 @@ pub(crate) fn check_unstable2(state: &Rc>, api_name: &str) { deno_core::extension!(deno_fs, deps = [ deno_web ], - parameters = [Fs: FileSystem, P: FsPermissions], - bounds = [Fs::File: Resource], + parameters = [P: FsPermissions], ops = [ - op_cwd, - op_umask, - op_chdir, + op_fs_cwd

, + op_fs_umask, + op_fs_chdir

, - op_open_sync, - op_open_async, - op_mkdir_sync, - op_mkdir_async, - op_chmod_sync, - op_chmod_async, - op_chown_sync, - op_chown_async, - op_remove_sync, - op_remove_async, - op_copy_file_sync, - op_copy_file_async, - op_stat_sync, - op_stat_async, - op_lstat_sync, - op_lstat_async, - op_realpath_sync, - op_realpath_async, - op_read_dir_sync, - op_read_dir_async, - op_rename_sync, - op_rename_async, - op_link_sync, - op_link_async, - op_symlink_sync, - op_symlink_async, - op_read_link_sync, - op_read_link_async, - op_truncate_sync, - op_truncate_async, - op_utime_sync, - op_utime_async, - op_make_temp_dir_sync, - op_make_temp_dir_async, - op_make_temp_file_sync, - op_make_temp_file_async, - op_write_file_sync, - op_write_file_async, - op_read_file_sync, - op_read_file_async, - op_read_file_text_sync, - op_read_file_text_async, + op_fs_open_sync

, + op_fs_open_async

, + op_fs_mkdir_sync

, + op_fs_mkdir_async

, + op_fs_chmod_sync

, + op_fs_chmod_async

, + op_fs_chown_sync

, + op_fs_chown_async

, + op_fs_remove_sync

, + op_fs_remove_async

, + op_fs_copy_file_sync

, + op_fs_copy_file_async

, + op_fs_stat_sync

, + op_fs_stat_async

, + op_fs_lstat_sync

, + op_fs_lstat_async

, + op_fs_realpath_sync

, + op_fs_realpath_async

, + op_fs_read_dir_sync

, + op_fs_read_dir_async

, + op_fs_rename_sync

, + op_fs_rename_async

, + op_fs_link_sync

, + op_fs_link_async

, + op_fs_symlink_sync

, + op_fs_symlink_async

, + op_fs_read_link_sync

, + op_fs_read_link_async

, + op_fs_truncate_sync

, + op_fs_truncate_async

, + op_fs_utime_sync

, + op_fs_utime_async

, + op_fs_make_temp_dir_sync

, + op_fs_make_temp_dir_async

, + op_fs_make_temp_file_sync

, + op_fs_make_temp_file_async

, + op_fs_write_file_sync

, + op_fs_write_file_async

, + op_fs_read_file_sync

, + op_fs_read_file_async

, + op_fs_read_file_text_sync

, + op_fs_read_file_text_async

, - op_seek_sync, - op_seek_async, - op_fdatasync_sync, - op_fdatasync_async, - op_fsync_sync, - op_fsync_async, - op_fstat_sync, - op_fstat_async, - op_flock_sync, - op_flock_async, - op_funlock_sync, - op_funlock_async, - op_ftruncate_sync, - op_ftruncate_async, - op_futime_sync, - op_futime_async, + op_fs_seek_sync, + op_fs_seek_async, + op_fs_fdatasync_sync, + op_fs_fdatasync_async, + op_fs_fsync_sync, + op_fs_fsync_async, + op_fs_fstat_sync, + op_fs_fstat_async, + op_fs_flock_sync, + op_fs_flock_async, + op_fs_funlock_sync, + op_fs_funlock_async, + op_fs_ftruncate_sync, + op_fs_ftruncate_async, + op_fs_futime_sync, + op_fs_futime_async, ], esm = [ "30_fs.js" ], options = { unstable: bool, - fs: Fs, + fs: FileSystemRc, }, state = |state, options| { state.put(UnstableChecker { unstable: options.unstable }); diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 8c5d212015..5bf3b1c6fd 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -9,63 +9,36 @@ use std::path::PathBuf; use std::rc::Rc; use deno_core::error::custom_error; -use deno_core::error::not_supported; -use deno_core::error::resource_unavailable; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::CancelFuture; use deno_core::CancelHandle; use deno_core::OpState; -use deno_core::Resource; use deno_core::ResourceId; use deno_core::ZeroCopyBuf; +use deno_io::fs::FileResource; +use deno_io::fs::FsError; +use deno_io::fs::FsStat; use rand::rngs::ThreadRng; use rand::thread_rng; use rand::Rng; use serde::Serialize; -use tokio::task::JoinError; use crate::check_unstable; use crate::check_unstable2; +use crate::interface::FileSystemRc; use crate::interface::FsDirEntry; -use crate::interface::FsError; use crate::interface::FsFileType; -use crate::interface::FsStat; -use crate::File; -use crate::FileSystem; use crate::FsPermissions; use crate::OpenOptions; -impl From for FsError { - fn from(err: JoinError) -> Self { - if err.is_cancelled() { - todo!("async tasks must not 
be cancelled") - } - if err.is_panic() { - std::panic::resume_unwind(err.into_panic()); // resume the panic on the main thread - } - unreachable!() - } -} - -impl From for AnyError { - fn from(err: FsError) -> Self { - match err { - FsError::Io(err) => AnyError::from(err), - FsError::FileBusy => resource_unavailable(), - FsError::NotSupported => not_supported(), - } - } -} - #[op] -pub fn op_cwd(state: &mut OpState) -> Result +pub fn op_fs_cwd

(state: &mut OpState) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let fs = state.borrow::(); + let fs = state.borrow::(); let path = fs.cwd()?; state .borrow_mut::

() @@ -75,34 +48,36 @@ where } #[op] -fn op_chdir(state: &mut OpState, directory: &str) -> Result<(), AnyError> +fn op_fs_chdir

(state: &mut OpState, directory: &str) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let d = PathBuf::from(&directory); state.borrow_mut::

().check_read(&d, "Deno.chdir()")?; - state.borrow::().chdir(&d).context_path("chdir", &d) + state + .borrow::() + .chdir(&d) + .context_path("chdir", &d) } #[op] -fn op_umask(state: &mut OpState, mask: Option) -> Result +fn op_fs_umask( + state: &mut OpState, + mask: Option, +) -> Result where - Fs: FileSystem + 'static, { check_unstable(state, "Deno.umask"); - state.borrow::().umask(mask).context("umask") + state.borrow::().umask(mask).context("umask") } #[op] -fn op_open_sync( +fn op_fs_open_sync

( state: &mut OpState, path: String, options: Option, ) -> Result where - Fs: FileSystem + 'static, - Fs::File: Resource, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -111,22 +86,22 @@ where let permissions = state.borrow_mut::

(); permissions.check(&options, &path, "Deno.openSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); let file = fs.open_sync(&path, options).context_path("open", &path)?; - let rid = state.resource_table.add(file); + let rid = state + .resource_table + .add(FileResource::new(file, "fsFile".to_string())); Ok(rid) } #[op] -async fn op_open_async( +async fn op_fs_open_async

( state: Rc>, path: String, options: Option, ) -> Result where - Fs: FileSystem + 'static, - Fs::File: Resource, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -136,26 +111,28 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

(); permissions.check(&options, &path, "Deno.open()")?; - state.borrow::().clone() + state.borrow::().clone() }; let file = fs .open_async(path.clone(), options) .await .context_path("open", &path)?; - let rid = state.borrow_mut().resource_table.add(file); + let rid = state + .borrow_mut() + .resource_table + .add(FileResource::new(file, "fsFile".to_string())); Ok(rid) } #[op] -fn op_mkdir_sync( +fn op_fs_mkdir_sync

( state: &mut OpState, path: String, recursive: bool, mode: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -166,7 +143,7 @@ where .borrow_mut::

() .check_write(&path, "Deno.mkdirSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.mkdir_sync(&path, recursive, mode) .context_path("mkdir", &path)?; @@ -174,14 +151,13 @@ where } #[op] -async fn op_mkdir_async( +async fn op_fs_mkdir_async

( state: Rc>, path: String, recursive: bool, mode: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -191,7 +167,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

().check_write(&path, "Deno.mkdir()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.mkdir_async(path.clone(), recursive, mode) @@ -202,39 +178,37 @@ where } #[op] -fn op_chmod_sync( +fn op_fs_chmod_sync

( state: &mut OpState, path: String, mode: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

() .check_write(&path, "Deno.chmodSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.chmod_sync(&path, mode).context_path("chmod", &path)?; Ok(()) } #[op] -async fn op_chmod_async( +async fn op_fs_chmod_async

( state: Rc>, path: String, mode: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

().check_write(&path, "Deno.chmod()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.chmod_async(path.clone(), mode) .await @@ -243,42 +217,40 @@ where } #[op] -fn op_chown_sync( +fn op_fs_chown_sync

( state: &mut OpState, path: String, uid: Option, gid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

() .check_write(&path, "Deno.chownSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.chown_sync(&path, uid, gid) .context_path("chown", &path)?; Ok(()) } #[op] -async fn op_chown_async( +async fn op_fs_chown_async

( state: Rc>, path: String, uid: Option, gid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

().check_write(&path, "Deno.chown()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.chown_async(path.clone(), uid, gid) .await @@ -287,13 +259,12 @@ where } #[op] -fn op_remove_sync( +fn op_fs_remove_sync

( state: &mut OpState, path: &str, recursive: bool, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -302,7 +273,7 @@ where .borrow_mut::

() .check_write(&path, "Deno.removeSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.remove_sync(&path, recursive) .context_path("remove", &path)?; @@ -310,13 +281,12 @@ where } #[op] -async fn op_remove_async( +async fn op_fs_remove_async

( state: Rc>, path: String, recursive: bool, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -326,7 +296,7 @@ where state .borrow_mut::

() .check_write(&path, "Deno.remove()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.remove_async(path.clone(), recursive) @@ -337,13 +307,12 @@ where } #[op] -fn op_copy_file_sync( +fn op_fs_copy_file_sync

( state: &mut OpState, from: &str, to: &str, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let from = PathBuf::from(from); @@ -353,7 +322,7 @@ where permissions.check_read(&from, "Deno.copyFileSync()")?; permissions.check_write(&to, "Deno.copyFileSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.copy_file_sync(&from, &to) .context_two_path("copy", &from, &to)?; @@ -361,13 +330,12 @@ where } #[op] -async fn op_copy_file_async( +async fn op_fs_copy_file_async

( state: Rc>, from: String, to: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let from = PathBuf::from(from); @@ -378,7 +346,7 @@ where let permissions = state.borrow_mut::

(); permissions.check_read(&from, "Deno.copyFile()")?; permissions.check_write(&to, "Deno.copyFile()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.copy_file_async(from.clone(), to.clone()) @@ -389,20 +357,19 @@ where } #[op] -fn op_stat_sync( +fn op_fs_stat_sync

( state: &mut OpState, path: String, stat_out_buf: &mut [u32], ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

() .check_read(&path, "Deno.statSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); let stat = fs.stat_sync(&path).context_path("stat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -410,12 +377,11 @@ where } #[op] -async fn op_stat_async( +async fn op_fs_stat_async

( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -423,7 +389,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.stat()")?; - state.borrow::().clone() + state.borrow::().clone() }; let stat = fs .stat_async(path.clone()) @@ -433,20 +399,19 @@ where } #[op] -fn op_lstat_sync( +fn op_fs_lstat_sync

( state: &mut OpState, path: String, stat_out_buf: &mut [u32], ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

() .check_read(&path, "Deno.lstatSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); let stat = fs.lstat_sync(&path).context_path("lstat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -454,12 +419,11 @@ where } #[op] -async fn op_lstat_async( +async fn op_fs_lstat_async

( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -467,7 +431,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.lstat()")?; - state.borrow::().clone() + state.borrow::().clone() }; let stat = fs .lstat_async(path.clone()) @@ -477,17 +441,16 @@ where } #[op] -fn op_realpath_sync( +fn op_fs_realpath_sync

( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); - let fs = state.borrow::().clone(); + let fs = state.borrow::().clone(); let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.realPathSync()")?; if path.is_relative() { @@ -502,12 +465,11 @@ where } #[op] -async fn op_realpath_async( +async fn op_fs_realpath_async

( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -515,7 +477,7 @@ where let fs; { let mut state = state.borrow_mut(); - fs = state.borrow::().clone(); + fs = state.borrow::().clone(); let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.realPath()")?; if path.is_relative() { @@ -532,12 +494,11 @@ where } #[op] -fn op_read_dir_sync( +fn op_fs_read_dir_sync

( state: &mut OpState, path: String, ) -> Result, AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -546,19 +507,18 @@ where .borrow_mut::

() .check_read(&path, "Deno.readDirSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); let entries = fs.read_dir_sync(&path).context_path("readdir", &path)?; Ok(entries) } #[op] -async fn op_read_dir_async( +async fn op_fs_read_dir_async

( state: Rc>, path: String, ) -> Result, AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -568,7 +528,7 @@ where state .borrow_mut::

() .check_read(&path, "Deno.readDir()")?; - state.borrow::().clone() + state.borrow::().clone() }; let entries = fs @@ -580,13 +540,12 @@ where } #[op] -fn op_rename_sync( +fn op_fs_rename_sync

( state: &mut OpState, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -597,7 +556,7 @@ where permissions.check_write(&oldpath, "Deno.renameSync()")?; permissions.check_write(&newpath, "Deno.renameSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.rename_sync(&oldpath, &newpath) .context_two_path("rename", &oldpath, &newpath)?; @@ -605,13 +564,12 @@ where } #[op] -async fn op_rename_async( +async fn op_fs_rename_async

( state: Rc>, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -623,7 +581,7 @@ where permissions.check_read(&oldpath, "Deno.rename()")?; permissions.check_write(&oldpath, "Deno.rename()")?; permissions.check_write(&newpath, "Deno.rename()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.rename_async(oldpath.clone(), newpath.clone()) @@ -634,13 +592,12 @@ where } #[op] -fn op_link_sync( +fn op_fs_link_sync

( state: &mut OpState, oldpath: &str, newpath: &str, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -652,7 +609,7 @@ where permissions.check_read(&newpath, "Deno.linkSync()")?; permissions.check_write(&newpath, "Deno.linkSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.link_sync(&oldpath, &newpath) .context_two_path("link", &oldpath, &newpath)?; @@ -660,13 +617,12 @@ where } #[op] -async fn op_link_async( +async fn op_fs_link_async

( state: Rc>, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(&oldpath); @@ -679,7 +635,7 @@ where permissions.check_write(&oldpath, "Deno.link()")?; permissions.check_read(&newpath, "Deno.link()")?; permissions.check_write(&newpath, "Deno.link()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.link_async(oldpath.clone(), newpath.clone()) @@ -690,14 +646,13 @@ where } #[op] -fn op_symlink_sync( +fn op_fs_symlink_sync

( state: &mut OpState, oldpath: &str, newpath: &str, file_type: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -707,7 +662,7 @@ where permissions.check_write_all("Deno.symlinkSync()")?; permissions.check_read_all("Deno.symlinkSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.symlink_sync(&oldpath, &newpath, file_type) .context_two_path("symlink", &oldpath, &newpath)?; @@ -715,14 +670,13 @@ where } #[op] -async fn op_symlink_async( +async fn op_fs_symlink_async

( state: Rc>, oldpath: String, newpath: String, file_type: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(&oldpath); @@ -733,7 +687,7 @@ where let permissions = state.borrow_mut::

(); permissions.check_write_all("Deno.symlink()")?; permissions.check_read_all("Deno.symlink()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.symlink_async(oldpath.clone(), newpath.clone(), file_type) @@ -744,12 +698,11 @@ where } #[op] -fn op_read_link_sync( +fn op_fs_read_link_sync

( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -758,7 +711,7 @@ where .borrow_mut::

() .check_read(&path, "Deno.readLink()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); let target = fs.read_link_sync(&path).context_path("readlink", &path)?; let target_string = path_into_string(target.into_os_string())?; @@ -766,12 +719,11 @@ where } #[op] -async fn op_read_link_async( +async fn op_fs_read_link_async

( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -781,7 +733,7 @@ where state .borrow_mut::

() .check_read(&path, "Deno.readLink()")?; - state.borrow::().clone() + state.borrow::().clone() }; let target = fs @@ -793,13 +745,12 @@ where } #[op] -fn op_truncate_sync( +fn op_fs_truncate_sync

( state: &mut OpState, path: &str, len: u64, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -808,7 +759,7 @@ where .borrow_mut::

() .check_write(&path, "Deno.truncateSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.truncate_sync(&path, len) .context_path("truncate", &path)?; @@ -816,13 +767,12 @@ where } #[op] -async fn op_truncate_async( +async fn op_fs_truncate_async

( state: Rc>, path: String, len: u64, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -832,7 +782,7 @@ where state .borrow_mut::

() .check_write(&path, "Deno.truncate()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.truncate_async(path.clone(), len) @@ -843,7 +793,7 @@ where } #[op] -fn op_utime_sync( +fn op_fs_utime_sync

( state: &mut OpState, path: &str, atime_secs: i64, @@ -852,14 +802,13 @@ fn op_utime_sync( mtime_nanos: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state.borrow_mut::

().check_write(&path, "Deno.utime()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) .context_path("utime", &path)?; @@ -867,7 +816,7 @@ where } #[op] -async fn op_utime_async( +async fn op_fs_utime_async

( state: Rc>, path: String, atime_secs: i64, @@ -876,7 +825,6 @@ async fn op_utime_async( mtime_nanos: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -884,7 +832,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

().check_write(&path, "Deno.utime()")?; - state.borrow::().clone() + state.borrow::().clone() }; fs.utime_async( @@ -901,17 +849,16 @@ where } #[op] -fn op_make_temp_dir_sync( +fn op_fs_make_temp_dir_sync

( state: &mut OpState, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_sync::(state, dir)?; + let (dir, fs) = make_temp_check_sync::

(state, dir)?; let mut rng = thread_rng(); @@ -935,17 +882,16 @@ where } #[op] -async fn op_make_temp_dir_async( +async fn op_fs_make_temp_dir_async

( state: Rc>, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_async::(state, dir)?; + let (dir, fs) = make_temp_check_async::

(state, dir)?; let mut rng = thread_rng(); @@ -969,17 +915,16 @@ where } #[op] -fn op_make_temp_file_sync( +fn op_fs_make_temp_file_sync

( state: &mut OpState, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_sync::(state, dir)?; + let (dir, fs) = make_temp_check_sync::

(state, dir)?; let open_opts = OpenOptions { write: true, @@ -1010,17 +955,16 @@ where } #[op] -async fn op_make_temp_file_async( +async fn op_fs_make_temp_file_async

( state: Rc>, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_async::(state, dir)?; + let (dir, fs) = make_temp_check_async::

(state, dir)?; let open_opts = OpenOptions { write: true, @@ -1049,15 +993,14 @@ where .context("tmpfile") } -fn make_temp_check_sync( +fn make_temp_check_sync

( state: &mut OpState, dir: Option, -) -> Result<(PathBuf, Fs), AnyError> +) -> Result<(PathBuf, FileSystemRc), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let fs = state.borrow::().clone(); + let fs = state.borrow::().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1079,16 +1022,15 @@ where Ok((dir, fs)) } -fn make_temp_check_async( +fn make_temp_check_async

( state: Rc>, dir: Option, -) -> Result<(PathBuf, Fs), AnyError> +) -> Result<(PathBuf, FileSystemRc), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let mut state = state.borrow_mut(); - let fs = state.borrow::().clone(); + let fs = state.borrow::().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1128,7 +1070,7 @@ fn tmp_name( } #[op] -fn op_write_file_sync( +fn op_fs_write_file_sync

( state: &mut OpState, path: String, mode: Option, @@ -1138,7 +1080,6 @@ fn op_write_file_sync( data: ZeroCopyBuf, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1147,7 +1088,7 @@ where let options = OpenOptions::write(create, append, create_new, mode); permissions.check(&options, &path, "Deno.writeFileSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::(); fs.write_file_sync(&path, options, &data) .context_path("writefile", &path)?; @@ -1156,7 +1097,7 @@ where } #[op] -async fn op_write_file_async( +async fn op_fs_write_file_async

( state: Rc>, path: String, mode: Option, @@ -1167,7 +1108,6 @@ async fn op_write_file_async( cancel_rid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1180,7 +1120,7 @@ where permissions.check(&options, &path, "Deno.writeFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.write_file_async(path.clone(), options, data.to_vec()); @@ -1201,12 +1141,11 @@ where } #[op] -fn op_read_file_sync( +fn op_fs_read_file_sync

( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1214,20 +1153,19 @@ where let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::(); - let buf = fs.read_file_sync(path).context("readfile")?; + let fs = state.borrow::(); + let buf = fs.read_file_sync(&path).context_path("readfile", &path)?; Ok(buf.into()) } #[op] -async fn op_read_file_async( +async fn op_fs_read_file_async

( state: Rc>, path: String, cancel_rid: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1238,7 +1176,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); @@ -1259,12 +1197,11 @@ where } #[op] -fn op_read_file_text_sync( +fn op_fs_read_file_text_sync

( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1272,20 +1209,19 @@ where let permissions = state.borrow_mut::

(); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::(); - let buf = fs.read_file_sync(path).context("readfile")?; + let fs = state.borrow::(); + let buf = fs.read_file_sync(&path).context_path("readfile", &path)?; Ok(string_from_utf8_lossy(buf)) } #[op] -async fn op_read_file_text_async( +async fn op_fs_read_file_text_async

( state: Rc>, path: String, cancel_rid: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1296,7 +1232,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); @@ -1340,106 +1276,78 @@ fn to_seek_from(offset: i64, whence: i32) -> Result { } #[op] -fn op_seek_sync( +fn op_fs_seek_sync( state: &mut OpState, rid: ResourceId, offset: i64, whence: i32, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result { let pos = to_seek_from(offset, whence)?; - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; let cursor = file.seek_sync(pos)?; Ok(cursor) } #[op] -async fn op_seek_async( +async fn op_fs_seek_async( state: Rc>, rid: ResourceId, offset: i64, whence: i32, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result { let pos = to_seek_from(offset, whence)?; - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; let cursor = file.seek_async(pos).await?; Ok(cursor) } #[op] -fn op_fdatasync_sync( +fn op_fs_fdatasync_sync( state: &mut OpState, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.datasync_sync()?; Ok(()) } #[op] -async fn op_fdatasync_async( +async fn op_fs_fdatasync_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), 
rid)?; file.datasync_async().await?; Ok(()) } #[op] -fn op_fsync_sync( +fn op_fs_fsync_sync( state: &mut OpState, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.sync_sync()?; Ok(()) } #[op] -async fn op_fsync_async( +async fn op_fs_fsync_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file.sync_async().await?; Ok(()) } #[op] -fn op_fstat_sync( +fn op_fs_fstat_sync( state: &mut OpState, rid: ResourceId, stat_out_buf: &mut [u32], -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; let stat = file.stat_sync()?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -1447,143 +1355,107 @@ where } #[op] -async fn op_fstat_async( +async fn op_fs_fstat_async( state: Rc>, rid: ResourceId, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result { + let file = FileResource::get_file(&state.borrow(), rid)?; let stat = file.stat_async().await?; Ok(stat.into()) } #[op] -fn op_flock_sync( +fn op_fs_flock_sync( state: &mut OpState, rid: ResourceId, exclusive: bool, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable(state, "Deno.flockSync"); - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; file.lock_sync(exclusive)?; Ok(()) } #[op] -async fn op_flock_async( +async fn 
op_fs_flock_async( state: Rc>, rid: ResourceId, exclusive: bool, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable2(&state, "Deno.flock"); - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; file.lock_async(exclusive).await?; Ok(()) } #[op] -fn op_funlock_sync( +fn op_fs_funlock_sync( state: &mut OpState, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable(state, "Deno.funlockSync"); - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; file.unlock_sync()?; Ok(()) } #[op] -async fn op_funlock_async( +async fn op_fs_funlock_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable2(&state, "Deno.funlock"); - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; file.unlock_async().await?; Ok(()) } #[op] -fn op_ftruncate_sync( +fn op_fs_ftruncate_sync( state: &mut OpState, rid: ResourceId, len: u64, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.truncate_sync(len)?; Ok(()) } #[op] -async fn op_ftruncate_async( +async fn op_fs_ftruncate_async( state: Rc>, rid: ResourceId, len: u64, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file.truncate_async(len).await?; Ok(()) } #[op] -fn op_futime_sync( +fn op_fs_futime_sync( state: &mut OpState, rid: ResourceId, atime_secs: i64, 
atime_nanos: u32, mtime_secs: i64, mtime_nanos: u32, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.utime_sync(atime_secs, atime_nanos, mtime_secs, mtime_nanos)?; Ok(()) } #[op] -async fn op_futime_async( +async fn op_fs_futime_async( state: Rc>, rid: ResourceId, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, mtime_nanos: u32, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file .utime_async(atime_secs, atime_nanos, mtime_secs, mtime_nanos) .await?; @@ -1724,6 +1596,10 @@ create_struct_writer! { rdev: u64, blksize: u64, blocks: u64, + is_block_device: bool, + is_char_device: bool, + is_fifo: bool, + is_socket: bool, } } @@ -1751,6 +1627,10 @@ impl From for SerializableStat { rdev: stat.rdev, blksize: stat.blksize, blocks: stat.blocks, + is_block_device: stat.is_block_device, + is_char_device: stat.is_char_device, + is_fifo: stat.is_fifo, + is_socket: stat.is_socket, } } } diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 49d113c011..9baf74a2a4 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -4,34 +4,30 @@ use std::fs; use std::io; -use std::io::Read; -use std::io::Seek; use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use std::time::SystemTime; -use std::time::UNIX_EPOCH; -use deno_io::StdFileResource; -use fs3::FileExt; +use deno_core::task::spawn_blocking; +use deno_io::fs::File; +use deno_io::fs::FsResult; +use deno_io::fs::FsStat; +use deno_io::StdFileResourceInner; use crate::interface::FsDirEntry; -use crate::interface::FsError; use crate::interface::FsFileType; -use crate::interface::FsResult; -use crate::interface::FsStat; -use crate::File; use 
crate::FileSystem; use crate::OpenOptions; -#[derive(Clone)] -pub struct StdFs; +#[cfg(not(unix))] +use deno_io::fs::FsError; + +#[derive(Debug, Clone)] +pub struct RealFs; #[async_trait::async_trait(?Send)] -impl FileSystem for StdFs { - type File = StdFileResource; - +impl FileSystem for RealFs { fn cwd(&self) -> FsResult { std::env::current_dir().map_err(Into::into) } @@ -40,7 +36,7 @@ impl FileSystem for StdFs { Ok(std::env::temp_dir()) } - fn chdir(&self, path: impl AsRef) -> FsResult<()> { + fn chdir(&self, path: &Path) -> FsResult<()> { std::env::set_current_dir(path).map_err(Into::into) } @@ -78,27 +74,26 @@ impl FileSystem for StdFs { fn open_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, - ) -> FsResult { + ) -> FsResult> { let opts = open_options(options); let std_file = opts.open(path)?; - Ok(StdFileResource::fs_file(std_file)) + Ok(Rc::new(StdFileResourceInner::file(std_file))) } async fn open_async( &self, path: PathBuf, options: OpenOptions, - ) -> FsResult { + ) -> FsResult> { let opts = open_options(options); - let std_file = - tokio::task::spawn_blocking(move || opts.open(path)).await??; - Ok(StdFileResource::fs_file(std_file)) + let std_file = spawn_blocking(move || opts.open(path)).await??; + Ok(Rc::new(StdFileResourceInner::file(std_file))) } fn mkdir_sync( &self, - path: impl AsRef, + path: &Path, recursive: bool, mode: u32, ) -> FsResult<()> { @@ -110,19 +105,19 @@ impl FileSystem for StdFs { recursive: bool, mode: u32, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || mkdir(path, recursive, mode)).await? + spawn_blocking(move || mkdir(&path, recursive, mode)).await? } - fn chmod_sync(&self, path: impl AsRef, mode: u32) -> FsResult<()> { + fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> { chmod(path, mode) } async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { - tokio::task::spawn_blocking(move || chmod(path, mode)).await? + spawn_blocking(move || chmod(&path, mode)).await? 
} fn chown_sync( &self, - path: impl AsRef, + path: &Path, uid: Option, gid: Option, ) -> FsResult<()> { @@ -134,68 +129,52 @@ impl FileSystem for StdFs { uid: Option, gid: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || chown(path, uid, gid)).await? + spawn_blocking(move || chown(&path, uid, gid)).await? } - fn remove_sync( - &self, - path: impl AsRef, - recursive: bool, - ) -> FsResult<()> { + fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> { remove(path, recursive) } async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { - tokio::task::spawn_blocking(move || remove(path, recursive)).await? + spawn_blocking(move || remove(&path, recursive)).await? } - fn copy_file_sync( - &self, - from: impl AsRef, - to: impl AsRef, - ) -> FsResult<()> { + fn copy_file_sync(&self, from: &Path, to: &Path) -> FsResult<()> { copy_file(from, to) } async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - tokio::task::spawn_blocking(move || copy_file(from, to)).await? + spawn_blocking(move || copy_file(&from, &to)).await? } - fn stat_sync(&self, path: impl AsRef) -> FsResult { + fn stat_sync(&self, path: &Path) -> FsResult { stat(path).map(Into::into) } async fn stat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || stat(path)) - .await? - .map(Into::into) + spawn_blocking(move || stat(&path)).await?.map(Into::into) } - fn lstat_sync(&self, path: impl AsRef) -> FsResult { + fn lstat_sync(&self, path: &Path) -> FsResult { lstat(path).map(Into::into) } async fn lstat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || lstat(path)) - .await? 
- .map(Into::into) + spawn_blocking(move || lstat(&path)).await?.map(Into::into) } - fn realpath_sync(&self, path: impl AsRef) -> FsResult { + fn realpath_sync(&self, path: &Path) -> FsResult { realpath(path) } async fn realpath_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || realpath(path)).await? + spawn_blocking(move || realpath(&path)).await? } - fn read_dir_sync(&self, path: impl AsRef) -> FsResult> { + fn read_dir_sync(&self, path: &Path) -> FsResult> { read_dir(path) } async fn read_dir_async(&self, path: PathBuf) -> FsResult> { - tokio::task::spawn_blocking(move || read_dir(path)).await? + spawn_blocking(move || read_dir(&path)).await? } - fn rename_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()> { + fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { fs::rename(oldpath, newpath).map_err(Into::into) } async fn rename_async( @@ -203,16 +182,12 @@ impl FileSystem for StdFs { oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || fs::rename(oldpath, newpath)) + spawn_blocking(move || fs::rename(oldpath, newpath)) .await? .map_err(Into::into) } - fn link_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()> { + fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { fs::hard_link(oldpath, newpath).map_err(Into::into) } async fn link_async( @@ -220,15 +195,15 @@ impl FileSystem for StdFs { oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || fs::hard_link(oldpath, newpath)) + spawn_blocking(move || fs::hard_link(oldpath, newpath)) .await? 
.map_err(Into::into) } fn symlink_sync( &self, - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()> { symlink(oldpath, newpath, file_type) @@ -239,29 +214,28 @@ impl FileSystem for StdFs { newpath: PathBuf, file_type: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || symlink(oldpath, newpath, file_type)) - .await? + spawn_blocking(move || symlink(&oldpath, &newpath, file_type)).await? } - fn read_link_sync(&self, path: impl AsRef) -> FsResult { + fn read_link_sync(&self, path: &Path) -> FsResult { fs::read_link(path).map_err(Into::into) } async fn read_link_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || fs::read_link(path)) + spawn_blocking(move || fs::read_link(path)) .await? .map_err(Into::into) } - fn truncate_sync(&self, path: impl AsRef, len: u64) -> FsResult<()> { + fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> { truncate(path, len) } async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { - tokio::task::spawn_blocking(move || truncate(path, len)).await? + spawn_blocking(move || truncate(&path, len)).await? } fn utime_sync( &self, - path: impl AsRef, + path: &Path, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, @@ -281,7 +255,7 @@ impl FileSystem for StdFs { ) -> FsResult<()> { let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { filetime::set_file_times(path, atime, mtime).map_err(Into::into) }) .await? 
@@ -289,7 +263,7 @@ impl FileSystem for StdFs { fn write_file_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, data: &[u8], ) -> FsResult<()> { @@ -310,7 +284,7 @@ impl FileSystem for StdFs { options: OpenOptions, data: Vec, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let opts = open_options(options); let mut file = opts.open(path)?; #[cfg(unix)] @@ -324,17 +298,17 @@ impl FileSystem for StdFs { .await? } - fn read_file_sync(&self, path: impl AsRef) -> FsResult> { + fn read_file_sync(&self, path: &Path) -> FsResult> { fs::read(path).map_err(Into::into) } async fn read_file_async(&self, path: PathBuf) -> FsResult> { - tokio::task::spawn_blocking(move || fs::read(path)) + spawn_blocking(move || fs::read(path)) .await? .map_err(Into::into) } } -fn mkdir(path: impl AsRef, recursive: bool, mode: u32) -> FsResult<()> { +fn mkdir(path: &Path, recursive: bool, mode: u32) -> FsResult<()> { let mut builder = fs::DirBuilder::new(); builder.recursive(recursive); #[cfg(unix)] @@ -350,7 +324,7 @@ fn mkdir(path: impl AsRef, recursive: bool, mode: u32) -> FsResult<()> { } #[cfg(unix)] -fn chmod(path: impl AsRef, mode: u32) -> FsResult<()> { +fn chmod(path: &Path, mode: u32) -> FsResult<()> { use std::os::unix::fs::PermissionsExt; let permissions = fs::Permissions::from_mode(mode); fs::set_permissions(path, permissions)?; @@ -359,24 +333,20 @@ fn chmod(path: impl AsRef, mode: u32) -> FsResult<()> { // TODO: implement chmod for Windows (#4357) #[cfg(not(unix))] -fn chmod(path: impl AsRef, _mode: u32) -> FsResult<()> { +fn chmod(path: &Path, _mode: u32) -> FsResult<()> { // Still check file/dir exists on Windows std::fs::metadata(path)?; Err(FsError::NotSupported) } #[cfg(unix)] -fn chown( - path: impl AsRef, - uid: Option, - gid: Option, -) -> FsResult<()> { +fn chown(path: &Path, uid: Option, gid: Option) -> FsResult<()> { use nix::unistd::chown; use nix::unistd::Gid; use nix::unistd::Uid; let owner = 
uid.map(Uid::from_raw); let group = gid.map(Gid::from_raw); - let res = chown(path.as_ref(), owner, group); + let res = chown(path, owner, group); if let Err(err) = res { return Err(io::Error::from_raw_os_error(err as i32).into()); } @@ -385,60 +355,57 @@ fn chown( // TODO: implement chown for Windows #[cfg(not(unix))] -fn chown( - _path: impl AsRef, - _uid: Option, - _gid: Option, -) -> FsResult<()> { +fn chown(_path: &Path, _uid: Option, _gid: Option) -> FsResult<()> { Err(FsError::NotSupported) } -fn remove(path: impl AsRef, recursive: bool) -> FsResult<()> { +fn remove(path: &Path, recursive: bool) -> FsResult<()> { // TODO: this is racy. This should open fds, and then `unlink` those. - let metadata = fs::symlink_metadata(&path)?; + let metadata = fs::symlink_metadata(path)?; let file_type = metadata.file_type(); let res = if file_type.is_dir() { if recursive { - fs::remove_dir_all(&path) + fs::remove_dir_all(path) } else { - fs::remove_dir(&path) + fs::remove_dir(path) } } else if file_type.is_symlink() { #[cfg(unix)] { - fs::remove_file(&path) + fs::remove_file(path) } #[cfg(not(unix))] { use std::os::windows::prelude::MetadataExt; use winapi::um::winnt::FILE_ATTRIBUTE_DIRECTORY; if metadata.file_attributes() & FILE_ATTRIBUTE_DIRECTORY != 0 { - fs::remove_dir(&path) + fs::remove_dir(path) } else { - fs::remove_file(&path) + fs::remove_file(path) } } } else { - fs::remove_file(&path) + fs::remove_file(path) }; res.map_err(Into::into) } -fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { +fn copy_file(from: &Path, to: &Path) -> FsResult<()> { #[cfg(target_os = "macos")] { use libc::clonefile; use libc::stat; use libc::unlink; use std::ffi::CString; + use std::io::Read; use std::os::unix::fs::OpenOptionsExt; use std::os::unix::fs::PermissionsExt; use std::os::unix::prelude::OsStrExt; - let from_str = CString::new(from.as_ref().as_os_str().as_bytes()).unwrap(); - let to_str = CString::new(to.as_ref().as_os_str().as_bytes()).unwrap(); + let from_str 
= CString::new(from.as_os_str().as_bytes()).unwrap(); + let to_str = CString::new(to.as_os_str().as_bytes()).unwrap(); // SAFETY: `from` and `to` are valid C strings. // std::fs::copy does open() + fcopyfile() on macOS. We try to use @@ -462,7 +429,7 @@ fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { // Do a regular copy. fcopyfile() is an overkill for < 128KB // files. let mut buf = [0u8; 128 * 1024]; - let mut from_file = fs::File::open(&from)?; + let mut from_file = fs::File::open(from)?; let perm = from_file.metadata()?.permissions(); let mut to_file = fs::OpenOptions::new() @@ -471,7 +438,7 @@ fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { .write(true) .create(true) .truncate(true) - .open(&to)?; + .open(to)?; let writer_metadata = to_file.metadata()?; if writer_metadata.is_file() { // Set the correct file permissions, in case the file already existed. @@ -499,36 +466,37 @@ fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { } #[cfg(not(windows))] -fn stat(path: impl AsRef) -> FsResult { +fn stat(path: &Path) -> FsResult { let metadata = fs::metadata(path)?; - Ok(metadata_to_fsstat(metadata)) + Ok(FsStat::from_std(metadata)) } #[cfg(windows)] -fn stat(path: impl AsRef) -> FsResult { - let metadata = fs::metadata(path.as_ref())?; - let mut fsstat = metadata_to_fsstat(metadata); +fn stat(path: &Path) -> FsResult { + let metadata = fs::metadata(path)?; + let mut fsstat = FsStat::from_std(metadata); use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; - let path = path.as_ref().canonicalize()?; + let path = path.canonicalize()?; stat_extra(&mut fsstat, &path, FILE_FLAG_BACKUP_SEMANTICS)?; Ok(fsstat) } #[cfg(not(windows))] -fn lstat(path: impl AsRef) -> FsResult { +fn lstat(path: &Path) -> FsResult { let metadata = fs::symlink_metadata(path)?; - Ok(metadata_to_fsstat(metadata)) + Ok(FsStat::from_std(metadata)) } #[cfg(windows)] -fn lstat(path: impl AsRef) -> FsResult { - let metadata = 
fs::symlink_metadata(path.as_ref())?; - let mut fsstat = metadata_to_fsstat(metadata); +fn lstat(path: &Path) -> FsResult { use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; use winapi::um::winbase::FILE_FLAG_OPEN_REPARSE_POINT; + + let metadata = fs::symlink_metadata(path)?; + let mut fsstat = FsStat::from_std(metadata); stat_extra( &mut fsstat, - path.as_ref(), + path, FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, )?; Ok(fsstat) @@ -595,70 +563,11 @@ fn stat_extra( } } -#[inline(always)] -fn metadata_to_fsstat(metadata: fs::Metadata) -> FsStat { - macro_rules! unix_or_zero { - ($member:ident) => {{ - #[cfg(unix)] - { - use std::os::unix::fs::MetadataExt; - metadata.$member() - } - #[cfg(not(unix))] - { - 0 - } - }}; - } - - #[inline(always)] - fn to_msec(maybe_time: Result) -> Option { - match maybe_time { - Ok(time) => Some( - time - .duration_since(UNIX_EPOCH) - .map(|t| t.as_millis() as u64) - .unwrap_or_else(|err| err.duration().as_millis() as u64), - ), - Err(_) => None, - } - } - - FsStat { - is_file: metadata.is_file(), - is_directory: metadata.is_dir(), - is_symlink: metadata.file_type().is_symlink(), - size: metadata.len(), - - mtime: to_msec(metadata.modified()), - atime: to_msec(metadata.accessed()), - birthtime: to_msec(metadata.created()), - - dev: unix_or_zero!(dev), - ino: unix_or_zero!(ino), - mode: unix_or_zero!(mode), - nlink: unix_or_zero!(nlink), - uid: unix_or_zero!(uid), - gid: unix_or_zero!(gid), - rdev: unix_or_zero!(rdev), - blksize: unix_or_zero!(blksize), - blocks: unix_or_zero!(blocks), - } +fn realpath(path: &Path) -> FsResult { + Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) } -fn realpath(path: impl AsRef) -> FsResult { - let canonicalized_path = path.as_ref().canonicalize()?; - #[cfg(windows)] - let canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - Ok(canonicalized_path) -} - -fn read_dir(path: impl AsRef) -> FsResult> { +fn 
read_dir(path: &Path) -> FsResult> { let entries = fs::read_dir(path)? .filter_map(|entry| { let entry = entry.ok()?; @@ -687,24 +596,24 @@ fn read_dir(path: impl AsRef) -> FsResult> { #[cfg(not(windows))] fn symlink( - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, _file_type: Option, ) -> FsResult<()> { - std::os::unix::fs::symlink(oldpath.as_ref(), newpath.as_ref())?; + std::os::unix::fs::symlink(oldpath, newpath)?; Ok(()) } #[cfg(windows)] fn symlink( - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()> { let file_type = match file_type { Some(file_type) => file_type, None => { - let old_meta = fs::metadata(&oldpath); + let old_meta = fs::metadata(oldpath); match old_meta { Ok(metadata) => { if metadata.is_file() { @@ -731,17 +640,17 @@ fn symlink( match file_type { FsFileType::File => { - std::os::windows::fs::symlink_file(&oldpath, &newpath)?; + std::os::windows::fs::symlink_file(oldpath, newpath)?; } FsFileType::Directory => { - std::os::windows::fs::symlink_dir(&oldpath, &newpath)?; + std::os::windows::fs::symlink_dir(oldpath, newpath)?; } }; Ok(()) } -fn truncate(path: impl AsRef, len: u64) -> FsResult<()> { +fn truncate(path: &Path, len: u64) -> FsResult<()> { let file = fs::OpenOptions::new().write(true).open(path)?; file.set_len(len)?; Ok(()) @@ -768,162 +677,3 @@ fn open_options(options: OpenOptions) -> fs::OpenOptions { open_options.create_new(options.create_new); open_options } - -fn sync( - resource: Rc, - f: impl FnOnce(&mut fs::File) -> io::Result, -) -> FsResult { - let res = resource - .with_file2(|file| f(file)) - .ok_or(FsError::FileBusy)??; - Ok(res) -} - -async fn nonblocking( - resource: Rc, - f: impl FnOnce(&mut fs::File) -> io::Result + Send + 'static, -) -> FsResult { - let res = resource.with_file_blocking_task2(f).await?; - Ok(res) -} - -#[async_trait::async_trait(?Send)] -impl File for StdFileResource { - fn write_all_sync(self: Rc, buf: 
&[u8]) -> FsResult<()> { - sync(self, |file| file.write_all(buf)) - } - async fn write_all_async(self: Rc, buf: Vec) -> FsResult<()> { - nonblocking(self, move |file| file.write_all(&buf)).await - } - - fn read_all_sync(self: Rc) -> FsResult> { - sync(self, |file| { - let mut buf = Vec::new(); - file.read_to_end(&mut buf)?; - Ok(buf) - }) - } - async fn read_all_async(self: Rc) -> FsResult> { - nonblocking(self, |file| { - let mut buf = Vec::new(); - file.read_to_end(&mut buf)?; - Ok(buf) - }) - .await - } - - fn chmod_sync(self: Rc, _mode: u32) -> FsResult<()> { - #[cfg(unix)] - { - sync(self, |file| { - use std::os::unix::prelude::PermissionsExt; - file.set_permissions(fs::Permissions::from_mode(_mode)) - }) - } - #[cfg(not(unix))] - Err(FsError::NotSupported) - } - - async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { - #[cfg(unix)] - { - nonblocking(self, move |file| { - use std::os::unix::prelude::PermissionsExt; - file.set_permissions(fs::Permissions::from_mode(_mode)) - }) - .await - } - #[cfg(not(unix))] - Err(FsError::NotSupported) - } - - fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult { - sync(self, |file| file.seek(pos)) - } - async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult { - nonblocking(self, move |file| file.seek(pos)).await - } - - fn datasync_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.sync_data()) - } - async fn datasync_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.sync_data()).await - } - - fn sync_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.sync_all()) - } - async fn sync_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.sync_all()).await - } - - fn stat_sync(self: Rc) -> FsResult { - sync(self, |file| file.metadata().map(metadata_to_fsstat)) - } - async fn stat_async(self: Rc) -> FsResult { - nonblocking(self, |file| file.metadata().map(metadata_to_fsstat)).await - } - - fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()> { - sync(self, |file| { - 
if exclusive { - file.lock_exclusive() - } else { - file.lock_shared() - } - }) - } - async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()> { - nonblocking(self, move |file| { - if exclusive { - file.lock_exclusive() - } else { - file.lock_shared() - } - }) - .await - } - - fn unlock_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.unlock()) - } - async fn unlock_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.unlock()).await - } - - fn truncate_sync(self: Rc, len: u64) -> FsResult<()> { - sync(self, |file| file.set_len(len)) - } - async fn truncate_async(self: Rc, len: u64) -> FsResult<()> { - nonblocking(self, move |file| file.set_len(len)).await - } - - fn utime_sync( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); - let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - sync(self, |file| { - filetime::set_file_handle_times(file, Some(atime), Some(mtime)) - }) - } - async fn utime_async( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); - let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - nonblocking(self, move |file| { - filetime::set_file_handle_times(file, Some(atime), Some(mtime)) - }) - .await - } -} diff --git a/ext/fs/sync.rs b/ext/fs/sync.rs new file mode 100644 index 0000000000..c43850c287 --- /dev/null +++ b/ext/fs/sync.rs @@ -0,0 +1,22 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +pub use inner::*; + +#[cfg(feature = "sync_fs")] +mod inner { + #![allow(clippy::disallowed_types)] + pub use std::sync::Arc as MaybeArc; + + pub use core::marker::Send as MaybeSend; + pub use core::marker::Sync as MaybeSync; +} + +#[cfg(not(feature = "sync_fs"))] +mod inner { + pub use std::rc::Rc as MaybeArc; + + pub trait MaybeSync {} + impl MaybeSync for T where T: ?Sized {} + pub trait MaybeSend {} + impl MaybeSend for T where T: ?Sized {} +} diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js new file mode 100644 index 0000000000..7c9b290695 --- /dev/null +++ b/ext/http/00_serve.js @@ -0,0 +1,756 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +// deno-lint-ignore-file camelcase +const core = globalThis.Deno.core; +const primordials = globalThis.__bootstrap.primordials; +const internals = globalThis.__bootstrap.internals; + +const { BadResourcePrototype } = core; +import { InnerBody } from "ext:deno_fetch/22_body.js"; +import { Event } from "ext:deno_web/02_event.js"; +import { + fromInnerResponse, + newInnerResponse, + toInnerResponse, +} from "ext:deno_fetch/23_response.js"; +import { fromInnerRequest, toInnerRequest } from "ext:deno_fetch/23_request.js"; +import { AbortController } from "ext:deno_web/03_abort_signal.js"; +import { + _eventLoop, + _idleTimeoutDuration, + _idleTimeoutTimeout, + _protocol, + _readyState, + _rid, + _role, + _server, + _serverHandleIdleTimeout, + SERVER, + WebSocket, +} from "ext:deno_websocket/01_websocket.js"; +import { + Deferred, + getReadableStreamResourceBacking, + readableStreamClose, + readableStreamForRid, + ReadableStreamPrototype, +} from "ext:deno_web/06_streams.js"; +import { listen, TcpConn } from "ext:deno_net/01_net.js"; +import { listenTls } from "ext:deno_net/02_tls.js"; +const { + ArrayPrototypePush, + ObjectPrototypeIsPrototypeOf, + PromisePrototypeCatch, + SafeSet, + SafeSetIterator, + SetPrototypeAdd, + SetPrototypeDelete, + Symbol, + SymbolFor, + TypeError, + 
Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + op_http_get_request_headers, + op_http_get_request_method_and_url, + op_http_read_request_body, + op_http_serve, + op_http_serve_on, + op_http_set_promise_complete, + op_http_set_response_body_bytes, + op_http_set_response_body_resource, + op_http_set_response_body_stream, + op_http_set_response_body_text, + op_http_set_response_header, + op_http_set_response_headers, + op_http_set_response_trailers, + op_http_upgrade_raw, + op_http_upgrade_websocket_next, + op_http_try_wait, + op_http_wait, +} = core.ensureFastOps(); +const _upgraded = Symbol("_upgraded"); + +function internalServerError() { + // "Internal Server Error" + return new Response( + new Uint8Array([ + 73, + 110, + 116, + 101, + 114, + 110, + 97, + 108, + 32, + 83, + 101, + 114, + 118, + 101, + 114, + 32, + 69, + 114, + 114, + 111, + 114, + ]), + { status: 500 }, + ); +} + +// Used to ensure that user returns a valid response (but not a different response) from handlers that are upgraded. 
+const UPGRADE_RESPONSE_SENTINEL = fromInnerResponse( + newInnerResponse(101), + "immutable", +); + +function upgradeHttpRaw(req, conn) { + const inner = toInnerRequest(req); + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeHttpRaw", conn); + } + throw new TypeError("upgradeHttpRaw may only be used with Deno.serve"); +} + +function addTrailers(resp, headerList) { + const inner = toInnerResponse(resp); + op_http_set_response_trailers(inner.slabId, headerList); +} + +class InnerRequest { + #slabId; + #context; + #methodAndUri; + #streamRid; + #body; + #upgraded; + + constructor(slabId, context) { + this.#slabId = slabId; + this.#context = context; + this.#upgraded = false; + } + + close() { + if (this.#streamRid !== undefined) { + core.close(this.#streamRid); + this.#streamRid = undefined; + } + this.#slabId = undefined; + } + + get [_upgraded]() { + return this.#upgraded; + } + + _wantsUpgrade(upgradeType, ...originalArgs) { + if (this.#upgraded) { + throw new Deno.errors.Http("already upgraded"); + } + if (this.#slabId === undefined) { + throw new Deno.errors.Http("already closed"); + } + + // upgradeHttp is async + // TODO(mmastrac) + if (upgradeType == "upgradeHttp") { + throw "upgradeHttp is unavailable in Deno.serve at this time"; + } + + // upgradeHttpRaw is sync + if (upgradeType == "upgradeHttpRaw") { + const slabId = this.#slabId; + const underlyingConn = originalArgs[0]; + + this.url(); + this.headerList; + this.close(); + + this.#upgraded = () => {}; + + const upgradeRid = op_http_upgrade_raw(slabId); + + const conn = new TcpConn( + upgradeRid, + underlyingConn?.remoteAddr, + underlyingConn?.localAddr, + ); + + return { response: UPGRADE_RESPONSE_SENTINEL, conn }; + } + + // upgradeWebSocket is sync + if (upgradeType == "upgradeWebSocket") { + const response = originalArgs[0]; + const ws = originalArgs[1]; + + const slabId = this.#slabId; + + this.url(); + this.headerList; + this.close(); + + const goAhead = new Deferred(); + 
this.#upgraded = () => { + goAhead.resolve(); + }; + + // Start the upgrade in the background. + (async () => { + try { + // Returns the upgraded websocket connection + const wsRid = await op_http_upgrade_websocket_next( + slabId, + response.headerList, + ); + + // We have to wait for the go-ahead signal + await goAhead; + + ws[_rid] = wsRid; + ws[_readyState] = WebSocket.OPEN; + ws[_role] = SERVER; + const event = new Event("open"); + ws.dispatchEvent(event); + + ws[_eventLoop](); + if (ws[_idleTimeoutDuration]) { + ws.addEventListener( + "close", + () => clearTimeout(ws[_idleTimeoutTimeout]), + ); + } + ws[_serverHandleIdleTimeout](); + } catch (error) { + const event = new ErrorEvent("error", { error }); + ws.dispatchEvent(event); + } + })(); + return { response: UPGRADE_RESPONSE_SENTINEL, socket: ws }; + } + } + + url() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + // TODO(mmastrac): This is quite slow as we're serializing a large number of values. We may want to consider + // splitting this up into multiple ops. 
+ this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); + } + + const path = this.#methodAndUri[2]; + + // * is valid for OPTIONS + if (path === "*") { + return "*"; + } + + // If the path is empty, return the authority (valid for CONNECT) + if (path == "") { + return this.#methodAndUri[1]; + } + + // CONNECT requires an authority + if (this.#methodAndUri[0] == "CONNECT") { + return this.#methodAndUri[1]; + } + + const hostname = this.#methodAndUri[1]; + if (hostname) { + // Construct a URL from the scheme, the hostname, and the path + return this.#context.scheme + hostname + path; + } + + // Construct a URL from the scheme, the fallback hostname, and the path + return this.#context.scheme + this.#context.fallbackHost + path; + } + + get remoteAddr() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); + } + return { + transport: "tcp", + hostname: this.#methodAndUri[3], + port: this.#methodAndUri[4], + }; + } + + get method() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); + } + return this.#methodAndUri[0]; + } + + get body() { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + if (this.#body !== undefined) { + return this.#body; + } + // If the method is GET or HEAD, we do not want to include a body here, even if the Rust + // side of the code is willing to provide it to us. 
+ if (this.method == "GET" || this.method == "HEAD") { + this.#body = null; + return null; + } + this.#streamRid = op_http_read_request_body(this.#slabId); + this.#body = new InnerBody(readableStreamForRid(this.#streamRid, false)); + return this.#body; + } + + get headerList() { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + const headers = []; + const reqHeaders = op_http_get_request_headers(this.#slabId); + for (let i = 0; i < reqHeaders.length; i += 2) { + ArrayPrototypePush(headers, [reqHeaders[i], reqHeaders[i + 1]]); + } + return headers; + } + + get slabId() { + return this.#slabId; + } +} + +class CallbackContext { + abortController; + responseBodies; + scheme; + fallbackHost; + serverRid; + closed; + + constructor(signal, args) { + signal?.addEventListener( + "abort", + () => this.close(), + { once: true }, + ); + this.abortController = new AbortController(); + this.responseBodies = new SafeSet(); + this.serverRid = args[0]; + this.scheme = args[1]; + this.fallbackHost = args[2]; + this.closed = false; + } + + close() { + try { + this.closed = true; + core.tryClose(this.serverRid); + } catch { + // Pass + } + } +} + +function fastSyncResponseOrStream(req, respBody) { + if (respBody === null || respBody === undefined) { + // Don't set the body + return null; + } + + const stream = respBody.streamOrStatic; + const body = stream.body; + + if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, body)) { + op_http_set_response_body_bytes(req, body); + return null; + } + + if (typeof body === "string") { + op_http_set_response_body_text(req, body); + return null; + } + + // At this point in the response it needs to be a stream + if (!ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, stream)) { + throw TypeError("invalid response"); + } + const resourceBacking = getReadableStreamResourceBacking(stream); + if (resourceBacking) { + op_http_set_response_body_resource( + req, + resourceBacking.rid, + resourceBacking.autoClose, + 
); + return null; + } + + return stream; +} + +async function asyncResponse(responseBodies, req, status, stream) { + const reader = stream.getReader(); + let responseRid; + let closed = false; + let timeout; + + try { + // IMPORTANT: We get a performance boost from this optimization, but V8 is very + // sensitive to the order and structure. Benchmark any changes to this code. + + // Optimize for streams that are done in zero or one packets. We will not + // have to allocate a resource in this case. + const { value: value1, done: done1 } = await reader.read(); + if (done1) { + closed = true; + // Exit 1: no response body at all, extreme fast path + // Reader will be closed by finally block + return; + } + + // The second value cannot block indefinitely, as someone may be waiting on a response + // of the first packet that may influence this packet. We set this timeout arbitrarily to 250ms + // and we race it. + let timeoutPromise; + timeout = setTimeout(() => { + responseRid = op_http_set_response_body_stream(req); + SetPrototypeAdd(responseBodies, responseRid); + op_http_set_promise_complete(req, status); + timeoutPromise = core.writeAll(responseRid, value1); + }, 250); + const { value: value2, done: done2 } = await reader.read(); + + if (timeoutPromise) { + await timeoutPromise; + if (done2) { + closed = true; + // Exit 2(a): read 2 is EOS, and timeout resolved. + // Reader will be closed by finally block + // Response stream will be closed by finally block. + return; + } + + // Timeout resolved, value1 written but read2 is not EOS. Carry value2 forward. + } else { + clearTimeout(timeout); + timeout = undefined; + + if (done2) { + // Exit 2(b): read 2 is EOS, and timeout did not resolve as we read fast enough. 
+ // Reader will be closed by finally block + // No response stream + closed = true; + op_http_set_response_body_bytes(req, value1); + return; + } + + responseRid = op_http_set_response_body_stream(req); + SetPrototypeAdd(responseBodies, responseRid); + op_http_set_promise_complete(req, status); + // Write our first packet + await core.writeAll(responseRid, value1); + } + + await core.writeAll(responseRid, value2); + while (true) { + const { value, done } = await reader.read(); + if (done) { + closed = true; + break; + } + await core.writeAll(responseRid, value); + } + } catch (error) { + closed = true; + try { + await reader.cancel(error); + } catch { + // Pass + } + } finally { + if (!closed) { + readableStreamClose(reader); + } + if (timeout !== undefined) { + clearTimeout(timeout); + } + if (responseRid) { + core.tryClose(responseRid); + SetPrototypeDelete(responseBodies, responseRid); + } else { + op_http_set_promise_complete(req, status); + } + } +} + +/** + * Maps the incoming request slab ID to a fully-fledged Request object, passes it to the user-provided + * callback, then extracts the response that was returned from that callback. The response is then pulled + * apart and handled on the Rust side. + * + * This function returns a promise that will only reject in the case of abnormal exit. + */ +function mapToCallback(context, callback, onError) { + const responseBodies = context.responseBodies; + const signal = context.abortController.signal; + return async function (req) { + // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback + // 500 error. 
+ let innerRequest; + let response; + try { + if (callback.length > 0) { + innerRequest = new InnerRequest(req, context); + const request = fromInnerRequest(innerRequest, signal, "immutable"); + if (callback.length === 1) { + response = await callback(request); + } else { + response = await callback(request, { + get remoteAddr() { + return innerRequest.remoteAddr; + }, + }); + } + } else { + response = await callback(); + } + } catch (error) { + try { + response = await onError(error); + } catch (error) { + console.error("Exception in onError while handling exception", error); + response = internalServerError(); + } + } + + const inner = toInnerResponse(response); + if (innerRequest?.[_upgraded]) { + // We're done here as the connection has been upgraded during the callback and no longer requires servicing. + if (response !== UPGRADE_RESPONSE_SENTINEL) { + console.error("Upgrade response was not returned from callback"); + context.close(); + } + innerRequest?.[_upgraded](); + return; + } + + // Did everything shut down while we were waiting? 
+ if (context.closed) { + // We're shutting down, so this status shouldn't make it back to the client but "Service Unavailable" seems appropriate + op_http_set_promise_complete(req, 503); + innerRequest?.close(); + return; + } + + const status = inner.status; + const headers = inner.headerList; + if (headers && headers.length > 0) { + if (headers.length == 1) { + op_http_set_response_header(req, headers[0][0], headers[0][1]); + } else { + op_http_set_response_headers(req, headers); + } + } + + // Attempt to respond quickly to this request, otherwise extract the stream + const stream = fastSyncResponseOrStream(req, inner.body); + if (stream !== null) { + // Handle the stream asynchronously + await asyncResponse(responseBodies, req, status, stream); + } else { + op_http_set_promise_complete(req, status); + } + + innerRequest?.close(); + }; +} + +function serve(arg1, arg2) { + let options = undefined; + let handler = undefined; + if (typeof arg1 === "function") { + handler = arg1; + } else if (typeof arg2 === "function") { + handler = arg2; + options = arg1; + } else { + options = arg1; + } + if (handler === undefined) { + if (options === undefined) { + throw new TypeError( + "No handler was provided, so an options bag is mandatory.", + ); + } + handler = options.handler; + } + if (typeof handler !== "function") { + throw new TypeError("A handler function must be provided."); + } + if (options === undefined) { + options = {}; + } + + const wantsHttps = options.cert || options.key; + const signal = options.signal; + const onError = options.onError ?? function (error) { + console.error(error); + return internalServerError(); + }; + const listenOpts = { + hostname: options.hostname ?? "0.0.0.0", + port: options.port ?? (wantsHttps ? 9000 : 8000), + reusePort: options.reusePort ?? 
false, + }; + + let listener; + if (wantsHttps) { + if (!options.cert || !options.key) { + throw new TypeError( + "Both cert and key must be provided to enable HTTPS.", + ); + } + listenOpts.cert = options.cert; + listenOpts.key = options.key; + listenOpts.alpnProtocols = ["h2", "http/1.1"]; + listener = listenTls(listenOpts); + listenOpts.port = listener.addr.port; + } else { + listener = listen(listenOpts); + listenOpts.port = listener.addr.port; + } + + const onListen = (scheme) => { + const port = listenOpts.port; + if (options.onListen) { + options.onListen({ port }); + } else { + // If the hostname is "0.0.0.0", we display "localhost" in console + // because browsers in Windows don't resolve "0.0.0.0". + // See the discussion in https://github.com/denoland/deno_std/issues/1165 + const hostname = listenOpts.hostname == "0.0.0.0" + ? "localhost" + : listenOpts.hostname; + console.log(`Listening on ${scheme}${hostname}:${port}/`); + } + }; + + return serveHttpOnListener(listener, signal, handler, onError, onListen); +} + +/** + * Serve HTTP/1.1 and/or HTTP/2 on an arbitrary listener. + */ +function serveHttpOnListener(listener, signal, handler, onError, onListen) { + const context = new CallbackContext(signal, op_http_serve(listener.rid)); + const callback = mapToCallback(context, handler, onError); + + onListen(context.scheme); + + return serveHttpOn(context, callback); +} + +/** + * Serve HTTP/1.1 and/or HTTP/2 on an arbitrary connection. 
+ */ +function serveHttpOnConnection(connection, signal, handler, onError, onListen) { + const context = new CallbackContext(signal, op_http_serve_on(connection.rid)); + const callback = mapToCallback(context, handler, onError); + + onListen(context.scheme); + + return serveHttpOn(context, callback); +} + +function serveHttpOn(context, callback) { + let ref = true; + let currentPromise = null; + const promiseIdSymbol = SymbolFor("Deno.core.internalPromiseId"); + + // Run the server + const finished = (async () => { + while (true) { + const rid = context.serverRid; + let req; + try { + // Attempt to pull as many requests out of the queue as possible before awaiting. This API is + // a synchronous, non-blocking API that returns u32::MAX if anything goes wrong. + while ((req = op_http_try_wait(rid)) !== 0xffffffff) { + PromisePrototypeCatch(callback(req), (error) => { + // Abnormal exit + console.error( + "Terminating Deno.serve loop due to unexpected error", + error, + ); + context.close(); + }); + } + currentPromise = op_http_wait(rid); + if (!ref) { + core.unrefOp(currentPromise[promiseIdSymbol]); + } + req = await currentPromise; + currentPromise = null; + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + break; + } + throw new Deno.errors.Http(error); + } + if (req === 0xffffffff) { + break; + } + PromisePrototypeCatch(callback(req), (error) => { + // Abnormal exit + console.error( + "Terminating Deno.serve loop due to unexpected error", + error, + ); + context.close(); + }); + } + + for (const streamRid of new SafeSetIterator(context.responseBodies)) { + core.tryClose(streamRid); + } + })(); + + return { + finished, + ref() { + ref = true; + if (currentPromise) { + core.refOp(currentPromise[promiseIdSymbol]); + } + }, + unref() { + ref = false; + if (currentPromise) { + core.unrefOp(currentPromise[promiseIdSymbol]); + } + }, + }; +} + +internals.addTrailers = addTrailers; +internals.upgradeHttpRaw = upgradeHttpRaw; 
+internals.serveHttpOnListener = serveHttpOnListener; +internals.serveHttpOnConnection = serveHttpOnConnection; + +export { + addTrailers, + serve, + serveHttpOnConnection, + serveHttpOnListener, + upgradeHttpRaw, +}; diff --git a/ext/http/01_http.js b/ext/http/01_http.js index 5bfa58655e..f9a8d2cdbc 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -1,8 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { BadResourcePrototype, InterruptedPrototype, ops } = core; +const { op_http_write } = Deno.core.ensureFastOps(); import * as webidl from "ext:deno_webidl/00_webidl.js"; import { InnerBody } from "ext:deno_fetch/22_body.js"; import { Event, setEventTargetData } from "ext:deno_web/02_event.js"; @@ -32,8 +36,8 @@ import { SERVER, WebSocket, } from "ext:deno_websocket/01_websocket.js"; -import { listen, TcpConn, UnixConn } from "ext:deno_net/01_net.js"; -import { listenTls, TlsConn } from "ext:deno_net/02_tls.js"; +import { TcpConn, UnixConn } from "ext:deno_net/01_net.js"; +import { TlsConn } from "ext:deno_net/02_tls.js"; import { Deferred, getReadableStreamResourceBacking, @@ -41,22 +45,22 @@ import { readableStreamForRid, ReadableStreamPrototype, } from "ext:deno_web/06_streams.js"; +import { serve } from "ext:deno_http/00_serve.js"; const { ArrayPrototypeIncludes, ArrayPrototypeMap, ArrayPrototypePush, Error, ObjectPrototypeIsPrototypeOf, - PromisePrototypeCatch, SafeSet, SafeSetIterator, SetPrototypeAdd, SetPrototypeDelete, - SetPrototypeClear, StringPrototypeCharCodeAt, StringPrototypeIncludes, - StringPrototypeToLowerCase, StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, Symbol, SymbolAsyncIterator, TypeError, @@ -65,7 +69,6 @@ const { } = primordials; const connErrorSymbol = Symbol("connError"); 
-const streamRid = Symbol("streamRid"); const _deferred = Symbol("upgradeHttpDeferred"); class HttpConn { @@ -322,7 +325,7 @@ function createRespondWith( break; } try { - await core.opAsync2("op_http_write", streamRid, value); + await op_http_write(streamRid, value); } catch (error) { const connError = httpConn[connErrorSymbol]; if ( @@ -406,6 +409,7 @@ const websocketCvf = buildCaseInsensitiveCommaValueFinder("websocket"); const upgradeCvf = buildCaseInsensitiveCommaValueFinder("upgrade"); function upgradeWebSocket(request, options = {}) { + const inner = toInnerRequest(request); const upgrade = request.headers.get("upgrade"); const upgradeHasWebSocketOption = upgrade !== null && websocketCvf(upgrade); @@ -455,29 +459,33 @@ function upgradeWebSocket(request, options = {}) { } } - const response = fromInnerResponse(r, "immutable"); - const socket = webidl.createBranded(WebSocket); setEventTargetData(socket); socket[_server] = true; - response[_ws] = socket; socket[_idleTimeoutDuration] = options.idleTimeout ?? 
120; socket[_idleTimeoutTimeout] = null; + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeWebSocket", r, socket); + } + + const response = fromInnerResponse(r, "immutable"); + + response[_ws] = socket; + return { response, socket }; } function upgradeHttp(req) { + const inner = toInnerRequest(req); + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeHttp", arguments); + } + req[_deferred] = new Deferred(); return req[_deferred].promise; } -async function upgradeHttpRaw(req, tcpConn) { - const inner = toInnerRequest(req); - const res = await core.opAsync("op_http_upgrade_early", inner[streamRid]); - return new TcpConn(res, tcpConn.remoteAddr, tcpConn.localAddr); -} - const spaceCharCode = StringPrototypeCharCodeAt(" ", 0); const tabCharCode = StringPrototypeCharCodeAt("\t", 0); const commaCharCode = StringPrototypeCharCodeAt(",", 0); @@ -494,17 +502,20 @@ function buildCaseInsensitiveCommaValueFinder(checkText) { StringPrototypeToLowerCase(checkText), "", ), - (c) => [c.charCodeAt(0), c.toUpperCase().charCodeAt(0)], + (c) => [ + StringPrototypeCharCodeAt(c, 0), + StringPrototypeCharCodeAt(StringPrototypeToUpperCase(c), 0), + ], ); /** @type {number} */ let i; /** @type {number} */ let char; - /** @param value {string} */ + /** @param {string} value */ return function (value) { for (i = 0; i < value.length; i++) { - char = value.charCodeAt(i); + char = StringPrototypeCharCodeAt(value, i); skipWhitespace(value); if (hasWord(value)) { @@ -552,233 +563,4 @@ function buildCaseInsensitiveCommaValueFinder(checkText) { internals.buildCaseInsensitiveCommaValueFinder = buildCaseInsensitiveCommaValueFinder; -function hostnameForDisplay(hostname) { - // If the hostname is "0.0.0.0", we display "localhost" in console - // because browsers in Windows don't resolve "0.0.0.0". - // See the discussion in https://github.com/denoland/deno_std/issues/1165 - return hostname === "0.0.0.0" ? 
"localhost" : hostname; -} - -async function respond(handler, requestEvent, connInfo, onError) { - let response; - - try { - response = await handler(requestEvent.request, connInfo); - - if (response.bodyUsed && response.body !== null) { - throw new TypeError("Response body already consumed."); - } - } catch (e) { - // Invoke `onError` handler if the request handler throws. - response = await onError(e); - } - - try { - // Send the response. - await requestEvent.respondWith(response); - } catch { - // `respondWith()` can throw for various reasons, including downstream and - // upstream connection errors, as well as errors thrown during streaming - // of the response content. In order to avoid false negatives, we ignore - // the error here and let `serveHttp` close the connection on the - // following iteration if it is in fact a downstream connection error. - } -} - -async function serveConnection( - server, - activeHttpConnections, - handler, - httpConn, - connInfo, - onError, -) { - while (!server.closed) { - let requestEvent = null; - - try { - // Yield the new HTTP request on the connection. - requestEvent = await httpConn.nextRequest(); - } catch { - // Connection has been closed. - break; - } - - if (requestEvent === null) { - break; - } - - respond(handler, requestEvent, connInfo, onError); - } - - SetPrototypeDelete(activeHttpConnections, httpConn); - try { - httpConn.close(); - } catch { - // Connection has already been closed. 
- } -} - -async function serve(arg1, arg2) { - let options = undefined; - let handler = undefined; - if (typeof arg1 === "function") { - handler = arg1; - options = arg2; - } else if (typeof arg2 === "function") { - handler = arg2; - options = arg1; - } else { - options = arg1; - } - if (handler === undefined) { - if (options === undefined) { - throw new TypeError( - "No handler was provided, so an options bag is mandatory.", - ); - } - handler = options.handler; - } - if (typeof handler !== "function") { - throw new TypeError("A handler function must be provided."); - } - if (options === undefined) { - options = {}; - } - - const signal = options.signal; - const onError = options.onError ?? function (error) { - console.error(error); - return new Response("Internal Server Error", { status: 500 }); - }; - const onListen = options.onListen ?? function ({ port }) { - console.log( - `Listening on http://${hostnameForDisplay(listenOpts.hostname)}:${port}/`, - ); - }; - const listenOpts = { - hostname: options.hostname ?? "127.0.0.1", - port: options.port ?? 9000, - reusePort: options.reusePort ?? false, - }; - - if (options.cert || options.key) { - if (!options.cert || !options.key) { - throw new TypeError( - "Both cert and key must be provided to enable HTTPS.", - ); - } - listenOpts.cert = options.cert; - listenOpts.key = options.key; - } - - let listener; - if (listenOpts.cert && listenOpts.key) { - listener = listenTls({ - hostname: listenOpts.hostname, - port: listenOpts.port, - cert: listenOpts.cert, - key: listenOpts.key, - reusePort: listenOpts.reusePort, - }); - } else { - listener = listen({ - hostname: listenOpts.hostname, - port: listenOpts.port, - reusePort: listenOpts.reusePort, - }); - } - - const serverDeferred = new Deferred(); - const activeHttpConnections = new SafeSet(); - - const server = { - transport: listenOpts.cert && listenOpts.key ? 
"https" : "http", - hostname: listenOpts.hostname, - port: listenOpts.port, - closed: false, - - close() { - if (server.closed) { - return; - } - server.closed = true; - try { - listener.close(); - } catch { - // Might have been already closed. - } - - for (const httpConn of new SafeSetIterator(activeHttpConnections)) { - try { - httpConn.close(); - } catch { - // Might have been already closed. - } - } - - SetPrototypeClear(activeHttpConnections); - serverDeferred.resolve(); - }, - - async serve() { - while (!server.closed) { - let conn; - - try { - conn = await listener.accept(); - } catch { - // Listener has been closed. - if (!server.closed) { - console.log("Listener has closed unexpectedly"); - } - break; - } - - let httpConn; - try { - const rid = ops.op_http_start(conn.rid); - httpConn = new HttpConn(rid, conn.remoteAddr, conn.localAddr); - } catch { - // Connection has been closed; - continue; - } - - SetPrototypeAdd(activeHttpConnections, httpConn); - - const connInfo = { - localAddr: conn.localAddr, - remoteAddr: conn.remoteAddr, - }; - // Serve the HTTP connection - serveConnection( - server, - activeHttpConnections, - handler, - httpConn, - connInfo, - onError, - ); - } - await serverDeferred.promise; - }, - }; - - signal?.addEventListener( - "abort", - () => { - try { - server.close(); - } catch { - // Pass - } - }, - { once: true }, - ); - - onListen(listener.addr); - - await PromisePrototypeCatch(server.serve(), console.error); -} - -export { _ws, HttpConn, serve, upgradeHttp, upgradeHttpRaw, upgradeWebSocket }; +export { _ws, HttpConn, serve, upgradeHttp, upgradeWebSocket }; diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 382fd3184f..9e7d39378c 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.94.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -10,6 +10,10 @@ readme = "README.md" repository.workspace = true 
description = "HTTP server implementation for Deno" +[features] +"__zombie_http_tracking" = [] +"__http_tracing" = [] + [lib] path = "lib.rs" @@ -19,16 +23,20 @@ harness = false [dependencies] async-compression = { version = "0.3.12", features = ["tokio", "brotli", "gzip"] } +async-trait.workspace = true base64.workspace = true brotli = "3.3.4" bytes.workspace = true cache_control.workspace = true deno_core.workspace = true +deno_net.workspace = true deno_websocket.workspace = true flate2.workspace = true fly-accept-encoding = "0.2.0" +http.workspace = true httparse.workspace = true hyper = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] } +hyper1 = { package = "hyper", features = ["full"], version = "1.0.0-rc.3" } memmem.workspace = true mime = "0.3.16" once_cell.workspace = true @@ -37,8 +45,12 @@ phf = { version = "0.10", features = ["macros"] } pin-project.workspace = true ring.workspace = true serde.workspace = true +slab.workspace = true +smallvec.workspace = true +thiserror.workspace = true tokio.workspace = true tokio-util = { workspace = true, features = ["io"] } [dev-dependencies] bencher.workspace = true +rand.workspace = true diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs new file mode 100644 index 0000000000..900a956f49 --- /dev/null +++ b/ext/http/http_next.rs @@ -0,0 +1,1041 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::compressible::is_content_compressible; +use crate::extract_network_stream; +use crate::network_buffered_stream::NetworkStreamPrefixCheck; +use crate::request_body::HttpRequestBody; +use crate::request_properties::HttpConnectionProperties; +use crate::request_properties::HttpListenProperties; +use crate::request_properties::HttpPropertyExtractor; +use crate::response_body::Compression; +use crate::response_body::ResponseBytes; +use crate::response_body::ResponseBytesInner; +use crate::response_body::V8StreamHttpResponseBody; +use crate::slab::slab_drop; +use crate::slab::slab_get; +use crate::slab::slab_insert; +use crate::slab::SlabId; +use crate::websocket_upgrade::WebSocketUpgrade; +use crate::LocalExecutor; +use cache_control::CacheControl; +use deno_core::error::AnyError; +use deno_core::futures::TryFutureExt; +use deno_core::op; +use deno_core::serde_v8; +use deno_core::serde_v8::from_v8; +use deno_core::task::spawn; +use deno_core::task::JoinHandle; +use deno_core::v8; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::ByteString; +use deno_core::CancelFuture; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_net::ops_tls::TlsStream; +use deno_net::raw::NetworkStream; +use deno_websocket::ws_create_server_stream; +use fly_accept_encoding::Encoding; +use http::header::ACCEPT_ENCODING; +use http::header::CACHE_CONTROL; +use http::header::CONTENT_ENCODING; +use http::header::CONTENT_LENGTH; +use http::header::CONTENT_RANGE; +use http::header::CONTENT_TYPE; +use http::HeaderMap; +use hyper1::body::Incoming; +use hyper1::header::COOKIE; +use hyper1::http::HeaderName; +use hyper1::http::HeaderValue; +use hyper1::server::conn::http1; +use hyper1::server::conn::http2; +use hyper1::service::service_fn; +use hyper1::service::HttpService; +use hyper1::StatusCode; +use once_cell::sync::Lazy; +use 
pin_project::pin_project; +use pin_project::pinned_drop; +use smallvec::SmallVec; +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::io; +use std::pin::Pin; +use std::rc::Rc; + +use tokio::io::AsyncReadExt; +use tokio::io::AsyncWriteExt; + +type Request = hyper1::Request; +type Response = hyper1::Response; + +static USE_WRITEV: Lazy = Lazy::new(|| { + let disable_writev = std::env::var("DENO_HYPER_USE_WRITEV").ok(); + + if let Some(val) = disable_writev { + return val != "0"; + } + + true +}); + +/// All HTTP/2 connections start with this byte string. +/// +/// In HTTP/2, each endpoint is required to send a connection preface as a final confirmation +/// of the protocol in use and to establish the initial settings for the HTTP/2 connection. The +/// client and server each send a different connection preface. +/// +/// The client connection preface starts with a sequence of 24 octets, which in hex notation is: +/// +/// 0x505249202a20485454502f322e300d0a0d0a534d0d0a0d0a +/// +/// That is, the connection preface starts with the string PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n). This sequence +/// MUST be followed by a SETTINGS frame (Section 6.5), which MAY be empty. +const HTTP2_PREFIX: &[u8] = b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"; + +/// ALPN negotation for "h2" +const TLS_ALPN_HTTP_2: &[u8] = b"h2"; + +/// ALPN negotation for "http/1.1" +const TLS_ALPN_HTTP_11: &[u8] = b"http/1.1"; + +/// Name a trait for streams we can serve HTTP over. 
+trait HttpServeStream: + tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static +{ +} +impl< + S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, + > HttpServeStream for S +{ +} + +#[op] +pub fn op_http_upgrade_raw( + state: &mut OpState, + slab_id: SlabId, +) -> Result { + // Stage 1: extract the upgrade future + let upgrade = slab_get(slab_id).upgrade()?; + let (read, write) = tokio::io::duplex(1024); + let (read_rx, write_tx) = tokio::io::split(read); + let (mut write_rx, mut read_tx) = tokio::io::split(write); + spawn(async move { + let mut upgrade_stream = WebSocketUpgrade::::default(); + + // Stage 2: Extract the Upgraded connection + let mut buf = [0; 1024]; + let upgraded = loop { + let read = Pin::new(&mut write_rx).read(&mut buf).await?; + match upgrade_stream.write(&buf[..read]) { + Ok(None) => continue, + Ok(Some((response, bytes))) => { + let mut http = slab_get(slab_id); + *http.response() = response; + http.complete(); + let mut upgraded = upgrade.await?; + upgraded.write_all(&bytes).await?; + break upgraded; + } + Err(err) => return Err(err), + } + }; + + // Stage 3: Pump the data + let (mut upgraded_rx, mut upgraded_tx) = tokio::io::split(upgraded); + + spawn(async move { + let mut buf = [0; 1024]; + loop { + let read = upgraded_rx.read(&mut buf).await?; + if read == 0 { + break; + } + read_tx.write_all(&buf[..read]).await?; + } + Ok::<_, AnyError>(()) + }); + spawn(async move { + let mut buf = [0; 1024]; + loop { + let read = write_rx.read(&mut buf).await?; + if read == 0 { + break; + } + upgraded_tx.write_all(&buf[..read]).await?; + } + Ok::<_, AnyError>(()) + }); + + Ok(()) + }); + + Ok( + state + .resource_table + .add(UpgradeStream::new(read_rx, write_tx)), + ) +} + +#[op] +pub async fn op_http_upgrade_websocket_next( + state: Rc>, + slab_id: SlabId, + headers: Vec<(ByteString, ByteString)>, +) -> Result { + let mut http = slab_get(slab_id); + // Stage 1: set the response to 101 Switching Protocols and 
send it + let upgrade = http.upgrade()?; + + let response = http.response(); + *response.status_mut() = StatusCode::SWITCHING_PROTOCOLS; + for (name, value) in headers { + response.headers_mut().append( + HeaderName::from_bytes(&name).unwrap(), + HeaderValue::from_bytes(&value).unwrap(), + ); + } + http.complete(); + + // Stage 2: wait for the request to finish upgrading + let upgraded = upgrade.await?; + + // Stage 3: take the extracted raw network stream and upgrade it to a websocket, then return it + let (stream, bytes) = extract_network_stream(upgraded); + ws_create_server_stream(&mut state.borrow_mut(), stream, bytes) +} + +#[op(fast)] +pub fn op_http_set_promise_complete(slab_id: SlabId, status: u16) { + let mut http = slab_get(slab_id); + // The Javascript code will never provide a status that is invalid here (see 23_response.js) + *http.response().status_mut() = StatusCode::from_u16(status).unwrap(); + http.complete(); +} + +#[op(v8)] +pub fn op_http_get_request_method_and_url<'scope, HTTP>( + scope: &mut v8::HandleScope<'scope>, + slab_id: SlabId, +) -> serde_v8::Value<'scope> +where + HTTP: HttpPropertyExtractor, +{ + let http = slab_get(slab_id); + let request_info = http.request_info(); + let request_parts = http.request_parts(); + let request_properties = HTTP::request_properties( + request_info, + &request_parts.uri, + &request_parts.headers, + ); + + let method: v8::Local = v8::String::new_from_utf8( + scope, + request_parts.method.as_str().as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(); + + let authority: v8::Local = match request_properties.authority { + Some(authority) => v8::String::new_from_utf8( + scope, + authority.as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + None => v8::undefined(scope).into(), + }; + + // Only extract the path part - we handle authority elsewhere + let path = match &request_parts.uri.path_and_query() { + Some(path_and_query) => path_and_query.to_string(), + None => "".to_owned(), + 
}; + + let path: v8::Local = + v8::String::new_from_utf8(scope, path.as_ref(), v8::NewStringType::Normal) + .unwrap() + .into(); + + let peer_address: v8::Local = v8::String::new_from_utf8( + scope, + request_info.peer_address.as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(); + + let port: v8::Local = match request_info.peer_port { + Some(port) => v8::Integer::new(scope, port.into()).into(), + None => v8::undefined(scope).into(), + }; + + let vec = [method, authority, path, peer_address, port]; + let array = v8::Array::new_with_elements(scope, vec.as_slice()); + let array_value: v8::Local = array.into(); + + array_value.into() +} + +#[op] +pub fn op_http_get_request_header( + slab_id: SlabId, + name: String, +) -> Option { + let http = slab_get(slab_id); + let value = http.request_parts().headers.get(name); + value.map(|value| value.as_bytes().into()) +} + +#[op(v8)] +pub fn op_http_get_request_headers<'scope>( + scope: &mut v8::HandleScope<'scope>, + slab_id: SlabId, +) -> serde_v8::Value<'scope> { + let http = slab_get(slab_id); + let headers = &http.request_parts().headers; + // Two slots for each header key/value pair + let mut vec: SmallVec<[v8::Local; 32]> = + SmallVec::with_capacity(headers.len() * 2); + + let mut cookies: Option> = None; + for (name, value) in headers { + if name == COOKIE { + if let Some(ref mut cookies) = cookies { + cookies.push(value.as_bytes()); + } else { + cookies = Some(vec![value.as_bytes()]); + } + } else { + vec.push( + v8::String::new_from_one_byte( + scope, + name.as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); + vec.push( + v8::String::new_from_one_byte( + scope, + value.as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); + } + } + + // We treat cookies specially, because we don't want them to get them + // mangled by the `Headers` object in JS. What we do is take all cookie + // headers and concat them into a single cookie header, separated by + // semicolons. 
+ // TODO(mmastrac): This should probably happen on the JS side on-demand + if let Some(cookies) = cookies { + let cookie_sep = "; ".as_bytes(); + + vec.push( + v8::String::new_external_onebyte_static(scope, COOKIE.as_ref()) + .unwrap() + .into(), + ); + vec.push( + v8::String::new_from_one_byte( + scope, + cookies.join(cookie_sep).as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); + } + + let array = v8::Array::new_with_elements(scope, vec.as_slice()); + let array_value: v8::Local = array.into(); + + array_value.into() +} + +#[op(fast)] +pub fn op_http_read_request_body( + state: &mut OpState, + slab_id: SlabId, +) -> ResourceId { + let mut http = slab_get(slab_id); + let incoming = http.take_body(); + let body_resource = Rc::new(HttpRequestBody::new(incoming)); + state.resource_table.add_rc(body_resource) +} + +#[op(fast)] +pub fn op_http_set_response_header(slab_id: SlabId, name: &str, value: &str) { + let mut http = slab_get(slab_id); + let resp_headers = http.response().headers_mut(); + // These are valid latin-1 strings + let name = HeaderName::from_bytes(name.as_bytes()).unwrap(); + let value = HeaderValue::from_bytes(value.as_bytes()).unwrap(); + resp_headers.append(name, value); +} + +#[op(v8)] +fn op_http_set_response_headers( + scope: &mut v8::HandleScope, + slab_id: SlabId, + headers: serde_v8::Value, +) { + let mut http = slab_get(slab_id); + // TODO(mmastrac): Invalid headers should be handled? 
+ let resp_headers = http.response().headers_mut(); + + let arr = v8::Local::::try_from(headers.v8_value).unwrap(); + + let len = arr.length(); + let header_len = len * 2; + resp_headers.reserve(header_len.try_into().unwrap()); + + for i in 0..len { + let item = arr.get_index(scope, i).unwrap(); + let pair = v8::Local::::try_from(item).unwrap(); + let name = pair.get_index(scope, 0).unwrap(); + let value = pair.get_index(scope, 1).unwrap(); + + let v8_name: ByteString = from_v8(scope, name).unwrap(); + let v8_value: ByteString = from_v8(scope, value).unwrap(); + let header_name = HeaderName::from_bytes(&v8_name).unwrap(); + let header_value = HeaderValue::from_bytes(&v8_value).unwrap(); + resp_headers.append(header_name, header_value); + } +} + +#[op] +pub fn op_http_set_response_trailers( + slab_id: SlabId, + trailers: Vec<(ByteString, ByteString)>, +) { + let mut http = slab_get(slab_id); + let mut trailer_map: HeaderMap = HeaderMap::with_capacity(trailers.len()); + for (name, value) in trailers { + // These are valid latin-1 strings + let name = HeaderName::from_bytes(&name).unwrap(); + let value = HeaderValue::from_bytes(&value).unwrap(); + trailer_map.append(name, value); + } + *http.trailers().borrow_mut() = Some(trailer_map); +} + +fn is_request_compressible(headers: &HeaderMap) -> Compression { + let Some(accept_encoding) = headers.get(ACCEPT_ENCODING) else { + return Compression::None; + }; + + match accept_encoding.to_str().unwrap() { + // Firefox and Chrome send this -- no need to parse + "gzip, deflate, br" => return Compression::Brotli, + "gzip" => return Compression::GZip, + "br" => return Compression::Brotli, + _ => (), + } + + // Fall back to the expensive parser + let accepted = fly_accept_encoding::encodings_iter(headers).filter(|r| { + matches!( + r, + Ok(( + Some(Encoding::Identity | Encoding::Gzip | Encoding::Brotli), + _ + )) + ) + }); + match fly_accept_encoding::preferred(accepted) { + Ok(Some(fly_accept_encoding::Encoding::Gzip)) => 
Compression::GZip, + Ok(Some(fly_accept_encoding::Encoding::Brotli)) => Compression::Brotli, + _ => Compression::None, + } +} + +fn is_response_compressible(headers: &HeaderMap) -> bool { + if let Some(content_type) = headers.get(CONTENT_TYPE) { + if !is_content_compressible(content_type) { + return false; + } + } else { + return false; + } + if headers.contains_key(CONTENT_ENCODING) { + return false; + } + if headers.contains_key(CONTENT_RANGE) { + return false; + } + if let Some(cache_control) = headers.get(CACHE_CONTROL) { + if let Ok(s) = std::str::from_utf8(cache_control.as_bytes()) { + if let Some(cache_control) = CacheControl::from_value(s) { + if cache_control.no_transform { + return false; + } + } + } + } + true +} + +fn modify_compressibility_from_response( + compression: Compression, + length: Option, + headers: &mut HeaderMap, +) -> Compression { + ensure_vary_accept_encoding(headers); + if let Some(length) = length { + // By the time we add compression headers and Accept-Encoding, it probably doesn't make sense + // to compress stuff that's smaller than this. + if length < 64 { + return Compression::None; + } + } + if compression == Compression::None { + return Compression::None; + } + if !is_response_compressible(headers) { + return Compression::None; + } + let encoding = match compression { + Compression::Brotli => "br", + Compression::GZip => "gzip", + _ => unreachable!(), + }; + weaken_etag(headers); + headers.remove(CONTENT_LENGTH); + headers.insert(CONTENT_ENCODING, HeaderValue::from_static(encoding)); + compression +} + +/// If the user provided a ETag header for uncompressed data, we need to ensure it is a +/// weak Etag header ("W/"). 
+fn weaken_etag(hmap: &mut HeaderMap) { + if let Some(etag) = hmap.get_mut(hyper::header::ETAG) { + if !etag.as_bytes().starts_with(b"W/") { + let mut v = Vec::with_capacity(etag.as_bytes().len() + 2); + v.extend(b"W/"); + v.extend(etag.as_bytes()); + *etag = v.try_into().unwrap(); + } + } +} + +// Set Vary: Accept-Encoding header for direct body response. +// Note: we set the header irrespective of whether or not we compress the data +// to make sure cache services do not serve uncompressed data to clients that +// support compression. +fn ensure_vary_accept_encoding(hmap: &mut HeaderMap) { + if let Some(v) = hmap.get_mut(hyper::header::VARY) { + if let Ok(s) = v.to_str() { + if !s.to_lowercase().contains("accept-encoding") { + *v = format!("Accept-Encoding, {s}").try_into().unwrap() + } + return; + } + } + hmap.insert( + hyper::header::VARY, + HeaderValue::from_static("Accept-Encoding"), + ); +} + +fn set_response( + slab_id: SlabId, + length: Option, + response_fn: impl FnOnce(Compression) -> ResponseBytesInner, +) { + let mut http = slab_get(slab_id); + let compression = is_request_compressible(&http.request_parts().headers); + let response = http.response(); + let compression = modify_compressibility_from_response( + compression, + length, + response.headers_mut(), + ); + response.body_mut().initialize(response_fn(compression)) +} + +#[op(fast)] +pub fn op_http_set_response_body_resource( + state: &mut OpState, + slab_id: SlabId, + stream_rid: ResourceId, + auto_close: bool, +) -> Result<(), AnyError> { + // If the stream is auto_close, we will hold the last ref to it until the response is complete. + let resource = if auto_close { + state.resource_table.take_any(stream_rid)? + } else { + state.resource_table.get_any(stream_rid)? 
+ }; + + set_response( + slab_id, + resource.size_hint().1.map(|s| s as usize), + move |compression| { + ResponseBytesInner::from_resource(compression, resource, auto_close) + }, + ); + + Ok(()) +} + +#[op(fast)] +pub fn op_http_set_response_body_stream( + state: &mut OpState, + slab_id: SlabId, +) -> Result { + // TODO(mmastrac): what should this channel size be? + let (tx, rx) = tokio::sync::mpsc::channel(1); + set_response(slab_id, None, |compression| { + ResponseBytesInner::from_v8(compression, rx) + }); + + Ok(state.resource_table.add(V8StreamHttpResponseBody::new(tx))) +} + +#[op(fast)] +pub fn op_http_set_response_body_text(slab_id: SlabId, text: String) { + if !text.is_empty() { + set_response(slab_id, Some(text.len()), |compression| { + ResponseBytesInner::from_vec(compression, text.into_bytes()) + }); + } +} + +#[op(fast)] +pub fn op_http_set_response_body_bytes(slab_id: SlabId, buffer: &[u8]) { + if !buffer.is_empty() { + set_response(slab_id, Some(buffer.len()), |compression| { + ResponseBytesInner::from_slice(compression, buffer) + }); + }; +} + +#[op] +pub async fn op_http_track( + state: Rc>, + slab_id: SlabId, + server_rid: ResourceId, +) -> Result<(), AnyError> { + let http = slab_get(slab_id); + let handle = http.body_promise(); + + let join_handle = state + .borrow_mut() + .resource_table + .get::(server_rid)?; + + match handle.or_cancel(join_handle.cancel_handle()).await { + Ok(true) => Ok(()), + Ok(false) => { + Err(AnyError::msg("connection closed before message completed")) + } + Err(_e) => Ok(()), + } +} + +#[pin_project(PinnedDrop)] +pub struct SlabFuture>(SlabId, #[pin] F); + +pub fn new_slab_future( + request: Request, + request_info: HttpConnectionProperties, + tx: tokio::sync::mpsc::Sender, +) -> SlabFuture> { + let index = slab_insert(request, request_info); + let rx = slab_get(index).promise(); + SlabFuture(index, async move { + if tx.send(index).await.is_ok() { + // We only need to wait for completion if we aren't closed + rx.await; 
+ } + }) +} + +impl> SlabFuture {} + +#[pinned_drop] +impl> PinnedDrop for SlabFuture { + fn drop(self: Pin<&mut Self>) { + slab_drop(self.0); + } +} + +impl> Future for SlabFuture { + type Output = Result; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let index = self.0; + self + .project() + .1 + .poll(cx) + .map(|_| Ok(slab_get(index).take_response())) + } +} + +fn serve_http11_unconditional( + io: impl HttpServeStream, + svc: impl HttpService + 'static, +) -> impl Future> + 'static { + let conn = http1::Builder::new() + .keep_alive(true) + .writev(*USE_WRITEV) + .serve_connection(io, svc); + + conn.with_upgrades().map_err(AnyError::from) +} + +fn serve_http2_unconditional( + io: impl HttpServeStream, + svc: impl HttpService + 'static, +) -> impl Future> + 'static { + let conn = http2::Builder::new(LocalExecutor).serve_connection(io, svc); + conn.map_err(AnyError::from) +} + +async fn serve_http2_autodetect( + io: impl HttpServeStream, + svc: impl HttpService + 'static, +) -> Result<(), AnyError> { + let prefix = NetworkStreamPrefixCheck::new(io, HTTP2_PREFIX); + let (matches, io) = prefix.match_prefix().await?; + if matches { + serve_http2_unconditional(io, svc).await + } else { + serve_http11_unconditional(io, svc).await + } +} + +fn serve_https( + mut io: TlsStream, + request_info: HttpConnectionProperties, + cancel: Rc, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> { + let svc = service_fn(move |req: Request| { + new_slab_future(req, request_info.clone(), tx.clone()) + }); + spawn( + async { + io.handshake().await?; + // If the client specifically negotiates a protocol, we will use it. 
If not, we'll auto-detect + // based on the prefix bytes + let handshake = io.get_ref().1.alpn_protocol(); + if handshake == Some(TLS_ALPN_HTTP_2) { + serve_http2_unconditional(io, svc).await + } else if handshake == Some(TLS_ALPN_HTTP_11) { + serve_http11_unconditional(io, svc).await + } else { + serve_http2_autodetect(io, svc).await + } + } + .try_or_cancel(cancel), + ) +} + +fn serve_http( + io: impl HttpServeStream, + request_info: HttpConnectionProperties, + cancel: Rc, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> { + let svc = service_fn(move |req: Request| { + new_slab_future(req, request_info.clone(), tx.clone()) + }); + spawn(serve_http2_autodetect(io, svc).try_or_cancel(cancel)) +} + +fn serve_http_on( + connection: HTTP::Connection, + listen_properties: &HttpListenProperties, + cancel: Rc, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> +where + HTTP: HttpPropertyExtractor, +{ + let connection_properties: HttpConnectionProperties = + HTTP::connection_properties(listen_properties, &connection); + + let network_stream = HTTP::to_network_stream_from_connection(connection); + + match network_stream { + NetworkStream::Tcp(conn) => { + serve_http(conn, connection_properties, cancel, tx) + } + NetworkStream::Tls(conn) => { + serve_https(conn, connection_properties, cancel, tx) + } + #[cfg(unix)] + NetworkStream::Unix(conn) => { + serve_http(conn, connection_properties, cancel, tx) + } + } +} + +struct HttpJoinHandle( + AsyncRefCell>>>, + // Cancel handle must live in a separate Rc to avoid keeping the outer join handle ref'd + Rc, + AsyncRefCell>, +); + +impl HttpJoinHandle { + fn cancel_handle(self: &Rc) -> Rc { + self.1.clone() + } +} + +impl Resource for HttpJoinHandle { + fn name(&self) -> Cow { + "http".into() + } + + fn close(self: Rc) { + self.1.cancel() + } +} + +impl Drop for HttpJoinHandle { + fn drop(&mut self) { + // In some cases we may be dropped without closing, so let's cancel everything on the way out + self.1.cancel(); + } +} + 
+#[op(v8)] +pub fn op_http_serve( + state: Rc>, + listener_rid: ResourceId, +) -> Result<(ResourceId, &'static str, String), AnyError> +where + HTTP: HttpPropertyExtractor, +{ + let listener = + HTTP::get_listener_for_rid(&mut state.borrow_mut(), listener_rid)?; + + let listen_properties = HTTP::listen_properties_from_listener(&listener)?; + + let (tx, rx) = tokio::sync::mpsc::channel(10); + let resource: Rc = Rc::new(HttpJoinHandle( + AsyncRefCell::new(None), + CancelHandle::new_rc(), + AsyncRefCell::new(rx), + )); + let cancel_clone = resource.cancel_handle(); + + let listen_properties_clone: HttpListenProperties = listen_properties.clone(); + let handle = spawn(async move { + loop { + let conn = HTTP::accept_connection_from_listener(&listener) + .try_or_cancel(cancel_clone.clone()) + .await?; + serve_http_on::( + conn, + &listen_properties_clone, + cancel_clone.clone(), + tx.clone(), + ); + } + #[allow(unreachable_code)] + Ok::<_, AnyError>(()) + }); + + // Set the handle after we start the future + *RcRef::map(&resource, |this| &this.0) + .try_borrow_mut() + .unwrap() = Some(handle); + + Ok(( + state.borrow_mut().resource_table.add_rc(resource), + listen_properties.scheme, + listen_properties.fallback_host, + )) +} + +#[op(v8)] +pub fn op_http_serve_on( + state: Rc>, + connection_rid: ResourceId, +) -> Result<(ResourceId, &'static str, String), AnyError> +where + HTTP: HttpPropertyExtractor, +{ + let connection = + HTTP::get_connection_for_rid(&mut state.borrow_mut(), connection_rid)?; + + let listen_properties = HTTP::listen_properties_from_connection(&connection)?; + + let (tx, rx) = tokio::sync::mpsc::channel(10); + let resource: Rc = Rc::new(HttpJoinHandle( + AsyncRefCell::new(None), + CancelHandle::new_rc(), + AsyncRefCell::new(rx), + )); + + let handle: JoinHandle> = + serve_http_on::( + connection, + &listen_properties, + resource.cancel_handle(), + tx, + ); + + // Set the handle after we start the future + *RcRef::map(&resource, |this| &this.0) + 
.try_borrow_mut() + .unwrap() = Some(handle); + + Ok(( + state.borrow_mut().resource_table.add_rc(resource), + listen_properties.scheme, + listen_properties.fallback_host, + )) +} + +/// Synchronous, non-blocking call to see if there are any further HTTP requests. If anything +/// goes wrong in this method we return [`SlabId::MAX`] and let the async handler pick up the real error. +#[op(fast)] +pub fn op_http_try_wait(state: &mut OpState, rid: ResourceId) -> SlabId { + // The resource needs to exist. + let Ok(join_handle) = state + .resource_table + .get::(rid) else { + return SlabId::MAX; + }; + + // If join handle is somehow locked, just abort. + let Some(mut handle) = RcRef::map(&join_handle, |this| &this.2).try_borrow_mut() else { + return SlabId::MAX; + }; + + // See if there are any requests waiting on this channel. If not, return. + let Ok(id) = handle.try_recv() else { + return SlabId::MAX; + }; + + id +} + +#[op] +pub async fn op_http_wait( + state: Rc>, + rid: ResourceId, +) -> Result { + // We will get the join handle initially, as we might be consuming requests still + let join_handle = state + .borrow_mut() + .resource_table + .get::(rid)?; + + let cancel = join_handle.cancel_handle(); + let next = async { + let mut recv = RcRef::map(&join_handle, |this| &this.2).borrow_mut().await; + recv.recv().await + } + .or_cancel(cancel) + .unwrap_or_else(|_| None) + .await; + + // Do we have a request? 
+ if let Some(req) = next { + return Ok(req); + } + + // No - we're shutting down + let res = RcRef::map(join_handle, |this| &this.0) + .borrow_mut() + .await + .take() + .unwrap() + .await?; + + // Drop the cancel and join handles + state + .borrow_mut() + .resource_table + .take::(rid)?; + + // Filter out shutdown (ENOTCONN) errors + if let Err(err) = res { + if let Some(err) = err.source() { + if let Some(err) = err.downcast_ref::() { + if err.kind() == io::ErrorKind::NotConnected { + return Ok(SlabId::MAX); + } + } + } + return Err(err); + } + + Ok(SlabId::MAX) +} + +struct UpgradeStream { + read: AsyncRefCell>, + write: AsyncRefCell>, + cancel_handle: CancelHandle, +} + +impl UpgradeStream { + pub fn new( + read: tokio::io::ReadHalf, + write: tokio::io::WriteHalf, + ) -> Self { + Self { + read: AsyncRefCell::new(read), + write: AsyncRefCell::new(write), + cancel_handle: CancelHandle::new(), + } + } + + async fn read(self: Rc, buf: &mut [u8]) -> Result { + let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle); + async { + let read = RcRef::map(self, |this| &this.read); + let mut read = read.borrow_mut().await; + Ok(Pin::new(&mut *read).read(buf).await?) + } + .try_or_cancel(cancel_handle) + .await + } + + async fn write(self: Rc, buf: &[u8]) -> Result { + let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle); + async { + let write = RcRef::map(self, |this| &this.write); + let mut write = write.borrow_mut().await; + Ok(Pin::new(&mut *write).write(buf).await?) 
+ } + .try_or_cancel(cancel_handle) + .await + } +} + +impl Resource for UpgradeStream { + fn name(&self) -> Cow { + "httpRawUpgradeStream".into() + } + + deno_core::impl_readable_byob!(); + deno_core::impl_writable!(); + + fn close(self: Rc) { + self.cancel_handle.cancel(); + } +} diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 289e7bf0f9..da007ba398 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -20,6 +20,7 @@ use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::futures::TryFutureExt; use deno_core::op; +use deno_core::task::spawn; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::BufView; @@ -32,8 +33,8 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::StringOrBuffer; -use deno_core::WriteOutcome; use deno_core::ZeroCopyBuf; +use deno_net::raw::NetworkStream; use deno_websocket::ws_create_server_stream; use flate2::write::GzEncoder; use flate2::Compression; @@ -66,34 +67,63 @@ use std::sync::Arc; use std::task::Context; use std::task::Poll; use tokio::io::AsyncRead; -use tokio::io::AsyncReadExt; use tokio::io::AsyncWrite; use tokio::io::AsyncWriteExt; -use tokio::task::spawn_local; -use websocket_upgrade::WebSocketUpgrade; +use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; use crate::reader_stream::ShutdownHandle; pub mod compressible; +mod http_next; +mod network_buffered_stream; mod reader_stream; +mod request_body; +mod request_properties; +mod response_body; +mod slab; mod websocket_upgrade; +pub use request_properties::DefaultHttpPropertyExtractor; +pub use request_properties::HttpConnectionProperties; +pub use request_properties::HttpListenProperties; +pub use request_properties::HttpPropertyExtractor; +pub use request_properties::HttpRequestProperties; + deno_core::extension!( deno_http, deps = [deno_web, deno_net, deno_fetch, deno_websocket], + parameters = [ HTTP: 
HttpPropertyExtractor ], ops = [ op_http_accept, - op_http_write_headers, op_http_headers, - op_http_write, - op_http_write_resource, op_http_shutdown, - op_http_websocket_accept_header, - op_http_upgrade_early, op_http_upgrade_websocket, + op_http_websocket_accept_header, + op_http_write_headers, + op_http_write_resource, + op_http_write, + http_next::op_http_get_request_header, + http_next::op_http_get_request_headers, + http_next::op_http_get_request_method_and_url, + http_next::op_http_read_request_body, + http_next::op_http_serve_on, + http_next::op_http_serve, + http_next::op_http_set_promise_complete, + http_next::op_http_set_response_body_bytes, + http_next::op_http_set_response_body_resource, + http_next::op_http_set_response_body_stream, + http_next::op_http_set_response_body_text, + http_next::op_http_set_response_header, + http_next::op_http_set_response_headers, + http_next::op_http_set_response_trailers, + http_next::op_http_track, + http_next::op_http_upgrade_websocket_next, + http_next::op_http_upgrade_raw, + http_next::op_http_try_wait, + http_next::op_http_wait, ], - esm = ["01_http.js"], + esm = ["00_serve.js", "01_http.js"], ); pub enum HttpSocketAddr { @@ -157,7 +187,7 @@ impl HttpConnResource { }; let (task_fut, closed_fut) = task_fut.remote_handle(); let closed_fut = closed_fut.shared(); - spawn_local(task_fut); + spawn(task_fut); Self { addr, @@ -943,227 +973,6 @@ fn op_http_websocket_accept_header(key: String) -> Result { Ok(base64::encode(digest)) } -struct EarlyUpgradeSocket(AsyncRefCell, CancelHandle); - -enum EarlyUpgradeSocketInner { - PreResponse( - Rc, - WebSocketUpgrade, - // Readers need to block in this state, so they can wait here for the broadcast. - tokio::sync::broadcast::Sender< - Rc>>, - >, - ), - PostResponse( - Rc>>, - Rc>>, - ), -} - -impl EarlyUpgradeSocket { - /// Gets a reader without holding the lock. 
- async fn get_reader( - self: Rc, - ) -> Result< - Rc>>, - AnyError, - > { - let mut borrow = RcRef::map(self.clone(), |x| &x.0).borrow_mut().await; - let cancel = RcRef::map(self, |x| &x.1); - let inner = &mut *borrow; - match inner { - EarlyUpgradeSocketInner::PreResponse(_, _, tx) => { - let mut rx = tx.subscribe(); - // Ensure we're not borrowing self here - drop(borrow); - Ok( - rx.recv() - .map_err(AnyError::from) - .try_or_cancel(&cancel) - .await?, - ) - } - EarlyUpgradeSocketInner::PostResponse(rx, _) => Ok(rx.clone()), - } - } - - async fn read(self: Rc, data: &mut [u8]) -> Result { - let reader = self.clone().get_reader().await?; - let cancel = RcRef::map(self, |x| &x.1); - Ok( - reader - .borrow_mut() - .await - .read(data) - .try_or_cancel(&cancel) - .await?, - ) - } - - /// Write all the data provided, only holding the lock while we see if the connection needs to be - /// upgraded. - async fn write_all(self: Rc, buf: &[u8]) -> Result<(), AnyError> { - let mut borrow = RcRef::map(self.clone(), |x| &x.0).borrow_mut().await; - let cancel = RcRef::map(self, |x| &x.1); - let inner = &mut *borrow; - match inner { - EarlyUpgradeSocketInner::PreResponse(stream, upgrade, rx_tx) => { - if let Some((resp, extra)) = upgrade.write(buf)? 
{ - let new_wr = HttpResponseWriter::Closed; - let mut old_wr = - RcRef::map(stream.clone(), |r| &r.wr).borrow_mut().await; - let response_tx = match replace(&mut *old_wr, new_wr) { - HttpResponseWriter::Headers(response_tx) => response_tx, - _ => return Err(http_error("response headers already sent")), - }; - - if response_tx.send(resp).is_err() { - stream.conn.closed().await?; - return Err(http_error("connection closed while sending response")); - }; - - let mut old_rd = - RcRef::map(stream.clone(), |r| &r.rd).borrow_mut().await; - let new_rd = HttpRequestReader::Closed; - let upgraded = match replace(&mut *old_rd, new_rd) { - HttpRequestReader::Headers(request) => { - hyper::upgrade::on(request) - .map_err(AnyError::from) - .try_or_cancel(&cancel) - .await? - } - _ => { - return Err(http_error("response already started")); - } - }; - - let (rx, tx) = tokio::io::split(upgraded); - let rx = Rc::new(AsyncRefCell::new(rx)); - let tx = Rc::new(AsyncRefCell::new(tx)); - - // Take the tx and rx lock before we allow anything else to happen because we want to control - // the order of reads and writes. - let mut tx_lock = tx.clone().borrow_mut().await; - let rx_lock = rx.clone().borrow_mut().await; - - // Allow all the pending readers to go now. We still have the lock on inner, so no more - // pending readers can show up. We intentionally ignore errors here, as there may be - // nobody waiting on a read. - _ = rx_tx.send(rx.clone()); - - // We swap out inner here, so once the lock is gone, readers will acquire rx directly. - // We also fully release our lock. - *inner = EarlyUpgradeSocketInner::PostResponse(rx, tx); - drop(borrow); - - // We've updated inner and unlocked it, reads are free to go in-order. 
- drop(rx_lock); - - // If we had extra data after the response, write that to the upgraded connection - if !extra.is_empty() { - tx_lock.write_all(&extra).try_or_cancel(&cancel).await?; - } - } - } - EarlyUpgradeSocketInner::PostResponse(_, tx) => { - let tx = tx.clone(); - drop(borrow); - tx.borrow_mut() - .await - .write_all(buf) - .try_or_cancel(&cancel) - .await?; - } - }; - Ok(()) - } -} - -impl Resource for EarlyUpgradeSocket { - fn name(&self) -> Cow { - "upgradedHttpConnection".into() - } - - deno_core::impl_readable_byob!(); - - fn write( - self: Rc, - buf: BufView, - ) -> AsyncResult { - Box::pin(async move { - let nwritten = buf.len(); - Self::write_all(self, &buf).await?; - Ok(WriteOutcome::Full { nwritten }) - }) - } - - fn write_all(self: Rc, buf: BufView) -> AsyncResult<()> { - Box::pin(async move { Self::write_all(self, &buf).await }) - } - - fn close(self: Rc) { - self.1.cancel() - } -} - -#[op] -async fn op_http_upgrade_early( - state: Rc>, - rid: ResourceId, -) -> Result { - let stream = state - .borrow_mut() - .resource_table - .get::(rid)?; - let resources = &mut state.borrow_mut().resource_table; - let (tx, _rx) = tokio::sync::broadcast::channel(1); - let socket = EarlyUpgradeSocketInner::PreResponse( - stream, - WebSocketUpgrade::default(), - tx, - ); - let rid = resources.add(EarlyUpgradeSocket( - AsyncRefCell::new(socket), - CancelHandle::new(), - )); - Ok(rid) -} - -struct UpgradedStream(hyper::upgrade::Upgraded); -impl tokio::io::AsyncRead for UpgradedStream { - fn poll_read( - self: Pin<&mut Self>, - cx: &mut Context, - buf: &mut tokio::io::ReadBuf, - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_read(cx, buf) - } -} - -impl tokio::io::AsyncWrite for UpgradedStream { - fn poll_write( - self: Pin<&mut Self>, - cx: &mut Context, - buf: &[u8], - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_write(cx, buf) - } - fn poll_flush( - self: Pin<&mut Self>, - cx: &mut Context, - ) -> std::task::Poll> { - 
Pin::new(&mut self.get_mut().0).poll_flush(cx) - } - fn poll_shutdown( - self: Pin<&mut Self>, - cx: &mut Context, - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_shutdown(cx) - } -} - -impl deno_websocket::Upgraded for UpgradedStream {} - #[op] async fn op_http_upgrade_websocket( state: Rc>, @@ -1182,10 +991,10 @@ async fn op_http_upgrade_websocket( } }; - let transport = hyper::upgrade::on(request).await?; + let (transport, bytes) = + extract_network_stream(hyper::upgrade::on(request).await?); let ws_rid = - ws_create_server_stream(&state, Box::pin(UpgradedStream(transport))) - .await?; + ws_create_server_stream(&mut state.borrow_mut(), transport, bytes)?; Ok(ws_rid) } @@ -1199,7 +1008,17 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - spawn_local(fut); + deno_core::task::spawn(fut); + } +} + +impl hyper1::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + deno_core::task::spawn(fut); } } @@ -1229,3 +1048,91 @@ fn filter_enotconn( fn never() -> Pending { pending() } + +trait CanDowncastUpgrade: Sized { + fn downcast( + self, + ) -> Result<(T, Bytes), Self>; +} + +impl CanDowncastUpgrade for hyper1::upgrade::Upgraded { + fn downcast( + self, + ) -> Result<(T, Bytes), Self> { + let hyper1::upgrade::Parts { io, read_buf, .. } = self.downcast()?; + Ok((io, read_buf)) + } +} + +impl CanDowncastUpgrade for hyper::upgrade::Upgraded { + fn downcast( + self, + ) -> Result<(T, Bytes), Self> { + let hyper::upgrade::Parts { io, read_buf, .. 
} = self.downcast()?; + Ok((io, read_buf)) + } +} + +fn maybe_extract_network_stream< + T: Into + AsyncRead + AsyncWrite + Unpin + 'static, + U: CanDowncastUpgrade, +>( + upgraded: U, +) -> Result<(NetworkStream, Bytes), U> { + let upgraded = match upgraded.downcast::() { + Ok((stream, bytes)) => return Ok((stream.into(), bytes)), + Err(x) => x, + }; + + match upgraded.downcast::>() { + Ok((stream, upgraded_bytes)) => { + // Both the upgrade and the stream might have unread bytes + let (io, stream_bytes) = stream.into_inner(); + let bytes = match (stream_bytes.is_empty(), upgraded_bytes.is_empty()) { + (false, false) => Bytes::default(), + (true, false) => upgraded_bytes, + (false, true) => stream_bytes, + (true, true) => { + // The upgraded bytes come first as they have already been read + let mut v = upgraded_bytes.to_vec(); + v.append(&mut stream_bytes.to_vec()); + Bytes::from(v) + } + }; + Ok((io.into(), bytes)) + } + Err(x) => Err(x), + } +} + +fn extract_network_stream( + upgraded: U, +) -> (NetworkStream, Bytes) { + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => return res, + Err(x) => x, + }; + let upgraded = + match maybe_extract_network_stream::( + upgraded, + ) { + Ok(res) => return res, + Err(x) => x, + }; + #[cfg(unix)] + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => return res, + Err(x) => x, + }; + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => return res, + Err(x) => x, + }; + + // TODO(mmastrac): HTTP/2 websockets may yield an un-downgradable type + drop(upgraded); + unreachable!("unexpected stream type"); +} diff --git a/ext/http/network_buffered_stream.rs b/ext/http/network_buffered_stream.rs new file mode 100644 index 0000000000..bb128ba045 --- /dev/null +++ b/ext/http/network_buffered_stream.rs @@ -0,0 +1,308 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use bytes::Bytes; +use deno_core::futures::future::poll_fn; +use deno_core::futures::ready; +use std::io; +use std::mem::MaybeUninit; +use std::pin::Pin; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; +use tokio::io::ReadBuf; + +const MAX_PREFIX_SIZE: usize = 256; + +/// [`NetworkStreamPrefixCheck`] is used to differentiate a stream between two different modes, depending +/// on whether the first bytes match a given prefix (or not). +/// +/// IMPORTANT: This stream makes the assumption that the incoming bytes will never partially match the prefix +/// and then "hang" waiting for a write. For this code not to hang, the incoming stream must: +/// +/// * match the prefix fully and then request writes at a later time +/// * not match the prefix, and then request writes after writing a byte that causes the prefix not to match +/// * not match the prefix and then close +pub struct NetworkStreamPrefixCheck { + buffer: [MaybeUninit; MAX_PREFIX_SIZE * 2], + io: S, + prefix: &'static [u8], +} + +impl NetworkStreamPrefixCheck { + pub fn new(io: S, prefix: &'static [u8]) -> Self { + debug_assert!(prefix.len() < MAX_PREFIX_SIZE); + Self { + io, + prefix, + buffer: [MaybeUninit::::uninit(); MAX_PREFIX_SIZE * 2], + } + } + + // Returns a [`NetworkBufferedStream`] and a flag determining if we matched a prefix, rewound with the bytes we read to determine what + // type of stream this is. 
+ pub async fn match_prefix( + self, + ) -> io::Result<(bool, NetworkBufferedStream)> { + let mut buffer = self.buffer; + let mut readbuf = ReadBuf::uninit(&mut buffer); + let mut io = self.io; + let prefix = self.prefix; + loop { + enum State { + Unknown, + Matched, + NotMatched, + } + + let state = poll_fn(|cx| { + let filled_len = readbuf.filled().len(); + let res = ready!(Pin::new(&mut io).poll_read(cx, &mut readbuf)); + if let Err(e) = res { + return Poll::Ready(Err(e)); + } + let filled = readbuf.filled(); + let new_len = filled.len(); + if new_len == filled_len { + // Empty read, no match + return Poll::Ready(Ok(State::NotMatched)); + } else if new_len < prefix.len() { + // Read less than prefix, make sure we're still matching the prefix (early exit) + if !prefix.starts_with(filled) { + return Poll::Ready(Ok(State::NotMatched)); + } + } else if new_len >= prefix.len() { + // We have enough to determine + if filled.starts_with(prefix) { + return Poll::Ready(Ok(State::Matched)); + } else { + return Poll::Ready(Ok(State::NotMatched)); + } + } + + Poll::Ready(Ok(State::Unknown)) + }) + .await?; + + match state { + State::Unknown => continue, + State::Matched => { + let initialized_len = readbuf.filled().len(); + return Ok(( + true, + NetworkBufferedStream::new(io, buffer, initialized_len), + )); + } + State::NotMatched => { + let initialized_len = readbuf.filled().len(); + return Ok(( + false, + NetworkBufferedStream::new(io, buffer, initialized_len), + )); + } + } + } + } +} + +/// [`NetworkBufferedStream`] is a stream that allows us to efficiently search for an incoming prefix in another stream without +/// reading too much data. If the stream detects that the prefix has definitely been matched, or definitely not been matched, +/// it returns a flag and a rewound stream allowing later code to take another pass at that data. +/// +/// [`NetworkBufferedStream`] is a custom wrapper around an asynchronous stream that implements AsyncRead +/// and AsyncWrite. 
It is designed to provide additional buffering functionality to the wrapped stream. +/// The primary use case for this struct is when you want to read a small amount of data from the beginning +/// of a stream, process it, and then continue reading the rest of the stream. +/// +/// While the bounds for the class are limited to [`AsyncRead`] for easier testing, it is far more useful to use +/// with interactive duplex streams that have a prefix determining which mode to operate in. For example, this class +/// can determine whether an incoming stream is HTTP/2 or non-HTTP/2 and allow downstream code to make that determination. +pub struct NetworkBufferedStream { + prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], + io: S, + initialized_len: usize, + prefix_offset: usize, + /// Have the prefix bytes been completely read out? + prefix_read: bool, +} + +impl NetworkBufferedStream { + /// This constructor is private, because passing partically initialized data between the [`NetworkStreamPrefixCheck`] and + /// this [`NetworkBufferedStream`] is challenging without the introduction of extra copies. + fn new( + io: S, + prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], + initialized_len: usize, + ) -> Self { + Self { + io, + initialized_len, + prefix_offset: 0, + prefix, + prefix_read: false, + } + } + + fn current_slice(&self) -> &[u8] { + // We trust that these bytes are initialized properly + let slice = &self.prefix[self.prefix_offset..self.initialized_len]; + + // This guarantee comes from slice_assume_init_ref (we can't use that until it's stable) + + // SAFETY: casting `slice` to a `*const [T]` is safe since the caller guarantees that + // `slice` is initialized, and `MaybeUninit` is guaranteed to have the same layout as `T`. + // The pointer obtained is valid since it refers to memory owned by `slice` which is a + // reference and thus guaranteed to be valid for reads. 
+ + unsafe { &*(slice as *const [_] as *const [u8]) as _ } + } + + pub fn into_inner(self) -> (S, Bytes) { + let bytes = Bytes::copy_from_slice(self.current_slice()); + (self.io, bytes) + } +} + +impl AsyncRead for NetworkBufferedStream { + // From hyper's Rewind (https://github.com/hyperium/hyper), MIT License, Copyright (c) Sean McArthur + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if !self.prefix_read { + let prefix = self.current_slice(); + + // If there are no remaining bytes, let the bytes get dropped. + if !prefix.is_empty() { + let copy_len = std::cmp::min(prefix.len(), buf.remaining()); + buf.put_slice(&prefix[..copy_len]); + self.prefix_offset += copy_len; + + return Poll::Ready(Ok(())); + } else { + self.prefix_read = true; + } + } + Pin::new(&mut self.io).poll_read(cx, buf) + } +} + +impl AsyncWrite + for NetworkBufferedStream +{ + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_write(cx, buf) + } + + fn poll_flush( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_flush(cx) + } + + fn poll_shutdown( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_shutdown(cx) + } + + fn is_write_vectored(&self) -> bool { + self.io.is_write_vectored() + } + + fn poll_write_vectored( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_write_vectored(cx, bufs) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tokio::io::AsyncReadExt; + + struct YieldsOneByteAtATime(&'static [u8]); + + impl AsyncRead for YieldsOneByteAtATime { + fn poll_read( + mut self: Pin<&mut Self>, + _cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if let 
Some((head, tail)) = self.as_mut().0.split_first() { + self.as_mut().0 = tail; + let dest = buf.initialize_unfilled_to(1); + dest[0] = *head; + buf.advance(1); + } + Poll::Ready(Ok(())) + } + } + + async fn test( + io: impl AsyncRead + Unpin, + prefix: &'static [u8], + expect_match: bool, + expect_string: &'static str, + ) -> io::Result<()> { + let (matches, mut io) = NetworkStreamPrefixCheck::new(io, prefix) + .match_prefix() + .await?; + assert_eq!(matches, expect_match); + let mut s = String::new(); + Pin::new(&mut io).read_to_string(&mut s).await?; + assert_eq!(s, expect_string); + Ok(()) + } + + #[tokio::test] + async fn matches_prefix_simple() -> io::Result<()> { + let buf = b"prefix match".as_slice(); + test(buf, b"prefix", true, "prefix match").await + } + + #[tokio::test] + async fn matches_prefix_exact() -> io::Result<()> { + let buf = b"prefix".as_slice(); + test(buf, b"prefix", true, "prefix").await + } + + #[tokio::test] + async fn not_matches_prefix_simple() -> io::Result<()> { + let buf = b"prefill match".as_slice(); + test(buf, b"prefix", false, "prefill match").await + } + + #[tokio::test] + async fn not_matches_prefix_short() -> io::Result<()> { + let buf = b"nope".as_slice(); + test(buf, b"prefix", false, "nope").await + } + + #[tokio::test] + async fn not_matches_prefix_empty() -> io::Result<()> { + let buf = b"".as_slice(); + test(buf, b"prefix", false, "").await + } + + #[tokio::test] + async fn matches_one_byte_at_a_time() -> io::Result<()> { + let buf = YieldsOneByteAtATime(b"prefix"); + test(buf, b"prefix", true, "prefix").await + } + + #[tokio::test] + async fn not_matches_one_byte_at_a_time() -> io::Result<()> { + let buf = YieldsOneByteAtATime(b"prefill"); + test(buf, b"prefix", false, "prefill").await + } +} diff --git a/ext/http/request_body.rs b/ext/http/request_body.rs new file mode 100644 index 0000000000..73908ca55d --- /dev/null +++ b/ext/http/request_body.rs @@ -0,0 +1,84 @@ +// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. +use bytes::Bytes; +use deno_core::error::AnyError; +use deno_core::futures::stream::Peekable; +use deno_core::futures::Stream; +use deno_core::futures::StreamExt; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::RcRef; +use deno_core::Resource; +use hyper1::body::Body; +use hyper1::body::Incoming; +use hyper1::body::SizeHint; +use std::borrow::Cow; +use std::pin::Pin; +use std::rc::Rc; + +/// Converts a hyper incoming body stream into a stream of [`Bytes`] that we can use to read in V8. +struct ReadFuture(Incoming); + +impl Stream for ReadFuture { + type Item = Result; + + fn poll_next( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + let res = Pin::new(&mut self.get_mut().0).poll_frame(cx); + match res { + std::task::Poll::Ready(Some(Ok(frame))) => { + if let Ok(data) = frame.into_data() { + // Ensure that we never yield an empty frame + if !data.is_empty() { + return std::task::Poll::Ready(Some(Ok(data))); + } + } + } + std::task::Poll::Ready(None) => return std::task::Poll::Ready(None), + _ => {} + } + std::task::Poll::Pending + } +} + +pub struct HttpRequestBody(AsyncRefCell>, SizeHint); + +impl HttpRequestBody { + pub fn new(body: Incoming) -> Self { + let size_hint = body.size_hint(); + Self(AsyncRefCell::new(ReadFuture(body).peekable()), size_hint) + } + + async fn read(self: Rc, limit: usize) -> Result { + let peekable = RcRef::map(self, |this| &this.0); + let mut peekable = peekable.borrow_mut().await; + match Pin::new(&mut *peekable).peek_mut().await { + None => Ok(BufView::empty()), + Some(Err(_)) => Err(peekable.next().await.unwrap().err().unwrap()), + Some(Ok(bytes)) => { + if bytes.len() <= limit { + // We can safely take the next item since we peeked it + return Ok(BufView::from(peekable.next().await.unwrap()?)); + } + let ret = bytes.split_to(limit); + Ok(BufView::from(ret)) + } + } + } +} + +impl Resource for HttpRequestBody { 
+ fn name(&self) -> Cow { + "requestBody".into() + } + + fn read(self: Rc, limit: usize) -> AsyncResult { + Box::pin(HttpRequestBody::read(self, limit)) + } + + fn size_hint(&self) -> (u64, Option) { + (self.1.lower(), self.1.upper()) + } +} diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs new file mode 100644 index 0000000000..905139673e --- /dev/null +++ b/ext/http/request_properties.rs @@ -0,0 +1,309 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use deno_core::error::AnyError; +use deno_core::OpState; +use deno_core::ResourceId; +use deno_net::raw::take_network_stream_listener_resource; +use deno_net::raw::take_network_stream_resource; +use deno_net::raw::NetworkStream; +use deno_net::raw::NetworkStreamAddress; +use deno_net::raw::NetworkStreamListener; +use deno_net::raw::NetworkStreamType; +use hyper::HeaderMap; +use hyper::Uri; +use hyper1::header::HOST; +use std::borrow::Cow; +use std::net::Ipv4Addr; +use std::net::SocketAddr; +use std::net::SocketAddrV4; +use std::rc::Rc; + +// TODO(mmastrac): I don't like that we have to clone this, but it's one-time setup +#[derive(Clone)] +pub struct HttpListenProperties { + pub scheme: &'static str, + pub fallback_host: String, + pub local_port: Option, + pub stream_type: NetworkStreamType, +} + +#[derive(Clone)] +pub struct HttpConnectionProperties { + pub peer_address: Rc, + pub peer_port: Option, + pub local_port: Option, + pub stream_type: NetworkStreamType, +} + +pub struct HttpRequestProperties { + pub authority: Option, +} + +/// Pluggable trait to determine listen, connection and request properties +/// for embedders that wish to provide alternative routes for incoming HTTP. +#[async_trait::async_trait(?Send)] +pub trait HttpPropertyExtractor { + type Listener: 'static; + type Connection; + + /// Given a listener [`ResourceId`], returns the [`HttpPropertyExtractor::Listener`]. 
+ fn get_listener_for_rid( + state: &mut OpState, + listener_rid: ResourceId, + ) -> Result; + + /// Given a connection [`ResourceId`], returns the [`HttpPropertyExtractor::Connection`]. + fn get_connection_for_rid( + state: &mut OpState, + connection_rid: ResourceId, + ) -> Result; + + /// Determines the listener properties. + fn listen_properties_from_listener( + listener: &Self::Listener, + ) -> Result; + + /// Determines the listener properties given a [`HttpPropertyExtractor::Connection`]. + fn listen_properties_from_connection( + connection: &Self::Connection, + ) -> Result; + + /// Accept a new [`HttpPropertyExtractor::Connection`] from the given listener [`HttpPropertyExtractor::Listener`]. + async fn accept_connection_from_listener( + listener: &Self::Listener, + ) -> Result; + + /// Determines the connection properties. + fn connection_properties( + listen_properties: &HttpListenProperties, + connection: &Self::Connection, + ) -> HttpConnectionProperties; + + /// Turn a given [`HttpPropertyExtractor::Connection`] into a [`NetworkStream`]. + fn to_network_stream_from_connection( + connection: Self::Connection, + ) -> NetworkStream; + + /// Determines the request properties. 
+ fn request_properties( + connection_properties: &HttpConnectionProperties, + uri: &Uri, + headers: &HeaderMap, + ) -> HttpRequestProperties; +} + +pub struct DefaultHttpPropertyExtractor {} + +#[async_trait::async_trait(?Send)] +impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { + type Listener = NetworkStreamListener; + + type Connection = NetworkStream; + + fn get_listener_for_rid( + state: &mut OpState, + listener_rid: ResourceId, + ) -> Result { + take_network_stream_listener_resource( + &mut state.resource_table, + listener_rid, + ) + } + + fn get_connection_for_rid( + state: &mut OpState, + stream_rid: ResourceId, + ) -> Result { + take_network_stream_resource(&mut state.resource_table, stream_rid) + } + + async fn accept_connection_from_listener( + listener: &NetworkStreamListener, + ) -> Result { + listener.accept().await.map_err(Into::into) + } + + fn listen_properties_from_listener( + listener: &NetworkStreamListener, + ) -> Result { + let stream_type = listener.stream(); + let local_address = listener.listen_address()?; + listener_properties(stream_type, local_address) + } + + fn listen_properties_from_connection( + connection: &Self::Connection, + ) -> Result { + let stream_type = connection.stream(); + let local_address = connection.local_address()?; + listener_properties(stream_type, local_address) + } + + fn to_network_stream_from_connection( + connection: Self::Connection, + ) -> NetworkStream { + connection + } + + fn connection_properties( + listen_properties: &HttpListenProperties, + connection: &NetworkStream, + ) -> HttpConnectionProperties { + // We always want some sort of peer address. If we can't get one, just make up one. 
+ let peer_address = connection.peer_address().unwrap_or_else(|_| { + NetworkStreamAddress::Ip(SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(0, 0, 0, 0), + 0, + ))) + }); + let peer_port: Option = match peer_address { + NetworkStreamAddress::Ip(ip) => Some(ip.port()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => None, + }; + let peer_address = match peer_address { + NetworkStreamAddress::Ip(addr) => Rc::from(addr.ip().to_string()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => Rc::from("unix"), + }; + let local_port = listen_properties.local_port; + let stream_type = listen_properties.stream_type; + + HttpConnectionProperties { + peer_address, + peer_port, + local_port, + stream_type, + } + } + + fn request_properties( + connection_properties: &HttpConnectionProperties, + uri: &Uri, + headers: &HeaderMap, + ) -> HttpRequestProperties { + let authority = req_host( + uri, + headers, + connection_properties.stream_type, + connection_properties.local_port.unwrap_or_default(), + ) + .map(|s| s.into_owned()); + + HttpRequestProperties { authority } + } +} + +fn listener_properties( + stream_type: NetworkStreamType, + local_address: NetworkStreamAddress, +) -> Result { + let scheme = req_scheme_from_stream_type(stream_type); + let fallback_host = req_host_from_addr(stream_type, &local_address); + let local_port: Option = match local_address { + NetworkStreamAddress::Ip(ip) => Some(ip.port()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => None, + }; + Ok(HttpListenProperties { + scheme, + fallback_host, + local_port, + stream_type, + }) +} + +/// Compute the fallback address from the [`NetworkStreamListenAddress`]. If the request has no authority/host in +/// its URI, and there is no [`HeaderName::HOST`] header, we fall back to this. 
+fn req_host_from_addr( + stream_type: NetworkStreamType, + addr: &NetworkStreamAddress, +) -> String { + match addr { + NetworkStreamAddress::Ip(addr) => { + if (stream_type == NetworkStreamType::Tls && addr.port() == 443) + || (stream_type == NetworkStreamType::Tcp && addr.port() == 80) + { + if addr.ip().is_loopback() || addr.ip().is_unspecified() { + return "localhost".to_owned(); + } + addr.ip().to_string() + } else { + if addr.ip().is_loopback() || addr.ip().is_unspecified() { + return format!("localhost:{}", addr.port()); + } + addr.to_string() + } + } + // There is no standard way for unix domain socket URLs + // nginx and nodejs request use http://unix:[socket_path]:/ but it is not a valid URL + // httpie uses http+unix://[percent_encoding_of_path]/ which we follow + #[cfg(unix)] + NetworkStreamAddress::Unix(unix) => percent_encoding::percent_encode( + unix + .as_pathname() + .and_then(|x| x.to_str()) + .unwrap_or_default() + .as_bytes(), + percent_encoding::NON_ALPHANUMERIC, + ) + .to_string(), + } +} + +fn req_scheme_from_stream_type(stream_type: NetworkStreamType) -> &'static str { + match stream_type { + NetworkStreamType::Tcp => "http://", + NetworkStreamType::Tls => "https://", + #[cfg(unix)] + NetworkStreamType::Unix => "http+unix://", + } +} + +fn req_host<'a>( + uri: &'a Uri, + headers: &'a HeaderMap, + addr_type: NetworkStreamType, + port: u16, +) -> Option> { + // Unix sockets always use the socket address + #[cfg(unix)] + if addr_type == NetworkStreamType::Unix { + return None; + } + + // It is rare that an authority will be passed, but if it does, it takes priority + if let Some(auth) = uri.authority() { + match addr_type { + NetworkStreamType::Tcp => { + if port == 80 { + return Some(Cow::Borrowed(auth.host())); + } + } + NetworkStreamType::Tls => { + if port == 443 { + return Some(Cow::Borrowed(auth.host())); + } + } + #[cfg(unix)] + NetworkStreamType::Unix => {} + } + return Some(Cow::Borrowed(auth.as_str())); + } + + // TODO(mmastrac): 
Most requests will use this path and we probably will want to optimize it in the future + if let Some(host) = headers.get(HOST) { + return Some(match host.to_str() { + Ok(host) => Cow::Borrowed(host), + Err(_) => Cow::Owned( + host + .as_bytes() + .iter() + .cloned() + .map(char::from) + .collect::(), + ), + }); + } + + None +} diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs new file mode 100644 index 0000000000..6793f0e784 --- /dev/null +++ b/ext/http/response_body.rs @@ -0,0 +1,1027 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::io::Write; +use std::pin::Pin; +use std::rc::Rc; +use std::task::Waker; + +use brotli::ffi::compressor::BrotliEncoderState; +use bytes::Bytes; +use bytes::BytesMut; +use deno_core::error::bad_resource; +use deno_core::error::AnyError; +use deno_core::futures::ready; +use deno_core::futures::FutureExt; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::WriteOutcome; +use flate2::write::GzEncoder; +use http::HeaderMap; +use hyper1::body::Body; +use hyper1::body::Frame; +use hyper1::body::SizeHint; +use pin_project::pin_project; + +/// Simplification for nested types we use for our streams. We provide a way to convert from +/// this type into Hyper's body [`Frame`]. +enum ResponseStreamResult { + /// Stream is over. + EndOfStream, + /// Stream provided non-empty data. + NonEmptyBuf(BufView), + /// Stream is ready, but provided no data. Retry. This is a result that is like Pending, but does + /// not register a waker and should be called again at the lowest level of this code. Generally this + /// will only be returned from compression streams that require additional buffering. + NoData, + /// Stream provided trailers. 
+ // TODO(mmastrac): We are threading trailers through the response system to eventually support Grpc. + #[allow(unused)] + Trailers(HeaderMap), + /// Stream failed. + Error(AnyError), +} + +impl From for Option, AnyError>> { + fn from(value: ResponseStreamResult) -> Self { + match value { + ResponseStreamResult::EndOfStream => None, + ResponseStreamResult::NonEmptyBuf(buf) => Some(Ok(Frame::data(buf))), + ResponseStreamResult::Error(err) => Some(Err(err)), + ResponseStreamResult::Trailers(map) => Some(Ok(Frame::trailers(map))), + // This result should be handled by retrying + ResponseStreamResult::NoData => unimplemented!(), + } + } +} + +#[derive(Clone, Debug, Default)] +pub struct CompletionHandle { + inner: Rc>, +} + +#[derive(Debug, Default)] +struct CompletionHandleInner { + complete: bool, + success: bool, + waker: Option, +} + +impl CompletionHandle { + pub fn complete(&self, success: bool) { + let mut mut_self = self.inner.borrow_mut(); + mut_self.complete = true; + mut_self.success = success; + if let Some(waker) = mut_self.waker.take() { + drop(mut_self); + waker.wake(); + } + } + + pub fn is_completed(&self) -> bool { + self.inner.borrow().complete + } +} + +impl Future for CompletionHandle { + type Output = bool; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let mut mut_self = self.inner.borrow_mut(); + if mut_self.complete { + return std::task::Poll::Ready(mut_self.success); + } + + mut_self.waker = Some(cx.waker().clone()); + std::task::Poll::Pending + } +} + +trait PollFrame: Unpin { + fn poll_frame( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll; + + fn size_hint(&self) -> SizeHint; +} + +#[derive(PartialEq, Eq)] +pub enum Compression { + None, + GZip, + Brotli, +} + +pub enum ResponseStream { + /// A resource stream, piped in fast mode. + Resource(ResourceBodyAdapter), + /// A JS-backed stream, written in JS and transported via pipe. 
+ V8Stream(tokio::sync::mpsc::Receiver), +} + +#[derive(Default)] +pub enum ResponseBytesInner { + /// An empty stream. + #[default] + Empty, + /// A completed stream. + Done, + /// A static buffer of bytes, sent in one fell swoop. + Bytes(BufView), + /// An uncompressed stream. + UncompressedStream(ResponseStream), + /// A GZip stream. + GZipStream(GZipResponseStream), + /// A Brotli stream. + BrotliStream(BrotliResponseStream), +} + +impl std::fmt::Debug for ResponseBytesInner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Done => f.write_str("Done"), + Self::Empty => f.write_str("Empty"), + Self::Bytes(..) => f.write_str("Bytes"), + Self::UncompressedStream(..) => f.write_str("Uncompressed"), + Self::GZipStream(..) => f.write_str("GZip"), + Self::BrotliStream(..) => f.write_str("Brotli"), + } + } +} + +/// This represents the union of possible response types in Deno with the stream-style [`Body`] interface +/// required by hyper. As the API requires information about request completion (including a success/fail +/// flag), we include a very lightweight [`CompletionHandle`] for interested parties to listen on. 
+#[derive(Debug, Default)] +pub struct ResponseBytes( + ResponseBytesInner, + CompletionHandle, + Rc>>, +); + +impl ResponseBytes { + pub fn initialize(&mut self, inner: ResponseBytesInner) { + debug_assert!(matches!(self.0, ResponseBytesInner::Empty)); + self.0 = inner; + } + + pub fn completion_handle(&self) -> CompletionHandle { + self.1.clone() + } + + pub fn trailers(&self) -> Rc>> { + self.2.clone() + } + + fn complete(&mut self, success: bool) -> ResponseBytesInner { + if matches!(self.0, ResponseBytesInner::Done) { + return ResponseBytesInner::Done; + } + + let current = std::mem::replace(&mut self.0, ResponseBytesInner::Done); + self.1.complete(success); + current + } +} + +impl ResponseBytesInner { + pub fn size_hint(&self) -> SizeHint { + match self { + Self::Done => SizeHint::with_exact(0), + Self::Empty => SizeHint::with_exact(0), + Self::Bytes(bytes) => SizeHint::with_exact(bytes.len() as u64), + Self::UncompressedStream(res) => res.size_hint(), + Self::GZipStream(..) => SizeHint::default(), + Self::BrotliStream(..) 
=> SizeHint::default(), + } + } + + fn from_stream(compression: Compression, stream: ResponseStream) -> Self { + match compression { + Compression::GZip => Self::GZipStream(GZipResponseStream::new(stream)), + Compression::Brotli => { + Self::BrotliStream(BrotliResponseStream::new(stream)) + } + _ => Self::UncompressedStream(stream), + } + } + + pub fn from_v8( + compression: Compression, + rx: tokio::sync::mpsc::Receiver, + ) -> Self { + Self::from_stream(compression, ResponseStream::V8Stream(rx)) + } + + pub fn from_resource( + compression: Compression, + stm: Rc, + auto_close: bool, + ) -> Self { + Self::from_stream( + compression, + ResponseStream::Resource(ResourceBodyAdapter::new(stm, auto_close)), + ) + } + + pub fn from_slice(compression: Compression, bytes: &[u8]) -> Self { + match compression { + Compression::GZip => { + let mut writer = + GzEncoder::new(Vec::new(), flate2::Compression::fast()); + writer.write_all(bytes).unwrap(); + Self::Bytes(BufView::from(writer.finish().unwrap())) + } + Compression::Brotli => { + // quality level 6 is based on google's nginx default value for + // on-the-fly compression + // https://github.com/google/ngx_brotli#brotli_comp_level + // lgwin 22 is equivalent to brotli window size of (2**22)-16 bytes + // (~4MB) + let mut writer = + brotli::CompressorWriter::new(Vec::new(), 65 * 1024, 6, 22); + writer.write_all(bytes).unwrap(); + writer.flush().unwrap(); + Self::Bytes(BufView::from(writer.into_inner())) + } + _ => Self::Bytes(BufView::from(bytes.to_vec())), + } + } + + pub fn from_vec(compression: Compression, vec: Vec) -> Self { + match compression { + Compression::GZip => { + let mut writer = + GzEncoder::new(Vec::new(), flate2::Compression::fast()); + writer.write_all(&vec).unwrap(); + Self::Bytes(BufView::from(writer.finish().unwrap())) + } + Compression::Brotli => { + let mut writer = + brotli::CompressorWriter::new(Vec::new(), 65 * 1024, 6, 22); + writer.write_all(&vec).unwrap(); + writer.flush().unwrap(); + 
Self::Bytes(BufView::from(writer.into_inner())) + } + _ => Self::Bytes(BufView::from(vec)), + } + } +} + +impl Body for ResponseBytes { + type Data = BufView; + type Error = AnyError; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll, Self::Error>>> { + let res = loop { + let res = match &mut self.0 { + ResponseBytesInner::Done | ResponseBytesInner::Empty => { + if let Some(trailers) = self.2.borrow_mut().take() { + return std::task::Poll::Ready(Some(Ok(Frame::trailers(trailers)))); + } + unreachable!() + } + ResponseBytesInner::Bytes(..) => { + let ResponseBytesInner::Bytes(data) = self.complete(true) else { unreachable!(); }; + return std::task::Poll::Ready(Some(Ok(Frame::data(data)))); + } + ResponseBytesInner::UncompressedStream(stm) => { + ready!(Pin::new(stm).poll_frame(cx)) + } + ResponseBytesInner::GZipStream(stm) => { + ready!(Pin::new(stm).poll_frame(cx)) + } + ResponseBytesInner::BrotliStream(stm) => { + ready!(Pin::new(stm).poll_frame(cx)) + } + }; + // This is where we retry the NoData response + if matches!(res, ResponseStreamResult::NoData) { + continue; + } + break res; + }; + + if matches!(res, ResponseStreamResult::EndOfStream) { + if let Some(trailers) = self.2.borrow_mut().take() { + return std::task::Poll::Ready(Some(Ok(Frame::trailers(trailers)))); + } + self.complete(true); + } + std::task::Poll::Ready(res.into()) + } + + fn is_end_stream(&self) -> bool { + matches!(self.0, ResponseBytesInner::Done | ResponseBytesInner::Empty) + && self.2.borrow_mut().is_none() + } + + fn size_hint(&self) -> SizeHint { + // The size hint currently only used in the case where it is exact bounds in hyper, but we'll pass it through + // anyways just in case hyper needs it. 
+ self.0.size_hint() + } +} + +impl Drop for ResponseBytes { + fn drop(&mut self) { + // We won't actually poll_frame for Empty responses so this is where we return success + self.complete(matches!(self.0, ResponseBytesInner::Empty)); + } +} + +pub struct ResourceBodyAdapter { + auto_close: bool, + stm: Rc, + future: AsyncResult, +} + +impl ResourceBodyAdapter { + pub fn new(stm: Rc, auto_close: bool) -> Self { + let future = stm.clone().read(64 * 1024); + ResourceBodyAdapter { + auto_close, + stm, + future, + } + } +} + +impl PollFrame for ResponseStream { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + match &mut *self { + ResponseStream::Resource(res) => Pin::new(res).poll_frame(cx), + ResponseStream::V8Stream(res) => Pin::new(res).poll_frame(cx), + } + } + + fn size_hint(&self) -> SizeHint { + match self { + ResponseStream::Resource(res) => res.size_hint(), + ResponseStream::V8Stream(res) => res.size_hint(), + } + } +} + +impl PollFrame for ResourceBodyAdapter { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let res = match ready!(self.future.poll_unpin(cx)) { + Err(err) => ResponseStreamResult::Error(err), + Ok(buf) => { + if buf.is_empty() { + if self.auto_close { + self.stm.clone().close(); + } + ResponseStreamResult::EndOfStream + } else { + // Re-arm the future + self.future = self.stm.clone().read(64 * 1024); + ResponseStreamResult::NonEmptyBuf(buf) + } + } + }; + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + let hint = self.stm.size_hint(); + let mut size_hint = SizeHint::new(); + size_hint.set_lower(hint.0); + if let Some(upper) = hint.1 { + size_hint.set_upper(upper) + } + size_hint + } +} + +impl PollFrame for tokio::sync::mpsc::Receiver { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let res = match ready!(self.poll_recv(cx)) { + Some(buf) => 
ResponseStreamResult::NonEmptyBuf(buf), + None => ResponseStreamResult::EndOfStream, + }; + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + SizeHint::default() + } +} + +#[derive(Copy, Clone, Debug)] +enum GZipState { + Header, + Streaming, + Flushing, + Trailer, + EndOfStream, +} + +#[pin_project] +pub struct GZipResponseStream { + stm: flate2::Compress, + crc: flate2::Crc, + next_buf: Option, + partial: Option, + #[pin] + underlying: ResponseStream, + state: GZipState, +} + +impl GZipResponseStream { + pub fn new(underlying: ResponseStream) -> Self { + Self { + stm: flate2::Compress::new(flate2::Compression::fast(), false), + crc: flate2::Crc::new(), + next_buf: None, + partial: None, + state: GZipState::Header, + underlying, + } + } +} + +/// This is a minimal GZip header suitable for serving data from a webserver. We don't need to provide +/// most of the information. We're skipping header name, CRC, etc, and providing a null timestamp. +/// +/// We're using compression level 1, as higher levels don't produce significant size differences. 
This +/// is probably the reason why nginx's default gzip compression level is also 1: +/// +/// https://nginx.org/en/docs/http/ngx_http_gzip_module.html#gzip_comp_level +static GZIP_HEADER: Bytes = + Bytes::from_static(&[0x1f, 0x8b, 0x08, 0, 0, 0, 0, 0, 0x01, 0xff]); + +impl PollFrame for GZipResponseStream { + fn poll_frame( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.get_mut(); + let state = &mut this.state; + let orig_state = *state; + let frame = match *state { + GZipState::EndOfStream => { + return std::task::Poll::Ready(ResponseStreamResult::EndOfStream) + } + GZipState::Header => { + *state = GZipState::Streaming; + return std::task::Poll::Ready(ResponseStreamResult::NonEmptyBuf( + BufView::from(GZIP_HEADER.clone()), + )); + } + GZipState::Trailer => { + *state = GZipState::EndOfStream; + let mut v = Vec::with_capacity(8); + v.extend(&this.crc.sum().to_le_bytes()); + v.extend(&this.crc.amount().to_le_bytes()); + return std::task::Poll::Ready(ResponseStreamResult::NonEmptyBuf( + BufView::from(v), + )); + } + GZipState::Streaming => { + if let Some(partial) = this.partial.take() { + ResponseStreamResult::NonEmptyBuf(partial) + } else { + ready!(Pin::new(&mut this.underlying).poll_frame(cx)) + } + } + GZipState::Flushing => ResponseStreamResult::EndOfStream, + }; + + let stm = &mut this.stm; + + // Ideally we could use MaybeUninit here, but flate2 requires &[u8]. We should also try + // to dynamically adjust this buffer. + let mut buf = this + .next_buf + .take() + .unwrap_or_else(|| BytesMut::zeroed(64 * 1024)); + + let start_in = stm.total_in(); + let start_out = stm.total_out(); + let res = match frame { + // Short-circuit these and just return + x @ (ResponseStreamResult::NoData + | ResponseStreamResult::Error(..) 
+ | ResponseStreamResult::Trailers(..)) => { + return std::task::Poll::Ready(x) + } + ResponseStreamResult::EndOfStream => { + *state = GZipState::Flushing; + stm.compress(&[], &mut buf, flate2::FlushCompress::Finish) + } + ResponseStreamResult::NonEmptyBuf(mut input) => { + let res = stm.compress(&input, &mut buf, flate2::FlushCompress::None); + let len_in = (stm.total_in() - start_in) as usize; + debug_assert!(len_in <= input.len()); + this.crc.update(&input[..len_in]); + if len_in < input.len() { + input.advance_cursor(len_in); + this.partial = Some(input); + } + res + } + }; + let len = stm.total_out() - start_out; + let res = match res { + Err(err) => ResponseStreamResult::Error(err.into()), + Ok(flate2::Status::BufError) => { + // This should not happen + unreachable!("old={orig_state:?} new={state:?} buf_len={}", buf.len()); + } + Ok(flate2::Status::Ok) => { + if len == 0 { + this.next_buf = Some(buf); + ResponseStreamResult::NoData + } else { + buf.truncate(len as usize); + ResponseStreamResult::NonEmptyBuf(BufView::from(buf.freeze())) + } + } + Ok(flate2::Status::StreamEnd) => { + *state = GZipState::Trailer; + if len == 0 { + this.next_buf = Some(buf); + ResponseStreamResult::NoData + } else { + buf.truncate(len as usize); + ResponseStreamResult::NonEmptyBuf(BufView::from(buf.freeze())) + } + } + }; + + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + SizeHint::default() + } +} + +#[derive(Copy, Clone, Debug)] +enum BrotliState { + Streaming, + Flushing, + EndOfStream, +} + +struct BrotliEncoderStateWrapper { + stm: *mut BrotliEncoderState, +} + +#[pin_project] +pub struct BrotliResponseStream { + state: BrotliState, + stm: BrotliEncoderStateWrapper, + current_cursor: usize, + output_written_so_far: usize, + #[pin] + underlying: ResponseStream, +} + +impl Drop for BrotliEncoderStateWrapper { + fn drop(&mut self) { + // SAFETY: since we are dropping, we can be sure that this instance will not + // be used again. 
+ unsafe { + brotli::ffi::compressor::BrotliEncoderDestroyInstance(self.stm); + } + } +} + +impl BrotliResponseStream { + pub fn new(underlying: ResponseStream) -> Self { + Self { + // SAFETY: creating an FFI instance should be OK with these args. + stm: unsafe { + BrotliEncoderStateWrapper { + stm: brotli::ffi::compressor::BrotliEncoderCreateInstance( + None, + None, + std::ptr::null_mut(), + ), + } + }, + output_written_so_far: 0, + current_cursor: 0, + state: BrotliState::Streaming, + underlying, + } + } +} + +fn max_compressed_size(input_size: usize) -> usize { + if input_size == 0 { + return 2; + } + + // [window bits / empty metadata] + N * [uncompressed] + [last empty] + let num_large_blocks = input_size >> 14; + let overhead = 2 + (4 * num_large_blocks) + 3 + 1; + let result = input_size + overhead; + + if result < input_size { + 0 + } else { + result + } +} + +impl PollFrame for BrotliResponseStream { + fn poll_frame( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.get_mut(); + let state = &mut this.state; + let frame = match *state { + BrotliState::Streaming => { + ready!(Pin::new(&mut this.underlying).poll_frame(cx)) + } + BrotliState::Flushing => ResponseStreamResult::EndOfStream, + BrotliState::EndOfStream => { + return std::task::Poll::Ready(ResponseStreamResult::EndOfStream); + } + }; + + let res = match frame { + ResponseStreamResult::NonEmptyBuf(buf) => { + let mut output_written = 0; + let mut total_output_written = 0; + let mut input_size = buf.len(); + let input_buffer = buf.as_ref(); + let mut len = max_compressed_size(input_size); + let mut output_buffer = vec![0u8; len]; + let mut ob_ptr = output_buffer.as_mut_ptr(); + + // SAFETY: these are okay arguments to these FFI calls. 
+ unsafe { + brotli::ffi::compressor::BrotliEncoderCompressStream( + this.stm.stm, + brotli::ffi::compressor::BrotliEncoderOperation::BROTLI_OPERATION_PROCESS, + &mut input_size, + &input_buffer.as_ptr() as *const *const u8 as *mut *const u8, + &mut len, + &mut ob_ptr, + &mut output_written, + ); + total_output_written += output_written; + output_written = 0; + + brotli::ffi::compressor::BrotliEncoderCompressStream( + this.stm.stm, + brotli::ffi::compressor::BrotliEncoderOperation::BROTLI_OPERATION_FLUSH, + &mut input_size, + &input_buffer.as_ptr() as *const *const u8 as *mut *const u8, + &mut len, + &mut ob_ptr, + &mut output_written, + ); + total_output_written += output_written; + }; + + output_buffer + .truncate(total_output_written - this.output_written_so_far); + this.output_written_so_far = total_output_written; + ResponseStreamResult::NonEmptyBuf(BufView::from(output_buffer)) + } + ResponseStreamResult::EndOfStream => { + let mut len = 1024usize; + let mut output_buffer = vec![0u8; len]; + let mut input_size = 0; + let mut output_written = 0; + let ob_ptr = output_buffer.as_mut_ptr(); + + // SAFETY: these are okay arguments to these FFI calls. + unsafe { + brotli::ffi::compressor::BrotliEncoderCompressStream( + this.stm.stm, + brotli::ffi::compressor::BrotliEncoderOperation::BROTLI_OPERATION_FINISH, + &mut input_size, + std::ptr::null_mut(), + &mut len, + &ob_ptr as *const *mut u8 as *mut *mut u8, + &mut output_written, + ); + }; + + if output_written == 0 { + this.state = BrotliState::EndOfStream; + ResponseStreamResult::EndOfStream + } else { + this.state = BrotliState::Flushing; + output_buffer.truncate(output_written - this.output_written_so_far); + ResponseStreamResult::NonEmptyBuf(BufView::from(output_buffer)) + } + } + _ => frame, + }; + + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + SizeHint::default() + } +} + +/// A response body object that can be passed to V8. 
This body will feed byte buffers to a channel which +/// feed's hyper's HTTP response. +pub struct V8StreamHttpResponseBody( + AsyncRefCell>>, + CancelHandle, +); + +impl V8StreamHttpResponseBody { + pub fn new(sender: tokio::sync::mpsc::Sender) -> Self { + Self(AsyncRefCell::new(Some(sender)), CancelHandle::default()) + } +} + +impl Resource for V8StreamHttpResponseBody { + fn name(&self) -> Cow { + "responseBody".into() + } + + fn write( + self: Rc, + buf: BufView, + ) -> AsyncResult { + let cancel_handle = RcRef::map(&self, |this| &this.1); + Box::pin( + async move { + let nwritten = buf.len(); + + let res = RcRef::map(self, |this| &this.0).borrow().await; + if let Some(tx) = res.as_ref() { + tx.send(buf) + .await + .map_err(|_| bad_resource("failed to write"))?; + Ok(WriteOutcome::Full { nwritten }) + } else { + Err(bad_resource("failed to write")) + } + } + .try_or_cancel(cancel_handle), + ) + } + + fn close(self: Rc) { + self.1.cancel(); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use deno_core::futures::future::poll_fn; + use std::hash::Hasher; + use std::io::Read; + use std::io::Write; + + fn zeros() -> Vec { + vec![0; 1024 * 1024] + } + + fn hard_to_gzip_data() -> Vec { + const SIZE: usize = 1024 * 1024; + let mut v = Vec::with_capacity(SIZE); + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + for i in 0..SIZE { + hasher.write_usize(i); + v.push(hasher.finish() as u8); + } + v + } + + fn already_gzipped_data() -> Vec { + let mut v = Vec::with_capacity(1024 * 1024); + let mut gz = + flate2::GzBuilder::new().write(&mut v, flate2::Compression::best()); + gz.write_all(&hard_to_gzip_data()).unwrap(); + _ = gz.finish().unwrap(); + v + } + + fn chunk(v: Vec) -> impl Iterator> { + // Chunk the data into 10k + let mut out = vec![]; + for v in v.chunks(10 * 1024) { + out.push(v.to_vec()); + } + out.into_iter() + } + + fn random(mut v: Vec) -> impl Iterator> { + let mut out = vec![]; + loop { + if v.is_empty() { + break; + } + let rand = 
(rand::random::() % v.len()) + 1; + let new = v.split_off(rand); + out.push(v); + v = new; + } + // Print the lengths of the vectors if we actually fail this test at some point + let lengths = out.iter().map(|v| v.len()).collect::>(); + eprintln!("Lengths = {:?}", lengths); + out.into_iter() + } + + fn front_load(mut v: Vec) -> impl Iterator> { + // Chunk the data at 90% + let offset = (v.len() * 90) / 100; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + fn front_load_but_one(mut v: Vec) -> impl Iterator> { + let offset = v.len() - 1; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + fn back_load(mut v: Vec) -> impl Iterator> { + // Chunk the data at 10% + let offset = (v.len() * 10) / 100; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + async fn test_gzip(i: impl Iterator> + Send + 'static) { + let v = i.collect::>(); + let mut expected: Vec = vec![]; + for v in &v { + expected.extend(v); + } + let (tx, rx) = tokio::sync::mpsc::channel(1); + let underlying = ResponseStream::V8Stream(rx); + let mut resp = GZipResponseStream::new(underlying); + let handle = tokio::task::spawn(async move { + for chunk in v { + tx.send(chunk.into()).await.ok().unwrap(); + } + }); + // Limit how many times we'll loop + const LIMIT: usize = 1000; + let mut v: Vec = vec![]; + for i in 0..=LIMIT { + assert_ne!(i, LIMIT); + let frame = poll_fn(|cx| Pin::new(&mut resp).poll_frame(cx)).await; + if matches!(frame, ResponseStreamResult::EndOfStream) { + break; + } + if matches!(frame, ResponseStreamResult::NoData) { + continue; + } + let ResponseStreamResult::NonEmptyBuf(buf) = frame else { + panic!("Unexpected stream type"); + }; + assert_ne!(buf.len(), 0); + v.extend(&*buf); + } + + let mut gz = flate2::read::GzDecoder::new(&*v); + let mut v = vec![]; + gz.read_to_end(&mut v).unwrap(); + + assert_eq!(v, expected); + + handle.await.unwrap(); + } + + async fn test_brotli(i: impl Iterator> + Send + 'static) { + let v = i.collect::>(); + let 
mut expected: Vec = vec![]; + for v in &v { + expected.extend(v); + } + let (tx, rx) = tokio::sync::mpsc::channel(1); + let underlying = ResponseStream::V8Stream(rx); + let mut resp = BrotliResponseStream::new(underlying); + let handle = tokio::task::spawn(async move { + for chunk in v { + tx.send(chunk.into()).await.ok().unwrap(); + } + }); + // Limit how many times we'll loop + const LIMIT: usize = 1000; + let mut v: Vec = vec![]; + for i in 0..=LIMIT { + assert_ne!(i, LIMIT); + let frame = poll_fn(|cx| Pin::new(&mut resp).poll_frame(cx)).await; + if matches!(frame, ResponseStreamResult::EndOfStream) { + break; + } + if matches!(frame, ResponseStreamResult::NoData) { + continue; + } + let ResponseStreamResult::NonEmptyBuf(buf) = frame else { + panic!("Unexpected stream type"); + }; + assert_ne!(buf.len(), 0); + v.extend(&*buf); + } + + let mut gz = brotli::Decompressor::new(&*v, v.len()); + let mut v = vec![]; + if !expected.is_empty() { + gz.read_to_end(&mut v).unwrap(); + } + + assert_eq!(v, expected); + + handle.await.unwrap(); + } + + #[tokio::test] + async fn test_simple() { + test_brotli(vec![b"hello world".to_vec()].into_iter()).await; + test_gzip(vec![b"hello world".to_vec()].into_iter()).await; + } + + #[tokio::test] + async fn test_empty() { + test_brotli(vec![].into_iter()).await; + test_gzip(vec![].into_iter()).await; + } + + #[tokio::test] + async fn test_simple_zeros() { + test_brotli(vec![vec![0; 0x10000]].into_iter()).await; + test_gzip(vec![vec![0; 0x10000]].into_iter()).await; + } + + macro_rules! 
test { + ($vec:ident) => { + mod $vec { + #[tokio::test] + async fn chunk() { + let iter = super::chunk(super::$vec()); + super::test_gzip(iter).await; + let br_iter = super::chunk(super::$vec()); + super::test_brotli(br_iter).await; + } + + #[tokio::test] + async fn front_load() { + let iter = super::front_load(super::$vec()); + super::test_gzip(iter).await; + let br_iter = super::front_load(super::$vec()); + super::test_brotli(br_iter).await; + } + + #[tokio::test] + async fn front_load_but_one() { + let iter = super::front_load_but_one(super::$vec()); + super::test_gzip(iter).await; + let br_iter = super::front_load_but_one(super::$vec()); + super::test_brotli(br_iter).await; + } + + #[tokio::test] + async fn back_load() { + let iter = super::back_load(super::$vec()); + super::test_gzip(iter).await; + let br_iter = super::back_load(super::$vec()); + super::test_brotli(br_iter).await; + } + + #[tokio::test] + async fn random() { + let iter = super::random(super::$vec()); + super::test_gzip(iter).await; + let br_iter = super::random(super::$vec()); + super::test_brotli(br_iter).await; + } + } + }; + } + + test!(zeros); + test!(hard_to_gzip_data); + test!(already_gzipped_data); +} diff --git a/ext/http/slab.rs b/ext/http/slab.rs new file mode 100644 index 0000000000..93a56e9ff3 --- /dev/null +++ b/ext/http/slab.rs @@ -0,0 +1,252 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::request_properties::HttpConnectionProperties; +use crate::response_body::CompletionHandle; +use crate::response_body::ResponseBytes; +use deno_core::error::AnyError; +use http::request::Parts; +use http::HeaderMap; +use hyper1::body::Incoming; +use hyper1::upgrade::OnUpgrade; + +use slab::Slab; +use std::cell::RefCell; +use std::cell::RefMut; +use std::ptr::NonNull; +use std::rc::Rc; + +pub type Request = hyper1::Request; +pub type Response = hyper1::Response; +pub type SlabId = u32; + +pub struct HttpSlabRecord { + request_info: HttpConnectionProperties, + request_parts: Parts, + request_body: Option, + // The response may get taken before we tear this down + response: Option, + promise: CompletionHandle, + trailers: Rc>>, + been_dropped: bool, + #[cfg(feature = "__zombie_http_tracking")] + alive: bool, +} + +thread_local! { + static SLAB: RefCell> = RefCell::new(Slab::with_capacity(1024)); +} + +macro_rules! http_trace { + ($index:expr, $args:tt) => { + #[cfg(feature = "__http_tracing")] + { + let total = SLAB.with(|x| x.try_borrow().map(|x| x.len())); + if let Ok(total) = total { + println!("HTTP id={} total={}: {}", $index, total, format!($args)); + } else { + println!("HTTP id={} total=?: {}", $index, format!($args)); + } + } + }; +} + +/// Hold a lock on the slab table and a reference to one entry in the table. 
+pub struct SlabEntry( + NonNull, + SlabId, + RefMut<'static, Slab>, +); + +pub fn slab_get(index: SlabId) -> SlabEntry { + http_trace!(index, "slab_get"); + let mut lock: RefMut<'static, Slab> = SLAB.with(|x| { + // SAFETY: We're extracting a lock here and placing it into an object that is thread-local, !Send as a &'static + unsafe { std::mem::transmute(x.borrow_mut()) } + }); + let Some(entry) = lock.get_mut(index as usize) else { + panic!("HTTP state error: Attemped to access invalid request {} ({} in total available)", + index, + lock.len()) + }; + #[cfg(feature = "__zombie_http_tracking")] + { + assert!(entry.alive, "HTTP state error: Entry is not alive"); + } + let entry = NonNull::new(entry as _).unwrap(); + + SlabEntry(entry, index, lock) +} + +#[allow(clippy::let_and_return)] +fn slab_insert_raw( + request_parts: Parts, + request_body: Option, + request_info: HttpConnectionProperties, +) -> SlabId { + let index = SLAB.with(|slab| { + let mut slab = slab.borrow_mut(); + let body = ResponseBytes::default(); + let trailers = body.trailers(); + slab.insert(HttpSlabRecord { + request_info, + request_parts, + request_body, + response: Some(Response::new(body)), + trailers, + been_dropped: false, + promise: CompletionHandle::default(), + #[cfg(feature = "__zombie_http_tracking")] + alive: true, + }) + }) as u32; + http_trace!(index, "slab_insert"); + index +} + +pub fn slab_insert( + request: Request, + request_info: HttpConnectionProperties, +) -> SlabId { + let (request_parts, request_body) = request.into_parts(); + slab_insert_raw(request_parts, Some(request_body), request_info) +} + +pub fn slab_drop(index: SlabId) { + http_trace!(index, "slab_drop"); + let mut entry = slab_get(index); + let record = entry.self_mut(); + assert!( + !record.been_dropped, + "HTTP state error: Entry has already been dropped" + ); + record.been_dropped = true; + if record.promise.is_completed() { + drop(entry); + slab_expunge(index); + } +} + +fn slab_expunge(index: SlabId) { + 
SLAB.with(|slab| { + #[cfg(__zombie_http_tracking)] + { + slab.borrow_mut().get_mut(index as usize).unwrap().alive = false; + } + #[cfg(not(__zombie_http_tracking))] + { + slab.borrow_mut().remove(index as usize); + } + }); + http_trace!(index, "slab_expunge"); +} + +impl SlabEntry { + fn self_ref(&self) -> &HttpSlabRecord { + // SAFETY: We have the lock and we're borrowing lifetime from self + unsafe { self.0.as_ref() } + } + + fn self_mut(&mut self) -> &mut HttpSlabRecord { + // SAFETY: We have the lock and we're borrowing lifetime from self + unsafe { self.0.as_mut() } + } + + /// Perform the Hyper upgrade on this entry. + pub fn upgrade(&mut self) -> Result { + // Manually perform the upgrade. We're peeking into hyper's underlying machinery here a bit + self + .self_mut() + .request_parts + .extensions + .remove::() + .ok_or_else(|| AnyError::msg("upgrade unavailable")) + } + + /// Take the Hyper body from this entry. + pub fn take_body(&mut self) -> Incoming { + self.self_mut().request_body.take().unwrap() + } + + /// Complete this entry, potentially expunging it if it is complete. + pub fn complete(self) { + let promise = &self.self_ref().promise; + assert!( + !promise.is_completed(), + "HTTP state error: Entry has already been completed" + ); + http_trace!(self.1, "SlabEntry::complete"); + promise.complete(true); + // If we're all done, we need to drop ourself to release the lock before we expunge this record + if self.self_ref().been_dropped { + let index = self.1; + drop(self); + slab_expunge(index); + } + } + + /// Get a mutable reference to the response. + pub fn response(&mut self) -> &mut Response { + self.self_mut().response.as_mut().unwrap() + } + + /// Get a mutable reference to the trailers. + pub fn trailers(&mut self) -> &RefCell> { + &self.self_mut().trailers + } + + /// Take the response. + pub fn take_response(&mut self) -> Response { + self.self_mut().response.take().unwrap() + } + + /// Get a reference to the connection properties. 
+ pub fn request_info(&self) -> &HttpConnectionProperties { + &self.self_ref().request_info + } + + /// Get a reference to the request parts. + pub fn request_parts(&self) -> &Parts { + &self.self_ref().request_parts + } + + /// Get a reference to the completion handle. + pub fn promise(&self) -> CompletionHandle { + self.self_ref().promise.clone() + } + + /// Get a reference to the response body completion handle. + pub fn body_promise(&self) -> CompletionHandle { + self + .self_ref() + .response + .as_ref() + .unwrap() + .body() + .completion_handle() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use deno_net::raw::NetworkStreamType; + use http::Request; + + #[test] + fn test_slab() { + let req = Request::builder().body(()).unwrap(); + let (parts, _) = req.into_parts(); + let id = slab_insert_raw( + parts, + None, + HttpConnectionProperties { + peer_address: "".into(), + peer_port: None, + local_port: None, + stream_type: NetworkStreamType::Tcp, + }, + ); + let entry = slab_get(id); + entry.complete(); + slab_drop(id); + } +} diff --git a/ext/http/websocket_upgrade.rs b/ext/http/websocket_upgrade.rs index 042a467219..70ad785267 100644 --- a/ext/http/websocket_upgrade.rs +++ b/ext/http/websocket_upgrade.rs @@ -1,12 +1,13 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::marker::PhantomData; + use bytes::Bytes; use bytes::BytesMut; use deno_core::error::AnyError; use httparse::Status; use hyper::http::HeaderName; use hyper::http::HeaderValue; -use hyper::Body; use hyper::Response; use memmem::Searcher; use memmem::TwoWaySearcher; @@ -15,14 +16,14 @@ use once_cell::sync::OnceCell; use crate::http_error; /// Given a buffer that ends in `\n\n` or `\r\n\r\n`, returns a parsed [`Request`]. 
-fn parse_response( +fn parse_response( header_bytes: &[u8], -) -> Result<(usize, Response), AnyError> { +) -> Result<(usize, Response), AnyError> { let mut headers = [httparse::EMPTY_HEADER; 16]; let status = httparse::parse_headers(header_bytes, &mut headers)?; match status { Status::Complete((index, parsed)) => { - let mut resp = Response::builder().status(101).body(Body::empty())?; + let mut resp = Response::builder().status(101).body(T::default())?; for header in parsed.iter() { resp.headers_mut().append( HeaderName::from_bytes(header.name.as_bytes())?, @@ -59,12 +60,13 @@ static HEADER_SEARCHER: OnceCell = OnceCell::new(); static HEADER_SEARCHER2: OnceCell = OnceCell::new(); #[derive(Default)] -pub struct WebSocketUpgrade { +pub struct WebSocketUpgrade { state: WebSocketUpgradeState, buf: BytesMut, + _t: PhantomData, } -impl WebSocketUpgrade { +impl WebSocketUpgrade { /// Ensures that the status line starts with "HTTP/1.1 101 " which matches all of the node.js /// WebSocket libraries that are known. We don't care about the trailing status text. 
fn validate_status(&self, status: &[u8]) -> Result<(), AnyError> { @@ -80,7 +82,7 @@ impl WebSocketUpgrade { pub fn write( &mut self, bytes: &[u8], - ) -> Result, Bytes)>, AnyError> { + ) -> Result, Bytes)>, AnyError> { use WebSocketUpgradeState::*; match self.state { @@ -153,6 +155,7 @@ impl WebSocketUpgrade { #[cfg(test)] mod tests { use super::*; + use hyper::Body; type ExpectedResponseAndHead = Option<(Response, &'static [u8])>; diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index ceb4e3f954..c3fb336154 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.9.0" +version = "0.17.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -14,7 +14,10 @@ description = "IO promitives for Deno extensions" path = "lib.rs" [dependencies] +async-trait.workspace = true deno_core.workspace = true +filetime.workspace = true +fs3.workspace = true once_cell.workspace = true tokio.workspace = true diff --git a/ext/io/fs.rs b/ext/io/fs.rs new file mode 100644 index 0000000000..9afa192ab9 --- /dev/null +++ b/ext/io/fs.rs @@ -0,0 +1,371 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use std::borrow::Cow; +use std::io; +use std::rc::Rc; +use std::time::SystemTime; +use std::time::UNIX_EPOCH; + +use deno_core::error::not_supported; +use deno_core::error::resource_unavailable; +use deno_core::error::AnyError; +use deno_core::BufMutView; +use deno_core::BufView; +use deno_core::OpState; +use deno_core::ResourceId; +use tokio::task::JoinError; + +#[derive(Debug)] +pub enum FsError { + Io(io::Error), + FileBusy, + NotSupported, +} + +impl FsError { + pub fn kind(&self) -> io::ErrorKind { + match self { + Self::Io(err) => err.kind(), + Self::FileBusy => io::ErrorKind::Other, + Self::NotSupported => io::ErrorKind::Other, + } + } + + pub fn into_io_error(self) -> io::Error { + match self { + FsError::Io(err) => err, + FsError::FileBusy => io::Error::new(self.kind(), "file busy"), + FsError::NotSupported => io::Error::new(self.kind(), "not supported"), + } + } +} + +impl From for FsError { + fn from(err: io::Error) -> Self { + Self::Io(err) + } +} + +impl From for AnyError { + fn from(err: FsError) -> Self { + match err { + FsError::Io(err) => AnyError::from(err), + FsError::FileBusy => resource_unavailable(), + FsError::NotSupported => not_supported(), + } + } +} + +impl From for FsError { + fn from(err: JoinError) -> Self { + if err.is_cancelled() { + todo!("async tasks must not be cancelled") + } + if err.is_panic() { + std::panic::resume_unwind(err.into_panic()); // resume the panic on the main thread + } + unreachable!() + } +} + +pub type FsResult = Result; + +pub struct FsStat { + pub is_file: bool, + pub is_directory: bool, + pub is_symlink: bool, + pub size: u64, + + pub mtime: Option, + pub atime: Option, + pub birthtime: Option, + + pub dev: u64, + pub ino: u64, + pub mode: u32, + pub nlink: u64, + pub uid: u32, + pub gid: u32, + pub rdev: u64, + pub blksize: u64, + pub blocks: u64, + pub is_block_device: bool, + pub is_char_device: bool, + pub is_fifo: bool, + pub is_socket: bool, +} + +impl FsStat { + pub fn from_std(metadata: 
std::fs::Metadata) -> Self { + macro_rules! unix_or_zero { + ($member:ident) => {{ + #[cfg(unix)] + { + use std::os::unix::fs::MetadataExt; + metadata.$member() + } + #[cfg(not(unix))] + { + 0 + } + }}; + } + + macro_rules! unix_or_false { + ($member:ident) => {{ + #[cfg(unix)] + { + use std::os::unix::fs::FileTypeExt; + metadata.file_type().$member() + } + #[cfg(not(unix))] + { + false + } + }}; + } + + #[inline(always)] + fn to_msec(maybe_time: Result) -> Option { + match maybe_time { + Ok(time) => Some( + time + .duration_since(UNIX_EPOCH) + .map(|t| t.as_millis() as u64) + .unwrap_or_else(|err| err.duration().as_millis() as u64), + ), + Err(_) => None, + } + } + + Self { + is_file: metadata.is_file(), + is_directory: metadata.is_dir(), + is_symlink: metadata.file_type().is_symlink(), + size: metadata.len(), + + mtime: to_msec(metadata.modified()), + atime: to_msec(metadata.accessed()), + birthtime: to_msec(metadata.created()), + + dev: unix_or_zero!(dev), + ino: unix_or_zero!(ino), + mode: unix_or_zero!(mode), + nlink: unix_or_zero!(nlink), + uid: unix_or_zero!(uid), + gid: unix_or_zero!(gid), + rdev: unix_or_zero!(rdev), + blksize: unix_or_zero!(blksize), + blocks: unix_or_zero!(blocks), + is_block_device: unix_or_false!(is_block_device), + is_char_device: unix_or_false!(is_char_device), + is_fifo: unix_or_false!(is_fifo), + is_socket: unix_or_false!(is_socket), + } + } +} + +#[async_trait::async_trait(?Send)] +pub trait File { + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult; + async fn read(self: Rc, limit: usize) -> FsResult { + let vec = vec![0; limit]; + let buf = BufMutView::from(vec); + let (nread, buf) = self.read_byob(buf).await?; + let mut vec = buf.unwrap_vec(); + if vec.len() != nread { + vec.truncate(nread); + } + Ok(BufView::from(vec)) + } + async fn read_byob( + self: Rc, + buf: BufMutView, + ) -> FsResult<(usize, BufMutView)>; + + fn write_sync(self: Rc, buf: &[u8]) -> FsResult; + async fn write( + self: Rc, + buf: BufView, + ) -> FsResult; 
+ + fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()>; + async fn write_all(self: Rc, buf: BufView) -> FsResult<()>; + + fn read_all_sync(self: Rc) -> FsResult>; + async fn read_all_async(self: Rc) -> FsResult>; + + fn chmod_sync(self: Rc, pathmode: u32) -> FsResult<()>; + async fn chmod_async(self: Rc, mode: u32) -> FsResult<()>; + + fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult; + async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult; + + fn datasync_sync(self: Rc) -> FsResult<()>; + async fn datasync_async(self: Rc) -> FsResult<()>; + + fn sync_sync(self: Rc) -> FsResult<()>; + async fn sync_async(self: Rc) -> FsResult<()>; + + fn stat_sync(self: Rc) -> FsResult; + async fn stat_async(self: Rc) -> FsResult; + + fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()>; + async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()>; + + fn unlock_sync(self: Rc) -> FsResult<()>; + async fn unlock_async(self: Rc) -> FsResult<()>; + + fn truncate_sync(self: Rc, len: u64) -> FsResult<()>; + async fn truncate_async(self: Rc, len: u64) -> FsResult<()>; + + fn utime_sync( + self: Rc, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()>; + async fn utime_async( + self: Rc, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()>; + + // lower level functionality + fn as_stdio(self: Rc) -> FsResult; + #[cfg(unix)] + fn backing_fd(self: Rc) -> Option; + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option; + fn try_clone_inner(self: Rc) -> FsResult>; +} + +pub struct FileResource { + name: String, + file: Rc, +} + +impl FileResource { + pub fn new(file: Rc, name: String) -> Self { + Self { name, file } + } + + pub fn with_resource( + state: &OpState, + rid: ResourceId, + f: F, + ) -> Result + where + F: FnOnce(Rc) -> Result, + { + let resource = state.resource_table.get::(rid)?; + f(resource) + } + + pub fn get_file( + state: &OpState, + rid: ResourceId, + ) -> Result, 
AnyError> { + let resource = state.resource_table.get::(rid)?; + Ok(resource.file()) + } + + pub fn with_file( + state: &OpState, + rid: ResourceId, + f: F, + ) -> Result + where + F: FnOnce(Rc) -> Result, + { + Self::with_resource(state, rid, |r| f(r.file.clone())) + } + + pub fn file(&self) -> Rc { + self.file.clone() + } +} + +impl deno_core::Resource for FileResource { + fn name(&self) -> Cow { + Cow::Borrowed(&self.name) + } + + fn read( + self: Rc, + limit: usize, + ) -> deno_core::AsyncResult { + Box::pin(async move { + self + .file + .clone() + .read(limit) + .await + .map_err(|err| err.into()) + }) + } + + fn read_byob( + self: Rc, + buf: deno_core::BufMutView, + ) -> deno_core::AsyncResult<(usize, deno_core::BufMutView)> { + Box::pin(async move { + self + .file + .clone() + .read_byob(buf) + .await + .map_err(|err| err.into()) + }) + } + + fn write( + self: Rc, + buf: deno_core::BufView, + ) -> deno_core::AsyncResult { + Box::pin(async move { + self.file.clone().write(buf).await.map_err(|err| err.into()) + }) + } + + fn write_all( + self: Rc, + buf: deno_core::BufView, + ) -> deno_core::AsyncResult<()> { + Box::pin(async move { + self + .file + .clone() + .write_all(buf) + .await + .map_err(|err| err.into()) + }) + } + + fn read_byob_sync( + self: Rc, + data: &mut [u8], + ) -> Result { + self.file.clone().read_sync(data).map_err(|err| err.into()) + } + + fn write_sync( + self: Rc, + data: &[u8], + ) -> Result { + self.file.clone().write_sync(data).map_err(|err| err.into()) + } + + #[cfg(unix)] + fn backing_fd(self: Rc) -> Option { + self.file.clone().backing_fd() + } + + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + self.file.clone().backing_fd() + } +} diff --git a/ext/io/lib.rs b/ext/io/lib.rs index c85b4baf6d..6dec7c3a7f 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -1,9 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-use deno_core::error::resource_unavailable; use deno_core::error::AnyError; use deno_core::op; -use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use deno_core::AsyncMutFuture; use deno_core::AsyncRefCell; use deno_core::AsyncResult; @@ -14,8 +13,12 @@ use deno_core::CancelTryFuture; use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; -use deno_core::ResourceId; use deno_core::TaskQueue; +use fs::FileResource; +use fs::FsError; +use fs::FsResult; +use fs::FsStat; +use fs3::FileExt; use once_cell::sync::Lazy; use std::borrow::Cow; use std::cell::RefCell; @@ -23,9 +26,9 @@ use std::fs::File as StdFile; use std::io; use std::io::ErrorKind; use std::io::Read; +use std::io::Seek; use std::io::Write; use std::rc::Rc; -use std::sync::Arc; use tokio::io::AsyncRead; use tokio::io::AsyncReadExt; use tokio::io::AsyncWrite; @@ -42,6 +45,8 @@ use winapi::um::processenv::GetStdHandle; #[cfg(windows)] use winapi::um::winbase; +pub mod fs; + // Store the stdio fd/handles in global statics in order to keep them // alive for the duration of the application since the last handle/fd // being dropped will close the corresponding pipe. 
@@ -91,39 +96,39 @@ deno_core::extension!(deno_io, if let Some(stdio) = options.stdio { let t = &mut state.resource_table; - let rid = t.add(StdFileResource::stdio( - match stdio.stdin { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stdin, - file: STDIN_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(fs::FileResource::new( + Rc::new(match stdio.stdin { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stdin, + STDIN_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stdin", + }), + "stdin".to_string(), )); assert_eq!(rid, 0, "stdin must have ResourceId 0"); - let rid = t.add(StdFileResource::stdio( - match stdio.stdout { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stdout, - file: STDOUT_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(FileResource::new( + Rc::new(match stdio.stdout { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stdout, + STDOUT_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stdout", + }), + "stdout".to_string(), )); assert_eq!(rid, 1, "stdout must have ResourceId 1"); - let rid = t.add(StdFileResource::stdio( - match stdio.stderr { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stderr, - file: STDERR_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(FileResource::new( + Rc::new(match stdio.stderr { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stderr, + STDERR_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stderr", + }), + "stderr".to_string(), )); assert_eq!(rid, 2, "stderr must have ResourceId 2"); } @@ -159,20 +164,6 @@ pub struct Stdio { pub stderr: StdioPipe, } -#[cfg(unix)] -use nix::sys::termios; - -#[derive(Default)] -pub struct TtyMetadata { - #[cfg(unix)] - pub mode: Option, -} - -#[derive(Default)] -pub 
struct FileMetadata { - pub tty: TtyMetadata, -} - #[derive(Debug)] pub struct WriteOnlyResource { stream: AsyncRefCell, @@ -307,34 +298,88 @@ enum StdFileResourceKind { Stderr, } -struct StdFileResourceInner { +pub struct StdFileResourceInner { kind: StdFileResourceKind, - file: StdFile, + // We can't use an AsyncRefCell here because we need to allow + // access to the resource synchronously at any time and + // asynchronously one at a time in order + cell: RefCell>, + // Used to keep async actions in order and only allow one + // to occur at a time + cell_async_task_queue: TaskQueue, } impl StdFileResourceInner { pub fn file(fs_file: StdFile) -> Self { + StdFileResourceInner::new(StdFileResourceKind::File, fs_file) + } + + fn new(kind: StdFileResourceKind, fs_file: StdFile) -> Self { StdFileResourceInner { - kind: StdFileResourceKind::File, - file: fs_file, + kind, + cell: RefCell::new(Some(fs_file)), + cell_async_task_queue: Default::default(), } } - pub fn with_file(&mut self, f: impl FnOnce(&mut StdFile) -> R) -> R { - f(&mut self.file) + fn with_sync(&self, action: F) -> FsResult + where + F: FnOnce(&mut StdFile) -> FsResult, + { + match self.cell.try_borrow_mut() { + Ok(mut cell) if cell.is_some() => action(cell.as_mut().unwrap()), + _ => Err(fs::FsError::FileBusy), + } } - pub fn try_clone(&self) -> Result { - Ok(Self { - kind: self.kind, - file: self.file.try_clone()?, + async fn with_inner_blocking_task(&self, action: F) -> R + where + F: FnOnce(&mut StdFile) -> R + Send + 'static, + { + // we want to restrict this to one async action at a time + let _permit = self.cell_async_task_queue.acquire().await; + // we take the value out of the cell, use it on a blocking task, + // then put it back into the cell when we're done + let mut did_take = false; + let mut cell_value = { + let mut cell = self.cell.borrow_mut(); + match cell.as_mut().unwrap().try_clone().ok() { + Some(value) => value, + None => { + did_take = true; + cell.take().unwrap() + } + } + }; + 
let (cell_value, result) = spawn_blocking(move || { + let result = action(&mut cell_value); + (cell_value, result) }) + .await + .unwrap(); + + if did_take { + // put it back + self.cell.borrow_mut().replace(cell_value); + } + + result } - pub fn write_and_maybe_flush( - &mut self, - buf: &[u8], - ) -> Result { + async fn with_blocking_task(&self, action: F) -> R + where + F: FnOnce() -> R + Send + 'static, + { + // we want to restrict this to one async action at a time + let _permit = self.cell_async_task_queue.acquire().await; + + spawn_blocking(action).await.unwrap() + } +} + +#[async_trait::async_trait(?Send)] +impl crate::fs::File for StdFileResourceInner { + fn write_sync(self: Rc, buf: &[u8]) -> FsResult { // Rust will line buffer and we don't want that behavior // (see https://github.com/denoland/deno/issues/948), so flush stdout and stderr. // Although an alternative solution could be to bypass Rust's std by @@ -342,7 +387,7 @@ impl StdFileResourceInner { // that we get solved for free by using Rust's stdio wrappers (see // std/src/sys/windows/stdio.rs in Rust's source code). 
match self.kind { - StdFileResourceKind::File => Ok(self.file.write(buf)?), + StdFileResourceKind::File => self.with_sync(|file| Ok(file.write(buf)?)), StdFileResourceKind::Stdin => { Err(Into::::into(ErrorKind::Unsupported).into()) } @@ -363,14 +408,22 @@ impl StdFileResourceInner { } } - pub fn write_all_and_maybe_flush( - &mut self, - buf: &[u8], - ) -> Result<(), AnyError> { - // this method exists instead of using a `Write` implementation - // so that we can acquire the locks once and do both actions + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { match self.kind { - StdFileResourceKind::File => Ok(self.file.write_all(buf)?), + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + self.with_sync(|file| Ok(file.read(buf)?)) + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { + Err(FsError::NotSupported) + } + } + } + + fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()> { + match self.kind { + StdFileResourceKind::File => { + self.with_sync(|file| Ok(file.write_all(buf)?)) + } StdFileResourceKind::Stdin => { Err(Into::::into(ErrorKind::Unsupported).into()) } @@ -390,329 +443,292 @@ impl StdFileResourceInner { } } } -} - -impl Read for StdFileResourceInner { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + async fn write_all(self: Rc, buf: BufView) -> FsResult<()> { match self.kind { - StdFileResourceKind::File | StdFileResourceKind::Stdin => { - self.file.read(buf) + StdFileResourceKind::File => { + self + .with_inner_blocking_task(move |file| Ok(file.write_all(&buf)?)) + .await } - StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { - Err(ErrorKind::Unsupported.into()) + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + self + .with_blocking_task(move || { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + stdout.write_all(&buf)?; + stdout.flush()?; + Ok(()) + }) + .await + } + 
StdFileResourceKind::Stderr => { + self + .with_blocking_task(move || { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + stderr.write_all(&buf)?; + stderr.flush()?; + Ok(()) + }) + .await } } } -} - -struct StdFileResourceCellValue { - inner: StdFileResourceInner, - meta_data: Arc>, -} - -impl StdFileResourceCellValue { - pub fn try_clone(&self) -> Result { - Ok(Self { - inner: self.inner.try_clone()?, - meta_data: self.meta_data.clone(), - }) - } -} - -pub struct StdFileResource { - name: String, - // We can't use an AsyncRefCell here because we need to allow - // access to the resource synchronously at any time and - // asynchronously one at a time in order - cell: RefCell>, - // Used to keep async actions in order and only allow one - // to occur at a time - cell_async_task_queue: TaskQueue, -} - -impl StdFileResource { - fn stdio(inner: StdFileResourceInner, name: &str) -> Self { - Self { - cell: RefCell::new(Some(StdFileResourceCellValue { - inner, - meta_data: Default::default(), - })), - cell_async_task_queue: Default::default(), - name: name.to_string(), - } - } - - pub fn fs_file(fs_file: StdFile) -> Self { - Self { - cell: RefCell::new(Some(StdFileResourceCellValue { - inner: StdFileResourceInner::file(fs_file), - meta_data: Default::default(), - })), - cell_async_task_queue: Default::default(), - name: "fsFile".to_string(), - } - } - - fn with_inner_and_metadata( - &self, - action: impl FnOnce( - &mut StdFileResourceInner, - &Arc>, - ) -> Result, - ) -> Option> { - match self.cell.try_borrow_mut() { - Ok(mut cell) => { - let mut file = cell.take().unwrap(); - let result = action(&mut file.inner, &file.meta_data); - cell.replace(file); - Some(result) - } - Err(_) => None, - } - } - - async fn with_inner_blocking_task(&self, action: F) -> R - where - F: FnOnce(&mut StdFileResourceInner) -> R + Send + 'static, - { - // we want to restrict this to one async action at a time - let _permit = 
self.cell_async_task_queue.acquire().await; - // we take the value out of the cell, use it on a blocking task, - // then put it back into the cell when we're done - let mut did_take = false; - let mut cell_value = { - let mut cell = self.cell.borrow_mut(); - match cell.as_mut().unwrap().try_clone() { - Ok(value) => value, - Err(_) => { - did_take = true; - cell.take().unwrap() - } - } - }; - let (cell_value, result) = tokio::task::spawn_blocking(move || { - let result = action(&mut cell_value.inner); - (cell_value, result) - }) - .await - .unwrap(); - - if did_take { - // put it back - self.cell.borrow_mut().replace(cell_value); - } - - result - } - - async fn read_byob( - self: Rc, - mut buf: BufMutView, - ) -> Result<(usize, BufMutView), AnyError> { - self - .with_inner_blocking_task(move |inner| { - let nread = inner.read(&mut buf)?; - Ok((nread, buf)) - }) - .await - } async fn write( self: Rc, view: BufView, - ) -> Result { - self - .with_inner_blocking_task(move |inner| { - let nwritten = inner.write_and_maybe_flush(&view)?; - Ok(deno_core::WriteOutcome::Partial { nwritten, view }) - }) - .await - } - - async fn write_all(self: Rc, view: BufView) -> Result<(), AnyError> { - self - .with_inner_blocking_task(move |inner| { - inner.write_all_and_maybe_flush(&view) - }) - .await - } - - fn read_byob_sync(&self, buf: &mut [u8]) -> Result { - self - .with_inner_and_metadata(|inner, _| inner.read(buf)) - .ok_or_else(resource_unavailable)? - .map_err(Into::into) - } - - fn write_sync(&self, data: &[u8]) -> Result { - self - .with_inner_and_metadata(|inner, _| inner.write_and_maybe_flush(data)) - .ok_or_else(resource_unavailable)? 
- } - - fn with_resource( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(Rc) -> Result, - { - let resource = state.resource_table.get::(rid)?; - f(resource) - } - - pub fn with_file( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(&mut StdFile) -> Result, - { - Self::with_resource(state, rid, move |resource| { - resource - .with_inner_and_metadata(move |inner, _| inner.with_file(f)) - .ok_or_else(resource_unavailable)? - }) - } - - pub fn with_file2(self: Rc, f: F) -> Option> - where - F: FnOnce(&mut StdFile) -> Result, - { - self.with_inner_and_metadata(move |inner, _| inner.with_file(f)) - } - - pub fn with_file_and_metadata( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(&mut StdFile, &Arc>) -> Result, - { - Self::with_resource(state, rid, move |resource| { - resource - .with_inner_and_metadata(move |inner, metadata| { - inner.with_file(move |file| f(file, metadata)) - }) - .ok_or_else(resource_unavailable)? 
- }) - } - - pub async fn with_file_blocking_task( - state: Rc>, - rid: ResourceId, - f: F, - ) -> Result - where - F: (FnOnce(&mut StdFile) -> Result) + Send + 'static, - { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - - resource - .with_inner_blocking_task(move |inner| inner.with_file(f)) - .await - } - - pub async fn with_file_blocking_task2( - self: Rc, - f: F, - ) -> Result - where - F: (FnOnce(&mut StdFile) -> Result) + Send + 'static, - { - self - .with_inner_blocking_task(move |inner| inner.with_file(f)) - .await - } - - pub fn clone_file( - state: &mut OpState, - rid: ResourceId, - ) -> Result { - Self::with_file(state, rid, move |std_file| { - std_file.try_clone().map_err(AnyError::from) - }) - } - - pub fn as_stdio( - state: &mut OpState, - rid: u32, - ) -> Result { - Self::with_resource(state, rid, |resource| { - resource - .with_inner_and_metadata(|inner, _| match inner.kind { - StdFileResourceKind::File => { - let file = inner.file.try_clone()?; - Ok(file.into()) - } - _ => Ok(std::process::Stdio::inherit()), - }) - .ok_or_else(resource_unavailable)? 
- }) - } -} - -impl Resource for StdFileResource { - fn name(&self) -> Cow { - self.name.as_str().into() - } - - fn read(self: Rc, limit: usize) -> AsyncResult { - Box::pin(async move { - let vec = vec![0; limit]; - let buf = BufMutView::from(vec); - let (nread, buf) = StdFileResource::read_byob(self, buf).await?; - let mut vec = buf.unwrap_vec(); - if vec.len() != nread { - vec.truncate(nread); + ) -> FsResult { + match self.kind { + StdFileResourceKind::File => { + self + .with_inner_blocking_task(|file| { + let nwritten = file.write(&view)?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await } - Ok(BufView::from(vec)) + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + self + .with_blocking_task(|| { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + let nwritten = stdout.write(&view)?; + stdout.flush()?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await + } + StdFileResourceKind::Stderr => { + self + .with_blocking_task(|| { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + let nwritten = stderr.write(&view)?; + stderr.flush()?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await + } + } + } + + fn read_all_sync(self: Rc) -> FsResult> { + match self.kind { + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + let mut buf = Vec::new(); + self.with_sync(|file| Ok(file.read_to_end(&mut buf)?))?; + Ok(buf) + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { + Err(FsError::NotSupported) + } + } + } + async fn read_all_async(self: Rc) -> FsResult> { + match self.kind { + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + self + .with_inner_blocking_task(|file| { + let mut buf = Vec::new(); + file.read_to_end(&mut buf)?; + Ok(buf) + }) + .await + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => 
{ + Err(FsError::NotSupported) + } + } + } + + fn chmod_sync(self: Rc, _mode: u32) -> FsResult<()> { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + self.with_sync(|file| { + Ok(file.set_permissions(std::fs::Permissions::from_mode(_mode))?) + }) + } + #[cfg(not(unix))] + Err(FsError::NotSupported) + } + async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + self + .with_inner_blocking_task(move |file| { + Ok(file.set_permissions(std::fs::Permissions::from_mode(_mode))?) + }) + .await + } + #[cfg(not(unix))] + Err(FsError::NotSupported) + } + + fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult { + self.with_sync(|file| Ok(file.seek(pos)?)) + } + async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult { + self + .with_inner_blocking_task(move |file| Ok(file.seek(pos)?)) + .await + } + + fn datasync_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.sync_data()?)) + } + async fn datasync_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.sync_data()?)) + .await + } + + fn sync_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.sync_all()?)) + } + async fn sync_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.sync_all()?)) + .await + } + + fn stat_sync(self: Rc) -> FsResult { + self.with_sync(|file| Ok(file.metadata().map(FsStat::from_std)?)) + } + async fn stat_async(self: Rc) -> FsResult { + self + .with_inner_blocking_task(|file| { + Ok(file.metadata().map(FsStat::from_std)?) 
+ }) + .await + } + + fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()> { + self.with_sync(|file| { + if exclusive { + file.lock_exclusive()?; + } else { + file.lock_shared()?; + } + Ok(()) }) } + async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()> { + self + .with_inner_blocking_task(move |file| { + if exclusive { + file.lock_exclusive()?; + } else { + file.lock_shared()?; + } + Ok(()) + }) + .await + } - fn read_byob( + fn unlock_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.unlock()?)) + } + async fn unlock_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.unlock()?)) + .await + } + + fn truncate_sync(self: Rc, len: u64) -> FsResult<()> { + self.with_sync(|file| Ok(file.set_len(len)?)) + } + async fn truncate_async(self: Rc, len: u64) -> FsResult<()> { + self + .with_inner_blocking_task(move |file| Ok(file.set_len(len)?)) + .await + } + + fn utime_sync( self: Rc, - buf: deno_core::BufMutView, - ) -> AsyncResult<(usize, deno_core::BufMutView)> { - Box::pin(StdFileResource::read_byob(self, buf)) - } + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); + let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - fn write( + self.with_sync(|file| { + filetime::set_file_handle_times(file, Some(atime), Some(mtime))?; + Ok(()) + }) + } + async fn utime_async( self: Rc, - view: deno_core::BufView, - ) -> AsyncResult { - Box::pin(StdFileResource::write(self, view)) + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); + let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); + + self + .with_inner_blocking_task(move |file| { + filetime::set_file_handle_times(file, Some(atime), Some(mtime))?; + Ok(()) + }) + .await } - fn 
write_all(self: Rc, view: deno_core::BufView) -> AsyncResult<()> { - Box::pin(StdFileResource::write_all(self, view)) + async fn read_byob( + self: Rc, + mut buf: BufMutView, + ) -> FsResult<(usize, BufMutView)> { + self + .with_inner_blocking_task(|file| { + let nread = file.read(&mut buf)?; + Ok((nread, buf)) + }) + .await } - fn write_sync(&self, data: &[u8]) -> Result { - StdFileResource::write_sync(self, data) + fn try_clone_inner(self: Rc) -> FsResult> { + let inner: &Option<_> = &self.cell.borrow(); + match inner { + Some(inner) => Ok(Rc::new(StdFileResourceInner { + kind: self.kind, + cell: RefCell::new(Some(inner.try_clone()?)), + cell_async_task_queue: Default::default(), + })), + None => Err(FsError::FileBusy), + } } - fn read_byob_sync( - &self, - data: &mut [u8], - ) -> Result { - StdFileResource::read_byob_sync(self, data) + fn as_stdio(self: Rc) -> FsResult { + match self.kind { + StdFileResourceKind::File => self.with_sync(|file| { + let file = file.try_clone()?; + Ok(file.into()) + }), + _ => Ok(std::process::Stdio::inherit()), + } } #[cfg(unix)] fn backing_fd(self: Rc) -> Option { use std::os::unix::io::AsRawFd; - self - .with_inner_and_metadata(move |std_file, _| { - Ok::<_, ()>(std_file.with_file(|f| f.as_raw_fd())) - })? - .ok() + self.with_sync(|file| Ok(file.as_raw_fd())).ok() + } + + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + use std::os::windows::prelude::AsRawHandle; + self.with_sync(|file| Ok(file.as_raw_handle())).ok() } } @@ -724,12 +740,7 @@ pub fn op_print( is_err: bool, ) -> Result<(), AnyError> { let rid = if is_err { 2 } else { 1 }; - StdFileResource::with_resource(state, rid, move |resource| { - resource - .with_inner_and_metadata(|inner, _| { - inner.write_all_and_maybe_flush(msg.as_bytes())?; - Ok(()) - }) - .ok_or_else(resource_unavailable)? + FileResource::with_file(state, rid, move |file| { + Ok(file.write_all_sync(msg.as_bytes())?) 
}) } diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 16099c2251..f8181cc2e7 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -2,8 +2,15 @@ // @ts-ignore internal api const { - ObjectGetPrototypeOf, AsyncGeneratorPrototype, + BigIntPrototypeToString, + ObjectFreeze, + ObjectGetPrototypeOf, + ObjectPrototypeIsPrototypeOf, + StringPrototypeReplace, + SymbolFor, + SymbolToStringTag, + Uint8ArrayPrototype, } = globalThis.__bootstrap.primordials; const core = Deno.core; const ops = core.ops; @@ -16,7 +23,7 @@ const encodeCursor: ( async function openKv(path: string) { const rid = await core.opAsync("op_kv_database_open", path); - return new Kv(rid); + return new Kv(rid, kvSymbol); } interface RawKvEntry { @@ -36,10 +43,17 @@ type RawValue = { value: bigint; }; +const kvSymbol = Symbol("KvRid"); + class Kv { #rid: number; - constructor(rid: number) { + constructor(rid: number = undefined, symbol: symbol = undefined) { + if (kvSymbol !== symbol) { + throw new TypeError( + "Deno.Kv can not be constructed, use Deno.openKv instead.", + ); + } this.#rid = rid; } @@ -116,7 +130,7 @@ class Kv { [], ); if (versionstamp === null) throw new TypeError("Failed to set value"); - return { versionstamp }; + return { ok: true, versionstamp }; } async delete(key: Deno.KvKey) { @@ -211,14 +225,6 @@ class AtomicOperation { return this; } - sum(key: Deno.KvKey, n: bigint): this { - return this.mutate({ - type: "sum", - key, - value: new KvU64(n), - }); - } - mutate(...mutations: Deno.KvMutation[]): this { for (const mutation of mutations) { const key = mutation.key; @@ -249,6 +255,21 @@ class AtomicOperation { return this; } + sum(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "sum", serializeValue(new KvU64(n))]); + return this; + } + + min(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "min", serializeValue(new KvU64(n))]); + return this; + } + + max(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "max", serializeValue(new 
KvU64(n))]); + return this; + } + set(key: Deno.KvKey, value: unknown): this { this.#mutations.push([key, "set", serializeValue(value)]); return this; @@ -259,7 +280,7 @@ class AtomicOperation { return this; } - async commit(): Promise { + async commit(): Promise { const versionstamp = await core.opAsync( "op_kv_atomic_write", this.#rid, @@ -267,8 +288,8 @@ class AtomicOperation { this.#mutations, [], // TODO(@losfair): enqueue ); - if (versionstamp === null) return null; - return { versionstamp }; + if (versionstamp === null) return { ok: false }; + return { ok: true, versionstamp }; } then() { @@ -278,11 +299,11 @@ class AtomicOperation { } } -const MIN_U64 = 0n; -const MAX_U64 = 0xffffffffffffffffn; +const MIN_U64 = BigInt("0"); +const MAX_U64 = BigInt("0xffffffffffffffff"); class KvU64 { - readonly value: bigint; + value: bigint; constructor(value: bigint) { if (typeof value !== "bigint") { @@ -292,11 +313,31 @@ class KvU64 { throw new RangeError("value must be a positive bigint"); } if (value > MAX_U64) { - throw new RangeError("value must be a 64-bit unsigned integer"); + throw new RangeError("value must fit in a 64-bit unsigned integer"); } this.value = value; Object.freeze(this); } + + valueOf() { + return this.value; + } + + toString() { + return BigIntPrototypeToString(this.value); + } + + get [SymbolToStringTag]() { + return "Deno.KvU64"; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) { + return StringPrototypeReplace( + inspect(Object(this.value), inspectOptions), + "BigInt", + "Deno.KvU64", + ); + } } function deserializeValue(entry: RawKvEntry): Deno.KvEntry { @@ -305,7 +346,7 @@ function deserializeValue(entry: RawKvEntry): Deno.KvEntry { case "v8": return { ...entry, - value: core.deserialize(value), + value: core.deserialize(value, { forStorage: true }), }; case "bytes": return { @@ -323,20 +364,20 @@ function deserializeValue(entry: RawKvEntry): Deno.KvEntry { } function serializeValue(value: unknown): RawValue { - if 
(value instanceof Uint8Array) { + if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, value)) { return { kind: "bytes", value, }; - } else if (value instanceof KvU64) { + } else if (ObjectPrototypeIsPrototypeOf(KvU64.prototype, value)) { return { kind: "u64", - value: value.value, + value: value.valueOf(), }; } else { return { kind: "v8", - value: core.serialize(value), + value: core.serialize(value, { forStorage: true }), }; } } @@ -391,13 +432,13 @@ class KvListIterator extends AsyncIterator let start: Deno.KvKey | undefined; let end: Deno.KvKey | undefined; if ("prefix" in selector && selector.prefix !== undefined) { - prefix = Object.freeze([...selector.prefix]); + prefix = ObjectFreeze([...selector.prefix]); } if ("start" in selector && selector.start !== undefined) { - start = Object.freeze([...selector.start]); + start = ObjectFreeze([...selector.start]); } if ("end" in selector && selector.end !== undefined) { - end = Object.freeze([...selector.end]); + end = ObjectFreeze([...selector.end]); } if (prefix) { if (start && end) { diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 36155cd8ea..c805c0f5df 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.7.0" +version = "0.15.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/lib.rs b/ext/kv/lib.rs index f17ed55e33..dbc626225f 100644 --- a/ext/kv/lib.rs +++ b/ext/kv/lib.rs @@ -53,8 +53,7 @@ impl UnstableChecker { } deno_core::extension!(deno_kv, - // TODO(bartlomieju): specify deps - deps = [ ], + deps = [ deno_console ], parameters = [ DBH: DatabaseHandler ], ops = [ op_kv_database_open, diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index 63be1281b4..80d230ab15 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
use std::borrow::Cow; +use std::cell::Cell; use std::cell::RefCell; use std::marker::PhantomData; use std::path::Path; @@ -10,6 +11,8 @@ use std::rc::Rc; use async_trait::async_trait; use deno_core::error::type_error; use deno_core::error::AnyError; +use deno_core::task::spawn_blocking; +use deno_core::AsyncRefCell; use deno_core::OpState; use rusqlite::params; use rusqlite::OpenFlags; @@ -112,11 +115,9 @@ impl DatabaseHandler for SqliteDbHandler

{ state: Rc>, path: Option, ) -> Result { - let conn = match (path.as_deref(), &self.default_storage_dir) { - (Some(":memory:"), _) | (None, None) => { - rusqlite::Connection::open_in_memory()? - } - (Some(path), _) => { + // Validate path + if let Some(path) = &path { + if path != ":memory:" { if path.is_empty() { return Err(type_error("Filename cannot be empty")); } @@ -132,44 +133,92 @@ impl DatabaseHandler for SqliteDbHandler

{ permissions.check_read(path, "Deno.openKv")?; permissions.check_write(path, "Deno.openKv")?; } - let flags = OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); - rusqlite::Connection::open_with_flags(path, flags)? - } - (None, Some(path)) => { - std::fs::create_dir_all(path)?; - let path = path.join("kv.sqlite3"); - rusqlite::Connection::open(&path)? - } - }; - - conn.pragma_update(None, "journal_mode", "wal")?; - conn.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; - - let current_version: usize = conn - .query_row( - "select version from migration_state where k = 0", - [], - |row| row.get(0), - ) - .optional()? - .unwrap_or(0); - - for (i, migration) in MIGRATIONS.iter().enumerate() { - let version = i + 1; - if version > current_version { - conn.execute_batch(migration)?; - conn.execute( - "replace into migration_state (k, version) values(?, ?)", - [&0, &version], - )?; } } - Ok(SqliteDb(RefCell::new(conn))) + let default_storage_dir = self.default_storage_dir.clone(); + let conn = spawn_blocking(move || { + let conn = match (path.as_deref(), &default_storage_dir) { + (Some(":memory:"), _) | (None, None) => { + rusqlite::Connection::open_in_memory()? + } + (Some(path), _) => { + let flags = + OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); + rusqlite::Connection::open_with_flags(path, flags)? + } + (None, Some(path)) => { + std::fs::create_dir_all(path)?; + let path = path.join("kv.sqlite3"); + rusqlite::Connection::open(&path)? + } + }; + + conn.pragma_update(None, "journal_mode", "wal")?; + conn.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; + + let current_version: usize = conn + .query_row( + "select version from migration_state where k = 0", + [], + |row| row.get(0), + ) + .optional()? 
+ .unwrap_or(0); + + for (i, migration) in MIGRATIONS.iter().enumerate() { + let version = i + 1; + if version > current_version { + conn.execute_batch(migration)?; + conn.execute( + "replace into migration_state (k, version) values(?, ?)", + [&0, &version], + )?; + } + } + + Ok::<_, AnyError>(conn) + }) + .await + .unwrap()?; + + Ok(SqliteDb(Rc::new(AsyncRefCell::new(Cell::new(Some(conn)))))) } } -pub struct SqliteDb(RefCell); +pub struct SqliteDb(Rc>>>); + +impl SqliteDb { + async fn run_tx(&self, f: F) -> Result + where + F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + + Send + + 'static, + R: Send + 'static, + { + // Transactions need exclusive access to the connection. Wait until + // we can borrow_mut the connection. + let cell = self.0.borrow_mut().await; + + // Take the db out of the cell and run the transaction via spawn_blocking. + let mut db = cell.take().unwrap(); + let (result, db) = spawn_blocking(move || { + let result = { + match db.transaction() { + Ok(tx) => f(tx), + Err(e) => Err(e.into()), + } + }; + (result, db) + }) + .await + .unwrap(); + + // Put the db back into the cell. 
+ cell.set(Some(db)); + result + } +} #[async_trait(?Send)] impl Database for SqliteDb { @@ -178,110 +227,126 @@ impl Database for SqliteDb { requests: Vec, _options: SnapshotReadOptions, ) -> Result, AnyError> { - let mut responses = Vec::with_capacity(requests.len()); - let mut db = self.0.borrow_mut(); - let tx = db.transaction()?; + self + .run_tx(move |tx| { + let mut responses = Vec::with_capacity(requests.len()); + for request in requests { + let mut stmt = tx.prepare_cached(if request.reverse { + STATEMENT_KV_RANGE_SCAN_REVERSE + } else { + STATEMENT_KV_RANGE_SCAN + })?; + let entries = stmt + .query_map( + ( + request.start.as_slice(), + request.end.as_slice(), + request.limit.get(), + ), + |row| { + let key: Vec = row.get(0)?; + let value: Vec = row.get(1)?; + let encoding: i64 = row.get(2)?; - for request in requests { - let mut stmt = tx.prepare_cached(if request.reverse { - STATEMENT_KV_RANGE_SCAN_REVERSE - } else { - STATEMENT_KV_RANGE_SCAN - })?; - let entries = stmt - .query_map( - ( - request.start.as_slice(), - request.end.as_slice(), - request.limit.get(), - ), - |row| { - let key: Vec = row.get(0)?; - let value: Vec = row.get(1)?; - let encoding: i64 = row.get(2)?; + let value = decode_value(value, encoding); - let value = decode_value(value, encoding); + let version: i64 = row.get(3)?; + Ok(KvEntry { + key, + value, + versionstamp: version_to_versionstamp(version), + }) + }, + )? + .collect::, rusqlite::Error>>()?; + responses.push(ReadRangeOutput { entries }); + } - let version: i64 = row.get(3)?; - Ok(KvEntry { - key, - value, - versionstamp: version_to_versionstamp(version), - }) - }, - )? 
- .collect::, rusqlite::Error>>()?; - responses.push(ReadRangeOutput { entries }); - } - - Ok(responses) + Ok(responses) + }) + .await } async fn atomic_write( &self, write: AtomicWrite, ) -> Result, AnyError> { - let mut db = self.0.borrow_mut(); - - let tx = db.transaction()?; - - for check in write.checks { - let real_versionstamp = tx - .prepare_cached(STATEMENT_KV_POINT_GET_VERSION_ONLY)? - .query_row([check.key.as_slice()], |row| row.get(0)) - .optional()? - .map(version_to_versionstamp); - if real_versionstamp != check.versionstamp { - return Ok(None); - } - } - - let version: i64 = tx - .prepare_cached(STATEMENT_INC_AND_GET_DATA_VERSION)? - .query_row([], |row| row.get(0))?; - - for mutation in write.mutations { - match mutation.kind { - MutationKind::Set(value) => { - let (value, encoding) = encode_value(&value); - let changed = tx - .prepare_cached(STATEMENT_KV_POINT_SET)? - .execute(params![mutation.key, &value, &encoding, &version])?; - assert_eq!(changed, 1) + self + .run_tx(move |tx| { + for check in write.checks { + let real_versionstamp = tx + .prepare_cached(STATEMENT_KV_POINT_GET_VERSION_ONLY)? + .query_row([check.key.as_slice()], |row| row.get(0)) + .optional()? + .map(version_to_versionstamp); + if real_versionstamp != check.versionstamp { + return Ok(None); + } } - MutationKind::Delete => { - let changed = tx - .prepare_cached(STATEMENT_KV_POINT_DELETE)? - .execute(params![mutation.key])?; - assert!(changed == 0 || changed == 1) - } - MutationKind::Sum(operand) => { - mutate_le64(&tx, &mutation.key, "sum", &operand, version, |a, b| { - a.wrapping_add(b) - })?; - } - MutationKind::Min(operand) => { - mutate_le64(&tx, &mutation.key, "min", &operand, version, |a, b| { - a.min(b) - })?; - } - MutationKind::Max(operand) => { - mutate_le64(&tx, &mutation.key, "max", &operand, version, |a, b| { - a.max(b) - })?; - } - } - } - // TODO(@losfair): enqueues + let version: i64 = tx + .prepare_cached(STATEMENT_INC_AND_GET_DATA_VERSION)? 
+ .query_row([], |row| row.get(0))?; - tx.commit()?; + for mutation in write.mutations { + match mutation.kind { + MutationKind::Set(value) => { + let (value, encoding) = encode_value(&value); + let changed = tx + .prepare_cached(STATEMENT_KV_POINT_SET)? + .execute(params![mutation.key, &value, &encoding, &version])?; + assert_eq!(changed, 1) + } + MutationKind::Delete => { + let changed = tx + .prepare_cached(STATEMENT_KV_POINT_DELETE)? + .execute(params![mutation.key])?; + assert!(changed == 0 || changed == 1) + } + MutationKind::Sum(operand) => { + mutate_le64( + &tx, + &mutation.key, + "sum", + &operand, + version, + |a, b| a.wrapping_add(b), + )?; + } + MutationKind::Min(operand) => { + mutate_le64( + &tx, + &mutation.key, + "min", + &operand, + version, + |a, b| a.min(b), + )?; + } + MutationKind::Max(operand) => { + mutate_le64( + &tx, + &mutation.key, + "max", + &operand, + version, + |a, b| a.max(b), + )?; + } + } + } - let new_vesionstamp = version_to_versionstamp(version); + // TODO(@losfair): enqueues - Ok(Some(CommitResult { - versionstamp: new_vesionstamp, - })) + tx.commit()?; + + let new_vesionstamp = version_to_versionstamp(version); + + Ok(Some(CommitResult { + versionstamp: new_vesionstamp, + })) + }) + .await } } diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 3b563a5233..8054a7a08a 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.29.0" +version = "0.37.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/lib.rs b/ext/napi/lib.rs index 2e7ceed673..22d86e4a9b 100644 --- a/ext/napi/lib.rs +++ b/ext/napi/lib.rs @@ -81,9 +81,7 @@ pub const napi_would_deadlock: napi_status = 21; pub const NAPI_AUTO_LENGTH: usize = usize::MAX; thread_local! 
{ - pub static MODULE: RefCell> = RefCell::new(None); - pub static ASYNC_WORK_SENDER: RefCell>> = RefCell::new(None); - pub static THREAD_SAFE_FN_SENDER: RefCell>> = RefCell::new(None); + pub static MODULE_TO_REGISTER: RefCell> = RefCell::new(None); } type napi_addon_register_func = @@ -101,95 +99,6 @@ pub struct NapiModule { reserved: [*mut c_void; 4], } -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Error { - InvalidArg, - ObjectExpected, - StringExpected, - NameExpected, - FunctionExpected, - NumberExpected, - BooleanExpected, - ArrayExpected, - GenericFailure, - PendingException, - Cancelled, - EscapeCalledTwice, - HandleScopeMismatch, - CallbackScopeMismatch, - QueueFull, - Closing, - BigIntExpected, - DateExpected, - ArrayBufferExpected, - DetachableArraybufferExpected, - WouldDeadlock, -} - -#[allow(clippy::from_over_into)] -impl Into for napi_status { - fn into(self) -> Error { - match self { - napi_invalid_arg => Error::InvalidArg, - napi_object_expected => Error::ObjectExpected, - napi_string_expected => Error::StringExpected, - napi_name_expected => Error::NameExpected, - napi_function_expected => Error::FunctionExpected, - napi_number_expected => Error::NumberExpected, - napi_boolean_expected => Error::BooleanExpected, - napi_array_expected => Error::ArrayExpected, - napi_generic_failure => Error::GenericFailure, - napi_pending_exception => Error::PendingException, - napi_cancelled => Error::Cancelled, - napi_escape_called_twice => Error::EscapeCalledTwice, - napi_handle_scope_mismatch => Error::HandleScopeMismatch, - napi_callback_scope_mismatch => Error::CallbackScopeMismatch, - napi_queue_full => Error::QueueFull, - napi_closing => Error::Closing, - napi_bigint_expected => Error::BigIntExpected, - napi_date_expected => Error::DateExpected, - napi_arraybuffer_expected => Error::ArrayBufferExpected, - napi_detachable_arraybuffer_expected => { - Error::DetachableArraybufferExpected - } - napi_would_deadlock => Error::WouldDeadlock, - _ => 
unreachable!(), - } - } -} - -pub type Result = std::result::Result<(), Error>; - -impl From for napi_status { - fn from(error: Error) -> Self { - match error { - Error::InvalidArg => napi_invalid_arg, - Error::ObjectExpected => napi_object_expected, - Error::StringExpected => napi_string_expected, - Error::NameExpected => napi_name_expected, - Error::FunctionExpected => napi_function_expected, - Error::NumberExpected => napi_number_expected, - Error::BooleanExpected => napi_boolean_expected, - Error::ArrayExpected => napi_array_expected, - Error::GenericFailure => napi_generic_failure, - Error::PendingException => napi_pending_exception, - Error::Cancelled => napi_cancelled, - Error::EscapeCalledTwice => napi_escape_called_twice, - Error::HandleScopeMismatch => napi_handle_scope_mismatch, - Error::CallbackScopeMismatch => napi_callback_scope_mismatch, - Error::QueueFull => napi_queue_full, - Error::Closing => napi_closing, - Error::BigIntExpected => napi_bigint_expected, - Error::DateExpected => napi_date_expected, - Error::ArrayBufferExpected => napi_arraybuffer_expected, - Error::DetachableArraybufferExpected => { - napi_detachable_arraybuffer_expected - } - Error::WouldDeadlock => napi_would_deadlock, - } - } -} - pub type napi_valuetype = i32; pub const napi_undefined: napi_valuetype = 0; @@ -435,15 +344,6 @@ impl Env { >, tsfn_ref_counters: Arc>, ) -> Self { - let sc = sender.clone(); - ASYNC_WORK_SENDER.with(|s| { - s.replace(Some(sc)); - }); - let ts = threadsafe_function_sender.clone(); - THREAD_SAFE_FN_SENDER.with(|s| { - s.replace(Some(ts)); - }); - Self { isolate_ptr, context: context.into_raw(), @@ -592,6 +492,50 @@ pub trait NapiPermissions { -> std::result::Result<(), AnyError>; } +/// # Safety +/// +/// This function is unsafe because it dereferences raw pointer Env. +/// - The caller must ensure that the pointer is valid. +/// - The caller must ensure that the pointer is not freed. 
+pub unsafe fn weak_local( + env_ptr: *mut Env, + value: v8::Local, + data: *mut c_void, + finalize_cb: napi_finalize, + finalize_hint: *mut c_void, +) -> Option> { + use std::cell::Cell; + + let env = &mut *env_ptr; + + let weak_ptr = Rc::new(Cell::new(None)); + let scope = &mut env.scope(); + + let weak = v8::Weak::with_finalizer( + scope, + value, + Box::new({ + let weak_ptr = weak_ptr.clone(); + move |isolate| { + finalize_cb(env_ptr as _, data as _, finalize_hint as _); + + // Self-deleting weak. + if let Some(weak_ptr) = weak_ptr.get() { + let weak: v8::Weak = + unsafe { v8::Weak::from_raw(isolate, Some(weak_ptr)) }; + drop(weak); + } + } + }), + ); + + let value = weak.to_local(scope); + let raw = weak.into_raw(); + weak_ptr.set(raw); + + value +} + #[op(v8)] fn op_napi_open( scope: &mut v8::HandleScope<'scope>, @@ -604,7 +548,6 @@ where { let permissions = op_state.borrow_mut::(); permissions.check(Some(&PathBuf::from(&path)))?; - let ( async_work_sender, tsfn_sender, @@ -667,77 +610,67 @@ where Err(e) => return Err(type_error(e.to_string())), }; - MODULE.with(|cell| { - let slot = *cell.borrow(); - let obj = match slot { - Some(nm) => { - // SAFETY: napi_register_module guarantees that `nm` is valid. - let nm = unsafe { &*nm }; - assert_eq!(nm.nm_version, 1); - // SAFETY: we are going blind, calling the register function on the other side. - let maybe_exports = unsafe { - (nm.nm_register_func)( - env_ptr, - std::mem::transmute::, napi_value>( - exports.into(), - ), - ) - }; + let maybe_module = MODULE_TO_REGISTER.with(|cell| { + let mut slot = cell.borrow_mut(); + slot.take() + }); - let exports = maybe_exports - .as_ref() - .map(|_| unsafe { - // SAFETY: v8::Local is a pointer to a value and napi_value is also a pointer - // to a value, they have the same layout - std::mem::transmute::>( - maybe_exports, - ) - }) - .unwrap_or_else(|| { - // If the module didn't return anything, we use the exports object. 
- exports.into() - }); - - Ok(serde_v8::Value { v8_value: exports }) - } - None => { - // Initializer callback. - // SAFETY: we are going blind, calling the register function on the other side. - unsafe { - let init = library - .get:: napi_value>(b"napi_register_module_v1") - .expect("napi_register_module_v1 not found"); - let maybe_exports = init( - env_ptr, - std::mem::transmute::, napi_value>( - exports.into(), - ), - ); - - let exports = maybe_exports - .as_ref() - .map(|_| { - // SAFETY: v8::Local is a pointer to a value and napi_value is also a pointer - // to a value, they have the same layout - std::mem::transmute::>( - maybe_exports, - ) - }) - .unwrap_or_else(|| { - // If the module didn't return anything, we use the exports object. - exports.into() - }); - - Ok(serde_v8::Value { v8_value: exports }) - } - } + if let Some(module_to_register) = maybe_module { + // SAFETY: napi_register_module guarantees that `module_to_register` is valid. + let nm = unsafe { &*module_to_register }; + assert_eq!(nm.nm_version, 1); + // SAFETY: we are going blind, calling the register function on the other side. + let maybe_exports = unsafe { + (nm.nm_register_func)( + env_ptr, + std::mem::transmute::, napi_value>(exports.into()), + ) }; + + let exports = if maybe_exports.is_some() { + // SAFETY: v8::Local is a pointer to a value and napi_value is also a pointer + // to a value, they have the same layout + unsafe { + std::mem::transmute::>(maybe_exports) + } + } else { + exports.into() + }; + // NAPI addons can't be unloaded, so we're going to "forget" the library // object so it lives till the program exit. std::mem::forget(library); - obj - }) + return Ok(serde_v8::Value { v8_value: exports }); + } + + // Initializer callback. + // SAFETY: we are going blind, calling the register function on the other side. 
+ + let maybe_exports = unsafe { + let init = library + .get:: napi_value>(b"napi_register_module_v1") + .expect("napi_register_module_v1 not found"); + init( + env_ptr, + std::mem::transmute::, napi_value>(exports.into()), + ) + }; + + let exports = if maybe_exports.is_some() { + // SAFETY: v8::Local is a pointer to a value and napi_value is also a pointer + // to a value, they have the same layout + unsafe { + std::mem::transmute::>(maybe_exports) + } + } else { + exports.into() + }; + + // NAPI addons can't be unloaded, so we're going to "forget" the library + // object so it lives till the program exit. + std::mem::forget(library); + Ok(serde_v8::Value { v8_value: exports }) } diff --git a/ext/net/01_net.js b/ext/net/01_net.js index 81e13f0945..e8ce3a3001 100644 --- a/ext/net/01_net.js +++ b/ext/net/01_net.js @@ -11,13 +11,16 @@ import { import * as abortSignal from "ext:deno_web/03_abort_signal.js"; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, + ArrayPrototypeForEach, + ArrayPrototypePush, Error, ObjectPrototypeIsPrototypeOf, PromiseResolve, SymbolAsyncIterator, SymbolFor, - TypedArrayPrototypeSubarray, TypeError, + TypedArrayPrototypeSubarray, Uint8Array, } = primordials; @@ -97,15 +100,16 @@ class Conn { const promise = core.read(this.rid, buffer); const promiseId = promise[promiseIdSymbol]; if (this.#unref) core.unrefOp(promiseId); - this.#pendingReadPromiseIds.push(promiseId); + ArrayPrototypePush(this.#pendingReadPromiseIds, promiseId); let nread; try { nread = await promise; } catch (e) { throw e; } finally { - this.#pendingReadPromiseIds = this.#pendingReadPromiseIds.filter((id) => - id !== promiseId + this.#pendingReadPromiseIds = ArrayPrototypeFilter( + this.#pendingReadPromiseIds, + (id) => id !== promiseId, ); } return nread === 0 ? 
null : nread; @@ -141,7 +145,7 @@ class Conn { if (this.#readable) { readableStreamForRidUnrefableRef(this.#readable); } - this.#pendingReadPromiseIds.forEach((id) => core.refOp(id)); + ArrayPrototypeForEach(this.#pendingReadPromiseIds, (id) => core.refOp(id)); } unref() { @@ -149,7 +153,10 @@ class Conn { if (this.#readable) { readableStreamForRidUnrefableUnref(this.#readable); } - this.#pendingReadPromiseIds.forEach((id) => core.unrefOp(id)); + ArrayPrototypeForEach( + this.#pendingReadPromiseIds, + (id) => core.unrefOp(id), + ); } } diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index a7a1acff6f..d5a26a2c75 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.91.0" +version = "0.99.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -16,7 +16,11 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_tls.workspace = true +# Pinning to 0.5.1, because 0.5.2 breaks "cargo publish" +# https://github.com/bluejekyll/enum-as-inner/pull/91 +enum-as-inner = "=0.5.1" log.workspace = true +pin-project.workspace = true serde.workspace = true socket2.workspace = true tokio.workspace = true diff --git a/ext/net/lib.rs b/ext/net/lib.rs index f812bf60bc..0e3778d5a8 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -5,15 +5,18 @@ pub mod ops; pub mod ops_tls; #[cfg(unix)] pub mod ops_unix; +pub mod raw; pub mod resolve_addr; use deno_core::error::AnyError; use deno_core::OpState; use deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; pub trait NetPermissions { fn check_net>( @@ -66,7 +69,16 @@ pub fn get_declaration() -> PathBuf { #[derive(Clone)] pub struct DefaultTlsOptions { - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, +} + +impl DefaultTlsOptions { + pub fn root_cert_store(&self) -> Result, AnyError> { + Ok(match 
&self.root_cert_store_provider { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } } /// `UnsafelyIgnoreCertificateErrors` is a wrapper struct so it can be placed inside `GothamState`; @@ -86,12 +98,12 @@ deno_core::extension!(deno_net, ops::op_node_unstable_net_listen_udp

, ops::op_net_recv_udp, ops::op_net_send_udp

, - ops::op_net_join_multi_v4_udp

, - ops::op_net_join_multi_v6_udp

, - ops::op_net_leave_multi_v4_udp

, - ops::op_net_leave_multi_v6_udp

, - ops::op_net_set_multi_loopback_udp

, - ops::op_net_set_multi_ttl_udp

, + ops::op_net_join_multi_v4_udp, + ops::op_net_join_multi_v6_udp, + ops::op_net_leave_multi_v4_udp, + ops::op_net_leave_multi_v6_udp, + ops::op_net_set_multi_loopback_udp, + ops::op_net_set_multi_ttl_udp, ops::op_dns_resolve

, ops::op_set_nodelay, ops::op_set_keepalive, @@ -112,13 +124,13 @@ deno_core::extension!(deno_net, ], esm = [ "01_net.js", "02_tls.js" ], options = { - root_cert_store: Option, + root_cert_store_provider: Option>, unstable: bool, unsafely_ignore_certificate_errors: Option>, }, state = |state, options| { state.put(DefaultTlsOptions { - root_cert_store: options.root_cert_store, + root_cert_store_provider: options.root_cert_store_provider, }); state.put(UnstableChecker { unstable: options.unstable }); state.put(UnsafelyIgnoreCertificateErrors( diff --git a/ext/net/ops.rs b/ext/net/ops.rs index 2264df8679..05aa416b4b 100644 --- a/ext/net/ops.rs +++ b/ext/net/ops.rs @@ -159,15 +159,12 @@ where } #[op] -async fn op_net_join_multi_v4_udp( +async fn op_net_join_multi_v4_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: String, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -184,15 +181,12 @@ where } #[op] -async fn op_net_join_multi_v6_udp( +async fn op_net_join_multi_v6_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: u32, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -208,15 +202,12 @@ where } #[op] -async fn op_net_leave_multi_v4_udp( +async fn op_net_leave_multi_v4_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: String, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -233,15 +224,12 @@ where } #[op] -async fn op_net_leave_multi_v6_udp( +async fn op_net_leave_multi_v6_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: u32, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -257,15 +245,12 
@@ where } #[op] -async fn op_net_set_multi_loopback_udp( +async fn op_net_set_multi_loopback_udp( state: Rc>, rid: ResourceId, is_v4_membership: bool, loopback: bool, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -283,14 +268,11 @@ where } #[op] -async fn op_net_set_multi_ttl_udp( +async fn op_net_set_multi_ttl_udp( state: Rc>, rid: ResourceId, ttl: u32, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index c0cfb8674f..7f451d0a84 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -26,6 +26,7 @@ use deno_core::futures::task::Waker; use deno_core::op; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -61,6 +62,7 @@ use std::fs::File; use std::io; use std::io::BufReader; use std::io::ErrorKind; +use std::net::SocketAddr; use std::path::Path; use std::pin::Pin; use std::rc::Rc; @@ -73,7 +75,6 @@ use tokio::io::AsyncWriteExt; use tokio::io::ReadBuf; use tokio::net::TcpListener; use tokio::net::TcpStream; -use tokio::task::spawn_local; #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum Flow { @@ -115,6 +116,13 @@ impl TlsStream { Self::new(tcp, Connection::Client(tls)) } + pub fn new_client_side_from( + tcp: TcpStream, + connection: ClientConnection, + ) -> Self { + Self::new(tcp, Connection::Client(connection)) + } + pub fn new_server_side( tcp: TcpStream, tls_config: Arc, @@ -123,6 +131,13 @@ impl TlsStream { Self::new(tcp, Connection::Server(tls)) } + pub fn new_server_side_from( + tcp: TcpStream, + connection: ServerConnection, + ) -> Self { + Self::new(tcp, Connection::Server(connection)) + } + pub fn into_split(self) -> (ReadHalf, WriteHalf) { let shared = Shared::new(self); let rd = ReadHalf { @@ 
-132,6 +147,16 @@ impl TlsStream { (rd, wr) } + /// Convenience method to match [`TcpStream`]. + pub fn peer_addr(&self) -> Result { + self.0.as_ref().unwrap().tcp.peer_addr() + } + + /// Convenience method to match [`TcpStream`]. + pub fn local_addr(&self) -> Result { + self.0.as_ref().unwrap().tcp.local_addr() + } + /// Tokio-rustls compatibility: returns a reference to the underlying TCP /// stream, and a reference to the Rustls `Connection` object. pub fn get_ref(&self) -> (&TcpStream, &Connection) { @@ -199,9 +224,9 @@ impl Drop for TlsStream { let use_linger_task = inner.poll_close(&mut cx).is_pending(); if use_linger_task { - spawn_local(poll_fn(move |cx| inner.poll_close(cx))); + spawn(poll_fn(move |cx| inner.poll_close(cx))); } else if cfg!(debug_assertions) { - spawn_local(async {}); // Spawn dummy task to detect missing LocalSet. + spawn(async {}); // Spawn dummy task to detect missing runtime. } } } @@ -788,14 +813,10 @@ where .try_borrow::() .and_then(|it| it.0.clone()); - // TODO(@justinmchase): Ideally the certificate store is created once - // and not cloned. The store should be wrapped in Arc to reduce - // copying memory unnecessarily. 
let root_cert_store = state .borrow() .borrow::() - .root_cert_store - .clone(); + .root_cert_store()?; let resource_rc = state .borrow_mut() @@ -887,8 +908,7 @@ where let root_cert_store = state .borrow() .borrow::() - .root_cert_store - .clone(); + .root_cert_store()?; let hostname_dns = ServerName::try_from(&*addr.hostname) .map_err(|_| invalid_hostname(&addr.hostname))?; let connect_addr = resolve_addr(&addr.hostname, addr.port) @@ -954,8 +974,8 @@ fn load_private_keys_from_file( } pub struct TlsListenerResource { - tcp_listener: AsyncRefCell, - tls_config: Arc, + pub(crate) tcp_listener: AsyncRefCell, + pub(crate) tls_config: Arc, cancel_handle: CancelHandle, } diff --git a/ext/net/ops_unix.rs b/ext/net/ops_unix.rs index 1161d27592..bed923f8b4 100644 --- a/ext/net/ops_unix.rs +++ b/ext/net/ops_unix.rs @@ -32,8 +32,8 @@ pub fn into_string(s: std::ffi::OsString) -> Result { }) } -struct UnixListenerResource { - listener: AsyncRefCell, +pub(crate) struct UnixListenerResource { + pub listener: AsyncRefCell, cancel: CancelHandle, } diff --git a/ext/net/raw.rs b/ext/net/raw.rs new file mode 100644 index 0000000000..0c92c46707 --- /dev/null +++ b/ext/net/raw.rs @@ -0,0 +1,298 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use crate::io::TcpStreamResource; +#[cfg(unix)] +use crate::io::UnixStreamResource; +use crate::ops::TcpListenerResource; +use crate::ops_tls::TlsListenerResource; +use crate::ops_tls::TlsStream; +use crate::ops_tls::TlsStreamResource; +#[cfg(unix)] +use crate::ops_unix::UnixListenerResource; +use deno_core::error::bad_resource; +use deno_core::error::bad_resource_id; +use deno_core::error::AnyError; +use deno_core::ResourceId; +use deno_core::ResourceTable; +use deno_tls::rustls::ServerConfig; +use pin_project::pin_project; +use std::rc::Rc; +use std::sync::Arc; +use tokio::net::TcpStream; +#[cfg(unix)] +use tokio::net::UnixStream; + +/// A raw stream of one of the types handled by this extension. 
+#[pin_project(project = NetworkStreamProject)] +pub enum NetworkStream { + Tcp(#[pin] TcpStream), + Tls(#[pin] TlsStream), + #[cfg(unix)] + Unix(#[pin] UnixStream), +} + +impl From for NetworkStream { + fn from(value: TcpStream) -> Self { + NetworkStream::Tcp(value) + } +} + +impl From for NetworkStream { + fn from(value: TlsStream) -> Self { + NetworkStream::Tls(value) + } +} + +#[cfg(unix)] +impl From for NetworkStream { + fn from(value: UnixStream) -> Self { + NetworkStream::Unix(value) + } +} + +/// A raw stream of one of the types handled by this extension. +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum NetworkStreamType { + Tcp, + Tls, + #[cfg(unix)] + Unix, +} + +impl NetworkStream { + pub fn local_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + Self::Tls(tls) => Ok(NetworkStreamAddress::Ip(tls.local_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.local_addr()?)), + } + } + + pub fn peer_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.peer_addr()?)), + Self::Tls(tls) => Ok(NetworkStreamAddress::Ip(tls.peer_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.peer_addr()?)), + } + } + + pub fn stream(&self) -> NetworkStreamType { + match self { + Self::Tcp(_) => NetworkStreamType::Tcp, + Self::Tls(_) => NetworkStreamType::Tls, + #[cfg(unix)] + Self::Unix(_) => NetworkStreamType::Unix, + } + } +} + +impl tokio::io::AsyncRead for NetworkStream { + fn poll_read( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut tokio::io::ReadBuf<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_read(cx, buf), + NetworkStreamProject::Tls(s) => s.poll_read(cx, buf), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_read(cx, buf), + } + } +} + +impl tokio::io::AsyncWrite for NetworkStream { + fn poll_write( + self: 
std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_write(cx, buf), + NetworkStreamProject::Tls(s) => s.poll_write(cx, buf), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_write(cx, buf), + } + } + + fn poll_flush( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_flush(cx), + NetworkStreamProject::Tls(s) => s.poll_flush(cx), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_flush(cx), + } + } + + fn poll_shutdown( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_shutdown(cx), + NetworkStreamProject::Tls(s) => s.poll_shutdown(cx), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_shutdown(cx), + } + } + + fn is_write_vectored(&self) -> bool { + match self { + Self::Tcp(s) => s.is_write_vectored(), + Self::Tls(s) => s.is_write_vectored(), + #[cfg(unix)] + Self::Unix(s) => s.is_write_vectored(), + } + } + + fn poll_write_vectored( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_write_vectored(cx, bufs), + NetworkStreamProject::Tls(s) => s.poll_write_vectored(cx, bufs), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_write_vectored(cx, bufs), + } + } +} + +/// A raw stream listener of one of the types handled by this extension. 
+pub enum NetworkStreamListener { + Tcp(tokio::net::TcpListener), + Tls(tokio::net::TcpListener, Arc), + #[cfg(unix)] + Unix(tokio::net::UnixListener), +} + +pub enum NetworkStreamAddress { + Ip(std::net::SocketAddr), + #[cfg(unix)] + Unix(tokio::net::unix::SocketAddr), +} + +impl NetworkStreamListener { + /// Accepts a connection on this listener. + pub async fn accept(&self) -> Result { + Ok(match self { + Self::Tcp(tcp) => { + let (stream, _addr) = tcp.accept().await?; + NetworkStream::Tcp(stream) + } + Self::Tls(tcp, config) => { + let (stream, _addr) = tcp.accept().await?; + NetworkStream::Tls(TlsStream::new_server_side(stream, config.clone())) + } + #[cfg(unix)] + Self::Unix(unix) => { + let (stream, _addr) = unix.accept().await?; + NetworkStream::Unix(stream) + } + }) + } + + pub fn listen_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + Self::Tls(tcp, _) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.local_addr()?)), + } + } + + pub fn stream(&self) -> NetworkStreamType { + match self { + Self::Tcp(..) => NetworkStreamType::Tcp, + Self::Tls(..) => NetworkStreamType::Tls, + #[cfg(unix)] + Self::Unix(..) => NetworkStreamType::Unix, + } + } +} + +/// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). +/// This method will extract a stream from the resource table and return it, unwrapped. +pub fn take_network_stream_resource( + resource_table: &mut ResourceTable, + stream_rid: ResourceId, +) -> Result { + // The stream we're attempting to unwrap may be in use somewhere else. If that's the case, we cannot proceed + // with the process of unwrapping this connection, so we just return a bad resource error. 
+ // See also: https://github.com/denoland/deno/pull/16242 + + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This TCP connection might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TCP stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let tcp_stream = read_half.reunite(write_half)?; + return Ok(NetworkStream::Tcp(tcp_stream)); + } + + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This TLS connection might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TLS stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let tls_stream = read_half.reunite(write_half); + return Ok(NetworkStream::Tls(tls_stream)); + } + + #[cfg(unix)] + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This UNIX socket might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("UNIX stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let unix_stream = read_half.reunite(write_half)?; + return Ok(NetworkStream::Unix(unix_stream)); + } + + Err(bad_resource_id()) +} + +/// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). +/// This method will extract a stream from the resource table and return it, unwrapped. 
+pub fn take_network_stream_listener_resource( + resource_table: &mut ResourceTable, + listener_rid: ResourceId, +) -> Result { + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TCP socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Tcp(resource.listener.into_inner())); + } + + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TLS socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Tls( + resource.tcp_listener.into_inner(), + resource.tls_config, + )); + } + + #[cfg(unix)] + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("UNIX socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Unix(resource.listener.into_inner())); + } + + Err(bad_resource_id()) +} diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 0d647e4f02..0efb27aeaa 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.36.0" +version = "0.44.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -18,9 +18,15 @@ aes.workspace = true cbc.workspace = true data-encoding = "2.3.3" deno_core.workspace = true +deno_fetch.workspace = true +deno_fs.workspace = true +deno_media_type.workspace = true +deno_npm.workspace = true +deno_semver.workspace = true digest = { version = "0.10.5", features = ["core-api", "std"] } dsa = "0.6.1" ecb.workspace = true +elliptic-curve.workspace = true hex.workspace = true hkdf.workspace = true idna = "0.3.0" @@ -34,14 +40,19 @@ num-bigint-dig = "0.8.2" num-integer = "0.1.45" num-traits = "0.2.14" once_cell.workspace = true +p224.workspace = true +p256.workspace = true +p384.workspace = true path-clean = "=0.1.0" pbkdf2 = "0.12.1" rand.workspace 
= true regex.workspace = true +reqwest.workspace = true ring.workspace = true ripemd = "0.1.3" rsa.workspace = true scrypt = "0.11.0" +secp256k1 = { version = "0.27.0", features = ["rand-std"] } serde = "1.0.149" sha-1 = "0.10.0" sha2.workspace = true diff --git a/cli/node/analyze.rs b/ext/node/analyze.rs similarity index 54% rename from cli/node/analyze.rs rename to ext/node/analyze.rs index f93e9fa910..2e5c2d15f5 100644 --- a/cli/node/analyze.rs +++ b/ext/node/analyze.rs @@ -5,32 +5,21 @@ use std::collections::VecDeque; use std::fmt::Write; use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; -use deno_ast::swc::common::SyntaxContext; -use deno_ast::view::Node; -use deno_ast::view::NodeTrait; -use deno_ast::CjsAnalysis; -use deno_ast::MediaType; -use deno_ast::ModuleSpecifier; -use deno_ast::ParsedSource; -use deno_ast::SourceRanged; -use deno_core::anyhow::anyhow; -use deno_core::error::AnyError; -use deno_runtime::deno_node::package_exports_resolve; -use deno_runtime::deno_node::NodeModuleKind; -use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeResolutionMode; -use deno_runtime::deno_node::PackageJson; -use deno_runtime::deno_node::PathClean; -use deno_runtime::deno_node::RealFs; -use deno_runtime::deno_node::RequireNpmResolver; -use deno_runtime::deno_node::NODE_GLOBAL_THIS_NAME; +use deno_core::anyhow::Context; +use deno_core::ModuleSpecifier; use once_cell::sync::Lazy; -use crate::cache::NodeAnalysisCache; -use crate::file_fetcher::FileFetcher; -use crate::npm::NpmPackageResolver; +use deno_core::error::AnyError; + +use crate::resolution::NodeResolverRc; +use crate::NodeModuleKind; +use crate::NodePermissions; +use crate::NodeResolutionMode; +use crate::NpmResolverRc; +use crate::PackageJson; +use crate::PathClean; +use crate::NODE_GLOBAL_THIS_NAME; static NODE_GLOBALS: &[&str] = &[ "Buffer", @@ -43,33 +32,74 @@ static NODE_GLOBALS: &[&str] = &[ "setImmediate", "setInterval", "setTimeout", + "performance", ]; -pub 
struct NodeCodeTranslator { - analysis_cache: NodeAnalysisCache, - file_fetcher: Arc, - npm_resolver: Arc, +#[derive(Debug, Clone)] +pub struct CjsAnalysis { + pub exports: Vec, + pub reexports: Vec, } -impl NodeCodeTranslator { +/// Code analyzer for CJS and ESM files. +pub trait CjsEsmCodeAnalyzer { + /// Analyzes CommonJs code for exports and reexports, which is + /// then used to determine the wrapper ESM module exports. + fn analyze_cjs( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result; + + /// Analyzes ESM code for top level declarations. This is used + /// to help inform injecting node specific globals into Node ESM + /// code. For example, if a top level `setTimeout` function exists + /// then we don't want to inject a `setTimeout` declaration. + /// + /// Note: This will go away in the future once we do this all in v8. + fn analyze_esm_top_level_decls( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result, AnyError>; +} + +pub struct NodeCodeTranslator { + cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, + fs: deno_fs::FileSystemRc, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, +} + +impl + NodeCodeTranslator +{ pub fn new( - analysis_cache: NodeAnalysisCache, - file_fetcher: Arc, - npm_resolver: Arc, + cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, + fs: deno_fs::FileSystemRc, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, ) -> Self { Self { - analysis_cache, - file_fetcher, + cjs_esm_code_analyzer, + fs, + node_resolver, npm_resolver, } } + /// Resolves the code to be used when executing Node specific ESM code. + /// + /// Note: This will go away in the future once we do this all in v8. 
pub fn esm_code_with_node_globals( &self, specifier: &ModuleSpecifier, - code: String, + source: &str, ) -> Result { - esm_code_with_node_globals(&self.analysis_cache, specifier, code) + let top_level_decls = self + .cjs_esm_code_analyzer + .analyze_esm_top_level_decls(specifier, source)?; + Ok(esm_code_from_top_level_decls(source, &top_level_decls)) } /// Translates given CJS module into ESM. This function will perform static @@ -81,22 +111,20 @@ impl NodeCodeTranslator { pub fn translate_cjs_to_esm( &self, specifier: &ModuleSpecifier, - code: String, - media_type: MediaType, - permissions: &mut dyn NodePermissions, + source: &str, + permissions: &dyn NodePermissions, ) -> Result { let mut temp_var_count = 0; let mut handled_reexports: HashSet = HashSet::default(); + let analysis = self.cjs_esm_code_analyzer.analyze_cjs(specifier, source)?; + let mut source = vec![ r#"import {createRequire as __internalCreateRequire} from "node:module"; const require = __internalCreateRequire(import.meta.url);"# .to_string(), ]; - let analysis = - self.perform_cjs_analysis(specifier.as_str(), media_type, code)?; - let mut all_exports = analysis .exports .iter() @@ -126,27 +154,23 @@ impl NodeCodeTranslator { NodeResolutionMode::Execution, permissions, )?; - let reexport_specifier = - ModuleSpecifier::from_file_path(resolved_reexport).unwrap(); // Second, read the source code from disk - let reexport_file = self - .file_fetcher - .get_source(&reexport_specifier) - .ok_or_else(|| { - anyhow!( + let reexport_specifier = + ModuleSpecifier::from_file_path(&resolved_reexport).unwrap(); + let reexport_file_text = self + .fs + .read_to_string(&resolved_reexport) + .map_err(AnyError::from) + .with_context(|| { + format!( "Could not find '{}' ({}) referenced from {}", - reexport, - reexport_specifier, - referrer + reexport, reexport_specifier, referrer ) })?; - { - let analysis = self.perform_cjs_analysis( - reexport_specifier.as_str(), - reexport_file.media_type, - 
reexport_file.source.to_string(), - )?; + let analysis = self + .cjs_esm_code_analyzer + .analyze_cjs(&reexport_specifier, &reexport_file_text)?; for reexport in analysis.reexports { reexports_to_handle.push_back((reexport, reexport_specifier.clone())); @@ -190,50 +214,13 @@ impl NodeCodeTranslator { Ok(translated_source) } - fn perform_cjs_analysis( - &self, - specifier: &str, - media_type: MediaType, - code: String, - ) -> Result { - let source_hash = NodeAnalysisCache::compute_source_hash(&code); - if let Some(analysis) = self - .analysis_cache - .get_cjs_analysis(specifier, &source_hash) - { - return Ok(analysis); - } - - if media_type == MediaType::Json { - return Ok(CjsAnalysis { - exports: vec![], - reexports: vec![], - }); - } - - let parsed_source = deno_ast::parse_script(deno_ast::ParseParams { - specifier: specifier.to_string(), - text_info: deno_ast::SourceTextInfo::new(code.into()), - media_type, - capture_tokens: true, - scope_analysis: false, - maybe_syntax: None, - })?; - let analysis = parsed_source.analyze_cjs(); - self - .analysis_cache - .set_cjs_analysis(specifier, &source_hash, &analysis); - - Ok(analysis) - } - fn resolve( &self, specifier: &str, referrer: &ModuleSpecifier, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { if specifier.starts_with('/') { todo!(); @@ -242,7 +229,8 @@ impl NodeCodeTranslator { let referrer_path = referrer.to_file_path().unwrap(); if specifier.starts_with("./") || specifier.starts_with("../") { if let Some(parent) = referrer_path.parent() { - return file_extension_probe(parent.join(specifier), &referrer_path); + return self + .file_extension_probe(parent.join(specifier), &referrer_path); } else { todo!(); } @@ -254,23 +242,23 @@ impl NodeCodeTranslator { parse_specifier(specifier).unwrap(); // todo(dsherret): use not_found error on not found here - let resolver = self.npm_resolver.as_require_npm_resolver(); - let module_dir 
= resolver.resolve_package_folder_from_package( + let module_dir = self.npm_resolver.resolve_package_folder_from_package( package_specifier.as_str(), - &referrer_path, + referrer, mode, )?; let package_json_path = module_dir.join("package.json"); - if package_json_path.exists() { - let package_json = PackageJson::load::( - &self.npm_resolver.as_require_npm_resolver(), + if self.fs.exists(&package_json_path) { + let package_json = PackageJson::load( + &*self.fs, + &*self.npm_resolver, permissions, package_json_path.clone(), )?; if let Some(exports) = &package_json.exports { - return package_exports_resolve::( + return self.node_resolver.package_exports_resolve( &package_json_path, package_subpath, exports, @@ -278,7 +266,6 @@ impl NodeCodeTranslator { NodeModuleKind::Esm, conditions, mode, - &self.npm_resolver.as_require_npm_resolver(), permissions, ); } @@ -286,25 +273,24 @@ impl NodeCodeTranslator { // old school if package_subpath != "." { let d = module_dir.join(package_subpath); - if let Ok(m) = d.metadata() { - if m.is_dir() { - // subdir might have a package.json that specifies the entrypoint - let package_json_path = d.join("package.json"); - if package_json_path.exists() { - let package_json = PackageJson::load::( - &self.npm_resolver.as_require_npm_resolver(), - permissions, - package_json_path, - )?; - if let Some(main) = package_json.main(NodeModuleKind::Cjs) { - return Ok(d.join(main).clean()); - } + if self.fs.is_dir(&d) { + // subdir might have a package.json that specifies the entrypoint + let package_json_path = d.join("package.json"); + if self.fs.exists(&package_json_path) { + let package_json = PackageJson::load( + &*self.fs, + &*self.npm_resolver, + permissions, + package_json_path, + )?; + if let Some(main) = package_json.main(NodeModuleKind::Cjs) { + return Ok(d.join(main).clean()); } - - return Ok(d.join("index.js").clean()); } + + return Ok(d.join("index.js").clean()); } - return file_extension_probe(d, &referrer_path); + return 
self.file_extension_probe(d, &referrer_path); } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { return Ok(module_dir.join(main).clean()); } else { @@ -313,47 +299,33 @@ impl NodeCodeTranslator { } Err(not_found(specifier, &referrer_path)) } -} -fn esm_code_with_node_globals( - analysis_cache: &NodeAnalysisCache, - specifier: &ModuleSpecifier, - code: String, -) -> Result { - // TODO(dsherret): this code is way more inefficient than it needs to be. - // - // In the future, we should disable capturing tokens & scope analysis - // and instead only use swc's APIs to go through the portions of the tree - // that we know will affect the global scope while still ensuring that - // `var` decls are taken into consideration. - let source_hash = NodeAnalysisCache::compute_source_hash(&code); - let text_info = deno_ast::SourceTextInfo::from_string(code); - let top_level_decls = if let Some(decls) = - analysis_cache.get_esm_analysis(specifier.as_str(), &source_hash) - { - HashSet::from_iter(decls) - } else { - let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { - specifier: specifier.to_string(), - text_info: text_info.clone(), - media_type: deno_ast::MediaType::from_specifier(specifier), - capture_tokens: true, - scope_analysis: true, - maybe_syntax: None, - })?; - let top_level_decls = analyze_top_level_decls(&parsed_source)?; - analysis_cache.set_esm_analysis( - specifier.as_str(), - &source_hash, - &top_level_decls.clone().into_iter().collect(), - ); - top_level_decls - }; - - Ok(esm_code_from_top_level_decls( - text_info.text_str(), - &top_level_decls, - )) + fn file_extension_probe( + &self, + p: PathBuf, + referrer: &Path, + ) -> Result { + let p = p.clean(); + if self.fs.exists(&p) { + let file_name = p.file_name().unwrap(); + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.fs.is_file(&p_js) { + return Ok(p_js); + } else if self.fs.is_dir(&p) { + return Ok(p.join("index.js")); + } else { + return 
Ok(p); + } + } else if let Some(file_name) = p.file_name() { + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.fs.is_file(&p_js) { + return Ok(p_js); + } + } + Err(not_found(&p.to_string_lossy(), referrer)) + } } fn esm_code_from_top_level_decls( @@ -369,7 +341,7 @@ fn esm_code_from_top_level_decls( } let mut result = String::new(); - let global_this_expr = NODE_GLOBAL_THIS_NAME.as_str(); + let global_this_expr = NODE_GLOBAL_THIS_NAME; let global_this_expr = if has_global_this { global_this_expr } else { @@ -392,70 +364,6 @@ fn esm_code_from_top_level_decls( result } -fn analyze_top_level_decls( - parsed_source: &ParsedSource, -) -> Result, AnyError> { - fn visit_children( - node: Node, - top_level_context: SyntaxContext, - results: &mut HashSet, - ) { - if let Node::Ident(ident) = node { - if ident.ctxt() == top_level_context && is_local_declaration_ident(node) { - results.insert(ident.sym().to_string()); - } - } - - for child in node.children() { - visit_children(child, top_level_context, results); - } - } - - let top_level_context = parsed_source.top_level_context(); - - parsed_source.with_view(|program| { - let mut results = HashSet::new(); - visit_children(program.into(), top_level_context, &mut results); - Ok(results) - }) -} - -fn is_local_declaration_ident(node: Node) -> bool { - if let Some(parent) = node.parent() { - match parent { - Node::BindingIdent(decl) => decl.id.range().contains(&node.range()), - Node::ClassDecl(decl) => decl.ident.range().contains(&node.range()), - Node::ClassExpr(decl) => decl - .ident - .as_ref() - .map(|i| i.range().contains(&node.range())) - .unwrap_or(false), - Node::TsInterfaceDecl(decl) => decl.id.range().contains(&node.range()), - Node::FnDecl(decl) => decl.ident.range().contains(&node.range()), - Node::FnExpr(decl) => decl - .ident - .as_ref() - .map(|i| i.range().contains(&node.range())) - .unwrap_or(false), - Node::TsModuleDecl(decl) => decl.id.range().contains(&node.range()), - 
Node::TsNamespaceDecl(decl) => decl.id.range().contains(&node.range()), - Node::VarDeclarator(decl) => decl.name.range().contains(&node.range()), - Node::ImportNamedSpecifier(decl) => { - decl.local.range().contains(&node.range()) - } - Node::ImportDefaultSpecifier(decl) => { - decl.local.range().contains(&node.range()) - } - Node::ImportStarAsSpecifier(decl) => decl.range().contains(&node.range()), - Node::KeyValuePatProp(decl) => decl.key.range().contains(&node.range()), - Node::AssignPatProp(decl) => decl.key.range().contains(&node.range()), - _ => false, - } - } else { - false - } -} - static RESERVED_WORDS: Lazy> = Lazy::new(|| { HashSet::from([ "break", @@ -579,30 +487,6 @@ fn parse_specifier(specifier: &str) -> Option<(String, String)> { Some((package_name, package_subpath)) } -fn file_extension_probe( - p: PathBuf, - referrer: &Path, -) -> Result { - let p = p.clean(); - if p.exists() { - let file_name = p.file_name().unwrap(); - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if p_js.exists() && p_js.is_file() { - return Ok(p_js); - } else if p.is_dir() { - return Ok(p.join("index.js")); - } else { - return Ok(p); - } - } else if let Some(file_name) = p.file_name() { - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if p_js.exists() && p_js.is_file() { - return Ok(p_js); - } - } - Err(not_found(&p.to_string_lossy(), referrer)) -} - fn not_found(path: &str, referrer: &Path) -> AnyError { let msg = format!( "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", @@ -618,28 +502,23 @@ mod tests { #[test] fn test_esm_code_with_node_globals() { - let r = esm_code_with_node_globals( - &NodeAnalysisCache::new_in_memory(), - &ModuleSpecifier::parse("https://example.com/foo/bar.js").unwrap(), - "export const x = 1;".to_string(), - ) - .unwrap(); - assert!(r.contains(&format!( - "var globalThis = {};", - NODE_GLOBAL_THIS_NAME.as_str() - ))); + let r = esm_code_from_top_level_decls( + 
"export const x = 1;", + &HashSet::from(["x".to_string()]), + ); + assert!( + r.contains(&format!("var globalThis = {};", NODE_GLOBAL_THIS_NAME,)) + ); assert!(r.contains("var process = globalThis.process;")); assert!(r.contains("export const x = 1;")); } #[test] fn test_esm_code_with_node_globals_with_shebang() { - let r = esm_code_with_node_globals( - &NodeAnalysisCache::new_in_memory(), - &ModuleSpecifier::parse("https://example.com/foo/bar.js").unwrap(), - "#!/usr/bin/env node\nexport const x = 1;".to_string(), - ) - .unwrap(); + let r = esm_code_from_top_level_decls( + "#!/usr/bin/env node\nexport const x = 1;", + &HashSet::from(["x".to_string()]), + ); assert_eq!( r, format!( @@ -650,10 +529,10 @@ mod tests { "var clearTimeout = globalThis.clearTimeout;var console = globalThis.console;", "var global = globalThis.global;var process = globalThis.process;", "var setImmediate = globalThis.setImmediate;var setInterval = globalThis.setInterval;", - "var setTimeout = globalThis.setTimeout;\n", + "var setTimeout = globalThis.setTimeout;var performance = globalThis.performance;\n", "export const x = 1;" ), - NODE_GLOBAL_THIS_NAME.as_str(), + NODE_GLOBAL_THIS_NAME, ) ); } diff --git a/ext/node/build.rs b/ext/node/build.rs new file mode 100644 index 0000000000..e9b960cab2 --- /dev/null +++ b/ext/node/build.rs @@ -0,0 +1,10 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +fn main() { + // we use a changing variable name to make it harder to depend on this + let crate_version = env!("CARGO_PKG_VERSION"); + println!( + "cargo:rustc-env=NODE_GLOBAL_THIS_NAME=__DENO_NODE_GLOBAL_THIS_{}__", + crate_version.replace('.', "_") + ); +} diff --git a/ext/node/clippy.toml b/ext/node/clippy.toml index 94796f5a70..02fd259d09 100644 --- a/ext/node/clippy.toml +++ b/ext/node/clippy.toml @@ -1,40 +1,43 @@ disallowed-methods = [ - { path = "std::env::current_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done 
using NodeFs trait" }, - { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::copy", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::create_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::create_dir_all", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::hard_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_to_string", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::remove_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = 
"std::fs::remove_dir_all", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::remove_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::rename", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::set_permissions", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::write", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::env::current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using 
FileSystem trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::copy", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_link", reason = 
"File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::rename", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::write", reason = "File system operations should be done using FileSystem trait" }, +] +disallowed-types = [ + { path = "std::sync::Arc", reason = "use deno_fs::sync::MaybeArc instead" }, ] diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 64a2e083ab..e77b3c0896 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -1,149 +1,122 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::collections::HashSet; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + use deno_core::error::AnyError; use deno_core::located_script_name; use deno_core::op; use deno_core::serde_json; +use deno_core::url::Url; use deno_core::JsRuntime; +use deno_core::ModuleSpecifier; +use deno_fs::sync::MaybeSend; +use deno_fs::sync::MaybeSync; +use deno_npm::resolution::PackageReqNotFoundError; +use deno_npm::NpmPackageId; +use deno_semver::npm::NpmPackageNv; +use deno_semver::npm::NpmPackageNvReference; +use deno_semver::npm::NpmPackageReq; +use deno_semver::npm::NpmPackageReqReference; use once_cell::sync::Lazy; -use std::collections::HashSet; -use std::io; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; -mod crypto; +pub mod analyze; pub mod errors; -mod idna; mod ops; mod package_json; mod path; mod polyfill; mod resolution; -mod v8; -mod winerror; -mod zlib; pub use package_json::PackageJson; pub use path::PathClean; -pub use polyfill::find_builtin_node_module; pub use polyfill::is_builtin_node_module; pub use polyfill::NodeModulePolyfill; pub use polyfill::SUPPORTED_BUILTIN_NODE_MODULES; -pub use resolution::get_closest_package_json; -pub use resolution::get_package_scope_config; -pub use resolution::legacy_main_resolve; -pub use resolution::package_exports_resolve; -pub use resolution::package_imports_resolve; -pub use resolution::package_resolve; -pub use resolution::path_to_declaration_path; pub use resolution::NodeModuleKind; +pub use resolution::NodeResolution; pub use resolution::NodeResolutionMode; -pub use resolution::DEFAULT_CONDITIONS; - -pub trait NodeEnv { - type P: NodePermissions; - type Fs: NodeFs; -} +pub use resolution::NodeResolver; pub trait NodePermissions { - fn check_read(&mut self, path: &Path) -> Result<(), AnyError>; + fn check_net_url( + &mut self, + url: &Url, + api_name: &str, + ) -> Result<(), AnyError>; + fn check_read(&self, path: &Path) -> Result<(), AnyError>; } -#[derive(Default, Clone)] -pub 
struct NodeFsMetadata { - pub is_file: bool, - pub is_dir: bool, -} +pub(crate) struct AllowAllNodePermissions; -pub trait NodeFs { - fn current_dir() -> io::Result; - fn metadata>(path: P) -> io::Result; - fn is_file>(path: P) -> bool; - fn is_dir>(path: P) -> bool; - fn exists>(path: P) -> bool; - fn read_to_string>(path: P) -> io::Result; - fn canonicalize>(path: P) -> io::Result; -} - -pub struct RealFs; -impl NodeFs for RealFs { - fn current_dir() -> io::Result { - #[allow(clippy::disallowed_methods)] - std::env::current_dir() +impl NodePermissions for AllowAllNodePermissions { + fn check_net_url( + &mut self, + _url: &Url, + _api_name: &str, + ) -> Result<(), AnyError> { + Ok(()) } - - fn metadata>(path: P) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).map(|metadata| { - // on most systems, calling is_file() and is_dir() is cheap - // and returns information already found in the metadata object - NodeFsMetadata { - is_file: metadata.is_file(), - is_dir: metadata.is_dir(), - } - }) - } - - fn exists>(path: P) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).is_ok() - } - - fn is_file>(path: P) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path) - .map(|m| m.is_file()) - .unwrap_or(false) - } - - fn is_dir>(path: P) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).map(|m| m.is_dir()).unwrap_or(false) - } - - fn read_to_string>(path: P) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::fs::read_to_string(path) - } - - fn canonicalize>(path: P) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::path::Path::canonicalize(path.as_ref()) + fn check_read(&self, _path: &Path) -> Result<(), AnyError> { + Ok(()) } } -pub trait RequireNpmResolver { +#[allow(clippy::disallowed_types)] +pub type NpmResolverRc = deno_fs::sync::MaybeArc; + +pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { + /// Resolves an npm package folder path 
from an npm package referrer. fn resolve_package_folder_from_package( &self, specifier: &str, - referrer: &Path, + referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result; + /// Resolves the npm package folder path from the specified path. fn resolve_package_folder_from_path( &self, path: &Path, ) -> Result; - fn in_npm_package(&self, path: &Path) -> bool; + /// Resolves an npm package folder path from a Deno module. + fn resolve_package_folder_from_deno_module( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result; + + fn resolve_pkg_id_from_pkg_req( + &self, + req: &NpmPackageReq, + ) -> Result; + + fn resolve_nv_ref_from_pkg_req_ref( + &self, + req_ref: &NpmPackageReqReference, + ) -> Result; + + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool; + + fn in_npm_package_at_path(&self, path: &Path) -> bool { + let specifier = + match ModuleSpecifier::from_file_path(path.to_path_buf().clean()) { + Ok(p) => p, + Err(_) => return false, + }; + self.in_npm_package(&specifier) + } fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: &Path, ) -> Result<(), AnyError>; } -pub static NODE_GLOBAL_THIS_NAME: Lazy = Lazy::new(|| { - let now = std::time::SystemTime::now(); - let seconds = now - .duration_since(std::time::SystemTime::UNIX_EPOCH) - .unwrap() - .as_secs(); - // use a changing variable name to make it hard to depend on this - format!("__DENO_NODE_GLOBAL_THIS_{seconds}__") -}); +pub const NODE_GLOBAL_THIS_NAME: &str = env!("NODE_GLOBAL_THIS_NAME"); pub static NODE_ENV_VAR_ALLOWLIST: Lazy> = Lazy::new(|| { // The full list of environment variables supported by Node.js is available @@ -166,102 +139,109 @@ fn op_node_build_os() -> String { deno_core::extension!(deno_node, deps = [ deno_io, deno_fs ], - parameters = [Env: NodeEnv], + parameters = [P: NodePermissions], ops = [ - crypto::op_node_create_decipheriv, - crypto::op_node_cipheriv_encrypt, - crypto::op_node_cipheriv_final, - 
crypto::op_node_create_cipheriv, - crypto::op_node_create_hash, - crypto::op_node_decipheriv_decrypt, - crypto::op_node_decipheriv_final, - crypto::op_node_hash_update, - crypto::op_node_hash_update_str, - crypto::op_node_hash_digest, - crypto::op_node_hash_digest_hex, - crypto::op_node_hash_clone, - crypto::op_node_private_encrypt, - crypto::op_node_private_decrypt, - crypto::op_node_public_encrypt, - crypto::op_node_check_prime, - crypto::op_node_check_prime_async, - crypto::op_node_check_prime_bytes, - crypto::op_node_check_prime_bytes_async, - crypto::op_node_pbkdf2, - crypto::op_node_pbkdf2_async, - crypto::op_node_hkdf, - crypto::op_node_hkdf_async, - crypto::op_node_generate_secret, - crypto::op_node_generate_secret_async, - crypto::op_node_sign, - crypto::op_node_generate_rsa, - crypto::op_node_generate_rsa_async, - crypto::op_node_dsa_generate, - crypto::op_node_dsa_generate_async, - crypto::op_node_ec_generate, - crypto::op_node_ec_generate_async, - crypto::op_node_ed25519_generate, - crypto::op_node_ed25519_generate_async, - crypto::op_node_x25519_generate, - crypto::op_node_x25519_generate_async, - crypto::op_node_dh_generate_group, - crypto::op_node_dh_generate_group_async, - crypto::op_node_dh_generate, - crypto::op_node_dh_generate_async, - crypto::op_node_verify, - crypto::op_node_random_int, - crypto::op_node_scrypt_sync, - crypto::op_node_scrypt_async, - crypto::x509::op_node_x509_parse, - crypto::x509::op_node_x509_ca, - crypto::x509::op_node_x509_check_email, - crypto::x509::op_node_x509_fingerprint, - crypto::x509::op_node_x509_fingerprint256, - crypto::x509::op_node_x509_fingerprint512, - crypto::x509::op_node_x509_get_issuer, - crypto::x509::op_node_x509_get_subject, - crypto::x509::op_node_x509_get_valid_from, - crypto::x509::op_node_x509_get_valid_to, - crypto::x509::op_node_x509_get_serial_number, - crypto::x509::op_node_x509_key_usage, - winerror::op_node_sys_to_uv_error, - v8::op_v8_cached_data_version_tag, - 
v8::op_v8_get_heap_statistics, - idna::op_node_idna_domain_to_ascii, - idna::op_node_idna_domain_to_unicode, - idna::op_node_idna_punycode_decode, - idna::op_node_idna_punycode_encode, - zlib::op_zlib_new, - zlib::op_zlib_close, - zlib::op_zlib_close_if_pending, - zlib::op_zlib_write, - zlib::op_zlib_write_async, - zlib::op_zlib_init, - zlib::op_zlib_reset, + ops::crypto::op_node_create_decipheriv, + ops::crypto::op_node_cipheriv_encrypt, + ops::crypto::op_node_cipheriv_final, + ops::crypto::op_node_create_cipheriv, + ops::crypto::op_node_create_hash, + ops::crypto::op_node_decipheriv_decrypt, + ops::crypto::op_node_decipheriv_final, + ops::crypto::op_node_hash_update, + ops::crypto::op_node_hash_update_str, + ops::crypto::op_node_hash_digest, + ops::crypto::op_node_hash_digest_hex, + ops::crypto::op_node_hash_clone, + ops::crypto::op_node_private_encrypt, + ops::crypto::op_node_private_decrypt, + ops::crypto::op_node_public_encrypt, + ops::crypto::op_node_check_prime, + ops::crypto::op_node_check_prime_async, + ops::crypto::op_node_check_prime_bytes, + ops::crypto::op_node_check_prime_bytes_async, + ops::crypto::op_node_gen_prime, + ops::crypto::op_node_gen_prime_async, + ops::crypto::op_node_pbkdf2, + ops::crypto::op_node_pbkdf2_async, + ops::crypto::op_node_hkdf, + ops::crypto::op_node_hkdf_async, + ops::crypto::op_node_generate_secret, + ops::crypto::op_node_generate_secret_async, + ops::crypto::op_node_sign, + ops::crypto::op_node_generate_rsa, + ops::crypto::op_node_generate_rsa_async, + ops::crypto::op_node_dsa_generate, + ops::crypto::op_node_dsa_generate_async, + ops::crypto::op_node_ec_generate, + ops::crypto::op_node_ec_generate_async, + ops::crypto::op_node_ed25519_generate, + ops::crypto::op_node_ed25519_generate_async, + ops::crypto::op_node_x25519_generate, + ops::crypto::op_node_x25519_generate_async, + ops::crypto::op_node_dh_generate_group, + ops::crypto::op_node_dh_generate_group_async, + ops::crypto::op_node_dh_generate, + 
ops::crypto::op_node_dh_generate2, + ops::crypto::op_node_dh_compute_secret, + ops::crypto::op_node_dh_generate_async, + ops::crypto::op_node_verify, + ops::crypto::op_node_random_int, + ops::crypto::op_node_scrypt_sync, + ops::crypto::op_node_scrypt_async, + ops::crypto::op_node_ecdh_generate_keys, + ops::crypto::op_node_ecdh_compute_secret, + ops::crypto::op_node_ecdh_compute_public_key, + ops::crypto::x509::op_node_x509_parse, + ops::crypto::x509::op_node_x509_ca, + ops::crypto::x509::op_node_x509_check_email, + ops::crypto::x509::op_node_x509_fingerprint, + ops::crypto::x509::op_node_x509_fingerprint256, + ops::crypto::x509::op_node_x509_fingerprint512, + ops::crypto::x509::op_node_x509_get_issuer, + ops::crypto::x509::op_node_x509_get_subject, + ops::crypto::x509::op_node_x509_get_valid_from, + ops::crypto::x509::op_node_x509_get_valid_to, + ops::crypto::x509::op_node_x509_get_serial_number, + ops::crypto::x509::op_node_x509_key_usage, + ops::winerror::op_node_sys_to_uv_error, + ops::v8::op_v8_cached_data_version_tag, + ops::v8::op_v8_get_heap_statistics, + ops::idna::op_node_idna_domain_to_ascii, + ops::idna::op_node_idna_domain_to_unicode, + ops::idna::op_node_idna_punycode_decode, + ops::idna::op_node_idna_punycode_encode, + ops::zlib::op_zlib_new, + ops::zlib::op_zlib_close, + ops::zlib::op_zlib_close_if_pending, + ops::zlib::op_zlib_write, + ops::zlib::op_zlib_write_async, + ops::zlib::op_zlib_init, + ops::zlib::op_zlib_reset, + ops::http::op_node_http_request

, op_node_build_os, - - ops::op_require_init_paths, - ops::op_require_node_module_paths, - ops::op_require_proxy_path, - ops::op_require_is_deno_dir_package, - ops::op_require_resolve_deno_dir, - ops::op_require_is_request_relative, - ops::op_require_resolve_lookup_paths, - ops::op_require_try_self_parent_path, - ops::op_require_try_self, - ops::op_require_real_path, - ops::op_require_path_is_absolute, - ops::op_require_path_dirname, - ops::op_require_stat, - ops::op_require_path_resolve, - ops::op_require_path_basename, - ops::op_require_read_file, - ops::op_require_as_file_path, - ops::op_require_resolve_exports, - ops::op_require_read_closest_package_json, - ops::op_require_read_package_scope, - ops::op_require_package_imports_resolve, - ops::op_require_break_on_next_statement, + ops::require::op_require_init_paths, + ops::require::op_require_node_module_paths

, + ops::require::op_require_proxy_path, + ops::require::op_require_is_deno_dir_package, + ops::require::op_require_resolve_deno_dir, + ops::require::op_require_is_request_relative, + ops::require::op_require_resolve_lookup_paths, + ops::require::op_require_try_self_parent_path

, + ops::require::op_require_try_self

, + ops::require::op_require_real_path

, + ops::require::op_require_path_is_absolute, + ops::require::op_require_path_dirname, + ops::require::op_require_stat

, + ops::require::op_require_path_resolve, + ops::require::op_require_path_basename, + ops::require::op_require_read_file

, + ops::require::op_require_as_file_path, + ops::require::op_require_resolve_exports

, + ops::require::op_require_read_closest_package_json

, + ops::require::op_require_read_package_scope

, + ops::require::op_require_package_imports_resolve

, + ops::require::op_require_break_on_next_statement, ], esm_entry_point = "ext:deno_node/02_init.js", esm = [ @@ -269,7 +249,6 @@ deno_core::extension!(deno_node, "00_globals.js", "01_require.js", "02_init.js", - "_core.ts", "_events.mjs", "_fs/_fs_access.ts", "_fs/_fs_appendFile.ts", @@ -413,6 +392,7 @@ deno_core::extension!(deno_node, "internal/fixed_queue.ts", "internal/fs/streams.mjs", "internal/fs/utils.mjs", + "internal/fs/handle.ts", "internal/hide_stack_frames.ts", "internal/http.ts", "internal/idna.ts", @@ -490,11 +470,18 @@ deno_core::extension!(deno_node, "zlib.ts", ], options = { - maybe_npm_resolver: Option>, + maybe_npm_resolver: Option, + fs: deno_fs::FileSystemRc, }, state = |state, options| { + let fs = options.fs; + state.put(fs.clone()); if let Some(npm_resolver) = options.maybe_npm_resolver { - state.put(npm_resolver); + state.put(npm_resolver.clone()); + state.put(Rc::new(NodeResolver::new( + fs, + npm_resolver, + ))) } }, ); @@ -502,10 +489,10 @@ deno_core::extension!(deno_node, pub fn initialize_runtime( js_runtime: &mut JsRuntime, uses_local_node_modules_dir: bool, - maybe_binary_command_name: Option, + maybe_binary_command_name: Option<&str>, ) -> Result<(), AnyError> { let argv0 = if let Some(binary_command_name) = maybe_binary_command_name { - serde_json::to_string(binary_command_name.as_str())? + serde_json::to_string(binary_command_name)? } else { "undefined".to_string() }; @@ -516,10 +503,10 @@ pub fn initialize_runtime( usesLocalNodeModulesDir, argv0 ); + // Make the nodeGlobalThisName unconfigurable here. 
+ Object.defineProperty(globalThis, nodeGlobalThisName, {{ configurable: false }}); }})('{}', {}, {});"#, - NODE_GLOBAL_THIS_NAME.as_str(), - uses_local_node_modules_dir, - argv0 + NODE_GLOBAL_THIS_NAME, uses_local_node_modules_dir, argv0 ); js_runtime.execute_script(located_script_name!(), source_code.into())?; diff --git a/ext/node/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs similarity index 100% rename from ext/node/crypto/cipher.rs rename to ext/node/ops/crypto/cipher.rs diff --git a/ext/node/crypto/dh.rs b/ext/node/ops/crypto/dh.rs similarity index 99% rename from ext/node/crypto/dh.rs rename to ext/node/ops/crypto/dh.rs index 4da9a01bf8..8b756d9a2d 100644 --- a/ext/node/crypto/dh.rs +++ b/ext/node/ops/crypto/dh.rs @@ -63,7 +63,7 @@ impl DiffieHellman { } pub fn new(prime: Prime, generator: usize) -> Self { - let private_key = PrivateKey::new(32); + let private_key = PrivateKey::new(prime.bits()); let generator = BigUint::from_usize(generator).unwrap(); let public_key = private_key.compute_public_key(&generator, &prime); diff --git a/ext/node/crypto/digest.rs b/ext/node/ops/crypto/digest.rs similarity index 100% rename from ext/node/crypto/digest.rs rename to ext/node/ops/crypto/digest.rs diff --git a/ext/node/crypto/mod.rs b/ext/node/ops/crypto/mod.rs similarity index 75% rename from ext/node/crypto/mod.rs rename to ext/node/ops/crypto/mod.rs index d224b40f72..05f2d34f7e 100644 --- a/ext/node/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -4,12 +4,14 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::serde_v8; +use deno_core::task::spawn_blocking; use deno_core::OpState; use deno_core::ResourceId; use deno_core::StringOrBuffer; use deno_core::ZeroCopyBuf; use hkdf::Hkdf; use num_bigint::BigInt; +use num_bigint_dig::BigUint; use num_traits::FromPrimitive; use rand::distributions::Distribution; use rand::distributions::Uniform; @@ -18,12 +20,18 @@ use rand::Rng; use std::future::Future; use 
std::rc::Rc; +use p224::NistP224; +use p256::NistP256; +use p384::NistP384; use rsa::padding::PaddingScheme; use rsa::pkcs8::DecodePrivateKey; use rsa::pkcs8::DecodePublicKey; use rsa::PublicKey; use rsa::RsaPrivateKey; use rsa::RsaPublicKey; +use secp256k1::ecdh::SharedSecret; +use secp256k1::Secp256k1; +use secp256k1::SecretKey; mod cipher; mod dh; @@ -51,12 +59,7 @@ pub async fn op_node_check_prime_async( checks: usize, ) -> Result { // TODO(@littledivy): use rayon for CPU-bound tasks - Ok( - tokio::task::spawn_blocking(move || { - primes::is_probably_prime(&num, checks) - }) - .await?, - ) + Ok(spawn_blocking(move || primes::is_probably_prime(&num, checks)).await?) } #[op] @@ -68,10 +71,8 @@ pub fn op_node_check_prime_bytes_async( // TODO(@littledivy): use rayon for CPU-bound tasks Ok(async move { Ok( - tokio::task::spawn_blocking(move || { - primes::is_probably_prime(&candidate, checks) - }) - .await?, + spawn_blocking(move || primes::is_probably_prime(&candidate, checks)) + .await?, ) }) } @@ -456,7 +457,7 @@ pub async fn op_node_pbkdf2_async( digest: String, keylen: usize, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut derived_key = vec![0; keylen]; pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key) .map(|_| derived_key.into()) @@ -471,7 +472,7 @@ pub fn op_node_generate_secret(buf: &mut [u8]) { #[op] pub async fn op_node_generate_secret_async(len: i32) -> ZeroCopyBuf { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut buf = vec![0u8; len as usize]; rand::thread_rng().fill(&mut buf[..]); buf.into() @@ -529,7 +530,7 @@ pub async fn op_node_hkdf_async( info: ZeroCopyBuf, okm_len: usize, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut okm = vec![0u8; okm_len]; hkdf_sync(&hash, &ikm, &salt, &info, &mut okm)?; Ok(okm.into()) @@ -572,10 +573,7 @@ pub async fn op_node_generate_rsa_async( modulus_length: usize, public_exponent: usize, ) -> 
Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - generate_rsa(modulus_length, public_exponent) - }) - .await? + spawn_blocking(move || generate_rsa(modulus_length, public_exponent)).await? } fn dsa_generate( @@ -629,10 +627,7 @@ pub async fn op_node_dsa_generate_async( modulus_length: usize, divisor_length: usize, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - dsa_generate(modulus_length, divisor_length) - }) - .await? + spawn_blocking(move || dsa_generate(modulus_length, divisor_length)).await? } fn ec_generate( @@ -671,7 +666,7 @@ pub fn op_node_ec_generate( pub async fn op_node_ec_generate_async( named_curve: String, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || ec_generate(&named_curve)).await? + spawn_blocking(move || ec_generate(&named_curve)).await? } fn ed25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { @@ -698,7 +693,7 @@ pub fn op_node_ed25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError #[op] pub async fn op_node_ed25519_generate_async( ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(ed25519_generate).await? + spawn_blocking(ed25519_generate).await? } fn x25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { @@ -733,7 +728,7 @@ pub fn op_node_x25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> #[op] pub async fn op_node_x25519_generate_async( ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(x25519_generate).await? + spawn_blocking(x25519_generate).await? } fn dh_generate_group( @@ -766,7 +761,7 @@ pub fn op_node_dh_generate_group( pub async fn op_node_dh_generate_group_async( group_name: String, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || dh_generate_group(&group_name)).await? + spawn_blocking(move || dh_generate_group(&group_name)).await? 
} fn dh_generate( @@ -794,16 +789,38 @@ pub fn op_node_dh_generate( dh_generate(prime, prime_len, generator) } +// TODO(lev): This duplication should be avoided. +#[op] +pub fn op_node_dh_generate2( + prime: ZeroCopyBuf, + prime_len: usize, + generator: usize, +) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { + dh_generate(Some(prime).as_deref(), prime_len, generator) +} + +#[op] +pub fn op_node_dh_compute_secret( + prime: ZeroCopyBuf, + private_key: ZeroCopyBuf, + their_public_key: ZeroCopyBuf, +) -> Result { + let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref()); + let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref()); + let primei: BigUint = BigUint::from_bytes_be(prime.as_ref()); + let shared_secret: BigUint = pubkey.modpow(&privkey, &primei); + + Ok(shared_secret.to_bytes_be().into()) +} + #[op] pub async fn op_node_dh_generate_async( prime: Option, prime_len: usize, generator: usize, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - dh_generate(prime.as_deref(), prime_len, generator) - }) - .await? + spawn_blocking(move || dh_generate(prime.as_deref(), prime_len, generator)) + .await? } #[op] @@ -879,7 +896,7 @@ pub async fn op_node_scrypt_async( parallelization: u32, maxmem: u32, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut output_buffer = vec![0u8; keylen as usize]; let res = scrypt( password, @@ -901,3 +918,179 @@ pub async fn op_node_scrypt_async( }) .await? 
} + +#[op] +pub fn op_node_ecdh_generate_keys( + curve: &str, + pubbuf: &mut [u8], + privbuf: &mut [u8], +) -> Result { + let mut rng = rand::thread_rng(); + match curve { + "secp256k1" => { + let secp = Secp256k1::new(); + let (privkey, pubkey) = secp.generate_keypair(&mut rng); + pubbuf.copy_from_slice(&pubkey.serialize_uncompressed()); + privbuf.copy_from_slice(&privkey.secret_bytes()); + + Ok(0) + } + "prime256v1" | "secp256r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + "secp384r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + "secp224r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + &_ => todo!(), + } +} + +#[op] +pub fn op_node_ecdh_compute_secret( + curve: &str, + this_priv: Option, + their_pub: &mut [u8], + secret: &mut [u8], +) -> Result<(), AnyError> { + match curve { + "secp256k1" => { + let this_secret_key = SecretKey::from_slice( + this_priv.expect("no private key provided?").as_ref(), + ) + .unwrap(); + let their_public_key = + secp256k1::PublicKey::from_slice(their_pub).unwrap(); + let shared_secret = + SharedSecret::new(&their_public_key, &this_secret_key); + + secret.copy_from_slice(&shared_secret.secret_bytes()); + Ok(()) + } + "prime256v1" | "secp256r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = 
elliptic_curve::SecretKey::::from_slice( + &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + "secp384r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = elliptic_curve::SecretKey::::from_slice( + &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + "secp224r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = elliptic_curve::SecretKey::::from_slice( + &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + &_ => todo!(), + } +} + +#[op] +pub fn op_node_ecdh_compute_public_key( + curve: &str, + privkey: &[u8], + pubkey: &mut [u8], +) -> Result<(), AnyError> { + match curve { + "secp256k1" => { + let secp = Secp256k1::new(); + let secret_key = SecretKey::from_slice(privkey).unwrap(); + let public_key = + secp256k1::PublicKey::from_secret_key(&secp, &secret_key); + + pubkey.copy_from_slice(&public_key.serialize_uncompressed()); + + Ok(()) + } + "prime256v1" | "secp256r1" => { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + 
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + "secp384r1" => { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + "secp224r1" => { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + &_ => todo!(), + } +} + +#[inline] +fn gen_prime(size: usize) -> ZeroCopyBuf { + primes::Prime::generate(size).0.to_bytes_be().into() +} + +#[op] +pub fn op_node_gen_prime(size: usize) -> ZeroCopyBuf { + gen_prime(size) +} + +#[op] +pub async fn op_node_gen_prime_async( + size: usize, +) -> Result { + Ok(spawn_blocking(move || gen_prime(size)).await?) +} diff --git a/ext/node/crypto/primes.rs b/ext/node/ops/crypto/primes.rs similarity index 99% rename from ext/node/crypto/primes.rs rename to ext/node/ops/crypto/primes.rs index d03398f024..15aa643adb 100644 --- a/ext/node/crypto/primes.rs +++ b/ext/node/ops/crypto/primes.rs @@ -8,7 +8,7 @@ use num_traits::Zero; use rand::Rng; use std::ops::Deref; -pub struct Prime(num_bigint_dig::BigUint); +pub struct Prime(pub num_bigint_dig::BigUint); impl Prime { pub fn generate(n: usize) -> Self { diff --git a/ext/node/crypto/x509.rs b/ext/node/ops/crypto/x509.rs similarity index 96% rename from ext/node/crypto/x509.rs rename to ext/node/ops/crypto/x509.rs index 776103e1e7..402c58b720 100644 --- a/ext/node/crypto/x509.rs +++ b/ext/node/ops/crypto/x509.rs @@ -228,6 +228,8 @@ fn x509name_to_string( name: &X509Name, oid_registry: &oid_registry::OidRegistry, ) -> Result { + // Lifted from https://github.com/rusticata/x509-parser/blob/4d618c2ed6b1fc102df16797545895f7c67ee0fe/src/x509.rs#L543-L566 + // since it's a private function (Copyright 2017 Pierre Chifflier) 
name.iter_rdn().fold(Ok(String::new()), |acc, rdn| { acc.and_then(|mut _vec| { rdn @@ -244,13 +246,13 @@ fn x509name_to_string( let rdn = format!("{}={}", abbrev, val_str); match _vec2.len() { 0 => Ok(rdn), - _ => Ok(_vec2 + " + " + &rdn), + _ => Ok(_vec2 + " + " + rdn.as_str()), } }) }) .map(|v| match _vec.len() { 0 => v, - _ => _vec + "\n" + &v, + _ => _vec + "\n" + v.as_str(), }) }) }) diff --git a/ext/node/ops/http.rs b/ext/node/ops/http.rs new file mode 100644 index 0000000000..cc7dbf5220 --- /dev/null +++ b/ext/node/ops/http.rs @@ -0,0 +1,109 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::type_error; +use deno_core::error::AnyError; +use deno_core::op; +use deno_core::url::Url; +use deno_core::AsyncRefCell; +use deno_core::ByteString; +use deno_core::CancelFuture; +use deno_core::CancelHandle; +use deno_core::OpState; +use deno_fetch::get_or_create_client_from_state; +use deno_fetch::FetchCancelHandle; +use deno_fetch::FetchRequestBodyResource; +use deno_fetch::FetchRequestResource; +use deno_fetch::FetchReturn; +use deno_fetch::HttpClientResource; +use deno_fetch::MpscByteStream; +use reqwest::header::HeaderMap; +use reqwest::header::HeaderName; +use reqwest::header::HeaderValue; +use reqwest::header::CONTENT_LENGTH; +use reqwest::Body; +use reqwest::Method; + +#[op] +pub fn op_node_http_request

( + state: &mut OpState, + method: ByteString, + url: String, + headers: Vec<(ByteString, ByteString)>, + client_rid: Option, + has_body: bool, +) -> Result +where + P: crate::NodePermissions + 'static, +{ + let client = if let Some(rid) = client_rid { + let r = state.resource_table.get::(rid)?; + r.client.clone() + } else { + get_or_create_client_from_state(state)? + }; + + let method = Method::from_bytes(&method)?; + let url = Url::parse(&url)?; + + { + let permissions = state.borrow_mut::

(); + permissions.check_net_url(&url, "ClientRequest")?; + } + + let mut header_map = HeaderMap::new(); + for (key, value) in headers { + let name = HeaderName::from_bytes(&key) + .map_err(|err| type_error(err.to_string()))?; + let v = HeaderValue::from_bytes(&value) + .map_err(|err| type_error(err.to_string()))?; + + header_map.append(name, v); + } + + let mut request = client.request(method.clone(), url).headers(header_map); + + let request_body_rid = if has_body { + // If no body is passed, we return a writer for streaming the body. + let (stream, tx) = MpscByteStream::new(); + + request = request.body(Body::wrap_stream(stream)); + + let request_body_rid = state.resource_table.add(FetchRequestBodyResource { + body: AsyncRefCell::new(tx), + cancel: CancelHandle::default(), + }); + + Some(request_body_rid) + } else { + // POST and PUT requests should always have a 0 length content-length, + // if there is no body. https://fetch.spec.whatwg.org/#http-network-or-cache-fetch + if matches!(method, Method::POST | Method::PUT) { + request = request.header(CONTENT_LENGTH, HeaderValue::from(0)); + } + None + }; + + let cancel_handle = CancelHandle::new_rc(); + let cancel_handle_ = cancel_handle.clone(); + + let fut = async move { + request + .send() + .or_cancel(cancel_handle_) + .await + .map(|res| res.map_err(|err| type_error(err.to_string()))) + }; + + let request_rid = state + .resource_table + .add(FetchRequestResource(Box::pin(fut))); + + let cancel_handle_rid = + state.resource_table.add(FetchCancelHandle(cancel_handle)); + + Ok(FetchReturn { + request_rid, + request_body_rid, + cancel_handle_rid: Some(cancel_handle_rid), + }) +} diff --git a/ext/node/idna.rs b/ext/node/ops/idna.rs similarity index 100% rename from ext/node/idna.rs rename to ext/node/ops/idna.rs diff --git a/ext/node/ops/mod.rs b/ext/node/ops/mod.rs new file mode 100644 index 0000000000..2bbf02d343 --- /dev/null +++ b/ext/node/ops/mod.rs @@ -0,0 +1,9 @@ +// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. + +pub mod crypto; +pub mod http; +pub mod idna; +pub mod require; +pub mod v8; +pub mod winerror; +pub mod zlib; diff --git a/ext/node/ops.rs b/ext/node/ops/require.rs similarity index 69% rename from ext/node/ops.rs rename to ext/node/ops/require.rs index 3db23b5eaf..eb092ab862 100644 --- a/ext/node/ops.rs +++ b/ext/node/ops/require.rs @@ -7,21 +7,21 @@ use deno_core::normalize_path; use deno_core::op; use deno_core::url::Url; use deno_core::JsRuntimeInspector; +use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_fs::FileSystemRc; use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use crate::NodeEnv; -use crate::NodeFs; - -use super::resolution; -use super::NodeModuleKind; -use super::NodePermissions; -use super::NodeResolutionMode; -use super::PackageJson; -use super::RequireNpmResolver; +use crate::resolution; +use crate::NodeModuleKind; +use crate::NodePermissions; +use crate::NodeResolutionMode; +use crate::NodeResolver; +use crate::NpmResolverRc; +use crate::PackageJson; fn ensure_read_permission

( state: &mut OpState, @@ -30,11 +30,8 @@ fn ensure_read_permission

( where P: NodePermissions + 'static, { - let resolver = { - let resolver = state.borrow::>(); - resolver.clone() - }; - let permissions = state.borrow_mut::

(); + let resolver = state.borrow::(); + let permissions = state.borrow::

(); resolver.ensure_read_permission(permissions, file_path) } @@ -89,23 +86,24 @@ pub fn op_require_init_paths() -> Vec { } #[op] -pub fn op_require_node_module_paths( +pub fn op_require_node_module_paths

( state: &mut OpState, from: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { + let fs = state.borrow::(); // Guarantee that "from" is absolute. let from = deno_core::resolve_path( &from, - &(Env::Fs::current_dir()).context("Unable to get CWD")?, + &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?, ) .unwrap() .to_file_path() .unwrap(); - ensure_read_permission::(state, &from)?; + ensure_read_permission::

(state, &from)?; if cfg!(windows) { // return root node_modules when path is 'D:\\'. @@ -130,16 +128,11 @@ where let mut current_path = from.as_path(); let mut maybe_parent = Some(current_path); while let Some(parent) = maybe_parent { - if !parent.ends_with("/node_modules") { + if !parent.ends_with("node_modules") { paths.push(parent.join("node_modules").to_string_lossy().to_string()); - current_path = parent; - maybe_parent = current_path.parent(); } - } - - if !cfg!(windows) { - // Append /node_modules to handle root paths. - paths.push("/node_modules".to_string()); + current_path = parent; + maybe_parent = current_path.parent(); } Ok(paths) @@ -191,11 +184,11 @@ fn op_require_resolve_deno_dir( request: String, parent_filename: String, ) -> Option { - let resolver = state.borrow::>(); + let resolver = state.borrow::(); resolver .resolve_package_folder_from_package( &request, - &PathBuf::from(parent_filename), + &ModuleSpecifier::from_file_path(parent_filename).unwrap(), NodeResolutionMode::Execution, ) .ok() @@ -204,8 +197,8 @@ fn op_require_resolve_deno_dir( #[op] fn op_require_is_deno_dir_package(state: &mut OpState, path: String) -> bool { - let resolver = state.borrow::>(); - resolver.in_npm_package(&PathBuf::from(path)) + let resolver = state.borrow::(); + resolver.in_npm_package_at_path(&PathBuf::from(path)) } #[op] @@ -255,16 +248,17 @@ fn op_require_path_is_absolute(p: String) -> bool { } #[op] -fn op_require_stat( +fn op_require_stat

( state: &mut OpState, path: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let path = PathBuf::from(path); - ensure_read_permission::(state, &path)?; - if let Ok(metadata) = Env::Fs::metadata(&path) { + ensure_read_permission::

(state, &path)?; + let fs = state.borrow::(); + if let Ok(metadata) = fs.stat_sync(&path) { if metadata.is_file { return Ok(0); } else { @@ -276,24 +270,18 @@ where } #[op] -fn op_require_real_path( +fn op_require_real_path

( state: &mut OpState, request: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let path = PathBuf::from(request); - ensure_read_permission::(state, &path)?; - let mut canonicalized_path = Env::Fs::canonicalize(&path)?; - if cfg!(windows) { - canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - } + ensure_read_permission::

(state, &path)?; + let fs = state.borrow::(); + let canonicalized_path = + deno_core::strip_unc_prefix(fs.realpath_sync(&path)?); Ok(canonicalized_path.to_string_lossy().to_string()) } @@ -334,14 +322,14 @@ fn op_require_path_basename(request: String) -> Result { } #[op] -fn op_require_try_self_parent_path( +fn op_require_try_self_parent_path

( state: &mut OpState, has_parent: bool, maybe_parent_filename: Option, maybe_parent_id: Option, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { if !has_parent { return Ok(None); @@ -353,8 +341,9 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - if let Ok(cwd) = Env::Fs::current_dir() { - ensure_read_permission::(state, &cwd)?; + let fs = state.borrow::(); + if let Ok(cwd) = fs.cwd() { + ensure_read_permission::

(state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); } } @@ -363,26 +352,26 @@ where } #[op] -fn op_require_try_self( +fn op_require_try_self

( state: &mut OpState, parent_path: Option, request: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { if parent_path.is_none() { return Ok(None); } - let resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); - let pkg = resolution::get_package_scope_config::( - &Url::from_file_path(parent_path.unwrap()).unwrap(), - &*resolver, - permissions, - ) - .ok(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::

(); + let pkg = node_resolver + .get_package_scope_config( + &Url::from_file_path(parent_path.unwrap()).unwrap(), + permissions, + ) + .ok(); if pkg.is_none() { return Ok(None); } @@ -408,34 +397,35 @@ where let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap(); if let Some(exports) = &pkg.exports { - resolution::package_exports_resolve::( - &pkg.path, - expansion, - exports, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(r.to_string_lossy().to_string())) + node_resolver + .package_exports_resolve( + &pkg.path, + expansion, + exports, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(r.to_string_lossy().to_string())) } else { Ok(None) } } #[op] -fn op_require_read_file( +fn op_require_read_file

( state: &mut OpState, file_path: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let file_path = PathBuf::from(file_path); - ensure_read_permission::(state, &file_path)?; - Ok(Env::Fs::read_to_string(file_path)?) + ensure_read_permission::

(state, &file_path)?; + let fs = state.borrow::(); + Ok(fs.read_to_string(&file_path)?) } #[op] @@ -450,7 +440,7 @@ pub fn op_require_as_file_path(file_or_url: String) -> String { } #[op] -fn op_require_resolve_exports( +fn op_require_resolve_exports

( state: &mut OpState, uses_local_node_modules_dir: bool, modules_path: String, @@ -460,118 +450,116 @@ fn op_require_resolve_exports( parent_path: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { - let resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let fs = state.borrow::(); + let npm_resolver = state.borrow::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::

(); - let pkg_path = if resolver.in_npm_package(&PathBuf::from(&modules_path)) + let pkg_path = if npm_resolver + .in_npm_package_at_path(&PathBuf::from(&modules_path)) && !uses_local_node_modules_dir { modules_path } else { let orignal = modules_path.clone(); let mod_dir = path_resolve(vec![modules_path, name]); - if Env::Fs::is_dir(&mod_dir) { + if fs.is_dir(Path::new(&mod_dir)) { mod_dir } else { orignal } }; - let pkg = PackageJson::load::( - &*resolver, + let pkg = node_resolver.load_package_json( permissions, PathBuf::from(&pkg_path).join("package.json"), )?; if let Some(exports) = &pkg.exports { let referrer = Url::from_file_path(parent_path).unwrap(); - resolution::package_exports_resolve::( - &pkg.path, - format!(".{expansion}"), - exports, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(r.to_string_lossy().to_string())) + node_resolver + .package_exports_resolve( + &pkg.path, + format!(".{expansion}"), + exports, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(r.to_string_lossy().to_string())) } else { Ok(None) } } #[op] -fn op_require_read_closest_package_json( +fn op_require_read_closest_package_json

( state: &mut OpState, filename: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { - ensure_read_permission::( + ensure_read_permission::

( state, PathBuf::from(&filename).parent().unwrap(), )?; - let resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); - resolution::get_closest_package_json::( + let node_resolver = state.borrow::>(); + let permissions = state.borrow::

(); + node_resolver.get_closest_package_json( &Url::from_file_path(filename).unwrap(), - &*resolver, permissions, ) } #[op] -fn op_require_read_package_scope( +fn op_require_read_package_scope

( state: &mut OpState, package_json_path: String, ) -> Option where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { - let resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::

(); let package_json_path = PathBuf::from(package_json_path); - PackageJson::load::(&*resolver, permissions, package_json_path).ok() + node_resolver + .load_package_json(permissions, package_json_path) + .ok() } #[op] -fn op_require_package_imports_resolve( +fn op_require_package_imports_resolve

( state: &mut OpState, parent_filename: String, request: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let parent_path = PathBuf::from(&parent_filename); - ensure_read_permission::(state, &parent_path)?; - let resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); - let pkg = PackageJson::load::( - &*resolver, - permissions, - parent_path.join("package.json"), - )?; + ensure_read_permission::

(state, &parent_path)?; + let node_resolver = state.borrow::>(); + let permissions = state.borrow::

(); + let pkg = node_resolver + .load_package_json(permissions, parent_path.join("package.json"))?; if pkg.imports.is_some() { let referrer = deno_core::url::Url::from_file_path(&parent_filename).unwrap(); - let r = resolution::package_imports_resolve::( - &request, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(Url::from_file_path(r).unwrap().to_string())); - state.put(resolver); - r + node_resolver + .package_imports_resolve( + &request, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(Url::from_file_path(r).unwrap().to_string())) } else { Ok(None) } diff --git a/ext/node/v8.rs b/ext/node/ops/v8.rs similarity index 100% rename from ext/node/v8.rs rename to ext/node/ops/v8.rs diff --git a/ext/node/winerror.rs b/ext/node/ops/winerror.rs similarity index 100% rename from ext/node/winerror.rs rename to ext/node/ops/winerror.rs diff --git a/ext/node/zlib/alloc.rs b/ext/node/ops/zlib/alloc.rs similarity index 100% rename from ext/node/zlib/alloc.rs rename to ext/node/ops/zlib/alloc.rs diff --git a/ext/node/zlib/mod.rs b/ext/node/ops/zlib/mod.rs similarity index 100% rename from ext/node/zlib/mod.rs rename to ext/node/ops/zlib/mod.rs diff --git a/ext/node/zlib/mode.rs b/ext/node/ops/zlib/mode.rs similarity index 100% rename from ext/node/zlib/mode.rs rename to ext/node/ops/zlib/mode.rs diff --git a/ext/node/zlib/stream.rs b/ext/node/ops/zlib/stream.rs similarity index 100% rename from ext/node/zlib/stream.rs rename to ext/node/ops/zlib/stream.rs diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs index 60f50ad787..95ca8b5618 100644 --- a/ext/node/package_json.rs +++ b/ext/node/package_json.rs @@ -1,10 +1,9 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; -use super::RequireNpmResolver; +use super::NpmResolver; use deno_core::anyhow; use deno_core::anyhow::bail; @@ -62,16 +61,18 @@ impl PackageJson { } } - pub fn load( - resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, + pub fn load( + fs: &dyn deno_fs::FileSystem, + resolver: &dyn NpmResolver, + permissions: &dyn NodePermissions, path: PathBuf, ) -> Result { resolver.ensure_read_permission(permissions, &path)?; - Self::load_skip_read_permission::(path) + Self::load_skip_read_permission(fs, path) } - pub fn load_skip_read_permission( + pub fn load_skip_read_permission( + fs: &dyn deno_fs::FileSystem, path: PathBuf, ) -> Result { assert!(path.is_absolute()); @@ -80,7 +81,7 @@ impl PackageJson { return Ok(CACHE.with(|cache| cache.borrow()[&path].clone())); } - let source = match Fs::read_to_string(&path) { + let source = match fs.read_to_string(&path) { Ok(source) => source, Err(err) if err.kind() == ErrorKind::NotFound => { return Ok(PackageJson::empty(path)); @@ -88,7 +89,7 @@ impl PackageJson { Err(err) => bail!( "Error loading package.json at {}. {:#}", path.display(), - err + AnyError::from(err), ), }; diff --git a/ext/node/polyfill.rs b/ext/node/polyfill.rs index 1fbb4afa3d..434c20b03d 100644 --- a/ext/node/polyfill.rs +++ b/ext/node/polyfill.rs @@ -1,202 +1,217 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -pub fn find_builtin_node_module( - module_name: &str, -) -> Option<&NodeModulePolyfill> { +/// e.g. 
`is_builtin_node_module("assert")` +pub fn is_builtin_node_module(module_name: &str) -> bool { SUPPORTED_BUILTIN_NODE_MODULES .iter() - .find(|m| m.name == module_name) -} - -pub fn is_builtin_node_module(module_name: &str) -> bool { - find_builtin_node_module(module_name).is_some() + .any(|m| m.module_name() == module_name) } pub struct NodeModulePolyfill { /// Name of the module like "assert" or "timers/promises" - pub name: &'static str, pub specifier: &'static str, + pub ext_specifier: &'static str, } +impl NodeModulePolyfill { + pub fn module_name(&self) -> &'static str { + debug_assert!(self.specifier.starts_with("node:")); + &self.specifier[5..] + } +} + +// NOTE(bartlomieju): keep this list in sync with `ext/node/polyfills/01_require.js` pub static SUPPORTED_BUILTIN_NODE_MODULES: &[NodeModulePolyfill] = &[ NodeModulePolyfill { - name: "assert", - specifier: "ext:deno_node/assert.ts", + specifier: "node:assert", + ext_specifier: "ext:deno_node/assert.ts", }, NodeModulePolyfill { - name: "assert/strict", - specifier: "ext:deno_node/assert/strict.ts", + specifier: "node:assert/strict", + ext_specifier: "ext:deno_node/assert/strict.ts", }, NodeModulePolyfill { - name: "async_hooks", - specifier: "ext:deno_node/async_hooks.ts", + specifier: "node:async_hooks", + ext_specifier: "ext:deno_node/async_hooks.ts", }, NodeModulePolyfill { - name: "buffer", - specifier: "ext:deno_node/buffer.ts", + specifier: "node:buffer", + ext_specifier: "ext:deno_node/buffer.ts", }, NodeModulePolyfill { - name: "child_process", - specifier: "ext:deno_node/child_process.ts", + specifier: "node:child_process", + ext_specifier: "ext:deno_node/child_process.ts", }, NodeModulePolyfill { - name: "cluster", - specifier: "ext:deno_node/cluster.ts", + specifier: "node:cluster", + ext_specifier: "ext:deno_node/cluster.ts", }, NodeModulePolyfill { - name: "console", - specifier: "ext:deno_node/console.ts", + specifier: "node:console", + ext_specifier: "ext:deno_node/console.ts", }, 
NodeModulePolyfill { - name: "constants", - specifier: "ext:deno_node/constants.ts", + specifier: "node:constants", + ext_specifier: "ext:deno_node/constants.ts", }, NodeModulePolyfill { - name: "crypto", - specifier: "ext:deno_node/crypto.ts", + specifier: "node:crypto", + ext_specifier: "ext:deno_node/crypto.ts", }, NodeModulePolyfill { - name: "dgram", - specifier: "ext:deno_node/dgram.ts", + specifier: "node:dgram", + ext_specifier: "ext:deno_node/dgram.ts", }, NodeModulePolyfill { - name: "dns", - specifier: "ext:deno_node/dns.ts", + specifier: "node:diagnostics_channel", + ext_specifier: "ext:deno_node/diagnostics_channel.ts", }, NodeModulePolyfill { - name: "dns/promises", - specifier: "ext:deno_node/dns/promises.ts", + specifier: "node:dns", + ext_specifier: "ext:deno_node/dns.ts", }, NodeModulePolyfill { - name: "domain", - specifier: "ext:deno_node/domain.ts", + specifier: "node:dns/promises", + ext_specifier: "ext:deno_node/dns/promises.ts", }, NodeModulePolyfill { - name: "events", - specifier: "ext:deno_node/events.ts", + specifier: "node:domain", + ext_specifier: "ext:deno_node/domain.ts", }, NodeModulePolyfill { - name: "fs", - specifier: "ext:deno_node/fs.ts", + specifier: "node:events", + ext_specifier: "ext:deno_node/events.ts", }, NodeModulePolyfill { - name: "fs/promises", - specifier: "ext:deno_node/fs/promises.ts", + specifier: "node:fs", + ext_specifier: "ext:deno_node/fs.ts", }, NodeModulePolyfill { - name: "http", - specifier: "ext:deno_node/http.ts", + specifier: "node:fs/promises", + ext_specifier: "ext:deno_node/fs/promises.ts", }, NodeModulePolyfill { - name: "https", - specifier: "ext:deno_node/https.ts", + specifier: "node:http", + ext_specifier: "ext:deno_node/http.ts", }, NodeModulePolyfill { - name: "module", - specifier: "ext:deno_node/01_require.js", + specifier: "node:http2", + ext_specifier: "ext:deno_node/http2.ts", }, NodeModulePolyfill { - name: "net", - specifier: "ext:deno_node/net.ts", + specifier: "node:https", + 
ext_specifier: "ext:deno_node/https.ts", }, NodeModulePolyfill { - name: "os", - specifier: "ext:deno_node/os.ts", + specifier: "node:module", + ext_specifier: "ext:deno_node/01_require.js", }, NodeModulePolyfill { - name: "path", - specifier: "ext:deno_node/path.ts", + specifier: "node:net", + ext_specifier: "ext:deno_node/net.ts", }, NodeModulePolyfill { - name: "path/posix", - specifier: "ext:deno_node/path/posix.ts", + specifier: "node:os", + ext_specifier: "ext:deno_node/os.ts", }, NodeModulePolyfill { - name: "path/win32", - specifier: "ext:deno_node/path/win32.ts", + specifier: "node:path", + ext_specifier: "ext:deno_node/path.ts", }, NodeModulePolyfill { - name: "perf_hooks", - specifier: "ext:deno_node/perf_hooks.ts", + specifier: "node:path/posix", + ext_specifier: "ext:deno_node/path/posix.ts", }, NodeModulePolyfill { - name: "process", - specifier: "ext:deno_node/process.ts", + specifier: "node:path/win32", + ext_specifier: "ext:deno_node/path/win32.ts", }, NodeModulePolyfill { - name: "querystring", - specifier: "ext:deno_node/querystring.ts", + specifier: "node:perf_hooks", + ext_specifier: "ext:deno_node/perf_hooks.ts", }, NodeModulePolyfill { - name: "readline", - specifier: "ext:deno_node/readline.ts", + specifier: "node:process", + ext_specifier: "ext:deno_node/process.ts", }, NodeModulePolyfill { - name: "stream", - specifier: "ext:deno_node/stream.ts", + specifier: "node:punycode", + ext_specifier: "ext:deno_node/punycode.ts", }, NodeModulePolyfill { - name: "stream/consumers", - specifier: "ext:deno_node/stream/consumers.mjs", + specifier: "node:querystring", + ext_specifier: "ext:deno_node/querystring.ts", }, NodeModulePolyfill { - name: "stream/promises", - specifier: "ext:deno_node/stream/promises.mjs", + specifier: "node:readline", + ext_specifier: "ext:deno_node/readline.ts", }, NodeModulePolyfill { - name: "stream/web", - specifier: "ext:deno_node/stream/web.ts", + specifier: "node:stream", + ext_specifier: "ext:deno_node/stream.ts", }, 
NodeModulePolyfill { - name: "string_decoder", - specifier: "ext:deno_node/string_decoder.ts", + specifier: "node:stream/consumers", + ext_specifier: "ext:deno_node/stream/consumers.mjs", }, NodeModulePolyfill { - name: "sys", - specifier: "ext:deno_node/sys.ts", + specifier: "node:stream/promises", + ext_specifier: "ext:deno_node/stream/promises.mjs", }, NodeModulePolyfill { - name: "timers", - specifier: "ext:deno_node/timers.ts", + specifier: "node:stream/web", + ext_specifier: "ext:deno_node/stream/web.ts", }, NodeModulePolyfill { - name: "timers/promises", - specifier: "ext:deno_node/timers/promises.ts", + specifier: "node:string_decoder", + ext_specifier: "ext:deno_node/string_decoder.ts", }, NodeModulePolyfill { - name: "tls", - specifier: "ext:deno_node/tls.ts", + specifier: "node:sys", + ext_specifier: "ext:deno_node/sys.ts", }, NodeModulePolyfill { - name: "tty", - specifier: "ext:deno_node/tty.ts", + specifier: "node:timers", + ext_specifier: "ext:deno_node/timers.ts", }, NodeModulePolyfill { - name: "url", - specifier: "ext:deno_node/url.ts", + specifier: "node:timers/promises", + ext_specifier: "ext:deno_node/timers/promises.ts", }, NodeModulePolyfill { - name: "util", - specifier: "ext:deno_node/util.ts", + specifier: "node:tls", + ext_specifier: "ext:deno_node/tls.ts", }, NodeModulePolyfill { - name: "util/types", - specifier: "ext:deno_node/util/types.ts", + specifier: "node:tty", + ext_specifier: "ext:deno_node/tty.ts", }, NodeModulePolyfill { - name: "v8", - specifier: "ext:deno_node/v8.ts", + specifier: "node:url", + ext_specifier: "ext:deno_node/url.ts", }, NodeModulePolyfill { - name: "vm", - specifier: "ext:deno_node/vm.ts", + specifier: "node:util", + ext_specifier: "ext:deno_node/util.ts", }, NodeModulePolyfill { - name: "worker_threads", - specifier: "ext:deno_node/worker_threads.ts", + specifier: "node:util/types", + ext_specifier: "ext:deno_node/util/types.ts", }, NodeModulePolyfill { - name: "zlib", - specifier: "ext:deno_node/zlib.ts", 
+ specifier: "node:v8", + ext_specifier: "ext:deno_node/v8.ts", + }, + NodeModulePolyfill { + specifier: "node:vm", + ext_specifier: "ext:deno_node/vm.ts", + }, + NodeModulePolyfill { + specifier: "node:worker_threads", + ext_specifier: "ext:deno_node/worker_threads.ts", + }, + NodeModulePolyfill { + specifier: "node:zlib", + ext_specifier: "ext:deno_node/zlib.ts", }, ]; diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 42ead05e32..508a32e126 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -16,7 +16,7 @@ const { ArrayPrototypeSplice, ObjectGetOwnPropertyDescriptor, ObjectGetPrototypeOf, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectSetPrototypeOf, ObjectKeys, ObjectEntries, @@ -132,6 +132,7 @@ import zlib from "ext:deno_node/zlib.ts"; const nativeModuleExports = ObjectCreate(null); const builtinModules = []; +// NOTE(bartlomieju): keep this list in sync with `ext/node/polyfill.rs` function setupBuiltinModules() { const nodeModules = { "_http_agent": _httpAgent, @@ -433,7 +434,7 @@ const CircularRequirePrototypeWarningProxy = new Proxy({}, { getOwnPropertyDescriptor(target, prop) { if ( - ObjectPrototypeHasOwnProperty(target, prop) || prop === "__esModule" + ObjectHasOwn(target, prop) || prop === "__esModule" ) { return ObjectGetOwnPropertyDescriptor(target, prop); } @@ -557,15 +558,21 @@ Module._findPath = function (request, paths, isMain, parentPath) { } } - const isDenoDirPackage = ops.op_require_is_deno_dir_package( - curPath, - ); - const isRelative = ops.op_require_is_request_relative( - request, - ); - const basePath = (isDenoDirPackage && !isRelative) - ? 
pathResolve(curPath, packageSpecifierSubPath(request)) - : pathResolve(curPath, request); + let basePath; + + if (usesLocalNodeModulesDir) { + basePath = pathResolve(curPath, request); + } else { + const isDenoDirPackage = ops.op_require_is_deno_dir_package( + curPath, + ); + const isRelative = ops.op_require_is_request_relative( + request, + ); + basePath = (isDenoDirPackage && !isRelative) + ? pathResolve(curPath, packageSpecifierSubPath(request)) + : pathResolve(curPath, request); + } let filename; const rc = stat(basePath); @@ -600,6 +607,11 @@ Module._findPath = function (request, paths, isMain, parentPath) { return false; }; +/** + * Get a list of potential module directories + * @param {string} fromPath The directory name of the module + * @returns {string[]} List of module directories + */ Module._nodeModulePaths = function (fromPath) { return ops.op_require_node_module_paths(fromPath); }; @@ -615,7 +627,9 @@ Module._resolveLookupPaths = function (request, parent) { return paths; } - if (parent?.filename && parent.filename.length > 0) { + if ( + !usesLocalNodeModulesDir && parent?.filename && parent.filename.length > 0 + ) { const denoDirPath = ops.op_require_resolve_deno_dir( request, parent.filename, @@ -853,9 +867,11 @@ Module.prototype.load = function (filename) { throw Error("Module already loaded"); } - this.filename = filename; + // Canonicalize the path so it's not pointing to the symlinked directory + // in `node_modules` directory of the referrer. + this.filename = ops.op_require_real_path(filename); this.paths = Module._nodeModulePaths( - pathDirname(filename), + pathDirname(this.filename), ); const extension = findLongestRegisteredExtension(filename); // allow .mjs to be overriden @@ -897,7 +913,7 @@ Module.prototype.require = function (id) { Module.wrapper = [ // We provide the non-standard APIs in the CommonJS wrapper // to avoid exposing them in global namespace. 
- "(function (exports, require, module, __filename, __dirname, globalThis) { const { Buffer, clearImmediate, clearInterval, clearTimeout, console, global, process, setImmediate, setInterval, setTimeout} = globalThis; var window = undefined; (function () {", + "(function (exports, require, module, __filename, __dirname, globalThis) { const { Buffer, clearImmediate, clearInterval, clearTimeout, console, global, process, setImmediate, setInterval, setTimeout, performance} = globalThis; var window = undefined; (function () {", "\n}).call(this); })", ]; Module.wrap = function (script) { @@ -1098,6 +1114,11 @@ Module.syncBuiltinESMExports = function syncBuiltinESMExports() { throw new Error("not implemented"); }; +// Mostly used by tools like ts-node. +Module.runMain = function () { + Module._load(process.argv[1], null, true); +}; + Module.Module = Module; nativeModuleExports.module = Module; diff --git a/ext/node/polyfills/02_init.js b/ext/node/polyfills/02_init.js index d419c3bcaa..a2fba8c0c5 100644 --- a/ext/node/polyfills/02_init.js +++ b/ext/node/polyfills/02_init.js @@ -34,17 +34,19 @@ function initialize( nodeGlobals.setImmediate = nativeModuleExports["timers"].setImmediate; nodeGlobals.setInterval = nativeModuleExports["timers"].setInterval; nodeGlobals.setTimeout = nativeModuleExports["timers"].setTimeout; + nodeGlobals.performance = nativeModuleExports["perf_hooks"].performance; // add a hidden global for the esm code to use in order to reliably // get node's globalThis ObjectDefineProperty(globalThis, nodeGlobalThisName, { enumerable: false, - writable: false, + configurable: true, value: nodeGlobalThis, }); // FIXME(bartlomieju): not nice to depend on `Deno` namespace here // but it's the only way to get `args` and `version` and this point. internals.__bootstrapNodeProcess(argv0, Deno.args, Deno.version); + internals.__initWorkerThreads(); // `Deno[Deno.internal].requireImpl` will be unreachable after this line. 
delete internals.requireImpl; } diff --git a/ext/node/polyfills/README.md b/ext/node/polyfills/README.md index 11de64bd99..671d78bc4f 100644 --- a/ext/node/polyfills/README.md +++ b/ext/node/polyfills/README.md @@ -99,75 +99,9 @@ const leftPad = require("left-pad"); ## Contributing -### Setting up the test runner +### Setting up the test runner and running tests -This library contains automated tests pulled directly from the Node.js repo in -order ensure compatibility. - -Setting up the test runner is as simple as running the `node/_tools/setup.ts` -file, this will pull the configured tests in and then add them to the test -workflow. - -```zsh -$ deno task node:setup -``` - -You can additionally pass the `-y`/`-n` flag to use test cache or generating -tests from scratch instead of being prompted at the moment of running it. - -```zsh -# Will use downloaded tests instead of prompting user -$ deno run --allow-read --allow-net --allow-write node/_tools/setup.ts -y -# Will not prompt but will download and extract the tests directly -$ deno run --allow-read --allow-net --allow-write node/_tools/setup.ts -n -``` - -To run the tests you have set up, do the following: - -```zsh -$ deno test --allow-read --allow-run node/_tools/test.ts -``` - -If you want to run specific Node.js test files, you can use the following -command - -```shellsession -$ deno test -A node/_tools/test.ts -- -``` - -For example, if you want to run only -`node/_tools/test/parallel/test-event-emitter-check-listener-leaks.js`, you can -use: - -```shellsession -$ deno test -A node/_tools/test.ts -- test-event-emitter-check-listener-leaks.js -``` - -If you want to run all test files which contains `event-emitter` in filename, -then you can use: - -```shellsession -$ deno test -A node/_tools/test.ts -- event-emitter -``` - -The test should be passing with the latest deno, so if the test fails, try the -following: - -- `$ deno upgrade` -- `$ git submodule update --init` -- Use - [`--unstable` 
flag](https://deno.land/manual@v1.15.3/runtime/stability#standard-modules) - -To enable new tests, simply add a new entry inside `node/_tools/config.json` -under the `tests` property. The structure this entries must have has to resemble -a path inside `https://github.com/nodejs/node/tree/main/test`. - -Adding a new entry under the `ignore` option will indicate the test runner that -it should not regenerate that file from scratch the next time the setup is run, -this is specially useful to keep track of files that have been manually edited -to pass certain tests. However, avoid doing such manual changes to the test -files, since that may cover up inconsistencies between the node library and -actual node behavior. +See [tools/node_compat/README.md](../../../tools/node_compat/README.md). ### Best practices @@ -226,4 +160,4 @@ It's not as clean, but prevents the callback being called twice. Node compatibility can be measured by how many native Node tests pass. If you'd like to know what you can work on, check out the list of Node tests remaining -[here](_tools/TODO.md). +[here](../../../tools/node_compat/TODO.md). diff --git a/ext/node/polyfills/_core.ts b/ext/node/polyfills/_core.ts deleted file mode 100644 index af619378f3..0000000000 --- a/ext/node/polyfills/_core.ts +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -// This module provides an interface to `Deno.core`. For environments -// that don't have access to `Deno.core` some APIs are polyfilled, while -// some are unavailble and throw on call. -// Note: deno_std shouldn't use Deno.core namespace. We should minimize these -// usages. 
- -import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; - -// deno-lint-ignore no-explicit-any -let DenoCore: any; - -// deno-lint-ignore no-explicit-any -const { Deno } = globalThis as any; - -// @ts-ignore Deno.core is not defined in types -if (Deno?.[Deno.internal]?.core) { - // @ts-ignore Deno[Deno.internal].core is not defined in types - DenoCore = Deno[Deno.internal].core; -} else if (Deno?.core) { - // @ts-ignore Deno.core is not defined in types - DenoCore = Deno.core; -} else { - DenoCore = {}; -} - -export const core = { - runMicrotasks: DenoCore.runMicrotasks ?? function () { - throw new Error( - "Deno.core.runMicrotasks() is not supported in this environment", - ); - }, - setHasTickScheduled: DenoCore.setHasTickScheduled ?? function () { - throw new Error( - "Deno.core.setHasTickScheduled() is not supported in this environment", - ); - }, - hasTickScheduled: DenoCore.hasTickScheduled ?? function () { - throw new Error( - "Deno.core.hasTickScheduled() is not supported in this environment", - ); - }, - setNextTickCallback: DenoCore.setNextTickCallback ?? undefined, - setMacrotaskCallback: DenoCore.setMacrotaskCallback ?? function () { - throw new Error( - "Deno.core.setNextTickCallback() is not supported in this environment", - ); - }, - evalContext: DenoCore.evalContext ?? - function (_code: string, _filename: string) { - throw new Error( - "Deno.core.evalContext is not supported in this environment", - ); - }, - encode: DenoCore.encode ?? function (chunk: string): Uint8Array { - return new TextEncoder().encode(chunk); - }, - eventLoopHasMoreWork: DenoCore.eventLoopHasMoreWork ?? function (): boolean { - return false; - }, - isProxy: DenoCore.isProxy ?? function (): boolean { - return false; - }, - getPromiseDetails: DenoCore.getPromiseDetails ?? - function (_promise: Promise): [number, unknown] { - throw new Error( - "Deno.core.getPromiseDetails is not supported in this environment", - ); - }, - setPromiseHooks: DenoCore.setPromiseHooks ?? 
function () { - throw new Error( - "Deno.core.setPromiseHooks is not supported in this environment", - ); - }, - ops: DenoCore.ops ?? { - op_napi_open(_filename: string) { - throw new Error( - "Node API is not supported in this environment", - ); - }, - }, -}; diff --git a/ext/node/polyfills/_fs/_fs_open.ts b/ext/node/polyfills/_fs/_fs_open.ts index 135520591d..2e29f3df10 100644 --- a/ext/node/polyfills/_fs/_fs_open.ts +++ b/ext/node/polyfills/_fs/_fs_open.ts @@ -8,10 +8,10 @@ import { O_WRONLY, } from "ext:deno_node/_fs/_fs_constants.ts"; import { getOpenOptions } from "ext:deno_node/_fs/_fs_common.ts"; -import { promisify } from "ext:deno_node/internal/util.mjs"; import { parseFileMode } from "ext:deno_node/internal/validators.mjs"; import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; import { getValidatedPath } from "ext:deno_node/internal/fs/utils.mjs"; +import { FileHandle } from "ext:deno_node/internal/fs/handle.ts"; import type { Buffer } from "ext:deno_node/buffer.ts"; function existsSync(filePath: string | URL): boolean { @@ -139,16 +139,18 @@ export function open( } } -export const openPromise = promisify(open) as ( - & ((path: string | Buffer | URL) => Promise) - & ((path: string | Buffer | URL, flags: openFlags) => Promise) - & ((path: string | Buffer | URL, mode?: number) => Promise) - & (( - path: string | Buffer | URL, - flags?: openFlags, - mode?: number, - ) => Promise) -); +export function openPromise( + path: string | Buffer | URL, + flags?: openFlags = "r", + mode? 
= 0o666, +): Promise { + return new Promise((resolve, reject) => { + open(path, flags, mode, (err, fd) => { + if (err) reject(err); + else resolve(new FileHandle(fd)); + }); + }); +} export function openSync(path: string | Buffer | URL): number; export function openSync( diff --git a/ext/node/polyfills/_fs/_fs_read.ts b/ext/node/polyfills/_fs/_fs_read.ts index b34384dd47..bce7d334f4 100644 --- a/ext/node/polyfills/_fs/_fs_read.ts +++ b/ext/node/polyfills/_fs/_fs_read.ts @@ -1,6 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. import { Buffer } from "ext:deno_node/buffer.ts"; import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import * as io from "ext:deno_io/12_io.js"; +import * as fs from "ext:deno_fs/30_fs.js"; import { validateOffsetLengthRead, validatePosition, @@ -117,14 +119,14 @@ export function read( try { let nread: number | null; if (typeof position === "number" && position >= 0) { - const currentPosition = await Deno.seek(fd, 0, Deno.SeekMode.Current); + const currentPosition = await fs.seek(fd, 0, io.SeekMode.Current); // We use sync calls below to avoid being affected by others during // these calls. - Deno.seekSync(fd, position, Deno.SeekMode.Start); - nread = Deno.readSync(fd, buffer); - Deno.seekSync(fd, currentPosition, Deno.SeekMode.Start); + fs.seekSync(fd, position, io.SeekMode.Start); + nread = io.readSync(fd, buffer); + fs.seekSync(fd, currentPosition, io.SeekMode.Start); } else { - nread = await Deno.read(fd, buffer); + nread = await io.read(fd, buffer); } cb(null, nread ?? 
0, Buffer.from(buffer.buffer, offset, length)); } catch (error) { @@ -183,14 +185,14 @@ export function readSync( let currentPosition = 0; if (typeof position === "number" && position >= 0) { - currentPosition = Deno.seekSync(fd, 0, Deno.SeekMode.Current); - Deno.seekSync(fd, position, Deno.SeekMode.Start); + currentPosition = fs.seekSync(fd, 0, io.SeekMode.Current); + fs.seekSync(fd, position, io.SeekMode.Start); } - const numberOfBytesRead = Deno.readSync(fd, buffer); + const numberOfBytesRead = io.readSync(fd, buffer); if (typeof position === "number" && position >= 0) { - Deno.seekSync(fd, currentPosition, Deno.SeekMode.Start); + fs.seekSync(fd, currentPosition, io.SeekMode.Start); } return numberOfBytesRead ?? 0; diff --git a/ext/node/polyfills/_fs/_fs_readFile.ts b/ext/node/polyfills/_fs/_fs_readFile.ts index 0ff8a311ad..b3bd5b94c7 100644 --- a/ext/node/polyfills/_fs/_fs_readFile.ts +++ b/ext/node/polyfills/_fs/_fs_readFile.ts @@ -6,6 +6,8 @@ import { TextOptionsArgument, } from "ext:deno_node/_fs/_fs_common.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; +import { readAll } from "ext:deno_io/12_io.js"; +import { FileHandle } from "ext:deno_node/internal/fs/handle.ts"; import { fromFileUrl } from "ext:deno_node/path.ts"; import { BinaryEncodings, @@ -32,25 +34,26 @@ type TextCallback = (err: Error | null, data?: string) => void; type BinaryCallback = (err: Error | null, data?: Buffer) => void; type GenericCallback = (err: Error | null, data?: string | Buffer) => void; type Callback = TextCallback | BinaryCallback | GenericCallback; +type Path = string | URL | FileHandle; export function readFile( - path: string | URL, + path: Path, options: TextOptionsArgument, callback: TextCallback, ): void; export function readFile( - path: string | URL, + path: Path, options: BinaryOptionsArgument, callback: BinaryCallback, ): void; export function readFile( - path: string | URL, + path: Path, options: null | undefined | FileOptionsArgument, callback: 
BinaryCallback, ): void; export function readFile(path: string | URL, callback: BinaryCallback): void; export function readFile( - path: string | URL, + path: Path, optOrCallback?: FileOptionsArgument | Callback | null | undefined, callback?: Callback, ) { @@ -64,7 +67,13 @@ export function readFile( const encoding = getEncoding(optOrCallback); - const p = Deno.readFile(path); + let p: Promise; + if (path instanceof FileHandle) { + const fsFile = new Deno.FsFile(path.fd); + p = readAll(fsFile); + } else { + p = Deno.readFile(path); + } if (cb) { p.then((data: Uint8Array) => { @@ -79,9 +88,9 @@ export function readFile( } export const readFilePromise = promisify(readFile) as ( - & ((path: string | URL, opt: TextOptionsArgument) => Promise) - & ((path: string | URL, opt?: BinaryOptionsArgument) => Promise) - & ((path: string | URL, opt?: FileOptionsArgument) => Promise) + & ((path: Path, opt: TextOptionsArgument) => Promise) + & ((path: Path, opt?: BinaryOptionsArgument) => Promise) + & ((path: Path, opt?: FileOptionsArgument) => Promise) ); export function readFileSync( diff --git a/ext/node/polyfills/_fs/_fs_write.mjs b/ext/node/polyfills/_fs/_fs_write.mjs index bd0ffd1051..fd7a1171c3 100644 --- a/ext/node/polyfills/_fs/_fs_write.mjs +++ b/ext/node/polyfills/_fs/_fs_write.mjs @@ -2,6 +2,8 @@ // Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license. 
import { Buffer } from "ext:deno_node/buffer.ts"; import { validateEncoding, validateInteger } from "ext:deno_node/internal/validators.mjs"; +import * as io from "ext:deno_io/12_io.js"; +import * as fs from "ext:deno_fs/30_fs.js"; import { getValidatedFd, showStringCoercionDeprecation, @@ -19,12 +21,12 @@ export function writeSync(fd, buffer, offset, length, position) { buffer = new Uint8Array(buffer.buffer); } if (typeof position === "number") { - Deno.seekSync(fd, position, Deno.SeekMode.Start); + fs.seekSync(fd, position, io.SeekMode.Start); } let currentOffset = offset; const end = offset + length; while (currentOffset - offset < length) { - currentOffset += Deno.writeSync(fd, buffer.subarray(currentOffset, end)); + currentOffset += io.writeSync(fd, buffer.subarray(currentOffset, end)); } return currentOffset - offset; }; @@ -65,12 +67,12 @@ export function write(fd, buffer, offset, length, position, callback) { buffer = new Uint8Array(buffer.buffer); } if (typeof position === "number") { - await Deno.seek(fd, position, Deno.SeekMode.Start); + await fs.seek(fd, position, io.SeekMode.Start); } let currentOffset = offset; const end = offset + length; while (currentOffset - offset < length) { - currentOffset += await Deno.write( + currentOffset += await io.write( fd, buffer.subarray(currentOffset, end), ); diff --git a/ext/node/polyfills/_fs/_fs_writev.mjs b/ext/node/polyfills/_fs/_fs_writev.mjs index 7440f4fd70..84e2b7cdda 100644 --- a/ext/node/polyfills/_fs/_fs_writev.mjs +++ b/ext/node/polyfills/_fs/_fs_writev.mjs @@ -4,6 +4,8 @@ import { Buffer } from "ext:deno_node/buffer.ts"; import { validateBufferArray } from "ext:deno_node/internal/fs/utils.mjs"; import { getValidatedFd } from "ext:deno_node/internal/fs/utils.mjs"; import { maybeCallback } from "ext:deno_node/_fs/_fs_common.ts"; +import * as io from "ext:deno_io/12_io.js"; +import * as fs from "ext:deno_fs/30_fs.js"; export function writev(fd, buffers, position, callback) { const innerWritev = async (fd, 
buffers, position) => { @@ -17,12 +19,12 @@ export function writev(fd, buffers, position, callback) { } } if (typeof position === "number") { - await Deno.seekSync(fd, position, Deno.SeekMode.Start); + await fs.seekSync(fd, position, io.SeekMode.Start); } const buffer = Buffer.concat(chunks); let currentOffset = 0; while (currentOffset < buffer.byteLength) { - currentOffset += await Deno.writeSync(fd, buffer.subarray(currentOffset)); + currentOffset += await io.writeSync(fd, buffer.subarray(currentOffset)); } return currentOffset - offset; }; @@ -58,12 +60,12 @@ export function writevSync(fd, buffers, position) { } } if (typeof position === "number") { - Deno.seekSync(fd, position, Deno.SeekMode.Start); + fs.seekSync(fd, position, io.SeekMode.Start); } const buffer = Buffer.concat(chunks); let currentOffset = 0; while (currentOffset < buffer.byteLength) { - currentOffset += Deno.writeSync(fd, buffer.subarray(currentOffset)); + currentOffset += io.writeSync(fd, buffer.subarray(currentOffset)); } return currentOffset - offset; }; diff --git a/ext/node/polyfills/_http_outgoing.ts b/ext/node/polyfills/_http_outgoing.ts index 7382be19c8..ab6a78038d 100644 --- a/ext/node/polyfills/_http_outgoing.ts +++ b/ext/node/polyfills/_http_outgoing.ts @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. 
+const core = globalThis.__bootstrap.core; import { getDefaultHighWaterMark } from "ext:deno_node/internal/streams/state.mjs"; import assert from "ext:deno_node/internal/assert.mjs"; import EE from "ext:deno_node/events.ts"; @@ -10,13 +11,14 @@ import type { Socket } from "ext:deno_node/net.ts"; import { kNeedDrain, kOutHeaders, - utcDate, + // utcDate, } from "ext:deno_node/internal/http.ts"; +import { notImplemented } from "ext:deno_node/_utils.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; import { _checkInvalidHeaderChar as checkInvalidHeaderChar, _checkIsHttpToken as checkIsHttpToken, - chunkExpression as RE_TE_CHUNKED, + // chunkExpression as RE_TE_CHUNKED, } from "ext:deno_node/_http_common.ts"; import { defaultTriggerAsyncIdScope, @@ -27,21 +29,22 @@ const { async_id_symbol } = symbols; import { ERR_HTTP_HEADERS_SENT, ERR_HTTP_INVALID_HEADER_VALUE, - ERR_HTTP_TRAILER_INVALID, - ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE, + // ERR_HTTP_TRAILER_INVALID, + // ERR_INVALID_ARG_TYPE, + // ERR_INVALID_ARG_VALUE, ERR_INVALID_CHAR, ERR_INVALID_HTTP_TOKEN, ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_ALREADY_FINISHED, + // ERR_STREAM_ALREADY_FINISHED, ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END, + // ERR_STREAM_DESTROYED, + // ERR_STREAM_NULL_VALUES, + // ERR_STREAM_WRITE_AFTER_END, hideStackFrames, } from "ext:deno_node/internal/errors.ts"; import { validateString } from "ext:deno_node/internal/validators.mjs"; -import { isUint8Array } from "ext:deno_node/internal/util/types.ts"; +// import { isUint8Array } from "ext:deno_node/internal/util/types.ts"; +// import { kStreamBaseField } from "ext:deno_node/internal_binding/stream_wrap.ts"; import { debuglog } from "ext:deno_node/internal/util/debuglog.ts"; let debug = debuglog("http", (fn) => { @@ -54,98 +57,535 @@ const kCorked = Symbol("corked"); const nop = () => {}; -const RE_CONN_CLOSE = /(?:^|\W)close(?:$|\W)/i; +export class OutgoingMessage extends 
Stream { + // deno-lint-ignore no-explicit-any + outputData: any[]; + outputSize: number; + writable: boolean; + destroyed: boolean; -// isCookieField performs a case-insensitive comparison of a provided string -// against the word "cookie." As of V8 6.6 this is faster than handrolling or -// using a case-insensitive RegExp. -function isCookieField(s: string) { - return s.length === 6 && s.toLowerCase() === "cookie"; -} + _last: boolean; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + maxRequestsOnConnectionReached: boolean; + _defaultKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + _removedConnection: boolean; + _removedContLen: boolean; + _removedTE: boolean; -// deno-lint-ignore no-explicit-any -export function OutgoingMessage(this: any) { - Stream.call(this); + _contentLength: number | null; + _hasBody: boolean; + _trailer: string; + [kNeedDrain]: boolean; - // Queue that holds all currently pending data, until the response will be - // assigned to the socket (until it will its turn in the HTTP pipeline). - this.outputData = []; + finished: boolean; + _headerSent: boolean; + [kCorked]: number; + _closed: boolean; - // `outputSize` is an approximate measure of how much data is queued on this - // response. `_onPendingData` will be invoked to update similar global - // per-connection counter. That counter will be used to pause/unpause the - // TCP socket and HTTP Parser and thus handle the backpressure. 
- this.outputSize = 0; + // TODO(crowlKats): use it + socket: null; + // TODO(crowlKats): use it + _header: null; + [kOutHeaders]: null | Record; - this.writable = true; - this.destroyed = false; + _keepAliveTimeout: number; + _onPendingData: () => void; - this._last = false; - this.chunkedEncoding = false; - this.shouldKeepAlive = true; - this.maxRequestsOnConnectionReached = false; - this._defaultKeepAlive = true; - this.useChunkedEncodingByDefault = true; - this.sendDate = false; - this._removedConnection = false; - this._removedContLen = false; - this._removedTE = false; + constructor() { + super(); - this._contentLength = null; - this._hasBody = true; - this._trailer = ""; - this[kNeedDrain] = false; + // Queue that holds all currently pending data, until the response will be + // assigned to the socket (until it will its turn in the HTTP pipeline). + this.outputData = []; - this.finished = false; - this._headerSent = false; - this[kCorked] = 0; - this._closed = false; + // `outputSize` is an approximate measure of how much data is queued on this + // response. `_onPendingData` will be invoked to update similar global + // per-connection counter. That counter will be used to pause/unpause the + // TCP socket and HTTP Parser and thus handle the backpressure. 
+ this.outputSize = 0; - this.socket = null; - this._header = null; - this[kOutHeaders] = null; + this.writable = true; + this.destroyed = false; - this._keepAliveTimeout = 0; + this._last = false; + this.chunkedEncoding = false; + this.shouldKeepAlive = true; + this.maxRequestsOnConnectionReached = false; + this._defaultKeepAlive = true; + this.useChunkedEncodingByDefault = true; + this.sendDate = false; + this._removedConnection = false; + this._removedContLen = false; + this._removedTE = false; - this._onPendingData = nop; -} -Object.setPrototypeOf(OutgoingMessage.prototype, Stream.prototype); -Object.setPrototypeOf(OutgoingMessage, Stream); + this._contentLength = null; + this._hasBody = true; + this._trailer = ""; + this[kNeedDrain] = false; -Object.defineProperty(OutgoingMessage.prototype, "writableFinished", { - get() { + this.finished = false; + this._headerSent = false; + this[kCorked] = 0; + this._closed = false; + + this.socket = null; + this._header = null; + this[kOutHeaders] = null; + + this._keepAliveTimeout = 0; + + this._onPendingData = nop; + } + + get writableFinished() { return ( this.finished && this.outputSize === 0 && (!this.socket || this.socket.writableLength === 0) ); - }, -}); + } -Object.defineProperty(OutgoingMessage.prototype, "writableObjectMode", { - get() { + get writableObjectMode() { return false; - }, -}); + } -Object.defineProperty(OutgoingMessage.prototype, "writableLength", { - get() { + get writableLength() { return this.outputSize + (this.socket ? this.socket.writableLength : 0); - }, -}); + } -Object.defineProperty(OutgoingMessage.prototype, "writableHighWaterMark", { - get() { + get writableHighWaterMark() { return this.socket ? this.socket.writableHighWaterMark : HIGH_WATER_MARK; - }, -}); + } -Object.defineProperty(OutgoingMessage.prototype, "writableCorked", { - get() { + get writableCorked() { const corked = this.socket ? 
this.socket.writableCorked : 0; return corked + this[kCorked]; - }, -}); + } + + get connection() { + return this.socket; + } + + set connection(val) { + this.socket = val; + } + + get writableEnded() { + return this.finished; + } + + get writableNeedDrain() { + return !this.destroyed && !this.finished && this[kNeedDrain]; + } + + cork() { + if (this.socket) { + this.socket.cork(); + } else { + this[kCorked]++; + } + } + + uncork() { + if (this.socket) { + this.socket.uncork(); + } else if (this[kCorked]) { + this[kCorked]--; + } + } + + setTimeout(msecs: number, callback?: (...args: unknown[]) => void) { + if (callback) { + this.on("timeout", callback); + } + + if (!this.socket) { + // deno-lint-ignore no-explicit-any + this.once("socket", function socketSetTimeoutOnConnect(socket: any) { + socket.setTimeout(msecs); + }); + } else { + this.socket.setTimeout(msecs); + } + return this; + } + + // It's possible that the socket will be destroyed, and removed from + // any messages, before ever calling this. In that case, just skip + // it, since something else is destroying this connection anyway. 
+ destroy(error: unknown) { + if (this.destroyed) { + return this; + } + this.destroyed = true; + + if (this.socket) { + this.socket.destroy(error); + } else { + // deno-lint-ignore no-explicit-any + this.once("socket", function socketDestroyOnConnect(socket: any) { + socket.destroy(error); + }); + } + + return this; + } + + setHeader(name: string, value: string) { + if (this._header) { + throw new ERR_HTTP_HEADERS_SENT("set"); + } + validateHeaderName(name); + validateHeaderValue(name, value); + + let headers = this[kOutHeaders]; + if (headers === null) { + this[kOutHeaders] = headers = Object.create(null); + } + + name = name.toString(); + headers[name.toLowerCase()] = [name, value.toString()]; + return this; + } + + appendHeader(name, value) { + if (this._header) { + throw new ERR_HTTP_HEADERS_SENT("append"); + } + validateHeaderName(name); + validateHeaderValue(name, value); + + name = name.toString(); + + const field = name.toLowerCase(); + const headers = this[kOutHeaders]; + if (headers === null || !headers[field]) { + return this.setHeader(name, value); + } + + // Prepare the field for appending, if required + if (!Array.isArray(headers[field][1])) { + headers[field][1] = [headers[field][1]]; + } + + const existingValues = headers[field][1]; + if (Array.isArray(value)) { + for (let i = 0, length = value.length; i < length; i++) { + existingValues.push(value[i].toString()); + } + } else { + existingValues.push(value.toString()); + } + + return this; + } + + // Returns a shallow copy of the current outgoing headers. 
+ getHeaders() { + const headers = this[kOutHeaders]; + const ret = Object.create(null); + if (headers) { + const keys = Object.keys(headers); + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + const val = headers[key][1]; + ret[key] = val; + } + } + return ret; + } + + hasHeader(name: string) { + validateString(name, "name"); + return this[kOutHeaders] !== null && + !!this[kOutHeaders][name.toLowerCase()]; + } + + removeHeader(name: string) { + validateString(name, "name"); + + if (this._header) { + throw new ERR_HTTP_HEADERS_SENT("remove"); + } + + const key = name.toLowerCase(); + + switch (key) { + case "connection": + this._removedConnection = true; + break; + case "content-length": + this._removedContLen = true; + break; + case "transfer-encoding": + this._removedTE = true; + break; + case "date": + this.sendDate = false; + break; + } + + if (this[kOutHeaders] !== null) { + delete this[kOutHeaders][key]; + } + } + + getHeader(name: string) { + validateString(name, "name"); + + const headers = this[kOutHeaders]; + if (headers === null) { + return; + } + + const entry = headers[name.toLowerCase()]; + return entry && entry[1]; + } + + // Returns an array of the names of the current outgoing headers. + getHeaderNames() { + return this[kOutHeaders] !== null ? Object.keys(this[kOutHeaders]) : []; + } + + // Returns an array of the names of the current outgoing raw headers. 
+ getRawHeaderNames() { + const headersMap = this[kOutHeaders]; + if (headersMap === null) return []; + + const values = Object.values(headersMap); + const headers = Array(values.length); + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0, l = values.length; i < l; i++) { + // deno-lint-ignore no-explicit-any + headers[i] = (values as any)[i][0]; + } + + return headers; + } + + write( + chunk: string | Uint8Array | Buffer, + encoding: string | null, + callback: () => void, + ): boolean { + if ( + (typeof chunk === "string" && chunk.length > 0) || + ((chunk instanceof Buffer || chunk instanceof Uint8Array) && + chunk.buffer.byteLength > 0) + ) { + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (chunk instanceof Buffer) { + chunk = new Uint8Array(chunk.buffer); + } + + core.writeAll(this._bodyWriteRid, chunk).then(() => { + callback?.(); + this.emit("drain"); + }).catch((e) => { + this._requestSendError = e; + }); + } + + return false; + } + + // deno-lint-ignore no-explicit-any + addTrailers(_headers: any) { + // TODO(crowlKats): finish it + notImplemented("OutgoingMessage.addTrailers"); + } + + // deno-lint-ignore no-explicit-any + end(_chunk: any, _encoding: any, _callback: any) { + notImplemented("OutgoingMessage.end"); + } + + flushHeaders() { + if (!this._header) { + this._implicitHeader(); + } + + // Force-flush the headers. + this._send(""); + } + + pipe() { + // OutgoingMessage should be write-only. Piping from it is disabled. + this.emit("error", new ERR_STREAM_CANNOT_PIPE()); + } + + _implicitHeader() { + throw new ERR_METHOD_NOT_IMPLEMENTED("_implicitHeader()"); + } + + _finish() { + assert(this.socket); + this.emit("prefinish"); + } + + // This logic is probably a bit confusing. Let me explain a bit: + // + // In both HTTP servers and clients it is possible to queue up several + // outgoing messages. This is easiest to imagine in the case of a client. 
+ // Take the following situation: + // + // req1 = client.request('GET', '/'); + // req2 = client.request('POST', '/'); + // + // When the user does + // + // req2.write('hello world\n'); + // + // it's possible that the first request has not been completely flushed to + // the socket yet. Thus the outgoing messages need to be prepared to queue + // up data internally before sending it on further to the socket's queue. + // + // This function, outgoingFlush(), is called by both the Server and Client + // to attempt to flush any pending messages out to the socket. + _flush() { + const socket = this.socket; + + if (socket && socket.writable) { + // There might be remaining data in this.output; write it out + const ret = this._flushOutput(socket); + + if (this.finished) { + // This is a queue to the server or client to bring in the next this. + this._finish(); + } else if (ret && this[kNeedDrain]) { + this[kNeedDrain] = false; + this.emit("drain"); + } + } + } + + _flushOutput(socket: Socket) { + while (this[kCorked]) { + this[kCorked]--; + socket.cork(); + } + + const outputLength = this.outputData.length; + if (outputLength <= 0) { + return undefined; + } + + const outputData = this.outputData; + socket.cork(); + let ret; + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0; i < outputLength; i++) { + const { data, encoding, callback } = outputData[i]; + ret = socket.write(data, encoding, callback); + } + socket.uncork(); + + this.outputData = []; + this._onPendingData(-this.outputSize); + this.outputSize = 0; + + return ret; + } + + // This abstract either writing directly to the socket or buffering it. + // deno-lint-ignore no-explicit-any + _send(data: any, encoding?: string | null, callback?: () => void) { + // This is a shameful hack to get the headers and first body chunk onto + // the same packet. 
Future versions of Node are going to take care of + // this at a lower level and in a more general way. + if (!this._headerSent && this._header !== null) { + // `this._header` can be null if OutgoingMessage is used without a proper Socket + // See: /test/parallel/test-http-outgoing-message-inheritance.js + if ( + typeof data === "string" && + (encoding === "utf8" || encoding === "latin1" || !encoding) + ) { + data = this._header + data; + } else { + const header = this._header; + this.outputData.unshift({ + data: header, + encoding: "latin1", + callback: null, + }); + this.outputSize += header.length; + this._onPendingData(header.length); + } + this._headerSent = true; + } + return this._writeRaw(data, encoding, callback); + } + + _writeRaw( + // deno-lint-ignore no-explicit-any + this: any, + // deno-lint-ignore no-explicit-any + data: any, + encoding?: string | null, + callback?: () => void, + ) { + const conn = this.socket; + if (conn && conn.destroyed) { + // The socket was destroyed. If we're still trying to write to it, + // then we haven't gotten the 'close' event yet. + return false; + } + + if (typeof encoding === "function") { + callback = encoding; + encoding = null; + } + + if (conn && conn._httpMessage === this && conn.writable) { + // There might be pending data in the this.output buffer. + if (this.outputData.length) { + this._flushOutput(conn); + } + // Directly write to socket. + return conn.write(data, encoding, callback); + } + // Buffer, as long as we're not destroyed. 
+ this.outputData.push({ data, encoding, callback }); + this.outputSize += data.length; + this._onPendingData(data.length); + return this.outputSize < HIGH_WATER_MARK; + } + + _renderHeaders() { + if (this._header) { + throw new ERR_HTTP_HEADERS_SENT("render"); + } + + const headersMap = this[kOutHeaders]; + // deno-lint-ignore no-explicit-any + const headers: any = {}; + + if (headersMap !== null) { + const keys = Object.keys(headersMap); + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0, l = keys.length; i < l; i++) { + const key = keys[i]; + headers[headersMap[key][0]] = headersMap[key][1]; + } + } + return headers; + } + + // deno-lint-ignore no-explicit-any + [EE.captureRejectionSymbol](err: any, _event: any) { + this.destroy(err); + } +} Object.defineProperty(OutgoingMessage.prototype, "_headers", { get: deprecate( @@ -177,15 +617,6 @@ Object.defineProperty(OutgoingMessage.prototype, "_headers", { ), }); -Object.defineProperty(OutgoingMessage.prototype, "connection", { - get: function () { - return this.socket; - }, - set: function (val) { - this.socket = val; - }, -}); - Object.defineProperty(OutgoingMessage.prototype, "_headerNames", { get: deprecate( // deno-lint-ignore no-explicit-any @@ -232,373 +663,6 @@ Object.defineProperty(OutgoingMessage.prototype, "_headerNames", { ), }); -OutgoingMessage.prototype._renderHeaders = function _renderHeaders() { - if (this._header) { - throw new ERR_HTTP_HEADERS_SENT("render"); - } - - const headersMap = this[kOutHeaders]; - // deno-lint-ignore no-explicit-any - const headers: any = {}; - - if (headersMap !== null) { - const keys = Object.keys(headersMap); - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0, l = keys.length; i < l; i++) { - const key = keys[i]; - headers[headersMap[key][0]] = headersMap[key][1]; - } - } - return headers; -}; - -OutgoingMessage.prototype.cork = 
function () { - if (this.socket) { - this.socket.cork(); - } else { - this[kCorked]++; - } -}; - -OutgoingMessage.prototype.uncork = function () { - if (this.socket) { - this.socket.uncork(); - } else if (this[kCorked]) { - this[kCorked]--; - } -}; - -OutgoingMessage.prototype.setTimeout = function setTimeout( - msecs: number, - callback?: (...args: unknown[]) => void, -) { - if (callback) { - this.on("timeout", callback); - } - - if (!this.socket) { - // deno-lint-ignore no-explicit-any - this.once("socket", function socketSetTimeoutOnConnect(socket: any) { - socket.setTimeout(msecs); - }); - } else { - this.socket.setTimeout(msecs); - } - return this; -}; - -// It's possible that the socket will be destroyed, and removed from -// any messages, before ever calling this. In that case, just skip -// it, since something else is destroying this connection anyway. -OutgoingMessage.prototype.destroy = function destroy(error: unknown) { - if (this.destroyed) { - return this; - } - this.destroyed = true; - - if (this.socket) { - this.socket.destroy(error); - } else { - // deno-lint-ignore no-explicit-any - this.once("socket", function socketDestroyOnConnect(socket: any) { - socket.destroy(error); - }); - } - - return this; -}; - -// This abstract either writing directly to the socket or buffering it. -OutgoingMessage.prototype._send = function _send( - // deno-lint-ignore no-explicit-any - data: any, - encoding: string | null, - callback: () => void, -) { - // This is a shameful hack to get the headers and first body chunk onto - // the same packet. Future versions of Node are going to take care of - // this at a lower level and in a more general way. 
- if (!this._headerSent) { - if ( - typeof data === "string" && - (encoding === "utf8" || encoding === "latin1" || !encoding) - ) { - data = this._header + data; - } else { - const header = this._header; - this.outputData.unshift({ - data: header, - encoding: "latin1", - callback: null, - }); - this.outputSize += header.length; - this._onPendingData(header.length); - } - this._headerSent = true; - } - return this._writeRaw(data, encoding, callback); -}; - -OutgoingMessage.prototype._writeRaw = _writeRaw; -function _writeRaw( - // deno-lint-ignore no-explicit-any - this: any, - // deno-lint-ignore no-explicit-any - data: any, - encoding: string | null, - callback: () => void, -) { - const conn = this.socket; - if (conn && conn.destroyed) { - // The socket was destroyed. If we're still trying to write to it, - // then we haven't gotten the 'close' event yet. - return false; - } - - if (typeof encoding === "function") { - callback = encoding; - encoding = null; - } - - if (conn && conn._httpMessage === this && conn.writable) { - // There might be pending data in the this.output buffer. - if (this.outputData.length) { - this._flushOutput(conn); - } - // Directly write to socket. - return conn.write(data, encoding, callback); - } - // Buffer, as long as we're not destroyed. 
- this.outputData.push({ data, encoding, callback }); - this.outputSize += data.length; - this._onPendingData(data.length); - return this.outputSize < HIGH_WATER_MARK; -} - -OutgoingMessage.prototype._storeHeader = _storeHeader; -// deno-lint-ignore no-explicit-any -function _storeHeader(this: any, firstLine: any, headers: any) { - // firstLine in the case of request is: 'GET /index.html HTTP/1.1\r\n' - // in the case of response it is: 'HTTP/1.1 200 OK\r\n' - const state = { - connection: false, - contLen: false, - te: false, - date: false, - expect: false, - trailer: false, - header: firstLine, - }; - - if (headers) { - if (headers === this[kOutHeaders]) { - for (const key in headers) { - if (Object.hasOwn(headers, key)) { - const entry = headers[key]; - processHeader(this, state, entry[0], entry[1], false); - } - } - } else if (Array.isArray(headers)) { - if (headers.length && Array.isArray(headers[0])) { - for (let i = 0; i < headers.length; i++) { - const entry = headers[i]; - processHeader(this, state, entry[0], entry[1], true); - } - } else { - if (headers.length % 2 !== 0) { - throw new ERR_INVALID_ARG_VALUE("headers", headers); - } - - for (let n = 0; n < headers.length; n += 2) { - processHeader(this, state, headers[n + 0], headers[n + 1], true); - } - } - } else { - for (const key in headers) { - if (Object.hasOwn(headers, key)) { - processHeader(this, state, key, headers[key], true); - } - } - } - } - - let { header } = state; - - // Date header - if (this.sendDate && !state.date) { - header += "Date: " + utcDate() + "\r\n"; - } - - // Force the connection to close when the response is a 204 No Content or - // a 304 Not Modified and the user has set a "Transfer-Encoding: chunked" - // header. - // - // RFC 2616 mandates that 204 and 304 responses MUST NOT have a body but - // node.js used to send out a zero chunk anyway to accommodate clients - // that don't have special handling for those responses. 
- // - // It was pointed out that this might confuse reverse proxies to the point - // of creating security liabilities, so suppress the zero chunk and force - // the connection to close. - if ( - this.chunkedEncoding && (this.statusCode === 204 || - this.statusCode === 304) - ) { - debug( - this.statusCode + " response should not use chunked encoding," + - " closing connection.", - ); - this.chunkedEncoding = false; - this.shouldKeepAlive = false; - } - - // keep-alive logic - if (this._removedConnection) { - this._last = true; - this.shouldKeepAlive = false; - } else if (!state.connection) { - const shouldSendKeepAlive = this.shouldKeepAlive && - (state.contLen || this.useChunkedEncodingByDefault || this.agent); - if (shouldSendKeepAlive && this.maxRequestsOnConnectionReached) { - header += "Connection: close\r\n"; - } else if (shouldSendKeepAlive) { - header += "Connection: keep-alive\r\n"; - if (this._keepAliveTimeout && this._defaultKeepAlive) { - const timeoutSeconds = Math.floor(this._keepAliveTimeout / 1000); - header += `Keep-Alive: timeout=${timeoutSeconds}\r\n`; - } - } else { - this._last = true; - header += "Connection: close\r\n"; - } - } - - if (!state.contLen && !state.te) { - if (!this._hasBody) { - // Make sure we don't end the 0\r\n\r\n at the end of the message. - this.chunkedEncoding = false; - } else if (!this.useChunkedEncodingByDefault) { - this._last = true; - } else if ( - !state.trailer && - !this._removedContLen && - typeof this._contentLength === "number" - ) { - header += "Content-Length: " + this._contentLength + "\r\n"; - } else if (!this._removedTE) { - header += "Transfer-Encoding: chunked\r\n"; - this.chunkedEncoding = true; - } else { - // We should only be able to get here if both Content-Length and - // Transfer-Encoding are removed by the user. 
- // See: test/parallel/test-http-remove-header-stays-removed.js - debug("Both Content-Length and Transfer-Encoding are removed"); - } - } - - // Test non-chunked message does not have trailer header set, - // message will be terminated by the first empty line after the - // header fields, regardless of the header fields present in the - // message, and thus cannot contain a message body or 'trailers'. - if (this.chunkedEncoding !== true && state.trailer) { - throw new ERR_HTTP_TRAILER_INVALID(); - } - - this._header = header + "\r\n"; - this._headerSent = false; - - // Wait until the first body chunk, or close(), is sent to flush, - // UNLESS we're sending Expect: 100-continue. - if (state.expect) this._send(""); -} - -function processHeader( - // deno-lint-ignore no-explicit-any - self: any, - // deno-lint-ignore no-explicit-any - state: any, - // deno-lint-ignore no-explicit-any - key: any, - // deno-lint-ignore no-explicit-any - value: any, - // deno-lint-ignore no-explicit-any - validate: any, -) { - if (validate) { - validateHeaderName(key); - } - if (Array.isArray(value)) { - if (value.length < 2 || !isCookieField(key)) { - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0; i < value.length; i++) { - storeHeader(self, state, key, value[i], validate); - } - return; - } - value = value.join("; "); - } - storeHeader(self, state, key, value, validate); -} - -function storeHeader( - // deno-lint-ignore no-explicit-any - self: any, - // deno-lint-ignore no-explicit-any - state: any, - // deno-lint-ignore no-explicit-any - key: any, - // deno-lint-ignore no-explicit-any - value: any, - // deno-lint-ignore no-explicit-any - validate: any, -) { - if (validate) { - validateHeaderValue(key, value); - } - state.header += key + ": " + value + "\r\n"; - matchHeader(self, state, key, value); -} - -// deno-lint-ignore no-explicit-any -function matchHeader(self: any, state: any, field: string, value: any) { - 
if (field.length < 4 || field.length > 17) { - return; - } - field = field.toLowerCase(); - switch (field) { - case "connection": - state.connection = true; - self._removedConnection = false; - if (RE_CONN_CLOSE.test(value)) { - self._last = true; - } else { - self.shouldKeepAlive = true; - } - break; - case "transfer-encoding": - state.te = true; - self._removedTE = false; - if (RE_TE_CHUNKED.test(value)) { - self.chunkedEncoding = true; - } - break; - case "content-length": - state.contLen = true; - self._removedContLen = false; - break; - case "date": - case "expect": - case "trailer": - state[field] = true; - break; - case "keep-alive": - self._defaultKeepAlive = false; - break; - } -} - export const validateHeaderName = hideStackFrames((name) => { if (typeof name !== "string" || !name || !checkIsHttpToken(name)) { throw new ERR_INVALID_HTTP_TOKEN("Header name", name); @@ -615,114 +679,19 @@ export const validateHeaderValue = hideStackFrames((name, value) => { } }); -OutgoingMessage.prototype.setHeader = function setHeader( - name: string, - value: string, -) { - if (this._header) { - throw new ERR_HTTP_HEADERS_SENT("set"); - } - validateHeaderName(name); - validateHeaderValue(name, value); - - let headers = this[kOutHeaders]; - if (headers === null) { - this[kOutHeaders] = headers = Object.create(null); +export function parseUniqueHeadersOption(headers) { + if (!Array.isArray(headers)) { + return null; } - headers[name.toLowerCase()] = [name, value]; - return this; -}; - -OutgoingMessage.prototype.getHeader = function getHeader(name: string) { - validateString(name, "name"); - - const headers = this[kOutHeaders]; - if (headers === null) { - return; + const unique = new Set(); + const l = headers.length; + for (let i = 0; i < l; i++) { + unique.add(headers[i].toLowerCase()); } - const entry = headers[name.toLowerCase()]; - return entry && entry[1]; -}; - -// Returns an array of the names of the current outgoing headers. 
-OutgoingMessage.prototype.getHeaderNames = function getHeaderNames() { - return this[kOutHeaders] !== null ? Object.keys(this[kOutHeaders]) : []; -}; - -// Returns an array of the names of the current outgoing raw headers. -OutgoingMessage.prototype.getRawHeaderNames = function getRawHeaderNames() { - const headersMap = this[kOutHeaders]; - if (headersMap === null) return []; - - const values = Object.values(headersMap); - const headers = Array(values.length); - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0, l = values.length; i < l; i++) { - // deno-lint-ignore no-explicit-any - headers[i] = (values as any)[i][0]; - } - - return headers; -}; - -// Returns a shallow copy of the current outgoing headers. -OutgoingMessage.prototype.getHeaders = function getHeaders() { - const headers = this[kOutHeaders]; - const ret = Object.create(null); - if (headers) { - const keys = Object.keys(headers); - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - const val = headers[key][1]; - ret[key] = val; - } - } - return ret; -}; - -OutgoingMessage.prototype.hasHeader = function hasHeader(name: string) { - validateString(name, "name"); - return this[kOutHeaders] !== null && - !!this[kOutHeaders][name.toLowerCase()]; -}; - -OutgoingMessage.prototype.removeHeader = function removeHeader(name: string) { - validateString(name, "name"); - - if (this._header) { - throw new ERR_HTTP_HEADERS_SENT("remove"); - } - - const key = name.toLowerCase(); - - switch (key) { - case "connection": - this._removedConnection = true; - break; - case "content-length": - this._removedContLen = true; - break; - case "transfer-encoding": - this._removedTE = true; - break; - case "date": - this.sendDate = false; - break; - } - - if (this[kOutHeaders] !== null) { - delete this[kOutHeaders][key]; - } -}; - 
-OutgoingMessage.prototype._implicitHeader = function _implicitHeader() { - throw new ERR_METHOD_NOT_IMPLEMENTED("_implicitHeader()"); -}; + return unique; +} Object.defineProperty(OutgoingMessage.prototype, "headersSent", { configurable: true, @@ -732,40 +701,13 @@ Object.defineProperty(OutgoingMessage.prototype, "headersSent", { }, }); -Object.defineProperty(OutgoingMessage.prototype, "writableEnded", { - get: function () { - return this.finished; - }, -}); - -Object.defineProperty(OutgoingMessage.prototype, "writableNeedDrain", { - get: function () { - return !this.destroyed && !this.finished && this[kNeedDrain]; - }, -}); - +// TODO(bartlomieju): use it // deno-lint-ignore camelcase -const crlf_buf = Buffer.from("\r\n"); -OutgoingMessage.prototype.write = function write( - // deno-lint-ignore no-explicit-any - chunk: any, - encoding: string | null, - callback: () => void, -) { - if (typeof encoding === "function") { - callback = encoding; - encoding = null; - } - - const ret = write_(this, chunk, encoding, callback, false); - if (!ret) { - this[kNeedDrain] = true; - } - return ret; -}; +const _crlf_buf = Buffer.from("\r\n"); +// TODO(bartlomieju): use it // deno-lint-ignore no-explicit-any -function onError(msg: any, err: any, callback: any) { +function _onError(msg: any, err: any, callback: any) { const triggerAsyncId = msg.socket ? 
msg.socket[async_id_symbol] : undefined; defaultTriggerAsyncIdScope( triggerAsyncId, @@ -786,314 +728,37 @@ function emitErrorNt(msg: any, err: any, callback: any) { } } -function write_( +// TODO(bartlomieju): use it +function _write_( // deno-lint-ignore no-explicit-any - msg: any, + _msg: any, // deno-lint-ignore no-explicit-any - chunk: any, - encoding: string | null, + _chunk: any, + _encoding: string | null, // deno-lint-ignore no-explicit-any - callback: any, + _callback: any, // deno-lint-ignore no-explicit-any - fromEnd: any, + _fromEnd: any, ) { - if (typeof callback !== "function") { - callback = nop; - } - - let len; - if (chunk === null) { - throw new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk === "string") { - len = Buffer.byteLength(chunk, encoding); - } else if (isUint8Array(chunk)) { - len = chunk.length; - } else { - throw new ERR_INVALID_ARG_TYPE( - "chunk", - ["string", "Buffer", "Uint8Array"], - chunk, - ); - } - - let err; - if (msg.finished) { - err = new ERR_STREAM_WRITE_AFTER_END(); - } else if (msg.destroyed) { - err = new ERR_STREAM_DESTROYED("write"); - } - - if (err) { - if (!msg.destroyed) { - onError(msg, err, callback); - } else { - // deno-lint-ignore no-explicit-any - (globalThis as any).process.nextTick(callback, err); - } - return false; - } - - if (!msg._header) { - if (fromEnd) { - msg._contentLength = len; - } - msg._implicitHeader(); - } - - if (!msg._hasBody) { - debug( - "This type of response MUST NOT have a body. 
" + - "Ignoring write() calls.", - ); - // deno-lint-ignore no-explicit-any - (globalThis as any).process.nextTick(callback); - return true; - } - - if (!fromEnd && msg.socket && !msg.socket.writableCorked) { - msg.socket.cork(); - // deno-lint-ignore no-explicit-any - (globalThis as any).process.nextTick(connectionCorkNT, msg.socket); - } - - let ret; - if (msg.chunkedEncoding && chunk.length !== 0) { - msg._send(len.toString(16), "latin1", null); - msg._send(crlf_buf, null, null); - msg._send(chunk, encoding, null); - ret = msg._send(crlf_buf, null, callback); - } else { - ret = msg._send(chunk, encoding, callback); - } - - debug("write ret = " + ret); - return ret; + // TODO(crowlKats): finish } +// TODO(bartlomieju): use it // deno-lint-ignore no-explicit-any -function connectionCorkNT(conn: any) { +function _connectionCorkNT(conn: any) { conn.uncork(); } +// TODO(bartlomieju): use it // deno-lint-ignore no-explicit-any -OutgoingMessage.prototype.addTrailers = function addTrailers(headers: any) { - this._trailer = ""; - const keys = Object.keys(headers); - const isArray = Array.isArray(headers); - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0, l = keys.length; i < l; i++) { - let field, value; - const key = keys[i]; - if (isArray) { - // deno-lint-ignore no-explicit-any - field = headers[key as any][0]; - // deno-lint-ignore no-explicit-any - value = headers[key as any][1]; - } else { - field = key; - value = headers[key]; - } - if (typeof field !== "string" || !field || !checkIsHttpToken(field)) { - throw new ERR_INVALID_HTTP_TOKEN("Trailer name", field); - } - if (checkInvalidHeaderChar(value)) { - debug('Trailer "%s" contains invalid characters', field); - throw new ERR_INVALID_CHAR("trailer content", field); - } - this._trailer += field + ": " + value + "\r\n"; - } -}; - -// deno-lint-ignore no-explicit-any -function onFinish(outmsg: any) { +function _onFinish(outmsg: any) { if (outmsg 
&& outmsg.socket && outmsg.socket._hadError) return; outmsg.emit("finish"); } -OutgoingMessage.prototype.end = function end( - // deno-lint-ignore no-explicit-any - chunk: any, - // deno-lint-ignore no-explicit-any - encoding: any, - // deno-lint-ignore no-explicit-any - callback: any, -) { - if (typeof chunk === "function") { - callback = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === "function") { - callback = encoding; - encoding = null; - } - - if (chunk) { - if (this.finished) { - onError( - this, - new ERR_STREAM_WRITE_AFTER_END(), - typeof callback !== "function" ? nop : callback, - ); - return this; - } - - if (this.socket) { - this.socket.cork(); - } - - write_(this, chunk, encoding, null, true); - } else if (this.finished) { - if (typeof callback === "function") { - if (!this.writableFinished) { - this.on("finish", callback); - } else { - callback(new ERR_STREAM_ALREADY_FINISHED("end")); - } - } - return this; - } else if (!this._header) { - if (this.socket) { - this.socket.cork(); - } - - this._contentLength = 0; - this._implicitHeader(); - } - - if (typeof callback === "function") { - this.once("finish", callback); - } - - const finish = onFinish.bind(undefined, this); - - if (this._hasBody && this.chunkedEncoding) { - this._send("0\r\n" + this._trailer + "\r\n", "latin1", finish); - } else if (!this._headerSent || this.writableLength || chunk) { - this._send("", "latin1", finish); - } else { - // deno-lint-ignore no-explicit-any - (globalThis as any).process.nextTick(finish); - } - - if (this.socket) { - // Fully uncork connection on end(). - this.socket._writableState.corked = 1; - this.socket.uncork(); - } - this[kCorked] = 0; - - this.finished = true; - - // There is the first message on the outgoing queue, and we've sent - // everything to the socket. 
- debug("outgoing message end."); - if ( - this.outputData.length === 0 && - this.socket && - this.socket._httpMessage === this - ) { - this._finish(); - } - - return this; -}; - -OutgoingMessage.prototype._finish = function _finish() { - assert(this.socket); - this.emit("prefinish"); -}; - -// This logic is probably a bit confusing. Let me explain a bit: -// -// In both HTTP servers and clients it is possible to queue up several -// outgoing messages. This is easiest to imagine in the case of a client. -// Take the following situation: -// -// req1 = client.request('GET', '/'); -// req2 = client.request('POST', '/'); -// -// When the user does -// -// req2.write('hello world\n'); -// -// it's possible that the first request has not been completely flushed to -// the socket yet. Thus the outgoing messages need to be prepared to queue -// up data internally before sending it on further to the socket's queue. -// -// This function, outgoingFlush(), is called by both the Server and Client -// to attempt to flush any pending messages out to the socket. -OutgoingMessage.prototype._flush = function _flush() { - const socket = this.socket; - - if (socket && socket.writable) { - // There might be remaining data in this.output; write it out - const ret = this._flushOutput(socket); - - if (this.finished) { - // This is a queue to the server or client to bring in the next this. 
- this._finish(); - } else if (ret && this[kNeedDrain]) { - this[kNeedDrain] = false; - this.emit("drain"); - } - } -}; - -OutgoingMessage.prototype._flushOutput = function _flushOutput(socket: Socket) { - while (this[kCorked]) { - this[kCorked]--; - socket.cork(); - } - - const outputLength = this.outputData.length; - if (outputLength <= 0) { - return undefined; - } - - const outputData = this.outputData; - socket.cork(); - let ret; - // Retain for(;;) loop for performance reasons - // Refs: https://github.com/nodejs/node/pull/30958 - for (let i = 0; i < outputLength; i++) { - const { data, encoding, callback } = outputData[i]; - ret = socket.write(data, encoding, callback); - } - socket.uncork(); - - this.outputData = []; - this._onPendingData(-this.outputSize); - this.outputSize = 0; - - return ret; -}; - -OutgoingMessage.prototype.flushHeaders = function flushHeaders() { - if (!this._header) { - this._implicitHeader(); - } - - // Force-flush the headers. - this._send(""); -}; - -OutgoingMessage.prototype.pipe = function pipe() { - // OutgoingMessage should be write-only. Piping from it is disabled. - this.emit("error", new ERR_STREAM_CANNOT_PIPE()); -}; - -OutgoingMessage.prototype[EE.captureRejectionSymbol] = function ( - // deno-lint-ignore no-explicit-any - err: any, - // deno-lint-ignore no-explicit-any - _event: any, -) { - this.destroy(err); -}; - export default { validateHeaderName, validateHeaderValue, + parseUniqueHeadersOption, OutgoingMessage, }; diff --git a/ext/node/polyfills/_next_tick.ts b/ext/node/polyfills/_next_tick.ts index 45e972d6b5..fe1b687421 100644 --- a/ext/node/polyfills/_next_tick.ts +++ b/ext/node/polyfills/_next_tick.ts @@ -1,11 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent, Inc. and other Node contributors. 
-import { core } from "ext:deno_node/_core.ts"; import { validateFunction } from "ext:deno_node/internal/validators.mjs"; import { _exiting } from "ext:deno_node/_process/exiting.ts"; import { FixedQueue } from "ext:deno_node/internal/fixed_queue.ts"; +const { core } = globalThis.__bootstrap; + interface Tock { callback: (...args: Array) => void; args: Array; diff --git a/ext/node/polyfills/_process/streams.mjs b/ext/node/polyfills/_process/streams.mjs index df014c11e7..934d4f9670 100644 --- a/ext/node/polyfills/_process/streams.mjs +++ b/ext/node/polyfills/_process/streams.mjs @@ -16,6 +16,7 @@ import * as io from "ext:deno_io/12_io.js"; // https://github.com/nodejs/node/blob/00738314828074243c9a52a228ab4c68b04259ef/lib/internal/bootstrap/switches/is_main_thread.js#L41 export function createWritableStdioStream(writer, name) { const stream = new Writable({ + emitClose: false, write(buf, enc, cb) { if (!writer) { this.destroy( @@ -216,7 +217,7 @@ export const initStdin = () => { enumerable: true, configurable: true, get() { - return Deno.isatty?.(Deno.stdin.rid); + return Deno.isatty?.(io.stdin.rid); }, }); stdin._isRawMode = false; diff --git a/ext/node/polyfills/_stream.d.ts b/ext/node/polyfills/_stream.d.ts index 467ac9f36b..382bb9093c 100644 --- a/ext/node/polyfills/_stream.d.ts +++ b/ext/node/polyfills/_stream.d.ts @@ -1190,7 +1190,7 @@ type PipelineDestinationPromiseFunction = ( source: AsyncIterable, ) => Promise

; type PipelineDestination, P> = S extends - PipelineTransformSource ? + PipelineTransformSource ? | WritableStream | PipelineDestinationIterableFunction | PipelineDestinationPromiseFunction diff --git a/ext/node/polyfills/_stream.mjs b/ext/node/polyfills/_stream.mjs index 3fec7f7767..2e2fcce8c3 100644 --- a/ext/node/polyfills/_stream.mjs +++ b/ext/node/polyfills/_stream.mjs @@ -5,10 +5,5707 @@ import { nextTick } from "ext:deno_node/_next_tick.ts"; import { AbortController } from "ext:deno_web/03_abort_signal.js"; import { Blob } from "ext:deno_web/09_file.js"; +import { StringDecoder } from "ext:deno_node/string_decoder.ts"; +import { + createDeferredPromise, + kEmptyObject, + normalizeEncoding, + once, + promisify, +} from "ext:deno_node/internal/util.mjs"; +import { + isArrayBufferView, + isAsyncFunction, +} from "ext:deno_node/internal/util/types.ts"; +import { debuglog } from "ext:deno_node/internal/util/debuglog.ts"; +import { inspect } from "ext:deno_node/internal/util/inspect.mjs"; + +import { + AbortError, + aggregateTwoErrors, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_RETURN_VALUE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_MISSING_ARGS, + ERR_MULTIPLE_CALLBACK, + ERR_OUT_OF_RANGE, + ERR_SOCKET_BAD_PORT, + ERR_STREAM_ALREADY_FINISHED, + ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES, + ERR_STREAM_PREMATURE_CLOSE, + ERR_STREAM_PUSH_AFTER_EOF, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT, + ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING, + ERR_UNKNOWN_SIGNAL, + hideStackFrames, +} from "ext:deno_node/internal/errors.ts"; /* esm.sh - esbuild bundle(readable-stream@4.2.0) es2022 production */ -const __process$ = { nextTick };import __buffer$ from "ext:deno_node/buffer.ts";import __string_decoder$ from "ext:deno_node/string_decoder.ts";import __events$ from "ext:deno_node/events.ts";var pi=Object.create;var Bt=Object.defineProperty;var wi=Object.getOwnPropertyDescriptor;var yi=Object.getOwnPropertyNames;var 
gi=Object.getPrototypeOf,Si=Object.prototype.hasOwnProperty;var E=(e=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(e,{get:(t,n)=>(typeof require<"u"?require:t)[n]}):e)(function(e){if(typeof require<"u")return require.apply(this,arguments);throw new Error('Dynamic require of "'+e+'" is not supported')});var g=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Ei=(e,t,n,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let i of yi(t))!Si.call(e,i)&&i!==n&&Bt(e,i,{get:()=>t[i],enumerable:!(r=wi(t,i))||r.enumerable});return e};var Ri=(e,t,n)=>(n=e!=null?pi(gi(e)):{},Ei(t||!e||!e.__esModule?Bt(n,"default",{value:e,enumerable:!0}):n,e));var m=g((Yf,Gt)=>{"use strict";Gt.exports={ArrayIsArray(e){return Array.isArray(e)},ArrayPrototypeIncludes(e,t){return e.includes(t)},ArrayPrototypeIndexOf(e,t){return e.indexOf(t)},ArrayPrototypeJoin(e,t){return e.join(t)},ArrayPrototypeMap(e,t){return e.map(t)},ArrayPrototypePop(e,t){return e.pop(t)},ArrayPrototypePush(e,t){return e.push(t)},ArrayPrototypeSlice(e,t,n){return e.slice(t,n)},Error,FunctionPrototypeCall(e,t,...n){return e.call(t,...n)},FunctionPrototypeSymbolHasInstance(e,t){return Function.prototype[Symbol.hasInstance].call(e,t)},MathFloor:Math.floor,Number,NumberIsInteger:Number.isInteger,NumberIsNaN:Number.isNaN,NumberMAX_SAFE_INTEGER:Number.MAX_SAFE_INTEGER,NumberMIN_SAFE_INTEGER:Number.MIN_SAFE_INTEGER,NumberParseInt:Number.parseInt,ObjectDefineProperties(e,t){return Object.defineProperties(e,t)},ObjectDefineProperty(e,t,n){return Object.defineProperty(e,t,n)},ObjectGetOwnPropertyDescriptor(e,t){return Object.getOwnPropertyDescriptor(e,t)},ObjectKeys(e){return Object.keys(e)},ObjectSetPrototypeOf(e,t){return Object.setPrototypeOf(e,t)},Promise,PromisePrototypeCatch(e,t){return e.catch(t)},PromisePrototypeThen(e,t,n){return e.then(t,n)},PromiseReject(e){return Promise.reject(e)},ReflectApply:Reflect.apply,RegExpPrototypeTest(e,t){return 
e.test(t)},SafeSet:Set,String,StringPrototypeSlice(e,t,n){return e.slice(t,n)},StringPrototypeToLowerCase(e){return e.toLowerCase()},StringPrototypeToUpperCase(e){return e.toUpperCase()},StringPrototypeTrim(e){return e.trim()},Symbol,SymbolAsyncIterator:Symbol.asyncIterator,SymbolHasInstance:Symbol.hasInstance,SymbolIterator:Symbol.iterator,TypedArrayPrototypeSet(e,t,n){return e.set(t,n)},Uint8Array}});var j=g((Kf,Je)=>{"use strict";var Ai=__buffer$,mi=Object.getPrototypeOf(async function(){}).constructor,Ht=Blob||Ai.Blob,Ti=typeof Ht<"u"?function(t){return t instanceof Ht}:function(t){return!1},Xe=class extends Error{constructor(t){if(!Array.isArray(t))throw new TypeError(`Expected input to be an Array, got ${typeof t}`);let n="";for(let r=0;r{e=r,t=i}),resolve:e,reject:t}},promisify(e){return new Promise((t,n)=>{e((r,...i)=>r?n(r):t(...i))})},debuglog(){return function(){}},format(e,...t){return e.replace(/%([sdifj])/g,function(...[n,r]){let i=t.shift();return r==="f"?i.toFixed(6):r==="j"?JSON.stringify(i):r==="s"&&typeof i=="object"?`${i.constructor!==Object?i.constructor.name:""} {}`.trim():i.toString()})},inspect(e){switch(typeof e){case"string":if(e.includes("'"))if(e.includes('"')){if(!e.includes("`")&&!e.includes("${"))return`\`${e}\``}else return`"${e}"`;return`'${e}'`;case"number":return isNaN(e)?"NaN":Object.is(e,-0)?String(e):e;case"bigint":return`${String(e)}n`;case"boolean":case"undefined":return String(e);case"object":return"{}"}},types:{isAsyncFunction(e){return e instanceof mi},isArrayBufferView(e){return ArrayBuffer.isView(e)}},isBlob:Ti};Je.exports.promisify.custom=Symbol.for("nodejs.util.promisify.custom")});var O=g((zf,Kt)=>{"use strict";var{format:Ii,inspect:Re,AggregateError:Mi}=j(),Ni=globalThis.AggregateError||Mi,Di=Symbol("kIsNodeError"),Oi=["string","function","number","object","Function","Object","boolean","bigint","symbol"],qi=/^([A-Z][a-z0-9]*)+$/,xi="__node_internal_",Ae={};function X(e,t){if(!e)throw new 
Ae.ERR_INTERNAL_ASSERTION(t)}function Vt(e){let t="",n=e.length,r=e[0]==="-"?1:0;for(;n>=r+4;n-=3)t=`_${e.slice(n-3,n)}${t}`;return`${e.slice(0,n)}${t}`}function Li(e,t,n){if(typeof t=="function")return X(t.length<=n.length,`Code: ${e}; The provided arguments length (${n.length}) does not match the required ones (${t.length}).`),t(...n);let r=(t.match(/%[dfijoOs]/g)||[]).length;return X(r===n.length,`Code: ${e}; The provided arguments length (${n.length}) does not match the required ones (${r}).`),n.length===0?t:Ii(t,...n)}function N(e,t,n){n||(n=Error);class r extends n{constructor(...o){super(Li(e,t,o))}toString(){return`${this.name} [${e}]: ${this.message}`}}Object.defineProperties(r.prototype,{name:{value:n.name,writable:!0,enumerable:!1,configurable:!0},toString:{value(){return`${this.name} [${e}]: ${this.message}`},writable:!0,enumerable:!1,configurable:!0}}),r.prototype.code=e,r.prototype[Di]=!0,Ae[e]=r}function Yt(e){let t=xi+e.name;return Object.defineProperty(e,"name",{value:t}),e}function Pi(e,t){if(e&&t&&e!==t){if(Array.isArray(t.errors))return t.errors.push(e),t;let n=new Ni([t,e],t.message);return n.code=t.code,n}return e||t}var Qe=class extends Error{constructor(t="The operation was aborted",n=void 0){if(n!==void 0&&typeof n!="object")throw new Ae.ERR_INVALID_ARG_TYPE("options","Object",n);super(t,n),this.code="ABORT_ERR",this.name="AbortError"}};N("ERR_ASSERTION","%s",Error);N("ERR_INVALID_ARG_TYPE",(e,t,n)=>{X(typeof e=="string","'name' must be a string"),Array.isArray(t)||(t=[t]);let r="The ";e.endsWith(" argument")?r+=`${e} `:r+=`"${e}" ${e.includes(".")?"property":"argument"} `,r+="must be ";let i=[],o=[],l=[];for(let f of t)X(typeof f=="string","All expected entries have to be of type string"),Oi.includes(f)?i.push(f.toLowerCase()):qi.test(f)?o.push(f):(X(f!=="object",'The value "object" should be written as "Object"'),l.push(f));if(o.length>0){let 
f=i.indexOf("object");f!==-1&&(i.splice(i,f,1),o.push("Object"))}if(i.length>0){switch(i.length){case 1:r+=`of type ${i[0]}`;break;case 2:r+=`one of type ${i[0]} or ${i[1]}`;break;default:{let f=i.pop();r+=`one of type ${i.join(", ")}, or ${f}`}}(o.length>0||l.length>0)&&(r+=" or ")}if(o.length>0){switch(o.length){case 1:r+=`an instance of ${o[0]}`;break;case 2:r+=`an instance of ${o[0]} or ${o[1]}`;break;default:{let f=o.pop();r+=`an instance of ${o.join(", ")}, or ${f}`}}l.length>0&&(r+=" or ")}switch(l.length){case 0:break;case 1:l[0].toLowerCase()!==l[0]&&(r+="an "),r+=`${l[0]}`;break;case 2:r+=`one of ${l[0]} or ${l[1]}`;break;default:{let f=l.pop();r+=`one of ${l.join(", ")}, or ${f}`}}if(n==null)r+=`. Received ${n}`;else if(typeof n=="function"&&n.name)r+=`. Received function ${n.name}`;else if(typeof n=="object"){var u;(u=n.constructor)!==null&&u!==void 0&&u.name?r+=`. Received an instance of ${n.constructor.name}`:r+=`. Received ${Re(n,{depth:-1})}`}else{let f=Re(n,{colors:!1});f.length>25&&(f=`${f.slice(0,25)}...`),r+=`. Received type ${typeof n} (${f})`}return r},TypeError);N("ERR_INVALID_ARG_VALUE",(e,t,n="is invalid")=>{let r=Re(t);return r.length>128&&(r=r.slice(0,128)+"..."),`The ${e.includes(".")?"property":"argument"} '${e}' ${n}. 
Received ${r}`},TypeError);N("ERR_INVALID_RETURN_VALUE",(e,t,n)=>{var r;let i=n!=null&&(r=n.constructor)!==null&&r!==void 0&&r.name?`instance of ${n.constructor.name}`:`type ${typeof n}`;return`Expected ${e} to be returned from the "${t}" function but got ${i}.`},TypeError);N("ERR_MISSING_ARGS",(...e)=>{X(e.length>0,"At least one arg needs to be specified");let t,n=e.length;switch(e=(Array.isArray(e)?e:[e]).map(r=>`"${r}"`).join(" or "),n){case 1:t+=`The ${e[0]} argument`;break;case 2:t+=`The ${e[0]} and ${e[1]} arguments`;break;default:{let r=e.pop();t+=`The ${e.join(", ")}, and ${r} arguments`}break}return`${t} must be specified`},TypeError);N("ERR_OUT_OF_RANGE",(e,t,n)=>{X(t,'Missing "range" argument');let r;return Number.isInteger(n)&&Math.abs(n)>2**32?r=Vt(String(n)):typeof n=="bigint"?(r=String(n),(n>2n**32n||n<-(2n**32n))&&(r=Vt(r)),r+="n"):r=Re(n),`The value of "${e}" is out of range. It must be ${t}. Received ${r}`},RangeError);N("ERR_MULTIPLE_CALLBACK","Callback called multiple times",Error);N("ERR_METHOD_NOT_IMPLEMENTED","The %s method is not implemented",Error);N("ERR_STREAM_ALREADY_FINISHED","Cannot call %s after a stream was finished",Error);N("ERR_STREAM_CANNOT_PIPE","Cannot pipe, not readable",Error);N("ERR_STREAM_DESTROYED","Cannot call %s after a stream was destroyed",Error);N("ERR_STREAM_NULL_VALUES","May not write null values to stream",TypeError);N("ERR_STREAM_PREMATURE_CLOSE","Premature close",Error);N("ERR_STREAM_PUSH_AFTER_EOF","stream.push() after EOF",Error);N("ERR_STREAM_UNSHIFT_AFTER_END_EVENT","stream.unshift() after end event",Error);N("ERR_STREAM_WRITE_AFTER_END","write after end",Error);N("ERR_UNKNOWN_ENCODING","Unknown encoding: %s",TypeError);Kt.exports={AbortError:Qe,aggregateTwoErrors:Yt(Pi),hideStackFrames:Yt,codes:Ae}});var _e=g((Xf,nn)=>{"use 
strict";var{ArrayIsArray:Jt,ArrayPrototypeIncludes:Qt,ArrayPrototypeJoin:Zt,ArrayPrototypeMap:ki,NumberIsInteger:et,NumberIsNaN:Wi,NumberMAX_SAFE_INTEGER:Ci,NumberMIN_SAFE_INTEGER:ji,NumberParseInt:$i,ObjectPrototypeHasOwnProperty:vi,RegExpPrototypeExec:Fi,String:Ui,StringPrototypeToUpperCase:Bi,StringPrototypeTrim:Gi}=m(),{hideStackFrames:k,codes:{ERR_SOCKET_BAD_PORT:Hi,ERR_INVALID_ARG_TYPE:q,ERR_INVALID_ARG_VALUE:me,ERR_OUT_OF_RANGE:J,ERR_UNKNOWN_SIGNAL:zt}}=O(),{normalizeEncoding:Vi}=j(),{isAsyncFunction:Yi,isArrayBufferView:Ki}=j().types,Xt={};function zi(e){return e===(e|0)}function Xi(e){return e===e>>>0}var Ji=/^[0-7]+$/,Qi="must be a 32-bit unsigned integer or an octal string";function Zi(e,t,n){if(typeof e>"u"&&(e=n),typeof e=="string"){if(Fi(Ji,e)===null)throw new me(t,e,Qi);e=$i(e,8)}return en(e,t),e}var eo=k((e,t,n=ji,r=Ci)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);if(er)throw new J(t,`>= ${n} && <= ${r}`,e)}),to=k((e,t,n=-2147483648,r=2147483647)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);if(er)throw new J(t,`>= ${n} && <= ${r}`,e)}),en=k((e,t,n=!1)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);let r=n?1:0,i=4294967295;if(ei)throw new J(t,`>= ${r} && <= ${i}`,e)});function tn(e,t){if(typeof e!="string")throw new q(t,"string",e)}function no(e,t,n=void 0,r){if(typeof e!="number")throw new q(t,"number",e);if(n!=null&&er||(n!=null||r!=null)&&Wi(e))throw new J(t,`${n!=null?`>= ${n}`:""}${n!=null&&r!=null?" 
&& ":""}${r!=null?`<= ${r}`:""}`,e)}var ro=k((e,t,n)=>{if(!Qt(n,e)){let r=Zt(ki(n,o=>typeof o=="string"?`'${o}'`:Ui(o)),", "),i="must be one of: "+r;throw new me(t,e,i)}});function io(e,t){if(typeof e!="boolean")throw new q(t,"boolean",e)}function Ze(e,t,n){return e==null||!vi(e,t)?n:e[t]}var oo=k((e,t,n=null)=>{let r=Ze(n,"allowArray",!1),i=Ze(n,"allowFunction",!1);if(!Ze(n,"nullable",!1)&&e===null||!r&&Jt(e)||typeof e!="object"&&(!i||typeof e!="function"))throw new q(t,"Object",e)}),lo=k((e,t,n=0)=>{if(!Jt(e))throw new q(t,"Array",e);if(e.length{if(!Ki(e))throw new q(t,["Buffer","TypedArray","DataView"],e)});function uo(e,t){let n=Vi(t),r=e.length;if(n==="hex"&&r%2!==0)throw new me("encoding",t,`is invalid for data of length ${r}`)}function so(e,t="Port",n=!0){if(typeof e!="number"&&typeof e!="string"||typeof e=="string"&&Gi(e).length===0||+e!==+e>>>0||e>65535||e===0&&!n)throw new Hi(t,e,n);return e|0}var co=k((e,t)=>{if(e!==void 0&&(e===null||typeof e!="object"||!("aborted"in e)))throw new q(t,"AbortSignal",e)}),ho=k((e,t)=>{if(typeof e!="function")throw new q(t,"Function",e)}),bo=k((e,t)=>{if(typeof e!="function"||Yi(e))throw new q(t,"Function",e)}),_o=k((e,t)=>{if(e!==void 0)throw new q(t,"undefined",e)});function po(e,t,n){if(!Qt(n,e))throw new q(t,`('${Zt(n,"|")}')`,e)}nn.exports={isInt32:zi,isUint32:Xi,parseFileMode:Zi,validateArray:lo,validateBoolean:io,validateBuffer:fo,validateEncoding:uo,validateFunction:ho,validateInt32:to,validateInteger:eo,validateNumber:no,validateObject:oo,validateOneOf:ro,validatePlainFunction:bo,validatePort:so,validateSignalName:ao,validateString:tn,validateUint32:en,validateUndefined:_o,validateUnion:po,validateAbortSignal:co}});var V=g((Jf,_n)=>{"use strict";var{Symbol:Te,SymbolAsyncIterator:rn,SymbolIterator:on}=m(),ln=Te("kDestroyed"),an=Te("kIsErrored"),tt=Te("kIsReadable"),fn=Te("kIsDisturbed");function Ie(e,t=!1){var n;return!!(e&&typeof e.pipe=="function"&&typeof e.on=="function"&&(!t||typeof e.pause=="function"&&typeof 
e.resume=="function")&&(!e._writableState||((n=e._readableState)===null||n===void 0?void 0:n.readable)!==!1)&&(!e._writableState||e._readableState))}function Me(e){var t;return!!(e&&typeof e.write=="function"&&typeof e.on=="function"&&(!e._readableState||((t=e._writableState)===null||t===void 0?void 0:t.writable)!==!1))}function wo(e){return!!(e&&typeof e.pipe=="function"&&e._readableState&&typeof e.on=="function"&&typeof e.write=="function")}function Q(e){return e&&(e._readableState||e._writableState||typeof e.write=="function"&&typeof e.on=="function"||typeof e.pipe=="function"&&typeof e.on=="function")}function yo(e,t){return e==null?!1:t===!0?typeof e[rn]=="function":t===!1?typeof e[on]=="function":typeof e[rn]=="function"||typeof e[on]=="function"}function Ne(e){if(!Q(e))return null;let t=e._writableState,n=e._readableState,r=t||n;return!!(e.destroyed||e[ln]||r!=null&&r.destroyed)}function un(e){if(!Me(e))return null;if(e.writableEnded===!0)return!0;let t=e._writableState;return t!=null&&t.errored?!1:typeof t?.ended!="boolean"?null:t.ended}function go(e,t){if(!Me(e))return null;if(e.writableFinished===!0)return!0;let n=e._writableState;return n!=null&&n.errored?!1:typeof n?.finished!="boolean"?null:!!(n.finished||t===!1&&n.ended===!0&&n.length===0)}function So(e){if(!Ie(e))return null;if(e.readableEnded===!0)return!0;let t=e._readableState;return!t||t.errored?!1:typeof t?.ended!="boolean"?null:t.ended}function sn(e,t){if(!Ie(e))return null;let n=e._readableState;return n!=null&&n.errored?!1:typeof n?.endEmitted!="boolean"?null:!!(n.endEmitted||t===!1&&n.ended===!0&&n.length===0)}function dn(e){return e&&e[tt]!=null?e[tt]:typeof e?.readable!="boolean"?null:Ne(e)?!1:Ie(e)&&e.readable&&!sn(e)}function cn(e){return typeof e?.writable!="boolean"?null:Ne(e)?!1:Me(e)&&e.writable&&!un(e)}function Eo(e,t){return Q(e)?Ne(e)?!0:!(t?.readable!==!1&&dn(e)||t?.writable!==!1&&cn(e)):null}function Ro(e){var t,n;return 
Q(e)?e.writableErrored?e.writableErrored:(t=(n=e._writableState)===null||n===void 0?void 0:n.errored)!==null&&t!==void 0?t:null:null}function Ao(e){var t,n;return Q(e)?e.readableErrored?e.readableErrored:(t=(n=e._readableState)===null||n===void 0?void 0:n.errored)!==null&&t!==void 0?t:null:null}function mo(e){if(!Q(e))return null;if(typeof e.closed=="boolean")return e.closed;let t=e._writableState,n=e._readableState;return typeof t?.closed=="boolean"||typeof n?.closed=="boolean"?t?.closed||n?.closed:typeof e._closed=="boolean"&&hn(e)?e._closed:null}function hn(e){return typeof e._closed=="boolean"&&typeof e._defaultKeepAlive=="boolean"&&typeof e._removedConnection=="boolean"&&typeof e._removedContLen=="boolean"}function bn(e){return typeof e._sent100=="boolean"&&hn(e)}function To(e){var t;return typeof e._consuming=="boolean"&&typeof e._dumped=="boolean"&&((t=e.req)===null||t===void 0?void 0:t.upgradeOrConnect)===void 0}function Io(e){if(!Q(e))return null;let t=e._writableState,n=e._readableState,r=t||n;return!r&&bn(e)||!!(r&&r.autoDestroy&&r.emitClose&&r.closed===!1)}function Mo(e){var t;return!!(e&&((t=e[fn])!==null&&t!==void 0?t:e.readableDidRead||e.readableAborted))}function No(e){var t,n,r,i,o,l,u,f,a,c;return!!(e&&((t=(n=(r=(i=(o=(l=e[an])!==null&&l!==void 0?l:e.readableErrored)!==null&&o!==void 0?o:e.writableErrored)!==null&&i!==void 0?i:(u=e._readableState)===null||u===void 0?void 0:u.errorEmitted)!==null&&r!==void 0?r:(f=e._writableState)===null||f===void 0?void 0:f.errorEmitted)!==null&&n!==void 0?n:(a=e._readableState)===null||a===void 0?void 0:a.errored)!==null&&t!==void 0?t:(c=e._writableState)===null||c===void 0?void 
0:c.errored))}_n.exports={kDestroyed:ln,isDisturbed:Mo,kIsDisturbed:fn,isErrored:No,kIsErrored:an,isReadable:dn,kIsReadable:tt,isClosed:mo,isDestroyed:Ne,isDuplexNodeStream:wo,isFinished:Eo,isIterable:yo,isReadableNodeStream:Ie,isReadableEnded:So,isReadableFinished:sn,isReadableErrored:Ao,isNodeStream:Q,isWritable:cn,isWritableNodeStream:Me,isWritableEnded:un,isWritableFinished:go,isWritableErrored:Ro,isServerRequest:To,isServerResponse:bn,willEmitClose:Io}});var Y=g((Qf,rt)=>{var oe=__process$,{AbortError:Do,codes:Oo}=O(),{ERR_INVALID_ARG_TYPE:qo,ERR_STREAM_PREMATURE_CLOSE:pn}=Oo,{kEmptyObject:wn,once:yn}=j(),{validateAbortSignal:xo,validateFunction:Lo,validateObject:Po}=_e(),{Promise:ko}=m(),{isClosed:Wo,isReadable:gn,isReadableNodeStream:nt,isReadableFinished:Sn,isReadableErrored:Co,isWritable:En,isWritableNodeStream:Rn,isWritableFinished:An,isWritableErrored:jo,isNodeStream:$o,willEmitClose:vo}=V();function Fo(e){return e.setHeader&&typeof e.abort=="function"}var Uo=()=>{};function mn(e,t,n){var r,i;arguments.length===2?(n=t,t=wn):t==null?t=wn:Po(t,"options"),Lo(n,"callback"),xo(t.signal,"options.signal"),n=yn(n);let o=(r=t.readable)!==null&&r!==void 0?r:nt(e),l=(i=t.writable)!==null&&i!==void 0?i:Rn(e);if(!$o(e))throw new qo("stream","Stream",e);let u=e._writableState,f=e._readableState,a=()=>{e.writable||b()},c=vo(e)&&nt(e)===o&&Rn(e)===l,s=An(e,!1),b=()=>{s=!0,e.destroyed&&(c=!1),!(c&&(!e.readable||o))&&(!o||d)&&n.call(e)},d=Sn(e,!1),h=()=>{d=!0,e.destroyed&&(c=!1),!(c&&(!e.writable||l))&&(!l||s)&&n.call(e)},D=M=>{n.call(e,M)},L=Wo(e),_=()=>{L=!0;let M=jo(e)||Co(e);if(M&&typeof M!="boolean")return n.call(e,M);if(o&&!d&&nt(e,!0)&&!Sn(e,!1))return n.call(e,new pn);if(l&&!s&&!An(e,!1))return n.call(e,new pn);n.call(e)},p=()=>{e.req.on("finish",b)};Fo(e)?(e.on("complete",b),c||e.on("abort",_),e.req?p():e.on("request",p)):l&&!u&&(e.on("end",a),e.on("close",a)),!c&&typeof 
e.aborted=="boolean"&&e.on("aborted",_),e.on("end",h),e.on("finish",b),t.error!==!1&&e.on("error",D),e.on("close",_),L?oe.nextTick(_):u!=null&&u.errorEmitted||f!=null&&f.errorEmitted?c||oe.nextTick(_):(!o&&(!c||gn(e))&&(s||En(e)===!1)||!l&&(!c||En(e))&&(d||gn(e)===!1)||f&&e.req&&e.aborted)&&oe.nextTick(_);let I=()=>{n=Uo,e.removeListener("aborted",_),e.removeListener("complete",b),e.removeListener("abort",_),e.removeListener("request",p),e.req&&e.req.removeListener("finish",b),e.removeListener("end",a),e.removeListener("close",a),e.removeListener("finish",b),e.removeListener("end",h),e.removeListener("error",D),e.removeListener("close",_)};if(t.signal&&!L){let M=()=>{let F=n;I(),F.call(e,new Do(void 0,{cause:t.signal.reason}))};if(t.signal.aborted)oe.nextTick(M);else{let F=n;n=yn((...re)=>{t.signal.removeEventListener("abort",M),F.apply(e,re)}),t.signal.addEventListener("abort",M)}}return I}function Bo(e,t){return new ko((n,r)=>{mn(e,t,i=>{i?r(i):n()})})}rt.exports=mn;rt.exports.finished=Bo});var xn=g((Zf,lt)=>{"use strict";var Nn=AbortController,{codes:{ERR_INVALID_ARG_TYPE:pe,ERR_MISSING_ARGS:Go,ERR_OUT_OF_RANGE:Ho},AbortError:$}=O(),{validateAbortSignal:le,validateInteger:Vo,validateObject:ae}=_e(),Yo=m().Symbol("kWeak"),{finished:Ko}=Y(),{ArrayPrototypePush:zo,MathFloor:Xo,Number:Jo,NumberIsNaN:Qo,Promise:Tn,PromiseReject:In,PromisePrototypeThen:Zo,Symbol:Dn}=m(),De=Dn("kEmpty"),Mn=Dn("kEof");function Oe(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal");let n=1;return t?.concurrency!=null&&(n=Xo(t.concurrency)),Vo(n,"concurrency",1),async function*(){var i,o;let l=new Nn,u=this,f=[],a=l.signal,c={signal:a},s=()=>l.abort();t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted&&s(),t==null||(o=t.signal)===null||o===void 0||o.addEventListener("abort",s);let b,d,h=!1;function D(){h=!0}async function L(){try{for await(let I of u){var _;if(h)return;if(a.aborted)throw 
new $;try{I=e(I,c)}catch(M){I=In(M)}I!==De&&(typeof((_=I)===null||_===void 0?void 0:_.catch)=="function"&&I.catch(D),f.push(I),b&&(b(),b=null),!h&&f.length&&f.length>=n&&await new Tn(M=>{d=M}))}f.push(Mn)}catch(I){let M=In(I);Zo(M,void 0,D),f.push(M)}finally{var p;h=!0,b&&(b(),b=null),t==null||(p=t.signal)===null||p===void 0||p.removeEventListener("abort",s)}}L();try{for(;;){for(;f.length>0;){let _=await f[0];if(_===Mn)return;if(a.aborted)throw new $;_!==De&&(yield _),f.shift(),d&&(d(),d=null)}await new Tn(_=>{b=_})}}finally{l.abort(),h=!0,d&&(d(),d=null)}}.call(this)}function el(e=void 0){return e!=null&&ae(e,"options"),e?.signal!=null&&le(e.signal,"options.signal"),async function*(){let n=0;for await(let i of this){var r;if(e!=null&&(r=e.signal)!==null&&r!==void 0&&r.aborted)throw new $({cause:e.signal.reason});yield[n++,i]}}.call(this)}async function On(e,t=void 0){for await(let n of ot.call(this,e,t))return!0;return!1}async function tl(e,t=void 0){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);return!await On.call(this,async(...n)=>!await e(...n),t)}async function nl(e,t){for await(let n of ot.call(this,e,t))return n}async function rl(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);async function n(r,i){return await e(r,i),De}for await(let r of Oe.call(this,n,t));}function ot(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);async function n(r,i){return await e(r,i)?r:De}return Oe.call(this,n,t)}var it=class extends Go{constructor(){super("reduce"),this.message="Reduce of an empty stream requires an initial value"}};async function il(e,t,n){var r;if(typeof e!="function")throw new pe("reducer",["Function","AsyncFunction"],e);n!=null&&ae(n,"options"),n?.signal!=null&&le(n.signal,"options.signal");let i=arguments.length>1;if(n!=null&&(r=n.signal)!==null&&r!==void 0&&r.aborted){let a=new $(void 0,{cause:n.signal.reason});throw this.once("error",()=>{}),await 
Ko(this.destroy(a)),a}let o=new Nn,l=o.signal;if(n!=null&&n.signal){let a={once:!0,[Yo]:this};n.signal.addEventListener("abort",()=>o.abort(),a)}let u=!1;try{for await(let a of this){var f;if(u=!0,n!=null&&(f=n.signal)!==null&&f!==void 0&&f.aborted)throw new $;i?t=await e(t,a,{signal:l}):(t=a,i=!0)}if(!u&&!i)throw new it}finally{o.abort()}return t}async function ol(e){e!=null&&ae(e,"options"),e?.signal!=null&&le(e.signal,"options.signal");let t=[];for await(let r of this){var n;if(e!=null&&(n=e.signal)!==null&&n!==void 0&&n.aborted)throw new $(void 0,{cause:e.signal.reason});zo(t,r)}return t}function ll(e,t){let n=Oe.call(this,e,t);return async function*(){for await(let i of n)yield*i}.call(this)}function qn(e){if(e=Jo(e),Qo(e))return 0;if(e<0)throw new Ho("number",">= 0",e);return e}function al(e,t=void 0){return t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal"),e=qn(e),async function*(){var r;if(t!=null&&(r=t.signal)!==null&&r!==void 0&&r.aborted)throw new $;for await(let o of this){var i;if(t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted)throw new $;e--<=0&&(yield o)}}.call(this)}function fl(e,t=void 0){return t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal"),e=qn(e),async function*(){var r;if(t!=null&&(r=t.signal)!==null&&r!==void 0&&r.aborted)throw new $;for await(let o of this){var i;if(t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted)throw new $;if(e-- >0)yield o;else return}}.call(this)}lt.exports.streamReturningOperators={asIndexedPairs:el,drop:al,filter:ot,flatMap:ll,map:Oe,take:fl};lt.exports.promiseReturningOperators={every:tl,forEach:rl,reduce:il,toArray:ol,some:On,find:nl}});var Z=g((eu,vn)=>{"use strict";var K=__process$,{aggregateTwoErrors:ul,codes:{ERR_MULTIPLE_CALLBACK:sl},AbortError:dl}=O(),{Symbol:kn}=m(),{kDestroyed:cl,isDestroyed:hl,isFinished:bl,isServerRequest:_l}=V(),Wn=kn("kDestroy"),at=kn("kConstruct");function 
Cn(e,t,n){e&&(e.stack,t&&!t.errored&&(t.errored=e),n&&!n.errored&&(n.errored=e))}function pl(e,t){let n=this._readableState,r=this._writableState,i=r||n;return r&&r.destroyed||n&&n.destroyed?(typeof t=="function"&&t(),this):(Cn(e,r,n),r&&(r.destroyed=!0),n&&(n.destroyed=!0),i.constructed?Ln(this,e,t):this.once(Wn,function(o){Ln(this,ul(o,e),t)}),this)}function Ln(e,t,n){let r=!1;function i(o){if(r)return;r=!0;let l=e._readableState,u=e._writableState;Cn(o,u,l),u&&(u.closed=!0),l&&(l.closed=!0),typeof n=="function"&&n(o),o?K.nextTick(wl,e,o):K.nextTick(jn,e)}try{e._destroy(t||null,i)}catch(o){i(o)}}function wl(e,t){ft(e,t),jn(e)}function jn(e){let t=e._readableState,n=e._writableState;n&&(n.closeEmitted=!0),t&&(t.closeEmitted=!0),(n&&n.emitClose||t&&t.emitClose)&&e.emit("close")}function ft(e,t){let n=e._readableState,r=e._writableState;r&&r.errorEmitted||n&&n.errorEmitted||(r&&(r.errorEmitted=!0),n&&(n.errorEmitted=!0),e.emit("error",t))}function yl(){let e=this._readableState,t=this._writableState;e&&(e.constructed=!0,e.closed=!1,e.closeEmitted=!1,e.destroyed=!1,e.errored=null,e.errorEmitted=!1,e.reading=!1,e.ended=e.readable===!1,e.endEmitted=e.readable===!1),t&&(t.constructed=!0,t.destroyed=!1,t.closed=!1,t.closeEmitted=!1,t.errored=null,t.errorEmitted=!1,t.finalCalled=!1,t.prefinished=!1,t.ended=t.writable===!1,t.ending=t.writable===!1,t.finished=t.writable===!1)}function ut(e,t,n){let r=e._readableState,i=e._writableState;if(i&&i.destroyed||r&&r.destroyed)return this;r&&r.autoDestroy||i&&i.autoDestroy?e.destroy(t):t&&(t.stack,i&&!i.errored&&(i.errored=t),r&&!r.errored&&(r.errored=t),n?K.nextTick(ft,e,t):ft(e,t))}function gl(e,t){if(typeof e._construct!="function")return;let n=e._readableState,r=e._writableState;n&&(n.constructed=!1),r&&(r.constructed=!1),e.once(at,t),!(e.listenerCount(at)>1)&&K.nextTick(Sl,e)}function Sl(e){let t=!1;function n(r){if(t){ut(e,r??new sl);return}t=!0;let 
i=e._readableState,o=e._writableState,l=o||i;i&&(i.constructed=!0),o&&(o.constructed=!0),l.destroyed?e.emit(Wn,r):r?ut(e,r,!0):K.nextTick(El,e)}try{e._construct(n)}catch(r){n(r)}}function El(e){e.emit(at)}function Pn(e){return e&&e.setHeader&&typeof e.abort=="function"}function $n(e){e.emit("close")}function Rl(e,t){e.emit("error",t),K.nextTick($n,e)}function Al(e,t){!e||hl(e)||(!t&&!bl(e)&&(t=new dl),_l(e)?(e.socket=null,e.destroy(t)):Pn(e)?e.abort():Pn(e.req)?e.req.abort():typeof e.destroy=="function"?e.destroy(t):typeof e.close=="function"?e.close():t?K.nextTick(Rl,e,t):K.nextTick($n,e),e.destroyed||(e[cl]=!0))}vn.exports={construct:gl,destroyer:Al,destroy:pl,undestroy:yl,errorOrDestroy:ut}});var Le=g((tu,Un)=>{"use strict";var{ArrayIsArray:ml,ObjectSetPrototypeOf:Fn}=m(),{EventEmitter:qe}=__events$;function xe(e){qe.call(this,e)}Fn(xe.prototype,qe.prototype);Fn(xe,qe);xe.prototype.pipe=function(e,t){let n=this;function r(c){e.writable&&e.write(c)===!1&&n.pause&&n.pause()}n.on("data",r);function i(){n.readable&&n.resume&&n.resume()}e.on("drain",i),!e._isStdio&&(!t||t.end!==!1)&&(n.on("end",l),n.on("close",u));let o=!1;function l(){o||(o=!0,e.end())}function u(){o||(o=!0,typeof e.destroy=="function"&&e.destroy())}function f(c){a(),qe.listenerCount(this,"error")===0&&this.emit("error",c)}st(n,"error",f),st(e,"error",f);function a(){n.removeListener("data",r),e.removeListener("drain",i),n.removeListener("end",l),n.removeListener("close",u),n.removeListener("error",f),e.removeListener("error",f),n.removeListener("end",a),n.removeListener("close",a),e.removeListener("close",a)}return n.on("end",a),n.on("close",a),e.on("close",a),e.emit("pipe",n),e};function st(e,t,n){if(typeof e.prependListener=="function")return e.prependListener(t,n);!e._events||!e._events[t]?e.on(t,n):ml(e._events[t])?e._events[t].unshift(n):e._events[t]=[n,e._events[t]]}Un.exports={Stream:xe,prependListener:st}});var ke=g((nu,Pe)=>{"use 
strict";var{AbortError:Tl,codes:Il}=O(),Ml=Y(),{ERR_INVALID_ARG_TYPE:Bn}=Il,Nl=(e,t)=>{if(typeof e!="object"||!("aborted"in e))throw new Bn(t,"AbortSignal",e)};function Dl(e){return!!(e&&typeof e.pipe=="function")}Pe.exports.addAbortSignal=function(t,n){if(Nl(t,"signal"),!Dl(n))throw new Bn("stream","stream.Stream",n);return Pe.exports.addAbortSignalNoValidate(t,n)};Pe.exports.addAbortSignalNoValidate=function(e,t){if(typeof e!="object"||!("aborted"in e))return t;let n=()=>{t.destroy(new Tl(void 0,{cause:e.reason}))};return e.aborted?n():(e.addEventListener("abort",n),Ml(t,()=>e.removeEventListener("abort",n))),t}});var Vn=g((iu,Hn)=>{"use strict";var{StringPrototypeSlice:Gn,SymbolIterator:Ol,TypedArrayPrototypeSet:We,Uint8Array:ql}=m(),{Buffer:dt}=__buffer$,{inspect:xl}=j();Hn.exports=class{constructor(){this.head=null,this.tail=null,this.length=0}push(t){let n={data:t,next:null};this.length>0?this.tail.next=n:this.head=n,this.tail=n,++this.length}unshift(t){let n={data:t,next:this.head};this.length===0&&(this.tail=n),this.head=n,++this.length}shift(){if(this.length===0)return;let t=this.head.data;return this.length===1?this.head=this.tail=null:this.head=this.head.next,--this.length,t}clear(){this.head=this.tail=null,this.length=0}join(t){if(this.length===0)return"";let n=this.head,r=""+n.data;for(;(n=n.next)!==null;)r+=t+n.data;return r}concat(t){if(this.length===0)return dt.alloc(0);let n=dt.allocUnsafe(t>>>0),r=this.head,i=0;for(;r;)We(n,r.data,i),i+=r.data.length,r=r.next;return n}consume(t,n){let r=this.head.data;if(to.length)n+=o,t-=o.length;else{t===o.length?(n+=o,++i,r.next?this.head=r.next:this.head=this.tail=null):(n+=Gn(o,0,t),this.head=r,r.data=Gn(o,t));break}++i}while((r=r.next)!==null);return this.length-=i,n}_getBuffer(t){let n=dt.allocUnsafe(t),r=t,i=this.head,o=0;do{let l=i.data;if(t>l.length)We(n,l,r-t),t-=l.length;else{t===l.length?(We(n,l,r-t),++o,i.next?this.head=i.next:this.head=this.tail=null):(We(n,new 
ql(l.buffer,l.byteOffset,t),r-t),this.head=i,i.data=l.slice(t));break}++o}while((i=i.next)!==null);return this.length-=o,n}[Symbol.for("nodejs.util.inspect.custom")](t,n){return xl(this,{...n,depth:0,customInspect:!1})}}});var Ce=g((ou,Kn)=>{"use strict";var{MathFloor:Ll,NumberIsInteger:Pl}=m(),{ERR_INVALID_ARG_VALUE:kl}=O().codes;function Wl(e,t,n){return e.highWaterMark!=null?e.highWaterMark:t?e[n]:null}function Yn(e){return e?16:16*1024}function Cl(e,t,n,r){let i=Wl(t,r,n);if(i!=null){if(!Pl(i)||i<0){let o=r?`options.${n}`:"options.highWaterMark";throw new kl(o,i)}return Ll(i)}return Yn(e.objectMode)}Kn.exports={getHighWaterMark:Cl,getDefaultHighWaterMark:Yn}});var ct=g((lu,Qn)=>{"use strict";var zn=__process$,{PromisePrototypeThen:jl,SymbolAsyncIterator:Xn,SymbolIterator:Jn}=m(),{Buffer:$l}=__buffer$,{ERR_INVALID_ARG_TYPE:vl,ERR_STREAM_NULL_VALUES:Fl}=O().codes;function Ul(e,t,n){let r;if(typeof t=="string"||t instanceof $l)return new e({objectMode:!0,...n,read(){this.push(t),this.push(null)}});let i;if(t&&t[Xn])i=!0,r=t[Xn]();else if(t&&t[Jn])i=!1,r=t[Jn]();else throw new vl("iterable",["Iterable"],t);let o=new e({objectMode:!0,highWaterMark:1,...n}),l=!1;o._read=function(){l||(l=!0,f())},o._destroy=function(a,c){jl(u(a),()=>zn.nextTick(c,a),s=>zn.nextTick(c,s||a))};async function u(a){let c=a!=null,s=typeof r.throw=="function";if(c&&s){let{value:b,done:d}=await r.throw(a);if(await b,d)return}if(typeof r.return=="function"){let{value:b}=await r.return();await b}}async function f(){for(;;){try{let{value:a,done:c}=i?await r.next():r.next();if(c)o.push(null);else{let s=a&&typeof a.then=="function"?await a:a;if(s===null)throw l=!1,new Fl;if(o.push(s))continue;l=!1}}catch(a){o.destroy(a)}break}}return o}Qn.exports=Ul});var we=g((au,dr)=>{var 
W=__process$,{ArrayPrototypeIndexOf:Bl,NumberIsInteger:Gl,NumberIsNaN:Hl,NumberParseInt:Vl,ObjectDefineProperties:tr,ObjectKeys:Yl,ObjectSetPrototypeOf:nr,Promise:Kl,SafeSet:zl,SymbolAsyncIterator:Xl,Symbol:Jl}=m();dr.exports=w;w.ReadableState=yt;var{EventEmitter:Ql}=__events$,{Stream:z,prependListener:Zl}=Le(),{Buffer:ht}=__buffer$,{addAbortSignal:ea}=ke(),ta=Y(),y=j().debuglog("stream",e=>{y=e}),na=Vn(),ue=Z(),{getHighWaterMark:ra,getDefaultHighWaterMark:ia}=Ce(),{aggregateTwoErrors:Zn,codes:{ERR_INVALID_ARG_TYPE:oa,ERR_METHOD_NOT_IMPLEMENTED:la,ERR_OUT_OF_RANGE:aa,ERR_STREAM_PUSH_AFTER_EOF:fa,ERR_STREAM_UNSHIFT_AFTER_END_EVENT:ua}}=O(),{validateObject:sa}=_e(),ee=Jl("kPaused"),{StringDecoder:rr}=__string_decoder$,da=ct();nr(w.prototype,z.prototype);nr(w,z);var bt=()=>{},{errorOrDestroy:fe}=ue;function yt(e,t,n){typeof n!="boolean"&&(n=t instanceof v()),this.objectMode=!!(e&&e.objectMode),n&&(this.objectMode=this.objectMode||!!(e&&e.readableObjectMode)),this.highWaterMark=e?ra(this,e,"readableHighWaterMark",n):ia(!1),this.buffer=new na,this.length=0,this.pipes=[],this.flowing=null,this.ended=!1,this.endEmitted=!1,this.reading=!1,this.constructed=!0,this.sync=!0,this.needReadable=!1,this.emittedReadable=!1,this.readableListening=!1,this.resumeScheduled=!1,this[ee]=null,this.errorEmitted=!1,this.emitClose=!e||e.emitClose!==!1,this.autoDestroy=!e||e.autoDestroy!==!1,this.destroyed=!1,this.errored=null,this.closed=!1,this.closeEmitted=!1,this.defaultEncoding=e&&e.defaultEncoding||"utf8",this.awaitDrainWriters=null,this.multiAwaitDrain=!1,this.readingMore=!1,this.dataEmitted=!1,this.decoder=null,this.encoding=null,e&&e.encoding&&(this.decoder=new rr(e.encoding),this.encoding=e.encoding)}function w(e){if(!(this instanceof w))return new w(e);let t=this instanceof v();this._readableState=new yt(e,this,t),e&&(typeof e.read=="function"&&(this._read=e.read),typeof e.destroy=="function"&&(this._destroy=e.destroy),typeof 
e.construct=="function"&&(this._construct=e.construct),e.signal&&!t&&ea(e.signal,this)),z.call(this,e),ue.construct(this,()=>{this._readableState.needReadable&&je(this,this._readableState)})}w.prototype.destroy=ue.destroy;w.prototype._undestroy=ue.undestroy;w.prototype._destroy=function(e,t){t(e)};w.prototype[Ql.captureRejectionSymbol]=function(e){this.destroy(e)};w.prototype.push=function(e,t){return ir(this,e,t,!1)};w.prototype.unshift=function(e,t){return ir(this,e,t,!0)};function ir(e,t,n,r){y("readableAddChunk",t);let i=e._readableState,o;if(i.objectMode||(typeof t=="string"?(n=n||i.defaultEncoding,i.encoding!==n&&(r&&i.encoding?t=ht.from(t,n).toString(i.encoding):(t=ht.from(t,n),n=""))):t instanceof ht?n="":z._isUint8Array(t)?(t=z._uint8ArrayToBuffer(t),n=""):t!=null&&(o=new oa("chunk",["string","Buffer","Uint8Array"],t))),o)fe(e,o);else if(t===null)i.reading=!1,ba(e,i);else if(i.objectMode||t&&t.length>0)if(r)if(i.endEmitted)fe(e,new ua);else{if(i.destroyed||i.errored)return!1;_t(e,i,t,!0)}else if(i.ended)fe(e,new fa);else{if(i.destroyed||i.errored)return!1;i.reading=!1,i.decoder&&!n?(t=i.decoder.write(t),i.objectMode||t.length!==0?_t(e,i,t,!1):je(e,i)):_t(e,i,t,!1)}else r||(i.reading=!1,je(e,i));return!i.ended&&(i.length0?(t.multiAwaitDrain?t.awaitDrainWriters.clear():t.awaitDrainWriters=null,t.dataEmitted=!0,e.emit("data",n)):(t.length+=t.objectMode?1:n.length,r?t.buffer.unshift(n):t.buffer.push(n),t.needReadable&&$e(e)),je(e,t)}w.prototype.isPaused=function(){let e=this._readableState;return e[ee]===!0||e.flowing===!1};w.prototype.setEncoding=function(e){let t=new rr(e);this._readableState.decoder=t,this._readableState.encoding=this._readableState.decoder.encoding;let n=this._readableState.buffer,r="";for(let i of n)r+=t.write(i);return n.clear(),r!==""&&n.push(r),this._readableState.length=r.length,this};var ca=1073741824;function ha(e){if(e>ca)throw new aa("size","<= 1GiB",e);return e--,e|=e>>>1,e|=e>>>2,e|=e>>>4,e|=e>>>8,e|=e>>>16,e++,e}function 
er(e,t){return e<=0||t.length===0&&t.ended?0:t.objectMode?1:Hl(e)?t.flowing&&t.length?t.buffer.first().length:t.length:e<=t.length?e:t.ended?t.length:0}w.prototype.read=function(e){y("read",e),e===void 0?e=NaN:Gl(e)||(e=Vl(e,10));let t=this._readableState,n=e;if(e>t.highWaterMark&&(t.highWaterMark=ha(e)),e!==0&&(t.emittedReadable=!1),e===0&&t.needReadable&&((t.highWaterMark!==0?t.length>=t.highWaterMark:t.length>0)||t.ended))return y("read: emitReadable",t.length,t.ended),t.length===0&&t.ended?pt(this):$e(this),null;if(e=er(e,t),e===0&&t.ended)return t.length===0&&pt(this),null;let r=t.needReadable;if(y("need readable",r),(t.length===0||t.length-e0?i=ur(e,t):i=null,i===null?(t.needReadable=t.length<=t.highWaterMark,e=0):(t.length-=e,t.multiAwaitDrain?t.awaitDrainWriters.clear():t.awaitDrainWriters=null),t.length===0&&(t.ended||(t.needReadable=!0),n!==e&&t.ended&&pt(this)),i!==null&&!t.errorEmitted&&!t.closeEmitted&&(t.dataEmitted=!0,this.emit("data",i)),i};function ba(e,t){if(y("onEofChunk"),!t.ended){if(t.decoder){let n=t.decoder.end();n&&n.length&&(t.buffer.push(n),t.length+=t.objectMode?1:n.length)}t.ended=!0,t.sync?$e(e):(t.needReadable=!1,t.emittedReadable=!0,or(e))}}function $e(e){let t=e._readableState;y("emitReadable",t.needReadable,t.emittedReadable),t.needReadable=!1,t.emittedReadable||(y("emitReadable",t.flowing),t.emittedReadable=!0,W.nextTick(or,e))}function or(e){let t=e._readableState;y("emitReadable_",t.destroyed,t.length,t.ended),!t.destroyed&&!t.errored&&(t.length||t.ended)&&(e.emit("readable"),t.emittedReadable=!1),t.needReadable=!t.flowing&&!t.ended&&t.length<=t.highWaterMark,ar(e)}function je(e,t){!t.readingMore&&t.constructed&&(t.readingMore=!0,W.nextTick(_a,e,t))}function _a(e,t){for(;!t.reading&&!t.ended&&(t.length1&&r.pipes.includes(e)&&(y("false write response, pause",r.awaitDrainWriters.size),r.awaitDrainWriters.add(e)),n.pause()),f||(f=pa(n,e),e.on("drain",f))}n.on("data",b);function b(_){y("ondata");let 
p=e.write(_);y("dest.write",p),p===!1&&s()}function d(_){if(y("onerror",_),L(),e.removeListener("error",d),e.listenerCount("error")===0){let p=e._writableState||e._readableState;p&&!p.errorEmitted?fe(e,_):e.emit("error",_)}}Zl(e,"error",d);function h(){e.removeListener("finish",D),L()}e.once("close",h);function D(){y("onfinish"),e.removeListener("close",h),L()}e.once("finish",D);function L(){y("unpipe"),n.unpipe(e)}return e.emit("pipe",n),e.writableNeedDrain===!0?r.flowing&&s():r.flowing||(y("pipe resume"),n.resume()),e};function pa(e,t){return function(){let r=e._readableState;r.awaitDrainWriters===t?(y("pipeOnDrain",1),r.awaitDrainWriters=null):r.multiAwaitDrain&&(y("pipeOnDrain",r.awaitDrainWriters.size),r.awaitDrainWriters.delete(t)),(!r.awaitDrainWriters||r.awaitDrainWriters.size===0)&&e.listenerCount("data")&&e.resume()}}w.prototype.unpipe=function(e){let t=this._readableState,n={hasUnpiped:!1};if(t.pipes.length===0)return this;if(!e){let i=t.pipes;t.pipes=[],this.pause();for(let o=0;o0,r.flowing!==!1&&this.resume()):e==="readable"&&!r.endEmitted&&!r.readableListening&&(r.readableListening=r.needReadable=!0,r.flowing=!1,r.emittedReadable=!1,y("on readable",r.length,r.reading),r.length?$e(this):r.reading||W.nextTick(wa,this)),n};w.prototype.addListener=w.prototype.on;w.prototype.removeListener=function(e,t){let n=z.prototype.removeListener.call(this,e,t);return e==="readable"&&W.nextTick(lr,this),n};w.prototype.off=w.prototype.removeListener;w.prototype.removeAllListeners=function(e){let t=z.prototype.removeAllListeners.apply(this,arguments);return(e==="readable"||e===void 0)&&W.nextTick(lr,this),t};function lr(e){let t=e._readableState;t.readableListening=e.listenerCount("readable")>0,t.resumeScheduled&&t[ee]===!1?t.flowing=!0:e.listenerCount("data")>0?e.resume():t.readableListening||(t.flowing=null)}function wa(e){y("readable nexttick read 0"),e.read(0)}w.prototype.resume=function(){let e=this._readableState;return 
e.flowing||(y("resume"),e.flowing=!e.readableListening,ya(this,e)),e[ee]=!1,this};function ya(e,t){t.resumeScheduled||(t.resumeScheduled=!0,W.nextTick(ga,e,t))}function ga(e,t){y("resume",t.reading),t.reading||e.read(0),t.resumeScheduled=!1,e.emit("resume"),ar(e),t.flowing&&!t.reading&&e.read(0)}w.prototype.pause=function(){return y("call pause flowing=%j",this._readableState.flowing),this._readableState.flowing!==!1&&(y("pause"),this._readableState.flowing=!1,this.emit("pause")),this._readableState[ee]=!0,this};function ar(e){let t=e._readableState;for(y("flow",t.flowing);t.flowing&&e.read()!==null;);}w.prototype.wrap=function(e){let t=!1;e.on("data",r=>{!this.push(r)&&e.pause&&(t=!0,e.pause())}),e.on("end",()=>{this.push(null)}),e.on("error",r=>{fe(this,r)}),e.on("close",()=>{this.destroy()}),e.on("destroy",()=>{this.destroy()}),this._read=()=>{t&&e.resume&&(t=!1,e.resume())};let n=Yl(e);for(let r=1;r{i=l?Zn(i,l):null,n(),n=bt});try{for(;;){let l=e.destroyed?null:e.read();if(l!==null)yield l;else{if(i)throw i;if(i===null)return;await new Kl(r)}}}catch(l){throw i=Zn(i,l),i}finally{(i||t?.destroyOnReturn!==!1)&&(i===void 0||e._readableState.autoDestroy)?ue.destroyer(e,null):(e.off("readable",r),o())}}tr(w.prototype,{readable:{__proto__:null,get(){let e=this._readableState;return!!e&&e.readable!==!1&&!e.destroyed&&!e.errorEmitted&&!e.endEmitted},set(e){this._readableState&&(this._readableState.readable=!!e)}},readableDidRead:{__proto__:null,enumerable:!1,get:function(){return this._readableState.dataEmitted}},readableAborted:{__proto__:null,enumerable:!1,get:function(){return!!(this._readableState.readable!==!1&&(this._readableState.destroyed||this._readableState.errored)&&!this._readableState.endEmitted)}},readableHighWaterMark:{__proto__:null,enumerable:!1,get:function(){return this._readableState.highWaterMark}},readableBuffer:{__proto__:null,enumerable:!1,get:function(){return 
this._readableState&&this._readableState.buffer}},readableFlowing:{__proto__:null,enumerable:!1,get:function(){return this._readableState.flowing},set:function(e){this._readableState&&(this._readableState.flowing=e)}},readableLength:{__proto__:null,enumerable:!1,get(){return this._readableState.length}},readableObjectMode:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.objectMode:!1}},readableEncoding:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.encoding:null}},errored:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.errored:null}},closed:{__proto__:null,get(){return this._readableState?this._readableState.closed:!1}},destroyed:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.destroyed:!1},set(e){!this._readableState||(this._readableState.destroyed=e)}},readableEnded:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.endEmitted:!1}}});tr(yt.prototype,{pipesCount:{__proto__:null,get(){return this.pipes.length}},paused:{__proto__:null,get(){return this[ee]!==!1},set(e){this[ee]=!!e}}});w._fromList=ur;function ur(e,t){if(t.length===0)return null;let n;return t.objectMode?n=t.buffer.shift():!e||e>=t.length?(t.decoder?n=t.buffer.join(""):t.buffer.length===1?n=t.buffer.first():n=t.buffer.concat(t.length),t.buffer.clear()):n=t.buffer.consume(e,t.decoder),n}function pt(e){let t=e._readableState;y("endReadable",t.endEmitted),t.endEmitted||(t.ended=!0,W.nextTick(Ea,t,e))}function Ea(e,t){if(y("endReadableNT",e.endEmitted,e.length),!e.errored&&!e.closeEmitted&&!e.endEmitted&&e.length===0){if(e.endEmitted=!0,t.emit("end"),t.writable&&t.allowHalfOpen===!1)W.nextTick(Ra,t);else if(e.autoDestroy){let n=t._writableState;(!n||n.autoDestroy&&(n.finished||n.writable===!1))&&t.destroy()}}}function Ra(e){e.writable&&!e.writableEnded&&!e.destroyed&&e.end()}w.from=function(e,t){return da(w,e,t)};var wt;function 
sr(){return wt===void 0&&(wt={}),wt}w.fromWeb=function(e,t){return sr().newStreamReadableFromReadableStream(e,t)};w.toWeb=function(e,t){return sr().newReadableStreamFromStreamReadable(e,t)};w.wrap=function(e,t){var n,r;return new w({objectMode:(n=(r=e.readableObjectMode)!==null&&r!==void 0?r:e.objectMode)!==null&&n!==void 0?n:!0,...t,destroy(i,o){ue.destroyer(e,i),o(i)}}).wrap(e)}});var Tt=g((fu,Ar)=>{var te=__process$,{ArrayPrototypeSlice:br,Error:Aa,FunctionPrototypeSymbolHasInstance:_r,ObjectDefineProperty:pr,ObjectDefineProperties:ma,ObjectSetPrototypeOf:wr,StringPrototypeToLowerCase:Ta,Symbol:Ia,SymbolHasInstance:Ma}=m();Ar.exports=S;S.WritableState=Se;var{EventEmitter:Na}=__events$,ye=Le().Stream,{Buffer:ve}=__buffer$,Be=Z(),{addAbortSignal:Da}=ke(),{getHighWaterMark:Oa,getDefaultHighWaterMark:qa}=Ce(),{ERR_INVALID_ARG_TYPE:xa,ERR_METHOD_NOT_IMPLEMENTED:La,ERR_MULTIPLE_CALLBACK:yr,ERR_STREAM_CANNOT_PIPE:Pa,ERR_STREAM_DESTROYED:ge,ERR_STREAM_ALREADY_FINISHED:ka,ERR_STREAM_NULL_VALUES:Wa,ERR_STREAM_WRITE_AFTER_END:Ca,ERR_UNKNOWN_ENCODING:gr}=O().codes,{errorOrDestroy:se}=Be;wr(S.prototype,ye.prototype);wr(S,ye);function Et(){}var de=Ia("kOnFinished");function Se(e,t,n){typeof n!="boolean"&&(n=t instanceof v()),this.objectMode=!!(e&&e.objectMode),n&&(this.objectMode=this.objectMode||!!(e&&e.writableObjectMode)),this.highWaterMark=e?Oa(this,e,"writableHighWaterMark",n):qa(!1),this.finalCalled=!1,this.needDrain=!1,this.ending=!1,this.ended=!1,this.finished=!1,this.destroyed=!1;let r=!!(e&&e.decodeStrings===!1);this.decodeStrings=!r,this.defaultEncoding=e&&e.defaultEncoding||"utf8",this.length=0,this.writing=!1,this.corked=0,this.sync=!0,this.bufferProcessing=!1,this.onwrite=$a.bind(void 
0,t),this.writecb=null,this.writelen=0,this.afterWriteTickInfo=null,Ue(this),this.pendingcb=0,this.constructed=!0,this.prefinished=!1,this.errorEmitted=!1,this.emitClose=!e||e.emitClose!==!1,this.autoDestroy=!e||e.autoDestroy!==!1,this.errored=null,this.closed=!1,this.closeEmitted=!1,this[de]=[]}function Ue(e){e.buffered=[],e.bufferedIndex=0,e.allBuffers=!0,e.allNoop=!0}Se.prototype.getBuffer=function(){return br(this.buffered,this.bufferedIndex)};pr(Se.prototype,"bufferedRequestCount",{__proto__:null,get(){return this.buffered.length-this.bufferedIndex}});function S(e){let t=this instanceof v();if(!t&&!_r(S,this))return new S(e);this._writableState=new Se(e,this,t),e&&(typeof e.write=="function"&&(this._write=e.write),typeof e.writev=="function"&&(this._writev=e.writev),typeof e.destroy=="function"&&(this._destroy=e.destroy),typeof e.final=="function"&&(this._final=e.final),typeof e.construct=="function"&&(this._construct=e.construct),e.signal&&Da(e.signal,this)),ye.call(this,e),Be.construct(this,()=>{let n=this._writableState;n.writing||At(this,n),mt(this,n)})}pr(S,Ma,{__proto__:null,value:function(e){return _r(this,e)?!0:this!==S?!1:e&&e._writableState instanceof Se}});S.prototype.pipe=function(){se(this,new Pa)};function Sr(e,t,n,r){let i=e._writableState;if(typeof n=="function")r=n,n=i.defaultEncoding;else{if(!n)n=i.defaultEncoding;else if(n!=="buffer"&&!ve.isEncoding(n))throw new gr(n);typeof r!="function"&&(r=Et)}if(t===null)throw new Wa;if(!i.objectMode)if(typeof t=="string")i.decodeStrings!==!1&&(t=ve.from(t,n),n="buffer");else if(t instanceof ve)n="buffer";else if(ye._isUint8Array(t))t=ye._uint8ArrayToBuffer(t),n="buffer";else throw new xa("chunk",["string","Buffer","Uint8Array"],t);let o;return i.ending?o=new Ca:i.destroyed&&(o=new ge("write")),o?(te.nextTick(r,o),se(e,o,!0),o):(i.pendingcb++,ja(e,i,t,n,r))}S.prototype.write=function(e,t,n){return 
Sr(this,e,t,n)===!0};S.prototype.cork=function(){this._writableState.corked++};S.prototype.uncork=function(){let e=this._writableState;e.corked&&(e.corked--,e.writing||At(this,e))};S.prototype.setDefaultEncoding=function(t){if(typeof t=="string"&&(t=Ta(t)),!ve.isEncoding(t))throw new gr(t);return this._writableState.defaultEncoding=t,this};function ja(e,t,n,r,i){let o=t.objectMode?1:n.length;t.length+=o;let l=t.lengthn.bufferedIndex&&At(e,n),r?n.afterWriteTickInfo!==null&&n.afterWriteTickInfo.cb===i?n.afterWriteTickInfo.count++:(n.afterWriteTickInfo={count:1,cb:i,stream:e,state:n},te.nextTick(va,n.afterWriteTickInfo)):Er(e,n,1,i))}function va({stream:e,state:t,count:n,cb:r}){return t.afterWriteTickInfo=null,Er(e,t,n,r)}function Er(e,t,n,r){for(!t.ending&&!e.destroyed&&t.length===0&&t.needDrain&&(t.needDrain=!1,e.emit("drain"));n-- >0;)t.pendingcb--,r();t.destroyed&&Rt(t),mt(e,t)}function Rt(e){if(e.writing)return;for(let i=e.bufferedIndex;i1&&e._writev){t.pendingcb-=o-1;let u=t.allNoop?Et:a=>{for(let c=l;c256?(n.splice(0,l),t.bufferedIndex=0):t.bufferedIndex=l}t.bufferProcessing=!1}S.prototype._write=function(e,t,n){if(this._writev)this._writev([{chunk:e,encoding:t}],n);else throw new La("_write()")};S.prototype._writev=null;S.prototype.end=function(e,t,n){let r=this._writableState;typeof e=="function"?(n=e,e=null,t=null):typeof t=="function"&&(n=t,t=null);let i;if(e!=null){let o=Sr(this,e,t);o instanceof Aa&&(i=o)}return r.corked&&(r.corked=1,this.uncork()),i||(!r.errored&&!r.ending?(r.ending=!0,mt(this,r,!0),r.ended=!0):r.finished?i=new ka("end"):r.destroyed&&(i=new ge("end"))),typeof n=="function"&&(i||r.finished?te.nextTick(n,i):r[de].push(n)),this};function Fe(e){return e.ending&&!e.destroyed&&e.constructed&&e.length===0&&!e.errored&&e.buffered.length===0&&!e.finished&&!e.writing&&!e.errorEmitted&&!e.closeEmitted}function Fa(e,t){let n=!1;function r(i){if(n){se(e,i??yr());return}if(n=!0,t.pendingcb--,i){let o=t[de].splice(0);for(let 
l=0;l{Fe(i)?St(r,i):i.pendingcb--},e,t)):Fe(t)&&(t.pendingcb++,St(e,t))))}function St(e,t){t.pendingcb--,t.finished=!0;let n=t[de].splice(0);for(let r=0;r{var It=__process$,Ga=__buffer$,{isReadable:Ha,isWritable:Va,isIterable:mr,isNodeStream:Ya,isReadableNodeStream:Tr,isWritableNodeStream:Ir,isDuplexNodeStream:Ka}=V(),Mr=Y(),{AbortError:Lr,codes:{ERR_INVALID_ARG_TYPE:za,ERR_INVALID_RETURN_VALUE:Nr}}=O(),{destroyer:ce}=Z(),Xa=v(),Ja=we(),{createDeferredPromise:Dr}=j(),Or=ct(),qr=Blob||Ga.Blob,Qa=typeof qr<"u"?function(t){return t instanceof qr}:function(t){return!1},Za=AbortController,{FunctionPrototypeCall:xr}=m(),ne=class extends Xa{constructor(t){super(t),t?.readable===!1&&(this._readableState.readable=!1,this._readableState.ended=!0,this._readableState.endEmitted=!0),t?.writable===!1&&(this._writableState.writable=!1,this._writableState.ending=!0,this._writableState.ended=!0,this._writableState.finished=!0)}};Pr.exports=function e(t,n){if(Ka(t))return t;if(Tr(t))return Ge({readable:t});if(Ir(t))return Ge({writable:t});if(Ya(t))return Ge({writable:!1,readable:!1});if(typeof t=="function"){let{value:i,write:o,final:l,destroy:u}=ef(t);if(mr(i))return Or(ne,i,{objectMode:!0,write:o,final:l,destroy:u});let f=i?.then;if(typeof f=="function"){let a,c=xr(f,i,s=>{if(s!=null)throw new Nr("nully","body",s)},s=>{ce(a,s)});return a=new ne({objectMode:!0,readable:!1,write:o,final(s){l(async()=>{try{await c,It.nextTick(s,null)}catch(b){It.nextTick(s,b)}})},destroy:u})}throw new Nr("Iterable, AsyncIterable or AsyncFunction",n,i)}if(Qa(t))return e(t.arrayBuffer());if(mr(t))return Or(ne,t,{objectMode:!0,writable:!1});if(typeof t?.writable=="object"||typeof t?.readable=="object"){let i=t!=null&&t.readable?Tr(t?.readable)?t?.readable:e(t.readable):void 0,o=t!=null&&t.writable?Ir(t?.writable)?t?.writable:e(t.writable):void 0;return Ge({readable:i,writable:o})}let r=t?.then;if(typeof r=="function"){let i;return xr(r,t,o=>{o!=null&&i.push(o),i.push(null)},o=>{ce(i,o)}),i=new 
ne({objectMode:!0,writable:!1,read(){}})}throw new za(n,["Blob","ReadableStream","WritableStream","Stream","Iterable","AsyncIterable","Function","{ readable, writable } pair","Promise"],t)};function ef(e){let{promise:t,resolve:n}=Dr(),r=new Za,i=r.signal;return{value:e(async function*(){for(;;){let l=t;t=null;let{chunk:u,done:f,cb:a}=await l;if(It.nextTick(a),f)return;if(i.aborted)throw new Lr(void 0,{cause:i.reason});({promise:t,resolve:n}=Dr()),yield u}}(),{signal:i}),write(l,u,f){let a=n;n=null,a({chunk:l,done:!1,cb:f})},final(l){let u=n;n=null,u({done:!0,cb:l})},destroy(l,u){r.abort(),u(l)}}}function Ge(e){let t=e.readable&&typeof e.readable.read!="function"?Ja.wrap(e.readable):e.readable,n=e.writable,r=!!Ha(t),i=!!Va(n),o,l,u,f,a;function c(s){let b=f;f=null,b?b(s):s?a.destroy(s):!r&&!i&&a.destroy()}return a=new ne({readableObjectMode:!!(t!=null&&t.readableObjectMode),writableObjectMode:!!(n!=null&&n.writableObjectMode),readable:r,writable:i}),i&&(Mr(n,s=>{i=!1,s&&ce(t,s),c(s)}),a._write=function(s,b,d){n.write(s,b)?d():o=d},a._final=function(s){n.end(),l=s},n.on("drain",function(){if(o){let s=o;o=null,s()}}),n.on("finish",function(){if(l){let s=l;l=null,s()}})),r&&(Mr(t,s=>{r=!1,s&&ce(t,s),c(s)}),t.on("readable",function(){if(u){let s=u;u=null,s()}}),t.on("end",function(){a.push(null)}),a._read=function(){for(;;){let s=t.read();if(s===null){u=a._read;return}if(!a.push(s))return}}),a._destroy=function(s,b){!s&&f!==null&&(s=new Lr),u=null,o=null,l=null,f===null?b(s):(f=b,ce(n,s),ce(t,s))},a}});var v=g((su,jr)=>{"use strict";var{ObjectDefineProperties:tf,ObjectGetOwnPropertyDescriptor:B,ObjectKeys:nf,ObjectSetPrototypeOf:Wr}=m();jr.exports=C;var Dt=we(),x=Tt();Wr(C.prototype,Dt.prototype);Wr(C,Dt);{let e=nf(x.prototype);for(let t=0;t{"use strict";var{ObjectSetPrototypeOf:$r,Symbol:rf}=m();vr.exports=G;var{ERR_METHOD_NOT_IMPLEMENTED:of}=O().codes,qt=v(),{getHighWaterMark:lf}=Ce();$r(G.prototype,qt.prototype);$r(G,qt);var Ee=rf("kCallback");function G(e){if(!(this 
instanceof G))return new G(e);let t=e?lf(this,e,"readableHighWaterMark",!0):null;t===0&&(e={...e,highWaterMark:null,readableHighWaterMark:t,writableHighWaterMark:e.writableHighWaterMark||0}),qt.call(this,e),this._readableState.sync=!1,this[Ee]=null,e&&(typeof e.transform=="function"&&(this._transform=e.transform),typeof e.flush=="function"&&(this._flush=e.flush)),this.on("prefinish",af)}function Ot(e){typeof this._flush=="function"&&!this.destroyed?this._flush((t,n)=>{if(t){e?e(t):this.destroy(t);return}n!=null&&this.push(n),this.push(null),e&&e()}):(this.push(null),e&&e())}function af(){this._final!==Ot&&Ot.call(this)}G.prototype._final=Ot;G.prototype._transform=function(e,t,n){throw new of("_transform()")};G.prototype._write=function(e,t,n){let r=this._readableState,i=this._writableState,o=r.length;this._transform(e,t,(l,u)=>{if(l){n(l);return}u!=null&&this.push(u),i.ended||o===r.length||r.length{"use strict";var{ObjectSetPrototypeOf:Fr}=m();Ur.exports=he;var Lt=xt();Fr(he.prototype,Lt.prototype);Fr(he,Lt);function he(e){if(!(this instanceof he))return new he(e);Lt.call(this,e)}he.prototype._transform=function(e,t,n){n(null,e)}});var Ye=g((hu,zr)=>{var He=__process$,{ArrayIsArray:ff,Promise:uf,SymbolAsyncIterator:sf}=m(),Ve=Y(),{once:df}=j(),cf=Z(),Br=v(),{aggregateTwoErrors:hf,codes:{ERR_INVALID_ARG_TYPE:Yr,ERR_INVALID_RETURN_VALUE:kt,ERR_MISSING_ARGS:bf,ERR_STREAM_DESTROYED:_f,ERR_STREAM_PREMATURE_CLOSE:pf},AbortError:wf}=O(),{validateFunction:yf,validateAbortSignal:gf}=_e(),{isIterable:be,isReadable:Wt,isReadableNodeStream:$t,isNodeStream:Gr}=V(),Sf=AbortController,Ct,jt;function Hr(e,t,n){let r=!1;e.on("close",()=>{r=!0});let i=Ve(e,{readable:t,writable:n},o=>{r=!o});return{destroy:o=>{r||(r=!0,cf.destroyer(e,o||new _f("pipe")))},cleanup:i}}function Ef(e){return yf(e[e.length-1],"streams[stream.length - 1]"),e.pop()}function Rf(e){if(be(e))return e;if($t(e))return Af(e);throw new Yr("val",["Readable","Iterable","AsyncIterable"],e)}async 
function*Af(e){jt||(jt=we()),yield*jt.prototype[sf].call(e)}async function Vr(e,t,n,{end:r}){let i,o=null,l=a=>{if(a&&(i=a),o){let c=o;o=null,c()}},u=()=>new uf((a,c)=>{i?c(i):o=()=>{i?c(i):a()}});t.on("drain",l);let f=Ve(t,{readable:!1},l);try{t.writableNeedDrain&&await u();for await(let a of e)t.write(a)||await u();r&&t.end(),await u(),n()}catch(a){n(i!==a?hf(i,a):a)}finally{f(),t.off("drain",l)}}function mf(...e){return Kr(e,df(Ef(e)))}function Kr(e,t,n){if(e.length===1&&ff(e[0])&&(e=e[0]),e.length<2)throw new bf("streams");let r=new Sf,i=r.signal,o=n?.signal,l=[];gf(o,"options.signal");function u(){d(new wf)}o?.addEventListener("abort",u);let f,a,c=[],s=0;function b(_){d(_,--s===0)}function d(_,p){if(_&&(!f||f.code==="ERR_STREAM_PREMATURE_CLOSE")&&(f=_),!(!f&&!p)){for(;c.length;)c.shift()(f);o?.removeEventListener("abort",u),r.abort(),p&&(f||l.forEach(I=>I()),He.nextTick(t,f,a))}}let h;for(let _=0;_0,F=I||n?.end!==!1,re=_===e.length-1;if(Gr(p)){let P=function(U){U&&U.name!=="AbortError"&&U.code!=="ERR_STREAM_PREMATURE_CLOSE"&&b(U)};var L=P;if(F){let{destroy:U,cleanup:ze}=Hr(p,I,M);c.push(U),Wt(p)&&re&&l.push(ze)}p.on("error",P),Wt(p)&&re&&l.push(()=>{p.removeListener("error",P)})}if(_===0)if(typeof p=="function"){if(h=p({signal:i}),!be(h))throw new kt("Iterable, AsyncIterable or Stream","source",h)}else be(p)||$t(p)?h=p:h=Br.from(p);else if(typeof p=="function")if(h=Rf(h),h=p(h,{signal:i}),I){if(!be(h,!0))throw new kt("AsyncIterable",`transform[${_-1}]`,h)}else{var D;Ct||(Ct=Pt());let P=new Ct({objectMode:!0}),U=(D=h)===null||D===void 0?void 0:D.then;if(typeof U=="function")s++,U.call(h,ie=>{a=ie,ie!=null&&P.write(ie),F&&P.end(),He.nextTick(b)},ie=>{P.destroy(ie),He.nextTick(b,ie)});else if(be(h,!0))s++,Vr(h,P,b,{end:F});else throw new kt("AsyncIterable or Promise","destination",h);h=P;let{destroy:ze,cleanup:_i}=Hr(h,!1,!0);c.push(ze),re&&l.push(_i)}else if(Gr(p)){if($t(h)){s+=2;let P=Tf(h,p,b,{end:F});Wt(p)&&re&&l.push(P)}else 
if(be(h))s++,Vr(h,p,b,{end:F});else throw new Yr("val",["Readable","Iterable","AsyncIterable"],h);h=p}else h=Br.from(p)}return(i!=null&&i.aborted||o!=null&&o.aborted)&&He.nextTick(u),h}function Tf(e,t,n,{end:r}){let i=!1;return t.on("close",()=>{i||n(new pf)}),e.pipe(t,{end:r}),r?e.once("end",()=>{i=!0,t.end()}):n(),Ve(e,{readable:!0,writable:!1},o=>{let l=e._readableState;o&&o.code==="ERR_STREAM_PREMATURE_CLOSE"&&l&&l.ended&&!l.errored&&!l.errorEmitted?e.once("end",n).once("error",n):n(o)}),Ve(t,{readable:!1,writable:!0},n)}zr.exports={pipelineImpl:Kr,pipeline:mf}});var ei=g((bu,Zr)=>{"use strict";var{pipeline:If}=Ye(),Ke=v(),{destroyer:Mf}=Z(),{isNodeStream:Nf,isReadable:Xr,isWritable:Jr}=V(),{AbortError:Df,codes:{ERR_INVALID_ARG_VALUE:Qr,ERR_MISSING_ARGS:Of}}=O();Zr.exports=function(...t){if(t.length===0)throw new Of("streams");if(t.length===1)return Ke.from(t[0]);let n=[...t];if(typeof t[0]=="function"&&(t[0]=Ke.from(t[0])),typeof t[t.length-1]=="function"){let d=t.length-1;t[d]=Ke.from(t[d])}for(let d=0;d0&&!Jr(t[d]))throw new Qr(`streams[${d}]`,n[d],"must be writable")}let r,i,o,l,u;function f(d){let h=l;l=null,h?h(d):d?u.destroy(d):!b&&!s&&u.destroy()}let a=t[0],c=If(t,f),s=!!Jr(a),b=!!Xr(c);return u=new Ke({writableObjectMode:!!(a!=null&&a.writableObjectMode),readableObjectMode:!!(c!=null&&c.writableObjectMode),writable:s,readable:b}),s&&(u._write=function(d,h,D){a.write(d,h)?D():r=D},u._final=function(d){a.end(),i=d},a.on("drain",function(){if(r){let d=r;r=null,d()}}),c.on("finish",function(){if(i){let d=i;i=null,d()}})),b&&(c.on("readable",function(){if(o){let d=o;o=null,d()}}),c.on("end",function(){u.push(null)}),u._read=function(){for(;;){let d=c.read();if(d===null){o=u._read;return}if(!u.push(d))return}}),u._destroy=function(d,h){!d&&l!==null&&(d=new Df),o=null,r=null,i=null,l===null?h(d):(l=h,Mf(c,d))},u}});var vt=g((_u,ti)=>{"use 
strict";var{ArrayPrototypePop:qf,Promise:xf}=m(),{isIterable:Lf,isNodeStream:Pf}=V(),{pipelineImpl:kf}=Ye(),{finished:Wf}=Y();function Cf(...e){return new xf((t,n)=>{let r,i,o=e[e.length-1];if(o&&typeof o=="object"&&!Pf(o)&&!Lf(o)){let l=qf(e);r=l.signal,i=l.end}kf(e,(l,u)=>{l?n(l):t(u)},{signal:r,end:i})})}ti.exports={finished:Wf,pipeline:Cf}});var di=g((pu,si)=>{var{Buffer:jf}=__buffer$,{ObjectDefineProperty:H,ObjectKeys:ii,ReflectApply:oi}=m(),{promisify:{custom:li}}=j(),{streamReturningOperators:ni,promiseReturningOperators:ri}=xn(),{codes:{ERR_ILLEGAL_CONSTRUCTOR:ai}}=O(),$f=ei(),{pipeline:fi}=Ye(),{destroyer:vf}=Z(),ui=Y(),Ft=vt(),Ut=V(),R=si.exports=Le().Stream;R.isDisturbed=Ut.isDisturbed;R.isErrored=Ut.isErrored;R.isReadable=Ut.isReadable;R.Readable=we();for(let e of ii(ni)){let n=function(...r){if(new.target)throw ai();return R.Readable.from(oi(t,this,r))};Uf=n;let t=ni[e];H(n,"name",{__proto__:null,value:t.name}),H(n,"length",{__proto__:null,value:t.length}),H(R.Readable.prototype,e,{__proto__:null,value:n,enumerable:!1,configurable:!0,writable:!0})}var Uf;for(let e of ii(ri)){let n=function(...i){if(new.target)throw ai();return oi(t,this,i)};Uf=n;let t=ri[e];H(n,"name",{__proto__:null,value:t.name}),H(n,"length",{__proto__:null,value:t.length}),H(R.Readable.prototype,e,{__proto__:null,value:n,enumerable:!1,configurable:!0,writable:!0})}var Uf;R.Writable=Tt();R.Duplex=v();R.Transform=xt();R.PassThrough=Pt();R.pipeline=fi;var{addAbortSignal:Ff}=ke();R.addAbortSignal=Ff;R.finished=ui;R.destroy=vf;R.compose=$f;H(R,"promises",{__proto__:null,configurable:!0,enumerable:!0,get(){return Ft}});H(fi,li,{__proto__:null,enumerable:!0,get(){return Ft.pipeline}});H(ui,li,{__proto__:null,enumerable:!0,get(){return Ft.finished}});R.Stream=R;R._isUint8Array=function(t){return t instanceof Uint8Array};R._uint8ArrayToBuffer=function(t){return jf.from(t.buffer,t.byteOffset,t.byteLength)}});var ci=g((wu,A)=>{"use strict";var 
T=di(),Bf=vt(),Gf=T.Readable.destroy;A.exports=T.Readable;A.exports._uint8ArrayToBuffer=T._uint8ArrayToBuffer;A.exports._isUint8Array=T._isUint8Array;A.exports.isDisturbed=T.isDisturbed;A.exports.isErrored=T.isErrored;A.exports.isReadable=T.isReadable;A.exports.Readable=T.Readable;A.exports.Writable=T.Writable;A.exports.Duplex=T.Duplex;A.exports.Transform=T.Transform;A.exports.PassThrough=T.PassThrough;A.exports.addAbortSignal=T.addAbortSignal;A.exports.finished=T.finished;A.exports.destroy=T.destroy;A.exports.destroy=Gf;A.exports.pipeline=T.pipeline;A.exports.compose=T.compose;Object.defineProperty(T,"promises",{configurable:!0,enumerable:!0,get(){return Bf}});A.exports.Stream=T.Stream;A.exports.default=A.exports});var bi=Ri(ci()),{_uint8ArrayToBuffer:yu,_isUint8Array:gu,isDisturbed:Su,isErrored:Eu,isReadable:Ru,Readable:Au,Writable:mu,Duplex:Tu,Transform:Iu,PassThrough:Mu,addAbortSignal:Nu,finished:Du,destroy:Ou,pipeline:qu,compose:xu,Stream:Lu}=bi,{default:hi,...Hf}=bi,Pu=hi!==void 0?hi:Hf;export{Tu as Duplex,Mu as PassThrough,Au as Readable,Lu as Stream,Iu as Transform,mu as Writable,gu as _isUint8Array,yu as _uint8ArrayToBuffer,Nu as addAbortSignal,xu as compose,Pu as default,Ou as destroy,Du as finished,Su as isDisturbed,Eu as isErrored,Ru as isReadable,qu as pipeline}; +// generated with +// $ esbuild --bundle --legal-comments=none --target=es2022 --tree-shaking=true --format=esm . +// ... 
then making sure the file uses the existing ext:deno_node stuff instead of bundling it +const __process$ = { nextTick }; +import __buffer$ from "ext:deno_node/buffer.ts"; +import __string_decoder$ from "ext:deno_node/string_decoder.ts"; +import __events$ from "ext:deno_node/events.ts"; + +var __getOwnPropNames = Object.getOwnPropertyNames; +var __commonJS = (cb, mod) => + function __require() { + return mod || + (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), + mod.exports; + }; + +// node_modules/buffer/index.js +var require_buffer = () => { + return __buffer$; +}; + +// lib/ours/errors.js +var require_primordials = __commonJS({ + "lib/ours/primordials.js"(exports2, module2) { + "use strict"; + module2.exports = { + ArrayIsArray(self2) { + return Array.isArray(self2); + }, + ArrayPrototypeIncludes(self2, el) { + return self2.includes(el); + }, + ArrayPrototypeIndexOf(self2, el) { + return self2.indexOf(el); + }, + ArrayPrototypeJoin(self2, sep) { + return self2.join(sep); + }, + ArrayPrototypeMap(self2, fn) { + return self2.map(fn); + }, + ArrayPrototypePop(self2, el) { + return self2.pop(el); + }, + ArrayPrototypePush(self2, el) { + return self2.push(el); + }, + ArrayPrototypeSlice(self2, start, end) { + return self2.slice(start, end); + }, + Error, + FunctionPrototypeCall(fn, thisArgs, ...args) { + return fn.call(thisArgs, ...args); + }, + FunctionPrototypeSymbolHasInstance(self2, instance) { + return Function.prototype[Symbol.hasInstance].call(self2, instance); + }, + MathFloor: Math.floor, + Number, + NumberIsInteger: Number.isInteger, + NumberIsNaN: Number.isNaN, + NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, + NumberParseInt: Number.parseInt, + ObjectDefineProperties(self2, props) { + return Object.defineProperties(self2, props); + }, + ObjectDefineProperty(self2, name, prop) { + return Object.defineProperty(self2, name, prop); + }, + ObjectGetOwnPropertyDescriptor(self2, name) { 
+ return Object.getOwnPropertyDescriptor(self2, name); + }, + ObjectKeys(obj) { + return Object.keys(obj); + }, + ObjectSetPrototypeOf(target, proto) { + return Object.setPrototypeOf(target, proto); + }, + Promise, + PromisePrototypeCatch(self2, fn) { + return self2.catch(fn); + }, + PromisePrototypeThen(self2, thenFn, catchFn) { + return self2.then(thenFn, catchFn); + }, + PromiseReject(err) { + return Promise.reject(err); + }, + ReflectApply: Reflect.apply, + RegExpPrototypeTest(self2, value) { + return self2.test(value); + }, + SafeSet: Set, + String, + StringPrototypeSlice(self2, start, end) { + return self2.slice(start, end); + }, + StringPrototypeToLowerCase(self2) { + return self2.toLowerCase(); + }, + StringPrototypeToUpperCase(self2) { + return self2.toUpperCase(); + }, + StringPrototypeTrim(self2) { + return self2.trim(); + }, + Symbol, + SymbolAsyncIterator: Symbol.asyncIterator, + SymbolHasInstance: Symbol.hasInstance, + SymbolIterator: Symbol.iterator, + TypedArrayPrototypeSet(self2, buf, len) { + return self2.set(buf, len); + }, + Uint8Array, + }; + }, +}); + +// lib/internal/validators.js +var require_validators = __commonJS({ + "lib/internal/validators.js"(exports, module) { + "use strict"; + var { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeMap, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + NumberParseInt, + ObjectPrototypeHasOwnProperty, + RegExpPrototypeExec, + String: String2, + StringPrototypeToUpperCase, + StringPrototypeTrim, + } = require_primordials(); + var signals = {}; + function isInt32(value) { + return value === (value | 0); + } + function isUint32(value) { + return value === value >>> 0; + } + var octalReg = /^[0-7]+$/; + var modeDesc = "must be a 32-bit unsigned integer or an octal string"; + function parseFileMode(value, name, def) { + if (typeof value === "undefined") { + value = def; + } + if (typeof value === "string") { + if 
(RegExpPrototypeExec(octalReg, value) === null) { + throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc); + } + value = NumberParseInt(value, 8); + } + validateUint32(value, name); + return value; + } + var validateInteger = hideStackFrames( + ( + value, + name, + min = NumberMIN_SAFE_INTEGER, + max = NumberMAX_SAFE_INTEGER, + ) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }, + ); + var validateInt32 = hideStackFrames( + (value, name, min = -2147483648, max = 2147483647) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }, + ); + var validateUint32 = hideStackFrames((value, name, positive = false) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + const min = positive ? 1 : 0; + const max = 4294967295; + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }); + function validateString(value, name) { + if (typeof value !== "string") { + throw new ERR_INVALID_ARG_TYPE(name, "string", value); + } + } + function validateNumber(value, name, min = void 0, max) { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if ( + min != null && value < min || max != null && value > max || + (min != null || max != null) && NumberIsNaN(value) + ) { + throw new ERR_OUT_OF_RANGE( + name, + `${min != null ? 
`>= ${min}` : ""}${ + min != null && max != null ? " && " : "" + }${max != null ? `<= ${max}` : ""}`, + value, + ); + } + } + var validateOneOf = hideStackFrames((value, name, oneOf) => { + if (!ArrayPrototypeIncludes(oneOf, value)) { + const allowed = ArrayPrototypeJoin( + ArrayPrototypeMap( + oneOf, + (v) => typeof v === "string" ? `'${v}'` : String2(v), + ), + ", ", + ); + const reason = "must be one of: " + allowed; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } + }); + function validateBoolean(value, name) { + if (typeof value !== "boolean") { + throw new ERR_INVALID_ARG_TYPE(name, "boolean", value); + } + } + function getOwnPropertyValueOrDefault(options, key, defaultValue) { + return options == null || !ObjectPrototypeHasOwnProperty(options, key) + ? defaultValue + : options[key]; + } + var validateObject = hideStackFrames((value, name, options = null) => { + const allowArray = getOwnPropertyValueOrDefault( + options, + "allowArray", + false, + ); + const allowFunction = getOwnPropertyValueOrDefault( + options, + "allowFunction", + false, + ); + const nullable = getOwnPropertyValueOrDefault(options, "nullable", false); + if ( + !nullable && value === null || !allowArray && ArrayIsArray(value) || + typeof value !== "object" && + (!allowFunction || typeof value !== "function") + ) { + throw new ERR_INVALID_ARG_TYPE(name, "Object", value); + } + }); + var validateArray = hideStackFrames((value, name, minLength = 0) => { + if (!ArrayIsArray(value)) { + throw new ERR_INVALID_ARG_TYPE(name, "Array", value); + } + if (value.length < minLength) { + const reason = `must be longer than ${minLength}`; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } + }); + function validateSignalName(signal, name = "signal") { + validateString(signal, name); + if (signals[signal] === void 0) { + if (signals[StringPrototypeToUpperCase(signal)] !== void 0) { + throw new ERR_UNKNOWN_SIGNAL( + signal + " (signals must use all capital letters)", + ); + } + throw new 
ERR_UNKNOWN_SIGNAL(signal); + } + } + var validateBuffer = hideStackFrames((buffer, name = "buffer") => { + if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE(name, [ + "Buffer", + "TypedArray", + "DataView", + ], buffer); + } + }); + function validateEncoding(data, encoding) { + const normalizedEncoding = normalizeEncoding(encoding); + const length = data.length; + if (normalizedEncoding === "hex" && length % 2 !== 0) { + throw new ERR_INVALID_ARG_VALUE( + "encoding", + encoding, + `is invalid for data of length ${length}`, + ); + } + } + function validatePort(port, name = "Port", allowZero = true) { + if ( + typeof port !== "number" && typeof port !== "string" || + typeof port === "string" && StringPrototypeTrim(port).length === 0 || + +port !== +port >>> 0 || port > 65535 || port === 0 && !allowZero + ) { + throw new ERR_SOCKET_BAD_PORT(name, port, allowZero); + } + return port | 0; + } + var validateAbortSignal = hideStackFrames((signal, name) => { + if ( + signal !== void 0 && + (signal === null || typeof signal !== "object" || + !("aborted" in signal)) + ) { + throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal); + } + }); + var validateFunction = hideStackFrames((value, name) => { + if (typeof value !== "function") { + throw new ERR_INVALID_ARG_TYPE(name, "Function", value); + } + }); + var validatePlainFunction = hideStackFrames((value, name) => { + if (typeof value !== "function" || isAsyncFunction(value)) { + throw new ERR_INVALID_ARG_TYPE(name, "Function", value); + } + }); + var validateUndefined = hideStackFrames((value, name) => { + if (value !== void 0) { + throw new ERR_INVALID_ARG_TYPE(name, "undefined", value); + } + }); + function validateUnion(value, name, union) { + if (!ArrayPrototypeIncludes(union, value)) { + throw new ERR_INVALID_ARG_TYPE( + name, + `('${ArrayPrototypeJoin(union, "|")}')`, + value, + ); + } + } + module.exports = { + isInt32, + isUint32, + parseFileMode, + validateArray, + validateBoolean, + 
validateBuffer, + validateEncoding, + validateFunction, + validateInt32, + validateInteger, + validateNumber, + validateObject, + validateOneOf, + validatePlainFunction, + validatePort, + validateSignalName, + validateString, + validateUint32, + validateUndefined, + validateUnion, + validateAbortSignal, + }; + }, +}); + +// node_modules/process/browser.js +var require_browser2 = () => { + return __process$; +}; + +// lib/internal/streams/utils.js +var require_utils = __commonJS({ + "lib/internal/streams/utils.js"(exports, module) { + "use strict"; + var { Symbol: Symbol2, SymbolAsyncIterator, SymbolIterator } = + require_primordials(); + var kDestroyed = Symbol2("kDestroyed"); + var kIsErrored = Symbol2("kIsErrored"); + var kIsReadable = Symbol2("kIsReadable"); + var kIsDisturbed = Symbol2("kIsDisturbed"); + function isReadableNodeStream(obj, strict = false) { + var _obj$_readableState; + return !!(obj && typeof obj.pipe === "function" && + typeof obj.on === "function" && + (!strict || + typeof obj.pause === "function" && + typeof obj.resume === "function") && + (!obj._writableState || + ((_obj$_readableState = obj._readableState) === null || + _obj$_readableState === void 0 + ? void 0 + : _obj$_readableState.readable) !== false) && // Duplex + (!obj._writableState || obj._readableState)); + } + function isWritableNodeStream(obj) { + var _obj$_writableState; + return !!(obj && typeof obj.write === "function" && + typeof obj.on === "function" && + (!obj._readableState || + ((_obj$_writableState = obj._writableState) === null || + _obj$_writableState === void 0 + ? 
void 0 + : _obj$_writableState.writable) !== false)); + } + function isDuplexNodeStream(obj) { + return !!(obj && typeof obj.pipe === "function" && obj._readableState && + typeof obj.on === "function" && typeof obj.write === "function"); + } + function isNodeStream(obj) { + return obj && + (obj._readableState || obj._writableState || + typeof obj.write === "function" && typeof obj.on === "function" || + typeof obj.pipe === "function" && typeof obj.on === "function"); + } + function isIterable(obj, isAsync) { + if (obj == null) { + return false; + } + if (isAsync === true) { + return typeof obj[SymbolAsyncIterator] === "function"; + } + if (isAsync === false) { + return typeof obj[SymbolIterator] === "function"; + } + return typeof obj[SymbolAsyncIterator] === "function" || + typeof obj[SymbolIterator] === "function"; + } + function isDestroyed(stream) { + if (!isNodeStream(stream)) { + return null; + } + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + return !!(stream.destroyed || stream[kDestroyed] || + state !== null && state !== void 0 && state.destroyed); + } + function isWritableEnded(stream) { + if (!isWritableNodeStream(stream)) { + return null; + } + if (stream.writableEnded === true) { + return true; + } + const wState = stream._writableState; + if (wState !== null && wState !== void 0 && wState.errored) { + return false; + } + if ( + typeof (wState === null || wState === void 0 + ? void 0 + : wState.ended) !== "boolean" + ) { + return null; + } + return wState.ended; + } + function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) { + return null; + } + if (stream.writableFinished === true) { + return true; + } + const wState = stream._writableState; + if (wState !== null && wState !== void 0 && wState.errored) { + return false; + } + if ( + typeof (wState === null || wState === void 0 + ? 
void 0 + : wState.finished) !== "boolean" + ) { + return null; + } + return !!(wState.finished || + strict === false && wState.ended === true && wState.length === 0); + } + function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) { + return null; + } + if (stream.readableEnded === true) { + return true; + } + const rState = stream._readableState; + if (!rState || rState.errored) { + return false; + } + if ( + typeof (rState === null || rState === void 0 + ? void 0 + : rState.ended) !== "boolean" + ) { + return null; + } + return rState.ended; + } + function isReadableFinished(stream, strict) { + if (!isReadableNodeStream(stream)) { + return null; + } + const rState = stream._readableState; + if (rState !== null && rState !== void 0 && rState.errored) { + return false; + } + if ( + typeof (rState === null || rState === void 0 + ? void 0 + : rState.endEmitted) !== "boolean" + ) { + return null; + } + return !!(rState.endEmitted || + strict === false && rState.ended === true && rState.length === 0); + } + function isReadable(stream) { + if (stream && stream[kIsReadable] != null) { + return stream[kIsReadable]; + } + if ( + typeof (stream === null || stream === void 0 + ? void 0 + : stream.readable) !== "boolean" + ) { + return null; + } + if (isDestroyed(stream)) { + return false; + } + return isReadableNodeStream(stream) && stream.readable && + !isReadableFinished(stream); + } + function isWritable(stream) { + if ( + typeof (stream === null || stream === void 0 + ? void 0 + : stream.writable) !== "boolean" + ) { + return null; + } + if (isDestroyed(stream)) { + return false; + } + return isWritableNodeStream(stream) && stream.writable && + !isWritableEnded(stream); + } + function isFinished(stream, opts) { + if (!isNodeStream(stream)) { + return null; + } + if (isDestroyed(stream)) { + return true; + } + if ( + (opts === null || opts === void 0 ? 
void 0 : opts.readable) !== false && + isReadable(stream) + ) { + return false; + } + if ( + (opts === null || opts === void 0 ? void 0 : opts.writable) !== false && + isWritable(stream) + ) { + return false; + } + return true; + } + function isWritableErrored(stream) { + var _stream$_writableStat, _stream$_writableStat2; + if (!isNodeStream(stream)) { + return null; + } + if (stream.writableErrored) { + return stream.writableErrored; + } + return (_stream$_writableStat = + (_stream$_writableStat2 = stream._writableState) === null || + _stream$_writableStat2 === void 0 + ? void 0 + : _stream$_writableStat2.errored) !== null && + _stream$_writableStat !== void 0 + ? _stream$_writableStat + : null; + } + function isReadableErrored(stream) { + var _stream$_readableStat, _stream$_readableStat2; + if (!isNodeStream(stream)) { + return null; + } + if (stream.readableErrored) { + return stream.readableErrored; + } + return (_stream$_readableStat = + (_stream$_readableStat2 = stream._readableState) === null || + _stream$_readableStat2 === void 0 + ? void 0 + : _stream$_readableStat2.errored) !== null && + _stream$_readableStat !== void 0 + ? _stream$_readableStat + : null; + } + function isClosed(stream) { + if (!isNodeStream(stream)) { + return null; + } + if (typeof stream.closed === "boolean") { + return stream.closed; + } + const wState = stream._writableState; + const rState = stream._readableState; + if ( + typeof (wState === null || wState === void 0 + ? void 0 + : wState.closed) === "boolean" || + typeof (rState === null || rState === void 0 + ? void 0 + : rState.closed) === "boolean" + ) { + return (wState === null || wState === void 0 + ? void 0 + : wState.closed) || + (rState === null || rState === void 0 ? 
void 0 : rState.closed); + } + if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) { + return stream._closed; + } + return null; + } + function isOutgoingMessage(stream) { + return typeof stream._closed === "boolean" && + typeof stream._defaultKeepAlive === "boolean" && + typeof stream._removedConnection === "boolean" && + typeof stream._removedContLen === "boolean"; + } + function isServerResponse(stream) { + return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream); + } + function isServerRequest(stream) { + var _stream$req; + return typeof stream._consuming === "boolean" && + typeof stream._dumped === "boolean" && + ((_stream$req = stream.req) === null || _stream$req === void 0 + ? void 0 + : _stream$req.upgradeOrConnect) === void 0; + } + function willEmitClose(stream) { + if (!isNodeStream(stream)) { + return null; + } + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + return !state && isServerResponse(stream) || + !!(state && state.autoDestroy && state.emitClose && + state.closed === false); + } + function isDisturbed(stream) { + var _stream$kIsDisturbed; + return !!(stream && + ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && + _stream$kIsDisturbed !== void 0 + ? _stream$kIsDisturbed + : stream.readableDidRead || stream.readableAborted)); + } + function isErrored(stream) { + var _ref, + _ref2, + _ref3, + _ref4, + _ref5, + _stream$kIsErrored, + _stream$_readableStat3, + _stream$_writableStat3, + _stream$_readableStat4, + _stream$_writableStat4; + return !!(stream && + ((_ref = + (_ref2 = + (_ref3 = + (_ref4 = + (_ref5 = + (_stream$kIsErrored = + stream[kIsErrored]) !== null && + _stream$kIsErrored !== void 0 + ? _stream$kIsErrored + : stream.readableErrored) !== null && + _ref5 !== void 0 + ? _ref5 + : stream.writableErrored) !== null && + _ref4 !== void 0 + ? 
_ref4 + : (_stream$_readableStat3 = + stream._readableState) === null || + _stream$_readableStat3 === void 0 + ? void 0 + : _stream$_readableStat3.errorEmitted) !== null && + _ref3 !== void 0 + ? _ref3 + : (_stream$_writableStat3 = stream._writableState) === + null || _stream$_writableStat3 === void 0 + ? void 0 + : _stream$_writableStat3.errorEmitted) !== null && + _ref2 !== void 0 + ? _ref2 + : (_stream$_readableStat4 = stream._readableState) === null || + _stream$_readableStat4 === void 0 + ? void 0 + : _stream$_readableStat4.errored) !== null && _ref !== void 0 + ? _ref + : (_stream$_writableStat4 = stream._writableState) === null || + _stream$_writableStat4 === void 0 + ? void 0 + : _stream$_writableStat4.errored)); + } + module.exports = { + kDestroyed, + isDisturbed, + kIsDisturbed, + isErrored, + kIsErrored, + isReadable, + kIsReadable, + isClosed, + isDestroyed, + isDuplexNodeStream, + isFinished, + isIterable, + isReadableNodeStream, + isReadableEnded, + isReadableFinished, + isReadableErrored, + isNodeStream, + isWritable, + isWritableNodeStream, + isWritableEnded, + isWritableFinished, + isWritableErrored, + isServerRequest, + isServerResponse, + willEmitClose, + }; + }, +}); + +// lib/internal/streams/end-of-stream.js +var require_end_of_stream = __commonJS({ + "lib/internal/streams/end-of-stream.js"(exports, module) { + var process = require_browser2(); + var { validateAbortSignal, validateFunction, validateObject } = + require_validators(); + var { Promise: Promise2 } = require_primordials(); + var { + isClosed, + isReadable, + isReadableNodeStream, + isReadableFinished, + isReadableErrored, + isWritable, + isWritableNodeStream, + isWritableFinished, + isWritableErrored, + isNodeStream, + willEmitClose: _willEmitClose, + } = require_utils(); + function isRequest(stream) { + return stream.setHeader && typeof stream.abort === "function"; + } + var nop = () => { + }; + function eos(stream, options, callback) { + var _options$readable, _options$writable; 
+ if (arguments.length === 2) { + callback = options; + options = kEmptyObject; + } else if (options == null) { + options = kEmptyObject; + } else { + validateObject(options, "options"); + } + validateFunction(callback, "callback"); + validateAbortSignal(options.signal, "options.signal"); + callback = once(callback); + const readable = (_options$readable = options.readable) !== null && + _options$readable !== void 0 + ? _options$readable + : isReadableNodeStream(stream); + const writable = (_options$writable = options.writable) !== null && + _options$writable !== void 0 + ? _options$writable + : isWritableNodeStream(stream); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE("stream", "Stream", stream); + } + const wState = stream._writableState; + const rState = stream._readableState; + const onlegacyfinish = () => { + if (!stream.writable) { + onfinish(); + } + }; + let willEmitClose = _willEmitClose(stream) && + isReadableNodeStream(stream) === readable && + isWritableNodeStream(stream) === writable; + let writableFinished = isWritableFinished(stream, false); + const onfinish = () => { + writableFinished = true; + if (stream.destroyed) { + willEmitClose = false; + } + if (willEmitClose && (!stream.readable || readable)) { + return; + } + if (!readable || readableFinished) { + callback.call(stream); + } + }; + let readableFinished = isReadableFinished(stream, false); + const onend = () => { + readableFinished = true; + if (stream.destroyed) { + willEmitClose = false; + } + if (willEmitClose && (!stream.writable || writable)) { + return; + } + if (!writable || writableFinished) { + callback.call(stream); + } + }; + const onerror = (err) => { + callback.call(stream, err); + }; + let closed = isClosed(stream); + const onclose = () => { + closed = true; + const errored = isWritableErrored(stream) || isReadableErrored(stream); + if (errored && typeof errored !== "boolean") { + return callback.call(stream, errored); + } + if ( + readable && 
!readableFinished && isReadableNodeStream(stream, true) + ) { + if (!isReadableFinished(stream, false)) { + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); + } + } + if (writable && !writableFinished) { + if (!isWritableFinished(stream, false)) { + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); + } + } + callback.call(stream); + }; + const onrequest = () => { + stream.req.on("finish", onfinish); + }; + if (isRequest(stream)) { + stream.on("complete", onfinish); + if (!willEmitClose) { + stream.on("abort", onclose); + } + if (stream.req) { + onrequest(); + } else { + stream.on("request", onrequest); + } + } else if (writable && !wState) { + stream.on("end", onlegacyfinish); + stream.on("close", onlegacyfinish); + } + if (!willEmitClose && typeof stream.aborted === "boolean") { + stream.on("aborted", onclose); + } + stream.on("end", onend); + stream.on("finish", onfinish); + if (options.error !== false) { + stream.on("error", onerror); + } + stream.on("close", onclose); + if (closed) { + process.nextTick(onclose); + } else if ( + wState !== null && wState !== void 0 && wState.errorEmitted || + rState !== null && rState !== void 0 && rState.errorEmitted + ) { + if (!willEmitClose) { + process.nextTick(onclose); + } + } else if ( + !readable && (!willEmitClose || isReadable(stream)) && + (writableFinished || isWritable(stream) === false) + ) { + process.nextTick(onclose); + } else if ( + !writable && (!willEmitClose || isWritable(stream)) && + (readableFinished || isReadable(stream) === false) + ) { + process.nextTick(onclose); + } else if (rState && stream.req && stream.aborted) { + process.nextTick(onclose); + } + const cleanup = () => { + callback = nop; + stream.removeListener("aborted", onclose); + stream.removeListener("complete", onfinish); + stream.removeListener("abort", onclose); + stream.removeListener("request", onrequest); + if (stream.req) { + stream.req.removeListener("finish", onfinish); + } + 
stream.removeListener("end", onlegacyfinish); + stream.removeListener("close", onlegacyfinish); + stream.removeListener("finish", onfinish); + stream.removeListener("end", onend); + stream.removeListener("error", onerror); + stream.removeListener("close", onclose); + }; + if (options.signal && !closed) { + const abort = () => { + const endCallback = callback; + cleanup(); + endCallback.call( + stream, + new AbortError(void 0, { + cause: options.signal.reason, + }), + ); + }; + if (options.signal.aborted) { + process.nextTick(abort); + } else { + const originalCallback = callback; + callback = once((...args) => { + options.signal.removeEventListener("abort", abort); + originalCallback.apply(stream, args); + }); + options.signal.addEventListener("abort", abort); + } + } + return cleanup; + } + function finished(stream, opts) { + return new Promise2((resolve, reject) => { + eos(stream, opts, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + } + module.exports = eos; + module.exports.finished = finished; + }, +}); + +// lib/internal/streams/operators.js +var require_operators = __commonJS({ + "lib/internal/streams/operators.js"(exports, module) { + "use strict"; + var { validateAbortSignal, validateInteger, validateObject } = + require_validators(); + var kWeakHandler = require_primordials().Symbol("kWeak"); + var { finished } = require_end_of_stream(); + var { + ArrayPrototypePush, + MathFloor, + Number: Number2, + NumberIsNaN, + Promise: Promise2, + PromiseReject, + PromisePrototypeThen, + Symbol: Symbol2, + } = require_primordials(); + var kEmpty = Symbol2("kEmpty"); + var kEof = Symbol2("kEof"); + function map(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + let concurrency = 1; + if ( + (options === null || options === void 0 + ? void 0 + : options.concurrency) != null + ) { + concurrency = MathFloor(options.concurrency); + } + validateInteger(concurrency, "concurrency", 1); + return async function* map2() { + var _options$signal, _options$signal2; + const ac = new AbortController(); + const stream = this; + const queue = []; + const signal = ac.signal; + const signalOpt = { + signal, + }; + const abort = () => ac.abort(); + if ( + options !== null && options !== void 0 && + (_options$signal = options.signal) !== null && + _options$signal !== void 0 && _options$signal.aborted + ) { + abort(); + } + options === null || options === void 0 + ? void 0 + : (_options$signal2 = options.signal) === null || + _options$signal2 === void 0 + ? void 0 + : _options$signal2.addEventListener("abort", abort); + let next; + let resume; + let done = false; + function onDone() { + done = true; + } + async function pump() { + try { + for await (let val of stream) { + var _val; + if (done) { + return; + } + if (signal.aborted) { + throw new AbortError(); + } + try { + val = fn(val, signalOpt); + } catch (err) { + val = PromiseReject(err); + } + if (val === kEmpty) { + continue; + } + if ( + typeof ((_val = val) === null || _val === void 0 + ? void 0 + : _val.catch) === "function" + ) { + val.catch(onDone); + } + queue.push(val); + if (next) { + next(); + next = null; + } + if (!done && queue.length && queue.length >= concurrency) { + await new Promise2((resolve) => { + resume = resolve; + }); + } + } + queue.push(kEof); + } catch (err) { + const val = PromiseReject(err); + PromisePrototypeThen(val, void 0, onDone); + queue.push(val); + } finally { + var _options$signal3; + done = true; + if (next) { + next(); + next = null; + } + options === null || options === void 0 + ? 
void 0 + : (_options$signal3 = options.signal) === null || + _options$signal3 === void 0 + ? void 0 + : _options$signal3.removeEventListener("abort", abort); + } + } + pump(); + try { + while (true) { + while (queue.length > 0) { + const val = await queue[0]; + if (val === kEof) { + return; + } + if (signal.aborted) { + throw new AbortError(); + } + if (val !== kEmpty) { + yield val; + } + queue.shift(); + if (resume) { + resume(); + resume = null; + } + } + await new Promise2((resolve) => { + next = resolve; + }); + } + } finally { + ac.abort(); + done = true; + if (resume) { + resume(); + resume = null; + } + } + }.call(this); + } + function asIndexedPairs(options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + return async function* asIndexedPairs2() { + let index = 0; + for await (const val of this) { + var _options$signal4; + if ( + options !== null && options !== void 0 && + (_options$signal4 = options.signal) !== null && + _options$signal4 !== void 0 && _options$signal4.aborted + ) { + throw new AbortError({ + cause: options.signal.reason, + }); + } + yield [index++, val]; + } + }.call(this); + } + async function some(fn, options = void 0) { + for await (const unused of filter.call(this, fn, options)) { + return true; + } + return false; + } + async function every(fn, options = void 0) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + return !await some.call( + this, + async (...args) => { + return !await fn(...args); + }, + options, + ); + } + async function find(fn, options) { + for await (const result of filter.call(this, fn, options)) { + return result; + } + return void 0; + } + async function forEach(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", 
"AsyncFunction"], fn); + } + async function forEachFn(value, options2) { + await fn(value, options2); + return kEmpty; + } + for await (const unused of map.call(this, forEachFn, options)); + } + function filter(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + async function filterFn(value, options2) { + if (await fn(value, options2)) { + return value; + } + return kEmpty; + } + return map.call(this, filterFn, options); + } + var ReduceAwareErrMissingArgs = class extends ERR_MISSING_ARGS { + constructor() { + super("reduce"); + this.message = "Reduce of an empty stream requires an initial value"; + } + }; + async function reduce(reducer, initialValue, options) { + var _options$signal5; + if (typeof reducer !== "function") { + throw new ERR_INVALID_ARG_TYPE( + "reducer", + ["Function", "AsyncFunction"], + reducer, + ); + } + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + let hasInitialValue = arguments.length > 1; + if ( + options !== null && options !== void 0 && + (_options$signal5 = options.signal) !== null && + _options$signal5 !== void 0 && _options$signal5.aborted + ) { + const err = new AbortError(void 0, { + cause: options.signal.reason, + }); + this.once("error", () => { + }); + await finished(this.destroy(err)); + throw err; + } + const ac = new AbortController(); + const signal = ac.signal; + if (options !== null && options !== void 0 && options.signal) { + const opts = { + once: true, + [kWeakHandler]: this, + }; + options.signal.addEventListener("abort", () => ac.abort(), opts); + } + let gotAnyItemFromStream = false; + try { + for await (const value of this) { + var _options$signal6; + gotAnyItemFromStream = true; + if ( + options !== null && options !== void 0 && + (_options$signal6 = options.signal) !== null && + _options$signal6 !== void 0 && _options$signal6.aborted + ) { + throw new AbortError(); + } + if (!hasInitialValue) { + initialValue = value; + hasInitialValue = true; + } else { + initialValue = await reducer(initialValue, value, { + signal, + }); + } + } + if (!gotAnyItemFromStream && !hasInitialValue) { + throw new ReduceAwareErrMissingArgs(); + } + } finally { + ac.abort(); + } + return initialValue; + } + async function toArray(options) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + const result = []; + for await (const val of this) { + var _options$signal7; + if ( + options !== null && options !== void 0 && + (_options$signal7 = options.signal) !== null && + _options$signal7 !== void 0 && _options$signal7.aborted + ) { + throw new AbortError(void 0, { + cause: options.signal.reason, + }); + } + ArrayPrototypePush(result, val); + } + return result; + } + function flatMap(fn, options) { + const values = map.call(this, fn, options); + return async function* flatMap2() { + for await (const val of values) { + yield* val; + } + }.call(this); + } + function toIntegerOrInfinity(number) { + number = Number2(number); + if (NumberIsNaN(number)) { + return 0; + } + if (number < 0) { + throw new ERR_OUT_OF_RANGE("number", ">= 0", number); + } + return number; + } + function drop(number, options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + number = toIntegerOrInfinity(number); + return async function* drop2() { + var _options$signal8; + if ( + options !== null && options !== void 0 && + (_options$signal8 = options.signal) !== null && + _options$signal8 !== void 0 && _options$signal8.aborted + ) { + throw new AbortError(); + } + for await (const val of this) { + var _options$signal9; + if ( + options !== null && options !== void 0 && + (_options$signal9 = options.signal) !== null && + _options$signal9 !== void 0 && _options$signal9.aborted + ) { + throw new AbortError(); + } + if (number-- <= 0) { + yield val; + } + } + }.call(this); + } + function take(number, options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + number = toIntegerOrInfinity(number); + return async function* take2() { + var _options$signal10; + if ( + options !== null && options !== void 0 && + (_options$signal10 = options.signal) !== null && + _options$signal10 !== void 0 && _options$signal10.aborted + ) { + throw new AbortError(); + } + for await (const val of this) { + var _options$signal11; + if ( + options !== null && options !== void 0 && + (_options$signal11 = options.signal) !== null && + _options$signal11 !== void 0 && _options$signal11.aborted + ) { + throw new AbortError(); + } + if (number-- > 0) { + yield val; + } else { + return; + } + } + }.call(this); + } + module.exports.streamReturningOperators = { + asIndexedPairs, + drop, + filter, + flatMap, + map, + take, + }; + module.exports.promiseReturningOperators = { + every, + forEach, + reduce, + toArray, + some, + find, + }; + }, +}); + +// lib/internal/streams/destroy.js +var require_destroy = __commonJS({ + "lib/internal/streams/destroy.js"(exports, module) { + "use strict"; + var process = require_browser2(); + var { Symbol: Symbol2 } = require_primordials(); + var { kDestroyed, isDestroyed, isFinished, isServerRequest } = + require_utils(); + var kDestroy = Symbol2("kDestroy"); + var kConstruct = Symbol2("kConstruct"); + function checkError(err, w, r) { + if (err) { + err.stack; + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + } + } + function destroy(err, cb) { + const r = this._readableState; + const w = this._writableState; + const s = w || r; + if (w && w.destroyed || r && r.destroyed) { + if (typeof cb === "function") { + cb(); + } + return this; + } + checkError(err, w, r); + if (w) { + w.destroyed = true; + } + if (r) { + r.destroyed = true; + } + if (!s.constructed) { + this.once(kDestroy, function (er) { + _destroy(this, aggregateTwoErrors(er, err), cb); + }); + } else { + 
_destroy(this, err, cb); + } + return this; + } + function _destroy(self2, err, cb) { + let called = false; + function onDestroy(err2) { + if (called) { + return; + } + called = true; + const r = self2._readableState; + const w = self2._writableState; + checkError(err2, w, r); + if (w) { + w.closed = true; + } + if (r) { + r.closed = true; + } + if (typeof cb === "function") { + cb(err2); + } + if (err2) { + process.nextTick(emitErrorCloseNT, self2, err2); + } else { + process.nextTick(emitCloseNT, self2); + } + } + try { + self2._destroy(err || null, onDestroy); + } catch (err2) { + onDestroy(err2); + } + } + function emitErrorCloseNT(self2, err) { + emitErrorNT(self2, err); + emitCloseNT(self2); + } + function emitCloseNT(self2) { + const r = self2._readableState; + const w = self2._writableState; + if (w) { + w.closeEmitted = true; + } + if (r) { + r.closeEmitted = true; + } + if (w && w.emitClose || r && r.emitClose) { + self2.emit("close"); + } + } + function emitErrorNT(self2, err) { + const r = self2._readableState; + const w = self2._writableState; + if (w && w.errorEmitted || r && r.errorEmitted) { + return; + } + if (w) { + w.errorEmitted = true; + } + if (r) { + r.errorEmitted = true; + } + self2.emit("error", err); + } + function undestroy() { + const r = this._readableState; + const w = this._writableState; + if (r) { + r.constructed = true; + r.closed = false; + r.closeEmitted = false; + r.destroyed = false; + r.errored = null; + r.errorEmitted = false; + r.reading = false; + r.ended = r.readable === false; + r.endEmitted = r.readable === false; + } + if (w) { + w.constructed = true; + w.destroyed = false; + w.closed = false; + w.closeEmitted = false; + w.errored = null; + w.errorEmitted = false; + w.finalCalled = false; + w.prefinished = false; + w.ended = w.writable === false; + w.ending = w.writable === false; + w.finished = w.writable === false; + } + } + function errorOrDestroy(stream, err, sync) { + const r = stream._readableState; + const w = 
stream._writableState; + if (w && w.destroyed || r && r.destroyed) { + return this; + } + if (r && r.autoDestroy || w && w.autoDestroy) { + stream.destroy(err); + } else if (err) { + err.stack; + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + if (sync) { + process.nextTick(emitErrorNT, stream, err); + } else { + emitErrorNT(stream, err); + } + } + } + function construct(stream, cb) { + if (typeof stream._construct !== "function") { + return; + } + const r = stream._readableState; + const w = stream._writableState; + if (r) { + r.constructed = false; + } + if (w) { + w.constructed = false; + } + stream.once(kConstruct, cb); + if (stream.listenerCount(kConstruct) > 1) { + return; + } + process.nextTick(constructNT, stream); + } + function constructNT(stream) { + let called = false; + function onConstruct(err) { + if (called) { + errorOrDestroy( + stream, + err !== null && err !== void 0 ? err : new ERR_MULTIPLE_CALLBACK(), + ); + return; + } + called = true; + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + if (r) { + r.constructed = true; + } + if (w) { + w.constructed = true; + } + if (s.destroyed) { + stream.emit(kDestroy, err); + } else if (err) { + errorOrDestroy(stream, err, true); + } else { + process.nextTick(emitConstructNT, stream); + } + } + try { + stream._construct(onConstruct); + } catch (err) { + onConstruct(err); + } + } + function emitConstructNT(stream) { + stream.emit(kConstruct); + } + function isRequest(stream) { + return stream && stream.setHeader && typeof stream.abort === "function"; + } + function emitCloseLegacy(stream) { + stream.emit("close"); + } + function emitErrorCloseLegacy(stream, err) { + stream.emit("error", err); + process.nextTick(emitCloseLegacy, stream); + } + function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { + return; + } + if (!err && !isFinished(stream)) { + err = new AbortError(); + } + if 
(isServerRequest(stream)) { + stream.socket = null; + stream.destroy(err); + } else if (isRequest(stream)) { + stream.abort(); + } else if (isRequest(stream.req)) { + stream.req.abort(); + } else if (typeof stream.destroy === "function") { + stream.destroy(err); + } else if (typeof stream.close === "function") { + stream.close(); + } else if (err) { + process.nextTick(emitErrorCloseLegacy, stream, err); + } else { + process.nextTick(emitCloseLegacy, stream); + } + if (!stream.destroyed) { + stream[kDestroyed] = true; + } + } + module.exports = { + construct, + destroyer, + destroy, + undestroy, + errorOrDestroy, + }; + }, +}); + +// node_modules/events/events.js +var require_events = __commonJS({ + "node_modules/events/events.js"(exports, module) { + "use strict"; + var R = typeof Reflect === "object" ? Reflect : null; + var ReflectApply = R && typeof R.apply === "function" + ? R.apply + : function ReflectApply2(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + }; + var ReflectOwnKeys; + if (R && typeof R.ownKeys === "function") { + ReflectOwnKeys = R.ownKeys; + } else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys2(target) { + return Object.getOwnPropertyNames(target).concat( + Object.getOwnPropertySymbols(target), + ); + }; + } else { + ReflectOwnKeys = function ReflectOwnKeys2(target) { + return Object.getOwnPropertyNames(target); + }; + } + function ProcessEmitWarning(warning) { + if (console && console.warn) { + console.warn(warning); + } + } + var NumberIsNaN = Number.isNaN || function NumberIsNaN2(value) { + return value !== value; + }; + function EventEmitter() { + EventEmitter.init.call(this); + } + module.exports = EventEmitter; + module.exports.once = once; + EventEmitter.EventEmitter = EventEmitter; + EventEmitter.prototype._events = void 0; + EventEmitter.prototype._eventsCount = 0; + EventEmitter.prototype._maxListeners = void 0; + var defaultMaxListeners = 10; + function 
checkListener(listener) { + if (typeof listener !== "function") { + throw new TypeError( + 'The "listener" argument must be of type Function. Received type ' + + typeof listener, + ); + } + } + Object.defineProperty(EventEmitter, "defaultMaxListeners", { + enumerable: true, + get: function () { + return defaultMaxListeners; + }, + set: function (arg) { + if (typeof arg !== "number" || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError( + 'The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + + arg + ".", + ); + } + defaultMaxListeners = arg; + }, + }); + EventEmitter.init = function () { + if ( + this._events === void 0 || + this._events === Object.getPrototypeOf(this)._events + ) { + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + } + this._maxListeners = this._maxListeners || void 0; + }; + EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== "number" || n < 0 || NumberIsNaN(n)) { + throw new RangeError( + 'The value of "n" is out of range. It must be a non-negative number. Received ' + + n + ".", + ); + } + this._maxListeners = n; + return this; + }; + function _getMaxListeners(that) { + if (that._maxListeners === void 0) { + return EventEmitter.defaultMaxListeners; + } + return that._maxListeners; + } + EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); + }; + EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) { + args.push(arguments[i]); + } + var doError = type === "error"; + var events = this._events; + if (events !== void 0) { + doError = doError && events.error === void 0; + } else if (!doError) { + return false; + } + if (doError) { + var er; + if (args.length > 0) { + er = args[0]; + } + if (er instanceof Error) { + throw er; + } + var err = new Error( + "Unhandled error." + (er ? 
" (" + er.message + ")" : ""), + ); + err.context = er; + throw err; + } + var handler = events[type]; + if (handler === void 0) { + return false; + } + if (typeof handler === "function") { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) { + ReflectApply(listeners[i], this, args); + } + } + return true; + }; + function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + checkListener(listener); + events = target._events; + if (events === void 0) { + events = target._events = /* @__PURE__ */ Object.create(null); + target._eventsCount = 0; + } else { + if (events.newListener !== void 0) { + target.emit( + "newListener", + type, + listener.listener ? listener.listener : listener, + ); + events = target._events; + } + existing = events[type]; + } + if (existing === void 0) { + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === "function") { + existing = events[type] = prepend + ? [listener, existing] + : [existing, listener]; + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + var w = new Error( + "Possible EventEmitter memory leak detected. " + existing.length + + " " + String(type) + + " listeners added. 
Use emitter.setMaxListeners() to increase limit", + ); + w.name = "MaxListenersExceededWarning"; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + return target; + } + EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); + }; + EventEmitter.prototype.on = EventEmitter.prototype.addListener; + EventEmitter.prototype.prependListener = function prependListener( + type, + listener, + ) { + return _addListener(this, type, listener, true); + }; + function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) { + return this.listener.call(this.target); + } + return this.listener.apply(this.target, arguments); + } + } + function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: void 0, target, type, listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; + } + EventEmitter.prototype.once = function once2(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; + }; + EventEmitter.prototype.prependOnceListener = function prependOnceListener( + type, + listener, + ) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + EventEmitter.prototype.removeListener = function removeListener( + type, + listener, + ) { + var list, events, position, i, originalListener; + checkListener(listener); + events = this._events; + if (events === void 0) { + return this; + } + list = events[type]; + if (list === void 0) { + return this; + } + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) { + this._events = /* @__PURE__ */ Object.create(null); + } else { + delete events[type]; + if (events.removeListener) { + this.emit("removeListener", 
type, list.listener || listener); + } + } + } else if (typeof list !== "function") { + position = -1; + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + if (position < 0) { + return this; + } + if (position === 0) { + list.shift(); + } else { + spliceOne(list, position); + } + if (list.length === 1) { + events[type] = list[0]; + } + if (events.removeListener !== void 0) { + this.emit("removeListener", type, originalListener || listener); + } + } + return this; + }; + EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + EventEmitter.prototype.removeAllListeners = function removeAllListeners( + type, + ) { + var listeners, events, i; + events = this._events; + if (events === void 0) { + return this; + } + if (events.removeListener === void 0) { + if (arguments.length === 0) { + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== void 0) { + if (--this._eventsCount === 0) { + this._events = /* @__PURE__ */ Object.create(null); + } else { + delete events[type]; + } + } + return this; + } + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === "removeListener") { + continue; + } + this.removeAllListeners(key); + } + this.removeAllListeners("removeListener"); + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + return this; + } + listeners = events[type]; + if (typeof listeners === "function") { + this.removeListener(type, listeners); + } else if (listeners !== void 0) { + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + return this; + }; + function _listeners(target, type, unwrap) { + var events = target._events; + if (events === void 0) { + return []; + } + var evlistener = events[type]; + if (evlistener 
=== void 0) { + return []; + } + if (typeof evlistener === "function") { + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + } + return unwrap + ? unwrapListeners(evlistener) + : arrayClone(evlistener, evlistener.length); + } + EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); + }; + EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); + }; + EventEmitter.listenerCount = function (emitter, type) { + if (typeof emitter.listenerCount === "function") { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } + }; + EventEmitter.prototype.listenerCount = listenerCount; + function listenerCount(type) { + var events = this._events; + if (events !== void 0) { + var evlistener = events[type]; + if (typeof evlistener === "function") { + return 1; + } else if (evlistener !== void 0) { + return evlistener.length; + } + } + return 0; + } + EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? 
ReflectOwnKeys(this._events) : []; + }; + function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) { + copy[i] = arr[i]; + } + return copy; + } + function spliceOne(list, index) { + for (; index + 1 < list.length; index++) { + list[index] = list[index + 1]; + } + list.pop(); + } + function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; + } + function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + function resolver() { + if (typeof emitter.removeListener === "function") { + emitter.removeListener("error", errorListener); + } + resolve([].slice.call(arguments)); + } + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== "error") { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); + } + function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === "function") { + eventTargetAgnosticAddListener(emitter, "error", handler, flags); + } + } + function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === "function") { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === "function") { + emitter.addEventListener(name, function wrapListener(arg) { + if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError( + 'The "emitter" argument must be of type EventEmitter. 
Received type ' + + typeof emitter, + ); + } + } + }, +}); + +// lib/internal/streams/legacy.js +var require_legacy = __commonJS({ + "lib/internal/streams/legacy.js"(exports, module) { + "use strict"; + var { ArrayIsArray, ObjectSetPrototypeOf } = require_primordials(); + var { EventEmitter: EE } = require_events(); + function Stream(opts) { + EE.call(this, opts); + } + ObjectSetPrototypeOf(Stream.prototype, EE.prototype); + ObjectSetPrototypeOf(Stream, EE); + Stream.prototype.pipe = function (dest, options) { + const source = this; + function ondata(chunk) { + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause(); + } + } + source.on("data", ondata); + function ondrain() { + if (source.readable && source.resume) { + source.resume(); + } + } + dest.on("drain", ondrain); + if (!dest._isStdio && (!options || options.end !== false)) { + source.on("end", onend); + source.on("close", onclose); + } + let didOnEnd = false; + function onend() { + if (didOnEnd) { + return; + } + didOnEnd = true; + dest.end(); + } + function onclose() { + if (didOnEnd) { + return; + } + didOnEnd = true; + if (typeof dest.destroy === "function") { + dest.destroy(); + } + } + function onerror(er) { + cleanup(); + if (EE.listenerCount(this, "error") === 0) { + this.emit("error", er); + } + } + prependListener(source, "error", onerror); + prependListener(dest, "error", onerror); + function cleanup() { + source.removeListener("data", ondata); + dest.removeListener("drain", ondrain); + source.removeListener("end", onend); + source.removeListener("close", onclose); + source.removeListener("error", onerror); + dest.removeListener("error", onerror); + source.removeListener("end", cleanup); + source.removeListener("close", cleanup); + dest.removeListener("close", cleanup); + } + source.on("end", cleanup); + source.on("close", cleanup); + dest.on("close", cleanup); + dest.emit("pipe", source); + return dest; + }; + function prependListener(emitter, event, fn) { + if 
(typeof emitter.prependListener === "function") { + return emitter.prependListener(event, fn); + } + if (!emitter._events || !emitter._events[event]) { + emitter.on(event, fn); + } else if (ArrayIsArray(emitter._events[event])) { + emitter._events[event].unshift(fn); + } else { + emitter._events[event] = [fn, emitter._events[event]]; + } + } + module.exports = { + Stream, + prependListener, + }; + }, +}); + +// lib/internal/streams/add-abort-signal.js +var require_add_abort_signal = __commonJS({ + "lib/internal/streams/add-abort-signal.js"(exports, module) { + "use strict"; + var eos = require_end_of_stream(); + var validateAbortSignal = (signal, name) => { + if (typeof signal !== "object" || !("aborted" in signal)) { + throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal); + } + }; + function isNodeStream(obj) { + return !!(obj && typeof obj.pipe === "function"); + } + module.exports.addAbortSignal = function addAbortSignal(signal, stream) { + validateAbortSignal(signal, "signal"); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE("stream", "stream.Stream", stream); + } + return module.exports.addAbortSignalNoValidate(signal, stream); + }; + module.exports.addAbortSignalNoValidate = function (signal, stream) { + if (typeof signal !== "object" || !("aborted" in signal)) { + return stream; + } + const onAbort = () => { + stream.destroy( + new AbortError(void 0, { + cause: signal.reason, + }), + ); + }; + if (signal.aborted) { + onAbort(); + } else { + signal.addEventListener("abort", onAbort); + eos(stream, () => signal.removeEventListener("abort", onAbort)); + } + return stream; + }; + }, +}); + +// lib/internal/streams/buffer_list.js +var require_buffer_list = __commonJS({ + "lib/internal/streams/buffer_list.js"(exports, module) { + "use strict"; + var { + StringPrototypeSlice, + SymbolIterator, + TypedArrayPrototypeSet, + Uint8Array: Uint8Array2, + } = require_primordials(); + var { Buffer: Buffer2 } = require_buffer(); + module.exports = 
class BufferList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + push(v) { + const entry = { + data: v, + next: null, + }; + if (this.length > 0) { + this.tail.next = entry; + } else { + this.head = entry; + } + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head, + }; + if (this.length === 0) { + this.tail = entry; + } + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) { + return; + } + const ret = this.head.data; + if (this.length === 1) { + this.head = this.tail = null; + } else { + this.head = this.head.next; + } + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) { + return ""; + } + let p = this.head; + let ret = "" + p.data; + while ((p = p.next) !== null) { + ret += s + p.data; + } + return ret; + } + concat(n) { + if (this.length === 0) { + return Buffer2.alloc(0); + } + const ret = Buffer2.allocUnsafe(n >>> 0); + let p = this.head; + let i = 0; + while (p) { + TypedArrayPrototypeSet(ret, p.data, i); + i += p.data.length; + p = p.next; + } + return ret; + } + // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + const data = this.head.data; + if (n < data.length) { + const slice = data.slice(0, n); + this.head.data = data.slice(n); + return slice; + } + if (n === data.length) { + return this.shift(); + } + return hasStrings ? this._getString(n) : this._getBuffer(n); + } + first() { + return this.head.data; + } + *[SymbolIterator]() { + for (let p = this.head; p; p = p.next) { + yield p.data; + } + } + // Consumes a specified amount of characters from the buffered data. 
+ _getString(n) { + let ret = ""; + let p = this.head; + let c = 0; + do { + const str = p.data; + if (n > str.length) { + ret += str; + n -= str.length; + } else { + if (n === str.length) { + ret += str; + ++c; + if (p.next) { + this.head = p.next; + } else { + this.head = this.tail = null; + } + } else { + ret += StringPrototypeSlice(str, 0, n); + this.head = p; + p.data = StringPrototypeSlice(str, n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer2.allocUnsafe(n); + const retLen = n; + let p = this.head; + let c = 0; + do { + const buf = p.data; + if (n > buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + n -= buf.length; + } else { + if (n === buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + ++c; + if (p.next) { + this.head = p.next; + } else { + this.head = this.tail = null; + } + } else { + TypedArrayPrototypeSet( + ret, + new Uint8Array2(buf.buffer, buf.byteOffset, n), + retLen - n, + ); + this.head = p; + p.data = buf.slice(n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + // Make sure the linked list only shows the minimal necessary information. + [Symbol.for("nodejs.util.inspect.custom")](_, options) { + return inspect(this, { + ...options, + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false, + }); + } + }; + }, +}); + +// lib/internal/streams/state.js +var require_state = __commonJS({ + "lib/internal/streams/state.js"(exports, module) { + "use strict"; + var { MathFloor, NumberIsInteger } = require_primordials(); + function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null + ? options.highWaterMark + : isDuplex + ? options[duplexKey] + : null; + } + function getDefaultHighWaterMark(objectMode) { + return objectMode ? 
16 : 16 * 1024; + } + function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!NumberIsInteger(hwm) || hwm < 0) { + const name = isDuplex + ? `options.${duplexKey}` + : "options.highWaterMark"; + throw new ERR_INVALID_ARG_VALUE(name, hwm); + } + return MathFloor(hwm); + } + return getDefaultHighWaterMark(state.objectMode); + } + module.exports = { + getHighWaterMark, + getDefaultHighWaterMark, + }; + }, +}); + +// node_modules/safe-buffer/index.js +var require_safe_buffer = __commonJS({ + "node_modules/safe-buffer/index.js"(exports, module) { + var buffer = require_buffer(); + var Buffer2 = buffer.Buffer; + function copyProps(src, dst) { + for (var key in src) { + dst[key] = src[key]; + } + } + if ( + Buffer2.from && Buffer2.alloc && Buffer2.allocUnsafe && + Buffer2.allocUnsafeSlow + ) { + module.exports = buffer; + } else { + copyProps(buffer, exports); + exports.Buffer = SafeBuffer; + } + function SafeBuffer(arg, encodingOrOffset, length) { + return Buffer2(arg, encodingOrOffset, length); + } + SafeBuffer.prototype = Object.create(Buffer2.prototype); + copyProps(Buffer2, SafeBuffer); + SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === "number") { + throw new TypeError("Argument must not be a number"); + } + return Buffer2(arg, encodingOrOffset, length); + }; + SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== "number") { + throw new TypeError("Argument must be a number"); + } + var buf = Buffer2(size); + if (fill !== void 0) { + if (typeof encoding === "string") { + buf.fill(fill, encoding); + } else { + buf.fill(fill); + } + } else { + buf.fill(0); + } + return buf; + }; + SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== "number") { + throw new TypeError("Argument must be a number"); + } + return Buffer2(size); + }; + SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 
"number") { + throw new TypeError("Argument must be a number"); + } + return buffer.SlowBuffer(size); + }; + }, +}); + +// lib/internal/streams/from.js +var require_from = __commonJS({ + "lib/internal/streams/from.js"(exports, module) { + "use strict"; + var process = require_browser2(); + var { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = + require_primordials(); + var { Buffer: Buffer2 } = require_buffer(); + function from(Readable, iterable, opts) { + let iterator; + if (typeof iterable === "string" || iterable instanceof Buffer2) { + return new Readable({ + objectMode: true, + ...opts, + read() { + this.push(iterable); + this.push(null); + }, + }); + } + let isAsync; + if (iterable && iterable[SymbolAsyncIterator]) { + isAsync = true; + iterator = iterable[SymbolAsyncIterator](); + } else if (iterable && iterable[SymbolIterator]) { + isAsync = false; + iterator = iterable[SymbolIterator](); + } else { + throw new ERR_INVALID_ARG_TYPE("iterable", ["Iterable"], iterable); + } + const readable = new Readable({ + objectMode: true, + highWaterMark: 1, + // TODO(ronag): What options should be allowed? + ...opts, + }); + let reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + readable._destroy = function (error, cb) { + PromisePrototypeThen( + close(error), + () => process.nextTick(cb, error), + // nextTick is here in case cb throws + (e) => process.nextTick(cb, e || error), + ); + }; + async function close(error) { + const hadError = error !== void 0 && error !== null; + const hasThrow = typeof iterator.throw === "function"; + if (hadError && hasThrow) { + const { value, done } = await iterator.throw(error); + await value; + if (done) { + return; + } + } + if (typeof iterator.return === "function") { + const { value } = await iterator.return(); + await value; + } + } + async function next() { + for (;;) { + try { + const { value, done } = isAsync + ? 
await iterator.next() + : iterator.next(); + if (done) { + readable.push(null); + } else { + const res = value && typeof value.then === "function" + ? await value + : value; + if (res === null) { + reading = false; + throw new ERR_STREAM_NULL_VALUES(); + } else if (readable.push(res)) { + continue; + } else { + reading = false; + } + } + } catch (err) { + readable.destroy(err); + } + break; + } + } + return readable; + } + module.exports = from; + }, +}); + +// lib/internal/streams/readable.js +var require_readable = __commonJS({ + "lib/internal/streams/readable.js"(exports, module) { + var process = require_browser2(); + var { + ArrayPrototypeIndexOf, + NumberIsInteger, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperties, + ObjectKeys, + ObjectSetPrototypeOf, + Promise: Promise2, + SafeSet, + SymbolAsyncIterator, + Symbol: Symbol2, + } = require_primordials(); + module.exports = Readable; + Readable.ReadableState = ReadableState; + var { EventEmitter: EE } = require_events(); + var { Stream, prependListener } = require_legacy(); + var { Buffer: Buffer2 } = require_buffer(); + var { addAbortSignal } = require_add_abort_signal(); + var eos = require_end_of_stream(); + var debug = debuglog("stream", (fn) => { + debug = fn; + }); + var BufferList = require_buffer_list(); + var destroyImpl = require_destroy(); + var { getHighWaterMark, getDefaultHighWaterMark } = require_state(); + var { validateObject } = require_validators(); + var kPaused = Symbol2("kPaused"); + var from = require_from(); + ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); + ObjectSetPrototypeOf(Readable, Stream); + var nop = () => { + }; + var { errorOrDestroy } = destroyImpl; + function ReadableState(options, stream, isDuplex) { + if (typeof isDuplex !== "boolean") { + isDuplex = stream instanceof require_duplex(); + } + this.objectMode = !!(options && options.objectMode); + if (isDuplex) { + this.objectMode = this.objectMode || + !!(options && options.readableObjectMode); + } + 
this.highWaterMark = options + ? getHighWaterMark(this, options, "readableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + this.buffer = new BufferList(); + this.length = 0; + this.pipes = []; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + this.constructed = true; + this.sync = true; + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this[kPaused] = null; + this.errorEmitted = false; + this.emitClose = !options || options.emitClose !== false; + this.autoDestroy = !options || options.autoDestroy !== false; + this.destroyed = false; + this.errored = null; + this.closed = false; + this.closeEmitted = false; + this.defaultEncoding = options && options.defaultEncoding || "utf8"; + this.awaitDrainWriters = null; + this.multiAwaitDrain = false; + this.readingMore = false; + this.dataEmitted = false; + this.decoder = null; + this.encoding = null; + if (options && options.encoding) { + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } + } + function Readable(options) { + if (!(this instanceof Readable)) { + return new Readable(options); + } + const isDuplex = this instanceof require_duplex(); + this._readableState = new ReadableState(options, this, isDuplex); + if (options) { + if (typeof options.read === "function") { + this._read = options.read; + } + if (typeof options.destroy === "function") { + this._destroy = options.destroy; + } + if (typeof options.construct === "function") { + this._construct = options.construct; + } + if (options.signal && !isDuplex) { + addAbortSignal(options.signal, this); + } + } + Stream.call(this, options); + destroyImpl.construct(this, () => { + if (this._readableState.needReadable) { + maybeReadMore(this, this._readableState); + } + }); + } + Readable.prototype.destroy = destroyImpl.destroy; + Readable.prototype._undestroy = destroyImpl.undestroy; + 
Readable.prototype._destroy = function (err, cb) { + cb(err); + }; + Readable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); + }; + Readable.prototype.push = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, false); + }; + Readable.prototype.unshift = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, true); + }; + function readableAddChunk(stream, chunk, encoding, addToFront) { + debug("readableAddChunk", chunk); + const state = stream._readableState; + let err; + if (!state.objectMode) { + if (typeof chunk === "string") { + encoding = encoding || state.defaultEncoding; + if (state.encoding !== encoding) { + if (addToFront && state.encoding) { + chunk = Buffer2.from(chunk, encoding).toString(state.encoding); + } else { + chunk = Buffer2.from(chunk, encoding); + encoding = ""; + } + } + } else if (chunk instanceof Buffer2) { + encoding = ""; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = ""; + } else if (chunk != null) { + err = new ERR_INVALID_ARG_TYPE("chunk", [ + "string", + "Buffer", + "Uint8Array", + ], chunk); + } + } + if (err) { + errorOrDestroy(stream, err); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (addToFront) { + if (state.endEmitted) { + errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); + } else if (state.destroyed || state.errored) { + return false; + } else { + addChunk(stream, state, chunk, true); + } + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed || state.errored) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) { + addChunk(stream, state, chunk, false); + } else { + maybeReadMore(stream, state); 
+ } + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + return !state.ended && + (state.length < state.highWaterMark || state.length === 0); + } + function addChunk(stream, state, chunk, addToFront) { + if ( + state.flowing && state.length === 0 && !state.sync && + stream.listenerCount("data") > 0 + ) { + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + state.dataEmitted = true; + stream.emit("data", chunk); + } else { + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) { + state.buffer.unshift(chunk); + } else { + state.buffer.push(chunk); + } + if (state.needReadable) { + emitReadable(stream); + } + } + maybeReadMore(stream, state); + } + Readable.prototype.isPaused = function () { + const state = this._readableState; + return state[kPaused] === true || state.flowing === false; + }; + Readable.prototype.setEncoding = function (enc) { + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + this._readableState.encoding = this._readableState.decoder.encoding; + const buffer = this._readableState.buffer; + let content = ""; + for (const data of buffer) { + content += decoder.write(data); + } + buffer.clear(); + if (content !== "") { + buffer.push(content); + } + this._readableState.length = content.length; + return this; + }; + var MAX_HWM = 1073741824; + function computeNewHighWaterMark(n) { + if (n > MAX_HWM) { + throw new ERR_OUT_OF_RANGE("size", "<= 1GiB", n); + } else { + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; + } + function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) { + return 0; + } + if (state.objectMode) { + return 1; + } + if (NumberIsNaN(n)) { + if (state.flowing && state.length) { + return state.buffer.first().length; + } + return 
state.length; + } + if (n <= state.length) { + return n; + } + return state.ended ? state.length : 0; + } + Readable.prototype.read = function (n) { + debug("read", n); + if (n === void 0) { + n = NaN; + } else if (!NumberIsInteger(n)) { + n = NumberParseInt(n, 10); + } + const state = this._readableState; + const nOrig = n; + if (n > state.highWaterMark) { + state.highWaterMark = computeNewHighWaterMark(n); + } + if (n !== 0) { + state.emittedReadable = false; + } + if ( + n === 0 && state.needReadable && + ((state.highWaterMark !== 0 + ? state.length >= state.highWaterMark + : state.length > 0) || state.ended) + ) { + debug("read: emitReadable", state.length, state.ended); + if (state.length === 0 && state.ended) { + endReadable(this); + } else { + emitReadable(this); + } + return null; + } + n = howMuchToRead(n, state); + if (n === 0 && state.ended) { + if (state.length === 0) { + endReadable(this); + } + return null; + } + let doRead = state.needReadable; + debug("need readable", doRead); + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug("length less than watermark", doRead); + } + if ( + state.ended || state.reading || state.destroyed || state.errored || + !state.constructed + ) { + doRead = false; + debug("reading, ended or constructing", doRead); + } else if (doRead) { + debug("do read"); + state.reading = true; + state.sync = true; + if (state.length === 0) { + state.needReadable = true; + } + try { + this._read(state.highWaterMark); + } catch (err) { + errorOrDestroy(this, err); + } + state.sync = false; + if (!state.reading) { + n = howMuchToRead(nOrig, state); + } + } + let ret; + if (n > 0) { + ret = fromList(n, state); + } else { + ret = null; + } + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + } + if (state.length === 
0) { + if (!state.ended) { + state.needReadable = true; + } + if (nOrig !== n && state.ended) { + endReadable(this); + } + } + if (ret !== null && !state.errorEmitted && !state.closeEmitted) { + state.dataEmitted = true; + this.emit("data", ret); + } + return ret; + }; + function onEofChunk(stream, state) { + debug("onEofChunk"); + if (state.ended) { + return; + } + if (state.decoder) { + const chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + emitReadable(stream); + } else { + state.needReadable = false; + state.emittedReadable = true; + emitReadable_(stream); + } + } + function emitReadable(stream) { + const state = stream._readableState; + debug("emitReadable", state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug("emitReadable", state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } + } + function emitReadable_(stream) { + const state = stream._readableState; + debug("emitReadable_", state.destroyed, state.length, state.ended); + if (!state.destroyed && !state.errored && (state.length || state.ended)) { + stream.emit("readable"); + state.emittedReadable = false; + } + state.needReadable = !state.flowing && !state.ended && + state.length <= state.highWaterMark; + flow(stream); + } + function maybeReadMore(stream, state) { + if (!state.readingMore && state.constructed) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } + } + function maybeReadMore_(stream, state) { + while ( + !state.reading && !state.ended && + (state.length < state.highWaterMark || + state.flowing && state.length === 0) + ) { + const len = state.length; + debug("maybeReadMore read 0"); + stream.read(0); + if (len === state.length) { + break; + } + } + state.readingMore = false; + } + Readable.prototype._read = function 
(n) { + throw new ERR_METHOD_NOT_IMPLEMENTED("_read()"); + }; + Readable.prototype.pipe = function (dest, pipeOpts) { + const src = this; + const state = this._readableState; + if (state.pipes.length === 1) { + if (!state.multiAwaitDrain) { + state.multiAwaitDrain = true; + state.awaitDrainWriters = new SafeSet( + state.awaitDrainWriters ? [state.awaitDrainWriters] : [], + ); + } + } + state.pipes.push(dest); + debug("pipe count=%d opts=%j", state.pipes.length, pipeOpts); + const doEnd = (!pipeOpts || pipeOpts.end !== false) && + dest !== process.stdout && dest !== process.stderr; + const endFn = doEnd ? onend : unpipe; + if (state.endEmitted) { + process.nextTick(endFn); + } else { + src.once("end", endFn); + } + dest.on("unpipe", onunpipe); + function onunpipe(readable, unpipeInfo) { + debug("onunpipe"); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug("onend"); + dest.end(); + } + let ondrain; + let cleanedUp = false; + function cleanup() { + debug("cleanup"); + dest.removeListener("close", onclose); + dest.removeListener("finish", onfinish); + if (ondrain) { + dest.removeListener("drain", ondrain); + } + dest.removeListener("error", onerror); + dest.removeListener("unpipe", onunpipe); + src.removeListener("end", onend); + src.removeListener("end", unpipe); + src.removeListener("data", ondata); + cleanedUp = true; + if ( + ondrain && state.awaitDrainWriters && + (!dest._writableState || dest._writableState.needDrain) + ) { + ondrain(); + } + } + function pause() { + if (!cleanedUp) { + if (state.pipes.length === 1 && state.pipes[0] === dest) { + debug("false write response, pause", 0); + state.awaitDrainWriters = dest; + state.multiAwaitDrain = false; + } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { + debug("false write response, pause", state.awaitDrainWriters.size); + state.awaitDrainWriters.add(dest); + } + src.pause(); + } 
+ if (!ondrain) { + ondrain = pipeOnDrain(src, dest); + dest.on("drain", ondrain); + } + } + src.on("data", ondata); + function ondata(chunk) { + debug("ondata"); + const ret = dest.write(chunk); + debug("dest.write", ret); + if (ret === false) { + pause(); + } + } + function onerror(er) { + debug("onerror", er); + unpipe(); + dest.removeListener("error", onerror); + if (dest.listenerCount("error") === 0) { + const s = dest._writableState || dest._readableState; + if (s && !s.errorEmitted) { + errorOrDestroy(dest, er); + } else { + dest.emit("error", er); + } + } + } + prependListener(dest, "error", onerror); + function onclose() { + dest.removeListener("finish", onfinish); + unpipe(); + } + dest.once("close", onclose); + function onfinish() { + debug("onfinish"); + dest.removeListener("close", onclose); + unpipe(); + } + dest.once("finish", onfinish); + function unpipe() { + debug("unpipe"); + src.unpipe(dest); + } + dest.emit("pipe", src); + if (dest.writableNeedDrain === true) { + if (state.flowing) { + pause(); + } + } else if (!state.flowing) { + debug("pipe resume"); + src.resume(); + } + return dest; + }; + function pipeOnDrain(src, dest) { + return function pipeOnDrainFunctionResult() { + const state = src._readableState; + if (state.awaitDrainWriters === dest) { + debug("pipeOnDrain", 1); + state.awaitDrainWriters = null; + } else if (state.multiAwaitDrain) { + debug("pipeOnDrain", state.awaitDrainWriters.size); + state.awaitDrainWriters.delete(dest); + } + if ( + (!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && + src.listenerCount("data") + ) { + src.resume(); + } + }; + } + Readable.prototype.unpipe = function (dest) { + const state = this._readableState; + const unpipeInfo = { + hasUnpiped: false, + }; + if (state.pipes.length === 0) { + return this; + } + if (!dest) { + const dests = state.pipes; + state.pipes = []; + this.pause(); + for (let i = 0; i < dests.length; i++) { + dests[i].emit("unpipe", this, { + hasUnpiped: false, + }); 
+ } + return this; + } + const index = ArrayPrototypeIndexOf(state.pipes, dest); + if (index === -1) { + return this; + } + state.pipes.splice(index, 1); + if (state.pipes.length === 0) { + this.pause(); + } + dest.emit("unpipe", this, unpipeInfo); + return this; + }; + Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; + if (ev === "data") { + state.readableListening = this.listenerCount("readable") > 0; + if (state.flowing !== false) { + this.resume(); + } + } else if (ev === "readable") { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug("on readable", state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; + }; + Readable.prototype.addListener = Readable.prototype.on; + Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === "readable") { + process.nextTick(updateReadableListening, this); + } + return res; + }; + Readable.prototype.off = Readable.prototype.removeListener; + Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === "readable" || ev === void 0) { + process.nextTick(updateReadableListening, this); + } + return res; + }; + function updateReadableListening(self2) { + const state = self2._readableState; + state.readableListening = self2.listenerCount("readable") > 0; + if (state.resumeScheduled && state[kPaused] === false) { + state.flowing = true; + } else if (self2.listenerCount("data") > 0) { + self2.resume(); + } else if (!state.readableListening) { + state.flowing = null; + } + } + function nReadingNextTick(self2) { + debug("readable nexttick read 0"); + 
self2.read(0); + } + Readable.prototype.resume = function () { + const state = this._readableState; + if (!state.flowing) { + debug("resume"); + state.flowing = !state.readableListening; + resume(this, state); + } + state[kPaused] = false; + return this; + }; + function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } + } + function resume_(stream, state) { + debug("resume", state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit("resume"); + flow(stream); + if (state.flowing && !state.reading) { + stream.read(0); + } + } + Readable.prototype.pause = function () { + debug("call pause flowing=%j", this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug("pause"); + this._readableState.flowing = false; + this.emit("pause"); + } + this._readableState[kPaused] = true; + return this; + }; + function flow(stream) { + const state = stream._readableState; + debug("flow", state.flowing); + while (state.flowing && stream.read() !== null); + } + Readable.prototype.wrap = function (stream) { + let paused = false; + stream.on("data", (chunk) => { + if (!this.push(chunk) && stream.pause) { + paused = true; + stream.pause(); + } + }); + stream.on("end", () => { + this.push(null); + }); + stream.on("error", (err) => { + errorOrDestroy(this, err); + }); + stream.on("close", () => { + this.destroy(); + }); + stream.on("destroy", () => { + this.destroy(); + }); + this._read = () => { + if (paused && stream.resume) { + paused = false; + stream.resume(); + } + }; + const streamKeys = ObjectKeys(stream); + for (let j = 1; j < streamKeys.length; j++) { + const i = streamKeys[j]; + if (this[i] === void 0 && typeof stream[i] === "function") { + this[i] = stream[i].bind(stream); + } + } + return this; + }; + Readable.prototype[SymbolAsyncIterator] = function () { + return streamToAsyncIterator(this); + }; + 
Readable.prototype.iterator = function (options) { + if (options !== void 0) { + validateObject(options, "options"); + } + return streamToAsyncIterator(this, options); + }; + function streamToAsyncIterator(stream, options) { + if (typeof stream.read !== "function") { + stream = Readable.wrap(stream, { + objectMode: true, + }); + } + const iter = createAsyncIterator(stream, options); + iter.stream = stream; + return iter; + } + async function* createAsyncIterator(stream, options) { + let callback = nop; + function next(resolve) { + if (this === stream) { + callback(); + callback = nop; + } else { + callback = resolve; + } + } + stream.on("readable", next); + let error; + const cleanup = eos( + stream, + { + writable: false, + }, + (err) => { + error = err ? aggregateTwoErrors(error, err) : null; + callback(); + callback = nop; + }, + ); + try { + while (true) { + const chunk = stream.destroyed ? null : stream.read(); + if (chunk !== null) { + yield chunk; + } else if (error) { + throw error; + } else if (error === null) { + return; + } else { + await new Promise2(next); + } + } + } catch (err) { + error = aggregateTwoErrors(error, err); + throw error; + } finally { + if ( + (error || + (options === null || options === void 0 + ? 
void 0 + : options.destroyOnReturn) !== false) && + (error === void 0 || stream._readableState.autoDestroy) + ) { + destroyImpl.destroyer(stream, null); + } else { + stream.off("readable", next); + cleanup(); + } + } + } + ObjectDefineProperties(Readable.prototype, { + readable: { + __proto__: null, + get() { + const r = this._readableState; + return !!r && r.readable !== false && !r.destroyed && + !r.errorEmitted && !r.endEmitted; + }, + set(val) { + if (this._readableState) { + this._readableState.readable = !!val; + } + }, + }, + readableDidRead: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.dataEmitted; + }, + }, + readableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!(this._readableState.readable !== false && + (this._readableState.destroyed || this._readableState.errored) && + !this._readableState.endEmitted); + }, + }, + readableHighWaterMark: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + }, + }, + readableBuffer: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState && this._readableState.buffer; + }, + }, + readableFlowing: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.flowing; + }, + set: function (state) { + if (this._readableState) { + this._readableState.flowing = state; + } + }, + }, + readableLength: { + __proto__: null, + enumerable: false, + get() { + return this._readableState.length; + }, + }, + readableObjectMode: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.objectMode : false; + }, + }, + readableEncoding: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.encoding : null; + }, + }, + errored: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? 
this._readableState.errored : null; + }, + }, + closed: { + __proto__: null, + get() { + return this._readableState ? this._readableState.closed : false; + }, + }, + destroyed: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.destroyed : false; + }, + set(value) { + if (!this._readableState) { + return; + } + this._readableState.destroyed = value; + }, + }, + readableEnded: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.endEmitted : false; + }, + }, + }); + ObjectDefineProperties(ReadableState.prototype, { + // Legacy getter for `pipesCount`. + pipesCount: { + __proto__: null, + get() { + return this.pipes.length; + }, + }, + // Legacy property for `paused`. + paused: { + __proto__: null, + get() { + return this[kPaused] !== false; + }, + set(value) { + this[kPaused] = !!value; + }, + }, + }); + Readable._fromList = fromList; + function fromList(n, state) { + if (state.length === 0) { + return null; + } + let ret; + if (state.objectMode) { + ret = state.buffer.shift(); + } else if (!n || n >= state.length) { + if (state.decoder) { + ret = state.buffer.join(""); + } else if (state.buffer.length === 1) { + ret = state.buffer.first(); + } else { + ret = state.buffer.concat(state.length); + } + state.buffer.clear(); + } else { + ret = state.buffer.consume(n, state.decoder); + } + return ret; + } + function endReadable(stream) { + const state = stream._readableState; + debug("endReadable", state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } + } + function endReadableNT(state, stream) { + debug("endReadableNT", state.endEmitted, state.length); + if ( + !state.errored && !state.closeEmitted && !state.endEmitted && + state.length === 0 + ) { + state.endEmitted = true; + stream.emit("end"); + if (stream.writable && stream.allowHalfOpen === false) { + process.nextTick(endWritableNT, stream); + } 
else if (state.autoDestroy) { + const wState = stream._writableState; + const autoDestroy = !wState || wState.autoDestroy && // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. + (wState.finished || wState.writable === false); + if (autoDestroy) { + stream.destroy(); + } + } + } + } + function endWritableNT(stream) { + const writable = stream.writable && !stream.writableEnded && + !stream.destroyed; + if (writable) { + stream.end(); + } + } + Readable.from = function (iterable, opts) { + return from(Readable, iterable, opts); + }; + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Readable.fromWeb = function (readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream( + readableStream, + options, + ); + }; + Readable.toWeb = function (streamReadable, options) { + return lazyWebStreams().newReadableStreamFromStreamReadable( + streamReadable, + options, + ); + }; + Readable.wrap = function (src, options) { + var _ref, _src$readableObjectMo; + return new Readable({ + objectMode: + (_ref = (_src$readableObjectMo = src.readableObjectMode) !== null && + _src$readableObjectMo !== void 0 + ? _src$readableObjectMo + : src.objectMode) !== null && _ref !== void 0 + ? 
_ref + : true, + ...options, + destroy(err, callback) { + destroyImpl.destroyer(src, err); + callback(err); + }, + }).wrap(src); + }; + }, +}); + +// lib/internal/streams/writable.js +var require_writable = __commonJS({ + "lib/internal/streams/writable.js"(exports, module) { + var process = require_browser2(); + var { + ArrayPrototypeSlice, + Error: Error2, + FunctionPrototypeSymbolHasInstance, + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + StringPrototypeToLowerCase, + Symbol: Symbol2, + SymbolHasInstance, + } = require_primordials(); + module.exports = Writable; + Writable.WritableState = WritableState; + var { EventEmitter: EE } = require_events(); + var Stream = require_legacy().Stream; + var { Buffer: Buffer2 } = require_buffer(); + var destroyImpl = require_destroy(); + var { addAbortSignal } = require_add_abort_signal(); + var { getHighWaterMark, getDefaultHighWaterMark } = require_state(); + var { errorOrDestroy } = destroyImpl; + ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); + ObjectSetPrototypeOf(Writable, Stream); + function nop() { + } + var kOnFinished = Symbol2("kOnFinished"); + function WritableState(options, stream, isDuplex) { + if (typeof isDuplex !== "boolean") { + isDuplex = stream instanceof require_duplex(); + } + this.objectMode = !!(options && options.objectMode); + if (isDuplex) { + this.objectMode = this.objectMode || + !!(options && options.writableObjectMode); + } + this.highWaterMark = options + ? 
getHighWaterMark(this, options, "writableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + this.finalCalled = false; + this.needDrain = false; + this.ending = false; + this.ended = false; + this.finished = false; + this.destroyed = false; + const noDecode = !!(options && options.decodeStrings === false); + this.decodeStrings = !noDecode; + this.defaultEncoding = options && options.defaultEncoding || "utf8"; + this.length = 0; + this.writing = false; + this.corked = 0; + this.sync = true; + this.bufferProcessing = false; + this.onwrite = onwrite.bind(void 0, stream); + this.writecb = null; + this.writelen = 0; + this.afterWriteTickInfo = null; + resetBuffer(this); + this.pendingcb = 0; + this.constructed = true; + this.prefinished = false; + this.errorEmitted = false; + this.emitClose = !options || options.emitClose !== false; + this.autoDestroy = !options || options.autoDestroy !== false; + this.errored = null; + this.closed = false; + this.closeEmitted = false; + this[kOnFinished] = []; + } + function resetBuffer(state) { + state.buffered = []; + state.bufferedIndex = 0; + state.allBuffers = true; + state.allNoop = true; + } + WritableState.prototype.getBuffer = function getBuffer() { + return ArrayPrototypeSlice(this.buffered, this.bufferedIndex); + }; + ObjectDefineProperty(WritableState.prototype, "bufferedRequestCount", { + __proto__: null, + get() { + return this.buffered.length - this.bufferedIndex; + }, + }); + function Writable(options) { + const isDuplex = this instanceof require_duplex(); + if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) { + return new Writable(options); + } + this._writableState = new WritableState(options, this, isDuplex); + if (options) { + if (typeof options.write === "function") { + this._write = options.write; + } + if (typeof options.writev === "function") { + this._writev = options.writev; + } + if (typeof options.destroy === "function") { + this._destroy = options.destroy; + } + if (typeof 
options.final === "function") { + this._final = options.final; + } + if (typeof options.construct === "function") { + this._construct = options.construct; + } + if (options.signal) { + addAbortSignal(options.signal, this); + } + } + Stream.call(this, options); + destroyImpl.construct(this, () => { + const state = this._writableState; + if (!state.writing) { + clearBuffer(this, state); + } + finishMaybe(this, state); + }); + } + ObjectDefineProperty(Writable, SymbolHasInstance, { + __proto__: null, + value: function (object) { + if (FunctionPrototypeSymbolHasInstance(this, object)) { + return true; + } + if (this !== Writable) { + return false; + } + return object && object._writableState instanceof WritableState; + }, + }); + Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); + }; + function _write(stream, chunk, encoding, cb) { + const state = stream._writableState; + if (typeof encoding === "function") { + cb = encoding; + encoding = state.defaultEncoding; + } else { + if (!encoding) { + encoding = state.defaultEncoding; + } else if (encoding !== "buffer" && !Buffer2.isEncoding(encoding)) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + if (typeof cb !== "function") { + cb = nop; + } + } + if (chunk === null) { + throw new ERR_STREAM_NULL_VALUES(); + } else if (!state.objectMode) { + if (typeof chunk === "string") { + if (state.decodeStrings !== false) { + chunk = Buffer2.from(chunk, encoding); + encoding = "buffer"; + } + } else if (chunk instanceof Buffer2) { + encoding = "buffer"; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = "buffer"; + } else { + throw new ERR_INVALID_ARG_TYPE("chunk", [ + "string", + "Buffer", + "Uint8Array", + ], chunk); + } + } + let err; + if (state.ending) { + err = new ERR_STREAM_WRITE_AFTER_END(); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED("write"); + } + if (err) { + process.nextTick(cb, err); + 
errorOrDestroy(stream, err, true); + return err; + } + state.pendingcb++; + return writeOrBuffer(stream, state, chunk, encoding, cb); + } + Writable.prototype.write = function (chunk, encoding, cb) { + return _write(this, chunk, encoding, cb) === true; + }; + Writable.prototype.cork = function () { + this._writableState.corked++; + }; + Writable.prototype.uncork = function () { + const state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing) { + clearBuffer(this, state); + } + } + }; + Writable.prototype.setDefaultEncoding = function setDefaultEncoding( + encoding, + ) { + if (typeof encoding === "string") { + encoding = StringPrototypeToLowerCase(encoding); + } + if (!Buffer2.isEncoding(encoding)) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + this._writableState.defaultEncoding = encoding; + return this; + }; + function writeOrBuffer(stream, state, chunk, encoding, callback) { + const len = state.objectMode ? 1 : chunk.length; + state.length += len; + const ret = state.length < state.highWaterMark; + if (!ret) { + state.needDrain = true; + } + if ( + state.writing || state.corked || state.errored || !state.constructed + ) { + state.buffered.push({ + chunk, + encoding, + callback, + }); + if (state.allBuffers && encoding !== "buffer") { + state.allBuffers = false; + } + if (state.allNoop && callback !== nop) { + state.allNoop = false; + } + } else { + state.writelen = len; + state.writecb = callback; + state.writing = true; + state.sync = true; + stream._write(chunk, encoding, state.onwrite); + state.sync = false; + } + return ret && !state.errored && !state.destroyed; + } + function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) { + state.onwrite(new ERR_STREAM_DESTROYED("write")); + } else if (writev) { + stream._writev(chunk, state.onwrite); + } else { + stream._write(chunk, encoding, 
state.onwrite); + } + state.sync = false; + } + function onwriteError(stream, state, er, cb) { + --state.pendingcb; + cb(er); + errorBuffer(state); + errorOrDestroy(stream, er); + } + function onwrite(stream, er) { + const state = stream._writableState; + const sync = state.sync; + const cb = state.writecb; + if (typeof cb !== "function") { + errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()); + return; + } + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; + if (er) { + er.stack; + if (!state.errored) { + state.errored = er; + } + if (stream._readableState && !stream._readableState.errored) { + stream._readableState.errored = er; + } + if (sync) { + process.nextTick(onwriteError, stream, state, er, cb); + } else { + onwriteError(stream, state, er, cb); + } + } else { + if (state.buffered.length > state.bufferedIndex) { + clearBuffer(stream, state); + } + if (sync) { + if ( + state.afterWriteTickInfo !== null && + state.afterWriteTickInfo.cb === cb + ) { + state.afterWriteTickInfo.count++; + } else { + state.afterWriteTickInfo = { + count: 1, + cb, + stream, + state, + }; + process.nextTick(afterWriteTick, state.afterWriteTickInfo); + } + } else { + afterWrite(stream, state, 1, cb); + } + } + } + function afterWriteTick({ stream, state, count, cb }) { + state.afterWriteTickInfo = null; + return afterWrite(stream, state, count, cb); + } + function afterWrite(stream, state, count, cb) { + const needDrain = !state.ending && !stream.destroyed && + state.length === 0 && state.needDrain; + if (needDrain) { + state.needDrain = false; + stream.emit("drain"); + } + while (count-- > 0) { + state.pendingcb--; + cb(); + } + if (state.destroyed) { + errorBuffer(state); + } + finishMaybe(stream, state); + } + function errorBuffer(state) { + if (state.writing) { + return; + } + for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { + var _state$errored; + const { chunk, callback } = state.buffered[n]; + const len 
= state.objectMode ? 1 : chunk.length; + state.length -= len; + callback( + (_state$errored = state.errored) !== null && _state$errored !== void 0 + ? _state$errored + : new ERR_STREAM_DESTROYED("write"), + ); + } + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + var _state$errored2; + onfinishCallbacks[i]( + (_state$errored2 = state.errored) !== null && + _state$errored2 !== void 0 + ? _state$errored2 + : new ERR_STREAM_DESTROYED("end"), + ); + } + resetBuffer(state); + } + function clearBuffer(stream, state) { + if ( + state.corked || state.bufferProcessing || state.destroyed || + !state.constructed + ) { + return; + } + const { buffered, bufferedIndex, objectMode } = state; + const bufferedLength = buffered.length - bufferedIndex; + if (!bufferedLength) { + return; + } + let i = bufferedIndex; + state.bufferProcessing = true; + if (bufferedLength > 1 && stream._writev) { + state.pendingcb -= bufferedLength - 1; + const callback = state.allNoop ? nop : (err) => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err); + } + }; + const chunks = state.allNoop && i === 0 + ? buffered + : ArrayPrototypeSlice(buffered, i); + chunks.allBuffers = state.allBuffers; + doWrite(stream, state, true, state.length, chunks, "", callback); + resetBuffer(state); + } else { + do { + const { chunk, encoding, callback } = buffered[i]; + buffered[i++] = null; + const len = objectMode ? 
1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, callback); + } while (i < buffered.length && !state.writing); + if (i === buffered.length) { + resetBuffer(state); + } else if (i > 256) { + buffered.splice(0, i); + state.bufferedIndex = 0; + } else { + state.bufferedIndex = i; + } + } + state.bufferProcessing = false; + } + Writable.prototype._write = function (chunk, encoding, cb) { + if (this._writev) { + this._writev( + [ + { + chunk, + encoding, + }, + ], + cb, + ); + } else { + throw new ERR_METHOD_NOT_IMPLEMENTED("_write()"); + } + }; + Writable.prototype._writev = null; + Writable.prototype.end = function (chunk, encoding, cb) { + const state = this._writableState; + if (typeof chunk === "function") { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === "function") { + cb = encoding; + encoding = null; + } + let err; + if (chunk !== null && chunk !== void 0) { + const ret = _write(this, chunk, encoding); + if (ret instanceof Error2) { + err = ret; + } + } + if (state.corked) { + state.corked = 1; + this.uncork(); + } + if (err) { + } else if (!state.errored && !state.ending) { + state.ending = true; + finishMaybe(this, state, true); + state.ended = true; + } else if (state.finished) { + err = new ERR_STREAM_ALREADY_FINISHED("end"); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED("end"); + } + if (typeof cb === "function") { + if (err || state.finished) { + process.nextTick(cb, err); + } else { + state[kOnFinished].push(cb); + } + } + return this; + }; + function needFinish(state) { + return state.ending && !state.destroyed && state.constructed && + state.length === 0 && !state.errored && state.buffered.length === 0 && + !state.finished && !state.writing && !state.errorEmitted && + !state.closeEmitted; + } + function callFinal(stream, state) { + let called = false; + function onFinish(err) { + if (called) { + errorOrDestroy( + stream, + err !== null && err !== void 0 ? 
err : ERR_MULTIPLE_CALLBACK(), + ); + return; + } + called = true; + state.pendingcb--; + if (err) { + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](err); + } + errorOrDestroy(stream, err, state.sync); + } else if (needFinish(state)) { + state.prefinished = true; + stream.emit("prefinish"); + state.pendingcb++; + process.nextTick(finish, stream, state); + } + } + state.sync = true; + state.pendingcb++; + try { + stream._final(onFinish); + } catch (err) { + onFinish(err); + } + state.sync = false; + } + function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === "function" && !state.destroyed) { + state.finalCalled = true; + callFinal(stream, state); + } else { + state.prefinished = true; + stream.emit("prefinish"); + } + } + } + function finishMaybe(stream, state, sync) { + if (needFinish(state)) { + prefinish(stream, state); + if (state.pendingcb === 0) { + if (sync) { + state.pendingcb++; + process.nextTick( + (stream2, state2) => { + if (needFinish(state2)) { + finish(stream2, state2); + } else { + state2.pendingcb--; + } + }, + stream, + state, + ); + } else if (needFinish(state)) { + state.pendingcb++; + finish(stream, state); + } + } + } + } + function finish(stream, state) { + state.pendingcb--; + state.finished = true; + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](); + } + stream.emit("finish"); + if (state.autoDestroy) { + const rState = stream._readableState; + const autoDestroy = !rState || rState.autoDestroy && // We don't expect the readable to ever 'end' + // if readable is explicitly set to false. + (rState.endEmitted || rState.readable === false); + if (autoDestroy) { + stream.destroy(); + } + } + } + ObjectDefineProperties(Writable.prototype, { + closed: { + __proto__: null, + get() { + return this._writableState ? 
this._writableState.closed : false; + }, + }, + destroyed: { + __proto__: null, + get() { + return this._writableState ? this._writableState.destroyed : false; + }, + set(value) { + if (this._writableState) { + this._writableState.destroyed = value; + } + }, + }, + writable: { + __proto__: null, + get() { + const w = this._writableState; + return !!w && w.writable !== false && !w.destroyed && !w.errored && + !w.ending && !w.ended; + }, + set(val) { + if (this._writableState) { + this._writableState.writable = !!val; + } + }, + }, + writableFinished: { + __proto__: null, + get() { + return this._writableState ? this._writableState.finished : false; + }, + }, + writableObjectMode: { + __proto__: null, + get() { + return this._writableState ? this._writableState.objectMode : false; + }, + }, + writableBuffer: { + __proto__: null, + get() { + return this._writableState && this._writableState.getBuffer(); + }, + }, + writableEnded: { + __proto__: null, + get() { + return this._writableState ? this._writableState.ending : false; + }, + }, + writableNeedDrain: { + __proto__: null, + get() { + const wState = this._writableState; + if (!wState) { + return false; + } + return !wState.destroyed && !wState.ending && wState.needDrain; + }, + }, + writableHighWaterMark: { + __proto__: null, + get() { + return this._writableState && this._writableState.highWaterMark; + }, + }, + writableCorked: { + __proto__: null, + get() { + return this._writableState ? this._writableState.corked : 0; + }, + }, + writableLength: { + __proto__: null, + get() { + return this._writableState && this._writableState.length; + }, + }, + errored: { + __proto__: null, + enumerable: false, + get() { + return this._writableState ? 
this._writableState.errored : null; + }, + }, + writableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!(this._writableState.writable !== false && + (this._writableState.destroyed || this._writableState.errored) && + !this._writableState.finished); + }, + }, + }); + var destroy = destroyImpl.destroy; + Writable.prototype.destroy = function (err, cb) { + const state = this._writableState; + if ( + !state.destroyed && + (state.bufferedIndex < state.buffered.length || + state[kOnFinished].length) + ) { + process.nextTick(errorBuffer, state); + } + destroy.call(this, err, cb); + return this; + }; + Writable.prototype._undestroy = destroyImpl.undestroy; + Writable.prototype._destroy = function (err, cb) { + cb(err); + }; + Writable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); + }; + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Writable.fromWeb = function (writableStream, options) { + return lazyWebStreams().newStreamWritableFromWritableStream( + writableStream, + options, + ); + }; + Writable.toWeb = function (streamWritable) { + return lazyWebStreams().newWritableStreamFromStreamWritable( + streamWritable, + ); + }; + }, +}); + +// lib/internal/streams/duplexify.js +var require_duplexify = __commonJS({ + "lib/internal/streams/duplexify.js"(exports, module) { + var process = require_browser2(); + var bufferModule = require_buffer(); + var { + isReadable, + isWritable, + isIterable, + isNodeStream, + isReadableNodeStream, + isWritableNodeStream, + isDuplexNodeStream, + } = require_utils(); + var eos = require_end_of_stream(); + var { destroyer } = require_destroy(); + var Duplex = require_duplex(); + var Readable = require_readable(); + var from = require_from(); + var isBlob = typeof Blob !== "undefined" + ? 
function isBlob2(b) { + return b instanceof Blob; + } + : function isBlob2(b) { + return false; + }; + var { FunctionPrototypeCall } = require_primordials(); + var Duplexify = class extends Duplex { + constructor(options) { + super(options); + if ( + (options === null || options === void 0 + ? void 0 + : options.readable) === false + ) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + if ( + (options === null || options === void 0 + ? void 0 + : options.writable) === false + ) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } + }; + module.exports = function duplexify(body, name) { + if (isDuplexNodeStream(body)) { + return body; + } + if (isReadableNodeStream(body)) { + return _duplexify({ + readable: body, + }); + } + if (isWritableNodeStream(body)) { + return _duplexify({ + writable: body, + }); + } + if (isNodeStream(body)) { + return _duplexify({ + writable: false, + readable: false, + }); + } + if (typeof body === "function") { + const { value, write, final, destroy } = fromAsyncGen(body); + if (isIterable(value)) { + return from(Duplexify, value, { + // TODO (ronag): highWaterMark? + objectMode: true, + write, + final, + destroy, + }); + } + const then2 = value === null || value === void 0 ? void 0 : value.then; + if (typeof then2 === "function") { + let d; + const promise = FunctionPrototypeCall( + then2, + value, + (val) => { + if (val != null) { + throw new ERR_INVALID_RETURN_VALUE("nully", "body", val); + } + }, + (err) => { + destroyer(d, err); + }, + ); + return d = new Duplexify({ + // TODO (ronag): highWaterMark? 
+ objectMode: true, + readable: false, + write, + final(cb) { + final(async () => { + try { + await promise; + process.nextTick(cb, null); + } catch (err) { + process.nextTick(cb, err); + } + }); + }, + destroy, + }); + } + throw new ERR_INVALID_RETURN_VALUE( + "Iterable, AsyncIterable or AsyncFunction", + name, + value, + ); + } + if (isBlob(body)) { + return duplexify(body.arrayBuffer()); + } + if (isIterable(body)) { + return from(Duplexify, body, { + // TODO (ronag): highWaterMark? + objectMode: true, + writable: false, + }); + } + if ( + typeof (body === null || body === void 0 ? void 0 : body.writable) === + "object" || + typeof (body === null || body === void 0 ? void 0 : body.readable) === + "object" + ) { + const readable = body !== null && body !== void 0 && body.readable + ? isReadableNodeStream( + body === null || body === void 0 ? void 0 : body.readable, + ) + ? body === null || body === void 0 ? void 0 : body.readable + : duplexify(body.readable) + : void 0; + const writable = body !== null && body !== void 0 && body.writable + ? isWritableNodeStream( + body === null || body === void 0 ? void 0 : body.writable, + ) + ? body === null || body === void 0 ? void 0 : body.writable + : duplexify(body.writable) + : void 0; + return _duplexify({ + readable, + writable, + }); + } + const then = body === null || body === void 0 ? 
void 0 : body.then; + if (typeof then === "function") { + let d; + FunctionPrototypeCall( + then, + body, + (val) => { + if (val != null) { + d.push(val); + } + d.push(null); + }, + (err) => { + destroyer(d, err); + }, + ); + return d = new Duplexify({ + objectMode: true, + writable: false, + read() { + }, + }); + } + throw new ERR_INVALID_ARG_TYPE( + name, + [ + "Blob", + "ReadableStream", + "WritableStream", + "Stream", + "Iterable", + "AsyncIterable", + "Function", + "{ readable, writable } pair", + "Promise", + ], + body, + ); + }; + function fromAsyncGen(fn) { + let { promise, resolve } = createDeferredPromise(); + const ac = new AbortController(); + const signal = ac.signal; + const value = fn( + async function* () { + while (true) { + const _promise = promise; + promise = null; + const { chunk, done, cb } = await _promise; + process.nextTick(cb); + if (done) { + return; + } + if (signal.aborted) { + throw new AbortError(void 0, { + cause: signal.reason, + }); + } + ({ promise, resolve } = createDeferredPromise()); + yield chunk; + } + }(), + { + signal, + }, + ); + return { + value, + write(chunk, encoding, cb) { + const _resolve = resolve; + resolve = null; + _resolve({ + chunk, + done: false, + cb, + }); + }, + final(cb) { + const _resolve = resolve; + resolve = null; + _resolve({ + done: true, + cb, + }); + }, + destroy(err, cb) { + ac.abort(); + cb(err); + }, + }; + } + function _duplexify(pair) { + const r = pair.readable && typeof pair.readable.read !== "function" + ? Readable.wrap(pair.readable) + : pair.readable; + const w = pair.writable; + let readable = !!isReadable(r); + let writable = !!isWritable(w); + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + function onfinished(err) { + const cb = onclose; + onclose = null; + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + d = new Duplexify({ + // TODO (ronag): highWaterMark? 
+ readableObjectMode: + !!(r !== null && r !== void 0 && r.readableObjectMode), + writableObjectMode: + !!(w !== null && w !== void 0 && w.writableObjectMode), + readable, + writable, + }); + if (writable) { + eos(w, (err) => { + writable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + d._write = function (chunk, encoding, callback) { + if (w.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + d._final = function (callback) { + w.end(); + onfinish = callback; + }; + w.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + w.on("finish", function () { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + if (readable) { + eos(r, (err) => { + readable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + r.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + r.on("end", function () { + d.push(null); + }); + d._read = function () { + while (true) { + const buf = r.read(); + if (buf === null) { + onreadable = d._read; + return; + } + if (!d.push(buf)) { + return; + } + } + }; + } + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + onreadable = null; + ondrain = null; + onfinish = null; + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(w, err); + destroyer(r, err); + } + }; + return d; + } + }, +}); + +// lib/internal/streams/duplex.js +var require_duplex = __commonJS({ + "lib/internal/streams/duplex.js"(exports, module) { + "use strict"; + var { + ObjectDefineProperties, + ObjectGetOwnPropertyDescriptor, + ObjectKeys, + ObjectSetPrototypeOf, + } = require_primordials(); + module.exports = Duplex; + var Readable = require_readable(); + var Writable = require_writable(); + ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); + ObjectSetPrototypeOf(Duplex, 
Readable); + { + const keys = ObjectKeys(Writable.prototype); + for (let i = 0; i < keys.length; i++) { + const method = keys[i]; + if (!Duplex.prototype[method]) { + Duplex.prototype[method] = Writable.prototype[method]; + } + } + } + function Duplex(options) { + if (!(this instanceof Duplex)) { + return new Duplex(options); + } + Readable.call(this, options); + Writable.call(this, options); + if (options) { + this.allowHalfOpen = options.allowHalfOpen !== false; + if (options.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + if (options.writable === false) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } else { + this.allowHalfOpen = true; + } + } + ObjectDefineProperties(Duplex.prototype, { + writable: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writable"), + }, + writableHighWaterMark: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableHighWaterMark", + ), + }, + writableObjectMode: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableObjectMode", + ), + }, + writableBuffer: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableBuffer"), + }, + writableLength: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableLength"), + }, + writableFinished: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableFinished", + ), + }, + writableCorked: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableCorked"), + }, + writableEnded: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableEnded"), + }, + writableNeedDrain: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + 
Writable.prototype, + "writableNeedDrain", + ), + }, + destroyed: { + __proto__: null, + get() { + if ( + this._readableState === void 0 || this._writableState === void 0 + ) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + if (this._readableState && this._writableState) { + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } + }, + }, + }); + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Duplex.fromWeb = function (pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair( + pair, + options, + ); + }; + Duplex.toWeb = function (duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); + }; + var duplexify; + Duplex.from = function (body) { + if (!duplexify) { + duplexify = require_duplexify(); + } + return duplexify(body, "body"); + }; + }, +}); + +// lib/internal/streams/transform.js +var require_transform = __commonJS({ + "lib/internal/streams/transform.js"(exports, module) { + "use strict"; + var { ObjectSetPrototypeOf, Symbol: Symbol2 } = require_primordials(); + module.exports = Transform; + var Duplex = require_duplex(); + var { getHighWaterMark } = require_state(); + ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); + ObjectSetPrototypeOf(Transform, Duplex); + var kCallback = Symbol2("kCallback"); + function Transform(options) { + if (!(this instanceof Transform)) { + return new Transform(options); + } + const readableHighWaterMark = options + ? getHighWaterMark(this, options, "readableHighWaterMark", true) + : null; + if (readableHighWaterMark === 0) { + options = { + ...options, + highWaterMark: null, + readableHighWaterMark, + // TODO (ronag): 0 is not optimal since we have + // a "bug" where we check needDrain before calling _write and not after. 
+ // Refs: https://github.com/nodejs/node/pull/32887 + // Refs: https://github.com/nodejs/node/pull/35941 + writableHighWaterMark: options.writableHighWaterMark || 0, + }; + } + Duplex.call(this, options); + this._readableState.sync = false; + this[kCallback] = null; + if (options) { + if (typeof options.transform === "function") { + this._transform = options.transform; + } + if (typeof options.flush === "function") { + this._flush = options.flush; + } + } + this.on("prefinish", prefinish); + } + function final(cb) { + if (typeof this._flush === "function" && !this.destroyed) { + this._flush((er, data) => { + if (er) { + if (cb) { + cb(er); + } else { + this.destroy(er); + } + return; + } + if (data != null) { + this.push(data); + } + this.push(null); + if (cb) { + cb(); + } + }); + } else { + this.push(null); + if (cb) { + cb(); + } + } + } + function prefinish() { + if (this._final !== final) { + final.call(this); + } + } + Transform.prototype._final = final; + Transform.prototype._transform = function (chunk, encoding, callback) { + throw new ERR_METHOD_NOT_IMPLEMENTED("_transform()"); + }; + Transform.prototype._write = function (chunk, encoding, callback) { + const rState = this._readableState; + const wState = this._writableState; + const length = rState.length; + this._transform(chunk, encoding, (err, val) => { + if (err) { + callback(err); + return; + } + if (val != null) { + this.push(val); + } + if ( + wState.ended || // Backwards compat. + length === rState.length || // Backwards compat. 
+ rState.length < rState.highWaterMark + ) { + callback(); + } else { + this[kCallback] = callback; + } + }); + }; + Transform.prototype._read = function () { + if (this[kCallback]) { + const callback = this[kCallback]; + this[kCallback] = null; + callback(); + } + }; + }, +}); + +// lib/internal/streams/passthrough.js +var require_passthrough = __commonJS({ + "lib/internal/streams/passthrough.js"(exports, module) { + "use strict"; + var { ObjectSetPrototypeOf } = require_primordials(); + module.exports = PassThrough; + var Transform = require_transform(); + ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); + ObjectSetPrototypeOf(PassThrough, Transform); + function PassThrough(options) { + if (!(this instanceof PassThrough)) { + return new PassThrough(options); + } + Transform.call(this, options); + } + PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); + }; + }, +}); + +// lib/internal/streams/pipeline.js +var require_pipeline = __commonJS({ + "lib/internal/streams/pipeline.js"(exports, module) { + var process = require_browser2(); + var { ArrayIsArray, Promise: Promise2, SymbolAsyncIterator } = + require_primordials(); + var eos = require_end_of_stream(); + var destroyImpl = require_destroy(); + var Duplex = require_duplex(); + var { validateFunction, validateAbortSignal } = require_validators(); + var { isIterable, isReadable, isReadableNodeStream, isNodeStream } = + require_utils(); + var PassThrough; + var Readable; + function destroyer(stream, reading, writing) { + let finished = false; + stream.on("close", () => { + finished = true; + }); + const cleanup = eos( + stream, + { + readable: reading, + writable: writing, + }, + (err) => { + finished = !err; + }, + ); + return { + destroy: (err) => { + if (finished) { + return; + } + finished = true; + destroyImpl.destroyer( + stream, + err || new ERR_STREAM_DESTROYED("pipe"), + ); + }, + cleanup, + }; + } + function popCallback(streams) { + validateFunction( + 
streams[streams.length - 1], + "streams[stream.length - 1]", + ); + return streams.pop(); + } + function makeAsyncIterable(val) { + if (isIterable(val)) { + return val; + } else if (isReadableNodeStream(val)) { + return fromReadable(val); + } + throw new ERR_INVALID_ARG_TYPE("val", [ + "Readable", + "Iterable", + "AsyncIterable", + ], val); + } + async function* fromReadable(val) { + if (!Readable) { + Readable = require_readable(); + } + yield* Readable.prototype[SymbolAsyncIterator].call(val); + } + async function pump(iterable, writable, finish, { end }) { + let error; + let onresolve = null; + const resume = (err) => { + if (err) { + error = err; + } + if (onresolve) { + const callback = onresolve; + onresolve = null; + callback(); + } + }; + const wait = () => + new Promise2((resolve, reject) => { + if (error) { + reject(error); + } else { + onresolve = () => { + if (error) { + reject(error); + } else { + resolve(); + } + }; + } + }); + writable.on("drain", resume); + const cleanup = eos( + writable, + { + readable: false, + }, + resume, + ); + try { + if (writable.writableNeedDrain) { + await wait(); + } + for await (const chunk of iterable) { + if (!writable.write(chunk)) { + await wait(); + } + } + if (end) { + writable.end(); + } + await wait(); + finish(); + } catch (err) { + finish(error !== err ? aggregateTwoErrors(error, err) : err); + } finally { + cleanup(); + writable.off("drain", resume); + } + } + function pipeline(...streams) { + return pipelineImpl(streams, once(popCallback(streams))); + } + function pipelineImpl(streams, callback, opts) { + if (streams.length === 1 && ArrayIsArray(streams[0])) { + streams = streams[0]; + } + if (streams.length < 2) { + throw new ERR_MISSING_ARGS("streams"); + } + const ac = new AbortController(); + const signal = ac.signal; + const outerSignal = opts === null || opts === void 0 + ? 
void 0 + : opts.signal; + const lastStreamCleanup = []; + validateAbortSignal(outerSignal, "options.signal"); + function abort() { + finishImpl(new AbortError()); + } + outerSignal === null || outerSignal === void 0 + ? void 0 + : outerSignal.addEventListener("abort", abort); + let error; + let value; + const destroys = []; + let finishCount = 0; + function finish(err) { + finishImpl(err, --finishCount === 0); + } + function finishImpl(err, final) { + if (err && (!error || error.code === "ERR_STREAM_PREMATURE_CLOSE")) { + error = err; + } + if (!error && !final) { + return; + } + while (destroys.length) { + destroys.shift()(error); + } + outerSignal === null || outerSignal === void 0 + ? void 0 + : outerSignal.removeEventListener("abort", abort); + ac.abort(); + if (final) { + if (!error) { + lastStreamCleanup.forEach((fn) => fn()); + } + process.nextTick(callback, error, value); + } + } + let ret; + for (let i = 0; i < streams.length; i++) { + const stream = streams[i]; + const reading = i < streams.length - 1; + const writing = i > 0; + const end = reading || + (opts === null || opts === void 0 ? 
void 0 : opts.end) !== false; + const isLastStream = i === streams.length - 1; + if (isNodeStream(stream)) { + let onError2 = function (err) { + if ( + err && err.name !== "AbortError" && + err.code !== "ERR_STREAM_PREMATURE_CLOSE" + ) { + finish(err); + } + }; + var onError = onError2; + if (end) { + const { destroy, cleanup } = destroyer(stream, reading, writing); + destroys.push(destroy); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + stream.on("error", onError2); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(() => { + stream.removeListener("error", onError2); + }); + } + } + if (i === 0) { + if (typeof stream === "function") { + ret = stream({ + signal, + }); + if (!isIterable(ret)) { + throw new ERR_INVALID_RETURN_VALUE( + "Iterable, AsyncIterable or Stream", + "source", + ret, + ); + } + } else if (isIterable(stream) || isReadableNodeStream(stream)) { + ret = stream; + } else { + ret = Duplex.from(stream); + } + } else if (typeof stream === "function") { + ret = makeAsyncIterable(ret); + ret = stream(ret, { + signal, + }); + if (reading) { + if (!isIterable(ret, true)) { + throw new ERR_INVALID_RETURN_VALUE( + "AsyncIterable", + `transform[${i - 1}]`, + ret, + ); + } + } else { + var _ret; + if (!PassThrough) { + PassThrough = require_passthrough(); + } + const pt = new PassThrough({ + objectMode: true, + }); + const then = (_ret = ret) === null || _ret === void 0 + ? 
void 0 + : _ret.then; + if (typeof then === "function") { + finishCount++; + then.call( + ret, + (val) => { + value = val; + if (val != null) { + pt.write(val); + } + if (end) { + pt.end(); + } + process.nextTick(finish); + }, + (err) => { + pt.destroy(err); + process.nextTick(finish, err); + }, + ); + } else if (isIterable(ret, true)) { + finishCount++; + pump(ret, pt, finish, { + end, + }); + } else { + throw new ERR_INVALID_RETURN_VALUE( + "AsyncIterable or Promise", + "destination", + ret, + ); + } + ret = pt; + const { destroy, cleanup } = destroyer(ret, false, true); + destroys.push(destroy); + if (isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + } else if (isNodeStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount += 2; + const cleanup = pipe(ret, stream, finish, { + end, + }); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } else if (isIterable(ret)) { + finishCount++; + pump(ret, stream, finish, { + end, + }); + } else { + throw new ERR_INVALID_ARG_TYPE("val", [ + "Readable", + "Iterable", + "AsyncIterable", + ], ret); + } + ret = stream; + } else { + ret = Duplex.from(stream); + } + } + if ( + signal !== null && signal !== void 0 && signal.aborted || + outerSignal !== null && outerSignal !== void 0 && outerSignal.aborted + ) { + process.nextTick(abort); + } + return ret; + } + function pipe(src, dst, finish, { end }) { + let ended = false; + dst.on("close", () => { + if (!ended) { + finish(new ERR_STREAM_PREMATURE_CLOSE()); + } + }); + src.pipe(dst, { + end, + }); + if (end) { + src.once("end", () => { + ended = true; + dst.end(); + }); + } else { + finish(); + } + eos( + src, + { + readable: true, + writable: false, + }, + (err) => { + const rState = src._readableState; + if ( + err && err.code === "ERR_STREAM_PREMATURE_CLOSE" && rState && + rState.ended && !rState.errored && !rState.errorEmitted + ) { + src.once("end", finish).once("error", finish); + } else { + finish(err); + } + }, + ); + 
return eos( + dst, + { + readable: false, + writable: true, + }, + finish, + ); + } + module.exports = { + pipelineImpl, + pipeline, + }; + }, +}); + +// lib/internal/streams/compose.js +var require_compose = __commonJS({ + "lib/internal/streams/compose.js"(exports, module) { + "use strict"; + var { pipeline } = require_pipeline(); + var Duplex = require_duplex(); + var { destroyer } = require_destroy(); + var { isNodeStream, isReadable, isWritable } = require_utils(); + module.exports = function compose(...streams) { + if (streams.length === 0) { + throw new ERR_MISSING_ARGS("streams"); + } + if (streams.length === 1) { + return Duplex.from(streams[0]); + } + const orgStreams = [...streams]; + if (typeof streams[0] === "function") { + streams[0] = Duplex.from(streams[0]); + } + if (typeof streams[streams.length - 1] === "function") { + const idx = streams.length - 1; + streams[idx] = Duplex.from(streams[idx]); + } + for (let n = 0; n < streams.length; ++n) { + if (!isNodeStream(streams[n])) { + continue; + } + if (n < streams.length - 1 && !isReadable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + "must be readable", + ); + } + if (n > 0 && !isWritable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + "must be writable", + ); + } + } + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + function onfinished(err) { + const cb = onclose; + onclose = null; + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + const head = streams[0]; + const tail = pipeline(streams, onfinished); + const writable = !!isWritable(head); + const readable = !!isReadable(tail); + d = new Duplex({ + // TODO (ronag): highWaterMark? 
+ writableObjectMode: + !!(head !== null && head !== void 0 && head.writableObjectMode), + readableObjectMode: + !!(tail !== null && tail !== void 0 && tail.writableObjectMode), + writable, + readable, + }); + if (writable) { + d._write = function (chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + d._final = function (callback) { + head.end(); + onfinish = callback; + }; + head.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + tail.on("finish", function () { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + if (readable) { + tail.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + tail.on("end", function () { + d.push(null); + }); + d._read = function () { + while (true) { + const buf = tail.read(); + if (buf === null) { + onreadable = d._read; + return; + } + if (!d.push(buf)) { + return; + } + } + }; + } + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + onreadable = null; + ondrain = null; + onfinish = null; + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(tail, err); + } + }; + return d; + }; + }, +}); + +// lib/stream/promises.js +var require_promises = __commonJS({ + "lib/stream/promises.js"(exports, module) { + "use strict"; + var { ArrayPrototypePop, Promise: Promise2 } = require_primordials(); + var { isIterable, isNodeStream } = require_utils(); + var { pipelineImpl: pl } = require_pipeline(); + var { finished } = require_end_of_stream(); + function pipeline(...streams) { + return new Promise2((resolve, reject) => { + let signal; + let end; + const lastArg = streams[streams.length - 1]; + if ( + lastArg && typeof lastArg === "object" && !isNodeStream(lastArg) && + !isIterable(lastArg) + ) { + const options = ArrayPrototypePop(streams); + 
signal = options.signal; + end = options.end; + } + pl( + streams, + (err, value) => { + if (err) { + reject(err); + } else { + resolve(value); + } + }, + { + signal, + end, + }, + ); + }); + } + module.exports = { + finished, + pipeline, + }; + }, +}); + +// lib/stream.js +var require_stream = __commonJS({ + "lib/stream.js"(exports, module) { + var { Buffer: Buffer2 } = require_buffer(); + var { ObjectDefineProperty, ObjectKeys, ReflectApply } = + require_primordials(); + var { streamReturningOperators, promiseReturningOperators } = + require_operators(); + var compose = require_compose(); + var { pipeline } = require_pipeline(); + var { destroyer } = require_destroy(); + var eos = require_end_of_stream(); + var promises = require_promises(); + var utils = require_utils(); + var Stream = module.exports = require_legacy().Stream; + Stream.isDisturbed = utils.isDisturbed; + Stream.isErrored = utils.isErrored; + Stream.isReadable = utils.isReadable; + Stream.Readable = require_readable(); + for (const key of ObjectKeys(streamReturningOperators)) { + let fn2 = function (...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(ReflectApply(op, this, args)); + }; + fn = fn2; + const op = streamReturningOperators[key]; + ObjectDefineProperty(fn2, "name", { + __proto__: null, + value: op.name, + }); + ObjectDefineProperty(fn2, "length", { + __proto__: null, + value: op.length, + }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn2, + enumerable: false, + configurable: true, + writable: true, + }); + } + var fn; + for (const key of ObjectKeys(promiseReturningOperators)) { + let fn2 = function (...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return ReflectApply(op, this, args); + }; + fn = fn2; + const op = promiseReturningOperators[key]; + ObjectDefineProperty(fn2, "name", { + __proto__: null, + value: op.name, + }); + ObjectDefineProperty(fn2, "length", { + 
__proto__: null, + value: op.length, + }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn2, + enumerable: false, + configurable: true, + writable: true, + }); + } + var fn; + Stream.Writable = require_writable(); + Stream.Duplex = require_duplex(); + Stream.Transform = require_transform(); + Stream.PassThrough = require_passthrough(); + Stream.pipeline = pipeline; + var { addAbortSignal } = require_add_abort_signal(); + Stream.addAbortSignal = addAbortSignal; + Stream.finished = eos; + Stream.destroy = destroyer; + Stream.compose = compose; + ObjectDefineProperty(Stream, "promises", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return promises; + }, + }); + ObjectDefineProperty(pipeline, promisify, { + __proto__: null, + enumerable: true, + get() { + return promises.pipeline; + }, + }); + ObjectDefineProperty(eos, promisify, { + __proto__: null, + enumerable: true, + get() { + return promises.finished; + }, + }); + Stream.Stream = Stream; + Stream._isUint8Array = function isUint8Array(value) { + return value instanceof Uint8Array; + }; + Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return Buffer2.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + }; + }, +}); /* End esm.sh bundle */ // The following code implements Readable.fromWeb(), Writable.fromWeb(), and @@ -16,12 +5713,6 @@ const __process$ = { nextTick };import __buffer$ from "ext:deno_node/buffer.ts"; // readable-stream module yet. 
This can be removed once the following upstream // issue is resolved: https://github.com/nodejs/readable-stream/issues/482 -import { - AbortError, - ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE, - ERR_STREAM_PREMATURE_CLOSE, -} from "ext:deno_node/internal/errors.ts"; import { destroy } from "ext:deno_node/internal/streams/destroy.mjs"; import finished from "ext:deno_node/internal/streams/end-of-stream.mjs"; import { @@ -31,23 +5722,36 @@ import { isWritable, isWritableEnded, } from "ext:deno_node/internal/streams/utils.mjs"; -import { createDeferredPromise, kEmptyObject } from "ext:deno_node/internal/util.mjs"; -import { validateBoolean, validateObject } from "ext:deno_node/internal/validators.mjs"; +import { ReadableStream, WritableStream } from "ext:deno_node/stream/web.ts"; +import { + validateBoolean, + validateObject, +} from "ext:deno_node/internal/validators.mjs"; +const CustomStream = require_stream(); const process = __process$; const { Buffer } = __buffer$; -const Readable = Au; -const Writable = mu; -const Duplex = Tu; -function isReadableStream(object) { - return object instanceof ReadableStream; -} +export const Readable = CustomStream.Readable; +export const Writable = CustomStream.Writable; +export const Duplex = CustomStream.Duplex; +export const PassThrough = CustomStream.PassThrough; +export const Stream = CustomStream.Stream; +export const Transform = CustomStream.Transform; +export const _isUint8Array = CustomStream._isUint8Array; +export const _uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer; +export const addAbortSignal = CustomStream.addAbortSignal; +export const pipeline = CustomStream.pipeline; +export { finished }; function isWritableStream(object) { return object instanceof WritableStream; } +function isReadableStream(object) { + return object instanceof ReadableStream; +} + Readable.fromWeb = function ( readableStream, options = kEmptyObject, diff --git a/ext/node/polyfills/assertion_error.ts 
b/ext/node/polyfills/assertion_error.ts index bc3ebb2a36..0c54f12cde 100644 --- a/ext/node/polyfills/assertion_error.ts +++ b/ext/node/polyfills/assertion_error.ts @@ -23,6 +23,7 @@ import { inspect } from "ext:deno_node/util.ts"; import { stripColor as removeColors } from "ext:deno_node/_util/std_fmt_colors.ts"; +import * as io from "ext:deno_io/12_io.js"; function getConsoleWidth(): number { try { @@ -159,7 +160,7 @@ export function createErrDiff( // If the stderr is a tty and the input length is lower than the current // columns per line, add a mismatch indicator below the output. If it is // not a tty, use a default value of 80 characters. - const maxLength = Deno.isatty(Deno.stderr.rid) ? getConsoleWidth() : 80; + const maxLength = Deno.isatty(io.stderr.rid) ? getConsoleWidth() : 80; if (inputLength < maxLength) { while (actualRaw[i] === expectedRaw[i]) { i++; @@ -402,7 +403,7 @@ export class AssertionError extends Error { if (message != null) { super(String(message)); } else { - if (Deno.isatty(Deno.stderr.rid)) { + if (Deno.isatty(io.stderr.rid)) { // Reset on each call to make sure we handle dynamically set environment // variables correct. 
if (Deno.noColor) { diff --git a/ext/node/polyfills/async_hooks.ts b/ext/node/polyfills/async_hooks.ts index 8111af73b2..d2c9390009 100644 --- a/ext/node/polyfills/async_hooks.ts +++ b/ext/node/polyfills/async_hooks.ts @@ -5,7 +5,8 @@ // https://github.com/cloudflare/workerd/blob/77fd0ed6ddba184414f0216508fc62b06e716cab/src/workerd/api/node/async-hooks.c++#L9 import { validateFunction } from "ext:deno_node/internal/validators.mjs"; -import { core } from "ext:deno_node/_core.ts"; + +const { core } = globalThis.__bootstrap; function assert(cond: boolean) { if (!cond) throw new Error("Assertion failed"); diff --git a/ext/node/polyfills/child_process.ts b/ext/node/polyfills/child_process.ts index 5ca760ed03..f731a0bf11 100644 --- a/ext/node/polyfills/child_process.ts +++ b/ext/node/polyfills/child_process.ts @@ -2,7 +2,6 @@ // This module implements 'child_process' module of Node.JS API. // ref: https://nodejs.org/api/child_process.html -import { core } from "ext:deno_node/_core.ts"; import { ChildProcess, ChildProcessOptions, @@ -44,6 +43,8 @@ import { kEmptyObject, } from "ext:deno_node/internal/util.mjs"; +const { core } = globalThis.__bootstrap; + const MAX_BUFFER = 1024 * 1024; type ForkOptions = ChildProcessOptions; diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 9104183cac..250d34e7cb 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -1,23 +1,57 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// import { ReadableStreamPrototype } from "ext:deno_web/06_streams.js"; + +const core = globalThis.__bootstrap.core; import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; -import { _normalizeArgs, ListenOptions, Socket } from "ext:deno_node/net.ts"; +import { type Deferred, deferred } from "ext:deno_node/_util/async.ts"; +import { + _normalizeArgs, + // createConnection, + ListenOptions, + Socket, +} from "ext:deno_node/net.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; import { ERR_SERVER_NOT_RUNNING } from "ext:deno_node/internal/errors.ts"; import { EventEmitter } from "ext:deno_node/events.ts"; import { nextTick } from "ext:deno_node/_next_tick.ts"; -import { validatePort } from "ext:deno_node/internal/validators.mjs"; import { + validateBoolean, + validateInteger, + validatePort, +} from "ext:deno_node/internal/validators.mjs"; +import { + addAbortSignal, + finished, Readable as NodeReadable, Writable as NodeWritable, } from "ext:deno_node/stream.ts"; -import { OutgoingMessage } from "ext:deno_node/_http_outgoing.ts"; -import { Agent } from "ext:deno_node/_http_agent.mjs"; -import { chunkExpression as RE_TE_CHUNKED } from "ext:deno_node/_http_common.ts"; +import { + OutgoingMessage, + parseUniqueHeadersOption, + validateHeaderName, +} from "ext:deno_node/_http_outgoing.ts"; +import { kOutHeaders } from "ext:deno_node/internal/http.ts"; +import { _checkIsHttpToken as checkIsHttpToken } from "ext:deno_node/_http_common.ts"; +import { Agent, globalAgent } from "ext:deno_node/_http_agent.mjs"; +// import { chunkExpression as RE_TE_CHUNKED } from "ext:deno_node/_http_common.ts"; import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; +import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts"; -import * as denoHttp from "ext:deno_http/01_http.js"; -import * as httpRuntime from "ext:runtime/40_http.js"; +import { notImplemented } from 
"ext:deno_node/_utils.ts"; +import { + connResetException, + ERR_HTTP_HEADERS_SENT, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_HTTP_TOKEN, + ERR_INVALID_PROTOCOL, + ERR_UNESCAPED_CHARACTERS, +} from "ext:deno_node/internal/errors.ts"; +import { getTimerDuration } from "ext:deno_node/internal/timers.mjs"; +import { serve, upgradeHttpRaw } from "ext:deno_http/00_serve.js"; +import { createHttpClient } from "ext:deno_fetch/22_http_client.js"; +import { timerId } from "ext:deno_web/03_abort_signal.js"; +import { clearTimeout as webClearTimeout } from "ext:deno_web/02_timers.js"; enum STATUS_CODES { /** RFC 7231, 6.2.1 */ @@ -217,182 +251,954 @@ export interface RequestOptions { href?: string; } -// TODO(@bartlomieju): Implement ClientRequest methods (e.g. setHeader()) +function validateHost(host, name) { + if (host !== null && host !== undefined && typeof host !== "string") { + throw new ERR_INVALID_ARG_TYPE(`options.${name}`, [ + "string", + "undefined", + "null", + ], host); + } + return host; +} + +const INVALID_PATH_REGEX = /[^\u0021-\u00ff]/; +const kError = Symbol("kError"); + +const kUniqueHeaders = Symbol("kUniqueHeaders"); + +class FakeSocket extends EventEmitter { +} + /** ClientRequest represents the http(s) request from the client */ -class ClientRequest extends NodeWritable { +class ClientRequest extends OutgoingMessage { defaultProtocol = "http:"; - body: null | ReadableStream = null; - controller: ReadableStreamDefaultController | null = null; + aborted = false; + destroyed = false; + agent: Agent; + method: string; + maxHeaderSize: number | undefined; + insecureHTTPParser: boolean; + useChunkedEncodingByDefault: boolean; + path: string; + constructor( - public opts: RequestOptions, - public cb?: (res: IncomingMessageForClient) => void, + input: string | URL, + options?: RequestOptions, + cb?: (res: IncomingMessageForClient) => void, ) { super(); - } - // deno-lint-ignore no-explicit-any - override _write(chunk: any, _enc: string, cb: () => void) { - if 
(this.controller) { - this.controller.enqueue(chunk); - cb(); - return; + if (typeof input === "string") { + const urlStr = input; + input = urlToHttpOptions(new URL(urlStr)); + } else if (input instanceof URL) { + // url.URL instance + input = urlToHttpOptions(input); + } else { + cb = options; + options = input; + input = null; } - this.body = new ReadableStream({ - start: (controller) => { - this.controller = controller; - controller.enqueue(chunk); - cb(); - }, + if (typeof options === "function") { + cb = options; + options = input || kEmptyObject; + } else { + options = Object.assign(input || {}, options); + } + + let agent = options!.agent; + const defaultAgent = options!._defaultAgent || globalAgent; + if (agent === false) { + agent = new defaultAgent.constructor(); + } else if (agent === null || agent === undefined) { + if (typeof options!.createConnection !== "function") { + agent = defaultAgent; + } + // Explicitly pass through this statement as agent will not be used + // when createConnection is provided. 
+ } else if (typeof agent.addRequest !== "function") { + throw new ERR_INVALID_ARG_TYPE("options.agent", [ + "Agent-like Object", + "undefined", + "false", + ], agent); + } + this.agent = agent; + + const protocol = options!.protocol || defaultAgent.protocol; + let expectedProtocol = defaultAgent.protocol; + if (this.agent?.protocol) { + expectedProtocol = this.agent!.protocol; + } + + if (options!.path) { + const path = String(options.path); + if (INVALID_PATH_REGEX.exec(path) !== null) { + throw new ERR_UNESCAPED_CHARACTERS("Request path"); + } + } + + if (protocol !== expectedProtocol) { + throw new ERR_INVALID_PROTOCOL(protocol, expectedProtocol); + } + + const defaultPort = options!.defaultPort || this.agent?.defaultPort; + + const port = options!.port = options!.port || defaultPort || 80; + const host = options!.host = validateHost(options!.hostname, "hostname") || + validateHost(options!.host, "host") || "localhost"; + + const setHost = options!.setHost === undefined || Boolean(options!.setHost); + + this.socketPath = options!.socketPath; + + if (options!.timeout !== undefined) { + this.setTimeout(options.timeout); + } + + const signal = options!.signal; + if (signal) { + addAbortSignal(signal, this); + } + let method = options!.method; + const methodIsString = typeof method === "string"; + if (method !== null && method !== undefined && !methodIsString) { + throw new ERR_INVALID_ARG_TYPE("options.method", "string", method); + } + + if (methodIsString && method) { + if (!checkIsHttpToken(method)) { + throw new ERR_INVALID_HTTP_TOKEN("Method", method); + } + method = this.method = method.toUpperCase(); + } else { + method = this.method = "GET"; + } + + const maxHeaderSize = options!.maxHeaderSize; + if (maxHeaderSize !== undefined) { + validateInteger(maxHeaderSize, "maxHeaderSize", 0); + } + this.maxHeaderSize = maxHeaderSize; + + const insecureHTTPParser = options!.insecureHTTPParser; + if (insecureHTTPParser !== undefined) { + 
validateBoolean(insecureHTTPParser, "options.insecureHTTPParser"); + } + + this.insecureHTTPParser = insecureHTTPParser; + + if (options!.joinDuplicateHeaders !== undefined) { + validateBoolean( + options!.joinDuplicateHeaders, + "options.joinDuplicateHeaders", + ); + } + + this.joinDuplicateHeaders = options!.joinDuplicateHeaders; + + this.path = options!.path || "/"; + if (cb) { + this.once("response", cb); + } + + if ( + method === "GET" || + method === "HEAD" || + method === "DELETE" || + method === "OPTIONS" || + method === "TRACE" || + method === "CONNECT" + ) { + this.useChunkedEncodingByDefault = false; + } else { + this.useChunkedEncodingByDefault = true; + } + + this._ended = false; + this.res = null; + this.aborted = false; + this.upgradeOrConnect = false; + this.parser = null; + this.maxHeadersCount = null; + this.reusedSocket = false; + this.host = host; + this.protocol = protocol; + this.port = port; + this.hash = options.hash; + this.search = options.search; + this.auth = options.auth; + + if (this.agent) { + // If there is an agent we should default to Connection:keep-alive, + // but only if the Agent will actually reuse the connection! 
+ // If it's not a keepAlive agent, and the maxSockets==Infinity, then + // there's never a case where this socket will actually be reused + if (!this.agent.keepAlive && !Number.isFinite(this.agent.maxSockets)) { + this._last = true; + this.shouldKeepAlive = false; + } else { + this._last = false; + this.shouldKeepAlive = true; + } + } + + const headersArray = Array.isArray(options!.headers); + if (!headersArray) { + if (options!.headers) { + const keys = Object.keys(options!.headers); + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + this.setHeader(key, options!.headers[key]); + } + } + + if (host && !this.getHeader("host") && setHost) { + let hostHeader = host; + + // For the Host header, ensure that IPv6 addresses are enclosed + // in square brackets, as defined by URI formatting + // https://tools.ietf.org/html/rfc3986#section-3.2.2 + const posColon = hostHeader.indexOf(":"); + if ( + posColon !== -1 && + hostHeader.includes(":", posColon + 1) && + hostHeader.charCodeAt(0) !== 91 /* '[' */ + ) { + hostHeader = `[${hostHeader}]`; + } + + if (port && +port !== defaultPort) { + hostHeader += ":" + port; + } + this.setHeader("Host", hostHeader); + } + + if (options!.auth && !this.getHeader("Authorization")) { + this.setHeader( + "Authorization", + "Basic " + + Buffer.from(options!.auth).toString("base64"), + ); + } + + if (this.getHeader("expect") && this._header) { + throw new ERR_HTTP_HEADERS_SENT("render"); + } + } else { + for (const [key, val] of options!.headers) { + this.setHeader(key, val); + } + } + + this[kUniqueHeaders] = parseUniqueHeadersOption(options!.uniqueHeaders); + + let optsWithoutSignal = options as RequestOptions; + if (optsWithoutSignal.signal) { + optsWithoutSignal = Object.assign({}, options); + delete optsWithoutSignal.signal; + } + + if (options!.createConnection) { + 
notImplemented("ClientRequest.options.createConnection"); + } + + if (options!.lookup) { + notImplemented("ClientRequest.options.lookup"); + } + + // initiate connection + // TODO(crowlKats): finish this + /*if (this.agent) { + this.agent.addRequest(this, optsWithoutSignal); + } else { + // No agent, default to Connection:close. + this._last = true; + this.shouldKeepAlive = false; + if (typeof optsWithoutSignal.createConnection === "function") { + const oncreate = once((err, socket) => { + if (err) { + this.emit("error", err); + } else { + this.onSocket(socket); + } + }); + + try { + const newSocket = optsWithoutSignal.createConnection( + optsWithoutSignal, + oncreate, + ); + if (newSocket) { + oncreate(null, newSocket); + } + } catch (err) { + oncreate(err); + } + } else { + debug("CLIENT use net.createConnection", optsWithoutSignal); + this.onSocket(createConnection(optsWithoutSignal)); + } + }*/ + this.onSocket(new FakeSocket()); + + const url = this._createUrlStrFromOptions(); + + const headers = []; + for (const key in this[kOutHeaders]) { + if (Object.hasOwn(this[kOutHeaders], key)) { + const entry = this[kOutHeaders][key]; + this._processHeader(headers, entry[0], entry[1], false); + } + } + + const client = this._getClient() ?? 
createHttpClient({ http2: false }); + this._client = client; + + this._req = core.ops.op_node_http_request( + this.method, + url, + headers, + client.rid, + this.method === "POST" || this.method === "PATCH" || + this.method === "PUT", + ); + this._bodyWriteRid = this._req.requestBodyRid; + } + + _getClient(): Deno.HttpClient | undefined { + return undefined; + } + + // TODO(bartlomieju): handle error + onSocket(socket, _err) { + nextTick(() => { + this.socket = socket; + this.emit("socket", socket); }); } - override async _final() { - if (this.controller) { - this.controller.close(); + // deno-lint-ignore no-explicit-any + end(chunk?: any, encoding?: any, cb?: any): this { + this.finished = true; + if (chunk !== undefined && chunk !== null) { + this.write(chunk, encoding); } - const body = await this._createBody(this.body, this.opts); - const client = await this._createCustomClient(); - const opts = { - body, - method: this.opts.method, - client, - headers: this.opts.headers, - }; - const mayResponse = fetch(this._createUrlStrFromOptions(this.opts), opts) - .catch((e) => { - if (e.message.includes("connection closed before message completed")) { - // Node.js seems ignoring this error - } else { - this.emit("error", e); + (async () => { + try { + const [res, _] = await Promise.all([ + core.opAsync("op_fetch_send", this._req.requestRid), + (async () => { + if (this._bodyWriteRid) { + try { + await core.shutdown(this._bodyWriteRid); + } catch (err) { + this._requestSendError = err; + } + + core.tryClose(this._bodyWriteRid); + + try { + cb?.(); + } catch (_) { + // + } + } + })(), + ]); + if (this._timeout) { + this._timeout.removeEventListener("abort", this._timeoutCb); + webClearTimeout(this._timeout[timerId]); } - return undefined; - }); - const res = new IncomingMessageForClient( - await mayResponse, - this._createSocket(), - ); - this.emit("response", res); - if (client) { - res.on("end", () => { - client.close(); - }); - } - this.cb?.(res); + 
this._client.close(); + const incoming = new IncomingMessageForClient(this.socket); + incoming.req = this; + this.res = incoming; + + // TODO(@crowlKats): + // incoming.httpVersionMajor = versionMajor; + // incoming.httpVersionMinor = versionMinor; + // incoming.httpVersion = `${versionMajor}.${versionMinor}`; + // incoming.joinDuplicateHeaders = socket?.server?.joinDuplicateHeaders || + // parser.joinDuplicateHeaders; + + incoming.url = res.url; + incoming.statusCode = res.status; + incoming.statusMessage = res.statusText; + + incoming._addHeaderLines( + res.headers, + Object.entries(res.headers).flat().length, + ); + incoming._bodyRid = res.responseRid; + + if (this._req.cancelHandleRid !== null) { + core.tryClose(this._req.cancelHandleRid); + } + + this.emit("response", incoming); + } catch (err) { + if (this._req.cancelHandleRid !== null) { + core.tryClose(this._req.cancelHandleRid); + } + + if (this._requestSendError !== undefined) { + // if the request body stream errored, we want to propagate that error + // instead of the original error from opFetchSend + throw new TypeError( + "Failed to fetch: request body stream errored", + { + cause: this._requestSendError, + }, + ); + } + + if ( + err.message.includes("connection closed before message completed") + ) { + // Node.js seems ignoring this error + } else if (err.message.includes("The signal has been aborted")) { + // Remap this error + this.emit("error", connResetException("socket hang up")); + } else { + this.emit("error", err); + } + } + })(); } abort() { + if (this.aborted) { + return; + } + this.aborted = true; + this.emit("abort"); + //process.nextTick(emitAbortNT, this); this.destroy(); } - async _createBody( - body: ReadableStream | null, - opts: RequestOptions, - ): Promise { - if (!body) return null; - if (!opts.headers) return body; + // deno-lint-ignore no-explicit-any + destroy(err?: any) { + if (this.destroyed) { + return this; + } + this.destroyed = true; - const headers = Object.fromEntries( 
- Object.entries(opts.headers).map(([k, v]) => [k.toLowerCase(), v]), - ); - - if ( - !RE_TE_CHUNKED.test(headers["transfer-encoding"]) && - !Number.isNaN(Number.parseInt(headers["content-length"], 10)) - ) { - const bufferList: Buffer[] = []; - for await (const chunk of body) { - bufferList.push(chunk); - } - return Buffer.concat(bufferList); + // If we're aborting, we don't care about any more response data. + if (this.res) { + this.res._dump(); } - return body; + this[kError] = err; + this.socket?.destroy(err); + + return this; } _createCustomClient(): Promise { return Promise.resolve(undefined); } - _createSocket(): Socket { - // Note: Creates a dummy socket for the compatibility - // Sometimes the libraries check some properties of socket - // e.g. if (!response.socket.authorized) { ... } - return new Socket({}); - } - - _createUrlStrFromOptions(opts: RequestOptions): string { - if (opts.href) { - return opts.href; + _createUrlStrFromOptions(): string { + if (this.href) { + return this.href; } - const protocol = opts.protocol ?? this.defaultProtocol; - const auth = opts.auth; - const host = opts.host ?? opts.hostname ?? "localhost"; - const defaultPort = opts.agent?.defaultPort; - const port = opts.port ?? defaultPort ?? 80; - let path = opts.path ?? "/"; + const protocol = this.protocol ?? this.defaultProtocol; + const auth = this.auth; + const host = this.host ?? this.hostname ?? "localhost"; + const hash = this.hash ? `#${this.hash}` : ""; + const defaultPort = this.agent?.defaultPort; + const port = this.port ?? defaultPort ?? 80; + let path = this.path ?? "/"; if (!path.startsWith("/")) { path = "/" + path; } - return `${protocol}//${auth ? `${auth}@` : ""}${host}${ - port === 80 ? "" : `:${port}` - }${path}`; + const url = new URL( + `${protocol}//${auth ? `${auth}@` : ""}${host}${ + port === 80 ? 
"" : `:${port}` + }${path}`, + ); + url.hash = hash; + return url.href; } - setTimeout() { - console.log("not implemented: ClientRequest.setTimeout"); + setTimeout(msecs: number, callback?: () => void) { + if (msecs === 0) { + if (this._timeout) { + this.removeAllListeners("timeout"); + this._timeout.removeEventListener("abort", this._timeoutCb); + this._timeout = undefined; + } + + return this; + } + if (this._ended || this._timeout) { + return this; + } + + msecs = getTimerDuration(msecs, "msecs"); + if (callback) this.once("timeout", callback); + + const timeout = AbortSignal.timeout(msecs); + this._timeoutCb = () => this.emit("timeout"); + timeout.addEventListener("abort", this._timeoutCb); + this._timeout = timeout; + + return this; + } + + _processHeader(headers, key, value, validate) { + if (validate) { + validateHeaderName(key); + } + + // If key is content-disposition and there is content-length + // encode the value in latin1 + // https://www.rfc-editor.org/rfc/rfc6266#section-4.3 + // Refs: https://github.com/nodejs/node/pull/46528 + if (isContentDispositionField(key) && this._contentLength) { + value = Buffer.from(value, "latin1"); + } + + if (Array.isArray(value)) { + if ( + (value.length < 2 || !isCookieField(key)) && + (!this[kUniqueHeaders] || !this[kUniqueHeaders].has(key.toLowerCase())) + ) { + // Retain for(;;) loop for performance reasons + // Refs: https://github.com/nodejs/node/pull/30958 + for (let i = 0; i < value.length; i++) { + headers.push([key, value[i]]); + } + return; + } + value = value.join("; "); + } + headers.push([key, value]); } } +// isCookieField performs a case-insensitive comparison of a provided string +// against the word "cookie." As of V8 6.6 this is faster than handrolling or +// using a case-insensitive RegExp. 
+function isCookieField(s) { + return s.length === 6 && s.toLowerCase() === "cookie"; +} + +function isContentDispositionField(s) { + return s.length === 19 && + s.toLowerCase() === "content-disposition"; +} + +const kHeaders = Symbol("kHeaders"); +const kHeadersDistinct = Symbol("kHeadersDistinct"); +const kHeadersCount = Symbol("kHeadersCount"); +const kTrailers = Symbol("kTrailers"); +const kTrailersDistinct = Symbol("kTrailersDistinct"); +const kTrailersCount = Symbol("kTrailersCount"); + /** IncomingMessage for http(s) client */ export class IncomingMessageForClient extends NodeReadable { - reader: ReadableStreamDefaultReader | undefined; - #statusMessage = ""; - constructor(public response: Response | undefined, public socket: Socket) { + decoder = new TextDecoder(); + + constructor(socket: Socket) { super(); - this.reader = response?.body?.getReader(); + + this._readableState.readingMore = true; + + this.socket = socket; + + this.httpVersionMajor = null; + this.httpVersionMinor = null; + this.httpVersion = null; + this.complete = false; + this[kHeaders] = null; + this[kHeadersCount] = 0; + this.rawHeaders = []; + this[kTrailers] = null; + this[kTrailersCount] = 0; + this.rawTrailers = []; + this.joinDuplicateHeaders = false; + this.aborted = false; + + this.upgrade = null; + + // request (server) only + this.url = ""; + this.method = null; + + // response (client) only + this.statusCode = null; + this.statusMessage = null; + this.client = socket; + + this._consuming = false; + // Flag for when we decide that this message cannot possibly be + // read by the user, so there's no point continuing to handle it. 
+ this._dumped = false; } - override async _read(_size: number) { - if (this.reader === undefined) { - this.push(null); - return; - } - try { - const res = await this.reader.read(); - if (res.done) { - this.push(null); - return; - } - this.push(res.value); - } catch (e) { - // deno-lint-ignore no-explicit-any - this.destroy(e as any); - } + get connection() { + return this.socket; + } + + set connection(val) { + this.socket = val; } get headers() { - if (this.response) { - return Object.fromEntries(this.response.headers.entries()); + if (!this[kHeaders]) { + this[kHeaders] = {}; + + const src = this.rawHeaders; + const dst = this[kHeaders]; + + for (let n = 0; n < this[kHeadersCount]; n += 2) { + this._addHeaderLine(src[n + 0], src[n + 1], dst); + } } - return {}; + return this[kHeaders]; + } + + set headers(val) { + this[kHeaders] = val; + } + + get headersDistinct() { + if (!this[kHeadersDistinct]) { + this[kHeadersDistinct] = {}; + + const src = this.rawHeaders; + const dst = this[kHeadersDistinct]; + + for (let n = 0; n < this[kHeadersCount]; n += 2) { + this._addHeaderLineDistinct(src[n + 0], src[n + 1], dst); + } + } + return this[kHeadersDistinct]; + } + + set headersDistinct(val) { + this[kHeadersDistinct] = val; } get trailers() { - return {}; + if (!this[kTrailers]) { + this[kTrailers] = {}; + + const src = this.rawTrailers; + const dst = this[kTrailers]; + + for (let n = 0; n < this[kTrailersCount]; n += 2) { + this._addHeaderLine(src[n + 0], src[n + 1], dst); + } + } + return this[kTrailers]; } - get statusCode() { - return this.response?.status || 0; + set trailers(val) { + this[kTrailers] = val; } - get statusMessage() { - return this.#statusMessage || this.response?.statusText || ""; + get trailersDistinct() { + if (!this[kTrailersDistinct]) { + this[kTrailersDistinct] = {}; + + const src = this.rawTrailers; + const dst = this[kTrailersDistinct]; + + for (let n = 0; n < this[kTrailersCount]; n += 2) { + this._addHeaderLineDistinct(src[n + 0], src[n + 
1], dst); + } + } + return this[kTrailersDistinct]; } - set statusMessage(v: string) { - this.#statusMessage = v; + set trailersDistinct(val) { + this[kTrailersDistinct] = val; + } + + setTimeout(msecs, callback) { + if (callback) { + this.on("timeout", callback); + } + this.socket.setTimeout(msecs); + return this; + } + + _read(_n) { + if (!this._consuming) { + this._readableState.readingMore = false; + this._consuming = true; + } + + const buf = new Uint8Array(16 * 1024); + + core.read(this._bodyRid, buf).then((bytesRead) => { + if (bytesRead === 0) { + this.push(null); + } else { + this.push(Buffer.from(buf.subarray(0, bytesRead))); + } + }); + } + + // It's possible that the socket will be destroyed, and removed from + // any messages, before ever calling this. In that case, just skip + // it, since something else is destroying this connection anyway. + _destroy(err, cb) { + this.complete = true; + if (!this.readableEnded || !this.complete) { + this.aborted = true; + this.emit("aborted"); + } + + core.tryClose(this._bodyRid); + + // If aborted and the underlying socket is not already destroyed, + // destroy it. 
+ // We have to check if the socket is already destroyed because finished + // does not call the callback when this method is invoked from `_http_client` + // in `test/parallel/test-http-client-spurious-aborted.js` + if (this.socket && !this.socket.destroyed && this.aborted) { + this.socket.destroy(err); + const cleanup = finished(this.socket, (e) => { + if (e?.code === "ERR_STREAM_PREMATURE_CLOSE") { + e = null; + } + cleanup(); + onError(this, e || err, cb); + }); + } else { + onError(this, err, cb); + } + } + + _addHeaderLines(headers, n) { + if (headers && headers.length) { + let dest; + if (this.complete) { + this.rawTrailers = headers.flat(); + this[kTrailersCount] = n; + dest = this[kTrailers]; + } else { + this.rawHeaders = headers.flat(); + this[kHeadersCount] = n; + dest = this[kHeaders]; + } + + if (dest) { + for (const header of headers) { + this._addHeaderLine(header[0], header[1], dest); + } + } + } + } + + // Add the given (field, value) pair to the message + // + // Per RFC2616, section 4.2 it is acceptable to join multiple instances of the + // same header with a ', ' if the header in question supports specification of + // multiple values this way. The one exception to this is the Cookie header, + // which has multiple values joined with a '; ' instead. If a header's values + // cannot be joined in either of these ways, we declare the first instance the + // winner and drop the second. Extended header fields (those beginning with + // 'x-') are always joined. + _addHeaderLine(field, value, dest) { + field = matchKnownFields(field); + const flag = field.charCodeAt(0); + if (flag === 0 || flag === 2) { + field = field.slice(1); + // Make a delimited list + if (typeof dest[field] === "string") { + dest[field] += (flag === 0 ? 
", " : "; ") + value; + } else { + dest[field] = value; + } + } else if (flag === 1) { + // Array header -- only Set-Cookie at the moment + if (dest["set-cookie"] !== undefined) { + dest["set-cookie"].push(value); + } else { + dest["set-cookie"] = [value]; + } + } else if (this.joinDuplicateHeaders) { + // RFC 9110 https://www.rfc-editor.org/rfc/rfc9110#section-5.2 + // https://github.com/nodejs/node/issues/45699 + // allow authorization multiple fields + // Make a delimited list + if (dest[field] === undefined) { + dest[field] = value; + } else { + dest[field] += ", " + value; + } + } else if (dest[field] === undefined) { + // Drop duplicates + dest[field] = value; + } + } + + _addHeaderLineDistinct(field, value, dest) { + field = field.toLowerCase(); + if (!dest[field]) { + dest[field] = [value]; + } else { + dest[field].push(value); + } + } + + // Call this instead of resume() if we want to just + // dump all the data to /dev/null + _dump() { + if (!this._dumped) { + this._dumped = true; + // If there is buffered data, it may trigger 'data' events. + // Remove 'data' event listeners explicitly. + this.removeAllListeners("data"); + this.resume(); + } + } +} + +// This function is used to help avoid the lowercasing of a field name if it +// matches a 'traditional cased' version of a field name. It then returns the +// lowercased name to both avoid calling toLowerCase() a second time and to +// indicate whether the field was a 'no duplicates' field. If a field is not a +// 'no duplicates' field, a `0` byte is prepended as a flag. The one exception +// to this is the Set-Cookie header which is indicated by a `1` byte flag, since +// it is an 'array' field and thus is treated differently in _addHeaderLines(). 
+function matchKnownFields(field, lowercased) { + switch (field.length) { + case 3: + if (field === "Age" || field === "age") return "age"; + break; + case 4: + if (field === "Host" || field === "host") return "host"; + if (field === "From" || field === "from") return "from"; + if (field === "ETag" || field === "etag") return "etag"; + if (field === "Date" || field === "date") return "\u0000date"; + if (field === "Vary" || field === "vary") return "\u0000vary"; + break; + case 6: + if (field === "Server" || field === "server") return "server"; + if (field === "Cookie" || field === "cookie") return "\u0002cookie"; + if (field === "Origin" || field === "origin") return "\u0000origin"; + if (field === "Expect" || field === "expect") return "\u0000expect"; + if (field === "Accept" || field === "accept") return "\u0000accept"; + break; + case 7: + if (field === "Referer" || field === "referer") return "referer"; + if (field === "Expires" || field === "expires") return "expires"; + if (field === "Upgrade" || field === "upgrade") return "\u0000upgrade"; + break; + case 8: + if (field === "Location" || field === "location") { + return "location"; + } + if (field === "If-Match" || field === "if-match") { + return "\u0000if-match"; + } + break; + case 10: + if (field === "User-Agent" || field === "user-agent") { + return "user-agent"; + } + if (field === "Set-Cookie" || field === "set-cookie") { + return "\u0001"; + } + if (field === "Connection" || field === "connection") { + return "\u0000connection"; + } + break; + case 11: + if (field === "Retry-After" || field === "retry-after") { + return "retry-after"; + } + break; + case 12: + if (field === "Content-Type" || field === "content-type") { + return "content-type"; + } + if (field === "Max-Forwards" || field === "max-forwards") { + return "max-forwards"; + } + break; + case 13: + if (field === "Authorization" || field === "authorization") { + return "authorization"; + } + if (field === "Last-Modified" || field === 
"last-modified") { + return "last-modified"; + } + if (field === "Cache-Control" || field === "cache-control") { + return "\u0000cache-control"; + } + if (field === "If-None-Match" || field === "if-none-match") { + return "\u0000if-none-match"; + } + break; + case 14: + if (field === "Content-Length" || field === "content-length") { + return "content-length"; + } + break; + case 15: + if (field === "Accept-Encoding" || field === "accept-encoding") { + return "\u0000accept-encoding"; + } + if (field === "Accept-Language" || field === "accept-language") { + return "\u0000accept-language"; + } + if (field === "X-Forwarded-For" || field === "x-forwarded-for") { + return "\u0000x-forwarded-for"; + } + break; + case 16: + if (field === "Content-Encoding" || field === "content-encoding") { + return "\u0000content-encoding"; + } + if (field === "X-Forwarded-Host" || field === "x-forwarded-host") { + return "\u0000x-forwarded-host"; + } + break; + case 17: + if (field === "If-Modified-Since" || field === "if-modified-since") { + return "if-modified-since"; + } + if (field === "Transfer-Encoding" || field === "transfer-encoding") { + return "\u0000transfer-encoding"; + } + if (field === "X-Forwarded-Proto" || field === "x-forwarded-proto") { + return "\u0000x-forwarded-proto"; + } + break; + case 19: + if (field === "Proxy-Authorization" || field === "proxy-authorization") { + return "proxy-authorization"; + } + if (field === "If-Unmodified-Since" || field === "if-unmodified-since") { + return "if-unmodified-since"; + } + break; + } + if (lowercased) { + return "\u0000" + field; + } + return matchKnownFields(field.toLowerCase(), true); +} + +function onError(self, error, cb) { + // This is to keep backward compatible behavior. + // An error is emitted only if there are listeners attached to the event. 
+ if (self.listenerCount("error") === 0) { + cb(); + } else { + cb(error); } } @@ -406,7 +1212,7 @@ export class ServerResponse extends NodeWritable { finished = false; headersSent = false; #firstChunk: Chunk | null = null; - #reqEvent?: Deno.RequestEvent; + #resolve: (value: Response | PromiseLike) => void; static #enqueue(controller: ReadableStreamDefaultController, chunk: Chunk) { if (typeof chunk === "string") { @@ -422,7 +1228,7 @@ export class ServerResponse extends NodeWritable { return status === 101 || status === 204 || status === 205 || status === 304; } - constructor(reqEvent: undefined | Deno.RequestEvent) { + constructor(resolve: (value: Response | PromiseLike) => void) { let controller: ReadableByteStreamController; const readable = new ReadableStream({ start(c) { @@ -464,7 +1270,7 @@ export class ServerResponse extends NodeWritable { }, }); this.#readable = readable; - this.#reqEvent = reqEvent; + this.#resolve = resolve; } setHeader(name: string, value: string) { @@ -515,16 +1321,13 @@ export class ServerResponse extends NodeWritable { if (ServerResponse.#bodyShouldBeNull(this.statusCode!)) { body = null; } - this.#reqEvent!.respondWith( + this.#resolve( new Response(body, { headers: this.#headers, status: this.statusCode, statusText: this.statusMessage, }), - ).catch(() => { - // TODO(bartlomieju): this error should be handled somehow - // ignore this error - }); + ); } // deno-lint-ignore no-explicit-any @@ -556,7 +1359,7 @@ export class IncomingMessageForServer extends NodeReadable { // These properties are used by `npm:forwarded` for example. socket: { remoteAddress: string; remotePort: number }; - constructor(req: Request, conn: Deno.Conn) { + constructor(req: Request, remoteAddr: { hostname: string; port: number }) { // Check if no body (GET/HEAD/OPTIONS/...) 
const reader = req.body?.getReader(); super({ @@ -584,8 +1387,8 @@ export class IncomingMessageForServer extends NodeReadable { this.url = req.url?.slice(req.url.indexOf("/", 8)); this.method = req.method; this.socket = { - remoteAddress: conn.remoteAddr.hostname, - remotePort: conn.remoteAddr.port, + remoteAddress: remoteAddr.hostname, + remotePort: remoteAddr.port, }; this.#req = req; } @@ -627,10 +1430,19 @@ export function Server(handler?: ServerHandler): ServerImpl { class ServerImpl extends EventEmitter { #httpConnections: Set = new Set(); #listener?: Deno.Listener; + + #addr: Deno.NetAddr; + #hasClosed = false; + #server: Deno.Server; + #unref = false; + #ac?: AbortController; + #servePromise: Deferred; listening = false; constructor(handler?: ServerHandler) { super(); + this.#servePromise = deferred(); + this.#servePromise.then(() => this.emit("close")); if (handler !== undefined) { this.on("request", handler); } @@ -655,80 +1467,81 @@ class ServerImpl extends EventEmitter { // TODO(bnoordhuis) Node prefers [::] when host is omitted, // we on the other hand default to 0.0.0.0. + const hostname = options.host ?? "0.0.0.0"; + this.#addr = { + hostname, + port, + } as Deno.NetAddr; this.listening = true; - const hostname = options.host ?? ""; - this.#listener = Deno.listen({ port, hostname }); - nextTick(() => this.#listenLoop()); + nextTick(() => this.#serve()); return this; } - async #listenLoop() { - const go = async (tcpConn: Deno.Conn, httpConn: Deno.HttpConn) => { - try { - for (;;) { - let reqEvent = null; - try { - // Note: httpConn.nextRequest() calls httpConn.close() on error. - reqEvent = await httpConn.nextRequest(); - } catch { - // Connection closed. - // TODO(bnoordhuis) Emit "clientError" event on the http.Server - // instance? Node emits it when request parsing fails and expects - // the listener to send a raw 4xx HTTP response on the underlying - // net.Socket but we don't have one to pass to the listener. 
- } - if (reqEvent === null) { - break; - } - const req = new IncomingMessageForServer(reqEvent.request, tcpConn); - if (req.upgrade && this.listenerCount("upgrade") > 0) { - const conn = await denoHttp.upgradeHttpRaw( - reqEvent.request, - tcpConn, - ) as Deno.Conn; - const socket = new Socket({ - handle: new TCP(constants.SERVER, conn), - }); - this.emit("upgrade", req, socket, Buffer.from([])); - return; - } else { - const res = new ServerResponse(reqEvent); - this.emit("request", req, res); - } - } - } finally { - this.#httpConnections.delete(httpConn); + #serve() { + const ac = new AbortController(); + const handler = (request: Request, info: Deno.ServeHandlerInfo) => { + const req = new IncomingMessageForServer(request, info.remoteAddr); + if (req.upgrade && this.listenerCount("upgrade") > 0) { + const { conn, response } = upgradeHttpRaw(request); + const socket = new Socket({ + handle: new TCP(constants.SERVER, conn), + }); + this.emit("upgrade", req, socket, Buffer.from([])); + return response; + } else { + return new Promise((resolve): void => { + const res = new ServerResponse(resolve); + this.emit("request", req, res); + }); } }; - const listener = this.#listener; - - if (listener !== undefined) { - this.emit("listening"); - - for await (const conn of listener) { - let httpConn: Deno.HttpConn; - try { - httpConn = httpRuntime.serveHttp(conn); - } catch { - continue; /// Connection closed. 
- } - - this.#httpConnections.add(httpConn); - go(conn, httpConn); - } + if (this.#hasClosed) { + return; } + this.#ac = ac; + this.#server = serve( + { + handler: handler as Deno.ServeHandler, + ...this.#addr, + signal: ac.signal, + // @ts-ignore Might be any without `--unstable` flag + onListen: ({ port }) => { + this.#addr!.port = port; + this.emit("listening"); + }, + }, + ); + if (this.#unref) { + this.#server.unref(); + } + this.#server.finished.then(() => this.#servePromise!.resolve()); } setTimeout() { console.error("Not implemented: Server.setTimeout()"); } + ref() { + if (this.#server) { + this.#server.ref(); + } + this.#unref = false; + } + + unref() { + if (this.#server) { + this.#server.unref(); + } + this.#unref = true; + } + close(cb?: (err?: Error) => void): this { const listening = this.listening; this.listening = false; + this.#hasClosed = true; if (typeof cb === "function") { if (listening) { this.once("close", cb); @@ -739,31 +1552,21 @@ class ServerImpl extends EventEmitter { } } - nextTick(() => this.emit("close")); - - if (listening) { - this.#listener!.close(); - this.#listener = undefined; - - for (const httpConn of this.#httpConnections) { - try { - httpConn.close(); - } catch { - // Already closed. 
- } - } - - this.#httpConnections.clear(); + if (listening && this.#ac) { + this.#ac.abort(); + this.#ac = undefined; + } else { + this.#servePromise!.resolve(); } + this.#server = undefined; return this; } address() { - const addr = this.#listener!.addr as Deno.NetAddr; return { - port: addr.port, - address: addr.hostname, + port: this.#addr.port, + address: this.#addr.hostname, }; } } @@ -790,17 +1593,7 @@ export function request( ): ClientRequest; // deno-lint-ignore no-explicit-any export function request(...args: any[]) { - let options = {}; - if (typeof args[0] === "string") { - options = urlToHttpOptions(new URL(args.shift())); - } else if (args[0] instanceof URL) { - options = urlToHttpOptions(args.shift()); - } - if (args[0] && typeof args[0] !== "function") { - Object.assign(options, args.shift()); - } - args.unshift(options); - return new ClientRequest(args[0], args[1]); + return new ClientRequest(args[0], args[1], args[2]); } /** Makes a `GET` HTTP request. */ diff --git a/ext/node/polyfills/http2.ts b/ext/node/polyfills/http2.ts index 5ba3db48c1..a5d945efea 100644 --- a/ext/node/polyfills/http2.ts +++ b/ext/node/polyfills/http2.ts @@ -1,75 +1,1084 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. 
-import { notImplemented } from "ext:deno_node/_utils.ts"; +import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts"; +import { EventEmitter } from "ext:deno_node/events.ts"; +import { Buffer } from "ext:deno_node/buffer.ts"; +import { Server, Socket, TCP } from "ext:deno_node/net.ts"; +import { TypedArray } from "ext:deno_node/internal/util/types.ts"; +import { setStreamTimeout } from "ext:deno_node/internal/stream_base_commons.ts"; +import { FileHandle } from "ext:deno_node/fs/promises.ts"; +import { kStreamBaseField } from "ext:deno_node/internal_binding/stream_wrap.ts"; +import { addTrailers, serveHttpOnConnection } from "ext:deno_http/00_serve.js"; +import { type Deferred, deferred } from "ext:deno_node/_util/async.ts"; +import { nextTick } from "ext:deno_node/_next_tick.ts"; +import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; -export class Http2Session { +const ENCODER = new TextEncoder(); +type Http2Headers = Record; + +export class Http2Session extends EventEmitter { constructor() { - notImplemented("Http2Session.prototype.constructor"); + super(); + } + + get alpnProtocol(): string | undefined { + notImplemented("Http2Session.alpnProtocol"); + return undefined; + } + + close(_callback?: () => void) { + warnNotImplemented("Http2Session.close"); + } + + get closed(): boolean { + return false; + } + + get connecting(): boolean { + notImplemented("Http2Session.connecting"); + return false; + } + + destroy(_error?: Error, _code?: number) { + notImplemented("Http2Session.destroy"); + } + + get destroyed(): boolean { + return false; + } + + get encrypted(): boolean { + notImplemented("Http2Session.encrypted"); + return false; + } + + goaway( + _code: number, + _lastStreamID: number, + _opaqueData: Buffer | TypedArray | DataView, + ) { + notImplemented("Http2Session.goaway"); + } + + get localSettings(): Record { + notImplemented("Http2Session.localSettings"); + return {}; + } + + get originSet(): string[] | undefined { + 
notImplemented("Http2Session.originSet"); + return undefined; + } + + get pendingSettingsAck(): boolean { + notImplemented("Http2Session.pendingSettingsAck"); + return false; + } + + ping( + _payload: Buffer | TypedArray | DataView, + _callback: () => void, + ): boolean { + notImplemented("Http2Session.ping"); + return false; + } + + ref() { + warnNotImplemented("Http2Session.ref"); + } + + get remoteSettings(): Record { + notImplemented("Http2Session.remoteSettings"); + return {}; + } + + setLocalWindowSize(_windowSize: number) { + notImplemented("Http2Session.setLocalWindowSize"); + } + + setTimeout(msecs: number, callback?: () => void) { + setStreamTimeout(this, msecs, callback); + } + + get socket(): Socket /*| TlsSocket*/ { + return {}; + } + + get state(): Record { + return {}; + } + + settings(_settings: Record, _callback: () => void) { + notImplemented("Http2Session.settings"); + } + + get type(): number { + notImplemented("Http2Session.type"); + return 0; + } + + unref() { + warnNotImplemented("Http2Session.unref"); } } -export class ServerHttp2Session { + +export class ServerHttp2Session extends Http2Session { constructor() { - notImplemented("ServerHttp2Session"); + super(); + } + + altsvc( + _alt: string, + _originOrStream: number | string | URL | { origin: string }, + ) { + notImplemented("ServerHttp2Session.altsvc"); + } + + origin(..._origins: (string | URL | { origin: string })[]) { + notImplemented("ServerHttp2Session.origins"); } } -export class ClientHttp2Session { - constructor() { - notImplemented("ClientHttp2Session"); + +export class ClientHttp2Session extends Http2Session { + constructor( + _authority: string | URL, + _options: Record, + callback: (session: Http2Session) => void, + ) { + super(); + if (callback) { + this.on("connect", callback); + } + nextTick(() => this.emit("connect", this)); + } + + request( + headers: Http2Headers, + _options?: Record, + ): ClientHttp2Stream { + const reqHeaders: string[][] = []; + const 
controllerPromise: Deferred< + ReadableStreamDefaultController + > = deferred(); + const body = new ReadableStream({ + start(controller) { + controllerPromise.resolve(controller); + }, + }); + const request: RequestInit = { headers: reqHeaders, body }; + let authority = null; + let path = null; + for (const [name, value] of Object.entries(headers)) { + if (name == constants.HTTP2_HEADER_PATH) { + path = String(value); + } else if (name == constants.HTTP2_HEADER_METHOD) { + request.method = String(value); + } else if (name == constants.HTTP2_HEADER_AUTHORITY) { + authority = String(value); + } else { + reqHeaders.push([name, String(value)]); + } + } + + const fetchPromise = fetch(`http://${authority}${path}`, request); + const readerPromise = deferred(); + const headersPromise = deferred(); + (async () => { + const fetch = await fetchPromise; + readerPromise.resolve(fetch.body); + + const headers: Http2Headers = {}; + for (const [key, value] of fetch.headers) { + headers[key] = value; + } + headers[constants.HTTP2_HEADER_STATUS] = String(fetch.status); + + headersPromise.resolve(headers); + })(); + return new ClientHttp2Stream( + this, + headersPromise, + controllerPromise, + readerPromise, + ); } } -export class Http2Stream { - constructor() { - notImplemented("Http2Stream"); + +export class Http2Stream extends EventEmitter { + #session: Http2Session; + #headers: Deferred; + #controllerPromise: Deferred>; + #readerPromise: Deferred>; + #closed: boolean; + _response: Response; + + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Promise>, + readerPromise: Promise>, + ) { + super(); + this.#session = session; + this.#headers = headers; + this.#controllerPromise = controllerPromise; + this.#readerPromise = readerPromise; + this.#closed = false; + nextTick(() => { + (async () => { + const headers = await this.#headers; + this.emit("headers", headers); + })(); + (async () => { + const reader = await this.#readerPromise; + if (reader) { + 
for await (const data of reader) { + this.emit("data", new Buffer(data)); + } + } + this.emit("end"); + })(); + }); + } + + // TODO(mmastrac): Implement duplex + end() { + (async () => { + const controller = await this.#controllerPromise; + controller.close(); + })(); + } + + write(buffer, callback?: () => void) { + (async () => { + const controller = await this.#controllerPromise; + if (typeof buffer === "string") { + controller.enqueue(ENCODER.encode(buffer)); + } else { + controller.enqueue(buffer); + } + callback?.(); + })(); + } + + resume() { + } + + pause() { + } + + get aborted(): boolean { + notImplemented("Http2Stream.aborted"); + return false; + } + + get bufferSize(): number { + notImplemented("Http2Stream.bufferSize"); + return 0; + } + + close(_code: number, _callback: () => void) { + this.#closed = true; + this.emit("close"); + } + + get closed(): boolean { + return this.#closed; + } + + get destroyed(): boolean { + return false; + } + + get endAfterHeaders(): boolean { + notImplemented("Http2Stream.endAfterHeaders"); + return false; + } + + get id(): number | undefined { + notImplemented("Http2Stream.id"); + return undefined; + } + + get pending(): boolean { + notImplemented("Http2Stream.pending"); + return false; + } + + priority(_options: Record) { + notImplemented("Http2Stream.priority"); + } + + get rstCode(): number { + // notImplemented("Http2Stream.rstCode"); + return 0; + } + + get sentHeaders(): boolean { + notImplemented("Http2Stream.sentHeaders"); + return false; + } + + get sentInfoHeaders(): Record { + notImplemented("Http2Stream.sentInfoHeaders"); + return {}; + } + + get sentTrailers(): Record { + notImplemented("Http2Stream.sentTrailers"); + return {}; + } + + get session(): Http2Session { + return this.#session; + } + + setTimeout(msecs: number, callback?: () => void) { + setStreamTimeout(this, msecs, callback); + } + + get state(): Record { + notImplemented("Http2Stream.state"); + return {}; + } + + sendTrailers(_headers: Record) { 
+ addTrailers(this._response, [["grpc-status", "0"], ["grpc-message", "OK"]]); } } -export class ClientHttp2Stream { - constructor() { - notImplemented("ClientHttp2Stream"); + +export class ClientHttp2Stream extends Http2Stream { + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Deferred>, + readerPromise: Deferred>, + ) { + super(session, headers, controllerPromise, readerPromise); } } -export class ServerHttp2Stream { - constructor() { - notImplemented("ServerHttp2Stream"); + +export class ServerHttp2Stream extends Http2Stream { + _promise: Deferred; + #body: ReadableStream; + #waitForTrailers: boolean; + #headersSent: boolean; + + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Promise>, + reader: ReadableStream, + body: ReadableStream, + ) { + super(session, headers, controllerPromise, Promise.resolve(reader)); + this._promise = new deferred(); + this.#body = body; + } + + additionalHeaders(_headers: Record) { + notImplemented("ServerHttp2Stream.additionalHeaders"); + } + + end(): void { + super.end(); + if (this.#waitForTrailers) { + this.emit("wantTrailers"); + } + } + + get headersSent(): boolean { + return this.#headersSent; + } + + get pushAllowed(): boolean { + notImplemented("ServerHttp2Stream.pushAllowed"); + return false; + } + + pushStream( + _headers: Record, + _options: Record, + _callback: () => unknown, + ) { + notImplemented("ServerHttp2Stream.pushStream"); + } + + respond( + headers: Http2Headers, + options: Record, + ) { + this.#headersSent = true; + const response: ResponseInit = {}; + if (headers) { + for (const [name, value] of Object.entries(headers)) { + if (name == constants.HTTP2_HEADER_STATUS) { + response.status = Number(value); + } + } + } + if (options?.endStream) { + this._promise.resolve(this._response = new Response("", response)); + } else { + this.#waitForTrailers = options?.waitForTrailers; + this._promise.resolve( + this._response = new Response(this.#body, 
response), + ); + } + } + + respondWithFD( + _fd: number | FileHandle, + _headers: Record, + _options: Record, + ) { + notImplemented("ServerHttp2Stream.respondWithFD"); + } + + respondWithFile( + _path: string | Buffer | URL, + _headers: Record, + _options: Record, + ) { + notImplemented("ServerHttp2Stream.respondWithFile"); } } -export class Http2Server { - constructor() { - notImplemented("Http2Server"); + +export class Http2Server extends Server { + #options: Record = {}; + #abortController; + #server; + timeout = 0; + + constructor( + options: Record, + requestListener: () => unknown, + ) { + super(options); + this.#abortController = new AbortController(); + this.on( + "connection", + (conn: Deno.Conn) => { + try { + const session = new ServerHttp2Session(); + this.emit("session", session); + this.#server = serveHttpOnConnection( + conn, + this.#abortController.signal, + async (req: Request) => { + try { + const controllerPromise: Deferred< + ReadableStreamDefaultController + > = deferred(); + const body = new ReadableStream({ + start(controller) { + controllerPromise.resolve(controller); + }, + }); + const headers: Http2Headers = {}; + for (const [name, value] of req.headers) { + headers[name] = value; + } + headers[constants.HTTP2_HEADER_PATH] = + new URL(req.url).pathname; + const stream = new ServerHttp2Stream( + session, + Promise.resolve(headers), + controllerPromise, + req.body, + body, + ); + session.emit("stream", stream, headers); + this.emit("stream", stream, headers); + return await stream._promise; + } catch (e) { + console.log("Error in serveHttpOnConnection", e); + } + return new Response(""); + }, + () => { + console.log("error"); + }, + () => {}, + ); + } catch (e) { + console.log("Error in Http2Server", e); + } + }, + ); + this.on( + "newListener", + (event) => console.log(`Event in newListener: ${event}`), + ); + this.#options = options; + if (typeof requestListener === "function") { + this.on("request", requestListener); + } + } + + // 
Prevent the TCP server from wrapping this in a socket, since we need it to serve HTTP + _createSocket(clientHandle: TCP) { + return clientHandle[kStreamBaseField]; + } + + close(callback?: () => unknown) { + if (callback) { + this.on("close", callback); + } + this.#abortController.abort(); + super.close(); + } + + setTimeout(msecs: number, callback?: () => unknown) { + this.timeout = msecs; + if (callback !== undefined) { + this.on("timeout", callback); + } + } + + updateSettings(settings: Record) { + this.#options.settings = { ...this.#options.settings, ...settings }; } } -export class Http2SecureServer { - constructor() { - notImplemented("Http2SecureServer"); + +export class Http2SecureServer extends Server { + #options: Record = {}; + timeout = 0; + + constructor( + options: Record, + requestListener: () => unknown, + ) { + super(options, function () { + notImplemented("connectionListener"); + }); + this.#options = options; + if (typeof requestListener === "function") { + this.on("request", requestListener); + } + } + + close(_callback?: () => unknown) { + notImplemented("Http2SecureServer.close"); + } + + setTimeout(msecs: number, callback?: () => unknown) { + this.timeout = msecs; + if (callback !== undefined) { + this.on("timeout", callback); + } + } + + updateSettings(settings: Record) { + this.#options.settings = { ...this.#options.settings, ...settings }; } } -export function createServer() {} -export function createSecureServer() {} -export function connect() {} -export const constants = {}; -export function getDefaultSettings() {} -export function getPackedSettings() {} -export function getUnpackedSettings() {} + +export function createServer( + options: Record, + onRequestHandler: () => unknown, +): Http2Server { + if (typeof options === "function") { + onRequestHandler = options; + options = {}; + } + return new Http2Server(options, onRequestHandler); +} + +export function createSecureServer( + _options: Record, + _onRequestHandler: () => unknown, +): 
Http2SecureServer { + notImplemented("http2.createSecureServer"); + return new Http2SecureServer(); +} + +export function connect( + authority: string | URL, + options: Record, + callback: (session: ClientHttp2Session) => void, +): ClientHttp2Session { + return new ClientHttp2Session(authority, options, callback); +} + +export const constants = { + NGHTTP2_ERR_FRAME_SIZE_ERROR: -522, + NGHTTP2_SESSION_SERVER: 0, + NGHTTP2_SESSION_CLIENT: 1, + NGHTTP2_STREAM_STATE_IDLE: 1, + NGHTTP2_STREAM_STATE_OPEN: 2, + NGHTTP2_STREAM_STATE_RESERVED_LOCAL: 3, + NGHTTP2_STREAM_STATE_RESERVED_REMOTE: 4, + NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: 5, + NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: 6, + NGHTTP2_STREAM_STATE_CLOSED: 7, + NGHTTP2_FLAG_NONE: 0, + NGHTTP2_FLAG_END_STREAM: 1, + NGHTTP2_FLAG_END_HEADERS: 4, + NGHTTP2_FLAG_ACK: 1, + NGHTTP2_FLAG_PADDED: 8, + NGHTTP2_FLAG_PRIORITY: 32, + DEFAULT_SETTINGS_HEADER_TABLE_SIZE: 4096, + DEFAULT_SETTINGS_ENABLE_PUSH: 1, + DEFAULT_SETTINGS_MAX_CONCURRENT_STREAMS: 4294967295, + DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: 65535, + DEFAULT_SETTINGS_MAX_FRAME_SIZE: 16384, + DEFAULT_SETTINGS_MAX_HEADER_LIST_SIZE: 65535, + DEFAULT_SETTINGS_ENABLE_CONNECT_PROTOCOL: 0, + MAX_MAX_FRAME_SIZE: 16777215, + MIN_MAX_FRAME_SIZE: 16384, + MAX_INITIAL_WINDOW_SIZE: 2147483647, + NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: 1, + NGHTTP2_SETTINGS_ENABLE_PUSH: 2, + NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: 3, + NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: 4, + NGHTTP2_SETTINGS_MAX_FRAME_SIZE: 5, + NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: 6, + NGHTTP2_SETTINGS_ENABLE_CONNECT_PROTOCOL: 8, + PADDING_STRATEGY_NONE: 0, + PADDING_STRATEGY_ALIGNED: 1, + PADDING_STRATEGY_MAX: 2, + PADDING_STRATEGY_CALLBACK: 1, + NGHTTP2_NO_ERROR: 0, + NGHTTP2_PROTOCOL_ERROR: 1, + NGHTTP2_INTERNAL_ERROR: 2, + NGHTTP2_FLOW_CONTROL_ERROR: 3, + NGHTTP2_SETTINGS_TIMEOUT: 4, + NGHTTP2_STREAM_CLOSED: 5, + NGHTTP2_FRAME_SIZE_ERROR: 6, + NGHTTP2_REFUSED_STREAM: 7, + NGHTTP2_CANCEL: 8, + NGHTTP2_COMPRESSION_ERROR: 9, + 
NGHTTP2_CONNECT_ERROR: 10, + NGHTTP2_ENHANCE_YOUR_CALM: 11, + NGHTTP2_INADEQUATE_SECURITY: 12, + NGHTTP2_HTTP_1_1_REQUIRED: 13, + NGHTTP2_DEFAULT_WEIGHT: 16, + HTTP2_HEADER_STATUS: ":status", + HTTP2_HEADER_METHOD: ":method", + HTTP2_HEADER_AUTHORITY: ":authority", + HTTP2_HEADER_SCHEME: ":scheme", + HTTP2_HEADER_PATH: ":path", + HTTP2_HEADER_PROTOCOL: ":protocol", + HTTP2_HEADER_ACCEPT_ENCODING: "accept-encoding", + HTTP2_HEADER_ACCEPT_LANGUAGE: "accept-language", + HTTP2_HEADER_ACCEPT_RANGES: "accept-ranges", + HTTP2_HEADER_ACCEPT: "accept", + HTTP2_HEADER_ACCESS_CONTROL_ALLOW_CREDENTIALS: + "access-control-allow-credentials", + HTTP2_HEADER_ACCESS_CONTROL_ALLOW_HEADERS: "access-control-allow-headers", + HTTP2_HEADER_ACCESS_CONTROL_ALLOW_METHODS: "access-control-allow-methods", + HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: "access-control-allow-origin", + HTTP2_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS: "access-control-expose-headers", + HTTP2_HEADER_ACCESS_CONTROL_REQUEST_HEADERS: "access-control-request-headers", + HTTP2_HEADER_ACCESS_CONTROL_REQUEST_METHOD: "access-control-request-method", + HTTP2_HEADER_AGE: "age", + HTTP2_HEADER_AUTHORIZATION: "authorization", + HTTP2_HEADER_CACHE_CONTROL: "cache-control", + HTTP2_HEADER_CONNECTION: "connection", + HTTP2_HEADER_CONTENT_DISPOSITION: "content-disposition", + HTTP2_HEADER_CONTENT_ENCODING: "content-encoding", + HTTP2_HEADER_CONTENT_LENGTH: "content-length", + HTTP2_HEADER_CONTENT_TYPE: "content-type", + HTTP2_HEADER_COOKIE: "cookie", + HTTP2_HEADER_DATE: "date", + HTTP2_HEADER_ETAG: "etag", + HTTP2_HEADER_FORWARDED: "forwarded", + HTTP2_HEADER_HOST: "host", + HTTP2_HEADER_IF_MODIFIED_SINCE: "if-modified-since", + HTTP2_HEADER_IF_NONE_MATCH: "if-none-match", + HTTP2_HEADER_IF_RANGE: "if-range", + HTTP2_HEADER_LAST_MODIFIED: "last-modified", + HTTP2_HEADER_LINK: "link", + HTTP2_HEADER_LOCATION: "location", + HTTP2_HEADER_RANGE: "range", + HTTP2_HEADER_REFERER: "referer", + HTTP2_HEADER_SERVER: "server", + 
HTTP2_HEADER_SET_COOKIE: "set-cookie", + HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: "strict-transport-security", + HTTP2_HEADER_TRANSFER_ENCODING: "transfer-encoding", + HTTP2_HEADER_TE: "te", + HTTP2_HEADER_UPGRADE_INSECURE_REQUESTS: "upgrade-insecure-requests", + HTTP2_HEADER_UPGRADE: "upgrade", + HTTP2_HEADER_USER_AGENT: "user-agent", + HTTP2_HEADER_VARY: "vary", + HTTP2_HEADER_X_CONTENT_TYPE_OPTIONS: "x-content-type-options", + HTTP2_HEADER_X_FRAME_OPTIONS: "x-frame-options", + HTTP2_HEADER_KEEP_ALIVE: "keep-alive", + HTTP2_HEADER_PROXY_CONNECTION: "proxy-connection", + HTTP2_HEADER_X_XSS_PROTECTION: "x-xss-protection", + HTTP2_HEADER_ALT_SVC: "alt-svc", + HTTP2_HEADER_CONTENT_SECURITY_POLICY: "content-security-policy", + HTTP2_HEADER_EARLY_DATA: "early-data", + HTTP2_HEADER_EXPECT_CT: "expect-ct", + HTTP2_HEADER_ORIGIN: "origin", + HTTP2_HEADER_PURPOSE: "purpose", + HTTP2_HEADER_TIMING_ALLOW_ORIGIN: "timing-allow-origin", + HTTP2_HEADER_X_FORWARDED_FOR: "x-forwarded-for", + HTTP2_HEADER_PRIORITY: "priority", + HTTP2_HEADER_ACCEPT_CHARSET: "accept-charset", + HTTP2_HEADER_ACCESS_CONTROL_MAX_AGE: "access-control-max-age", + HTTP2_HEADER_ALLOW: "allow", + HTTP2_HEADER_CONTENT_LANGUAGE: "content-language", + HTTP2_HEADER_CONTENT_LOCATION: "content-location", + HTTP2_HEADER_CONTENT_MD5: "content-md5", + HTTP2_HEADER_CONTENT_RANGE: "content-range", + HTTP2_HEADER_DNT: "dnt", + HTTP2_HEADER_EXPECT: "expect", + HTTP2_HEADER_EXPIRES: "expires", + HTTP2_HEADER_FROM: "from", + HTTP2_HEADER_IF_MATCH: "if-match", + HTTP2_HEADER_IF_UNMODIFIED_SINCE: "if-unmodified-since", + HTTP2_HEADER_MAX_FORWARDS: "max-forwards", + HTTP2_HEADER_PREFER: "prefer", + HTTP2_HEADER_PROXY_AUTHENTICATE: "proxy-authenticate", + HTTP2_HEADER_PROXY_AUTHORIZATION: "proxy-authorization", + HTTP2_HEADER_REFRESH: "refresh", + HTTP2_HEADER_RETRY_AFTER: "retry-after", + HTTP2_HEADER_TRAILER: "trailer", + HTTP2_HEADER_TK: "tk", + HTTP2_HEADER_VIA: "via", + HTTP2_HEADER_WARNING: "warning", + 
HTTP2_HEADER_WWW_AUTHENTICATE: "www-authenticate", + HTTP2_HEADER_HTTP2_SETTINGS: "http2-settings", + HTTP2_METHOD_ACL: "ACL", + HTTP2_METHOD_BASELINE_CONTROL: "BASELINE-CONTROL", + HTTP2_METHOD_BIND: "BIND", + HTTP2_METHOD_CHECKIN: "CHECKIN", + HTTP2_METHOD_CHECKOUT: "CHECKOUT", + HTTP2_METHOD_CONNECT: "CONNECT", + HTTP2_METHOD_COPY: "COPY", + HTTP2_METHOD_DELETE: "DELETE", + HTTP2_METHOD_GET: "GET", + HTTP2_METHOD_HEAD: "HEAD", + HTTP2_METHOD_LABEL: "LABEL", + HTTP2_METHOD_LINK: "LINK", + HTTP2_METHOD_LOCK: "LOCK", + HTTP2_METHOD_MERGE: "MERGE", + HTTP2_METHOD_MKACTIVITY: "MKACTIVITY", + HTTP2_METHOD_MKCALENDAR: "MKCALENDAR", + HTTP2_METHOD_MKCOL: "MKCOL", + HTTP2_METHOD_MKREDIRECTREF: "MKREDIRECTREF", + HTTP2_METHOD_MKWORKSPACE: "MKWORKSPACE", + HTTP2_METHOD_MOVE: "MOVE", + HTTP2_METHOD_OPTIONS: "OPTIONS", + HTTP2_METHOD_ORDERPATCH: "ORDERPATCH", + HTTP2_METHOD_PATCH: "PATCH", + HTTP2_METHOD_POST: "POST", + HTTP2_METHOD_PRI: "PRI", + HTTP2_METHOD_PROPFIND: "PROPFIND", + HTTP2_METHOD_PROPPATCH: "PROPPATCH", + HTTP2_METHOD_PUT: "PUT", + HTTP2_METHOD_REBIND: "REBIND", + HTTP2_METHOD_REPORT: "REPORT", + HTTP2_METHOD_SEARCH: "SEARCH", + HTTP2_METHOD_TRACE: "TRACE", + HTTP2_METHOD_UNBIND: "UNBIND", + HTTP2_METHOD_UNCHECKOUT: "UNCHECKOUT", + HTTP2_METHOD_UNLINK: "UNLINK", + HTTP2_METHOD_UNLOCK: "UNLOCK", + HTTP2_METHOD_UPDATE: "UPDATE", + HTTP2_METHOD_UPDATEREDIRECTREF: "UPDATEREDIRECTREF", + HTTP2_METHOD_VERSION_CONTROL: "VERSION-CONTROL", + HTTP_STATUS_CONTINUE: 100, + HTTP_STATUS_SWITCHING_PROTOCOLS: 101, + HTTP_STATUS_PROCESSING: 102, + HTTP_STATUS_EARLY_HINTS: 103, + HTTP_STATUS_OK: 200, + HTTP_STATUS_CREATED: 201, + HTTP_STATUS_ACCEPTED: 202, + HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: 203, + HTTP_STATUS_NO_CONTENT: 204, + HTTP_STATUS_RESET_CONTENT: 205, + HTTP_STATUS_PARTIAL_CONTENT: 206, + HTTP_STATUS_MULTI_STATUS: 207, + HTTP_STATUS_ALREADY_REPORTED: 208, + HTTP_STATUS_IM_USED: 226, + HTTP_STATUS_MULTIPLE_CHOICES: 300, + HTTP_STATUS_MOVED_PERMANENTLY: 301, + 
HTTP_STATUS_FOUND: 302, + HTTP_STATUS_SEE_OTHER: 303, + HTTP_STATUS_NOT_MODIFIED: 304, + HTTP_STATUS_USE_PROXY: 305, + HTTP_STATUS_TEMPORARY_REDIRECT: 307, + HTTP_STATUS_PERMANENT_REDIRECT: 308, + HTTP_STATUS_BAD_REQUEST: 400, + HTTP_STATUS_UNAUTHORIZED: 401, + HTTP_STATUS_PAYMENT_REQUIRED: 402, + HTTP_STATUS_FORBIDDEN: 403, + HTTP_STATUS_NOT_FOUND: 404, + HTTP_STATUS_METHOD_NOT_ALLOWED: 405, + HTTP_STATUS_NOT_ACCEPTABLE: 406, + HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: 407, + HTTP_STATUS_REQUEST_TIMEOUT: 408, + HTTP_STATUS_CONFLICT: 409, + HTTP_STATUS_GONE: 410, + HTTP_STATUS_LENGTH_REQUIRED: 411, + HTTP_STATUS_PRECONDITION_FAILED: 412, + HTTP_STATUS_PAYLOAD_TOO_LARGE: 413, + HTTP_STATUS_URI_TOO_LONG: 414, + HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: 415, + HTTP_STATUS_RANGE_NOT_SATISFIABLE: 416, + HTTP_STATUS_EXPECTATION_FAILED: 417, + HTTP_STATUS_TEAPOT: 418, + HTTP_STATUS_MISDIRECTED_REQUEST: 421, + HTTP_STATUS_UNPROCESSABLE_ENTITY: 422, + HTTP_STATUS_LOCKED: 423, + HTTP_STATUS_FAILED_DEPENDENCY: 424, + HTTP_STATUS_TOO_EARLY: 425, + HTTP_STATUS_UPGRADE_REQUIRED: 426, + HTTP_STATUS_PRECONDITION_REQUIRED: 428, + HTTP_STATUS_TOO_MANY_REQUESTS: 429, + HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: 431, + HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: 451, + HTTP_STATUS_INTERNAL_SERVER_ERROR: 500, + HTTP_STATUS_NOT_IMPLEMENTED: 501, + HTTP_STATUS_BAD_GATEWAY: 502, + HTTP_STATUS_SERVICE_UNAVAILABLE: 503, + HTTP_STATUS_GATEWAY_TIMEOUT: 504, + HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: 505, + HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: 506, + HTTP_STATUS_INSUFFICIENT_STORAGE: 507, + HTTP_STATUS_LOOP_DETECTED: 508, + HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: 509, + HTTP_STATUS_NOT_EXTENDED: 510, + HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: 511, +}; + +export function getDefaultSettings(): Record { + notImplemented("http2.getDefaultSettings"); + return {}; +} + +export function getPackedSettings(_settings: Record): Buffer { + notImplemented("http2.getPackedSettings"); + return {}; +} + +export 
function getUnpackedSettings( + _buffer: Buffer | TypedArray, +): Record { + notImplemented("http2.getUnpackedSettings"); + return {}; +} + export const sensitiveHeaders = Symbol("nodejs.http2.sensitiveHeaders"); + export class Http2ServerRequest { constructor() { - notImplemented("Http2ServerRequest"); + } + + get aborted(): boolean { + notImplemented("Http2ServerRequest.aborted"); + return false; + } + + get authority(): string { + notImplemented("Http2ServerRequest.authority"); + return ""; + } + + get complete(): boolean { + notImplemented("Http2ServerRequest.complete"); + return false; + } + + get connection(): Socket /*| TlsSocket*/ { + notImplemented("Http2ServerRequest.connection"); + return {}; + } + + destroy(_error: Error) { + notImplemented("Http2ServerRequest.destroy"); + } + + get headers(): Record { + notImplemented("Http2ServerRequest.headers"); + return {}; + } + + get httpVersion(): string { + notImplemented("Http2ServerRequest.httpVersion"); + return ""; + } + + get method(): string { + notImplemented("Http2ServerRequest.method"); + return ""; + } + + get rawHeaders(): string[] { + notImplemented("Http2ServerRequest.rawHeaders"); + return []; + } + + get rawTrailers(): string[] { + notImplemented("Http2ServerRequest.rawTrailers"); + return []; + } + + get scheme(): string { + notImplemented("Http2ServerRequest.scheme"); + return ""; + } + + setTimeout(msecs: number, callback?: () => unknown) { + this.stream.setTimeout(callback, msecs); + } + + get socket(): Socket /*| TlsSocket*/ { + notImplemented("Http2ServerRequest.socket"); + return {}; + } + + get stream(): Http2Stream { + notImplemented("Http2ServerRequest.stream"); + return new Http2Stream(); + } + + get trailers(): Record { + notImplemented("Http2ServerRequest.trailers"); + return {}; + } + + get url(): string { + notImplemented("Http2ServerRequest.url"); + return ""; } } + export class Http2ServerResponse { constructor() { - notImplemented("Http2ServerResponse"); + } + + 
addTrailers(_headers: Record) { + notImplemented("Http2ServerResponse.addTrailers"); + } + + get connection(): Socket /*| TlsSocket*/ { + notImplemented("Http2ServerResponse.connection"); + return {}; + } + + createPushResponse( + _headers: Record, + _callback: () => unknown, + ) { + notImplemented("Http2ServerResponse.createPushResponse"); + } + + end( + _data: string | Buffer | Uint8Array, + _encoding: string, + _callback: () => unknown, + ) { + notImplemented("Http2ServerResponse.end"); + } + + get finished(): boolean { + notImplemented("Http2ServerResponse.finished"); + return false; + } + + getHeader(_name: string): string { + notImplemented("Http2ServerResponse.getHeader"); + return ""; + } + + getHeaderNames(): string[] { + notImplemented("Http2ServerResponse.getHeaderNames"); + return []; + } + + getHeaders(): Record { + notImplemented("Http2ServerResponse.getHeaders"); + return {}; + } + + hasHeader(_name: string) { + notImplemented("Http2ServerResponse.hasHeader"); + } + + get headersSent(): boolean { + notImplemented("Http2ServerResponse.headersSent"); + return false; + } + + removeHeader(_name: string) { + notImplemented("Http2ServerResponse.removeHeader"); + } + + get req(): Http2ServerRequest { + notImplemented("Http2ServerResponse.req"); + return new Http2ServerRequest(); + } + + get sendDate(): boolean { + notImplemented("Http2ServerResponse.sendDate"); + return false; + } + + setHeader(_name: string, _value: string | string[]) { + notImplemented("Http2ServerResponse.setHeader"); + } + + setTimeout(msecs: number, callback?: () => unknown) { + this.stream.setTimeout(msecs, callback); + } + + get socket(): Socket /*| TlsSocket*/ { + notImplemented("Http2ServerResponse.socket"); + return {}; + } + + get statusCode(): number { + notImplemented("Http2ServerResponse.statusCode"); + return 0; + } + + get statusMessage(): string { + notImplemented("Http2ServerResponse.statusMessage"); + return ""; + } + + get stream(): Http2Stream { + 
notImplemented("Http2ServerResponse.stream"); + return new Http2Stream(); + } + + get writableEnded(): boolean { + notImplemented("Http2ServerResponse.writableEnded"); + return false; + } + + write( + _chunk: string | Buffer | Uint8Array, + _encoding: string, + _callback: () => unknown, + ) { + notImplemented("Http2ServerResponse.write"); + return this.write; + } + + writeContinue() { + notImplemented("Http2ServerResponse.writeContinue"); + } + + writeEarlyHints(_hints: Record) { + notImplemented("Http2ServerResponse.writeEarlyHints"); + } + + writeHead( + _statusCode: number, + _statusMessage: string, + _headers: Record, + ) { + notImplemented("Http2ServerResponse.writeHead"); } } + export default { - Http2Session, - ServerHttp2Session, - ClientHttp2Session, - Http2Stream, - ClientHttp2Stream, - ServerHttp2Stream, - Http2Server, - Http2SecureServer, createServer, createSecureServer, connect, diff --git a/ext/node/polyfills/https.ts b/ext/node/polyfills/https.ts index a64e8265a2..dfd8f24d9f 100644 --- a/ext/node/polyfills/https.ts +++ b/ext/node/polyfills/https.ts @@ -4,15 +4,12 @@ import { notImplemented } from "ext:deno_node/_utils.ts"; import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { - Agent as HttpAgent, ClientRequest, IncomingMessageForClient as IncomingMessage, type RequestOptions, } from "ext:deno_node/http.ts"; -import type { Socket } from "ext:deno_node/net.ts"; - -export class Agent extends HttpAgent { -} +import { Agent as HttpAgent } from "ext:deno_node/_http_agent.mjs"; +import { createHttpClient } from "ext:deno_fetch/22_http_client.js"; export class Server { constructor() { @@ -53,40 +50,55 @@ export function get(...args: any[]) { return req; } -export const globalAgent = undefined; +export class Agent extends HttpAgent { + constructor(options) { + super(options); + this.defaultPort = 443; + this.protocol = "https:"; + this.maxCachedSessions = this.options.maxCachedSessions; + if (this.maxCachedSessions === undefined) { + 
this.maxCachedSessions = 100; + } + + this._sessionCache = { + map: {}, + list: [], + }; + } +} + +const globalAgent = new Agent({ + keepAlive: true, + scheduling: "lifo", + timeout: 5000, +}); + /** HttpsClientRequest class loosely follows http.ClientRequest class API. */ class HttpsClientRequest extends ClientRequest { override defaultProtocol = "https:"; - override async _createCustomClient(): Promise< - Deno.HttpClient | undefined - > { + override _getClient(): Deno.HttpClient | undefined { if (caCerts === null) { return undefined; } if (caCerts !== undefined) { - return Deno.createHttpClient({ caCerts }); - } - const status = await Deno.permissions.query({ - name: "env", - variable: "NODE_EXTRA_CA_CERTS", - }); - if (status.state !== "granted") { - caCerts = null; - return undefined; + return createHttpClient({ caCerts, http2: false }); } + // const status = await Deno.permissions.query({ + // name: "env", + // variable: "NODE_EXTRA_CA_CERTS", + // }); + // if (status.state !== "granted") { + // caCerts = null; + // return undefined; + // } const certFilename = Deno.env.get("NODE_EXTRA_CA_CERTS"); if (!certFilename) { caCerts = null; return undefined; } - const caCert = await Deno.readTextFile(certFilename); + const caCert = Deno.readTextFileSync(certFilename); caCerts = [caCert]; - return Deno.createHttpClient({ caCerts }); - } - - override _createSocket(): Socket { - // deno-lint-ignore no-explicit-any - return { authorized: true } as any; + return createHttpClient({ caCerts, http2: false }); } } @@ -107,15 +119,21 @@ export function request( // deno-lint-ignore no-explicit-any export function request(...args: any[]) { let options = {}; + if (typeof args[0] === "string") { - options = urlToHttpOptions(new URL(args.shift())); + const urlStr = args.shift(); + options = urlToHttpOptions(new URL(urlStr)); } else if (args[0] instanceof URL) { options = urlToHttpOptions(args.shift()); } + if (args[0] && typeof args[0] !== "function") { Object.assign(options, 
args.shift()); } + + options._defaultAgent = globalAgent; args.unshift(options); + return new HttpsClientRequest(args[0], args[1]); } export default { diff --git a/ext/node/polyfills/internal/child_process.ts b/ext/node/polyfills/internal/child_process.ts index 7c72cb0ca3..d4acf1db2a 100644 --- a/ext/node/polyfills/internal/child_process.ts +++ b/ext/node/polyfills/internal/child_process.ts @@ -34,7 +34,7 @@ import { ArrayPrototypeSlice, ArrayPrototypeSort, ArrayPrototypeUnshift, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, StringPrototypeToUpperCase, } from "ext:deno_node/internal/primordials.mjs"; import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; @@ -177,9 +177,9 @@ export class ChildProcess extends EventEmitter { args: cmdArgs, cwd, env: stringEnv, - stdin: toDenoStdio(stdin as NodeStdio | number), - stdout: toDenoStdio(stdout as NodeStdio | number), - stderr: toDenoStdio(stderr as NodeStdio | number), + stdin: toDenoStdio(stdin), + stdout: toDenoStdio(stdout), + stderr: toDenoStdio(stderr), windowsRawArguments: windowsVerbatimArguments, }).spawn(); this.pid = this.#process.pid; @@ -189,6 +189,16 @@ export class ChildProcess extends EventEmitter { this.stdin = Writable.fromWeb(this.#process.stdin); } + if (stdin instanceof Stream) { + this.stdin = stdin; + } + if (stdout instanceof Stream) { + this.stdout = stdout; + } + if (stderr instanceof Stream) { + this.stderr = stderr; + } + if (stdout === "pipe") { assert(this.#process.stdout); this.stdout = Readable.fromWeb(this.#process.stdout); @@ -285,15 +295,22 @@ export class ChildProcess extends EventEmitter { async #_waitForChildStreamsToClose() { const promises = [] as Array>; - if (this.stdin && !this.stdin.destroyed) { + // Don't close parent process stdin if that's passed through + if (this.stdin && !this.stdin.destroyed && this.stdin !== process.stdin) { assert(this.stdin); this.stdin.destroy(); promises.push(waitForStreamToClose(this.stdin)); } - if (this.stdout && !this.stdout.destroyed) 
{ + // Only readable streams need to be closed + if ( + this.stdout && !this.stdout.destroyed && this.stdout instanceof Readable + ) { promises.push(waitForReadableToClose(this.stdout)); } - if (this.stderr && !this.stderr.destroyed) { + // Only readable streams need to be closed + if ( + this.stderr && !this.stderr.destroyed && this.stderr instanceof Readable + ) { promises.push(waitForReadableToClose(this.stderr)); } await Promise.all(promises); @@ -317,9 +334,13 @@ const supportedNodeStdioTypes: NodeStdio[] = ["pipe", "ignore", "inherit"]; function toDenoStdio( pipe: NodeStdio | number | Stream | null | undefined, ): DenoStdio { + if (pipe instanceof Stream) { + return "inherit"; + } + if ( !supportedNodeStdioTypes.includes(pipe as NodeStdio) || - typeof pipe === "number" || pipe instanceof Stream + typeof pipe === "number" ) { notImplemented(`toDenoStdio pipe=${typeof pipe} (${pipe})`); } @@ -429,7 +450,7 @@ function copyProcessEnvToEnv( if ( Deno.env.get(name) && (!optionEnv || - !ObjectPrototypeHasOwnProperty(optionEnv, name)) + !ObjectHasOwn(optionEnv, name)) ) { env[name] = Deno.env.get(name); } @@ -441,8 +462,24 @@ function normalizeStdioOption( "pipe", "pipe", ], -) { +): [ + Stream | NodeStdio | number, + Stream | NodeStdio | number, + Stream | NodeStdio | number, + ...Array, +] { if (Array.isArray(stdio)) { + // `[0, 1, 2]` is equivalent to `"inherit"` + if ( + stdio.length === 3 && stdio[0] === 0 && stdio[1] === 1 && stdio[2] === 2 + ) { + return ["inherit", "inherit", "inherit"]; + } + + // At least 3 stdio must be created to match node + while (stdio.length < 3) { + ArrayPrototypePush(stdio, undefined); + } return stdio; } else { switch (stdio) { @@ -796,8 +833,8 @@ export function spawnSync( args, cwd, env, - stdout: toDenoStdio(normalizedStdio[1] as NodeStdio | number), - stderr: toDenoStdio(normalizedStdio[2] as NodeStdio | number), + stdout: toDenoStdio(normalizedStdio[1]), + stderr: toDenoStdio(normalizedStdio[2]), uid, gid, windowsRawArguments: 
windowsVerbatimArguments, diff --git a/ext/node/polyfills/internal/crypto/diffiehellman.ts b/ext/node/polyfills/internal/crypto/diffiehellman.ts index 3aa1f80809..a5817d59a0 100644 --- a/ext/node/polyfills/internal/crypto/diffiehellman.ts +++ b/ext/node/polyfills/internal/crypto/diffiehellman.ts @@ -6,13 +6,19 @@ import { isAnyArrayBuffer, isArrayBufferView, } from "ext:deno_node/internal/util/types.ts"; -import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import { + ERR_CRYPTO_UNKNOWN_DH_GROUP, + ERR_INVALID_ARG_TYPE, + NodeError, +} from "ext:deno_node/internal/errors.ts"; import { validateInt32, validateString, } from "ext:deno_node/internal/validators.mjs"; import { Buffer } from "ext:deno_node/buffer.ts"; import { + EllipticCurve, + ellipticCurves, getDefaultEncoding, toBuf, } from "ext:deno_node/internal/crypto/util.ts"; @@ -24,13 +30,20 @@ import type { import { KeyObject } from "ext:deno_node/internal/crypto/keys.ts"; import type { BufferEncoding } from "ext:deno_node/_global.d.ts"; +const { ops } = Deno.core; + const DH_GENERATOR = 2; export class DiffieHellman { verifyError!: number; + #prime: Buffer; + #primeLength: number; + #generator: Buffer; + #privateKey: Buffer; + #publicKey: Buffer; constructor( - sizeOrKey: unknown, + sizeOrKey: number | string | ArrayBufferView, keyEncoding?: unknown, generator?: unknown, genEncoding?: unknown, @@ -67,24 +80,68 @@ export class DiffieHellman { genEncoding = genEncoding || encoding; if (typeof sizeOrKey !== "number") { - sizeOrKey = toBuf(sizeOrKey as string, keyEncoding as string); + this.#prime = toBuf(sizeOrKey as string, keyEncoding as string); + } else { + // The supplied parameter is our primeLength, generate a suitable prime. 
+ this.#primeLength = sizeOrKey as number; + if (this.#primeLength < 2) { + throw new NodeError("ERR_OSSL_BN_BITS_TOO_SMALL", "bits too small"); + } + + this.#prime = Buffer.from( + ops.op_node_gen_prime(this.#primeLength).buffer, + ); } if (!generator) { - generator = DH_GENERATOR; + // While the commonly used cyclic group generators for DH are 2 and 5, we + // need this a buffer, because, well.. Node. + this.#generator = Buffer.alloc(4); + this.#generator.writeUint32BE(DH_GENERATOR); } else if (typeof generator === "number") { validateInt32(generator, "generator"); + this.#generator = Buffer.alloc(4); + if (generator <= 0 || generator >= 0x7fffffff) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } + this.#generator.writeUint32BE(generator); } else if (typeof generator === "string") { generator = toBuf(generator, genEncoding as string); + this.#generator = generator; } else if (!isArrayBufferView(generator) && !isAnyArrayBuffer(generator)) { throw new ERR_INVALID_ARG_TYPE( "generator", ["number", "string", "ArrayBuffer", "Buffer", "TypedArray", "DataView"], generator, ); + } else { + this.#generator = Buffer.from(generator); } - notImplemented("crypto.DiffieHellman"); + this.#checkGenerator(); + + // TODO(lev): actually implement this value + this.verifyError = 0; + } + + #checkGenerator(): number { + let generator: number; + + if (this.#generator.length == 0) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } else if (this.#generator.length == 1) { + generator = this.#generator.readUint8(); + } else if (this.#generator.length == 2) { + generator = this.#generator.readUint16BE(); + } else { + generator = this.#generator.readUint32BE(); + } + + if (generator != 2 && generator != 5) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } + + return generator; } computeSecret(otherPublicKey: ArrayBufferView): Buffer; @@ -102,67 +159,951 @@ export class DiffieHellman { outputEncoding: 
BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, - _inputEncoding?: BinaryToTextEncoding, - _outputEncoding?: BinaryToTextEncoding, + otherPublicKey: ArrayBufferView | string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.computeSecret"); + let buf; + if (inputEncoding != undefined && inputEncoding != "buffer") { + buf = Buffer.from(otherPublicKey.buffer, inputEncoding); + } else { + buf = Buffer.from(otherPublicKey.buffer); + } + + const sharedSecret = ops.op_node_dh_compute_secret( + this.#prime, + this.#privateKey, + buf, + ); + + if (outputEncoding == undefined || outputEncoding == "buffer") { + return Buffer.from(sharedSecret.buffer); + } + + return Buffer.from(sharedSecret.buffer).toString(outputEncoding); } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding): string; generateKeys(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.generateKeys"); + const generator = this.#checkGenerator(); + const [privateKey, publicKey] = ops.op_node_dh_generate2( + this.#prime, + this.#primeLength, + generator, + ); + + this.#privateKey = Buffer.from(privateKey.buffer); + this.#publicKey = Buffer.from(publicKey.buffer); + + return this.#publicKey; } getGenerator(): Buffer; getGenerator(encoding: BinaryToTextEncoding): string; - getGenerator(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getGenerator"); + getGenerator(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#generator.toString(encoding); + } + + return this.#generator; } getPrime(): Buffer; getPrime(encoding: BinaryToTextEncoding): string; - getPrime(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrime"); + 
getPrime(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#prime.toString(encoding); + } + + return this.#prime; } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#privateKey.toString(encoding); + } + + return this.#privateKey; } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding): string; - getPublicKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPublicKey"); + getPublicKey(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#publicKey.toString(encoding); + } + + return this.#publicKey; } setPrivateKey(privateKey: ArrayBufferView): void; setPrivateKey(privateKey: string, encoding: BufferEncoding): void; setPrivateKey( - _privateKey: ArrayBufferView | string, - _encoding?: BufferEncoding, + privateKey: ArrayBufferView | string, + encoding?: BufferEncoding, ) { - notImplemented("crypto.DiffieHellman.prototype.setPrivateKey"); + if (encoding == undefined || encoding == "buffer") { + this.#privateKey = Buffer.from(privateKey); + } else { + this.#privateKey = Buffer.from(privateKey, encoding); + } } setPublicKey(publicKey: ArrayBufferView): void; setPublicKey(publicKey: string, encoding: BufferEncoding): void; setPublicKey( - _publicKey: ArrayBufferView | string, - _encoding?: BufferEncoding, + publicKey: ArrayBufferView | string, + encoding?: BufferEncoding, ) { - notImplemented("crypto.DiffieHellman.prototype.setPublicKey"); + if (encoding == undefined || encoding == "buffer") { + this.#publicKey = Buffer.from(publicKey); + } else { + this.#publicKey 
= Buffer.from(publicKey, encoding); + } } } +const DH_GROUP_NAMES = [ + "modp5", + "modp14", + "modp15", + "modp16", + "modp17", + "modp18", +]; +const DH_GROUPS = { + "modp5": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA237327, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp14": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AACAA68, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp15": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, 
+ 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA93AD2CA, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp16": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 
0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34063199, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp17": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 
0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34028492, + 0x36C3FAB4, + 0xD27C7026, + 0xC1D4DCB2, + 0x602646DE, + 0xC9751E76, + 0x3DBA37BD, + 0xF8FF9406, + 0xAD9E530E, + 0xE5DB382F, + 0x413001AE, + 0xB06A53ED, + 0x9027D831, + 0x179727B0, + 0x865A8918, + 0xDA3EDBEB, + 0xCF9B14ED, + 0x44CE6CBA, + 0xCED4BB1B, + 0xDB7F1447, + 0xE6CC254B, + 0x33205151, + 0x2BD7AF42, + 0x6FB8F401, + 0x378CD2BF, + 0x5983CA01, + 0xC64B92EC, + 0xF032EA15, + 0xD1721D03, + 0xF482D7CE, + 0x6E74FEF6, + 0xD55E702F, + 0x46980C82, + 0xB5A84031, + 0x900B1C9E, + 0x59E7C97F, + 0xBEC7E8F3, + 0x23A97A7E, + 0x36CC88BE, + 0x0F1D45B7, + 0xFF585AC5, + 0x4BD407B2, + 0x2B4154AA, + 0xCC8F6D7E, + 0xBF48E1D8, + 0x14CC5ED2, + 0x0F8037E0, + 0xA79715EE, + 0xF29BE328, + 0x06A1D58B, + 0xB7C5DA76, + 0xF550AA3D, + 0x8A1FBFF0, + 0xEB19CCB1, + 0xA313D55C, + 0xDA56C9EC, + 0x2EF29632, + 0x387FE8D7, + 0x6E3C0468, + 0x043E8F66, + 0x3F4860EE, + 0x12BF2D5B, + 0x0B7474D6, + 0xE694F91E, + 0x6DCC4024, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp18": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 
0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34028492, + 0x36C3FAB4, + 0xD27C7026, + 0xC1D4DCB2, + 0x602646DE, + 0xC9751E76, + 0x3DBA37BD, + 0xF8FF9406, + 0xAD9E530E, + 0xE5DB382F, + 0x413001AE, + 0xB06A53ED, + 0x9027D831, + 0x179727B0, + 0x865A8918, + 0xDA3EDBEB, + 0xCF9B14ED, + 0x44CE6CBA, + 0xCED4BB1B, + 0xDB7F1447, + 0xE6CC254B, + 0x33205151, + 0x2BD7AF42, + 0x6FB8F401, + 0x378CD2BF, + 0x5983CA01, + 0xC64B92EC, + 0xF032EA15, + 0xD1721D03, 
+ 0xF482D7CE, + 0x6E74FEF6, + 0xD55E702F, + 0x46980C82, + 0xB5A84031, + 0x900B1C9E, + 0x59E7C97F, + 0xBEC7E8F3, + 0x23A97A7E, + 0x36CC88BE, + 0x0F1D45B7, + 0xFF585AC5, + 0x4BD407B2, + 0x2B4154AA, + 0xCC8F6D7E, + 0xBF48E1D8, + 0x14CC5ED2, + 0x0F8037E0, + 0xA79715EE, + 0xF29BE328, + 0x06A1D58B, + 0xB7C5DA76, + 0xF550AA3D, + 0x8A1FBFF0, + 0xEB19CCB1, + 0xA313D55C, + 0xDA56C9EC, + 0x2EF29632, + 0x387FE8D7, + 0x6E3C0468, + 0x043E8F66, + 0x3F4860EE, + 0x12BF2D5B, + 0x0B7474D6, + 0xE694F91E, + 0x6DBE1159, + 0x74A3926F, + 0x12FEE5E4, + 0x38777CB6, + 0xA932DF8C, + 0xD8BEC4D0, + 0x73B931BA, + 0x3BC832B6, + 0x8D9DD300, + 0x741FA7BF, + 0x8AFC47ED, + 0x2576F693, + 0x6BA42466, + 0x3AAB639C, + 0x5AE4F568, + 0x3423B474, + 0x2BF1C978, + 0x238F16CB, + 0xE39D652D, + 0xE3FDB8BE, + 0xFC848AD9, + 0x22222E04, + 0xA4037C07, + 0x13EB57A8, + 0x1A23F0C7, + 0x3473FC64, + 0x6CEA306B, + 0x4BCBC886, + 0x2F8385DD, + 0xFA9D4B7F, + 0xA2C087E8, + 0x79683303, + 0xED5BDD3A, + 0x062B3CF5, + 0xB3A278A6, + 0x6D2A13F8, + 0x3F44F82D, + 0xDF310EE0, + 0x74AB6A36, + 0x4597E899, + 0xA0255DC1, + 0x64F31CC5, + 0x0846851D, + 0xF9AB4819, + 0x5DED7EA1, + 0xB1D510BD, + 0x7EE74D73, + 0xFAF36BC3, + 0x1ECFA268, + 0x359046F4, + 0xEB879F92, + 0x4009438B, + 0x481C6CD7, + 0x889A002E, + 0xD5EE382B, + 0xC9190DA6, + 0xFC026E47, + 0x9558E447, + 0x5677E9AA, + 0x9E3050E2, + 0x765694DF, + 0xC81F56E8, + 0x80B96E71, + 0x60C980DD, + 0x98EDD3DF, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, +}; + export class DiffieHellmanGroup { verifyError!: number; + #diffiehellman: DiffieHellman; - constructor(_name: string) { - notImplemented("crypto.DiffieHellmanGroup"); + constructor(name: string) { + if (!DH_GROUP_NAMES.includes(name)) { + throw new ERR_CRYPTO_UNKNOWN_DH_GROUP(); + } + this.#diffiehellman = new DiffieHellman( + Buffer.from(DH_GROUPS[name].prime), + DH_GROUPS[name].generator, + ); + this.verifyError = 0; } computeSecret(otherPublicKey: ArrayBufferView): Buffer; @@ -180,49 +1121,64 @@ export class DiffieHellmanGroup { 
outputEncoding: BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, - _inputEncoding?: BinaryToTextEncoding, - _outputEncoding?: BinaryToTextEncoding, + otherPublicKey: ArrayBufferView | string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.computeSecret"); + return this.#diffiehellman.computeSecret( + otherPublicKey, + inputEncoding, + outputEncoding, + ); } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding): string; - generateKeys(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.generateKeys"); + generateKeys(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.generateKeys(encoding); } getGenerator(): Buffer; getGenerator(encoding: BinaryToTextEncoding): string; - getGenerator(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getGenerator"); + getGenerator(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getGenerator(encoding); } getPrime(): Buffer; getPrime(encoding: BinaryToTextEncoding): string; - getPrime(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrime"); + getPrime(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getPrime(encoding); } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getPrivateKey(encoding); } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding): string; - getPublicKey(_encoding?: BinaryToTextEncoding): Buffer | string { - 
notImplemented("crypto.DiffieHellman.prototype.getPublicKey"); + getPublicKey(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getPublicKey(encoding); } } export class ECDH { + #curve: EllipticCurve; // the selected curve + #privbuf: Buffer; // the private key + #pubbuf: Buffer; // the public key + constructor(curve: string) { validateString(curve, "curve"); - notImplemented("crypto.ECDH"); + const c = ellipticCurves.find((x) => x.name == curve); + if (c == undefined) { + throw new Error("invalid curve"); + } + + this.#curve = c; + this.#pubbuf = Buffer.alloc(this.#curve.publicKeySize); + this.#privbuf = Buffer.alloc(this.#curve.privateKeySize); } static convertKey( @@ -250,44 +1206,80 @@ export class ECDH { outputEncoding: BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, + otherPublicKey: ArrayBufferView | string, _inputEncoding?: BinaryToTextEncoding, _outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.computeSecret"); + const secretBuf = Buffer.alloc(this.#curve.sharedSecretSize); + + ops.op_node_ecdh_compute_secret( + this.#curve.name, + this.#privbuf, + otherPublicKey, + secretBuf, + ); + + return secretBuf; } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; generateKeys( - _encoding?: BinaryToTextEncoding, + encoding?: BinaryToTextEncoding, _format?: ECDHKeyFormat, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.generateKeys"); + ops.op_node_ecdh_generate_keys( + this.#curve.name, + this.#pubbuf, + this.#privbuf, + ); + + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.ECDH.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): 
Buffer | string { + if (encoding !== undefined) { + return this.#privbuf.toString(encoding); + } + return this.#privbuf; } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; getPublicKey( - _encoding?: BinaryToTextEncoding, + encoding?: BinaryToTextEncoding, _format?: ECDHKeyFormat, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.getPublicKey"); + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } setPrivateKey(privateKey: ArrayBufferView): void; setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; setPrivateKey( - _privateKey: ArrayBufferView | string, - _encoding?: BinaryToTextEncoding, + privateKey: ArrayBufferView | string, + encoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.setPrivateKey"); + this.#privbuf = privateKey; + this.#pubbuf = Buffer.alloc(this.#curve.publicKeySize); + + ops.op_node_ecdh_compute_public_key( + this.#curve.name, + this.#privbuf, + this.#pubbuf, + ); + + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } } diff --git a/ext/node/polyfills/internal/crypto/hash.ts b/ext/node/polyfills/internal/crypto/hash.ts index 00dfa19aff..34e3c1230e 100644 --- a/ext/node/polyfills/internal/crypto/hash.ts +++ b/ext/node/polyfills/internal/crypto/hash.ts @@ -66,7 +66,7 @@ export class Hash extends Transform { callback(); }, flush(callback: () => void) { - this.push(context.digest(undefined)); + this.push(this.digest(undefined)); callback(); }, }); diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts index 04678b6be1..9156ab4e1a 100644 --- a/ext/node/polyfills/internal/crypto/random.ts +++ b/ext/node/polyfills/internal/crypto/random.ts @@ -1,6 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent, Inc. and Node.js contributors. 
All rights reserved. MIT license. +// deno-lint-ignore-file camelcase + import { notImplemented } from "ext:deno_node/_utils.ts"; import randomBytes from "ext:deno_node/internal/crypto/_randomBytes.ts"; import randomFill, { @@ -8,6 +10,7 @@ import randomFill, { } from "ext:deno_node/internal/crypto/_randomFill.ts"; import randomInt from "ext:deno_node/internal/crypto/_randomInt.ts"; import { + validateBoolean, validateFunction, validateInt32, validateObject, @@ -16,7 +19,10 @@ import { isAnyArrayBuffer, isArrayBufferView, } from "ext:deno_node/internal/util/types.ts"; -import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import { + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, +} from "ext:deno_node/internal/errors.ts"; export { default as randomBytes } from "ext:deno_node/internal/crypto/_randomBytes.ts"; export { @@ -27,6 +33,11 @@ export { default as randomInt } from "ext:deno_node/internal/crypto/_randomInt.t const { core } = globalThis.__bootstrap; const { ops } = core; +const { + op_node_gen_prime_async, + op_node_check_prime_bytes_async, + op_node_check_prime_async, +} = Deno.core.ensureFastOps(); export type LargeNumberLike = | ArrayBufferView @@ -75,9 +86,9 @@ export function checkPrime( validateInt32(checks, "options.checks", 0); - let op = "op_node_check_prime_bytes_async"; + let op = op_node_check_prime_bytes_async; if (typeof candidate === "bigint") { - op = "op_node_check_prime_async"; + op = op_node_check_prime_async; } else if (!isAnyArrayBuffer(candidate) && !isArrayBufferView(candidate)) { throw new ERR_INVALID_ARG_TYPE( "candidate", @@ -92,7 +103,7 @@ export function checkPrime( ); } - core.opAsync2(op, candidate, checks).then( + op(candidate, checks).then( (result) => { callback?.(null, result); }, @@ -142,62 +153,141 @@ export interface GeneratePrimeOptions { bigint?: boolean | undefined; } -export interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { - bigint: true; -} - -export interface 
GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { - bigint?: false | undefined; -} - export function generatePrime( size: number, - callback: (err: Error | null, prime: ArrayBuffer) => void, -): void; -export function generatePrime( - size: number, - options: GeneratePrimeOptionsBigInt, - callback: (err: Error | null, prime: bigint) => void, -): void; -export function generatePrime( - size: number, - options: GeneratePrimeOptionsArrayBuffer, - callback: (err: Error | null, prime: ArrayBuffer) => void, -): void; -export function generatePrime( - size: number, - options: GeneratePrimeOptions, - callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, -): void; -export function generatePrime( - _size: number, - _options?: unknown, - _callback?: unknown, + options: GeneratePrimeOptions = {}, + callback?: (err: Error | null, prime: ArrayBuffer | bigint) => void, ) { - notImplemented("crypto.generatePrime"); + validateInt32(size, "size", 1); + if (typeof options === "function") { + callback = options; + options = {}; + } + validateFunction(callback, "callback"); + const { + bigint, + } = validateRandomPrimeJob(size, options); + op_node_gen_prime_async(size).then((prime: Uint8Array) => + bigint ? 
arrayBufferToUnsignedBigInt(prime.buffer) : prime.buffer + ).then((prime: ArrayBuffer | bigint) => { + callback?.(null, prime); + }); } -export function generatePrimeSync(size: number): ArrayBuffer; export function generatePrimeSync( size: number, - options: GeneratePrimeOptionsBigInt, -): bigint; -export function generatePrimeSync( - size: number, - options: GeneratePrimeOptionsArrayBuffer, -): ArrayBuffer; -export function generatePrimeSync( + options: GeneratePrimeOptions = {}, +): ArrayBuffer | bigint { + const { + bigint, + } = validateRandomPrimeJob(size, options); + + const prime = ops.op_node_gen_prime(size); + if (bigint) return arrayBufferToUnsignedBigInt(prime.buffer); + return prime.buffer; +} + +function validateRandomPrimeJob( size: number, options: GeneratePrimeOptions, -): ArrayBuffer | bigint; -export function generatePrimeSync( - _size: number, - _options?: - | GeneratePrimeOptionsBigInt - | GeneratePrimeOptionsArrayBuffer - | GeneratePrimeOptions, -): ArrayBuffer | bigint { - notImplemented("crypto.generatePrimeSync"); +): GeneratePrimeOptions { + validateInt32(size, "size", 1); + validateObject(options, "options"); + + let { + safe = false, + bigint = false, + add, + rem, + } = options!; + + validateBoolean(safe, "options.safe"); + validateBoolean(bigint, "options.bigint"); + + if (add !== undefined) { + if (typeof add === "bigint") { + add = unsignedBigIntToBuffer(add, "options.add"); + } else if (!isAnyArrayBuffer(add) && !isArrayBufferView(add)) { + throw new ERR_INVALID_ARG_TYPE( + "options.add", + [ + "ArrayBuffer", + "TypedArray", + "Buffer", + "DataView", + "bigint", + ], + add, + ); + } + } + + if (rem !== undefined) { + if (typeof rem === "bigint") { + rem = unsignedBigIntToBuffer(rem, "options.rem"); + } else if (!isAnyArrayBuffer(rem) && !isArrayBufferView(rem)) { + throw new ERR_INVALID_ARG_TYPE( + "options.rem", + [ + "ArrayBuffer", + "TypedArray", + "Buffer", + "DataView", + "bigint", + ], + rem, + ); + } + } + + // 
TODO(@littledivy): safe, add and rem options are not implemented. + if (safe || add || rem) { + notImplemented("safe, add and rem options are not implemented."); + } + + return { + safe, + bigint, + add, + rem, + }; +} + +/** + * 48 is the ASCII code for '0', 97 is the ASCII code for 'a'. + * @param {number} number An integer between 0 and 15. + * @returns {number} corresponding to the ASCII code of the hex representation + * of the parameter. + */ +const numberToHexCharCode = (number: number): number => + (number < 10 ? 48 : 87) + number; + +/** + * @param {ArrayBuffer} buf An ArrayBuffer. + * @return {bigint} + */ +function arrayBufferToUnsignedBigInt(buf: ArrayBuffer): bigint { + const length = buf.byteLength; + const chars: number[] = Array(length * 2); + const view = new DataView(buf); + + for (let i = 0; i < length; i++) { + const val = view.getUint8(i); + chars[2 * i] = numberToHexCharCode(val >> 4); + chars[2 * i + 1] = numberToHexCharCode(val & 0xf); + } + + return BigInt(`0x${String.fromCharCode(...chars)}`); +} + +function unsignedBigIntToBuffer(bigint: bigint, name: string) { + if (bigint < 0) { + throw new ERR_OUT_OF_RANGE(name, ">= 0", bigint); + } + + const hex = bigint.toString(16); + const padded = hex.padStart(hex.length + (hex.length % 2), 0); + return Buffer.from(padded, "hex"); } export const randomUUID = () => globalThis.crypto.randomUUID(); diff --git a/ext/node/polyfills/internal/crypto/util.ts b/ext/node/polyfills/internal/crypto/util.ts index ccb7726316..2e269b7fad 100644 --- a/ext/node/polyfills/internal/crypto/util.ts +++ b/ext/node/polyfills/internal/crypto/util.ts @@ -46,6 +46,47 @@ const digestAlgorithms = [ "sha1", ]; +export type EllipticCurve = { + name: string; + ephemeral: boolean; + privateKeySize: number; + publicKeySize: number; + sharedSecretSize: number; +}; + +export const ellipticCurves: Array = [ + { + name: "secp256k1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // Weierstrass-class EC used 
by Bitcoin + { + name: "prime256v1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // NIST P-256 EC + { + name: "secp256r1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // NIST P-256 EC (same as above) + { + name: "secp384r1", + privateKeySize: 48, + publicKeySize: 97, + sharedSecretSize: 48, + }, // NIST P-384 EC + { + name: "secp224r1", + privateKeySize: 28, + publicKeySize: 57, + sharedSecretSize: 28, + }, // NIST P-224 EC +]; + // deno-fmt-ignore const supportedCiphers = [ "aes-128-ecb", "aes-192-ecb", @@ -114,8 +155,9 @@ export function getHashes(): readonly string[] { return digestAlgorithms; } +const curveNames = ellipticCurves.map((x) => x.name); export function getCurves(): readonly string[] { - notImplemented("crypto.getCurves"); + return curveNames; } export interface SecureHeapUsage { diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 1894d8e24a..5e6b9378c6 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -13,7 +13,7 @@ * ERR_INVALID_PACKAGE_CONFIG // package.json stuff, probably useless */ -import { inspect } from "ext:deno_node/internal/util/inspect.mjs"; +import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; import { codeMap, @@ -836,6 +836,15 @@ export class ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY extends NodeError { } } +export class ERR_CRYPTO_UNKNOWN_DH_GROUP extends NodeError { + constructor() { + super( + "ERR_CRYPTO_UNKNOWN_DH_GROUP", + "Unknown DH group", + ); + } +} + export class ERR_CRYPTO_ENGINE_UNKNOWN extends NodeError { constructor(x: string) { super("ERR_CRYPTO_ENGINE_UNKNOWN", `Engine "${x}" was not found`); @@ -2065,7 +2074,7 @@ export class ERR_UNKNOWN_CREDENTIAL extends NodeError { } export class ERR_UNKNOWN_ENCODING extends NodeTypeError { constructor(x: string) { - super("ERR_UNKNOWN_ENCODING", `Unknown 
encoding: ${x}`); + super("ERR_UNKNOWN_ENCODING", format("Unknown encoding: %s", x)); } } export class ERR_UNKNOWN_FILE_EXTENSION extends NodeTypeError { diff --git a/ext/node/polyfills/internal/fs/handle.ts b/ext/node/polyfills/internal/fs/handle.ts new file mode 100644 index 0000000000..a1ee263ead --- /dev/null +++ b/ext/node/polyfills/internal/fs/handle.ts @@ -0,0 +1,36 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { EventEmitter } from "ext:deno_node/events.ts"; +import { Buffer } from "ext:deno_node/buffer.ts"; +import { promises } from "ext:deno_node/fs.ts"; +import { + BinaryOptionsArgument, + FileOptionsArgument, + TextOptionsArgument, +} from "ext:deno_node/_fs/_fs_common.ts"; + +export class FileHandle extends EventEmitter { + #rid: number; + constructor(rid: number) { + super(); + this.rid = rid; + } + + get fd() { + return this.rid; + } + + readFile( + opt?: TextOptionsArgument | BinaryOptionsArgument | FileOptionsArgument, + ): Promise { + return promises.readFile(this, opt); + } + + close(): Promise { + // Note that Deno.close is not async + return Promise.resolve(Deno.close(this.fd)); + } +} + +export default { + FileHandle, +}; diff --git a/ext/node/polyfills/internal/primordials.mjs b/ext/node/polyfills/internal/primordials.mjs index 1639efdb50..8127eebace 100644 --- a/ext/node/polyfills/internal/primordials.mjs +++ b/ext/node/polyfills/internal/primordials.mjs @@ -12,7 +12,7 @@ export const ArrayPrototypeSort = (that, ...args) => that.sort(...args); export const ArrayPrototypeUnshift = (that, ...args) => that.unshift(...args); export const ObjectAssign = Object.assign; export const ObjectCreate = Object.create; -export const ObjectPrototypeHasOwnProperty = Object.hasOwn; +export const ObjectHasOwn = Object.hasOwn; export const RegExpPrototypeTest = (that, ...args) => that.test(...args); export const RegExpPrototypeExec = RegExp.prototype.exec; export const StringFromCharCode = String.fromCharCode; diff 
--git a/ext/node/polyfills/internal/readline/interface.mjs b/ext/node/polyfills/internal/readline/interface.mjs index 3d3f99cade..bbb453df0a 100644 --- a/ext/node/polyfills/internal/readline/interface.mjs +++ b/ext/node/polyfills/internal/readline/interface.mjs @@ -44,6 +44,7 @@ import { } from "ext:deno_node/internal/readline/utils.mjs"; import { clearScreenDown, cursorTo, moveCursor } from "ext:deno_node/internal/readline/callbacks.mjs"; import { Readable } from "ext:deno_node/_stream.mjs"; +import process from "ext:deno_node/process.ts"; import { StringDecoder } from "ext:deno_node/string_decoder.ts"; import { diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs index d8409f1988..2d34db9c71 100644 --- a/ext/node/polyfills/internal/util/inspect.mjs +++ b/ext/node/polyfills/internal/util/inspect.mjs @@ -20,215 +20,18 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import * as types from "ext:deno_node/internal/util/types.ts"; import { validateObject, validateString } from "ext:deno_node/internal/validators.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; +import { createStylizeWithColor, formatValue, formatNumber, formatBigInt, styles, colors } from "ext:deno_console/01_console.js"; -import { - ALL_PROPERTIES, - getOwnNonIndexProperties, - ONLY_ENUMERABLE, -} from "ext:deno_node/internal_binding/util.ts"; -const kObjectType = 0; -const kArrayType = 1; -const kArrayExtrasType = 2; - -const kMinLineLength = 16; - -// Constants to map the iterator state. -const kWeak = 0; -const kIterator = 1; -const kMapEntries = 2; - -const kPending = 0; -const kRejected = 2; - -// Escaped control characters (plus the single quote and the backslash). Use -// empty strings to fill up unused entries. 
-// deno-fmt-ignore -const meta = [ - '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 - '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F - '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 - '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F - '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F - '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F - '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 - '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F - '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 - '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F -]; - -// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot -const isUndetectableObject = (v) => typeof v === "undefined" && v !== undefined; - -// deno-lint-ignore no-control-regex -const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]/; -// deno-lint-ignore no-control-regex -const strEscapeSequencesReplacer = /[\x00-\x1f\x27\x5c\x7f-\x9f]/g; -// deno-lint-ignore no-control-regex -const strEscapeSequencesRegExpSingle = /[\x00-\x1f\x5c\x7f-\x9f]/; -// deno-lint-ignore no-control-regex -const strEscapeSequencesReplacerSingle = /[\x00-\x1f\x5c\x7f-\x9f]/g; - -const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; -const numberRegExp = /^(0|[1-9][0-9]*)$/; -const nodeModulesRegExp = /[/\\]node_modules[/\\](.+?)(?=[/\\])/g; - -const classRegExp = /^(\s+[^(]*?)\s*{/; -// eslint-disable-next-line node-core/no-unescaped-regexp-dot -const stripCommentsRegExp = 
/(\/\/.*?\n)|(\/\*(.|\n)*?\*\/)/g; - -const inspectDefaultOptions = { - showHidden: false, - depth: 2, - colors: false, - customInspect: true, - showProxy: false, - maxArrayLength: 100, - maxStringLength: 10000, - breakLength: 80, - compact: 3, - sorted: false, - getters: false, -}; - -function getUserOptions(ctx, isCrossContext) { - const ret = { - stylize: ctx.stylize, - showHidden: ctx.showHidden, - depth: ctx.depth, - colors: ctx.colors, - customInspect: ctx.customInspect, - showProxy: ctx.showProxy, - maxArrayLength: ctx.maxArrayLength, - maxStringLength: ctx.maxStringLength, - breakLength: ctx.breakLength, - compact: ctx.compact, - sorted: ctx.sorted, - getters: ctx.getters, - ...ctx.userOptions, - }; - - // Typically, the target value will be an instance of `Object`. If that is - // *not* the case, the object may come from another vm.Context, and we want - // to avoid passing it objects from this Context in that case, so we remove - // the prototype from the returned object itself + the `stylize()` function, - // and remove all other non-primitives, including non-primitive user options. - if (isCrossContext) { - Object.setPrototypeOf(ret, null); - for (const key of Object.keys(ret)) { - if ( - (typeof ret[key] === "object" || typeof ret[key] === "function") && - ret[key] !== null - ) { - delete ret[key]; - } - } - ret.stylize = Object.setPrototypeOf((value, flavour) => { - let stylized; - try { - stylized = `${ctx.stylize(value, flavour)}`; - } catch { - // noop - } - - if (typeof stylized !== "string") return value; - // `stylized` is a string as it should be, which is safe to pass along. - return stylized; - }, null); - } - - return ret; -} - -/** - * Echos the value of any input. Tries to print the value out - * in the best way possible given the different types. 
- */ -/* Legacy: value, showHidden, depth, colors */ -export function inspect(value, opts) { - // Default options - const ctx = { - budget: {}, - indentationLvl: 0, - seen: [], - currentDepth: 0, - stylize: stylizeNoColor, - showHidden: inspectDefaultOptions.showHidden, - depth: inspectDefaultOptions.depth, - colors: inspectDefaultOptions.colors, - customInspect: inspectDefaultOptions.customInspect, - showProxy: inspectDefaultOptions.showProxy, - maxArrayLength: inspectDefaultOptions.maxArrayLength, - maxStringLength: inspectDefaultOptions.maxStringLength, - breakLength: inspectDefaultOptions.breakLength, - compact: inspectDefaultOptions.compact, - sorted: inspectDefaultOptions.sorted, - getters: inspectDefaultOptions.getters, - }; - if (arguments.length > 1) { - // Legacy... - if (arguments.length > 2) { - if (arguments[2] !== undefined) { - ctx.depth = arguments[2]; - } - if (arguments.length > 3 && arguments[3] !== undefined) { - ctx.colors = arguments[3]; - } - } - // Set user-specified options - if (typeof opts === "boolean") { - ctx.showHidden = opts; - } else if (opts) { - const optKeys = Object.keys(opts); - for (let i = 0; i < optKeys.length; ++i) { - const key = optKeys[i]; - // TODO(BridgeAR): Find a solution what to do about stylize. Either make - // this function public or add a new API with a similar or better - // functionality. - if ( - // deno-lint-ignore no-prototype-builtins - inspectDefaultOptions.hasOwnProperty(key) || - key === "stylize" - ) { - ctx[key] = opts[key]; - } else if (ctx.userOptions === undefined) { - // This is required to pass through the actual user input. 
- ctx.userOptions = opts; - } - } - } - } - if (ctx.colors) ctx.stylize = stylizeWithColor; - if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; - if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - return formatValue(ctx, value, 0); -} -const customInspectSymbol = Symbol.for("nodejs.util.inspect.custom"); -inspect.custom = customInspectSymbol; - -Object.defineProperty(inspect, "defaultOptions", { - get() { - return inspectDefaultOptions; - }, - set(options) { - validateObject(options, "options"); - return Object.assign(inspectDefaultOptions, options); - }, -}); // Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics // Each color consists of an array with the color code as first entry and the // reset code as second entry. const defaultFG = 39; const defaultBG = 49; -inspect.colors = Object.assign(Object.create(null), { +inspect.colors = { reset: [0, 0], bold: [1, 22], dim: [2, 22], // Alias: faint @@ -274,7 +77,7 @@ inspect.colors = Object.assign(Object.create(null), { bgMagentaBright: [105, defaultBG], bgCyanBright: [106, defaultBG], bgWhiteBright: [107, defaultBG], -}); +}; function defineColorAlias(target, alias) { Object.defineProperty(inspect.colors, alias, { @@ -289,1627 +92,146 @@ function defineColorAlias(target, alias) { }); } -defineColorAlias("gray", "grey"); -defineColorAlias("gray", "blackBright"); -defineColorAlias("bgGray", "bgGrey"); -defineColorAlias("bgGray", "bgBlackBright"); -defineColorAlias("dim", "faint"); -defineColorAlias("strikethrough", "crossedout"); -defineColorAlias("strikethrough", "strikeThrough"); -defineColorAlias("strikethrough", "crossedOut"); -defineColorAlias("hidden", "conceal"); -defineColorAlias("inverse", "swapColors"); -defineColorAlias("inverse", "swapcolors"); -defineColorAlias("doubleunderline", "doubleUnderline"); +defineColorAlias('gray', 'grey'); +defineColorAlias('gray', 'blackBright'); +defineColorAlias('bgGray', 'bgGrey'); +defineColorAlias('bgGray', 
'bgBlackBright'); +defineColorAlias('dim', 'faint'); +defineColorAlias('strikethrough', 'crossedout'); +defineColorAlias('strikethrough', 'strikeThrough'); +defineColorAlias('strikethrough', 'crossedOut'); +defineColorAlias('hidden', 'conceal'); +defineColorAlias('inverse', 'swapColors'); +defineColorAlias('inverse', 'swapcolors'); +defineColorAlias('doubleunderline', 'doubleUnderline'); // TODO(BridgeAR): Add function style support for more complex styles. // Don't use 'blue' not visible on cmd.exe inspect.styles = Object.assign(Object.create(null), { - special: "cyan", - number: "yellow", - bigint: "yellow", - boolean: "yellow", - undefined: "grey", - null: "bold", - string: "green", - symbol: "green", - date: "magenta", + special: 'cyan', + number: 'yellow', + bigint: 'yellow', + boolean: 'yellow', + undefined: 'grey', + null: 'bold', + string: 'green', + symbol: 'green', + date: 'magenta', // "name": intentionally not styling // TODO(BridgeAR): Highlight regular expressions properly. - regexp: "red", - module: "underline", + regexp: 'red', + module: 'underline', }); -function addQuotes(str, quotes) { - if (quotes === -1) { - return `"${str}"`; - } - if (quotes === -2) { - return `\`${str}\``; - } - return `'${str}'`; -} -// TODO(wafuwafu13): Figure out -const escapeFn = (str) => meta[str.charCodeAt(0)]; +const inspectDefaultOptions = { + indentationLvl: 0, + currentDepth: 0, + stylize: stylizeNoColor, -// Escape control characters, single quotes and the backslash. -// This is similar to JSON stringify escaping. -function strEscape(str) { - let escapeTest = strEscapeSequencesRegExp; - let escapeReplace = strEscapeSequencesReplacer; - let singleQuote = 39; + showHidden: false, + depth: 2, + colors: false, + showProxy: false, + breakLength: 80, + escapeSequences: true, + compact: 3, + sorted: false, + getters: false, - // Check for double quotes. If not present, do not escape single quotes and - // instead wrap the text in double quotes. 
If double quotes exist, check for - // backticks. If they do not exist, use those as fallback instead of the - // double quotes. - if (str.includes("'")) { - // This invalidates the charCode and therefore can not be matched for - // anymore. - if (!str.includes('"')) { - singleQuote = -1; - } else if ( - !str.includes("`") && - !str.includes("${") - ) { - singleQuote = -2; - } - if (singleQuote !== 39) { - escapeTest = strEscapeSequencesRegExpSingle; - escapeReplace = strEscapeSequencesReplacerSingle; - } - } + // node only + maxArrayLength: 100, + maxStringLength: 10000, // deno: strAbbreviateSize: 100 + customInspect: true, - // Some magic numbers that worked out fine while benchmarking with v8 6.0 - if (str.length < 5000 && !escapeTest.test(str)) { - return addQuotes(str, singleQuote); - } - if (str.length > 100) { - str = str.replace(escapeReplace, escapeFn); - return addQuotes(str, singleQuote); - } + // deno only + /** You can override the quotes preference in inspectString. + * Used by util.inspect() */ + // TODO(kt3k): Consider using symbol as a key to hide this from the public + // API. + quotes: ["'", '"', "`"], + iterableLimit: Infinity, // similar to node's maxArrayLength, but doesn't only apply to arrays + trailingComma: false, - let result = ""; - let last = 0; - const lastIndex = str.length; - for (let i = 0; i < lastIndex; i++) { - const point = str.charCodeAt(i); - if ( - point === singleQuote || - point === 92 || - point < 32 || - (point > 126 && point < 160) - ) { - if (last === i) { - result += meta[point]; - } else { - result += `${str.slice(last, i)}${meta[point]}`; + inspect, + + // TODO(@crowlKats): merge into indentationLvl + indentLevel: 0, +}; + +/** + * Echos the value of any input. Tries to print the value out + * in the best way possible given the different types. 
+ */ +/* Legacy: value, showHidden, depth, colors */ +export function inspect(value, opts) { + // Default options + const ctx = { + budget: {}, + seen: [], + ...inspectDefaultOptions, + }; + if (arguments.length > 1) { + // Legacy... + if (arguments.length > 2) { + if (arguments[2] !== undefined) { + ctx.depth = arguments[2]; + } + if (arguments.length > 3 && arguments[3] !== undefined) { + ctx.colors = arguments[3]; + } + } + // Set user-specified options + if (typeof opts === "boolean") { + ctx.showHidden = opts; + } else if (opts) { + const optKeys = Object.keys(opts); + for (let i = 0; i < optKeys.length; ++i) { + const key = optKeys[i]; + // TODO(BridgeAR): Find a solution what to do about stylize. Either make + // this function public or add a new API with a similar or better + // functionality. + if ( + // deno-lint-ignore no-prototype-builtins + inspectDefaultOptions.hasOwnProperty(key) || + key === "stylize" + ) { + ctx[key] = opts[key]; + } else if (ctx.userOptions === undefined) { + // This is required to pass through the actual user input. 
+ ctx.userOptions = opts; + } } - last = i + 1; } } - - if (last !== lastIndex) { - result += str.slice(last); - } - return addQuotes(result, singleQuote); + if (ctx.colors) ctx.stylize = createStylizeWithColor(inspect.styles, inspect.colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); } +const customInspectSymbol = Symbol.for("nodejs.util.inspect.custom"); +inspect.custom = customInspectSymbol; -function stylizeWithColor(str, styleType) { - const style = inspect.styles[styleType]; - if (style !== undefined) { - const color = inspect.colors[style]; - if (color !== undefined) { - return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; - } - } - return str; -} +Object.defineProperty(inspect, "defaultOptions", { + get() { + return inspectDefaultOptions; + }, + set(options) { + validateObject(options, "options"); + return Object.assign(inspectDefaultOptions, options); + }, +}); function stylizeNoColor(str) { return str; } -// Note: using `formatValue` directly requires the indentation level to be -// corrected by setting `ctx.indentationLvL += diff` and then to decrease the -// value afterwards again. -function formatValue( - ctx, - value, - recurseTimes, - typedArray, -) { - // Primitive types cannot have properties. - if ( - typeof value !== "object" && - typeof value !== "function" && - !isUndetectableObject(value) - ) { - return formatPrimitive(ctx.stylize, value, ctx); - } - if (value === null) { - return ctx.stylize("null", "null"); - } - - // Memorize the context for custom inspection on proxies. - const context = value; - // Always check for proxies to prevent side effects and to prevent triggering - // any proxy handlers. 
- // TODO(wafuwafu13): Set Proxy - const proxy = undefined; - // const proxy = getProxyDetails(value, !!ctx.showProxy); - // if (proxy !== undefined) { - // if (ctx.showProxy) { - // return formatProxy(ctx, proxy, recurseTimes); - // } - // value = proxy; - // } - - // Provide a hook for user-specified inspect functions. - // Check that value is an object with an inspect function on it. - if (ctx.customInspect) { - const maybeCustom = value[customInspectSymbol]; - if ( - typeof maybeCustom === "function" && - // Filter out the util module, its inspect function is special. - maybeCustom !== inspect && - // Also filter out any prototype objects using the circular check. - !(value.constructor && value.constructor.prototype === value) - ) { - // This makes sure the recurseTimes are reported as before while using - // a counter internally. - const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; - const isCrossContext = proxy !== undefined || - !(context instanceof Object); - const ret = maybeCustom.call( - context, - depth, - getUserOptions(ctx, isCrossContext), - ); - // If the custom inspection method returned `this`, don't go into - // infinite recursion. - if (ret !== context) { - if (typeof ret !== "string") { - return formatValue(ctx, ret, recurseTimes); - } - return ret.replace(/\n/g, `\n${" ".repeat(ctx.indentationLvl)}`); - } - } - } - - // Using an array here is actually better for the average case than using - // a Set. `seen` will only check for the depth and will never grow too large. 
- if (ctx.seen.includes(value)) { - let index = 1; - if (ctx.circular === undefined) { - ctx.circular = new Map(); - ctx.circular.set(value, index); - } else { - index = ctx.circular.get(value); - if (index === undefined) { - index = ctx.circular.size + 1; - ctx.circular.set(value, index); - } - } - return ctx.stylize(`[Circular *${index}]`, "special"); - } - - return formatRaw(ctx, value, recurseTimes, typedArray); -} - -function formatRaw(ctx, value, recurseTimes, typedArray) { - let keys; - let protoProps; - if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { - protoProps = []; - } - - const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); - // Reset the variable to check for this later on. - if (protoProps !== undefined && protoProps.length === 0) { - protoProps = undefined; - } - - let tag = value[Symbol.toStringTag]; - // Only list the tag in case it's non-enumerable / not an own property. - // Otherwise we'd print this twice. - if ( - typeof tag !== "string" - // TODO(wafuwafu13): Implement - // (tag !== "" && - // (ctx.showHidden - // ? Object.prototype.hasOwnProperty - // : Object.prototype.propertyIsEnumerable)( - // value, - // Symbol.toStringTag, - // )) - ) { - tag = ""; - } - let base = ""; - let formatter = getEmptyFormatArray; - let braces; - let noIterator = true; - let i = 0; - const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; - - let extrasType = kObjectType; - - // Iterators and the rest are split to reduce checks. - // We have to check all values in case the constructor is set to null. - // Otherwise it would not possible to identify all types properly. - if (value[Symbol.iterator] || constructor === null) { - noIterator = false; - if (Array.isArray(value)) { - // Only set the constructor for non ordinary ("Array [...]") arrays. - const prefix = (constructor !== "Array" || tag !== "") - ? 
getPrefix(constructor, tag, "Array", `(${value.length})`) - : ""; - keys = getOwnNonIndexProperties(value, filter); - braces = [`${prefix}[`, "]"]; - if (value.length === 0 && keys.length === 0 && protoProps === undefined) { - return `${braces[0]}]`; - } - extrasType = kArrayExtrasType; - formatter = formatArray; - } else if (types.isSet(value)) { - const size = value.size; - const prefix = getPrefix(constructor, tag, "Set", `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null - ? formatSet.bind(null, value) - : formatSet.bind(null, value.values()); - if (size === 0 && keys.length === 0 && protoProps === undefined) { - return `${prefix}{}`; - } - braces = [`${prefix}{`, "}"]; - } else if (types.isMap(value)) { - const size = value.size; - const prefix = getPrefix(constructor, tag, "Map", `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null - ? formatMap.bind(null, value) - : formatMap.bind(null, value.entries()); - if (size === 0 && keys.length === 0 && protoProps === undefined) { - return `${prefix}{}`; - } - braces = [`${prefix}{`, "}"]; - } else if (types.isTypedArray(value)) { - keys = getOwnNonIndexProperties(value, filter); - const bound = value; - const fallback = ""; - if (constructor === null) { - // TODO(wafuwafu13): Implement - // fallback = TypedArrayPrototypeGetSymbolToStringTag(value); - // // Reconstruct the array information. - // bound = new primordials[fallback](value); - } - const size = value.length; - const prefix = getPrefix(constructor, tag, fallback, `(${size})`); - braces = [`${prefix}[`, "]"]; - if (value.length === 0 && keys.length === 0 && !ctx.showHidden) { - return `${braces[0]}]`; - } - // Special handle the value. The original value is required below. The - // bound function is required to reconstruct missing information. 
- (formatter) = formatTypedArray.bind(null, bound, size); - extrasType = kArrayExtrasType; - } else if (types.isMapIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces("Map", tag); - // Add braces to the formatter parameters. - (formatter) = formatIterator.bind(null, braces); - } else if (types.isSetIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces("Set", tag); - // Add braces to the formatter parameters. - (formatter) = formatIterator.bind(null, braces); - } else { - noIterator = true; - } - } - if (noIterator) { - keys = getKeys(value, ctx.showHidden); - braces = ["{", "}"]; - if (constructor === "Object") { - if (types.isArgumentsObject(value)) { - braces[0] = "[Arguments] {"; - } else if (tag !== "") { - braces[0] = `${getPrefix(constructor, tag, "Object")}{`; - } - if (keys.length === 0 && protoProps === undefined) { - return `${braces[0]}}`; - } - } else if (typeof value === "function") { - base = getFunctionBase(value, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, "special"); - } - } else if (types.isRegExp(value)) { - // Make RegExps say that they are RegExps - base = RegExp(constructor !== null ? value : new RegExp(value)) - .toString(); - const prefix = getPrefix(constructor, tag, "RegExp"); - if (prefix !== "RegExp ") { - base = `${prefix}${base}`; - } - if ( - (keys.length === 0 && protoProps === undefined) || - (recurseTimes > ctx.depth && ctx.depth !== null) - ) { - return ctx.stylize(base, "regexp"); - } - } else if (types.isDate(value)) { - // Make dates with properties first say the date - base = Number.isNaN(value.getTime()) - ? 
value.toString() - : value.toISOString(); - const prefix = getPrefix(constructor, tag, "Date"); - if (prefix !== "Date ") { - base = `${prefix}${base}`; - } - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, "date"); - } - } else if (value instanceof Error) { - base = formatError(value, constructor, tag, ctx, keys); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else if (types.isAnyArrayBuffer(value)) { - // Fast path for ArrayBuffer and SharedArrayBuffer. - // Can't do the same for DataView because it has a non-primitive - // .buffer property that we need to recurse for. - const arrayType = types.isArrayBuffer(value) - ? "ArrayBuffer" - : "SharedArrayBuffer"; - const prefix = getPrefix(constructor, tag, arrayType); - if (typedArray === undefined) { - (formatter) = formatArrayBuffer; - } else if (keys.length === 0 && protoProps === undefined) { - return prefix + - `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength)} }`; - } - braces[0] = `${prefix}{`; - Array.prototype.unshift.call(keys, "byteLength"); - } else if (types.isDataView(value)) { - braces[0] = `${getPrefix(constructor, tag, "DataView")}{`; - // .buffer goes last, it's not a primitive like the others. - Array.prototype.unshift.call(keys, "byteLength", "byteOffset", "buffer"); - } else if (types.isPromise(value)) { - braces[0] = `${getPrefix(constructor, tag, "Promise")}{`; - (formatter) = formatPromise; - } else if (types.isWeakSet(value)) { - braces[0] = `${getPrefix(constructor, tag, "WeakSet")}{`; - (formatter) = ctx.showHidden ? formatWeakSet : formatWeakCollection; - } else if (types.isWeakMap(value)) { - braces[0] = `${getPrefix(constructor, tag, "WeakMap")}{`; - (formatter) = ctx.showHidden ? formatWeakMap : formatWeakCollection; - } else if (types.isModuleNamespaceObject(value)) { - braces[0] = `${getPrefix(constructor, tag, "Module")}{`; - // Special handle keys for namespace objects. 
- (formatter) = formatNamespaceObject.bind(null, keys); - } else if (types.isBoxedPrimitive(value)) { - base = getBoxedBase(value, ctx, keys, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else { - if (keys.length === 0 && protoProps === undefined) { - // TODO(wafuwafu13): Implement - // if (types.isExternal(value)) { - // const address = getExternalValue(value).toString(16); - // return ctx.stylize(`[External: ${address}]`, 'special'); - // } - return `${getCtxStyle(value, constructor, tag)}{}`; - } - braces[0] = `${getCtxStyle(value, constructor, tag)}{`; - } - } - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - if (constructor !== null) { - constructorName = `[${constructorName}]`; - } - return ctx.stylize(constructorName, "special"); - } - recurseTimes += 1; - - ctx.seen.push(value); - ctx.currentDepth = recurseTimes; - let output; - const indentationLvl = ctx.indentationLvl; - try { - output = formatter(ctx, value, recurseTimes); - for (i = 0; i < keys.length; i++) { - output.push( - formatProperty(ctx, value, recurseTimes, keys[i], extrasType), - ); - } - if (protoProps !== undefined) { - output.push(...protoProps); - } - } catch (err) { - const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); - } - if (ctx.circular !== undefined) { - const index = ctx.circular.get(value); - if (index !== undefined) { - const reference = ctx.stylize(``, "special"); - // Add reference always to the very beginning of the output. - if (ctx.compact !== true) { - base = base === "" ? reference : `${reference} ${base}`; - } else { - braces[0] = `${reference} ${braces[0]}`; - } - } - } - ctx.seen.pop(); - - if (ctx.sorted) { - const comparator = ctx.sorted === true ? 
undefined : ctx.sorted; - if (extrasType === kObjectType) { - output = output.sort(comparator); - } else if (keys.length > 1) { - const sorted = output.slice(output.length - keys.length).sort(comparator); - output.splice(output.length - keys.length, keys.length, ...sorted); - } - } - - const res = reduceToSingleString( - ctx, - output, - base, - braces, - extrasType, - recurseTimes, - value, - ); - const budget = ctx.budget[ctx.indentationLvl] || 0; - const newLength = budget + res.length; - ctx.budget[ctx.indentationLvl] = newLength; - // If any indentationLvl exceeds this limit, limit further inspecting to the - // minimum. Otherwise the recursive algorithm might continue inspecting the - // object even though the maximum string size (~2 ** 28 on 32 bit systems and - // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at - // exactly 2 ** 27 but a bit higher. This depends on the object shape. - // This limit also makes sure that huge objects don't block the event loop - // significantly. - if (newLength > 2 ** 27) { - ctx.depth = -1; - } - return res; -} - const builtInObjects = new Set( Object.getOwnPropertyNames(globalThis).filter((e) => /^[A-Z][a-zA-Z0-9]+$/.test(e) ), ); -function addPrototypeProperties( - ctx, - main, - obj, - recurseTimes, - output, -) { - let depth = 0; - let keys; - let keySet; - do { - if (depth !== 0 || main === obj) { - obj = Object.getPrototypeOf(obj); - // Stop as soon as a null prototype is encountered. - if (obj === null) { - return; - } - // Stop as soon as a built-in object type is detected. - const descriptor = Object.getOwnPropertyDescriptor(obj, "constructor"); - if ( - descriptor !== undefined && - typeof descriptor.value === "function" && - builtInObjects.has(descriptor.value.name) - ) { - return; - } - } - - if (depth === 0) { - keySet = new Set(); - } else { - Array.prototype.forEach.call(keys, (key) => keySet.add(key)); - } - // Get all own property names and symbols. 
- keys = Reflect.ownKeys(obj); - Array.prototype.push.call(ctx.seen, main); - for (const key of keys) { - // Ignore the `constructor` property and keys that exist on layers above. - if ( - key === "constructor" || - // deno-lint-ignore no-prototype-builtins - main.hasOwnProperty(key) || - (depth !== 0 && keySet.has(key)) - ) { - continue; - } - const desc = Object.getOwnPropertyDescriptor(obj, key); - if (typeof desc.value === "function") { - continue; - } - const value = formatProperty( - ctx, - obj, - recurseTimes, - key, - kObjectType, - desc, - main, - ); - if (ctx.colors) { - // Faint! - Array.prototype.push.call(output, `\u001b[2m${value}\u001b[22m`); - } else { - Array.prototype.push.call(output, value); - } - } - Array.prototype.pop.call(ctx.seen); - // Limit the inspection to up to three prototype layers. Using `recurseTimes` - // is not a good choice here, because it's as if the properties are declared - // on the current object from the users perspective. - } while (++depth !== 3); -} - -function getConstructorName( - obj, - ctx, - recurseTimes, - protoProps, -) { - let firstProto; - const tmp = obj; - while (obj || isUndetectableObject(obj)) { - const descriptor = Object.getOwnPropertyDescriptor(obj, "constructor"); - if ( - descriptor !== undefined && - typeof descriptor.value === "function" && - descriptor.value.name !== "" && - isInstanceof(tmp, descriptor.value) - ) { - if ( - protoProps !== undefined && - (firstProto !== obj || - !builtInObjects.has(descriptor.value.name)) - ) { - addPrototypeProperties( - ctx, - tmp, - firstProto || tmp, - recurseTimes, - protoProps, - ); - } - return descriptor.value.name; - } - - obj = Object.getPrototypeOf(obj); - if (firstProto === undefined) { - firstProto = obj; - } - } - - if (firstProto === null) { - return null; - } - - // TODO(wafuwafu13): Implement - // const res = internalGetConstructorName(tmp); - const res = undefined; - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return `${res} `; - } 
- - const protoConstr = getConstructorName( - firstProto, - ctx, - recurseTimes + 1, - protoProps, - ); - - if (protoConstr === null) { - return `${res} <${ - inspect(firstProto, { - ...ctx, - customInspect: false, - depth: -1, - }) - }>`; - } - - return `${res} <${protoConstr}>`; -} - -function formatPrimitive(fn, value, ctx) { - if (typeof value === "string") { - let trailer = ""; - if (value.length > ctx.maxStringLength) { - const remaining = value.length - ctx.maxStringLength; - value = value.slice(0, ctx.maxStringLength); - trailer = `... ${remaining} more character${remaining > 1 ? "s" : ""}`; - } - if ( - ctx.compact !== true && - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. - value.length > kMinLineLength && - value.length > ctx.breakLength - ctx.indentationLvl - 4 - ) { - return value - .split(/(?<=\n)/) - .map((line) => fn(strEscape(line), "string")) - .join(` +\n${" ".repeat(ctx.indentationLvl + 2)}`) + trailer; - } - return fn(strEscape(value), "string") + trailer; - } - if (typeof value === "number") { - return formatNumber(fn, value); - } - if (typeof value === "bigint") { - return formatBigInt(fn, value); - } - if (typeof value === "boolean") { - return fn(`${value}`, "boolean"); - } - if (typeof value === "undefined") { - return fn("undefined", "undefined"); - } - // es6 symbol primitive - return fn(value.toString(), "symbol"); -} - -// Return a new empty array to push in the results of the default formatter. 
-function getEmptyFormatArray() { - return []; -} - -function isInstanceof(object, proto) { - try { - return object instanceof proto; - } catch { - return false; - } -} - -function getPrefix(constructor, tag, fallback, size = "") { - if (constructor === null) { - if (tag !== "" && fallback !== tag) { - return `[${fallback}${size}: null prototype] [${tag}] `; - } - return `[${fallback}${size}: null prototype] `; - } - - if (tag !== "" && constructor !== tag) { - return `${constructor}${size} [${tag}] `; - } - return `${constructor}${size} `; -} - -function formatArray(ctx, value, recurseTimes) { - const valLen = value.length; - const len = Math.min(Math.max(0, ctx.maxArrayLength), valLen); - - const remaining = valLen - len; - const output = []; - for (let i = 0; i < len; i++) { - // Special handle sparse arrays. - // deno-lint-ignore no-prototype-builtins - if (!value.hasOwnProperty(i)) { - return formatSpecialArray(ctx, value, recurseTimes, len, output, i); - } - output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); - } - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); - } - return output; -} - -function getCtxStyle(_value, constructor, tag) { - let fallback = ""; - if (constructor === null) { - // TODO(wafuwafu13): Implement - // fallback = internalGetConstructorName(value); - if (fallback === tag) { - fallback = "Object"; - } - } - return getPrefix(constructor, tag, fallback); -} - -// Look up the keys of the object. -function getKeys(value, showHidden) { - let keys; - const symbols = Object.getOwnPropertySymbols(value); - if (showHidden) { - keys = Object.getOwnPropertyNames(value); - if (symbols.length !== 0) { - Array.prototype.push.apply(keys, symbols); - } - } else { - // This might throw if `value` is a Module Namespace Object from an - // unevaluated module, but we don't want to perform the actual type - // check because it's expensive. 
- // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 - // and modify this logic as needed. - try { - keys = Object.keys(value); - } catch (_err) { - // TODO(wafuwafu13): Implement - // assert(isNativeError(err) && err.name === 'ReferenceError' && - // isModuleNamespaceObject(value)); - keys = Object.getOwnPropertyNames(value); - } - if (symbols.length !== 0) { - // TODO(wafuwafu13): Implement - // const filter = (key: any) => - // - // Object.prototype.propertyIsEnumerable(value, key); - // Array.prototype.push.apply( - // keys, - // symbols.filter(filter), - // ); - } - } - return keys; -} - -function formatSet(value, ctx, _ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const v of value) { - Array.prototype.push.call(output, formatValue(ctx, v, recurseTimes)); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatMap(value, ctx, _gnored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const { 0: k, 1: v } of value) { - output.push( - `${formatValue(ctx, k, recurseTimes)} => ${ - formatValue(ctx, v, recurseTimes) - }`, - ); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatTypedArray( - value, - length, - ctx, - _ignored, - recurseTimes, -) { - const maxLength = Math.min(Math.max(0, ctx.maxArrayLength), length); - const remaining = value.length - maxLength; - const output = new Array(maxLength); - const elementFormatter = value.length > 0 && typeof value[0] === "number" - ? formatNumber - : formatBigInt; - for (let i = 0; i < maxLength; ++i) { - output[i] = elementFormatter(ctx.stylize, value[i]); - } - if (remaining > 0) { - output[maxLength] = `... ${remaining} more item${remaining > 1 ? "s" : ""}`; - } - if (ctx.showHidden) { - // .buffer goes last, it's not a primitive like the others. - // All besides `BYTES_PER_ELEMENT` are actually getters. 
- ctx.indentationLvl += 2; - for ( - const key of [ - "BYTES_PER_ELEMENT", - "length", - "byteLength", - "byteOffset", - "buffer", - ] - ) { - const str = formatValue(ctx, value[key], recurseTimes, true); - Array.prototype.push.call(output, `[${key}]: ${str}`); - } - ctx.indentationLvl -= 2; - } - return output; -} - -function getIteratorBraces(type, tag) { - if (tag !== `${type} Iterator`) { - if (tag !== "") { - tag += "] ["; - } - tag += `${type} Iterator`; - } - return [`[${tag}] {`, "}"]; -} - -function formatIterator(braces, ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const { 0: entries, 1: isKeyValue } = previewEntries(value, true); - const { 0: entries, 1: isKeyValue } = value; - if (isKeyValue) { - // Mark entry iterators as such. - braces[0] = braces[0].replace(/ Iterator] {$/, " Entries] {"); - return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); - } - - return formatSetIterInner(ctx, recurseTimes, entries, kIterator); -} - -function getFunctionBase(value, constructor, tag) { - const stringified = Function.prototype.toString.call(value); - if (stringified.slice(0, 5) === "class" && stringified.endsWith("}")) { - const slice = stringified.slice(5, -1); - const bracketIndex = slice.indexOf("{"); - if ( - bracketIndex !== -1 && - (!slice.slice(0, bracketIndex).includes("(") || - // Slow path to guarantee that it's indeed a class. 
- classRegExp.test(slice.replace(stripCommentsRegExp))) - ) { - return getClassBase(value, constructor, tag); - } - } - let type = "Function"; - if (types.isGeneratorFunction(value)) { - type = `Generator${type}`; - } - if (types.isAsyncFunction(value)) { - type = `Async${type}`; - } - let base = `[${type}`; - if (constructor === null) { - base += " (null prototype)"; - } - if (value.name === "") { - base += " (anonymous)"; - } else { - base += `: ${value.name}`; - } - base += "]"; - if (constructor !== type && constructor !== null) { - base += ` ${constructor}`; - } - if (tag !== "" && constructor !== tag) { - base += ` [${tag}]`; - } - return base; -} - -function formatError( - err, - constructor, - tag, - ctx, - keys, -) { - const name = err.name != null ? String(err.name) : "Error"; - let len = name.length; - let stack = err.stack ? String(err.stack) : err.toString(); - - // Do not "duplicate" error properties that are already included in the output - // otherwise. - if (!ctx.showHidden && keys.length !== 0) { - for (const name of ["name", "message", "stack"]) { - const index = keys.indexOf(name); - // Only hide the property in case it's part of the original stack - if (index !== -1 && stack.includes(err[name])) { - keys.splice(index, 1); - } - } - } - - // A stack trace may contain arbitrary data. Only manipulate the output - // for "regular errors" (errors that "look normal") for now. 
- if ( - constructor === null || - (name.endsWith("Error") && - stack.startsWith(name) && - (stack.length === len || stack[len] === ":" || stack[len] === "\n")) - ) { - let fallback = "Error"; - if (constructor === null) { - const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || - stack.match(/^([a-z_A-Z0-9-]*Error)$/); - fallback = (start && start[1]) || ""; - len = fallback.length; - fallback = fallback || "Error"; - } - const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); - if (name !== prefix) { - if (prefix.includes(name)) { - if (len === 0) { - stack = `${prefix}: ${stack}`; - } else { - stack = `${prefix}${stack.slice(len)}`; - } - } else { - stack = `${prefix} [${name}]${stack.slice(len)}`; - } - } - } - // Ignore the error message if it's contained in the stack. - let pos = (err.message && stack.indexOf(err.message)) || -1; - if (pos !== -1) { - pos += err.message.length; - } - // Wrap the error in brackets in case it has no stack trace. - const stackStart = stack.indexOf("\n at", pos); - if (stackStart === -1) { - stack = `[${stack}]`; - } else if (ctx.colors) { - // Highlight userland code and node modules. - let newStack = stack.slice(0, stackStart); - const lines = stack.slice(stackStart + 1).split("\n"); - for (const line of lines) { - // const core = line.match(coreModuleRegExp); - // TODO(wafuwafu13): Implement - // if (core !== null && NativeModule.exists(core[1])) { - // newStack += `\n${ctx.stylize(line, 'undefined')}`; - // } else { - // This adds underscores to all node_modules to quickly identify them. - let nodeModule; - newStack += "\n"; - let pos = 0; - // deno-lint-ignore no-cond-assign - while (nodeModule = nodeModulesRegExp.exec(line)) { - // '/node_modules/'.length === 14 - newStack += line.slice(pos, nodeModule.index + 14); - newStack += ctx.stylize(nodeModule[1], "module"); - pos = nodeModule.index + nodeModule[0].length; - } - newStack += pos === 0 ? 
line : line.slice(pos); - // } - } - stack = newStack; - } - // The message and the stack have to be indented as well! - if (ctx.indentationLvl !== 0) { - const indentation = " ".repeat(ctx.indentationLvl); - stack = stack.replace(/\n/g, `\n${indentation}`); - } - return stack; -} - -let hexSlice; - -function formatArrayBuffer(ctx, value) { - let buffer; - try { - buffer = new Uint8Array(value); - } catch { - return [ctx.stylize("(detached)", "special")]; - } - // TODO(wafuwafu13): Implement - // if (hexSlice === undefined) - // hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); - let str = hexSlice(buffer, 0, Math.min(ctx.maxArrayLength, buffer.length)) - .replace(/(.{2})/g, "$1 ").trim(); - - const remaining = buffer.length - ctx.maxArrayLength; - if (remaining > 0) { - str += ` ... ${remaining} more byte${remaining > 1 ? "s" : ""}`; - } - return [`${ctx.stylize("[Uint8Contents]", "special")}: <${str}>`]; -} - -function formatNumber(fn, value) { - // Format -0 as '-0'. Checking `value === -0` won't distinguish 0 from -0. - return fn(Object.is(value, -0) ? "-0" : `${value}`, "number"); -} - -function formatPromise(ctx, value, recurseTimes) { - let output; - // TODO(wafuwafu13): Implement - // const { 0: state, 1: result } = getPromiseDetails(value); - const { 0: state, 1: result } = value; - if (state === kPending) { - output = [ctx.stylize("", "special")]; - } else { - ctx.indentationLvl += 2; - const str = formatValue(ctx, result, recurseTimes); - ctx.indentationLvl -= 2; - output = [ - state === kRejected - ? 
`${ctx.stylize("", "special")} ${str}` - : str, - ]; - } - return output; -} - -function formatWeakCollection(ctx) { - return [ctx.stylize("", "special")]; -} - -function formatWeakSet(ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const entries = previewEntries(value); - const entries = value; - return formatSetIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatWeakMap(ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const entries = previewEntries(value); - const entries = value; - return formatMapIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatProperty( - ctx, - value, - recurseTimes, - key, - type, - desc, - original = value, -) { - let name, str; - let extra = " "; - desc = desc || Object.getOwnPropertyDescriptor(value, key) || - { value: value[key], enumerable: true }; - if (desc.value !== undefined) { - const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; - ctx.indentationLvl += diff; - str = formatValue(ctx, desc.value, recurseTimes); - if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { - extra = `\n${" ".repeat(ctx.indentationLvl)}`; - } - ctx.indentationLvl -= diff; - } else if (desc.get !== undefined) { - const label = desc.set !== undefined ? 
"Getter/Setter" : "Getter"; - const s = ctx.stylize; - const sp = "special"; - if ( - ctx.getters && (ctx.getters === true || - (ctx.getters === "get" && desc.set === undefined) || - (ctx.getters === "set" && desc.set !== undefined)) - ) { - try { - const tmp = desc.get.call(original); - ctx.indentationLvl += 2; - if (tmp === null) { - str = `${s(`[${label}:`, sp)} ${s("null", "null")}${s("]", sp)}`; - } else if (typeof tmp === "object") { - str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; - } else { - const primitive = formatPrimitive(s, tmp, ctx); - str = `${s(`[${label}:`, sp)} ${primitive}${s("]", sp)}`; - } - ctx.indentationLvl -= 2; - } catch (err) { - const message = ``; - str = `${s(`[${label}:`, sp)} ${message}${s("]", sp)}`; - } - } else { - str = ctx.stylize(`[${label}]`, sp); - } - } else if (desc.set !== undefined) { - str = ctx.stylize("[Setter]", "special"); - } else { - str = ctx.stylize("undefined", "undefined"); - } - if (type === kArrayType) { - return str; - } - if (typeof key === "symbol") { - const tmp = key.toString().replace(strEscapeSequencesReplacer, escapeFn); - - name = `[${ctx.stylize(tmp, "symbol")}]`; - } else if (key === "__proto__") { - name = "['__proto__']"; - } else if (desc.enumerable === false) { - const tmp = key.replace(strEscapeSequencesReplacer, escapeFn); - - name = `[${tmp}]`; - } else if (keyStrRegExp.test(key)) { - name = ctx.stylize(key, "name"); - } else { - name = ctx.stylize(strEscape(key), "string"); - } - return `${name}:${extra}${str}`; -} - -function handleMaxCallStackSize( - _ctx, - _err, - _constructorName, - _indentationLvl, -) { - // TODO(wafuwafu13): Implement - // if (types.isStackOverflowError(err)) { - // ctx.seen.pop(); - // ctx.indentationLvl = indentationLvl; - // return ctx.stylize( - // `[${constructorName}: Inspection interrupted ` + - // 'prematurely. 
Maximum call stack size exceeded.]', - // 'special' - // ); - // } - // /* c8 ignore next */ - // assert.fail(err.stack); -} - -// deno-lint-ignore no-control-regex -const colorRegExp = /\u001b\[\d\d?m/g; -function removeColors(str) { - return str.replace(colorRegExp, ""); -} - -function isBelowBreakLength(ctx, output, start, base) { - // Each entry is separated by at least a comma. Thus, we start with a total - // length of at least `output.length`. In addition, some cases have a - // whitespace in-between each other that is added to the total as well. - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. Check the performance overhead and make it an opt-in in case it's - // significant. - let totalLength = output.length + start; - if (totalLength + output.length > ctx.breakLength) { - return false; - } - for (let i = 0; i < output.length; i++) { - if (ctx.colors) { - totalLength += removeColors(output[i]).length; - } else { - totalLength += output[i].length; - } - if (totalLength > ctx.breakLength) { - return false; - } - } - // Do not line up properties on the same line if `base` contains line breaks. - return base === "" || !base.includes("\n"); -} - -function formatBigInt(fn, value) { - return fn(`${value}n`, "bigint"); -} - -function formatNamespaceObject( - keys, - ctx, - value, - recurseTimes, -) { - const output = new Array(keys.length); - for (let i = 0; i < keys.length; i++) { - try { - output[i] = formatProperty( - ctx, - value, - recurseTimes, - keys[i], - kObjectType, - ); - } catch (_err) { - // TODO(wafuwfu13): Implement - // assert(isNativeError(err) && err.name === 'ReferenceError'); - // Use the existing functionality. This makes sure the indentation and - // line breaks are always correct. Otherwise it is very difficult to keep - // this aligned, even though this is a hacky way of dealing with this. 
- const tmp = { [keys[i]]: "" }; - output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); - const pos = output[i].lastIndexOf(" "); - // We have to find the last whitespace and have to replace that value as - // it will be visualized as a regular string. - output[i] = output[i].slice(0, pos + 1) + - ctx.stylize("", "special"); - } - } - // Reset the keys to an empty array. This prevents duplicated inspection. - keys.length = 0; - return output; -} - -// The array is sparse and/or has extra keys -function formatSpecialArray( - ctx, - value, - recurseTimes, - maxLength, - output, - i, -) { - const keys = Object.keys(value); - let index = i; - for (; i < keys.length && output.length < maxLength; i++) { - const key = keys[i]; - const tmp = +key; - // Arrays can only have up to 2^32 - 1 entries - if (tmp > 2 ** 32 - 2) { - break; - } - if (`${index}` !== key) { - if (!numberRegExp.test(key)) { - break; - } - const emptyItems = tmp - index; - const ending = emptyItems > 1 ? "s" : ""; - const message = `<${emptyItems} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); - index = tmp; - if (output.length === maxLength) { - break; - } - } - output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); - index++; - } - const remaining = value.length - index; - if (output.length !== maxLength) { - if (remaining > 0) { - const ending = remaining > 1 ? "s" : ""; - const message = `<${remaining} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); - } - } else if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 
"s" : ""}`); - } - return output; -} - -function getBoxedBase( - value, - ctx, - keys, - constructor, - tag, -) { - let type; - if (types.isNumberObject(value)) { - type = "Number"; - } else if (types.isStringObject(value)) { - type = "String"; - // For boxed Strings, we have to remove the 0-n indexed entries, - // since they just noisy up the output and are redundant - // Make boxed primitive Strings look like such - keys.splice(0, value.length); - } else if (types.isBooleanObject(value)) { - type = "Boolean"; - } else if (types.isBigIntObject(value)) { - type = "BigInt"; - } else { - type = "Symbol"; - } - let base = `[${type}`; - if (type !== constructor) { - if (constructor === null) { - base += " (null prototype)"; - } else { - base += ` (${constructor})`; - } - } - - base += `: ${formatPrimitive(stylizeNoColor, value.valueOf(), ctx)}]`; - if (tag !== "" && tag !== constructor) { - base += ` [${tag}]`; - } - if (keys.length !== 0 || ctx.stylize === stylizeNoColor) { - return base; - } - return ctx.stylize(base, type.toLowerCase()); -} - -function getClassBase(value, constructor, tag) { - // deno-lint-ignore no-prototype-builtins - const hasName = value.hasOwnProperty("name"); - const name = (hasName && value.name) || "(anonymous)"; - let base = `class ${name}`; - if (constructor !== "Function" && constructor !== null) { - base += ` [${constructor}]`; - } - if (tag !== "" && constructor !== tag) { - base += ` [${tag}]`; - } - if (constructor !== null) { - const superName = Object.getPrototypeOf(value).name; - if (superName) { - base += ` extends ${superName}`; - } - } else { - base += " extends [null prototype]"; - } - return `[${base}]`; -} - -function reduceToSingleString( - ctx, - output, - base, - braces, - extrasType, - recurseTimes, - value, -) { - if (ctx.compact !== true) { - if (typeof ctx.compact === "number" && ctx.compact >= 1) { - // Memorize the original output length. 
In case the output is grouped, - // prevent lining up the entries on a single line. - const entries = output.length; - // Group array elements together if the array contains at least six - // separate entries. - if (extrasType === kArrayExtrasType && entries > 6) { - output = groupArrayElements(ctx, output, value); - } - // `ctx.currentDepth` is set to the most inner depth of the currently - // inspected object part while `recurseTimes` is the actual current depth - // that is inspected. - // - // Example: - // - // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } - // - // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max - // depth of 1. - // - // Consolidate all entries of the local most inner depth up to - // `ctx.compact`, as long as the properties are smaller than - // `ctx.breakLength`. - if ( - ctx.currentDepth - recurseTimes < ctx.compact && - entries === output.length - ) { - // Line up all entries on a single line in case the entries do not - // exceed `breakLength`. Add 10 as constant to start next to all other - // factors that may reduce `breakLength`. - const start = output.length + ctx.indentationLvl + - braces[0].length + base.length + 10; - if (isBelowBreakLength(ctx, output, start, base)) { - return `${base ? `${base} ` : ""}${braces[0]} ${join(output, ", ")}` + - ` ${braces[1]}`; - } - } - } - // Line up each entry on an individual line. - const indentation = `\n${" ".repeat(ctx.indentationLvl)}`; - return `${base ? `${base} ` : ""}${braces[0]}${indentation} ` + - `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; - } - // Line up all entries on a single line in case the entries do not exceed - // `breakLength`. - if (isBelowBreakLength(ctx, output, 0, base)) { - return `${braces[0]}${base ? 
` ${base}` : ""} ${join(output, ", ")} ` + - braces[1]; - } - const indentation = " ".repeat(ctx.indentationLvl); - // If the opening "brace" is too large, like in the case of "Set {", - // we need to force the first item to be on the next line or the - // items will not line up correctly. - const ln = base === "" && braces[0].length === 1 - ? " " - : `${base ? ` ${base}` : ""}\n${indentation} `; - // Line up each entry on an individual line. - return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; -} - -// The built-in Array#join is slower in v8 6.0 -function join(output, separator) { - let str = ""; - if (output.length !== 0) { - const lastIndex = output.length - 1; - for (let i = 0; i < lastIndex; i++) { - // It is faster not to use a template string here - str += output[i]; - str += separator; - } - str += output[lastIndex]; - } - return str; -} - -function groupArrayElements(ctx, output, value) { - let totalLength = 0; - let maxLength = 0; - let i = 0; - let outputLength = output.length; - if (ctx.maxArrayLength < output.length) { - // This makes sure the "... n more items" part is not taken into account. - outputLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(outputLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. We have to remove colors first, - // otherwise the length would not be calculated properly. - for (; i < outputLength; i++) { - const len = getStringWidth(output[i], ctx.colors); - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) { - maxLength = len; - } - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. 
- const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. - if ( - actualMax * 3 + ctx.indentationLvl < ctx.breakLength && - (totalLength / actualMax > 5 || maxLength <= 6) - ) { - const approxCharHeights = 2.5; - const averageBias = Math.sqrt(actualMax - totalLength / output.length); - const biasedMax = Math.max(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = Math.min( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - Math.round( - Math.sqrt( - approxCharHeights * biasedMax * outputLength, - ) / biasedMax, - ), - // Do not exceed the breakLength. - Math.floor((ctx.breakLength - ctx.indentationLvl) / actualMax), - // Limit array grouping for small `compact` modes as the user requested - // minimal grouping. - ctx.compact * 4, - // Limit the columns to a maximum of fifteen. - 15, - ); - // Return with the original output if no grouping should happen. 
- if (columns <= 1) { - return output; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < output.length; j += columns) { - if (dataLen[j] > lineMaxLength) { - lineMaxLength = dataLen[j]; - } - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = String.prototype.padStart; - if (value !== undefined) { - for (let i = 0; i < output.length; i++) { - if (typeof value[i] !== "number" && typeof value[i] !== "bigint") { - order = String.prototype.padEnd; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < outputLength; i += columns) { - // The last lines may contain less entries than columns. - const max = Math.min(i + columns, outputLength); - let str = ""; - let j = i; - for (; j < max - 1; j++) { - // Calculate extra color padding in case it's active. This has to be - // done line by line as some lines might contain more colors than - // others. - const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; - str += `${output[j]}, `.padStart(padding, " "); - } - if (order === String.prototype.padStart) { - const padding = maxLineLength[j - i] + - output[j].length - - dataLen[j] - - separatorSpace; - str += output[j].padStart(padding, " "); - } else { - str += output[j]; - } - Array.prototype.push.call(tmp, str); - } - if (ctx.maxArrayLength < output.length) { - Array.prototype.push.call(tmp, output[outputLength]); - } - output = tmp; - } - return output; -} - -function formatMapIterInner( - ctx, - recurseTimes, - entries, - state, -) { - const maxArrayLength = Math.max(ctx.maxArrayLength, 0); - // Entries exist as [key1, val1, key2, val2, ...] 
- const len = entries.length / 2; - const remaining = len - maxArrayLength; - const maxLength = Math.min(maxArrayLength, len); - let output = new Array(maxLength); - let i = 0; - ctx.indentationLvl += 2; - if (state === kWeak) { - for (; i < maxLength; i++) { - const pos = i * 2; - output[i] = `${formatValue(ctx, entries[pos], recurseTimes)} => ${ - formatValue(ctx, entries[pos + 1], recurseTimes) - }`; - } - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - if (!ctx.sorted) { - output = output.sort(); - } - } else { - for (; i < maxLength; i++) { - const pos = i * 2; - const res = [ - formatValue(ctx, entries[pos], recurseTimes), - formatValue(ctx, entries[pos + 1], recurseTimes), - ]; - output[i] = reduceToSingleString( - ctx, - res, - "", - ["[", "]"], - kArrayExtrasType, - recurseTimes, - ); - } - } - ctx.indentationLvl -= 2; - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); - } - return output; -} - -function formatSetIterInner( - ctx, - recurseTimes, - entries, - state, -) { - const maxArrayLength = Math.max(ctx.maxArrayLength, 0); - const maxLength = Math.min(maxArrayLength, entries.length); - const output = new Array(maxLength); - ctx.indentationLvl += 2; - for (let i = 0; i < maxLength; i++) { - output[i] = formatValue(ctx, entries[i], recurseTimes); - } - ctx.indentationLvl -= 2; - if (state === kWeak && !ctx.sorted) { - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - output.sort(); - } - const remaining = entries.length - maxLength; - if (remaining > 0) { - Array.prototype.push.call( - output, - `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, - ); - } - return output; -} - // Regex used for ansi escape code splitting // Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js // License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore diff --git a/ext/node/polyfills/internal/util/types.ts b/ext/node/polyfills/internal/util/types.ts index 8ed99456b2..24d323953a 100644 --- a/ext/node/polyfills/internal/util/types.ts +++ b/ext/node/polyfills/internal/util/types.ts @@ -79,7 +79,7 @@ export function isInt32Array(value: unknown): value is Int32Array { return _getTypedArrayToStringTag.call(value) === "Int32Array"; } -export function isTypedArray(value: unknown): value is +export type TypedArray = | BigInt64Array | BigUint64Array | Float32Array @@ -90,7 +90,9 @@ export function isTypedArray(value: unknown): value is | Uint8Array | Uint8ClampedArray | Uint16Array - | Uint32Array { + | Uint32Array; + +export function isTypedArray(value: unknown): value is TypedArray { return _getTypedArrayToStringTag.call(value) !== undefined; } diff --git a/ext/node/polyfills/internal_binding/node_file.ts b/ext/node/polyfills/internal_binding/node_file.ts index 82ff55b754..c81a7d830a 100644 --- a/ext/node/polyfills/internal_binding/node_file.ts +++ b/ext/node/polyfills/internal_binding/node_file.ts @@ -26,6 +26,8 @@ // - https://github.com/nodejs/node/blob/master/src/node_file.h import { assert } from "ext:deno_node/_util/asserts.ts"; +import * as io from "ext:deno_io/12_io.js"; +import * as fs from "ext:deno_fs/30_fs.js"; /** * Write to the given file from the given buffer synchronously. 
@@ -58,13 +60,13 @@ export function writeBuffer( ); if (position) { - Deno.seekSync(fd, position, Deno.SeekMode.Current); + fs.seekSync(fd, position, io.SeekMode.Current); } const subarray = buffer.subarray(offset, offset + length); try { - return Deno.writeSync(fd, subarray); + return io.writeSync(fd, subarray); } catch (e) { ctx.errno = extractOsErrorNumberFromErrorMessage(e); return 0; diff --git a/ext/node/polyfills/internal_binding/types.ts b/ext/node/polyfills/internal_binding/types.ts index 943f5e31dc..d03c342ad9 100644 --- a/ext/node/polyfills/internal_binding/types.ts +++ b/ext/node/polyfills/internal_binding/types.ts @@ -21,7 +21,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import { core } from "ext:deno_node/_core.ts"; +const { core } = globalThis.__bootstrap; // https://tc39.es/ecma262/#sec-object.prototype.tostring const _toString = Object.prototype.toString; diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts index 2c2f5f9448..79845adb2e 100644 --- a/ext/node/polyfills/net.ts +++ b/ext/node/polyfills/net.ts @@ -1834,21 +1834,8 @@ function _onconnection(this: any, err: number, clientHandle?: Handle) { return; } - const socket = new Socket({ - handle: clientHandle, - allowHalfOpen: self.allowHalfOpen, - pauseOnCreate: self.pauseOnConnect, - readable: true, - writable: true, - }); - - // TODO(@bartlomieju): implement noDelay and setKeepAlive - - self._connections++; - socket.server = self; - socket._server = self; - - DTRACE_NET_SERVER_CONNECTION(socket); + const socket = self._createSocket(clientHandle); + this._connections++; self.emit("connection", socket); if (netServerSocketChannel.hasSubscribers) { @@ -2369,6 +2356,23 @@ export class Server extends EventEmitter { return !!this._handle; } + _createSocket(clientHandle) { + const socket = new Socket({ + handle: clientHandle, + allowHalfOpen: this.allowHalfOpen, + pauseOnCreate: this.pauseOnConnect, + 
readable: true, + writable: true, + }); + + // TODO(@bartlomieju): implement noDelay and setKeepAlive + + socket.server = this; + socket._server = this; + + DTRACE_NET_SERVER_CONNECTION(socket); + } + _listen2 = _setupListenHandle; _emitCloseIfDrained() { diff --git a/ext/node/polyfills/perf_hooks.ts b/ext/node/polyfills/perf_hooks.ts index ac74c10f81..30c50d3637 100644 --- a/ext/node/polyfills/perf_hooks.ts +++ b/ext/node/polyfills/perf_hooks.ts @@ -22,6 +22,8 @@ const performance: timerify: any; // deno-lint-ignore no-explicit-any timeOrigin: any; + // deno-lint-ignore no-explicit-any + markResourceTiming: any; } = { clearMarks: (markName: string) => shimPerformance.clearMarks(markName), eventLoopUtilization: () => @@ -50,6 +52,7 @@ const performance: timerify: () => notImplemented("timerify from performance"), // deno-lint-ignore no-explicit-any timeOrigin: (shimPerformance as any).timeOrigin, + markResourceTiming: () => {}, // @ts-ignore waiting on update in `deno`, but currently this is // a circular dependency toJSON: () => shimPerformance.toJSON(), diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index eb5a491ae0..b676e87d75 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -2,7 +2,7 @@ // Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license. 
const internals = globalThis.__bootstrap.internals; -import { core } from "ext:deno_node/_core.ts"; +const { core } = globalThis.__bootstrap; import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts"; import { EventEmitter } from "ext:deno_node/events.ts"; import { validateString } from "ext:deno_node/internal/validators.mjs"; @@ -91,7 +91,7 @@ export const exit = (code?: number | string) => { process.emit("exit", process.exitCode || 0); } - Deno.exit(process.exitCode || 0); + process.reallyExit(process.exitCode || 0); }; function addReadOnlyProcessAlias( @@ -331,6 +331,17 @@ class Process extends EventEmitter { super(); } + /** https://nodejs.org/api/process.html#processrelease */ + get release() { + return { + name: "node", + sourceUrl: + `https://nodejs.org/download/release/${version}/node-${version}.tar.gz`, + headersUrl: + `https://nodejs.org/download/release/${version}/node-${version}-headers.tar.gz`, + }; + } + /** https://nodejs.org/api/process.html#process_process_arch */ get arch() { if (!arch) { @@ -369,6 +380,13 @@ class Process extends EventEmitter { /** https://nodejs.org/api/process.html#process_process_exit_code */ exit = exit; + // Undocumented Node API that is used by `signal-exit` which in turn + // is used by `node-tap`. It was marked for removal a couple of years + // ago. 
See https://github.com/nodejs/node/blob/6a6b3c54022104cc110ab09044a2a0cecb8988e7/lib/internal/bootstrap/node.js#L172 + reallyExit = (code: number) => { + return Deno.exit(code || 0); + }; + _exiting = _exiting; /** https://nodejs.org/api/process.html#processexitcode_1 */ @@ -705,9 +723,9 @@ internals.__bootstrapNodeProcess = function ( core.setMacrotaskCallback(runNextTicks); enableNextTick(); - // TODO(bartlomieju): this is buggy, see https://github.com/denoland/deno/issues/16928 - // We should use a specialized API in 99_main.js instead - globalThis.addEventListener("unhandledrejection", (event) => { + // Install special "unhandledrejection" handler, that will be called + // last. + internals.nodeProcessUnhandledRejectionCallback = (event) => { if (process.listenerCount("unhandledRejection") === 0) { // The Node.js default behavior is to raise an uncaught exception if // an unhandled rejection occurs and there are no unhandledRejection @@ -723,7 +741,7 @@ internals.__bootstrapNodeProcess = function ( event.preventDefault(); process.emit("unhandledRejection", event.reason, event.promise); - }); + }; globalThis.addEventListener("error", (event) => { if (process.listenerCount("uncaughtException") > 0) { diff --git a/ext/node/polyfills/stream/promises.mjs b/ext/node/polyfills/stream/promises.mjs index 69ba7fd0c1..98fe38e0a4 100644 --- a/ext/node/polyfills/stream/promises.mjs +++ b/ext/node/polyfills/stream/promises.mjs @@ -1,9 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. 
-import stream from "ext:deno_node/_stream.mjs"; - -const { finished, pipeline } = stream.promises; +import { finished, pipeline } from "ext:deno_node/_stream.mjs"; export default { finished, diff --git a/ext/node/polyfills/timers.ts b/ext/node/polyfills/timers.ts index 9c688f2421..e5e64529c9 100644 --- a/ext/node/polyfills/timers.ts +++ b/ext/node/polyfills/timers.ts @@ -8,6 +8,7 @@ import * as timers from "ext:deno_web/02_timers.js"; const clearTimeout_ = timers.clearTimeout; const clearInterval_ = timers.clearInterval; +const setTimeoutUnclamped = timers.setTimeoutUnclamped; export function setTimeout( callback: (...args: unknown[]) => void, @@ -46,10 +47,12 @@ export function clearInterval(timeout?: Timeout | number | string) { } // TODO(bartlomieju): implement the 'NodeJS.Immediate' versions of the timers. // https://github.com/DefinitelyTyped/DefinitelyTyped/blob/1163ead296d84e7a3c80d71e7c81ecbd1a130e9a/types/node/v12/globals.d.ts#L1120-L1131 -export const setImmediate = ( +export function setImmediate( cb: (...args: unknown[]) => void, ...args: unknown[] -): Timeout => setTimeout(cb, 0, ...args); +): Timeout { + return setTimeoutUnclamped(cb, 0, ...args); +} export const clearImmediate = clearTimeout; export default { diff --git a/ext/node/polyfills/url.ts b/ext/node/polyfills/url.ts index 4beebe844e..28b6060bc6 100644 --- a/ext/node/polyfills/url.ts +++ b/ext/node/polyfills/url.ts @@ -939,14 +939,11 @@ export function format( ["Object", "string"], urlObject, ); - } else if (!(urlObject instanceof Url)) { - if (urlObject instanceof URL) { - return formatWhatwg(urlObject, options); - } - return Url.prototype.format.call(urlObject); + } else if (urlObject instanceof URL) { + return formatWhatwg(urlObject, options); } - return (urlObject as Url).format(); + return Url.prototype.format.call(urlObject); } /** @@ -1002,10 +999,9 @@ function formatWhatwg( } ret += "@"; } - // TODO(wafuwfu13): Support unicode option - // ret += options.unicode ? 
- // domainToUnicode(urlObject.host) : urlObject.host; - ret += urlObject.host; + ret += options.unicode + ? domainToUnicode(urlObject.hostname) + : urlObject.hostname; if (urlObject.port) { ret += `:${urlObject.port}`; } diff --git a/ext/node/polyfills/v8.ts b/ext/node/polyfills/v8.ts index 27feb1cecb..e411b541e0 100644 --- a/ext/node/polyfills/v8.ts +++ b/ext/node/polyfills/v8.ts @@ -42,7 +42,14 @@ export function getHeapStatistics() { } export function setFlagsFromString() { - notImplemented("v8.setFlagsFromString"); + // NOTE(bartlomieju): From Node.js docs: + // The v8.setFlagsFromString() method can be used to programmatically set V8 + // command-line flags. This method should be used with care. Changing settings + // after the VM has started may result in unpredictable behavior, including + // crashes and data loss; or it may simply do nothing. + // + // Notice: "or it may simply do nothing". This is what we're gonna do, + // this function will just be a no-op. } export function stopCoverage() { notImplemented("v8.stopCoverage"); diff --git a/ext/node/polyfills/worker_threads.ts b/ext/node/polyfills/worker_threads.ts index cc9529fbd7..8005506bb6 100644 --- a/ext/node/polyfills/worker_threads.ts +++ b/ext/node/polyfills/worker_threads.ts @@ -3,9 +3,11 @@ import { resolve, toFileUrl } from "ext:deno_node/path.ts"; import { notImplemented } from "ext:deno_node/_utils.ts"; -import { EventEmitter } from "ext:deno_node/events.ts"; +import { EventEmitter, once } from "ext:deno_node/events.ts"; +import { BroadcastChannel } from "ext:deno_broadcast_channel/01_broadcast_channel.js"; +import { MessageChannel, MessagePort } from "ext:deno_web/13_message_port.js"; -const environmentData = new Map(); +let environmentData = new Map(); let threads = 0; export interface WorkerOptions { @@ -46,13 +48,18 @@ class _Worker extends EventEmitter { postMessage: Worker["postMessage"]; constructor(specifier: URL | string, options?: WorkerOptions) { - notImplemented("Worker"); 
super(); if (options?.eval === true) { specifier = `data:text/javascript,${specifier}`; } else if (typeof specifier === "string") { - // @ts-ignore This API is temporarily disabled - specifier = toFileUrl(resolve(specifier)); + specifier = resolve(specifier); + if (!specifier.toString().endsWith(".mjs")) { + const cwdFileUrl = toFileUrl(Deno.cwd()); + specifier = + `data:text/javascript,(async function() {const { createRequire } = await import("node:module");const require = createRequire("${cwdFileUrl}");require("${specifier}");})();`; + } else { + specifier = toFileUrl(specifier); + } } const handle = this[kHandle] = new Worker( specifier, @@ -93,20 +100,11 @@ class _Worker extends EventEmitter { readonly performance = globalThis.performance; } -export const isMainThread = - // deno-lint-ignore no-explicit-any - (globalThis as any).name !== PRIVATE_WORKER_THREAD_NAME; +export let isMainThread; +export let resourceLimits; -// fake resourceLimits -export const resourceLimits = isMainThread ? 
{} : { - maxYoungGenerationSizeMb: 48, - maxOldGenerationSizeMb: 2048, - codeRangeSizeMb: 0, - stackSizeMb: 4, -}; - -const threadId = 0; -const workerData: unknown = null; +let threadId = 0; +let workerData: unknown = null; // Like https://github.com/nodejs/node/blob/48655e17e1d84ba5021d7a94b4b88823f7c9c6cf/lib/internal/event_target.js#L611 interface NodeEventTarget extends @@ -129,74 +127,100 @@ interface NodeEventTarget extends type ParentPort = typeof self & NodeEventTarget; // deno-lint-ignore no-explicit-any -const parentPort: ParentPort = null as any; +let parentPort: ParentPort = null as any; -/* -if (!isMainThread) { - // deno-lint-ignore no-explicit-any - delete (globalThis as any).name; - // deno-lint-ignore no-explicit-any - const listeners = new WeakMap<(...args: any[]) => void, (ev: any) => any>(); - - parentPort = self as ParentPort; - parentPort.off = parentPort.removeListener = function ( - this: ParentPort, - name, - listener, - ) { - this.removeEventListener(name, listeners.get(listener)!); - listeners.delete(listener); - return this; - }; - parentPort.on = parentPort.addListener = function ( - this: ParentPort, - name, - listener, - ) { +globalThis.__bootstrap.internals.__initWorkerThreads = () => { + isMainThread = // deno-lint-ignore no-explicit-any - const _listener = (ev: any) => listener(ev.data); - listeners.set(listener, _listener); - this.addEventListener(name, _listener); - return this; + (globalThis as any).name !== PRIVATE_WORKER_THREAD_NAME; + + defaultExport.isMainThread = isMainThread; + // fake resourceLimits + resourceLimits = isMainThread ? 
{} : { + maxYoungGenerationSizeMb: 48, + maxOldGenerationSizeMb: 2048, + codeRangeSizeMb: 0, + stackSizeMb: 4, }; - parentPort.once = function (this: ParentPort, name, listener) { + defaultExport.resourceLimits = resourceLimits; + + if (!isMainThread) { // deno-lint-ignore no-explicit-any - const _listener = (ev: any) => listener(ev.data); - listeners.set(listener, _listener); - this.addEventListener(name, _listener); - return this; - }; + delete (globalThis as any).name; + // deno-lint-ignore no-explicit-any + const listeners = new WeakMap<(...args: any[]) => void, (ev: any) => any>(); - // mocks - parentPort.setMaxListeners = () => {}; - parentPort.getMaxListeners = () => Infinity; - parentPort.eventNames = () => [""]; - parentPort.listenerCount = () => 0; + parentPort = self as ParentPort; - parentPort.emit = () => notImplemented("parentPort.emit"); - parentPort.removeAllListeners = () => - notImplemented("parentPort.removeAllListeners"); + const initPromise = once( + parentPort, + "message", + ).then((result) => { + // TODO(kt3k): The below values are set asynchronously + // using the first message from the parent. + // This should be done synchronously. 
+ threadId = result[0].data.threadId; + workerData = result[0].data.workerData; + environmentData = result[0].data.environmentData; - // Receive startup message - [{ threadId, workerData, environmentData }] = await once( - parentPort, - "message", - ); + defaultExport.threadId = threadId; + defaultExport.workerData = workerData; + }); - // alias - parentPort.addEventListener("offline", () => { - parentPort.emit("close"); - }); -} -*/ + parentPort.off = parentPort.removeListener = function ( + this: ParentPort, + name, + listener, + ) { + this.removeEventListener(name, listeners.get(listener)!); + listeners.delete(listener); + return this; + }; + parentPort.on = parentPort.addListener = function ( + this: ParentPort, + name, + listener, + ) { + initPromise.then(() => { + // deno-lint-ignore no-explicit-any + const _listener = (ev: any) => listener(ev.data); + listeners.set(listener, _listener); + this.addEventListener(name, _listener); + }); + return this; + }; + + parentPort.once = function (this: ParentPort, name, listener) { + initPromise.then(() => { + // deno-lint-ignore no-explicit-any + const _listener = (ev: any) => listener(ev.data); + listeners.set(listener, _listener); + this.addEventListener(name, _listener); + }); + return this; + }; + + // mocks + parentPort.setMaxListeners = () => {}; + parentPort.getMaxListeners = () => Infinity; + parentPort.eventNames = () => [""]; + parentPort.listenerCount = () => 0; + + parentPort.emit = () => notImplemented("parentPort.emit"); + parentPort.removeAllListeners = () => + notImplemented("parentPort.removeAllListeners"); + + parentPort.addEventListener("offline", () => { + parentPort.emit("close"); + }); + } +}; export function getEnvironmentData(key: unknown) { - notImplemented("getEnvironmentData"); return environmentData.get(key); } export function setEnvironmentData(key: unknown, value?: unknown) { - notImplemented("setEnvironmentData"); if (value === undefined) { environmentData.delete(key); } else { @@ -204,12 
+228,6 @@ export function setEnvironmentData(key: unknown, value?: unknown) { } } -// deno-lint-ignore no-explicit-any -const _MessagePort: typeof MessagePort = (globalThis as any).MessagePort; -const _MessageChannel: typeof MessageChannel = - // deno-lint-ignore no-explicit-any - (globalThis as any).MessageChannel; -export const BroadcastChannel = globalThis.BroadcastChannel; export const SHARE_ENV = Symbol.for("nodejs.worker_threads.SHARE_ENV"); export function markAsUntransferable() { notImplemented("markAsUntransferable"); @@ -221,20 +239,21 @@ export function receiveMessageOnPort() { notImplemented("receiveMessageOnPort"); } export { - _MessageChannel as MessageChannel, - _MessagePort as MessagePort, _Worker as Worker, + BroadcastChannel, + MessageChannel, + MessagePort, parentPort, threadId, workerData, }; -export default { +const defaultExport = { markAsUntransferable, moveMessagePortToContext, receiveMessageOnPort, - MessagePort: _MessagePort, - MessageChannel: _MessageChannel, + MessagePort, + MessageChannel, BroadcastChannel, Worker: _Worker, getEnvironmentData, @@ -246,3 +265,5 @@ export default { parentPort, isMainThread, }; + +export default defaultExport; diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 1422ba6b02..9664915052 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -4,19 +4,25 @@ use std::path::Path; use std::path::PathBuf; use deno_core::anyhow::bail; +use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::serde_json::Map; use deno_core::serde_json::Value; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_fs::FileSystemRc; +use deno_media_type::MediaType; +use deno_semver::npm::NpmPackageNv; +use deno_semver::npm::NpmPackageNvReference; +use deno_semver::npm::NpmPackageReqReference; use crate::errors; -use crate::package_json::PackageJson; -use crate::path::PathClean; -use crate::NodeFs; +use crate::AllowAllNodePermissions; use 
crate::NodePermissions; -use crate::RequireNpmResolver; +use crate::NpmResolverRc; +use crate::PackageJson; +use crate::PathClean; pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; @@ -39,53 +45,1267 @@ impl NodeResolutionMode { } } -/// Checks if the resolved file has a corresponding declaration file. -pub fn path_to_declaration_path( - path: PathBuf, - referrer_kind: NodeModuleKind, -) -> Option { - fn probe_extensions( - path: &Path, - referrer_kind: NodeModuleKind, - ) -> Option { - let specific_dts_path = match referrer_kind { - NodeModuleKind::Cjs => with_known_extension(path, "d.cts"), - NodeModuleKind::Esm => with_known_extension(path, "d.mts"), - }; - if Fs::exists(&specific_dts_path) { - return Some(specific_dts_path); - } - let dts_path = with_known_extension(path, "d.ts"); - if Fs::exists(&dts_path) { - Some(dts_path) - } else { - None +#[derive(Debug)] +pub enum NodeResolution { + Esm(ModuleSpecifier), + CommonJs(ModuleSpecifier), + BuiltIn(String), +} + +impl NodeResolution { + pub fn into_url(self) -> ModuleSpecifier { + match self { + Self::Esm(u) => u, + Self::CommonJs(u) => u, + Self::BuiltIn(specifier) => { + if specifier.starts_with("node:") { + ModuleSpecifier::parse(&specifier).unwrap() + } else { + ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap() + } + } } } - let lowercase_path = path.to_string_lossy().to_lowercase(); - if lowercase_path.ends_with(".d.ts") - || lowercase_path.ends_with(".d.cts") - || lowercase_path.ends_with(".d.ts") - { - return Some(path); + pub fn into_specifier_and_media_type( + resolution: Option, + ) -> (ModuleSpecifier, MediaType) { + match resolution { + Some(NodeResolution::CommonJs(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, + 
MediaType::Dts => MediaType::Dcts, + _ => media_type, + }, + ) + } + Some(NodeResolution::Esm(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, + MediaType::Dts => MediaType::Dmts, + _ => media_type, + }, + ) + } + Some(resolution) => (resolution.into_url(), MediaType::Dts), + None => ( + ModuleSpecifier::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } } - if let Some(path) = probe_extensions::(&path, referrer_kind) { - return Some(path); +} + +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = deno_fs::sync::MaybeArc; + +#[derive(Debug)] +pub struct NodeResolver { + fs: FileSystemRc, + npm_resolver: NpmResolverRc, +} + +impl NodeResolver { + pub fn new(fs: FileSystemRc, npm_resolver: NpmResolverRc) -> Self { + Self { fs, npm_resolver } } - if Fs::is_dir(&path) { - if let Some(path) = - probe_extensions::(&path.join("index"), referrer_kind) + + pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + self.npm_resolver.in_npm_package(specifier) + } + + /// This function is an implementation of `defaultResolve` in + /// `lib/internal/modules/esm/resolve.js` from Node. 
+ pub fn resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + // Note: if we are here, then the referrer is an esm module + // TODO(bartlomieju): skipped "policy" part as we don't plan to support it + + if crate::is_builtin_node_module(specifier) { + return Ok(Some(NodeResolution::BuiltIn(specifier.to_string()))); + } + + if let Ok(url) = Url::parse(specifier) { + if url.scheme() == "data" { + return Ok(Some(NodeResolution::Esm(url))); + } + + let protocol = url.scheme(); + + if protocol == "node" { + let split_specifier = url.as_str().split(':'); + let specifier = split_specifier.skip(1).collect::(); + + if crate::is_builtin_node_module(&specifier) { + return Ok(Some(NodeResolution::BuiltIn(specifier))); + } + } + + if protocol != "file" && protocol != "data" { + return Err(errors::err_unsupported_esm_url_scheme(&url)); + } + + // todo(dsherret): this seems wrong + if referrer.scheme() == "data" { + let url = referrer.join(specifier).map_err(AnyError::from)?; + return Ok(Some(NodeResolution::Esm(url))); + } + } + + let url = self.module_resolve( + specifier, + referrer, + DEFAULT_CONDITIONS, + mode, + permissions, + )?; + let url = match url { + Some(url) => url, + None => return Ok(None), + }; + let url = match mode { + NodeResolutionMode::Execution => url, + NodeResolutionMode::Types => { + let path = url.to_file_path().unwrap(); + // todo(16370): the module kind is not correct here. I think we need + // typescript to tell us if the referrer is esm or cjs + let path = + match self.path_to_declaration_path(path, NodeModuleKind::Esm) { + Some(path) => path, + None => return Ok(None), + }; + ModuleSpecifier::from_file_path(path).unwrap() + } + }; + + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. 
+ Ok(Some(resolve_response)) + } + + fn module_resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + // note: if we're here, the referrer is an esm module + let url = if should_be_treated_as_relative_or_absolute_path(specifier) { + let resolved_specifier = referrer.join(specifier)?; + if mode.is_types() { + let file_path = to_file_path(&resolved_specifier); + // todo(dsherret): the node module kind is not correct and we + // should use the value provided by typescript instead + let declaration_path = + self.path_to_declaration_path(file_path, NodeModuleKind::Esm); + declaration_path.map(|declaration_path| { + ModuleSpecifier::from_file_path(declaration_path).unwrap() + }) + } else { + Some(resolved_specifier) + } + } else if specifier.starts_with('#') { + Some( + self + .package_imports_resolve( + specifier, + referrer, + NodeModuleKind::Esm, + conditions, + mode, + permissions, + ) + .map(|p| ModuleSpecifier::from_file_path(p).unwrap())?, + ) + } else if let Ok(resolved) = Url::parse(specifier) { + Some(resolved) + } else { + self + .package_resolve( + specifier, + referrer, + NodeModuleKind::Esm, + conditions, + mode, + permissions, + )? 
+ .map(|p| ModuleSpecifier::from_file_path(p).unwrap()) + }; + Ok(match url { + Some(url) => Some(self.finalize_resolution(url, referrer)?), + None => None, + }) + } + + fn finalize_resolution( + &self, + resolved: ModuleSpecifier, + base: &ModuleSpecifier, + ) -> Result { + let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C"); + + if encoded_sep_re.is_match(resolved.path()) { + return Err(errors::err_invalid_module_specifier( + resolved.path(), + "must not include encoded \"/\" or \"\\\\\" characters", + Some(to_file_path_string(base)), + )); + } + + let path = to_file_path(&resolved); + + // TODO(bartlomieju): currently not supported + // if (getOptionValue('--experimental-specifier-resolution') === 'node') { + // ... + // } + + let p_str = path.to_str().unwrap(); + let p = if p_str.ends_with('/') { + p_str[p_str.len() - 1..].to_string() + } else { + p_str.to_string() + }; + + let (is_dir, is_file) = if let Ok(stats) = self.fs.stat_sync(Path::new(&p)) + { + (stats.is_directory, stats.is_file) + } else { + (false, false) + }; + if is_dir { + return Err(errors::err_unsupported_dir_import( + resolved.as_str(), + base.as_str(), + )); + } else if !is_file { + return Err(errors::err_module_not_found( + resolved.as_str(), + base.as_str(), + "module", + )); + } + + Ok(resolved) + } + + pub fn resolve_npm_req_reference( + &self, + reference: &NpmPackageReqReference, + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + let reference = self + .npm_resolver + .resolve_nv_ref_from_pkg_req_ref(reference)?; + self.resolve_npm_reference(&reference, mode, permissions) + } + + pub fn resolve_npm_reference( + &self, + reference: &NpmPackageNvReference, + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(&reference.nv)?; + let node_module_kind = NodeModuleKind::Esm; + let maybe_resolved_path = self + 
.package_config_resolve( + &reference + .sub_path + .as_ref() + .map(|s| format!("./{s}")) + .unwrap_or_else(|| ".".to_string()), + &package_folder, + node_module_kind, + DEFAULT_CONDITIONS, + mode, + permissions, + ) + .with_context(|| { + format!("Error resolving package config for '{reference}'") + })?; + let resolved_path = match maybe_resolved_path { + Some(resolved_path) => resolved_path, + None => return Ok(None), + }; + let resolved_path = match mode { + NodeResolutionMode::Execution => resolved_path, + NodeResolutionMode::Types => { + match self.path_to_declaration_path(resolved_path, node_module_kind) { + Some(path) => path, + None => return Ok(None), + } + } + }; + let url = ModuleSpecifier::from_file_path(resolved_path).unwrap(); + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(Some(resolve_response)) + } + + pub fn resolve_binary_commands( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result, AnyError> { + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(pkg_nv)?; + let package_json_path = package_folder.join("package.json"); + let package_json = + self.load_package_json(&AllowAllNodePermissions, package_json_path)?; + + Ok(match package_json.bin { + Some(Value::String(_)) => vec![pkg_nv.name.to_string()], + Some(Value::Object(o)) => { + o.into_iter().map(|(key, _)| key).collect::>() + } + _ => Vec::new(), + }) + } + + pub fn resolve_binary_export( + &self, + pkg_ref: &NpmPackageReqReference, + ) -> Result { + let pkg_nv = self + .npm_resolver + .resolve_pkg_id_from_pkg_req(&pkg_ref.req)? 
+ .nv; + let bin_name = pkg_ref.sub_path.as_deref(); + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(&pkg_nv)?; + let package_json_path = package_folder.join("package.json"); + let package_json = + self.load_package_json(&AllowAllNodePermissions, package_json_path)?; + let bin = match &package_json.bin { + Some(bin) => bin, + None => bail!( + "package '{}' did not have a bin property in its package.json", + &pkg_nv.name, + ), + }; + let bin_entry = resolve_bin_entry_value(&pkg_nv, bin_name, bin)?; + let url = + ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap(); + + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(resolve_response) + } + + pub fn url_to_node_resolution( + &self, + url: ModuleSpecifier, + ) -> Result { + let url_str = url.as_str().to_lowercase(); + if url_str.starts_with("http") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { + let package_config = + self.get_closest_package_json(&url, &AllowAllNodePermissions)?; + if package_config.typ == "module" { + Ok(NodeResolution::Esm(url)) + } else { + Ok(NodeResolution::CommonJs(url)) + } + } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".ts") { + Err(generic_error(format!( + "TypeScript files are not supported in npm packages: {url}" + ))) + } else { + Ok(NodeResolution::CommonJs(url)) + } + } + + fn package_config_resolve( + &self, + package_subpath: &str, + package_dir: &Path, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + let package_json_path = package_dir.join("package.json"); + let referrer = ModuleSpecifier::from_directory_path(package_dir).unwrap(); + let 
package_config = + self.load_package_json(permissions, package_json_path.clone())?; + if let Some(exports) = &package_config.exports { + let result = self.package_exports_resolve( + &package_json_path, + package_subpath.to_string(), + exports, + &referrer, + referrer_kind, + conditions, + mode, + permissions, + ); + match result { + Ok(found) => return Ok(Some(found)), + Err(exports_err) => { + if mode.is_types() && package_subpath == "." { + if let Ok(Some(path)) = + self.legacy_main_resolve(&package_config, referrer_kind, mode) + { + return Ok(Some(path)); + } else { + return Ok(None); + } + } + return Err(exports_err); + } + } + } + if package_subpath == "." { + return self.legacy_main_resolve(&package_config, referrer_kind, mode); + } + + Ok(Some(package_dir.join(package_subpath))) + } + + /// Checks if the resolved file has a corresponding declaration file. + pub(super) fn path_to_declaration_path( + &self, + path: PathBuf, + referrer_kind: NodeModuleKind, + ) -> Option { + fn probe_extensions( + fs: &dyn deno_fs::FileSystem, + path: &Path, + referrer_kind: NodeModuleKind, + ) -> Option { + let specific_dts_path = match referrer_kind { + NodeModuleKind::Cjs => with_known_extension(path, "d.cts"), + NodeModuleKind::Esm => with_known_extension(path, "d.mts"), + }; + if fs.exists(&specific_dts_path) { + return Some(specific_dts_path); + } + let dts_path = with_known_extension(path, "d.ts"); + if fs.exists(&dts_path) { + Some(dts_path) + } else { + None + } + } + + let lowercase_path = path.to_string_lossy().to_lowercase(); + if lowercase_path.ends_with(".d.ts") + || lowercase_path.ends_with(".d.cts") + || lowercase_path.ends_with(".d.ts") { return Some(path); } + if let Some(path) = probe_extensions(&*self.fs, &path, referrer_kind) { + return Some(path); + } + if self.fs.is_dir(&path) { + if let Some(path) = + probe_extensions(&*self.fs, &path.join("index"), referrer_kind) + { + return Some(path); + } + } + None } - None + + pub(super) fn package_imports_resolve( 
+ &self, + name: &str, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result { + if name == "#" || name.starts_with("#/") || name.ends_with('/') { + let reason = "is not a valid internal imports specifier name"; + return Err(errors::err_invalid_module_specifier( + name, + reason, + Some(to_specifier_display_string(referrer)), + )); + } + + let package_config = + self.get_package_scope_config(referrer, permissions)?; + let mut package_json_path = None; + if package_config.exists { + package_json_path = Some(package_config.path.clone()); + if let Some(imports) = &package_config.imports { + if imports.contains_key(name) && !name.contains('*') { + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + imports.get(name).unwrap().to_owned(), + "".to_string(), + name.to_string(), + referrer, + referrer_kind, + false, + true, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } else { + let mut best_match = ""; + let mut best_match_subpath = None; + for key in imports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..=pattern_index]; + if name.starts_with(key_sub) { + let pattern_trailer = &key[pattern_index + 1..]; + if name.len() > key.len() + && name.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + name[pattern_index..=(name.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = imports.get(best_match).unwrap().to_owned(); + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + target, + best_match_subpath.unwrap(), + best_match.to_string(), + referrer, + referrer_kind, + true, + 
true, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } + } + } + } + + Err(throw_import_not_defined( + name, + package_json_path.as_deref(), + referrer, + )) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target_string( + &self, + target: String, + subpath: String, + match_: String, + package_json_path: &Path, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result { + if !subpath.is_empty() && !pattern && !target.ends_with('/') { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + let invalid_segment_re = + lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); + let pattern_re = lazy_regex::regex!(r"\*"); + if !target.starts_with("./") { + if internal && !target.starts_with("../") && !target.starts_with('/') { + let is_url = Url::parse(&target).is_ok(); + if !is_url { + let export_target = if pattern { + pattern_re + .replace(&target, |_caps: ®ex::Captures| subpath.clone()) + .to_string() + } else { + format!("{target}{subpath}") + }; + let package_json_url = + ModuleSpecifier::from_file_path(package_json_path).unwrap(); + return match self.package_resolve( + &export_target, + &package_json_url, + referrer_kind, + conditions, + mode, + permissions, + ) { + Ok(Some(path)) => Ok(path), + Ok(None) => Err(generic_error("not found")), + Err(err) => Err(err), + }; + } + } + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + if invalid_segment_re.is_match(&target[2..]) { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + let package_path = package_json_path.parent().unwrap(); + let resolved_path = package_path.join(&target).clean(); + if 
!resolved_path.starts_with(package_path) { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + if subpath.is_empty() { + return Ok(resolved_path); + } + if invalid_segment_re.is_match(&subpath) { + let request = if pattern { + match_.replace('*', &subpath) + } else { + format!("{match_}{subpath}") + }; + return Err(throw_invalid_subpath( + request, + package_json_path, + internal, + referrer, + )); + } + if pattern { + let resolved_path_str = resolved_path.to_string_lossy(); + let replaced = pattern_re + .replace(&resolved_path_str, |_caps: ®ex::Captures| { + subpath.clone() + }); + return Ok(PathBuf::from(replaced.to_string())); + } + Ok(resolved_path.join(&subpath).clean()) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target( + &self, + package_json_path: &Path, + target: Value, + subpath: String, + package_subpath: String, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + if let Some(target) = target.as_str() { + return self + .resolve_package_target_string( + target.to_string(), + subpath, + package_subpath, + package_json_path, + referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + permissions, + ) + .map(|path| { + if mode.is_types() { + self.path_to_declaration_path(path, referrer_kind) + } else { + Some(path) + } + }); + } else if let Some(target_arr) = target.as_array() { + if target_arr.is_empty() { + return Ok(None); + } + + let mut last_error = None; + for target_item in target_arr { + let resolved_result = self.resolve_package_target( + package_json_path, + target_item.to_owned(), + subpath.clone(), + package_subpath.clone(), + referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + permissions, + ); + + match resolved_result { + Ok(Some(resolved)) => return 
Ok(Some(resolved)), + Ok(None) => { + last_error = None; + continue; + } + Err(e) => { + let err_string = e.to_string(); + last_error = Some(e); + if err_string.starts_with("[ERR_INVALID_PACKAGE_TARGET]") { + continue; + } + return Err(last_error.unwrap()); + } + } + } + if last_error.is_none() { + return Ok(None); + } + return Err(last_error.unwrap()); + } else if let Some(target_obj) = target.as_object() { + for key in target_obj.keys() { + // TODO(bartlomieju): verify that keys are not numeric + // return Err(errors::err_invalid_package_config( + // to_file_path_string(package_json_url), + // Some(base.as_str().to_string()), + // Some("\"exports\" cannot contain numeric property keys.".to_string()), + // )); + + if key == "default" + || conditions.contains(&key.as_str()) + || mode.is_types() && key.as_str() == "types" + { + let condition_target = target_obj.get(key).unwrap().to_owned(); + + let resolved = self.resolve_package_target( + package_json_path, + condition_target, + subpath.clone(), + package_subpath.clone(), + referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + permissions, + )?; + match resolved { + Some(resolved) => return Ok(Some(resolved)), + None => { + continue; + } + } + } + } + } else if target.is_null() { + return Ok(None); + } + + Err(throw_invalid_package_target( + package_subpath, + target.to_string(), + package_json_path, + internal, + referrer, + )) + } + + #[allow(clippy::too_many_arguments)] + pub fn package_exports_resolve( + &self, + package_json_path: &Path, + package_subpath: String, + package_exports: &Map, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result { + if package_exports.contains_key(&package_subpath) + && package_subpath.find('*').is_none() + && !package_subpath.ends_with('/') + { + let target = package_exports.get(&package_subpath).unwrap().to_owned(); + let resolved = 
self.resolve_package_target( + package_json_path, + target, + "".to_string(), + package_subpath.to_string(), + referrer, + referrer_kind, + false, + false, + conditions, + mode, + permissions, + )?; + if resolved.is_none() { + return Err(throw_exports_not_found( + package_subpath, + package_json_path, + referrer, + )); + } + return Ok(resolved.unwrap()); + } + + let mut best_match = ""; + let mut best_match_subpath = None; + for key in package_exports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..pattern_index]; + if package_subpath.starts_with(key_sub) { + // When this reaches EOL, this can throw at the top of the whole function: + // + // if (StringPrototypeEndsWith(packageSubpath, '/')) + // throwInvalidSubpath(packageSubpath) + // + // To match "imports" and the spec. + if package_subpath.ends_with('/') { + // TODO(bartlomieju): + // emitTrailingSlashPatternDeprecation(); + } + let pattern_trailer = &key[pattern_index + 1..]; + if package_subpath.len() >= key.len() + && package_subpath.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + package_subpath[pattern_index + ..(package_subpath.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = package_exports.get(best_match).unwrap().to_owned(); + let maybe_resolved = self.resolve_package_target( + package_json_path, + target, + best_match_subpath.unwrap(), + best_match.to_string(), + referrer, + referrer_kind, + true, + false, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } else { + return Err(throw_exports_not_found( + package_subpath, + package_json_path, + referrer, + )); + } + } + + Err(throw_exports_not_found( + package_subpath, + package_json_path, + referrer, + )) + } + + pub(super) fn 
package_resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &dyn NodePermissions, + ) -> Result, AnyError> { + let (package_name, package_subpath, _is_scoped) = + parse_package_name(specifier, referrer)?; + + // ResolveSelf + let package_config = + self.get_package_scope_config(referrer, permissions)?; + if package_config.exists + && package_config.name.as_ref() == Some(&package_name) + { + if let Some(exports) = &package_config.exports { + return self + .package_exports_resolve( + &package_config.path, + package_subpath, + exports, + referrer, + referrer_kind, + conditions, + mode, + permissions, + ) + .map(Some); + } + } + + let package_dir_path = self + .npm_resolver + .resolve_package_folder_from_package(&package_name, referrer, mode)?; + let package_json_path = package_dir_path.join("package.json"); + + // todo: error with this instead when can't find package + // Err(errors::err_module_not_found( + // &package_json_url + // .join(".") + // .unwrap() + // .to_file_path() + // .unwrap() + // .display() + // .to_string(), + // &to_file_path_string(referrer), + // "package", + // )) + + // Package match. + let package_json = + self.load_package_json(permissions, package_json_path)?; + if let Some(exports) = &package_json.exports { + return self + .package_exports_resolve( + &package_json.path, + package_subpath, + exports, + referrer, + referrer_kind, + conditions, + mode, + permissions, + ) + .map(Some); + } + if package_subpath == "." 
{ + return self.legacy_main_resolve(&package_json, referrer_kind, mode); + } + + let file_path = package_json.path.parent().unwrap().join(&package_subpath); + + if mode.is_types() { + let maybe_declaration_path = + self.path_to_declaration_path(file_path, referrer_kind); + Ok(maybe_declaration_path) + } else { + Ok(Some(file_path)) + } + } + + pub(super) fn get_package_scope_config( + &self, + referrer: &ModuleSpecifier, + permissions: &dyn NodePermissions, + ) -> Result { + let root_folder = self + .npm_resolver + .resolve_package_folder_from_path(&referrer.to_file_path().unwrap())?; + let package_json_path = root_folder.join("package.json"); + self.load_package_json(permissions, package_json_path) + } + + pub(super) fn get_closest_package_json( + &self, + url: &ModuleSpecifier, + permissions: &dyn NodePermissions, + ) -> Result { + let package_json_path = self.get_closest_package_json_path(url)?; + self.load_package_json(permissions, package_json_path) + } + + fn get_closest_package_json_path( + &self, + url: &ModuleSpecifier, + ) -> Result { + let file_path = url.to_file_path().unwrap(); + let current_dir = deno_core::strip_unc_prefix( + self.fs.realpath_sync(file_path.parent().unwrap())?, + ); + let mut current_dir = current_dir.as_path(); + let package_json_path = current_dir.join("package.json"); + if self.fs.exists(&package_json_path) { + return Ok(package_json_path); + } + let root_pkg_folder = self + .npm_resolver + .resolve_package_folder_from_path(current_dir)?; + while current_dir.starts_with(&root_pkg_folder) { + current_dir = current_dir.parent().unwrap(); + let package_json_path = current_dir.join("package.json"); + if self.fs.exists(&package_json_path) { + return Ok(package_json_path); + } + } + + bail!("did not find package.json in {}", root_pkg_folder.display()) + } + + pub(super) fn load_package_json( + &self, + permissions: &dyn NodePermissions, + package_json_path: PathBuf, + ) -> Result { + PackageJson::load( + &*self.fs, + 
&*self.npm_resolver, + permissions, + package_json_path, + ) + } + + pub(super) fn legacy_main_resolve( + &self, + package_json: &PackageJson, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result, AnyError> { + let maybe_main = if mode.is_types() { + match package_json.types.as_ref() { + Some(types) => Some(types), + None => { + // fallback to checking the main entrypoint for + // a corresponding declaration file + if let Some(main) = package_json.main(referrer_kind) { + let main = package_json.path.parent().unwrap().join(main).clean(); + if let Some(path) = + self.path_to_declaration_path(main, referrer_kind) + { + return Ok(Some(path)); + } + } + None + } + } + } else { + package_json.main(referrer_kind) + }; + + if let Some(main) = maybe_main { + let guess = package_json.path.parent().unwrap().join(main).clean(); + if self.fs.is_file(&guess) { + return Ok(Some(guess)); + } + + // todo(dsherret): investigate exactly how node and typescript handles this + let endings = if mode.is_types() { + match referrer_kind { + NodeModuleKind::Cjs => { + vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] + } + NodeModuleKind::Esm => vec![ + ".d.ts", + ".d.mts", + "/index.d.ts", + "/index.d.mts", + ".d.cts", + "/index.d.cts", + ], + } + } else { + vec![".js", "/index.js"] + }; + for ending in endings { + let guess = package_json + .path + .parent() + .unwrap() + .join(format!("{main}{ending}")) + .clean(); + if self.fs.is_file(&guess) { + // TODO(bartlomieju): emitLegacyIndexDeprecation() + return Ok(Some(guess)); + } + } + } + + let index_file_names = if mode.is_types() { + // todo(dsherret): investigate exactly how typescript does this + match referrer_kind { + NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], + NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], + } + } else { + vec!["index.js"] + }; + for index_file_name in index_file_names { + let guess = package_json + .path + .parent() + .unwrap() + 
.join(index_file_name) + .clean(); + if self.fs.is_file(&guess) { + // TODO(bartlomieju): emitLegacyIndexDeprecation() + return Ok(Some(guess)); + } + } + + Ok(None) + } +} + +fn resolve_bin_entry_value<'a>( + pkg_nv: &NpmPackageNv, + bin_name: Option<&str>, + bin: &'a Value, +) -> Result<&'a str, AnyError> { + let bin_entry = match bin { + Value::String(_) => { + if bin_name.is_some() && bin_name.unwrap() != pkg_nv.name { + None + } else { + Some(bin) + } + } + Value::Object(o) => { + if let Some(bin_name) = bin_name { + o.get(bin_name) + } else if o.len() == 1 || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) { + o.values().next() + } else { + o.get(&pkg_nv.name) + } + }, + _ => bail!("package '{}' did not have a bin property with a string or object value in its package.json", pkg_nv), + }; + let bin_entry = match bin_entry { + Some(e) => e, + None => { + let keys = bin + .as_object() + .map(|o| { + o.keys() + .map(|k| format!(" * npm:{pkg_nv}/{k}")) + .collect::>() + }) + .unwrap_or_default(); + bail!( + "package '{}' did not have a bin entry for '{}' in its package.json{}", + pkg_nv, + bin_name.unwrap_or(&pkg_nv.name), + if keys.is_empty() { + "".to_string() + } else { + format!("\n\nPossibilities:\n{}", keys.join("\n")) + } + ) + } + }; + match bin_entry { + Value::String(s) => Ok(s), + _ => bail!( + "package '{}' had a non-string sub property of bin in its package.json", + pkg_nv, + ), + } +} + +fn to_file_path(url: &ModuleSpecifier) -> PathBuf { + url + .to_file_path() + .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) +} + +fn to_file_path_string(url: &ModuleSpecifier) -> String { + to_file_path(url).display().to_string() +} + +fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool { + if specifier.is_empty() { + return false; + } + + if specifier.starts_with('/') { + return true; + } + + is_relative_specifier(specifier) +} + +// TODO(ry) We very likely have this utility function elsewhere in 
Deno. +fn is_relative_specifier(specifier: &str) -> bool { + let specifier_len = specifier.len(); + let specifier_chars: Vec<_> = specifier.chars().collect(); + + if !specifier_chars.is_empty() && specifier_chars[0] == '.' { + if specifier_len == 1 || specifier_chars[1] == '/' { + return true; + } + if specifier_chars[1] == '.' + && (specifier_len == 2 || specifier_chars[2] == '/') + { + return true; + } + } + false } /// Alternate `PathBuf::with_extension` that will handle known extensions /// more intelligently. -pub fn with_known_extension(path: &Path, ext: &str) -> PathBuf { +fn with_known_extension(path: &Path, ext: &str) -> PathBuf { const NON_DECL_EXTS: &[&str] = &["cjs", "js", "json", "jsx", "mjs", "tsx"]; const DECL_EXTS: &[&str] = &["cts", "mts", "ts"]; @@ -142,145 +1362,6 @@ fn throw_import_not_defined( ) } -fn pattern_key_compare(a: &str, b: &str) -> i32 { - let a_pattern_index = a.find('*'); - let b_pattern_index = b.find('*'); - - let base_len_a = if let Some(index) = a_pattern_index { - index + 1 - } else { - a.len() - }; - let base_len_b = if let Some(index) = b_pattern_index { - index + 1 - } else { - b.len() - }; - - if base_len_a > base_len_b { - return -1; - } - - if base_len_b > base_len_a { - return 1; - } - - if a_pattern_index.is_none() { - return 1; - } - - if b_pattern_index.is_none() { - return -1; - } - - if a.len() > b.len() { - return -1; - } - - if b.len() > a.len() { - return 1; - } - - 0 -} - -pub fn package_imports_resolve( - name: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if name == "#" || name.starts_with("#/") || name.ends_with('/') { - let reason = "is not a valid internal imports specifier name"; - return Err(errors::err_invalid_module_specifier( - name, - reason, - Some(to_specifier_display_string(referrer)), - )); - } - - let package_config = - 
get_package_scope_config::(referrer, npm_resolver, permissions)?; - let mut package_json_path = None; - if package_config.exists { - package_json_path = Some(package_config.path.clone()); - if let Some(imports) = &package_config.imports { - if imports.contains_key(name) && !name.contains('*') { - let maybe_resolved = resolve_package_target::( - package_json_path.as_ref().unwrap(), - imports.get(name).unwrap().to_owned(), - "".to_string(), - name.to_string(), - referrer, - referrer_kind, - false, - true, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } else { - let mut best_match = ""; - let mut best_match_subpath = None; - for key in imports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..=pattern_index]; - if name.starts_with(key_sub) { - let pattern_trailer = &key[pattern_index + 1..]; - if name.len() > key.len() - && name.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - name[pattern_index..=(name.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = imports.get(best_match).unwrap().to_owned(); - let maybe_resolved = resolve_package_target::( - package_json_path.as_ref().unwrap(), - target, - best_match_subpath.unwrap(), - best_match.to_string(), - referrer, - referrer_kind, - true, - true, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } - } - } - } - - Err(throw_import_not_defined( - name, - package_json_path.as_deref(), - referrer, - )) -} - fn throw_invalid_package_target( subpath: String, target: String, @@ -316,245 +1397,6 @@ fn throw_invalid_subpath( ) } -#[allow(clippy::too_many_arguments)] -fn resolve_package_target_string( - target: 
String, - subpath: String, - match_: String, - package_json_path: &Path, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if !subpath.is_empty() && !pattern && !target.ends_with('/') { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - let invalid_segment_re = - lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); - let pattern_re = lazy_regex::regex!(r"\*"); - if !target.starts_with("./") { - if internal && !target.starts_with("../") && !target.starts_with('/') { - let is_url = Url::parse(&target).is_ok(); - if !is_url { - let export_target = if pattern { - pattern_re - .replace(&target, |_caps: ®ex::Captures| subpath.clone()) - .to_string() - } else { - format!("{target}{subpath}") - }; - let package_json_url = - ModuleSpecifier::from_file_path(package_json_path).unwrap(); - return match package_resolve::( - &export_target, - &package_json_url, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) { - Ok(Some(path)) => Ok(path), - Ok(None) => Err(generic_error("not found")), - Err(err) => Err(err), - }; - } - } - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - if invalid_segment_re.is_match(&target[2..]) { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - let package_path = package_json_path.parent().unwrap(); - let resolved_path = package_path.join(&target).clean(); - if !resolved_path.starts_with(package_path) { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - if subpath.is_empty() { - return Ok(resolved_path); - } - if invalid_segment_re.is_match(&subpath) { - 
let request = if pattern { - match_.replace('*', &subpath) - } else { - format!("{match_}{subpath}") - }; - return Err(throw_invalid_subpath( - request, - package_json_path, - internal, - referrer, - )); - } - if pattern { - let resolved_path_str = resolved_path.to_string_lossy(); - let replaced = pattern_re - .replace(&resolved_path_str, |_caps: ®ex::Captures| { - subpath.clone() - }); - return Ok(PathBuf::from(replaced.to_string())); - } - Ok(resolved_path.join(&subpath).clean()) -} - -#[allow(clippy::too_many_arguments)] -fn resolve_package_target( - package_json_path: &Path, - target: Value, - subpath: String, - package_subpath: String, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result, AnyError> { - if let Some(target) = target.as_str() { - return resolve_package_target_string::( - target.to_string(), - subpath, - package_subpath, - package_json_path, - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(|path| { - if mode.is_types() { - path_to_declaration_path::(path, referrer_kind) - } else { - Some(path) - } - }); - } else if let Some(target_arr) = target.as_array() { - if target_arr.is_empty() { - return Ok(None); - } - - let mut last_error = None; - for target_item in target_arr { - let resolved_result = resolve_package_target::( - package_json_path, - target_item.to_owned(), - subpath.clone(), - package_subpath.clone(), - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - ); - - match resolved_result { - Ok(Some(resolved)) => return Ok(Some(resolved)), - Ok(None) => { - last_error = None; - continue; - } - Err(e) => { - let err_string = e.to_string(); - last_error = Some(e); - if err_string.starts_with("[ERR_INVALID_PACKAGE_TARGET]") { - continue; - 
} - return Err(last_error.unwrap()); - } - } - } - if last_error.is_none() { - return Ok(None); - } - return Err(last_error.unwrap()); - } else if let Some(target_obj) = target.as_object() { - for key in target_obj.keys() { - // TODO(bartlomieju): verify that keys are not numeric - // return Err(errors::err_invalid_package_config( - // to_file_path_string(package_json_url), - // Some(base.as_str().to_string()), - // Some("\"exports\" cannot contain numeric property keys.".to_string()), - // )); - - if key == "default" - || conditions.contains(&key.as_str()) - || mode.is_types() && key.as_str() == "types" - { - let condition_target = target_obj.get(key).unwrap().to_owned(); - - let resolved = resolve_package_target::( - package_json_path, - condition_target, - subpath.clone(), - package_subpath.clone(), - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - )?; - match resolved { - Some(resolved) => return Ok(Some(resolved)), - None => { - continue; - } - } - } - } - } else if target.is_null() { - return Ok(None); - } - - Err(throw_invalid_package_target( - package_subpath, - target.to_string(), - package_json_path, - internal, - referrer, - )) -} - fn throw_exports_not_found( subpath: String, package_json_path: &Path, @@ -567,115 +1409,6 @@ fn throw_exports_not_found( ) } -#[allow(clippy::too_many_arguments)] -pub fn package_exports_resolve( - package_json_path: &Path, - package_subpath: String, - package_exports: &Map, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if package_exports.contains_key(&package_subpath) - && package_subpath.find('*').is_none() - && !package_subpath.ends_with('/') - { - let target = package_exports.get(&package_subpath).unwrap().to_owned(); - let resolved = resolve_package_target::( - package_json_path, - target, - 
"".to_string(), - package_subpath.to_string(), - referrer, - referrer_kind, - false, - false, - conditions, - mode, - npm_resolver, - permissions, - )?; - if resolved.is_none() { - return Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )); - } - return Ok(resolved.unwrap()); - } - - let mut best_match = ""; - let mut best_match_subpath = None; - for key in package_exports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..pattern_index]; - if package_subpath.starts_with(key_sub) { - // When this reaches EOL, this can throw at the top of the whole function: - // - // if (StringPrototypeEndsWith(packageSubpath, '/')) - // throwInvalidSubpath(packageSubpath) - // - // To match "imports" and the spec. - if package_subpath.ends_with('/') { - // TODO(bartlomieju): - // emitTrailingSlashPatternDeprecation(); - } - let pattern_trailer = &key[pattern_index + 1..]; - if package_subpath.len() > key.len() - && package_subpath.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - package_subpath - [pattern_index..(package_subpath.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = package_exports.get(best_match).unwrap().to_owned(); - let maybe_resolved = resolve_package_target::( - package_json_path, - target, - best_match_subpath.unwrap(), - best_match.to_string(), - referrer, - referrer_kind, - true, - false, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } else { - return Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )); - } - } - - Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )) -} - fn parse_package_name( specifier: &str, referrer: 
&ModuleSpecifier, @@ -727,229 +1460,153 @@ fn parse_package_name( Ok((package_name, package_subpath, is_scoped)) } -pub fn package_resolve( - specifier: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result, AnyError> { - let (package_name, package_subpath, _is_scoped) = - parse_package_name(specifier, referrer)?; +fn pattern_key_compare(a: &str, b: &str) -> i32 { + let a_pattern_index = a.find('*'); + let b_pattern_index = b.find('*'); - // ResolveSelf - let package_config = - get_package_scope_config::(referrer, npm_resolver, permissions)?; - if package_config.exists - && package_config.name.as_ref() == Some(&package_name) - { - if let Some(exports) = &package_config.exports { - return package_exports_resolve::( - &package_config.path, - package_subpath, - exports, - referrer, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(Some); - } - } - - let package_dir_path = npm_resolver.resolve_package_folder_from_package( - &package_name, - &referrer.to_file_path().unwrap(), - mode, - )?; - let package_json_path = package_dir_path.join("package.json"); - - // todo: error with this instead when can't find package - // Err(errors::err_module_not_found( - // &package_json_url - // .join(".") - // .unwrap() - // .to_file_path() - // .unwrap() - // .display() - // .to_string(), - // &to_file_path_string(referrer), - // "package", - // )) - - // Package match. - let package_json = - PackageJson::load::(npm_resolver, permissions, package_json_path)?; - if let Some(exports) = &package_json.exports { - return package_exports_resolve::( - &package_json.path, - package_subpath, - exports, - referrer, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(Some); - } - if package_subpath == "." 
{ - return legacy_main_resolve::(&package_json, referrer_kind, mode); - } - - let file_path = package_json.path.parent().unwrap().join(&package_subpath); - - if mode.is_types() { - let maybe_declaration_path = - path_to_declaration_path::(file_path, referrer_kind); - Ok(maybe_declaration_path) + let base_len_a = if let Some(index) = a_pattern_index { + index + 1 } else { - Ok(Some(file_path)) - } -} - -pub fn get_package_scope_config( - referrer: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - let root_folder = npm_resolver - .resolve_package_folder_from_path(&referrer.to_file_path().unwrap())?; - let package_json_path = root_folder.join("package.json"); - PackageJson::load::(npm_resolver, permissions, package_json_path) -} - -pub fn get_closest_package_json( - url: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - let package_json_path = - get_closest_package_json_path::(url, npm_resolver)?; - PackageJson::load::(npm_resolver, permissions, package_json_path) -} - -fn get_closest_package_json_path( - url: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, -) -> Result { - let file_path = url.to_file_path().unwrap(); - let mut current_dir = file_path.parent().unwrap(); - let package_json_path = current_dir.join("package.json"); - if Fs::exists(&package_json_path) { - return Ok(package_json_path); - } - let root_pkg_folder = npm_resolver - .resolve_package_folder_from_path(&url.to_file_path().unwrap())?; - while current_dir.starts_with(&root_pkg_folder) { - current_dir = current_dir.parent().unwrap(); - let package_json_path = current_dir.join("package.json"); - if Fs::exists(&package_json_path) { - return Ok(package_json_path); - } - } - - bail!("did not find package.json in {}", root_pkg_folder.display()) -} - -pub fn legacy_main_resolve( - package_json: &PackageJson, - referrer_kind: NodeModuleKind, - mode: 
NodeResolutionMode, -) -> Result, AnyError> { - let maybe_main = if mode.is_types() { - match package_json.types.as_ref() { - Some(types) => Some(types), - None => { - // fallback to checking the main entrypoint for - // a corresponding declaration file - if let Some(main) = package_json.main(referrer_kind) { - let main = package_json.path.parent().unwrap().join(main).clean(); - if let Some(path) = - path_to_declaration_path::(main, referrer_kind) - { - return Ok(Some(path)); - } - } - None - } - } + a.len() + }; + let base_len_b = if let Some(index) = b_pattern_index { + index + 1 } else { - package_json.main(referrer_kind) + b.len() }; - if let Some(main) = maybe_main { - let guess = package_json.path.parent().unwrap().join(main).clean(); - if Fs::is_file(&guess) { - return Ok(Some(guess)); - } - - // todo(dsherret): investigate exactly how node and typescript handles this - let endings = if mode.is_types() { - match referrer_kind { - NodeModuleKind::Cjs => { - vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] - } - NodeModuleKind::Esm => vec![ - ".d.ts", - ".d.mts", - "/index.d.ts", - "/index.d.mts", - ".d.cts", - "/index.d.cts", - ], - } - } else { - vec![".js", "/index.js"] - }; - for ending in endings { - let guess = package_json - .path - .parent() - .unwrap() - .join(format!("{main}{ending}")) - .clean(); - if Fs::is_file(&guess) { - // TODO(bartlomieju): emitLegacyIndexDeprecation() - return Ok(Some(guess)); - } - } + if base_len_a > base_len_b { + return -1; } - let index_file_names = if mode.is_types() { - // todo(dsherret): investigate exactly how typescript does this - match referrer_kind { - NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], - NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], - } - } else { - vec!["index.js"] - }; - for index_file_name in index_file_names { - let guess = package_json - .path - .parent() - .unwrap() - .join(index_file_name) - .clean(); - if Fs::is_file(&guess) { - // 
TODO(bartlomieju): emitLegacyIndexDeprecation() - return Ok(Some(guess)); - } + if base_len_b > base_len_a { + return 1; } - Ok(None) + if a_pattern_index.is_none() { + return 1; + } + + if b_pattern_index.is_none() { + return -1; + } + + if a.len() > b.len() { + return -1; + } + + if b.len() > a.len() { + return 1; + } + + 0 } #[cfg(test)] mod tests { + use deno_core::serde_json::json; + use super::*; + #[test] + fn test_resolve_bin_entry_value() { + // should resolve the specified value + let value = json!({ + "bin1": "./value1", + "bin2": "./value2", + "test": "./value3", + }); + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + Some("bin1"), + &value + ) + .unwrap(), + "./value1" + ); + + // should resolve the value with the same name when not specified + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + None, + &value + ) + .unwrap(), + "./value3" + ); + + // should not resolve when specified value does not exist + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + Some("other"), + &value + ) + .err() + .unwrap() + .to_string(), + concat!( + "package 'test@1.1.1' did not have a bin entry for 'other' in its package.json\n", + "\n", + "Possibilities:\n", + " * npm:test@1.1.1/bin1\n", + " * npm:test@1.1.1/bin2\n", + " * npm:test@1.1.1/test" + ) + ); + + // should not resolve when default value can't be determined + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("asdf@1.2.3").unwrap(), + None, + &value + ) + .err() + .unwrap() + .to_string(), + concat!( + "package 'asdf@1.2.3' did not have a bin entry for 'asdf' in its package.json\n", + "\n", + "Possibilities:\n", + " * npm:asdf@1.2.3/bin1\n", + " * npm:asdf@1.2.3/bin2\n", + " * npm:asdf@1.2.3/test" + ) + ); + + // should resolve since all the values are the same + let value = json!({ + "bin1": "./value", + "bin2": "./value", + }); + assert_eq!( + resolve_bin_entry_value( + 
&NpmPackageNv::from_str("test@1.2.3").unwrap(), + None, + &value + ) + .unwrap(), + "./value" + ); + + // should not resolve when specified and is a string + let value = json!("./value"); + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.2.3").unwrap(), + Some("path"), + &value + ) + .err() + .unwrap() + .to_string(), + "package 'test@1.2.3' did not have a bin entry for 'path' in its package.json" + ); + } + #[test] fn test_parse_package_name() { let dummy_referrer = Url::parse("http://example.com").unwrap(); diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 3b01da0a31..c946cdcdf4 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.86.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index 123d35acf0..dded1b3859 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -9,16 +9,14 @@ pub use webpki_roots; use deno_core::anyhow::anyhow; use deno_core::error::custom_error; use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; use rustls::client::HandshakeSignatureValid; use rustls::client::ServerCertVerified; use rustls::client::ServerCertVerifier; -use rustls::client::StoresClientSessions; use rustls::client::WebPkiVerifier; -use rustls::internal::msgs::handshake::DigitallySignedStruct; use rustls::Certificate; use rustls::ClientConfig; +use rustls::DigitallySignedStruct; use rustls::Error; use rustls::PrivateKey; use rustls::RootCertStore; @@ -27,13 +25,20 @@ use rustls_pemfile::certs; use rustls_pemfile::pkcs8_private_keys; use rustls_pemfile::rsa_private_keys; use serde::Deserialize; -use std::collections::HashMap; use std::io::BufRead; use std::io::BufReader; use std::io::Cursor; use std::sync::Arc; use std::time::SystemTime; +/// Lazily resolves the root cert store. 
+/// +/// This was done because the root cert store is not needed in all cases +/// and takes a bit of time to initialize. +pub trait RootCertStoreProvider: Send + Sync { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError>; +} + // This extension has no runtime apis, it only exports some shared native functions. deno_core::extension!(deno_tls); @@ -137,26 +142,6 @@ pub struct BasicAuth { pub password: String, } -#[derive(Default)] -struct ClientSessionMemoryCache(Mutex, Vec>>); - -impl StoresClientSessions for ClientSessionMemoryCache { - fn get(&self, key: &[u8]) -> Option> { - self.0.lock().get(key).cloned() - } - - fn put(&self, key: Vec, value: Vec) -> bool { - let mut sessions = self.0.lock(); - // TODO(bnoordhuis) Evict sessions LRU-style instead of arbitrarily. - while sessions.len() >= 1024 { - let key = sessions.keys().next().unwrap().clone(); - sessions.remove(&key); - } - sessions.insert(key, value); - true - } -} - pub fn create_default_root_cert_store() -> RootCertStore { let mut root_cert_store = RootCertStore::empty(); // TODO(@justinmchase): Consider also loading the system keychain here @@ -285,7 +270,7 @@ fn filter_invalid_encoding_err( to_be_filtered: Result, ) -> Result { match to_be_filtered { - Err(Error::InvalidCertificateEncoding) => { + Err(Error::InvalidCertificate(rustls::CertificateError::BadEncoding)) => { Ok(HandshakeSignatureValid::assertion()) } res => res, diff --git a/ext/url/00_url.js b/ext/url/00_url.js index d76366cfae..49dd2c46f3 100644 --- a/ext/url/00_url.js +++ b/ext/url/00_url.js @@ -17,13 +17,14 @@ const { ArrayPrototypeSort, ArrayPrototypeSplice, ObjectKeys, - Uint32Array, SafeArrayIterator, StringPrototypeSlice, + StringPrototypeStartsWith, Symbol, SymbolFor, SymbolIterator, TypeError, + Uint32Array, } = primordials; const _list = Symbol("list"); @@ -104,7 +105,8 @@ class URLSearchParams { init = webidl.converters ["sequence> or record or USVString"]( init, - { prefix, context: "Argument 1" }, + prefix, + 
"Argument 1", ); this[webidl.brand] = webidl.brand; if (!init) { @@ -147,6 +149,7 @@ class URLSearchParams { if (url === null) { return; } + // deno-lint-ignore prefer-primordials url[_updateUrlSearch](this.toString()); } @@ -158,14 +161,8 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'append' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.USVString(value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); + value = webidl.converters.USVString(value, prefix, "Argument 2"); ArrayPrototypePush(this[_list], [name, value]); this.#updateUrlSearch(); } @@ -177,10 +174,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'append' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const list = this[_list]; let i = 0; while (i < list.length) { @@ -201,10 +195,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'getAll' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const values = []; const entries = this[_list]; for (let i = 0; i < entries.length; ++i) { @@ -224,10 +215,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'get' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - 
context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const entries = this[_list]; for (let i = 0; i < entries.length; ++i) { const entry = entries[i]; @@ -246,10 +234,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'has' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); return ArrayPrototypeSome(this[_list], (entry) => entry[0] === name); } @@ -261,14 +246,8 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'set' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.USVString(value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); + value = webidl.converters.USVString(value, prefix, "Argument 2"); const list = this[_list]; @@ -371,12 +350,10 @@ class URL { */ constructor(url, base = undefined) { const prefix = "Failed to construct 'URL'"; - url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); + webidl.requiredArguments(arguments.length, 1, prefix); + url = webidl.converters.DOMString(url, prefix, "Argument 1"); if (base !== undefined) { - base = webidl.converters.DOMString(base, { - prefix, - context: "Argument 2", - }); + base = webidl.converters.DOMString(base, prefix, "Argument 2"); } this[webidl.brand] = webidl.brand; const status = opUrlParse(url, base); @@ -390,12 +367,10 @@ class URL { */ static canParse(url, base = undefined) { const prefix = "Failed to call 'URL.canParse'"; - url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); + 
webidl.requiredArguments(arguments.length, 1, prefix); + url = webidl.converters.DOMString(url, prefix, "Argument 1"); if (base !== undefined) { - base = webidl.converters.DOMString(base, { - prefix, - context: "Argument 2", - }); + base = webidl.converters.DOMString(base, prefix, "Argument 2"); } const status = opUrlParse(url, base); return status === 0 || status === 1; @@ -448,7 +423,10 @@ class URL { #hasAuthority() { // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L824 - return this.#serialization.slice(this.#schemeEnd).startsWith("://"); + return StringPrototypeStartsWith( + StringPrototypeSlice(this.#serialization, this.#schemeEnd), + "://", + ); } /** @return {string} */ @@ -456,7 +434,7 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L263 return this.#fragmentStart - ? trim(this.#serialization.slice(this.#fragmentStart)) + ? 
trim(StringPrototypeSlice(this.#serialization, this.#fragmentStart)) : ""; } @@ -465,10 +443,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'hash' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -485,7 +460,11 @@ class URL { get host() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L101 - return this.#serialization.slice(this.#hostStart, this.#pathStart); + return StringPrototypeSlice( + this.#serialization, + this.#hostStart, + this.#pathStart, + ); } /** @param {string} value */ @@ -493,10 +472,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'host' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -513,7 +489,11 @@ class URL { get hostname() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L988 - return this.#serialization.slice(this.#hostStart, this.#hostEnd); + return StringPrototypeSlice( + this.#serialization, + this.#hostStart, + this.#hostEnd, + ); } /** @param {string} value */ @@ -521,10 +501,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'hostname' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { 
this.#serialization = opUrlReparse( this.#serialization, @@ -548,10 +525,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'href' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); const status = opUrlParse(value); this.#serialization = getSerialization(status, value); this.#updateComponents(); @@ -562,7 +536,11 @@ class URL { get origin() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/origin.rs#L14 - const scheme = this.#serialization.slice(0, this.#schemeEnd); + const scheme = StringPrototypeSlice( + this.#serialization, + 0, + this.#schemeEnd, + ); if ( scheme === "http" || scheme === "https" || scheme === "ftp" || scheme === "ws" || scheme === "wss" @@ -591,7 +569,8 @@ class URL { this.#usernameEnd !== this.#serialization.length && this.#serialization[this.#usernameEnd] === ":" ) { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#usernameEnd + 1, this.#hostStart - 1, ); @@ -604,10 +583,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'password' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -625,11 +601,15 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L1203 if (!this.#queryStart && !this.#fragmentStart) { - return this.#serialization.slice(this.#pathStart); + return StringPrototypeSlice(this.#serialization, this.#pathStart); } const 
nextComponentStart = this.#queryStart || this.#fragmentStart; - return this.#serialization.slice(this.#pathStart, nextComponentStart); + return StringPrototypeSlice( + this.#serialization, + this.#pathStart, + nextComponentStart, + ); } /** @param {string} value */ @@ -637,10 +617,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'pathname' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -658,9 +635,14 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L196 if (this.#port === NO_PORT) { - return this.#serialization.slice(this.#hostEnd, this.#pathStart); + return StringPrototypeSlice( + this.#serialization, + this.#hostEnd, + this.#pathStart, + ); } else { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#hostEnd + 1, /* : */ this.#pathStart, ); @@ -672,10 +654,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'port' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -692,7 +671,11 @@ class URL { get protocol() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L56 - return this.#serialization.slice(0, this.#schemeEnd + 1 /* : */); + return StringPrototypeSlice( + this.#serialization, + 0, + this.#schemeEnd + 1, /* : */ + ); } /** @param {string} value */ @@ -700,10 +683,7 @@ 
class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'protocol' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -723,7 +703,9 @@ class URL { const afterPath = this.#queryStart || this.#fragmentStart || this.#serialization.length; const afterQuery = this.#fragmentStart || this.#serialization.length; - return trim(this.#serialization.slice(afterPath, afterQuery)); + return trim( + StringPrototypeSlice(this.#serialization, afterPath, afterQuery), + ); } /** @param {string} value */ @@ -731,10 +713,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'search' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -757,7 +736,8 @@ class URL { this.#hasAuthority() && this.#usernameEnd > this.#schemeEnd + schemeSeperatorLen ) { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#schemeEnd + schemeSeperatorLen, this.#usernameEnd, ); @@ -771,10 +751,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'username' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -825,15 +802,25 @@ function parseUrlEncoded(bytes) { webidl .converters[ "sequence> or record or USVString" - ] = (V, opts) => { + ] = (V, prefix, context, opts) => { // 
Union for (sequence> or record or USVString) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence>"](V, opts); + return webidl.converters["sequence>"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters["record"](V, opts); + return webidl.converters["record"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters.USVString(V, opts); + return webidl.converters.USVString(V, prefix, context, opts); }; export { diff --git a/ext/url/01_urlpattern.js b/ext/url/01_urlpattern.js index 1ed02f8d38..04bb50fd7f 100644 --- a/ext/url/01_urlpattern.js +++ b/ext/url/01_urlpattern.js @@ -13,8 +13,9 @@ import * as webidl from "ext:deno_webidl/00_webidl.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeMap, - ObjectKeys, + ArrayPrototypePop, ObjectFromEntries, + ObjectKeys, RegExpPrototypeExec, RegExpPrototypeTest, SafeRegExp, @@ -56,15 +57,9 @@ class URLPattern { this[webidl.brand] = webidl.brand; const prefix = "Failed to construct 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const components = ops.op_urlpattern_parse(input, baseURL); @@ -134,15 +129,9 @@ class URLPattern { webidl.assertBranded(this, URLPatternPrototype); const prefix = "Failed to execute 'test' on 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = 
webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const res = ops.op_urlpattern_process_match_input( @@ -175,15 +164,9 @@ class URLPattern { webidl.assertBranded(this, URLPatternPrototype); const prefix = "Failed to execute 'exec' on 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const res = ops.op_urlpattern_process_match_input( @@ -196,7 +179,7 @@ class URLPattern { const { 0: values, 1: inputs } = res; if (inputs[1] === null) { - inputs.pop(); + ArrayPrototypePop(inputs); } /** @type {URLPatternResult} */ @@ -258,12 +241,12 @@ webidl.converters.URLPatternInit = webidl { key: "baseURL", converter: webidl.converters.USVString }, ]); -webidl.converters["URLPatternInput"] = (V, opts) => { +webidl.converters["URLPatternInput"] = (V, prefix, context, opts) => { // Union for (URLPatternInit or USVString) if (typeof V == "object") { - return webidl.converters.URLPatternInit(V, opts); + return webidl.converters.URLPatternInit(V, prefix, context, opts); } - return webidl.converters.USVString(V, opts); + return webidl.converters.USVString(V, prefix, context, opts); }; export { URLPattern }; diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 9ded3c0ad7..ac849fcaa9 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.99.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/benches/url_ops.rs b/ext/url/benches/url_ops.rs index 2e56665521..835dfea2ec 100644 --- 
a/ext/url/benches/url_ops.rs +++ b/ext/url/benches/url_ops.rs @@ -22,6 +22,7 @@ fn setup() -> Vec { "#, ), }]) + .esm_entry_point("ext:bench_setup/setup") .build(), ] } diff --git a/ext/url/lib.deno_url.d.ts b/ext/url/lib.deno_url.d.ts index 1d5f840199..9a8c155d99 100644 --- a/ext/url/lib.deno_url.d.ts +++ b/ext/url/lib.deno_url.d.ts @@ -206,7 +206,7 @@ declare type URLPatternInput = string | URLPatternInit; /** @category Web APIs */ declare interface URLPatternComponentResult { input: string; - groups: Record; + groups: Record; } /** `URLPatternResult` is the object returned from `URLPattern.exec`. diff --git a/ext/web/01_dom_exception.js b/ext/web/01_dom_exception.js index ae3dcfd2e2..31d2cdc29f 100644 --- a/ext/web/01_dom_exception.js +++ b/ext/web/01_dom_exception.js @@ -21,7 +21,7 @@ const { SymbolFor, } = primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; const _name = Symbol("name"); const _message = Symbol("message"); @@ -94,14 +94,16 @@ class DOMException { // https://webidl.spec.whatwg.org/#dom-domexception-domexception constructor(message = "", name = "Error") { - message = webidl.converters.DOMString(message, { - prefix: "Failed to construct 'DOMException'", - context: "Argument 1", - }); - name = webidl.converters.DOMString(name, { - prefix: "Failed to construct 'DOMException'", - context: "Argument 2", - }); + message = webidl.converters.DOMString( + message, + "Failed to construct 'DOMException'", + "Argument 1", + ); + name = webidl.converters.DOMString( + name, + "Failed to construct 'DOMException'", + "Argument 2", + ); const code = nameToCodeMapping[name] ?? 
0; this[_message] = message; diff --git a/ext/web/01_mimesniff.js b/ext/web/01_mimesniff.js index ad89f33cd7..7d402e0801 100644 --- a/ext/web/01_mimesniff.js +++ b/ext/web/01_mimesniff.js @@ -13,6 +13,7 @@ const { MapPrototypeHas, MapPrototypeSet, RegExpPrototypeTest, + RegExpPrototypeExec, SafeMap, SafeMapIterator, StringPrototypeReplaceAll, @@ -197,7 +198,7 @@ function serializeMimeType(mimeType) { for (const param of new SafeMapIterator(mimeType.parameters)) { serialization += `;${param[0]}=`; let value = param[1]; - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, value)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, value) === null) { value = StringPrototypeReplaceAll(value, "\\", "\\\\"); value = StringPrototypeReplaceAll(value, '"', '\\"'); value = `"${value}"`; diff --git a/ext/web/02_event.js b/ext/web/02_event.js index 34b3502a7c..142fa66b2b 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -9,7 +9,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeFilter, @@ -122,7 +122,7 @@ const isTrusted = ObjectGetOwnPropertyDescriptor({ }, }, "isTrusted").get; -const eventInitConverter = webidl.createDictionaryConverter("EventInit", [{ +webidl.converters.EventInit = webidl.createDictionaryConverter("EventInit", [{ key: "bubbles", defaultValue: false, converter: webidl.converters.boolean, @@ -167,14 +167,16 @@ class Event { 1, "Failed to construct 'Event'", ); - type = webidl.converters.DOMString(type, { - prefix: "Failed to construct 'Event'", - context: "Argument 1", - }); - const eventInit = eventInitConverter(eventInitDict, { - prefix: "Failed to construct 'Event'", - context: "Argument 
2", - }); + type = webidl.converters.DOMString( + type, + "Failed to construct 'Event'", + "Argument 1", + ); + const eventInit = webidl.converters.EventInit( + eventInitDict, + "Failed to construct 'Event'", + "Argument 2", + ); this[_attributes] = { type, ...eventInit, @@ -482,7 +484,7 @@ function getRoot(eventTarget) { function isNode( eventTarget, ) { - return Boolean(eventTarget && ReflectHas(eventTarget, "nodeType")); + return eventTarget?.nodeType !== undefined; } // https://dom.spec.whatwg.org/#concept-shadow-including-inclusive-ancestor @@ -734,10 +736,15 @@ function innerInvokeEventListeners( return found; } - // Copy event listeners before iterating since the list can be modified during the iteration. - const handlers = ArrayPrototypeSlice(targetListeners[type]); + let handlers = targetListeners[type]; + const handlersLength = handlers.length; - for (let i = 0; i < handlers.length; i++) { + // Copy event listeners before iterating since the list can be modified during the iteration. 
+ if (handlersLength > 1) { + handlers = ArrayPrototypeSlice(targetListeners[type]); + } + + for (let i = 0; i < handlersLength; i++) { const listener = handlers[i]; let capture, once, passive; @@ -804,12 +811,19 @@ function innerInvokeEventListeners( * Ref: https://dom.spec.whatwg.org/#concept-event-listener-invoke */ function invokeEventListeners(tuple, eventImpl) { const path = getPath(eventImpl); - const tupleIndex = ArrayPrototypeIndexOf(path, tuple); - for (let i = tupleIndex; i >= 0; i--) { - const t = path[i]; + if (path.length === 1) { + const t = path[0]; if (t.target) { setTarget(eventImpl, t.target); - break; + } + } else { + const tupleIndex = ArrayPrototypeIndexOf(path, tuple); + for (let i = tupleIndex; i >= 0; i--) { + const t = path[i]; + if (t.target) { + setTarget(eventImpl, t.target); + break; + } } } @@ -936,13 +950,13 @@ function lazyAddEventListenerOptionsConverter() { ); } -webidl.converters.AddEventListenerOptions = (V, opts) => { +webidl.converters.AddEventListenerOptions = (V, prefix, context, opts) => { if (webidl.type(V) !== "Object" || V === null) { V = { capture: Boolean(V) }; } lazyAddEventListenerOptionsConverter(); - return addEventListenerOptionsConverter(V, opts); + return addEventListenerOptionsConverter(V, prefix, context, opts); }; class EventTarget { @@ -962,10 +976,11 @@ class EventTarget { webidl.requiredArguments(arguments.length, 2, prefix); - options = webidl.converters.AddEventListenerOptions(options, { + options = webidl.converters.AddEventListenerOptions( + options, prefix, - context: "Argument 3", - }); + "Argument 3", + ); if (callback === null) { return; @@ -1527,6 +1542,7 @@ export { CloseEvent, CustomEvent, defineEventHandler, + dispatch, ErrorEvent, Event, EventTarget, diff --git a/ext/web/02_timers.js b/ext/web/02_timers.js index 78cf06e445..19ebfaa0e4 100644 --- a/ext/web/02_timers.js +++ b/ext/web/02_timers.js @@ -1,5 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const ops = core.ops; const primordials = globalThis.__bootstrap.primordials; @@ -13,7 +15,6 @@ const { MapPrototypeSet, Uint8Array, Uint32Array, - // deno-lint-ignore camelcase NumberPOSITIVE_INFINITY, PromisePrototypeThen, SafeArrayIterator, @@ -26,6 +27,7 @@ const { import * as webidl from "ext:deno_webidl/00_webidl.js"; import { reportException } from "ext:deno_web/02_event.js"; import { assert } from "ext:deno_web/00_infra.js"; +const { op_sleep, op_void_async_deferred } = core.ensureFastOps(); const hrU8 = new Uint8Array(8); const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8)); @@ -94,6 +96,9 @@ function initializeTimer( args, repeat, prevId, + // TODO(bartlomieju): remove this option, once `nextTick` and `setImmediate` + // in Node compat are cleaned up + respectNesting = true, ) { // 2. If previousId was given, let id be previousId; otherwise, let // previousId be an implementation-defined integer than is greater than zero @@ -126,7 +131,7 @@ function initializeTimer( // The nesting level of 5 and minimum of 4 ms are spec-mandated magic // constants. if (timeout < 0) timeout = 0; - if (timerNestingLevel > 5 && timeout < 4) timeout = 4; + if (timerNestingLevel > 5 && timeout < 4 && respectNesting) timeout = 4; // 9. Let task be a task that runs the following steps: const task = { @@ -216,7 +221,16 @@ const scheduledTimers = { head: null, tail: null }; */ function runAfterTimeout(cb, millis, timerInfo) { const cancelRid = timerInfo.cancelRid; - const sleepPromise = core.opAsync2("op_sleep", millis, cancelRid); + let sleepPromise; + // If this timeout is scheduled for 0ms it means we want it to run at the + // end of the event loop turn. There's no point in setting up a Tokio timer, + // since its lowest resolution is 1ms. Firing of a "void async" op is better + // in this case, because the timer will take closer to 0ms instead of >1ms. 
+ if (millis === 0) { + sleepPromise = op_void_async_deferred(); + } else { + sleepPromise = op_sleep(millis, cancelRid); + } timerInfo.promiseId = sleepPromise[SymbolFor("Deno.core.internalPromiseId")]; if (!timerInfo.isRef) { core.unrefOp(timerInfo.promiseId); @@ -244,7 +258,8 @@ function runAfterTimeout(cb, millis, timerInfo) { PromisePrototypeThen( sleepPromise, (cancelled) => { - if (!cancelled) { + // "op_void_async_deferred" returns null + if (cancelled !== null && !cancelled) { // The timer was cancelled. removeFromScheduledTimers(timerObject); return; @@ -328,6 +343,18 @@ function setInterval(callback, timeout = 0, ...args) { return initializeTimer(callback, timeout, args, true); } +// TODO(bartlomieju): remove this option, once `nextTick` and `setImmediate` +// in Node compat are cleaned up +function setTimeoutUnclamped(callback, timeout = 0, ...args) { + checkThis(this); + if (typeof callback !== "function") { + callback = webidl.converters.DOMString(callback); + } + timeout = webidl.converters.long(timeout); + + return initializeTimer(callback, timeout, args, false, undefined, false); +} + function clearTimeout(id = 0) { checkThis(this); id = webidl.converters.long(id); @@ -369,5 +396,6 @@ export { refTimer, setInterval, setTimeout, + setTimeoutUnclamped, unrefTimer, }; diff --git a/ext/web/03_abort_signal.js b/ext/web/03_abort_signal.js index 2122d642ef..07a274dd27 100644 --- a/ext/web/03_abort_signal.js +++ b/ext/web/03_abort_signal.js @@ -46,9 +46,14 @@ class AbortSignal extends EventTarget { static timeout(millis) { const prefix = "Failed to call 'AbortSignal.timeout'"; webidl.requiredArguments(arguments.length, 1, prefix); - millis = webidl.converters["unsigned long long"](millis, { - enforceRange: true, - }); + millis = webidl.converters["unsigned long long"]( + millis, + prefix, + "Argument 1", + { + enforceRange: true, + }, + ); const signal = new AbortSignal(illegalConstructorKey); signal[timerId] = setTimeout( @@ -198,4 +203,5 @@ export { 
newSignal, remove, signalAbort, + timerId, }; diff --git a/ext/web/05_base64.js b/ext/web/05_base64.js index 05dd1431c2..3971b2e585 100644 --- a/ext/web/05_base64.js +++ b/ext/web/05_base64.js @@ -23,10 +23,7 @@ const { function atob(data) { const prefix = "Failed to execute 'atob'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.DOMString(data, { - prefix, - context: "Argument 1", - }); + data = webidl.converters.DOMString(data, prefix, "Argument 1"); try { return ops.op_base64_atob(data); } catch (e) { @@ -47,10 +44,7 @@ function atob(data) { function btoa(data) { const prefix = "Failed to execute 'btoa'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.DOMString(data, { - prefix, - context: "Argument 1", - }); + data = webidl.converters.DOMString(data, prefix, "Argument 1"); try { return ops.op_base64_btoa(data); } catch (e) { diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index c516063658..21207c3729 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -19,9 +19,10 @@ import { const primordials = globalThis.__bootstrap.primordials; const { ArrayBuffer, + ArrayBufferIsView, ArrayBufferPrototype, ArrayBufferPrototypeGetByteLength, - ArrayBufferIsView, + ArrayBufferPrototypeSlice, ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeShift, @@ -34,12 +35,12 @@ const { DataViewPrototypeGetByteOffset, Float32Array, Float64Array, - Int8Array, Int16Array, Int32Array, + Int8Array, + MathMin, NumberIsInteger, NumberIsNaN, - MathMin, ObjectCreate, ObjectDefineProperties, ObjectDefineProperty, @@ -52,14 +53,13 @@ const { PromisePrototypeThen, PromiseReject, PromiseResolve, - queueMicrotask, RangeError, ReflectHas, SafeFinalizationRegistry, SafePromiseAll, SafeWeakMap, // TODO(lucacasonato): add SharedArrayBuffer to primordials - // SharedArrayBufferPrototype + // SharedArrayBufferPrototype, Symbol, SymbolAsyncIterator, SymbolFor, @@ -70,15 +70,16 @@ const { 
TypedArrayPrototypeGetSymbolToStringTag, TypedArrayPrototypeSet, TypedArrayPrototypeSlice, - Uint8Array, Uint16Array, Uint32Array, + Uint8Array, Uint8ClampedArray, WeakMapPrototypeGet, WeakMapPrototypeHas, WeakMapPrototypeSet, + queueMicrotask, } = primordials; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { assert, AssertionError } from "ext:deno_web/00_infra.js"; /** @template T */ @@ -508,7 +509,7 @@ function extractSizeAlgorithm(strategy) { [chunk], undefined, webidl.converters["unrestricted double"], - { prefix: "Failed to call `sizeAlgorithm`" }, + "Failed to call `sizeAlgorithm`", ); } @@ -1252,7 +1253,17 @@ function readableByteStreamControllerEnqueueClonedChunkToQueue( ) { let cloneResult; try { - cloneResult = buffer.slice(byteOffset, byteOffset + byteLength); + if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, buffer)) { + cloneResult = ArrayBufferPrototypeSlice( + buffer, + byteOffset, + byteOffset + byteLength, + ); + } else { + // TODO(lucacasonato): add SharedArrayBuffer to primordials + // deno-lint-ignore prefer-primordials + cloneResult = buffer.slice(byteOffset, byteOffset + byteLength); + } } catch (e) { readableByteStreamControllerError(controller, e); } @@ -1864,7 +1875,7 @@ function readableByteStreamControllerPullInto( return; } } - controller[_pendingPullIntos].push(pullIntoDescriptor); + ArrayPrototypePush(controller[_pendingPullIntos], pullIntoDescriptor); readableStreamAddReadIntoRequest(stream, readIntoRequest); readableByteStreamControllerCallPullIfNeeded(controller); } @@ -3315,10 +3326,7 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'ReadableByteStreamController'", - }, + "Failed to call 'startAlgorithm' on 'ReadableByteStreamController'", ); } if (underlyingSourceDict.pull !== undefined) 
{ @@ -3328,11 +3336,8 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'pullAlgorithm' on 'ReadableByteStreamController'", - returnsPromise: true, - }, + "Failed to call 'pullAlgorithm' on 'ReadableByteStreamController'", + true, ); } if (underlyingSourceDict.cancel !== undefined) { @@ -3342,11 +3347,8 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [reason], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'cancelAlgorithm' on 'ReadableByteStreamController'", - returnsPromise: true, - }, + "Failed to call 'cancelAlgorithm' on 'ReadableByteStreamController'", + true, ); } const autoAllocateChunkSize = underlyingSourceDict["autoAllocateChunkSize"]; @@ -3437,10 +3439,7 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'ReadableStreamDefaultController'", - }, + "Failed to call 'startAlgorithm' on 'ReadableStreamDefaultController'", ); } if (underlyingSourceDict.pull !== undefined) { @@ -3450,11 +3449,8 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'pullAlgorithm' on 'ReadableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'pullAlgorithm' on 'ReadableStreamDefaultController'", + true, ); } if (underlyingSourceDict.cancel !== undefined) { @@ -3464,11 +3460,8 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( [reason], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'cancelAlgorithm' on 'ReadableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'cancelAlgorithm' on 'ReadableStreamDefaultController'", + true, ); } setUpReadableStreamDefaultController( @@ -3569,11 +3562,8 @@ 
function setUpTransformStreamDefaultControllerFromTransformer( [chunk, controller], transformer, webidl.converters["Promise"], - { - prefix: - "Failed to call 'transformAlgorithm' on 'TransformStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'transformAlgorithm' on 'TransformStreamDefaultController'", + true, ); } if (transformerDict.flush !== undefined) { @@ -3583,11 +3573,8 @@ function setUpTransformStreamDefaultControllerFromTransformer( [controller], transformer, webidl.converters["Promise"], - { - prefix: - "Failed to call 'flushAlgorithm' on 'TransformStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'flushAlgorithm' on 'TransformStreamDefaultController'", + true, ); } setUpTransformStreamDefaultController( @@ -3679,10 +3666,7 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [controller], underlyingSink, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'WritableStreamDefaultController'", - }, + "Failed to call 'startAlgorithm' on 'WritableStreamDefaultController'", ); } if (underlyingSinkDict.write !== undefined) { @@ -3692,11 +3676,8 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [chunk, controller], underlyingSink, webidl.converters["Promise"], - { - prefix: - "Failed to call 'writeAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'writeAlgorithm' on 'WritableStreamDefaultController'", + true, ); } if (underlyingSinkDict.close !== undefined) { @@ -3706,11 +3687,8 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [], underlyingSink, webidl.converters["Promise"], - { - prefix: - "Failed to call 'closeAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'closeAlgorithm' on 'WritableStreamDefaultController'", + true, ); } if (underlyingSinkDict.abort !== undefined) { @@ -3720,11 +3698,8 @@ function 
setUpWritableStreamDefaultControllerFromUnderlyingSink( [reason], underlyingSink, webidl.converters["Promise"], - { - prefix: - "Failed to call 'abortAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'abortAlgorithm' on 'WritableStreamDefaultController'", + true, ); } setUpWritableStreamDefaultController( @@ -4517,7 +4492,7 @@ function writableStreamMarkCloseRequestInFlight(stream) { function writableStreamMarkFirstWriteRequestInFlight(stream) { assert(stream[_inFlightWriteRequest] === undefined); assert(stream[_writeRequests].length); - const writeRequest = stream[_writeRequests].shift(); + const writeRequest = ArrayPrototypeShift(stream[_writeRequests]); stream[_inFlightWriteRequest] = writeRequest; } @@ -4702,10 +4677,7 @@ class ByteLengthQueuingStrategy { constructor(init) { const prefix = "Failed to construct 'ByteLengthQueuingStrategy'"; webidl.requiredArguments(arguments.length, 1, prefix); - init = webidl.converters.QueuingStrategyInit(init, { - prefix, - context: "Argument 1", - }); + init = webidl.converters.QueuingStrategyInit(init, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; this[_globalObject] = globalThis; this[_highWaterMark] = init.highWaterMark; @@ -4759,10 +4731,7 @@ class CountQueuingStrategy { constructor(init) { const prefix = "Failed to construct 'CountQueuingStrategy'"; webidl.requiredArguments(arguments.length, 1, prefix); - init = webidl.converters.QueuingStrategyInit(init, { - prefix, - context: "Argument 1", - }); + init = webidl.converters.QueuingStrategyInit(init, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; this[_globalObject] = globalThis; this[_highWaterMark] = init.highWaterMark; @@ -4839,18 +4808,20 @@ class ReadableStream { constructor(underlyingSource = undefined, strategy = undefined) { const prefix = "Failed to construct 'ReadableStream'"; if (underlyingSource !== undefined) { - underlyingSource = webidl.converters.object(underlyingSource, { + 
underlyingSource = webidl.converters.object( + underlyingSource, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } else { underlyingSource = null; } if (strategy !== undefined) { - strategy = webidl.converters.QueuingStrategy(strategy, { + strategy = webidl.converters.QueuingStrategy( + strategy, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } else { strategy = {}; } @@ -4859,7 +4830,8 @@ class ReadableStream { if (underlyingSource !== undefined) { underlyingSourceDict = webidl.converters.UnderlyingSource( underlyingSource, - { prefix, context: "underlyingSource" }, + prefix, + "underlyingSource", ); } initializeReadableStream(this); @@ -4926,10 +4898,11 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'getReader' on 'ReadableStream'"; if (options !== undefined) { - options = webidl.converters.ReadableStreamGetReaderOptions(options, { + options = webidl.converters.ReadableStreamGetReaderOptions( + options, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } else { options = {}; } @@ -4951,14 +4924,16 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'pipeThrough' on 'ReadableStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - transform = webidl.converters.ReadableWritablePair(transform, { + transform = webidl.converters.ReadableWritablePair( + transform, prefix, - context: "Argument 1", - }); - options = webidl.converters.StreamPipeOptions(options, { + "Argument 1", + ); + options = webidl.converters.StreamPipeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const { readable, writable } = transform; const { preventClose, preventAbort, preventCancel, signal } = options; if (isReadableStreamLocked(this)) { @@ -4989,14 +4964,16 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'pipeTo' on 
'ReadableStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - destination = webidl.converters.WritableStream(destination, { + destination = webidl.converters.WritableStream( + destination, prefix, - context: "Argument 1", - }); - options = webidl.converters.StreamPipeOptions(options, { + "Argument 1", + ); + options = webidl.converters.StreamPipeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } catch (err) { return PromiseReject(err); } @@ -5035,10 +5012,11 @@ class ReadableStream { values(options = {}) { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'values' on 'ReadableStream'"; - options = webidl.converters.ReadableStreamIteratorOptions(options, { + options = webidl.converters.ReadableStreamIteratorOptions( + options, prefix, - context: "Argument 1", - }); + "Argument 1", + ); /** @type {AsyncIterableIterator} */ const iterator = ObjectCreate(readableStreamAsyncIteratorPrototype); const reader = acquireReadableStreamDefaultReader(this); @@ -5080,10 +5058,7 @@ class ReadableStreamDefaultReader { constructor(stream) { const prefix = "Failed to construct 'ReadableStreamDefaultReader'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.ReadableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.ReadableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpReadableStreamDefaultReader(this, stream); } @@ -5180,10 +5155,7 @@ class ReadableStreamBYOBReader { constructor(stream) { const prefix = "Failed to construct 'ReadableStreamBYOBReader'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.ReadableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.ReadableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpReadableStreamBYOBReader(this, stream); } @@ -5196,10 +5168,7 @@ class 
ReadableStreamBYOBReader { try { webidl.assertBranded(this, ReadableStreamBYOBReaderPrototype); const prefix = "Failed to execute 'read' on 'ReadableStreamBYOBReader'"; - view = webidl.converters.ArrayBufferView(view, { - prefix, - context: "Argument 1", - }); + view = webidl.converters.ArrayBufferView(view, prefix, "Argument 1"); } catch (err) { return PromiseReject(err); } @@ -5322,11 +5291,14 @@ class ReadableStreamBYOBRequest { webidl.assertBranded(this, ReadableStreamBYOBRequestPrototype); const prefix = "Failed to execute 'respond' on 'ReadableStreamBYOBRequest'"; webidl.requiredArguments(arguments.length, 1, prefix); - bytesWritten = webidl.converters["unsigned long long"](bytesWritten, { - enforceRange: true, + bytesWritten = webidl.converters["unsigned long long"]( + bytesWritten, prefix, - context: "Argument 1", - }); + "Argument 1", + { + enforceRange: true, + }, + ); if (this[_controller] === undefined) { throw new TypeError("This BYOB request has been invalidated"); @@ -5355,10 +5327,7 @@ class ReadableStreamBYOBRequest { const prefix = "Failed to execute 'respondWithNewView' on 'ReadableStreamBYOBRequest'"; webidl.requiredArguments(arguments.length, 1, prefix); - view = webidl.converters.ArrayBufferView(view, { - prefix, - context: "Argument 1", - }); + view = webidl.converters.ArrayBufferView(view, prefix, "Argument 1"); if (this[_controller] === undefined) { throw new TypeError("This BYOB request has been invalidated"); @@ -5450,10 +5419,7 @@ class ReadableByteStreamController { "Failed to execute 'enqueue' on 'ReadableByteStreamController'"; webidl.requiredArguments(arguments.length, 1, prefix); const arg1 = "Argument 1"; - chunk = webidl.converters.ArrayBufferView(chunk, { - prefix, - context: arg1, - }); + chunk = webidl.converters.ArrayBufferView(chunk, prefix, arg1); let buffer, byteLength; if (TypedArrayPrototypeGetSymbolToStringTag(chunk) === undefined) { buffer = DataViewPrototypeGetBuffer(/** @type {DataView} */ (chunk)); @@ -5467,16 
+5433,19 @@ class ReadableByteStreamController { ); } if (byteLength === 0) { - throw webidl.makeException(TypeError, "length must be non-zero", { + throw webidl.makeException( + TypeError, + "length must be non-zero", prefix, - context: arg1, - }); + arg1, + ); } if (getArrayBufferByteLength(buffer) === 0) { throw webidl.makeException( TypeError, "buffer length must be non-zero", - { prefix, context: arg1 }, + prefix, + arg1, ); } if (this[_closeRequested] === true) { @@ -5733,27 +5702,27 @@ class TransformStream { ) { const prefix = "Failed to construct 'TransformStream'"; if (transformer !== undefined) { - transformer = webidl.converters.object(transformer, { - prefix, - context: "Argument 1", - }); + transformer = webidl.converters.object(transformer, prefix, "Argument 1"); } - writableStrategy = webidl.converters.QueuingStrategy(writableStrategy, { + writableStrategy = webidl.converters.QueuingStrategy( + writableStrategy, prefix, - context: "Argument 2", - }); - readableStrategy = webidl.converters.QueuingStrategy(readableStrategy, { + "Argument 2", + ); + readableStrategy = webidl.converters.QueuingStrategy( + readableStrategy, prefix, - context: "Argument 2", - }); + "Argument 3", + ); this[webidl.brand] = webidl.brand; if (transformer === undefined) { transformer = null; } - const transformerDict = webidl.converters.Transformer(transformer, { + const transformerDict = webidl.converters.Transformer( + transformer, prefix, - context: "transformer", - }); + "transformer", + ); if (transformerDict.readableType !== undefined) { throw new RangeError( `${prefix}: readableType transformers not supported.`, @@ -5790,10 +5759,7 @@ class TransformStream { [this[_controller]], transformer, webidl.converters.any, - { - prefix: - "Failed to call 'start' on 'TransformStreamDefaultController'", - }, + "Failed to call 'start' on 'TransformStreamDefaultController'", ), ); } else { @@ -5923,22 +5889,25 @@ class WritableStream { constructor(underlyingSink = undefined, 
strategy = {}) { const prefix = "Failed to construct 'WritableStream'"; if (underlyingSink !== undefined) { - underlyingSink = webidl.converters.object(underlyingSink, { + underlyingSink = webidl.converters.object( + underlyingSink, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } - strategy = webidl.converters.QueuingStrategy(strategy, { + strategy = webidl.converters.QueuingStrategy( + strategy, prefix, - context: "Argument 2", - }); + "Argument 2", + ); this[webidl.brand] = webidl.brand; if (underlyingSink === undefined) { underlyingSink = null; } const underlyingSinkDict = webidl.converters.UnderlyingSink( underlyingSink, - { prefix, context: "underlyingSink" }, + prefix, + "underlyingSink", ); if (underlyingSinkDict.type != null) { throw new RangeError( @@ -6039,10 +6008,7 @@ class WritableStreamDefaultWriter { constructor(stream) { const prefix = "Failed to construct 'WritableStreamDefaultWriter'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.WritableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.WritableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpWritableStreamDefaultWriter(this, stream); } @@ -6287,8 +6253,8 @@ webidl.converters.UnderlyingSource = webidl }, { key: "autoAllocateChunkSize", - converter: (V, opts) => - webidl.converters["unsigned long long"](V, { + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long long"](V, prefix, context, { ...opts, enforceRange: true, }), diff --git a/ext/web/08_text_encoding.js b/ext/web/08_text_encoding.js index 2e19c3d1ff..e7e535f906 100644 --- a/ext/web/08_text_encoding.js +++ b/ext/web/08_text_encoding.js @@ -53,14 +53,12 @@ class TextDecoder { */ constructor(label = "utf-8", options = {}) { const prefix = "Failed to construct 'TextDecoder'"; - label = webidl.converters.DOMString(label, { + label = webidl.converters.DOMString(label, prefix, "Argument 1"); + options = 
webidl.converters.TextDecoderOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.TextDecoderOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); const encoding = ops.op_encoding_normalize_label(label); this.#encoding = encoding; this.#fatal = options.fatal; @@ -95,18 +93,17 @@ class TextDecoder { webidl.assertBranded(this, TextDecoderPrototype); const prefix = "Failed to execute 'decode' on 'TextDecoder'"; if (input !== undefined) { - input = webidl.converters.BufferSource(input, { - prefix, - context: "Argument 1", + input = webidl.converters.BufferSource(input, prefix, "Argument 1", { allowShared: true, }); } let stream = false; if (options !== undefined) { - options = webidl.converters.TextDecodeOptions(options, { + options = webidl.converters.TextDecodeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); stream = options.stream; } @@ -215,13 +212,13 @@ class TextEncoder { */ encode(input = "") { webidl.assertBranded(this, TextEncoderPrototype); - const prefix = "Failed to execute 'encode' on 'TextEncoder'"; // The WebIDL type of `input` is `USVString`, but `core.encode` already // converts lone surrogates to the replacement character. - input = webidl.converters.DOMString(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.DOMString( + input, + "Failed to execute 'encode' on 'TextEncoder'", + "Argument 1", + ); return core.encode(input); } @@ -235,15 +232,15 @@ class TextEncoder { const prefix = "Failed to execute 'encodeInto' on 'TextEncoder'"; // The WebIDL type of `source` is `USVString`, but the ops bindings // already convert lone surrogates to the replacement character. 
- source = webidl.converters.DOMString(source, { + source = webidl.converters.DOMString(source, prefix, "Argument 1"); + destination = webidl.converters.Uint8Array( + destination, prefix, - context: "Argument 1", - }); - destination = webidl.converters.Uint8Array(destination, { - prefix, - context: "Argument 2", - allowShared: true, - }); + "Argument 2", + { + allowShared: true, + }, + ); ops.op_encoding_encode_into(source, destination, encodeIntoBuf); return { read: encodeIntoBuf[0], @@ -269,21 +266,19 @@ class TextDecoderStream { */ constructor(label = "utf-8", options = {}) { const prefix = "Failed to construct 'TextDecoderStream'"; - label = webidl.converters.DOMString(label, { + label = webidl.converters.DOMString(label, prefix, "Argument 1"); + options = webidl.converters.TextDecoderOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.TextDecoderOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); this.#decoder = new TextDecoder(label, options); this.#transform = new TransformStream({ // The transform and flush functions need access to TextDecoderStream's // `this`, so they are defined as functions rather than methods. 
transform: (chunk, controller) => { try { - chunk = webidl.converters.BufferSource(chunk, { + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk", { allowShared: true, }); const decoded = this.#decoder.decode(chunk, { stream: true }); diff --git a/ext/web/09_file.js b/ext/web/09_file.js index dccb206112..79a9c41b29 100644 --- a/ext/web/09_file.js +++ b/ext/web/09_file.js @@ -14,6 +14,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { ReadableStream } from "ext:deno_web/06_streams.js"; +import { URL } from "ext:deno_url/00_url.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, @@ -48,7 +49,7 @@ const { TypeError, Uint8Array, } = primordials; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; // TODO(lucacasonato): this needs to not be hardcoded and instead depend on // host os. 
@@ -218,14 +219,16 @@ class Blob { */ constructor(blobParts = [], options = {}) { const prefix = "Failed to construct 'Blob'"; - blobParts = webidl.converters["sequence"](blobParts, { - context: "Argument 1", + blobParts = webidl.converters["sequence"]( + blobParts, prefix, - }); - options = webidl.converters["BlobPropertyBag"](options, { - context: "Argument 2", + "Argument 1", + ); + options = webidl.converters["BlobPropertyBag"]( + options, prefix, - }); + "Argument 2", + ); this[webidl.brand] = webidl.brand; @@ -261,24 +264,21 @@ class Blob { webidl.assertBranded(this, BlobPrototype); const prefix = "Failed to execute 'slice' on 'Blob'"; if (start !== undefined) { - start = webidl.converters["long long"](start, { + start = webidl.converters["long long"](start, prefix, "Argument 1", { clamp: true, - context: "Argument 1", - prefix, }); } if (end !== undefined) { - end = webidl.converters["long long"](end, { + end = webidl.converters["long long"](end, prefix, "Argument 2", { clamp: true, - context: "Argument 2", - prefix, }); } if (contentType !== undefined) { - contentType = webidl.converters["DOMString"](contentType, { - context: "Argument 3", + contentType = webidl.converters["DOMString"]( + contentType, prefix, - }); + "Argument 3", + ); } // deno-lint-ignore no-this-alias @@ -326,6 +326,7 @@ class Blob { relativeStart -= size; relativeEnd -= size; } else { + // deno-lint-ignore prefer-primordials const chunk = part.slice( relativeStart, MathMin(part.size, relativeEnd), @@ -430,27 +431,27 @@ webidl.converters["Blob"] = webidl.createInterfaceConverter( "Blob", Blob.prototype, ); -webidl.converters["BlobPart"] = (V, opts) => { +webidl.converters["BlobPart"] = (V, prefix, context, opts) => { // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString) if (typeof V == "object") { if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, opts); + return webidl.converters["Blob"](V, prefix, context, opts); } if ( 
ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) || // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } // BlobPart is passed to processBlobParts after conversion, which calls core.encode() // on the string. // core.encode() is equivalent to USVString normalization. - return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["sequence"] = webidl.createSequenceConverter( webidl.converters["BlobPart"], @@ -494,18 +495,17 @@ class File extends Blob { const prefix = "Failed to construct 'File'"; webidl.requiredArguments(arguments.length, 2, prefix); - fileBits = webidl.converters["sequence"](fileBits, { - context: "Argument 1", + fileBits = webidl.converters["sequence"]( + fileBits, prefix, - }); - fileName = webidl.converters["USVString"](fileName, { - context: "Argument 2", + "Argument 1", + ); + fileName = webidl.converters["USVString"](fileName, prefix, "Argument 2"); + options = webidl.converters["FilePropertyBag"]( + options, prefix, - }); - options = webidl.converters["FilePropertyBag"](options, { - context: "Argument 3", - prefix, - }); + "Argument 3", + ); super(fileBits, options); @@ -655,6 +655,33 @@ function blobFromObjectUrl(url) { return blob; } +/** + * @param {Blob} blob + * @returns {string} + */ +function createObjectURL(blob) { + const prefix = "Failed to execute 'createObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, prefix); + blob = webidl.converters["Blob"](blob, prefix, "Argument 1"); + + return ops.op_blob_create_object_url(blob.type, getParts(blob)); +} + +/** + * @param {string} url + * @returns {void} + */ 
+function revokeObjectURL(url) { + const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, prefix); + url = webidl.converters["DOMString"](url, prefix, "Argument 1"); + + ops.op_blob_revoke_object_url(url); +} + +URL.createObjectURL = createObjectURL; +URL.revokeObjectURL = revokeObjectURL; + export { Blob, blobFromObjectUrl, diff --git a/ext/web/10_filereader.js b/ext/web/10_filereader.js index 897ac7e937..fe5dbb9150 100644 --- a/ext/web/10_filereader.js +++ b/ext/web/10_filereader.js @@ -383,10 +383,7 @@ class FileReader extends EventTarget { const prefix = "Failed to execute 'readAsText' on 'FileReader'"; webidl.requiredArguments(arguments.length, 1, prefix); if (encoding !== undefined) { - encoding = webidl.converters["DOMString"](encoding, { - prefix, - context: "Argument 2", - }); + encoding = webidl.converters["DOMString"](encoding, prefix, "Argument 2"); } // alias for readAsArrayBuffer this.#readOperation(blob, { kind: "Text", encoding }); diff --git a/ext/web/11_blob_url.js b/ext/web/11_blob_url.js deleted file mode 100644 index 7c4adf4e99..0000000000 --- a/ext/web/11_blob_url.js +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -// @ts-check -/// -/// -/// -/// -/// -/// -/// -/// -/// - -const core = globalThis.Deno.core; -const ops = core.ops; -import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { getParts } from "ext:deno_web/09_file.js"; -import { URL } from "ext:deno_url/00_url.js"; - -/** - * @param {Blob} blob - * @returns {string} - */ -function createObjectURL(blob) { - const prefix = "Failed to execute 'createObjectURL' on 'URL'"; - webidl.requiredArguments(arguments.length, 1, prefix); - blob = webidl.converters["Blob"](blob, { - context: "Argument 1", - prefix, - }); - - return ops.op_blob_create_object_url(blob.type, getParts(blob)); -} - -/** - * @param {string} url - * @returns {void} - */ -function revokeObjectURL(url) { - const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; - webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters["DOMString"](url, { - context: "Argument 1", - prefix, - }); - - ops.op_blob_revoke_object_url(url); -} - -URL.createObjectURL = createObjectURL; -URL.revokeObjectURL = revokeObjectURL; diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js index 39820a52f7..6d9a2c01f2 100644 --- a/ext/web/13_message_port.js +++ b/ext/web/13_message_port.js @@ -110,16 +110,15 @@ class MessagePort extends EventTarget { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; @@ -260,7 +259,7 @@ function serializeJsMessageData(data, transferables) { ); } j++; - transferredArrayBuffers.push(ab); + ArrayPrototypePush(transferredArrayBuffers, ab); } } @@ -330,10 +329,11 @@ webidl.converters.StructuredSerializeOptions = webidl function structuredClone(value, options) { const prefix = "Failed to execute 
'structuredClone'"; webidl.requiredArguments(arguments.length, 1, prefix); - options = webidl.converters.StructuredSerializeOptions(options, { + options = webidl.converters.StructuredSerializeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const messageData = serializeJsMessageData(value, options.transfer); return deserializeJsMessageData(messageData)[0]; } diff --git a/ext/web/14_compression.js b/ext/web/14_compression.js index 1731b3bf3b..2ba7746bd5 100644 --- a/ext/web/14_compression.js +++ b/ext/web/14_compression.js @@ -29,19 +29,13 @@ class CompressionStream { constructor(format) { const prefix = "Failed to construct 'CompressionStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - format = webidl.converters.CompressionFormat(format, { - prefix, - context: "Argument 1", - }); + format = webidl.converters.CompressionFormat(format, prefix, "Argument 1"); const rid = ops.op_compression_new(format, false); this.#transform = new TransformStream({ transform(chunk, controller) { - chunk = webidl.converters.BufferSource(chunk, { - prefix, - context: "chunk", - }); + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk"); const output = ops.op_compression_write( rid, chunk, @@ -77,19 +71,13 @@ class DecompressionStream { constructor(format) { const prefix = "Failed to construct 'DecompressionStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - format = webidl.converters.CompressionFormat(format, { - prefix, - context: "Argument 1", - }); + format = webidl.converters.CompressionFormat(format, prefix, "Argument 1"); const rid = ops.op_compression_new(format, true); this.#transform = new TransformStream({ transform(chunk, controller) { - chunk = webidl.converters.BufferSource(chunk, { - prefix, - context: "chunk", - }); + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk"); const output = ops.op_compression_write( rid, chunk, diff --git a/ext/web/15_performance.js b/ext/web/15_performance.js 
index 85990c954d..72f4d3a7e5 100644 --- a/ext/web/15_performance.js +++ b/ext/web/15_performance.js @@ -16,7 +16,7 @@ const { } = primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { structuredClone } from "ext:deno_web/02_structured_clone.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { EventTarget } from "ext:deno_web/02_event.js"; import { opNow } from "ext:deno_web/02_timers.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; @@ -41,11 +41,16 @@ webidl.converters["PerformanceMarkOptions"] = webidl ], ); -webidl.converters["DOMString or DOMHighResTimeStamp"] = (V, opts) => { +webidl.converters["DOMString or DOMHighResTimeStamp"] = ( + V, + prefix, + context, + opts, +) => { if (webidl.type(V) === "Number" && V !== null) { - return webidl.converters.DOMHighResTimeStamp(V, opts); + return webidl.converters.DOMHighResTimeStamp(V, prefix, context, opts); } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; webidl.converters["PerformanceMeasureOptions"] = webidl @@ -71,11 +76,21 @@ webidl.converters["PerformanceMeasureOptions"] = webidl ], ); -webidl.converters["DOMString or PerformanceMeasureOptions"] = (V, opts) => { +webidl.converters["DOMString or PerformanceMeasureOptions"] = ( + V, + prefix, + context, + opts, +) => { if (webidl.type(V) === "Object" && V !== null) { - return webidl.converters["PerformanceMeasureOptions"](V, opts); + return webidl.converters["PerformanceMeasureOptions"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; function setTimeOrigin(origin) { @@ -221,15 +236,13 @@ class PerformanceMark extends PerformanceEntry { const prefix = "Failed to construct 'PerformanceMark'"; webidl.requiredArguments(arguments.length, 1, 
prefix); - name = webidl.converters.DOMString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.DOMString(name, prefix, "Argument 1"); - options = webidl.converters.PerformanceMarkOptions(options, { + options = webidl.converters.PerformanceMarkOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const { detail = null, startTime = now() } = options; @@ -345,10 +358,11 @@ class Performance extends EventTarget { clearMarks(markName = undefined) { webidl.assertBranded(this, PerformancePrototype); if (markName !== undefined) { - markName = webidl.converters.DOMString(markName, { - prefix: "Failed to execute 'clearMarks' on 'Performance'", - context: "Argument 1", - }); + markName = webidl.converters.DOMString( + markName, + "Failed to execute 'clearMarks' on 'Performance'", + "Argument 1", + ); performanceEntries = ArrayPrototypeFilter( performanceEntries, @@ -365,10 +379,11 @@ class Performance extends EventTarget { clearMeasures(measureName = undefined) { webidl.assertBranded(this, PerformancePrototype); if (measureName !== undefined) { - measureName = webidl.converters.DOMString(measureName, { - prefix: "Failed to execute 'clearMeasures' on 'Performance'", - context: "Argument 1", - }); + measureName = webidl.converters.DOMString( + measureName, + "Failed to execute 'clearMeasures' on 'Performance'", + "Argument 1", + ); performanceEntries = ArrayPrototypeFilter( performanceEntries, @@ -396,16 +411,10 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.DOMString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.DOMString(name, prefix, "Argument 1"); if (type !== undefined) { - type = webidl.converters.DOMString(type, { - prefix, - context: "Argument 2", - }); + type = webidl.converters.DOMString(type, prefix, "Argument 2"); } return 
filterByNameType(name, type); @@ -416,10 +425,7 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - type = webidl.converters.DOMString(type, { - prefix, - context: "Argument 1", - }); + type = webidl.converters.DOMString(type, prefix, "Argument 1"); return filterByNameType(undefined, type); } @@ -432,15 +438,13 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'mark' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - markName = webidl.converters.DOMString(markName, { - prefix, - context: "Argument 1", - }); + markName = webidl.converters.DOMString(markName, prefix, "Argument 1"); - markOptions = webidl.converters.PerformanceMarkOptions(markOptions, { + markOptions = webidl.converters.PerformanceMarkOptions( + markOptions, prefix, - context: "Argument 2", - }); + "Argument 2", + ); // 3.1.1.1 If the global object is a Window object and markName uses the // same name as a read only attribute in the PerformanceTiming interface, @@ -460,22 +464,21 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'measure' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - measureName = webidl.converters.DOMString(measureName, { + measureName = webidl.converters.DOMString( + measureName, prefix, - context: "Argument 1", - }); + "Argument 1", + ); startOrMeasureOptions = webidl.converters - ["DOMString or PerformanceMeasureOptions"](startOrMeasureOptions, { + ["DOMString or PerformanceMeasureOptions"]( + startOrMeasureOptions, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (endMark !== undefined) { - endMark = webidl.converters.DOMString(endMark, { - prefix, - context: "Argument 3", - }); + endMark = webidl.converters.DOMString(endMark, prefix, "Argument 3"); } if ( diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index 4d5a412fbd..a244ac178c 
100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.130.0" +version = "0.138.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -22,6 +22,7 @@ flate2.workspace = true serde = "1.0.149" tokio.workspace = true uuid = { workspace = true, features = ["serde"] } +windows-sys.workspace = true [dev-dependencies] deno_bench_util.workspace = true diff --git a/ext/web/benches/encoding.rs b/ext/web/benches/encoding.rs index 74dd430fc0..5b147f00c8 100644 --- a/ext/web/benches/encoding.rs +++ b/ext/web/benches/encoding.rs @@ -45,6 +45,7 @@ fn setup() -> Vec { .state(|state| { state.put(Permissions {}); }) + .esm_entry_point("ext:bench_setup/setup") .build(), ] } diff --git a/ext/web/benches/timers_ops.rs b/ext/web/benches/timers_ops.rs index 62adaf3e37..084fac98ba 100644 --- a/ext/web/benches/timers_ops.rs +++ b/ext/web/benches/timers_ops.rs @@ -40,6 +40,7 @@ fn setup() -> Vec { .state(|state| { state.put(Permissions{}); }) + .esm_entry_point("ext:bench_setup/setup") .build() ] } diff --git a/ext/web/hr_timer_lock.rs b/ext/web/hr_timer_lock.rs new file mode 100644 index 0000000000..f1f588d6c8 --- /dev/null +++ b/ext/web/hr_timer_lock.rs @@ -0,0 +1,67 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +#[cfg(target_os = "windows")] +mod windows { + use std::marker::PhantomData; + use std::sync::atomic::AtomicU32; + + pub(crate) struct HrTimerLock { + pub(super) _unconstructable: PhantomData<()>, + } + + /// Decrease the reference count of the HR timer on drop. + impl Drop for HrTimerLock { + fn drop(&mut self) { + dec_ref(); + } + } + + /// Maintains the HR timer refcount. This should be more than sufficient as 2^32 timers would be + /// an impossible situation, and if it does somehow happen, the worst case is that we'll disable + /// the high-res timer when we shouldn't (and things would eventually return to proper operation). 
+ static TIMER_REFCOUNT: AtomicU32 = AtomicU32::new(0); + + pub(super) fn inc_ref() { + let old = TIMER_REFCOUNT.fetch_add(1, std::sync::atomic::Ordering::SeqCst); + // Overflow/underflow sanity check in debug mode + debug_assert!(old != u32::MAX); + if old == 0 { + lock_hr(); + } + } + + fn dec_ref() { + let old = TIMER_REFCOUNT.fetch_sub(1, std::sync::atomic::Ordering::SeqCst); + // Overflow/underflow sanity check in debug mode + debug_assert!(old != 0); + if old == 1 { + unlock_hr(); + } + } + + /// If the refcount is > 0, we ask Windows for a lower timer period once. While the underlying + /// Windows timeBeginPeriod/timeEndPeriod API can manage its own reference counts, we choose to + /// use it once per process and avoid nesting these calls. + fn lock_hr() { + // SAFETY: We just want to set the timer period here + unsafe { windows_sys::Win32::Media::timeBeginPeriod(1) }; + } + + fn unlock_hr() { + // SAFETY: We just want to set the timer period here + unsafe { windows_sys::Win32::Media::timeEndPeriod(1) }; + } +} + +#[cfg(target_os = "windows")] +pub(crate) fn hr_timer_lock() -> windows::HrTimerLock { + windows::inc_ref(); + windows::HrTimerLock { + _unconstructable: Default::default(), + } +} + +/// No-op on other platforms. 
+#[cfg(not(target_os = "windows"))] +pub(crate) fn hr_timer_lock() -> (std::marker::PhantomData<()>,) { + (std::marker::PhantomData::default(),) +} diff --git a/ext/web/lib.rs b/ext/web/lib.rs index b0dc0d56d5..b1e0dd5d8d 100644 --- a/ext/web/lib.rs +++ b/ext/web/lib.rs @@ -2,6 +2,7 @@ mod blob; mod compression; +mod hr_timer_lock; mod message_port; mod timers; @@ -103,7 +104,6 @@ deno_core::extension!(deno_web, "08_text_encoding.js", "09_file.js", "10_filereader.js", - "11_blob_url.js", "12_location.js", "13_message_port.js", "14_compression.js", @@ -142,7 +142,7 @@ fn op_base64_atob(mut s: ByteString) -> Result { fn forgiving_base64_decode_inplace( input: &mut [u8], ) -> Result { - let error: _ = + let error = || DomExceptionInvalidCharacterError::new("Failed to decode base64"); let decoded = base64_simd::forgiving_decode_inplace(input).map_err(|_| error())?; diff --git a/ext/web/timers.rs b/ext/web/timers.rs index 252cd4ad41..54e185abda 100644 --- a/ext/web/timers.rs +++ b/ext/web/timers.rs @@ -2,9 +2,9 @@ //! This module helps deno implement timers and performance APIs. +use crate::hr_timer_lock::hr_timer_lock; use deno_core::error::AnyError; use deno_core::op; - use deno_core::CancelFuture; use deno_core::CancelHandle; use deno_core::OpState; @@ -86,8 +86,24 @@ pub async fn op_sleep( rid: ResourceId, ) -> Result { let handle = state.borrow().resource_table.get::(rid)?; + + // If a timer is requested with <=100ms resolution, request the high-res timer. Since the default + // Windows timer period is 15ms, this means a 100ms timer could fire at 115ms (15% late). We assume that + // timers longer than 100ms are a reasonable cutoff here. + + // The high-res timers on Windows are still limited. Unfortuntely this means that our shortest duration 4ms timers + // can still be 25% late, but without a more complex timer system or spinning on the clock itself, we're somewhat + // bounded by the OS' scheduler itself. 
+ let _hr_timer_lock = if millis <= 100 { + Some(hr_timer_lock()) + } else { + None + }; + let res = tokio::time::sleep(Duration::from_millis(millis)) .or_cancel(handle.0.clone()) .await; + + // We release the high-res timer lock here, either by being cancelled or resolving. Ok(res.is_ok()) } diff --git a/ext/webidl/00_webidl.js b/ext/webidl/00_webidl.js index 4398609e52..ca1c7c6064 100644 --- a/ext/webidl/00_webidl.js +++ b/ext/webidl/00_webidl.js @@ -47,7 +47,7 @@ const { ObjectGetOwnPropertyDescriptor, ObjectGetOwnPropertyDescriptors, ObjectGetPrototypeOf, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, ObjectIs, PromisePrototypeThen, @@ -59,6 +59,7 @@ const { ReflectHas, ReflectOwnKeys, RegExpPrototypeTest, + RegExpPrototypeExec, SafeRegExp, SafeSet, SetPrototypeEntries, @@ -86,11 +87,9 @@ const { Uint8ClampedArray, } = primordials; -function makeException(ErrorType, message, opts = {}) { +function makeException(ErrorType, message, prefix, context) { return new ErrorType( - `${opts.prefix ? opts.prefix + ": " : ""}${ - opts.context ? opts.context : "Value" - } ${message}`, + `${prefix ? prefix + ": " : ""}${context ? 
context : "Value"} ${message}`, ); } @@ -193,13 +192,18 @@ function createIntegerConversion(bitLength, typeOpts) { const twoToTheBitLength = MathPow(2, bitLength); const twoToOneLessThanTheBitLength = MathPow(2, bitLength - 1); - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { let x = toNumber(V); x = censorNegativeZero(x); if (opts.enforceRange) { if (!NumberIsFinite(x)) { - throw makeException(TypeError, "is not a finite number", opts); + throw makeException( + TypeError, + "is not a finite number", + prefix, + context, + ); } x = integerPart(x); @@ -208,7 +212,8 @@ function createIntegerConversion(bitLength, typeOpts) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts, + prefix, + context, ); } @@ -246,13 +251,18 @@ function createLongLongConversion(bitLength, { unsigned }) { const lowerBound = unsigned ? 0 : NumberMIN_SAFE_INTEGER; const asBigIntN = unsigned ? BigIntAsUintN : BigIntAsIntN; - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { let x = toNumber(V); x = censorNegativeZero(x); if (opts.enforceRange) { if (!NumberIsFinite(x)) { - throw makeException(TypeError, "is not a finite number", opts); + throw makeException( + TypeError, + "is not a finite number", + prefix, + context, + ); } x = integerPart(x); @@ -261,7 +271,8 @@ function createLongLongConversion(bitLength, { unsigned }) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts, + prefix, + context, ); } @@ -310,14 +321,15 @@ converters["unsigned long long"] = createLongLongConversion(64, { unsigned: true, }); -converters.float = (V, opts) => { +converters.float = (V, prefix, context, _opts) => { const x = toNumber(V); if (!NumberIsFinite(x)) { throw makeException( TypeError, "is not a finite floating-point value", - opts, + prefix, + context, ); } @@ -331,14 +343,15 @@ 
converters.float = (V, opts) => { throw makeException( TypeError, "is outside the range of a single-precision floating-point value", - opts, + prefix, + context, ); } return y; }; -converters["unrestricted float"] = (V, _opts) => { +converters["unrestricted float"] = (V, _prefix, _context, _opts) => { const x = toNumber(V); if (isNaN(x)) { @@ -352,27 +365,28 @@ converters["unrestricted float"] = (V, _opts) => { return MathFround(x); }; -converters.double = (V, opts) => { +converters.double = (V, prefix, context, _opts) => { const x = toNumber(V); if (!NumberIsFinite(x)) { throw makeException( TypeError, "is not a finite floating-point value", - opts, + prefix, + context, ); } return x; }; -converters["unrestricted double"] = (V, _opts) => { +converters["unrestricted double"] = (V, _prefix, _context, _opts) => { const x = toNumber(V); return x; }; -converters.DOMString = function (V, opts = {}) { +converters.DOMString = function (V, prefix, context, opts = {}) { if (typeof V === "string") { return V; } else if (V === null && opts.treatNullAsEmptyString) { @@ -381,7 +395,8 @@ converters.DOMString = function (V, opts = {}) { throw makeException( TypeError, "is a symbol, which cannot be converted to a string", - opts, + prefix, + context, ); } @@ -390,16 +405,21 @@ converters.DOMString = function (V, opts = {}) { // deno-lint-ignore no-control-regex const IS_BYTE_STRING = new SafeRegExp(/^[\x00-\xFF]*$/); -converters.ByteString = (V, opts) => { - const x = converters.DOMString(V, opts); - if (!RegExpPrototypeTest(IS_BYTE_STRING, x)) { - throw makeException(TypeError, "is not a valid ByteString", opts); +converters.ByteString = (V, prefix, context, opts) => { + const x = converters.DOMString(V, prefix, context, opts); + if (RegExpPrototypeExec(IS_BYTE_STRING, x) === null) { + throw makeException( + TypeError, + "is not a valid ByteString", + prefix, + context, + ); } return x; }; -converters.USVString = (V, opts) => { - const S = converters.DOMString(V, opts); 
+converters.USVString = (V, prefix, context, opts) => { + const S = converters.DOMString(V, prefix, context, opts); const n = S.length; let U = ""; for (let i = 0; i < n; ++i) { @@ -425,9 +445,14 @@ converters.USVString = (V, opts) => { return U; }; -converters.object = (V, opts) => { +converters.object = (V, prefix, context, _opts) => { if (type(V) !== "Object") { - throw makeException(TypeError, "is not an object", opts); + throw makeException( + TypeError, + "is not an object", + prefix, + context, + ); } return V; @@ -437,9 +462,14 @@ converters.object = (V, opts) => { // Neither Function nor VoidFunction is defined with [TreatNonObjectAsNull], so // handling for that is omitted. -function convertCallbackFunction(V, opts) { +function convertCallbackFunction(V, prefix, context, _opts) { if (typeof V !== "function") { - throw makeException(TypeError, "is not a function", opts); + throw makeException( + TypeError, + "is not a function", + prefix, + context, + ); } return V; } @@ -458,31 +488,53 @@ function isSharedArrayBuffer(V) { return ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V); } -converters.ArrayBuffer = (V, opts = {}) => { +converters.ArrayBuffer = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!isNonSharedArrayBuffer(V)) { if (opts.allowShared && !isSharedArrayBuffer(V)) { throw makeException( TypeError, "is not an ArrayBuffer or SharedArrayBuffer", - opts, + prefix, + context, ); } - throw makeException(TypeError, "is not an ArrayBuffer", opts); + throw makeException( + TypeError, + "is not an ArrayBuffer", + prefix, + context, + ); } return V; }; -converters.DataView = (V, opts = {}) => { +converters.DataView = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!isDataView(V)) { - throw makeException(TypeError, "is not a DataView", opts); + throw makeException( + TypeError, + "is not a DataView", + prefix, + context, + ); } if (!opts.allowShared && 
isSharedArrayBuffer(DataViewPrototypeGetBuffer(V))) { throw makeException( TypeError, "is backed by a SharedArrayBuffer, which is not allowed", - opts, + prefix, + context, ); } @@ -506,12 +558,18 @@ ArrayPrototypeForEach( const article = RegExpPrototypeTest(new SafeRegExp(/^[AEIOU]/), name) ? "an" : "a"; - converters[name] = (V, opts = {}) => { + converters[name] = ( + V, + prefix = undefined, + context = undefined, + opts = {}, + ) => { if (TypedArrayPrototypeGetSymbolToStringTag(V) !== name) { throw makeException( TypeError, `is not ${article} ${name} object`, - opts, + prefix, + context, ); } if ( @@ -521,7 +579,8 @@ ArrayPrototypeForEach( throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + prefix, + context, ); } @@ -532,12 +591,18 @@ ArrayPrototypeForEach( // Common definitions -converters.ArrayBufferView = (V, opts = {}) => { +converters.ArrayBufferView = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!ArrayBufferIsView(V)) { throw makeException( TypeError, "is not a view on an ArrayBuffer or SharedArrayBuffer", - opts, + prefix, + context, ); } let buffer; @@ -550,14 +615,20 @@ converters.ArrayBufferView = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + prefix, + context, ); } return V; }; -converters.BufferSource = (V, opts = {}) => { +converters.BufferSource = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (ArrayBufferIsView(V)) { let buffer; if (TypedArrayPrototypeGetSymbolToStringTag(V) !== undefined) { @@ -569,7 +640,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + prefix, + context, ); } @@ -580,7 +652,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer or a view on one", - opts, + prefix, + context, ); } if ( @@ -591,7 
+664,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer, SharedArrayBuffer, or a view on one", - opts, + prefix, + context, ); } @@ -696,7 +770,7 @@ function createDictionaryConverter(name, ...dictionaries) { } } - return function (V, opts = {}) { + return function (V, prefix = undefined, context = undefined, opts = {}) { const typeV = type(V); switch (typeV) { case "Undefined": @@ -707,7 +781,8 @@ function createDictionaryConverter(name, ...dictionaries) { throw makeException( TypeError, "can not be converted to a dictionary", - opts, + prefix, + context, ); } const esDict = V; @@ -731,17 +806,23 @@ function createDictionaryConverter(name, ...dictionaries) { } if (esMemberValue !== undefined) { - const context = `'${key}' of '${name}'${ - opts.context ? ` (${opts.context})` : "" + const memberContext = `'${key}' of '${name}'${ + context ? ` (${context})` : "" }`; const converter = member.converter; - const idlMemberValue = converter(esMemberValue, { ...opts, context }); + const idlMemberValue = converter( + esMemberValue, + prefix, + memberContext, + opts, + ); idlDict[key] = idlMemberValue; } else if (member.required) { throw makeException( TypeError, `can not be converted to '${name}' because '${key}' is required in '${name}'.`, - opts, + prefix, + context, ); } } @@ -754,13 +835,13 @@ function createDictionaryConverter(name, ...dictionaries) { function createEnumConverter(name, values) { const E = new SafeSet(values); - return function (V, opts = {}) { + return function (V, prefix = undefined, _context = undefined, _opts = {}) { const S = String(V); if (!E.has(S)) { throw new TypeError( `${ - opts.prefix ? opts.prefix + ": " : "" + prefix ? 
prefix + ": " : "" }The provided value '${S}' is not a valid enum value of type ${name}.`, ); } @@ -770,7 +851,7 @@ function createEnumConverter(name, values) { } function createNullableConverter(converter) { - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { // FIXME: If Type(V) is not Object, and the conversion to an IDL value is // being performed due to V being assigned to an attribute whose type is a // nullable callback function that is annotated with @@ -778,18 +859,19 @@ function createNullableConverter(converter) { // value null. if (V === null || V === undefined) return null; - return converter(V, opts); + return converter(V, prefix, context, opts); }; } // https://heycam.github.io/webidl/#es-sequence function createSequenceConverter(converter) { - return function (V, opts = {}) { + return function (V, prefix = undefined, context = undefined, opts = {}) { if (type(V) !== "Object") { throw makeException( TypeError, "can not be converted to sequence.", - opts, + prefix, + context, ); } const iter = V?.[SymbolIterator]?.(); @@ -797,7 +879,8 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts, + prefix, + context, ); } const array = []; @@ -807,14 +890,17 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts, + prefix, + context, ); } if (res.done === true) break; - const val = converter(res.value, { - ...opts, - context: `${opts.context}, index ${array.length}`, - }); + const val = converter( + res.value, + prefix, + `${context}, index ${array.length}`, + opts, + ); ArrayPrototypePush(array, val); } return array; @@ -822,24 +908,25 @@ function createSequenceConverter(converter) { } function createRecordConverter(keyConverter, valueConverter) { - return (V, opts) => { + return (V, prefix, context, opts) => { if (type(V) !== "Object") { throw makeException( TypeError, 
"can not be converted to dictionary.", - opts, + prefix, + context, ); } const result = {}; // Fast path for common case (not a Proxy) if (!core.isProxy(V)) { for (const key in V) { - if (!ObjectPrototypeHasOwnProperty(V, key)) { + if (!ObjectHasOwn(V, key)) { continue; } - const typedKey = keyConverter(key, opts); + const typedKey = keyConverter(key, prefix, context, opts); const value = V[key]; - const typedValue = valueConverter(value, opts); + const typedValue = valueConverter(value, prefix, context, opts); result[typedKey] = typedValue; } return result; @@ -850,9 +937,9 @@ function createRecordConverter(keyConverter, valueConverter) { const key = keys[i]; const desc = ObjectGetOwnPropertyDescriptor(V, key); if (desc !== undefined && desc.enumerable === true) { - const typedKey = keyConverter(key, opts); + const typedKey = keyConverter(key, prefix, context, opts); const value = V[key]; - const typedValue = valueConverter(value, opts); + const typedValue = valueConverter(value, prefix, context, opts); result[typedKey] = typedValue; } } @@ -861,8 +948,13 @@ function createRecordConverter(keyConverter, valueConverter) { } function createPromiseConverter(converter) { - return (V, opts) => - PromisePrototypeThen(PromiseResolve(V), (V) => converter(V, opts)); + return (V, prefix, context, opts) => + // should be able to handle thenables + // see: https://github.com/web-platform-tests/wpt/blob/a31d3ba53a79412793642366f3816c9a63f0cf57/streams/writable-streams/close.any.js#L207 + typeof V?.then === "function" + ? 
PromisePrototypeThen(PromiseResolve(V), (V) => + converter(V, prefix, context, opts)) + : PromiseResolve(converter(V, prefix, context, opts)); } function invokeCallbackFunction( @@ -870,16 +962,14 @@ function invokeCallbackFunction( args, thisArg, returnValueConverter, - opts, + prefix, + returnsPromise, ) { try { const rv = ReflectApply(callable, thisArg, args); - return returnValueConverter(rv, { - prefix: opts.prefix, - context: "return value", - }); + return returnValueConverter(rv, prefix, "return value"); } catch (err) { - if (opts.returnsPromise === true) { + if (returnsPromise === true) { return PromiseReject(err); } throw err; @@ -889,9 +979,14 @@ function invokeCallbackFunction( const brand = Symbol("[[webidl.brand]]"); function createInterfaceConverter(name, prototype) { - return (V, opts) => { + return (V, prefix, context, _opts) => { if (!ObjectPrototypeIsPrototypeOf(prototype, V) || V[brand] !== brand) { - throw makeException(TypeError, `is not of type ${name}.`, opts); + throw makeException( + TypeError, + `is not of type ${name}.`, + prefix, + context, + ); } return V; }; @@ -1041,7 +1136,7 @@ function mixinPairIterable(name, prototype, dataSymbol, keyKey, valueKey) { function configurePrototype(prototype) { const descriptors = ObjectGetOwnPropertyDescriptors(prototype.prototype); for (const key in descriptors) { - if (!ObjectPrototypeHasOwnProperty(descriptors, key)) { + if (!ObjectHasOwn(descriptors, key)) { continue; } if (key === "constructor") continue; diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 939dfaeafe..d47220fc3a 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.99.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/benches/dict.rs b/ext/webidl/benches/dict.rs index d08adbb6a8..b3d95c8a32 100644 --- a/ext/webidl/benches/dict.rs +++ b/ext/webidl/benches/dict.rs @@ -19,6 +19,7 
@@ fn setup() -> Vec { "dict.js" )), }]) + .esm_entry_point("ext:deno_webidl_bench/setup.js") .build(), ] } diff --git a/ext/webidl/internal.d.ts b/ext/webidl/internal.d.ts index 110031ae73..cc4422a274 100644 --- a/ext/webidl/internal.d.ts +++ b/ext/webidl/internal.d.ts @@ -5,30 +5,13 @@ /// declare module "ext:deno_webidl/00_webidl.js" { - interface ConverterOpts { - /** - * The prefix for error messages created by this converter. - * Examples: - * - `Failed to construct 'Event'` - * - `Failed to execute 'removeEventListener' on 'EventTarget'` - */ - prefix: string; - } - interface ValueConverterOpts extends ConverterOpts { - /** - * The context of this value error messages created by this converter. - * Examples: - * - `Argument 1` - * - `Argument 3` - */ - context: string; - } function makeException( ErrorType: any, message: string, - opts: ValueConverterOpts, + prefix?: string, + context?: string, ): any; - interface IntConverterOpts extends ValueConverterOpts { + interface IntConverterOpts { /** * Wether to throw if the number is outside of the acceptable values for * this type. @@ -39,13 +22,13 @@ declare module "ext:deno_webidl/00_webidl.js" { */ clamp?: boolean; } - interface StringConverterOpts extends ValueConverterOpts { + interface StringConverterOpts { /** * Wether to treat `null` value as an empty string. */ treatNullAsEmptyString?: boolean; } - interface BufferConverterOpts extends ValueConverterOpts { + interface BufferConverterOpts { /** * Wether to allow `SharedArrayBuffer` (not just `ArrayBuffer`). */ @@ -56,148 +39,322 @@ declare module "ext:deno_webidl/00_webidl.js" { /** * Convert a value into a `boolean` (bool). */ - boolean(v: any, opts?: IntConverterOpts): boolean; + boolean( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): boolean; /** * Convert a value into a `byte` (int8). 
*/ - byte(v: any, opts?: IntConverterOpts): number; + byte( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `octet` (uint8). */ - octet(v: any, opts?: IntConverterOpts): number; + octet( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `short` (int16). */ - short(v: any, opts?: IntConverterOpts): number; + short( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned short` (uint16). */ - ["unsigned short"](v: any, opts?: IntConverterOpts): number; + ["unsigned short"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `long` (int32). */ - long(v: any, opts?: IntConverterOpts): number; + long( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned long` (uint32). */ - ["unsigned long"](v: any, opts?: IntConverterOpts): number; + ["unsigned long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `long long` (int64). * **Note this is truncated to a JS number (53 bit precision).** */ - ["long long"](v: any, opts?: IntConverterOpts): number; + ["long long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned long long` (uint64). * **Note this is truncated to a JS number (53 bit precision).** */ - ["unsigned long long"](v: any, opts?: IntConverterOpts): number; + ["unsigned long long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `float` (f32). 
*/ - float(v: any, opts?: ValueConverterOpts): number; + float( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `unrestricted float` (f32, infinity, or NaN). */ - ["unrestricted float"](v: any, opts?: ValueConverterOpts): number; + ["unrestricted float"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `double` (f64). */ - double(v: any, opts?: ValueConverterOpts): number; + double( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `unrestricted double` (f64, infinity, or NaN). */ - ["unrestricted double"](v: any, opts?: ValueConverterOpts): number; + ["unrestricted double"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `DOMString` (string). */ - DOMString(v: any, opts?: StringConverterOpts): string; + DOMString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into a `ByteString` (string with only u8 codepoints). */ - ByteString(v: any, opts?: StringConverterOpts): string; + ByteString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into a `USVString` (string with only valid non * surrogate Unicode code points). */ - USVString(v: any, opts?: StringConverterOpts): string; + USVString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into an `object` (object). */ - object(v: any, opts?: ValueConverterOpts): object; + object( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): object; /** * Convert a value into an `ArrayBuffer` (ArrayBuffer). 
*/ - ArrayBuffer(v: any, opts?: BufferConverterOpts): ArrayBuffer; + ArrayBuffer( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): ArrayBuffer; /** * Convert a value into a `DataView` (ArrayBuffer). */ - DataView(v: any, opts?: BufferConverterOpts): DataView; + DataView( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): DataView; /** * Convert a value into a `Int8Array` (Int8Array). */ - Int8Array(v: any, opts?: BufferConverterOpts): Int8Array; + Int8Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int8Array; /** * Convert a value into a `Int16Array` (Int16Array). */ - Int16Array(v: any, opts?: BufferConverterOpts): Int16Array; + Int16Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int16Array; /** * Convert a value into a `Int32Array` (Int32Array). */ - Int32Array(v: any, opts?: BufferConverterOpts): Int32Array; + Int32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int32Array; /** * Convert a value into a `Uint8Array` (Uint8Array). */ - Uint8Array(v: any, opts?: BufferConverterOpts): Uint8Array; + Uint8Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint8Array; /** * Convert a value into a `Uint16Array` (Uint16Array). */ - Uint16Array(v: any, opts?: BufferConverterOpts): Uint16Array; + Uint16Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint16Array; /** * Convert a value into a `Uint32Array` (Uint32Array). */ - Uint32Array(v: any, opts?: BufferConverterOpts): Uint32Array; + Uint32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint32Array; /** * Convert a value into a `Uint8ClampedArray` (Uint8ClampedArray). 
*/ Uint8ClampedArray( v: any, + prefix?: string, + context?: string, opts?: BufferConverterOpts, ): Uint8ClampedArray; /** * Convert a value into a `Float32Array` (Float32Array). */ - Float32Array(v: any, opts?: BufferConverterOpts): Float32Array; + Float32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Float32Array; /** * Convert a value into a `Float64Array` (Float64Array). */ - Float64Array(v: any, opts?: BufferConverterOpts): Float64Array; + Float64Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Float64Array; /** * Convert a value into an `ArrayBufferView` (ArrayBufferView). */ - ArrayBufferView(v: any, opts?: BufferConverterOpts): ArrayBufferView; + ArrayBufferView( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): ArrayBufferView; /** * Convert a value into a `BufferSource` (ArrayBuffer or ArrayBufferView). */ BufferSource( v: any, + prefix?: string, + context?: string, opts?: BufferConverterOpts, ): ArrayBuffer | ArrayBufferView; /** * Convert a value into a `DOMTimeStamp` (u64). Alias for unsigned long long */ - DOMTimeStamp(v: any, opts?: IntConverterOpts): number; + DOMTimeStamp( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `Function` ((...args: any[]) => any). */ - Function(v: any, opts?: ValueConverterOpts): (...args: any) => any; + Function( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): (...args: any) => any; /** * Convert a value into a `VoidFunction` (() => void). 
*/ - VoidFunction(v: any, opts?: ValueConverterOpts): () => void; - ["UVString?"](v: any, opts?: ValueConverterOpts): string | null; - ["sequence"](v: any, opts?: ValueConverterOpts): number[]; + VoidFunction( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): () => void; + ["UVString?"]( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string | null; + ["sequence"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number[]; - [type: string]: (v: any, opts: ValueConverterOpts) => any; + [type: string]: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; }; /** @@ -211,7 +368,12 @@ declare module "ext:deno_webidl/00_webidl.js" { type Dictionary = DictionaryMember[]; interface DictionaryMember { key: string; - converter: (v: any, opts: ValueConverterOpts) => any; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; defaultValue?: any; required?: boolean; } @@ -222,7 +384,12 @@ declare module "ext:deno_webidl/00_webidl.js" { function createDictionaryConverter( name: string, ...dictionaries: Dictionary[] - ): (v: any, opts: ValueConverterOpts) => T; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T; /** * Create a converter for enums. @@ -230,28 +397,63 @@ declare module "ext:deno_webidl/00_webidl.js" { function createEnumConverter( name: string, values: string[], - ): (v: any, opts: ValueConverterOpts) => string; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => string; /** * Create a converter that makes the contained type nullable. 
*/ function createNullableConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => T | null; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T | null; /** * Create a converter that converts a sequence of the inner type. */ function createSequenceConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => T[]; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T[]; /** * Create a converter that converts a Promise of the inner type. */ function createPromiseConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => Promise; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => Promise; /** * Invoke a callback function. 
@@ -260,8 +462,14 @@ declare module "ext:deno_webidl/00_webidl.js" { callable: (...args: any) => any, args: any[], thisArg: any, - returnValueConverter: (v: any, opts: ValueConverterOpts) => T, - opts: ConverterOpts & { returnsPromise?: boolean }, + returnValueConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + prefix: string, + returnsPromise?: boolean, ): T; /** @@ -290,17 +498,34 @@ declare module "ext:deno_webidl/00_webidl.js" { function createInterfaceConverter( name: string, prototype: any, - ): (v: any, opts: ValueConverterOpts) => any; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; function createRecordConverter< K extends string | number | symbol, V, >( - keyConverter: (v: any, opts: ValueConverterOpts) => K, - valueConverter: (v: any, opts: ValueConverterOpts) => V, + keyConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => K, + valueConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => V, ): ( v: Record, - opts: ValueConverterOpts, + prefix?: string, + context?: string, + opts?: any, ) => any; /** diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 2c6bf46b27..f6cb6599d8 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -1,9 +1,9 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase /// const core = globalThis.Deno.core; -const ops = core.ops; import { URL } from "ext:deno_url/00_url.js"; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { HTTP_TOKEN_CODE_POINT_RE } from "ext:deno_web/00_infra.js"; @@ -12,6 +12,7 @@ import { _skipInternalInit, CloseEvent, defineEventHandler, + dispatch, ErrorEvent, Event, EventTarget, @@ -22,17 +23,15 @@ const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, ArrayBufferIsView, - ArrayBufferPrototypeGetByteLength, ArrayPrototypeJoin, ArrayPrototypeMap, ArrayPrototypeSome, DataView, - DataViewPrototypeGetByteLength, ErrorPrototypeToString, ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, PromisePrototypeThen, - RegExpPrototypeTest, + RegExpPrototypeExec, SafeSet, SetPrototypeGetSize, // TODO(lucacasonato): add SharedArrayBuffer to primordials @@ -45,23 +44,38 @@ const { PromisePrototypeCatch, SymbolFor, TypedArrayPrototypeGetByteLength, - TypedArrayPrototypeGetSymbolToStringTag, } = primordials; +const op_ws_check_permission_and_cancel_handle = + core.ops.op_ws_check_permission_and_cancel_handle; +const { + op_ws_create, + op_ws_close, + op_ws_send_binary, + op_ws_send_text, + op_ws_next_event, + op_ws_send_ping, + op_ws_get_buffered_amount, +} = core.ensureFastOps(); -webidl.converters["sequence or DOMString"] = (V, opts) => { +webidl.converters["sequence or DOMString"] = ( + V, + prefix, + context, + opts, +) => { // Union for (sequence or DOMString) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence"](V, opts); + return webidl.converters["sequence"](V, prefix, context, opts); } } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; -webidl.converters["WebSocketSend"] = (V, opts) => { +webidl.converters["WebSocketSend"] = (V, prefix, context, opts) => { // Union for (Blob or ArrayBufferView or 
ArrayBuffer or USVString) if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, opts); + return webidl.converters["Blob"](V, prefix, context, opts); } if (typeof V === "object") { if ( @@ -69,13 +83,13 @@ webidl.converters["WebSocketSend"] = (V, opts) => { // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } - return webidl.converters["USVString"](V, opts); + return webidl.converters["USVString"](V, prefix, context, opts); }; /** role */ @@ -95,7 +109,6 @@ const _role = Symbol("[[role]]"); const _extensions = Symbol("[[extensions]]"); const _protocol = Symbol("[[protocol]]"); const _binaryType = Symbol("[[binaryType]]"); -const _bufferedAmount = Symbol("[[bufferedAmount]]"); const _eventLoop = Symbol("[[eventLoop]]"); const _server = Symbol("[[server]]"); @@ -103,86 +116,25 @@ const _idleTimeoutDuration = Symbol("[[idleTimeout]]"); const _idleTimeoutTimeout = Symbol("[[idleTimeoutTimeout]]"); const _serverHandleIdleTimeout = Symbol("[[serverHandleIdleTimeout]]"); class WebSocket extends EventTarget { - [_rid]; - [_role]; - - [_readyState] = CONNECTING; - get readyState() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_readyState]; - } - - get CONNECTING() { - webidl.assertBranded(this, WebSocketPrototype); - return CONNECTING; - } - get OPEN() { - webidl.assertBranded(this, WebSocketPrototype); - return OPEN; - } - get CLOSING() { - webidl.assertBranded(this, WebSocketPrototype); - return CLOSING; - } - get CLOSED() { - webidl.assertBranded(this, WebSocketPrototype); - return CLOSED; - } - - [_extensions] = ""; - get extensions() { - webidl.assertBranded(this, WebSocketPrototype); 
- return this[_extensions]; - } - - [_protocol] = ""; - get protocol() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_protocol]; - } - - [_url] = ""; - get url() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_url]; - } - - [_binaryType] = "blob"; - get binaryType() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_binaryType]; - } - set binaryType(value) { - webidl.assertBranded(this, WebSocketPrototype); - value = webidl.converters.DOMString(value, { - prefix: "Failed to set 'binaryType' on 'WebSocket'", - }); - if (value === "blob" || value === "arraybuffer") { - this[_binaryType] = value; - } - } - - [_bufferedAmount] = 0; - get bufferedAmount() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_bufferedAmount]; - } - constructor(url, protocols = []) { super(); this[webidl.brand] = webidl.brand; + this[_rid] = undefined; + this[_role] = undefined; + this[_readyState] = CONNECTING; + this[_extensions] = ""; + this[_protocol] = ""; + this[_url] = ""; + this[_binaryType] = "blob"; + this[_idleTimeoutDuration] = 0; + this[_idleTimeoutTimeout] = undefined; const prefix = "Failed to construct 'WebSocket'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.USVString(url, { - prefix, - context: "Argument 1", - }); + url = webidl.converters.USVString(url, prefix, "Argument 1"); protocols = webidl.converters["sequence or DOMString"]( protocols, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); let wsURL; @@ -210,7 +162,7 @@ class WebSocket extends EventTarget { this[_url] = wsURL.href; this[_role] = CLIENT; - ops.op_ws_check_permission_and_cancel_handle( + op_ws_check_permission_and_cancel_handle( "WebSocket.abort()", this[_url], false, @@ -237,7 +189,8 @@ class WebSocket extends EventTarget { if ( ArrayPrototypeSome( protocols, - (protocol) => !RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, protocol), + (protocol) => + 
RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, protocol) === null, ) ) { throw new DOMException( @@ -247,8 +200,7 @@ class WebSocket extends EventTarget { } PromisePrototypeThen( - core.opAsync( - "op_ws_create", + op_ws_create( "new WebSocket()", wsURL.href, ArrayPrototypeJoin(protocols, ", "), @@ -260,7 +212,7 @@ class WebSocket extends EventTarget { if (this[_readyState] === CLOSING) { PromisePrototypeThen( - core.opAsync("op_ws_close", this[_rid]), + op_ws_close(this[_rid]), () => { this[_readyState] = CLOSED; @@ -295,75 +247,97 @@ class WebSocket extends EventTarget { ); } + get readyState() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_readyState]; + } + + get CONNECTING() { + webidl.assertBranded(this, WebSocketPrototype); + return CONNECTING; + } + get OPEN() { + webidl.assertBranded(this, WebSocketPrototype); + return OPEN; + } + get CLOSING() { + webidl.assertBranded(this, WebSocketPrototype); + return CLOSING; + } + get CLOSED() { + webidl.assertBranded(this, WebSocketPrototype); + return CLOSED; + } + + get extensions() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_extensions]; + } + + get protocol() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_protocol]; + } + + get url() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_url]; + } + + get binaryType() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_binaryType]; + } + set binaryType(value) { + webidl.assertBranded(this, WebSocketPrototype); + value = webidl.converters.DOMString( + value, + "Failed to set 'binaryType' on 'WebSocket'", + ); + if (value === "blob" || value === "arraybuffer") { + this[_binaryType] = value; + } + } + + get bufferedAmount() { + webidl.assertBranded(this, WebSocketPrototype); + if (this[_readyState] === OPEN) { + return op_ws_get_buffered_amount(this[_rid]); + } else { + return 0; + } + } + send(data) { webidl.assertBranded(this, WebSocketPrototype); const prefix = "Failed to 
execute 'send' on 'WebSocket'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.WebSocketSend(data, { - prefix, - context: "Argument 1", - }); + data = webidl.converters.WebSocketSend(data, prefix, "Argument 1"); if (this[_readyState] !== OPEN) { throw new DOMException("readyState not OPEN", "InvalidStateError"); } - /** - * @param {ArrayBufferView} view - * @param {number} byteLength - */ - const sendTypedArray = (view, byteLength) => { - this[_bufferedAmount] += byteLength; - PromisePrototypeThen( - core.opAsync2( - this[_role] === SERVER - ? "op_server_ws_send_binary" - : "op_ws_send_binary", - this[_rid], - view, - ), - () => { - this[_bufferedAmount] -= byteLength; - }, - ); - }; - if (ObjectPrototypeIsPrototypeOf(BlobPrototype, data)) { PromisePrototypeThen( + // deno-lint-ignore prefer-primordials data.slice().arrayBuffer(), (ab) => - sendTypedArray( + op_ws_send_binary( + this[_rid], new DataView(ab), - ArrayBufferPrototypeGetByteLength(ab), ), ); } else if (ArrayBufferIsView(data)) { - if (TypedArrayPrototypeGetSymbolToStringTag(data) === undefined) { - // DataView - sendTypedArray(data, DataViewPrototypeGetByteLength(data)); - } else { - // TypedArray - sendTypedArray(data, TypedArrayPrototypeGetByteLength(data)); - } + op_ws_send_binary(this[_rid], data); } else if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, data)) { - sendTypedArray( - new DataView(data), - ArrayBufferPrototypeGetByteLength(data), - ); + op_ws_send_binary(this[_rid], data); } else { const string = String(data); - const d = core.encode(string); - this[_bufferedAmount] += TypedArrayPrototypeGetByteLength(d); - PromisePrototypeThen( - core.opAsync2( - this[_role] === SERVER ? 
"op_server_ws_send_text" : "op_ws_send_text", - this[_rid], - string, - ), - () => { - this[_bufferedAmount] -= TypedArrayPrototypeGetByteLength(d); - }, + op_ws_send_text( + this[_rid], + string, ); } } @@ -373,18 +347,13 @@ class WebSocket extends EventTarget { const prefix = "Failed to execute 'close' on 'WebSocket'"; if (code !== undefined) { - code = webidl.converters["unsigned short"](code, { - prefix, + code = webidl.converters["unsigned short"](code, prefix, "Argument 1", { clamp: true, - context: "Argument 1", }); } if (reason !== undefined) { - reason = webidl.converters.USVString(reason, { - prefix, - context: "Argument 2", - }); + reason = webidl.converters.USVString(reason, prefix, "Argument 2"); } if (!this[_server]) { @@ -415,8 +384,7 @@ class WebSocket extends EventTarget { this[_readyState] = CLOSING; PromisePrototypeCatch( - core.opAsync( - this[_role] === SERVER ? "op_server_ws_close" : "op_ws_close", + op_ws_close( this[_rid], code, reason, @@ -440,10 +408,7 @@ class WebSocket extends EventTarget { async [_eventLoop]() { while (this[_readyState] !== CLOSED) { - const { 0: kind, 1: value } = await core.opAsync2( - this[_role] === SERVER ? 
"op_server_ws_next_event" : "op_ws_next_event", - this[_rid], - ); + const { 0: kind, 1: value } = await op_ws_next_event(this[_rid]); switch (kind) { case 0: { @@ -453,7 +418,7 @@ class WebSocket extends EventTarget { data: value, origin: this[_url], }); - this.dispatchEvent(event); + dispatch(this, event); break; } case 1: { @@ -472,7 +437,7 @@ class WebSocket extends EventTarget { origin: this[_url], [_skipInternalInit]: true, }); - this.dispatchEvent(event); + dispatch(this, event); break; } case 2: { @@ -480,7 +445,7 @@ class WebSocket extends EventTarget { this[_serverHandleIdleTimeout](); break; } - case 5: { + case 3: { /* error */ this[_readyState] = CLOSED; @@ -494,10 +459,6 @@ class WebSocket extends EventTarget { core.tryClose(this[_rid]); break; } - case 3: { - /* ping */ - break; - } default: { /* close */ const code = kind; @@ -507,8 +468,7 @@ class WebSocket extends EventTarget { if (prevState === OPEN) { try { - await core.opAsync( - this[_role] === SERVER ? "op_server_ws_close" : "op_ws_close", + await op_ws_close( this[_rid], code, value, @@ -536,23 +496,12 @@ class WebSocket extends EventTarget { clearTimeout(this[_idleTimeoutTimeout]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { - await core.opAsync( - this[_role] === SERVER ? "op_server_ws_send" : "op_ws_send", - this[_rid], - { - kind: "ping", - }, - ); + await op_ws_send_ping(this[_rid]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { this[_readyState] = CLOSING; const reason = "No response from ping frame."; - await core.opAsync( - this[_role] === SERVER ? 
"op_server_ws_close" : "op_ws_close", - this[_rid], - 1001, - reason, - ); + await op_ws_close(this[_rid], 1001, reason); this[_readyState] = CLOSED; const errEvent = new ErrorEvent("error", { diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 0ee7a70aa0..be1001eb60 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +// deno-lint-ignore-file camelcase /// const core = globalThis.Deno.core; @@ -17,6 +18,7 @@ const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeJoin, ArrayPrototypeMap, + DateNow, Error, ObjectPrototypeIsPrototypeOf, PromisePrototypeCatch, @@ -27,10 +29,17 @@ const { StringPrototypeToLowerCase, Symbol, SymbolFor, - TypedArrayPrototypeGetByteLength, TypeError, + TypedArrayPrototypeGetByteLength, Uint8ArrayPrototype, } = primordials; +const { + op_ws_send_text_async, + op_ws_send_binary_async, + op_ws_next_event, + op_ws_create, + op_ws_close, +} = core.ensureFastOps(); webidl.converters.WebSocketStreamOptions = webidl.createDictionaryConverter( "WebSocketStreamOptions", @@ -88,14 +97,12 @@ class WebSocketStream { this[webidl.brand] = webidl.brand; const prefix = "Failed to construct 'WebSocketStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.USVString(url, { + url = webidl.converters.USVString(url, prefix, "Argument 1"); + options = webidl.converters.WebSocketStreamOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.WebSocketStreamOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); const wsURL = new URL(url); @@ -154,8 +161,7 @@ class WebSocketStream { }; options.signal?.[add](abort); PromisePrototypeThen( - core.opAsync( - "op_ws_create", + op_ws_create( "new WebSocketStream()", this[_url], options.protocols ? 
ArrayPrototypeJoin(options.protocols, ", ") : "", @@ -166,17 +172,14 @@ class WebSocketStream { options.signal?.[remove](abort); if (this[_earlyClose]) { PromisePrototypeThen( - core.opAsync("op_ws_close", create.rid), + op_ws_close(create.rid), () => { PromisePrototypeThen( (async () => { while (true) { - const { 0: kind } = await core.opAsync( - "op_ws_next_event", - create.rid, - ); + const { 0: kind } = await op_ws_next_event(create.rid); - if (kind > 6) { + if (kind > 5) { /* close */ break; } @@ -207,17 +210,11 @@ class WebSocketStream { const writable = new WritableStream({ write: async (chunk) => { if (typeof chunk === "string") { - await core.opAsync("op_ws_send", this[_rid], { - kind: "text", - value: chunk, - }); + await op_ws_send_text_async(this[_rid], chunk); } else if ( ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, chunk) ) { - await core.opAsync("op_ws_send", this[_rid], { - kind: "binary", - value: chunk, - }, chunk); + await op_ws_send_binary_async(this[_rid], chunk); } else { throw new TypeError( "A chunk may only be either a string or an Uint8Array", @@ -242,10 +239,7 @@ class WebSocketStream { }, }); const pull = async (controller) => { - const { 0: kind, 1: value } = await core.opAsync( - "op_ws_next_event", - this[_rid], - ); + const { 0: kind, 1: value } = await op_ws_next_event(this[_rid]); switch (kind) { case 0: @@ -255,7 +249,11 @@ class WebSocketStream { controller.enqueue(value); break; } - case 5: { + case 2: { + /* pong */ + break; + } + case 3: { /* error */ const err = new Error(value); this[_closed].reject(err); @@ -263,19 +261,7 @@ class WebSocketStream { core.tryClose(this[_rid]); break; } - case 3: { - /* ping */ - await core.opAsync("op_ws_send", this[_rid], { - kind: "pong", - }); - await pull(controller); - break; - } - case 2: { - /* pong */ - break; - } - case 6: { + case 4: { /* closed */ this[_closed].resolve(undefined); core.tryClose(this[_rid]); @@ -297,7 +283,7 @@ class WebSocketStream { this[_closed].state === 
"pending" ) { if ( - new Date().getTime() - await this[_closeSent].promise <= + DateNow() - await this[_closeSent].promise <= CLOSE_RESPONSE_TIMEOUT ) { return pull(controller); @@ -380,10 +366,11 @@ class WebSocketStream { close(closeInfo) { webidl.assertBranded(this, WebSocketStreamPrototype); - closeInfo = webidl.converters.WebSocketCloseInfo(closeInfo, { - prefix: "Failed to execute 'close' on 'WebSocketStream'", - context: "Argument 1", - }); + closeInfo = webidl.converters.WebSocketCloseInfo( + closeInfo, + "Failed to execute 'close' on 'WebSocketStream'", + "Argument 1", + ); if ( closeInfo.code && @@ -416,10 +403,10 @@ class WebSocketStream { this[_earlyClose] = true; } else if (this[_closed].state === "pending") { PromisePrototypeThen( - core.opAsync("op_ws_close", this[_rid], code, closeInfo.reason), + op_ws_close(this[_rid], code, closeInfo.reason), () => { setTimeout(() => { - this[_closeSent].resolve(new Date().getTime()); + this[_closeSent].resolve(DateNow()); }, 0); }, (err) => { diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 2f5ed95b30..1ac465f698 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.104.0" +version = "0.112.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -14,12 +14,13 @@ description = "Implementation of WebSocket API for Deno" path = "lib.rs" [dependencies] +bytes.workspace = true deno_core.workspace = true +deno_net.workspace = true deno_tls.workspace = true -fastwebsockets = "0.1.3" +fastwebsockets = { workspace = true, features = ["upgrade"] } http.workspace = true -hyper.workspace = true +hyper = { workspace = true, features = ["backports"] } serde.workspace = true tokio.workspace = true tokio-rustls.workspace = true -tokio-tungstenite = { workspace = true, features = ["rustls-tls-webpki-roots"] } diff --git a/ext/websocket/autobahn/autobahn_server.js b/ext/websocket/autobahn/autobahn_server.js 
new file mode 100644 index 0000000000..b5f399a5b6 --- /dev/null +++ b/ext/websocket/autobahn/autobahn_server.js @@ -0,0 +1,20 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { parse } from "../../../test_util/std/flags/mod.ts"; + +const { port } = parse(Deno.args, { + number: ["port"], + default: { + port: 6969, + }, +}); + +const { serve } = Deno; + +// A message-based WebSocket echo server. +serve({ port }, (request) => { + const { socket, response } = Deno.upgradeWebSocket(request); + socket.onmessage = (event) => { + socket.send(event.data); + }; + return response; +}); diff --git a/ext/websocket/autobahn/fuzzingclient.js b/ext/websocket/autobahn/fuzzingclient.js new file mode 100644 index 0000000000..8aa7166958 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.js @@ -0,0 +1,33 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file + +import { $ } from "https://deno.land/x/dax@0.31.0/mod.ts"; + +const pwd = new URL(".", import.meta.url).pathname; + +const AUTOBAHN_TESTSUITE_DOCKER = + "crossbario/autobahn-testsuite:0.8.2@sha256:5d4ba3aa7d6ab2fdbf6606f3f4ecbe4b66f205ce1cbc176d6cdf650157e52242"; + +const self = Deno.execPath(); +$`${self} run -A --unstable ${pwd}/autobahn_server.js`.spawn(); +await $`docker run --name fuzzingserver -v ${pwd}/fuzzingclient.json:/fuzzingclient.json:ro -v ${pwd}/reports:/reports -p 9001:9001 --net=host --rm ${AUTOBAHN_TESTSUITE_DOCKER} wstest -m fuzzingclient -s fuzzingclient.json` + .cwd(pwd); + +const { deno_websocket } = JSON.parse( + Deno.readTextFileSync(`${pwd}/reports/servers/index.json`), +); +const result = Object.values(deno_websocket); + +function failed(name) { + return name != "OK" && name != "INFORMATIONAL" && name != "NON-STRICT"; +} + +const failedtests = result.filter((outcome) => failed(outcome.behavior)); + +console.log( + `%c${result.length - failedtests.length} / ${result.length} tests OK`, + `color: 
${failedtests.length == 0 ? "green" : "red"}`, +); + +Deno.exit(failedtests.length == 0 ? 0 : 1); diff --git a/ext/websocket/autobahn/fuzzingclient.json b/ext/websocket/autobahn/fuzzingclient.json new file mode 100644 index 0000000000..fcee80c993 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.json @@ -0,0 +1,26 @@ +{ + "outdir": "./reports/servers", + "servers": [ + { + "agent": "deno_websocket", + "url": "ws://localhost:6969" + } + ], + "cases": [ + "1.*", + "2.*", + "3.*", + "4.*", + "5.*", + "6.*", + "7.*", + "9.*", + "10.*" + ], + "exclude-cases": [ + "11.*", + "12.*", + "13.*" + ], + "exclude-agent-cases": {} +} diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 71f176070a..af987c1e4b 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -1,15 +1,10 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - +use crate::stream::WebSocketStream; +use bytes::Bytes; use deno_core::error::invalid_hostname; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::stream::SplitSink; -use deno_core::futures::stream::SplitStream; -use deno_core::futures::SinkExt; -use deno_core::futures::StreamExt; use deno_core::op; -use deno_core::StringOrBuffer; - use deno_core::url; use deno_core::AsyncRefCell; use deno_core::ByteString; @@ -19,21 +14,27 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_core::StringOrBuffer; use deno_core::ZeroCopyBuf; +use deno_net::raw::NetworkStream; use deno_tls::create_client_config; +use deno_tls::RootCertStoreProvider; +use http::header::CONNECTION; +use http::header::UPGRADE; use http::HeaderName; use http::HeaderValue; use http::Method; use http::Request; use http::Uri; -use serde::Deserialize; +use hyper::Body; use serde::Serialize; use std::borrow::Cow; +use std::cell::Cell; use std::cell::RefCell; use std::convert::TryFrom; use std::fmt; +use std::future::Future; use std::path::PathBuf; -use 
std::pin::Pin; use std::rc::Rc; use std::sync::Arc; use tokio::io::AsyncRead; @@ -42,23 +43,28 @@ use tokio::net::TcpStream; use tokio_rustls::rustls::RootCertStore; use tokio_rustls::rustls::ServerName; use tokio_rustls::TlsConnector; -use tokio_tungstenite::client_async_with_config; -use tokio_tungstenite::tungstenite::handshake::client::Response; -use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode; -use tokio_tungstenite::tungstenite::protocol::CloseFrame; -use tokio_tungstenite::tungstenite::protocol::Message; -use tokio_tungstenite::tungstenite::protocol::WebSocketConfig; -use tokio_tungstenite::MaybeTlsStream; -use tokio_tungstenite::WebSocketStream; -pub use tokio_tungstenite; // Re-export tokio_tungstenite +use fastwebsockets::CloseCode; +use fastwebsockets::FragmentCollector; +use fastwebsockets::Frame; +use fastwebsockets::OpCode; +use fastwebsockets::Role; +use fastwebsockets::WebSocket; -mod server; - -pub use server::ws_create_server_stream; +mod stream; #[derive(Clone)] -pub struct WsRootStore(pub Option); +pub struct WsRootStoreProvider(Option>); + +impl WsRootStoreProvider { + pub fn get_or_try_init(&self) -> Result, AnyError> { + Ok(match &self.0 { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } +} + #[derive(Clone)] pub struct WsUserAgent(pub String); @@ -76,100 +82,6 @@ pub trait WebSocketPermissions { /// would override previously used alias. pub struct UnsafelyIgnoreCertificateErrors(Option>); -type ClientWsStream = WebSocketStream>; -type ServerWsStream = WebSocketStream>>; - -pub enum WebSocketStreamType { - Client { - tx: AsyncRefCell>, - rx: AsyncRefCell>, - }, - Server { - tx: AsyncRefCell>, - rx: AsyncRefCell>, - }, -} - -pub trait Upgraded: AsyncRead + AsyncWrite + Unpin {} - -pub struct WsStreamResource { - pub stream: WebSocketStreamType, - // When a `WsStreamResource` resource is closed, all pending 'read' ops are - // canceled, while 'write' ops are allowed to complete. 
Therefore only - // 'read' futures are attached to this cancel handle. - pub cancel: CancelHandle, -} - -impl WsStreamResource { - async fn send(self: &Rc, message: Message) -> Result<(), AnyError> { - use tokio_tungstenite::tungstenite::Error; - let res = match self.stream { - WebSocketStreamType::Client { .. } => { - let mut tx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { tx, .. } => tx, - WebSocketStreamType::Server { .. } => unreachable!(), - }) - .borrow_mut() - .await; - tx.send(message).await - } - WebSocketStreamType::Server { .. } => { - let mut tx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { .. } => unreachable!(), - WebSocketStreamType::Server { tx, .. } => tx, - }) - .borrow_mut() - .await; - tx.send(message).await - } - }; - - match res { - Ok(()) => Ok(()), - Err(Error::ConnectionClosed) => Ok(()), - Err(tokio_tungstenite::tungstenite::Error::Protocol( - tokio_tungstenite::tungstenite::error::ProtocolError::SendAfterClosing, - )) => Ok(()), - Err(err) => Err(err.into()), - } - } - - async fn next_message( - self: &Rc, - cancel: RcRef, - ) -> Result< - Option>, - AnyError, - > { - match &self.stream { - WebSocketStreamType::Client { .. } => { - let mut rx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { rx, .. } => rx, - WebSocketStreamType::Server { .. } => unreachable!(), - }) - .borrow_mut() - .await; - rx.next().or_cancel(cancel).await.map_err(AnyError::from) - } - WebSocketStreamType::Server { .. } => { - let mut rx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { .. } => unreachable!(), - WebSocketStreamType::Server { rx, .. 
} => rx, - }) - .borrow_mut() - .await; - rx.next().or_cancel(cancel).await.map_err(AnyError::from) - } - } - } -} - -impl Resource for WsStreamResource { - fn name(&self) -> Cow { - "webSocketStream".into() - } -} - pub struct WsCancelResource(Rc); impl Resource for WsCancelResource { @@ -217,6 +129,33 @@ pub struct CreateResponse { extensions: String, } +async fn handshake( + cancel_resource: Option>, + request: Request, + socket: S, +) -> Result<(WebSocket, http::Response), AnyError> { + let client = + fastwebsockets::handshake::client(&LocalExecutor, request, socket); + + let (upgraded, response) = if let Some(cancel_resource) = cancel_resource { + client.or_cancel(cancel_resource).await? + } else { + client.await + } + .map_err(|err| { + DomExceptionNetworkError::new(&format!( + "failed to connect to WebSocket: {err}" + )) + })?; + + let upgraded = upgraded.into_inner(); + let stream = + WebSocketStream::new(stream::WsStreamKind::Upgraded(upgraded), None); + let stream = WebSocket::after_handshake(stream, Role::Client); + + Ok((stream, response)) +} + #[op] pub async fn op_ws_create( state: Rc>, @@ -243,7 +182,7 @@ where .borrow_mut() .resource_table .get::(cancel_rid)?; - Some(r) + Some(r.0.clone()) } else { None }; @@ -252,12 +191,34 @@ where .borrow() .try_borrow::() .and_then(|it| it.0.clone()); - let root_cert_store = state.borrow().borrow::().0.clone(); + let root_cert_store = state + .borrow() + .borrow::() + .get_or_try_init()?; let user_agent = state.borrow().borrow::().0.clone(); let uri: Uri = url.parse()?; - let mut request = Request::builder().method(Method::GET).uri(&uri); + let mut request = Request::builder().method(Method::GET).uri( + uri + .path_and_query() + .ok_or(type_error("Missing path in url".to_string()))? 
+ .as_str(), + ); - request = request.header("User-Agent", user_agent); + let authority = uri.authority().unwrap().as_str(); + let host = authority + .find('@') + .map(|idx| authority.split_at(idx + 1).1) + .unwrap_or_else(|| authority); + request = request + .header("User-Agent", user_agent) + .header("Host", host) + .header(UPGRADE, "websocket") + .header(CONNECTION, "Upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13"); if !protocols.is_empty() { request = request.header("Sec-WebSocket-Protocol", protocols); @@ -287,7 +248,7 @@ where } } - let request = request.body(())?; + let request = request.body(Body::empty())?; let domain = &uri.host().unwrap().to_string(); let port = &uri.port_u16().unwrap_or(match uri.scheme_str() { Some("wss") => 443, @@ -297,8 +258,8 @@ where let addr = format!("{domain}:{port}"); let tcp_socket = TcpStream::connect(addr).await?; - let socket: MaybeTlsStream = match uri.scheme_str() { - Some("ws") => MaybeTlsStream::Plain(tcp_socket), + let (stream, response) = match uri.scheme_str() { + Some("ws") => handshake(cancel_resource, request, tcp_socket).await?, Some("wss") => { let tls_config = create_client_config( root_cert_store, @@ -310,43 +271,21 @@ where let dnsname = ServerName::try_from(domain.as_str()) .map_err(|_| invalid_hostname(domain))?; let tls_socket = tls_connector.connect(dnsname, tcp_socket).await?; - MaybeTlsStream::Rustls(tls_socket) + handshake(cancel_resource, request, tls_socket).await? } _ => unreachable!(), }; - let client = client_async_with_config( - request, - socket, - Some(WebSocketConfig { - max_message_size: Some(128 << 20), - max_frame_size: Some(32 << 20), - ..Default::default() - }), - ); - let (stream, response): (ClientWsStream, Response) = - if let Some(cancel_resource) = cancel_resource { - client.or_cancel(cancel_resource.0.to_owned()).await? 
- } else { - client.await - } - .map_err(|err| { - DomExceptionNetworkError::new(&format!( - "failed to connect to WebSocket: {err}" - )) - })?; - if let Some(cancel_rid) = cancel_handle { state.borrow_mut().resource_table.close(cancel_rid).ok(); } - let (ws_tx, ws_rx) = stream.split(); - let resource = WsStreamResource { - stream: WebSocketStreamType::Client { - rx: AsyncRefCell::new(ws_rx), - tx: AsyncRefCell::new(ws_tx), - }, - cancel: Default::default(), + let resource = ServerWebSocket { + buffered: Cell::new(0), + errored: Cell::new(None), + ws: AsyncRefCell::new(FragmentCollector::new(stream)), + closed: Cell::new(false), + tx_lock: AsyncRefCell::new(()), }; let mut state = state.borrow_mut(); let rid = state.resource_table.add(resource); @@ -368,17 +307,117 @@ where }) } -#[derive(Deserialize)] -#[serde(tag = "kind", content = "value", rename_all = "camelCase")] -pub enum SendValue { - Text(String), - Binary(ZeroCopyBuf), - Pong, - Ping, +#[repr(u16)] +pub enum MessageKind { + Text = 0, + Binary = 1, + Pong = 2, + Error = 3, + Closed = 4, } -#[op] -pub async fn op_ws_send_binary( +pub struct ServerWebSocket { + buffered: Cell, + errored: Cell>, + ws: AsyncRefCell>, + closed: Cell, + tx_lock: AsyncRefCell<()>, +} + +impl ServerWebSocket { + #[inline] + pub async fn write_frame( + self: &Rc, + frame: Frame, + ) -> Result<(), AnyError> { + let _lock = RcRef::map(self, |r| &r.tx_lock).borrow_mut().await; + // SAFETY: fastwebsockets only needs a mutable reference to the WebSocket + // to populate the write buffer. We encounter an await point when writing + // to the socket after the frame has already been written to the buffer. 
+ let ws = unsafe { &mut *self.ws.as_ptr() }; + ws.write_frame(frame) + .await + .map_err(|err| type_error(err.to_string()))?; + Ok(()) + } +} + +impl Resource for ServerWebSocket { + fn name(&self) -> Cow { + "serverWebSocket".into() + } +} + +pub fn ws_create_server_stream( + state: &mut OpState, + transport: NetworkStream, + read_buf: Bytes, +) -> Result { + let mut ws = WebSocket::after_handshake( + WebSocketStream::new( + stream::WsStreamKind::Network(transport), + Some(read_buf), + ), + Role::Server, + ); + ws.set_writev(true); + ws.set_auto_close(true); + ws.set_auto_pong(true); + + let ws_resource = ServerWebSocket { + buffered: Cell::new(0), + errored: Cell::new(None), + ws: AsyncRefCell::new(FragmentCollector::new(ws)), + closed: Cell::new(false), + tx_lock: AsyncRefCell::new(()), + }; + + let rid = state.resource_table.add(ws_resource); + Ok(rid) +} + +#[op(fast)] +pub fn op_ws_send_binary( + state: &mut OpState, + rid: ResourceId, + data: ZeroCopyBuf, +) { + let resource = state.resource_table.get::(rid).unwrap(); + let data = data.to_vec(); + let len = data.len(); + resource.buffered.set(resource.buffered.get() + len); + deno_core::task::spawn(async move { + if let Err(err) = resource + .write_frame(Frame::new(true, OpCode::Binary, None, data)) + .await + { + resource.errored.set(Some(err)); + } else { + resource.buffered.set(resource.buffered.get() - len); + } + }); +} + +#[op(fast)] +pub fn op_ws_send_text(state: &mut OpState, rid: ResourceId, data: String) { + let resource = state.resource_table.get::(rid).unwrap(); + let len = data.len(); + resource.buffered.set(resource.buffered.get() + len); + deno_core::task::spawn(async move { + if let Err(err) = resource + .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) + .await + { + resource.errored.set(Some(err)); + } else { + resource.buffered.set(resource.buffered.get() - len); + } + }); +} + +/// Async version of send. 
Does not update buffered amount as we rely on the socket itself for backpressure. +#[op(fast)] +pub async fn op_ws_send_binary_async( state: Rc>, rid: ResourceId, data: ZeroCopyBuf, @@ -386,13 +425,16 @@ pub async fn op_ws_send_binary( let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(Message::Binary(data.to_vec())).await?; - Ok(()) + .get::(rid)?; + let data = data.to_vec(); + resource + .write_frame(Frame::new(true, OpCode::Binary, None, data)) + .await } -#[op] -pub async fn op_ws_send_text( +/// Async version of send. Does not update buffered amount as we rely on the socket itself for backpressure. +#[op(fast)] +pub async fn op_ws_send_text_async( state: Rc>, rid: ResourceId, data: String, @@ -400,30 +442,46 @@ pub async fn op_ws_send_text( let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(Message::Text(data)).await?; - Ok(()) + .get::(rid)?; + resource + .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) + .await +} + +#[op(fast)] +pub fn op_ws_get_buffered_amount(state: &mut OpState, rid: ResourceId) -> u32 { + state + .resource_table + .get::(rid) + .unwrap() + .buffered + .get() as u32 } #[op] -pub async fn op_ws_send( +pub async fn op_ws_send_pong( state: Rc>, rid: ResourceId, - value: SendValue, ) -> Result<(), AnyError> { - let msg = match value { - SendValue::Text(text) => Message::Text(text), - SendValue::Binary(buf) => Message::Binary(buf.to_vec()), - SendValue::Pong => Message::Pong(vec![]), - SendValue::Ping => Message::Ping(vec![]), - }; - let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(msg).await?; - Ok(()) + .get::(rid)?; + resource.write_frame(Frame::pong(vec![])).await +} + +#[op] +pub async fn op_ws_send_ping( + state: Rc>, + rid: ResourceId, +) -> Result<(), AnyError> { + let resource = state + .borrow_mut() + .resource_table + .get::(rid)?; + resource + .write_frame(Frame::new(true, OpCode::Ping, None, vec![])) + .await } 
#[op(deferred)] @@ -433,34 +491,20 @@ pub async fn op_ws_close( code: Option, reason: Option, ) -> Result<(), AnyError> { - let rid = rid; - let msg = Message::Close(code.map(|c| CloseFrame { - code: CloseCode::from(c), - reason: match reason { - Some(reason) => Cow::from(reason), - None => Default::default(), - }, - })); - let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(msg).await?; + .get::(rid)?; + let frame = reason + .map(|reason| Frame::close(code.unwrap_or(1005), reason.as_bytes())) + .unwrap_or_else(|| Frame::close_raw(vec![])); + + resource.closed.set(true); + resource.write_frame(frame).await?; Ok(()) } -#[repr(u16)] -pub enum MessageKind { - Text = 0, - Binary = 1, - Pong = 2, - Ping = 3, - Error = 5, - Closed = 6, -} - -#[op] +#[op(fast)] pub async fn op_ws_next_event( state: Rc>, rid: ResourceId, @@ -468,48 +512,62 @@ pub async fn op_ws_next_event( let resource = state .borrow_mut() .resource_table - .get::(rid)?; + .get::(rid)?; - let cancel = RcRef::map(&resource, |r| &r.cancel); - let val = resource.next_message(cancel).await?; - let res = match val { - Some(Ok(Message::Text(text))) => { - (MessageKind::Text as u16, StringOrBuffer::String(text)) - } - Some(Ok(Message::Binary(data))) => ( - MessageKind::Binary as u16, - StringOrBuffer::Buffer(data.into()), - ), - Some(Ok(Message::Close(Some(frame)))) => ( - frame.code.into(), - StringOrBuffer::String(frame.reason.to_string()), - ), - Some(Ok(Message::Close(None))) => { - (1005, StringOrBuffer::String("".to_string())) - } - Some(Ok(Message::Ping(_))) => ( - MessageKind::Ping as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - Some(Ok(Message::Pong(_))) => ( - MessageKind::Pong as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - Some(Err(e)) => ( - MessageKind::Error as u16, - StringOrBuffer::String(e.to_string()), - ), - None => { - // No message was received, presumably the socket closed while we waited. 
- // Try close the stream, ignoring any errors, and report closed status to JavaScript. - let _ = state.borrow_mut().resource_table.close(rid); - ( - MessageKind::Closed as u16, + if let Some(err) = resource.errored.take() { + return Err(err); + } + + let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; + loop { + let val = match ws.read_frame().await { + Ok(val) => val, + Err(err) => { + // No message was received, socket closed while we waited. + // Try close the stream, ignoring any errors, and report closed status to JavaScript. + if resource.closed.get() { + let _ = state.borrow_mut().resource_table.close(rid); + return Ok(( + MessageKind::Closed as u16, + StringOrBuffer::Buffer(vec![].into()), + )); + } + + return Ok(( + MessageKind::Error as u16, + StringOrBuffer::String(err.to_string()), + )); + } + }; + + break Ok(match val.opcode { + OpCode::Text => ( + MessageKind::Text as u16, + StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), + ), + OpCode::Binary => ( + MessageKind::Binary as u16, + StringOrBuffer::Buffer(val.payload.into()), + ), + OpCode::Close => { + if val.payload.len() < 2 { + return Ok((1005, StringOrBuffer::String("".to_string()))); + } + + let close_code = + CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); + let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); + (close_code.into(), StringOrBuffer::String(reason)) + } + OpCode::Pong => ( + MessageKind::Pong as u16, StringOrBuffer::Buffer(vec![].into()), - ) - } - }; - Ok(res) + ), + OpCode::Continuation | OpCode::Ping => { + continue; + } + }); + } } deno_core::extension!(deno_websocket, @@ -518,21 +576,20 @@ deno_core::extension!(deno_websocket, ops = [ op_ws_check_permission_and_cancel_handle

, op_ws_create

, - op_ws_send, op_ws_close, op_ws_next_event, op_ws_send_binary, op_ws_send_text, - server::op_server_ws_send, - server::op_server_ws_close, - server::op_server_ws_next_event, - server::op_server_ws_send_binary, - server::op_server_ws_send_text, + op_ws_send_binary_async, + op_ws_send_text_async, + op_ws_send_ping, + op_ws_send_pong, + op_ws_get_buffered_amount, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { user_agent: String, - root_cert_store: Option, + root_cert_store_provider: Option>, unsafely_ignore_certificate_errors: Option> }, state = |state, options| { @@ -540,7 +597,7 @@ deno_core::extension!(deno_websocket, state.put(UnsafelyIgnoreCertificateErrors( options.unsafely_ignore_certificate_errors, )); - state.put::(WsRootStore(options.root_cert_store)); + state.put::(WsRootStoreProvider(options.root_cert_store_provider)); }, ); @@ -573,3 +630,17 @@ pub fn get_network_error_class_name(e: &AnyError) -> Option<&'static str> { e.downcast_ref::() .map(|_| "DOMExceptionNetworkError") } + +// Needed so hyper can use non Send futures +#[derive(Clone)] +struct LocalExecutor; + +impl hyper::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + deno_core::task::spawn(fut); + } +} diff --git a/ext/websocket/server.rs b/ext/websocket/server.rs deleted file mode 100644 index 44bc07e59b..0000000000 --- a/ext/websocket/server.rs +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -use crate::MessageKind; -use crate::SendValue; -use crate::Upgraded; -use deno_core::error::type_error; -use deno_core::error::AnyError; -use deno_core::op; -use deno_core::AsyncRefCell; -use deno_core::OpState; -use deno_core::RcRef; -use deno_core::Resource; -use deno_core::ResourceId; -use deno_core::StringOrBuffer; -use deno_core::ZeroCopyBuf; -use std::borrow::Cow; -use std::cell::RefCell; -use std::pin::Pin; -use std::rc::Rc; - -use fastwebsockets::CloseCode; -use fastwebsockets::FragmentCollector; -use fastwebsockets::Frame; -use fastwebsockets::OpCode; -use fastwebsockets::WebSocket; - -pub struct ServerWebSocket { - ws: AsyncRefCell>>>, -} - -impl ServerWebSocket { - #[inline] - pub async fn write_frame( - self: Rc, - frame: Frame, - ) -> Result<(), AnyError> { - // SAFETY: fastwebsockets only needs a mutable reference to the WebSocket - // to populate the write buffer. We encounter an await point when writing - // to the socket after the frame has already been written to the buffer. 
- let ws = unsafe { &mut *self.ws.as_ptr() }; - ws.write_frame(frame) - .await - .map_err(|err| type_error(err.to_string()))?; - Ok(()) - } -} - -impl Resource for ServerWebSocket { - fn name(&self) -> Cow { - "serverWebSocket".into() - } -} -pub async fn ws_create_server_stream( - state: &Rc>, - transport: Pin>, -) -> Result { - let mut ws = WebSocket::after_handshake(transport); - ws.set_writev(false); - ws.set_auto_close(true); - ws.set_auto_pong(true); - - let ws_resource = ServerWebSocket { - ws: AsyncRefCell::new(FragmentCollector::new(ws)), - }; - - let resource_table = &mut state.borrow_mut().resource_table; - let rid = resource_table.add(ws_resource); - Ok(rid) -} - -#[op] -pub async fn op_server_ws_send_binary( - state: Rc>, - rid: ResourceId, - data: ZeroCopyBuf, -) -> Result<(), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource - .write_frame(Frame::new(true, OpCode::Binary, None, data.to_vec())) - .await -} - -#[op] -pub async fn op_server_ws_send_text( - state: Rc>, - rid: ResourceId, - data: String, -) -> Result<(), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource - .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) - .await -} - -#[op] -pub async fn op_server_ws_send( - state: Rc>, - rid: ResourceId, - value: SendValue, -) -> Result<(), AnyError> { - let msg = match value { - SendValue::Text(text) => { - Frame::new(true, OpCode::Text, None, text.into_bytes()) - } - SendValue::Binary(buf) => { - Frame::new(true, OpCode::Binary, None, buf.to_vec()) - } - SendValue::Pong => Frame::new(true, OpCode::Pong, None, vec![]), - SendValue::Ping => Frame::new(true, OpCode::Ping, None, vec![]), - }; - - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource.write_frame(msg).await -} - -#[op(deferred)] -pub async fn op_server_ws_close( - state: Rc>, - rid: ResourceId, - code: Option, - reason: Option, -) -> Result<(), 
AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - let frame = reason - .map(|reason| Frame::close(code.unwrap_or(1005), reason.as_bytes())) - .unwrap_or_else(|| Frame::close_raw(vec![])); - resource.write_frame(frame).await -} - -#[op(deferred)] -pub async fn op_server_ws_next_event( - state: Rc>, - rid: ResourceId, -) -> Result<(u16, StringOrBuffer), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; - let val = match ws.read_frame().await { - Ok(val) => val, - Err(err) => { - return Ok(( - MessageKind::Error as u16, - StringOrBuffer::String(err.to_string()), - )) - } - }; - - let res = match val.opcode { - OpCode::Text => ( - MessageKind::Text as u16, - StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), - ), - OpCode::Binary => ( - MessageKind::Binary as u16, - StringOrBuffer::Buffer(val.payload.into()), - ), - OpCode::Close => { - if val.payload.len() < 2 { - return Ok((1005, StringOrBuffer::String("".to_string()))); - } - - let close_code = - CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); - let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); - (close_code.into(), StringOrBuffer::String(reason)) - } - OpCode::Ping => ( - MessageKind::Ping as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Pong => ( - MessageKind::Pong as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Continuation => { - return Err(type_error("Unexpected continuation frame")) - } - }; - Ok(res) -} diff --git a/ext/websocket/stream.rs b/ext/websocket/stream.rs new file mode 100644 index 0000000000..6f93406f62 --- /dev/null +++ b/ext/websocket/stream.rs @@ -0,0 +1,114 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use bytes::Buf; +use bytes::Bytes; +use deno_net::raw::NetworkStream; +use hyper::upgrade::Upgraded; +use std::pin::Pin; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; +use tokio::io::ReadBuf; + +// TODO(bartlomieju): remove this +pub(crate) enum WsStreamKind { + Upgraded(Upgraded), + Network(NetworkStream), +} + +pub(crate) struct WebSocketStream { + stream: WsStreamKind, + pre: Option, +} + +impl WebSocketStream { + pub fn new(stream: WsStreamKind, buffer: Option) -> Self { + Self { + stream, + pre: buffer, + } + } +} + +impl AsyncRead for WebSocketStream { + // From hyper's Rewind (https://github.com/hyperium/hyper), MIT License, Copyright (c) Sean McArthur + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if let Some(mut prefix) = self.pre.take() { + // If there are no remaining bytes, let the bytes get dropped. + if !prefix.is_empty() { + let copy_len = std::cmp::min(prefix.len(), buf.remaining()); + // TODO: There should be a way to do following two lines cleaner... 
+ buf.put_slice(&prefix[..copy_len]); + prefix.advance(copy_len); + // Put back what's left + if !prefix.is_empty() { + self.pre = Some(prefix); + } + + return Poll::Ready(Ok(())); + } + } + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_read(cx, buf), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_read(cx, buf), + } + } +} + +impl AsyncWrite for WebSocketStream { + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_write(cx, buf), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_write(cx, buf), + } + } + + fn poll_flush( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_flush(cx), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_flush(cx), + } + } + + fn poll_shutdown( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_shutdown(cx), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_shutdown(cx), + } + } + + fn is_write_vectored(&self) -> bool { + match &self.stream { + WsStreamKind::Network(stream) => stream.is_write_vectored(), + WsStreamKind::Upgraded(stream) => stream.is_write_vectored(), + } + } + + fn poll_write_vectored( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => { + Pin::new(stream).poll_write_vectored(cx, bufs) + } + WsStreamKind::Upgraded(stream) => { + Pin::new(stream).poll_write_vectored(cx, bufs) + } + } + } +} diff --git a/ext/webstorage/01_webstorage.js b/ext/webstorage/01_webstorage.js index bc6d173b78..58c68c832d 100644 --- 
a/ext/webstorage/01_webstorage.js +++ b/ext/webstorage/01_webstorage.js @@ -36,10 +36,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'key' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - index = webidl.converters["unsigned long"](index, { - prefix, - context: "Argument 1", - }); + index = webidl.converters["unsigned long"](index, prefix, "Argument 1"); return ops.op_webstorage_key(index, this[_persistent]); } @@ -48,14 +45,8 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'setItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 2, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 2", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); + value = webidl.converters.DOMString(value, prefix, "Argument 2"); ops.op_webstorage_set(key, value, this[_persistent]); } @@ -64,10 +55,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'getItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); return ops.op_webstorage_get(key, this[_persistent]); } @@ -76,10 +64,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'removeItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); ops.op_webstorage_remove(key, this[_persistent]); } diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 215986c24a..7901e9b9e5 100644 --- a/ext/webstorage/Cargo.toml +++ 
b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.94.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index a059a9580d..7768f8c9d7 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.59.0" +version = "0.67.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/fast_call.rs b/ops/fast_call.rs index 2485b6083c..ebbb1927bc 100644 --- a/ops/fast_call.rs +++ b/ops/fast_call.rs @@ -245,41 +245,16 @@ pub(crate) fn generate( } if optimizer.is_async { - // Referenced variables are declared in parent block. - let track_async = q!({ - let __op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - }); - - output_transforms.push_tokens(&track_async); - let queue_future = if optimizer.returns_result { q!({ - let realm_idx = __ctx.realm_idx; - let __get_class = __state.get_error_class_fn; - let result = _ops::queue_fast_async_op(__ctx, async move { - let result = result.await; - ( - realm_idx, - __promise_id, - __op_id, - _ops::to_op_result(__get_class, result), - ) - }); + let result = _ops::queue_fast_async_op(__ctx, __promise_id, result); }) } else { q!({ - let realm_idx = __ctx.realm_idx; - let result = _ops::queue_fast_async_op(__ctx, async move { - let result = result.await; - ( - realm_idx, - __promise_id, - __op_id, - _ops::OpResult::Ok(result.into()), - ) - }); + let result = + _ops::queue_fast_async_op(__ctx, __promise_id, async move { + Ok(result.await) + }); }) }; diff --git a/ops/lib.rs b/ops/lib.rs index 7bf9620917..d7c8b06402 100644 --- a/ops/lib.rs +++ b/ops/lib.rs @@ -143,7 +143,8 @@ impl Op { is_async: #is_async, is_unstable: #is_unstable, is_v8: #is_v8, - force_registration: false, + // TODO(mmastrac) + arg_count: 0, } } @@ -158,8 +159,8 @@ impl Op { let 
has_fallible_fast_call = active && optimizer.returns_result; - let v8_body = if is_async { - let deferred = attrs.deferred; + let (v8_body, arg_count) = if is_async { + let deferred: bool = attrs.deferred; codegen_v8_async( &core, &item, @@ -204,7 +205,7 @@ impl Op { is_async: #is_async, is_unstable: #is_unstable, is_v8: #is_v8, - force_registration: false, + arg_count: #arg_count as u8, } } @@ -241,7 +242,7 @@ fn codegen_v8_async( margs: Attributes, asyncness: bool, deferred: bool, -) -> TokenStream2 { +) -> (TokenStream2, usize) { let Attributes { is_v8, .. } = margs; let special_args = f .sig @@ -258,43 +259,64 @@ fn codegen_v8_async( let (arg_decls, args_tail, _) = codegen_args(core, f, rust_i0, 1, asyncness); let type_params = exclude_lifetime_params(&f.sig.generics.params); - let (pre_result, mut result_fut) = match asyncness { - true => ( - quote! {}, - quote! { Self::call::<#type_params>(#args_head #args_tail).await; }, - ), - false => ( - quote! { let result_fut = Self::call::<#type_params>(#args_head #args_tail); }, - quote! { result_fut.await; }, - ), - }; - let result_wrapper = match is_result(&f.sig.output) { - true => { - // Support `Result> + 'static, AnyError>` - if !asyncness { - result_fut = quote! { result_fut; }; - quote! { - let result = match result { - Ok(fut) => fut.await, - Err(e) => return (realm_idx, promise_id, op_id, #core::_ops::to_op_result::<()>(get_class, Err(e))), - }; - } - } else { - quote! {} + let wrapper = match (asyncness, is_result(&f.sig.output)) { + (true, true) => { + quote! { + let fut = #core::_ops::map_async_op1(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (true, false) => { + quote! 
{ + let fut = #core::_ops::map_async_op2(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (false, true) => { + quote! { + let fut = #core::_ops::map_async_op3(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (false, false) => { + quote! { + let fut = #core::_ops::map_async_op4(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); } } - false => quote! { let result = Ok(result); }, }; - quote! { + let token_stream = quote! { use #core::futures::FutureExt; // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime let ctx = unsafe { &*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value() as *const #core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = #core::v8::Local::<#core::v8::Integer>::try_from(promise_id) @@ -310,25 +332,15 @@ fn codegen_v8_async( }; #arg_decls - - // Track async call & get copy of get_error_class_fn - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; - - #pre_result - let maybe_response = #core::_ops::queue_async_op(ctx, scope, #deferred, async move { - let result = #result_fut - #result_wrapper - (realm_idx, promise_id, op_id, #core::_ops::to_op_result(get_class, result)) - }); + #wrapper if let Some(response) = maybe_response { rv.set(response); } - } + }; + + // +1 arg for the promise ID + (token_stream, 1 + f.sig.inputs.len() - rust_i0) } fn scope_arg(arg: &FnArg) -> Option { @@ -365,7 +377,7 @@ fn codegen_v8_sync( f: &syn::ItemFn, margs: Attributes, 
has_fallible_fast_call: bool, -) -> TokenStream2 { +) -> (TokenStream2, usize) { let Attributes { is_v8, .. } = margs; let special_args = f .sig @@ -396,7 +408,7 @@ fn codegen_v8_sync( quote! {} }; - quote! { + let token_stream = quote! { // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime let ctx = unsafe { &*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value() @@ -413,7 +425,9 @@ fn codegen_v8_sync( op_state.tracker.track_sync(ctx.id); #ret - } + }; + + (token_stream, f.sig.inputs.len() - rust_i0) } /// (full declarations, idents, v8 argument count) diff --git a/ops/optimizer_tests/async_nop.out b/ops/optimizer_tests/async_nop.out index 7782b5970d..d59967a451 100644 --- a/ops/optimizer_tests/async_nop.out +++ b/ops/optimizer_tests/async_nop.out @@ -40,7 +40,7 @@ impl op_void_async { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] @@ -56,8 +56,6 @@ impl op_void_async { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -74,25 +72,13 @@ impl op_void_async { return; } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op2(ctx, Self::call()); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call().await; - let result = Ok(result); - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); @@ -116,16 +102,10 @@ fn op_void_async_fast_fn<'scope>( }; let op_state = __ctx.state.clone(); let result = op_void_async::call(); - let 
__op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - let realm_idx = __ctx.realm_idx; let result = _ops::queue_fast_async_op( __ctx, - async move { - let result = result.await; - (realm_idx, __promise_id, __op_id, _ops::OpResult::Ok(result.into())) - }, + __promise_id, + async move { Ok(result.await) }, ); result } diff --git a/ops/optimizer_tests/async_result.out b/ops/optimizer_tests/async_result.out index c3bb433f1a..6f61a9697c 100644 --- a/ops/optimizer_tests/async_result.out +++ b/ops/optimizer_tests/async_result.out @@ -40,7 +40,7 @@ impl op_async_result { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] @@ -56,8 +56,6 @@ impl op_async_result { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -85,24 +83,16 @@ impl op_async_result { return deno_core::_ops::throw_type_error(scope, msg); } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call(ctx.state.clone(), arg_0), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call(ctx.state.clone(), arg_0).await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); @@ -127,16 +117,5 @@ fn op_async_result_fast_fn<'scope>( }; let state = __ctx.state.clone(); let result = op_async_result::call(state, rid); - let __op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - let realm_idx = 
__ctx.realm_idx; - let __get_class = __state.get_error_class_fn; - let result = _ops::queue_fast_async_op( - __ctx, - async move { - let result = result.await; - (realm_idx, __promise_id, __op_id, _ops::to_op_result(__get_class, result)) - }, - ); + let result = _ops::queue_fast_async_op(__ctx, __promise_id, result); } diff --git a/ops/optimizer_tests/callback_options.out b/ops/optimizer_tests/callback_options.out index e892e01189..d46d46765d 100644 --- a/ops/optimizer_tests/callback_options.out +++ b/ops/optimizer_tests/callback_options.out @@ -40,7 +40,7 @@ impl op_fallback { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/cow_str.out b/ops/optimizer_tests/cow_str.out index dc909da819..d52df86ee6 100644 --- a/ops/optimizer_tests/cow_str.out +++ b/ops/optimizer_tests/cow_str.out @@ -40,7 +40,7 @@ impl op_cow_str { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/f64_slice.out b/ops/optimizer_tests/f64_slice.out index 3e8ef07d85..403ec8fa39 100644 --- a/ops/optimizer_tests/f64_slice.out +++ b/ops/optimizer_tests/f64_slice.out @@ -40,7 +40,7 @@ impl op_f64_buf { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/incompatible_1.out b/ops/optimizer_tests/incompatible_1.out index 5104fb5e46..0e8c98fd0a 100644 --- a/ops/optimizer_tests/incompatible_1.out +++ b/ops/optimizer_tests/incompatible_1.out @@ -30,7 +30,7 @@ impl op_sync_serialize_object_with_numbers_as_keys { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/issue16934.out b/ops/optimizer_tests/issue16934.out index 68f59ef438..355add5e08 100644 --- a/ops/optimizer_tests/issue16934.out +++ 
b/ops/optimizer_tests/issue16934.out @@ -30,7 +30,7 @@ impl send_stdin { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] @@ -50,8 +50,6 @@ impl send_stdin { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -79,28 +77,19 @@ impl send_stdin { ); } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call( + compile_error!("mutable opstate is not supported in async ops"), + arg_0, + ), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call( - compile_error!("mutable opstate is not supported in async ops"), - arg_0, - ) - .await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); diff --git a/ops/optimizer_tests/issue16934_fast.out b/ops/optimizer_tests/issue16934_fast.out index 7a4a39f348..b6e80b5749 100644 --- a/ops/optimizer_tests/issue16934_fast.out +++ b/ops/optimizer_tests/issue16934_fast.out @@ -30,7 +30,7 @@ impl send_stdin { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] @@ -48,8 +48,6 @@ impl send_stdin { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -77,28 +75,19 @@ impl send_stdin { return deno_core::_ops::throw_type_error(scope, msg); } }; - let get_class = { - let state = 
::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call( + compile_error!("mutable opstate is not supported in async ops"), + arg_0, + ), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call( - compile_error!("mutable opstate is not supported in async ops"), - arg_0, - ) - .await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); diff --git a/ops/optimizer_tests/op_blob_revoke_object_url.out b/ops/optimizer_tests/op_blob_revoke_object_url.out index 4eda692240..3165430df1 100644 --- a/ops/optimizer_tests/op_blob_revoke_object_url.out +++ b/ops/optimizer_tests/op_blob_revoke_object_url.out @@ -30,7 +30,7 @@ impl op_blob_revoke_object_url { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_ffi_ptr_value.out b/ops/optimizer_tests/op_ffi_ptr_value.out index 3fee00cff8..3e4b2571da 100644 --- a/ops/optimizer_tests/op_ffi_ptr_value.out +++ b/ops/optimizer_tests/op_ffi_ptr_value.out @@ -40,7 +40,7 @@ impl op_ffi_ptr_value { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_print.out b/ops/optimizer_tests/op_print.out index 7bf5457d78..d79cdfd620 100644 --- a/ops/optimizer_tests/op_print.out +++ b/ops/optimizer_tests/op_print.out @@ -30,7 +30,7 @@ impl op_print { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state.out b/ops/optimizer_tests/op_state.out index cebb1e25c7..1d83ae4315 100644 --- a/ops/optimizer_tests/op_state.out +++ 
b/ops/optimizer_tests/op_state.out @@ -40,7 +40,7 @@ impl op_set_exit_code { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_basic1.out b/ops/optimizer_tests/op_state_basic1.out index d8278daca6..c1ea447c53 100644 --- a/ops/optimizer_tests/op_state_basic1.out +++ b/ops/optimizer_tests/op_state_basic1.out @@ -40,7 +40,7 @@ impl foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_generics.out b/ops/optimizer_tests/op_state_generics.out index 631a2142f7..24596256ac 100644 --- a/ops/optimizer_tests/op_state_generics.out +++ b/ops/optimizer_tests/op_state_generics.out @@ -46,7 +46,7 @@ impl op_foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_result.out b/ops/optimizer_tests/op_state_result.out index d03ffd5a61..4c58de8d6e 100644 --- a/ops/optimizer_tests/op_state_result.out +++ b/ops/optimizer_tests/op_state_result.out @@ -40,7 +40,7 @@ impl foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_warning.out b/ops/optimizer_tests/op_state_warning.out index 5548dc134e..97d76aa975 100644 --- a/ops/optimizer_tests/op_state_warning.out +++ b/ops/optimizer_tests/op_state_warning.out @@ -40,7 +40,7 @@ impl op_listen { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_with_transforms.out b/ops/optimizer_tests/op_state_with_transforms.out index ad4e5335a8..3bbb7289fc 100644 --- a/ops/optimizer_tests/op_state_with_transforms.out +++ b/ops/optimizer_tests/op_state_with_transforms.out @@ -46,7 +46,7 @@ impl op_now 
{ is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/opstate_with_arity.out b/ops/optimizer_tests/opstate_with_arity.out index 037774c255..88651ce765 100644 --- a/ops/optimizer_tests/opstate_with_arity.out +++ b/ops/optimizer_tests/opstate_with_arity.out @@ -40,7 +40,7 @@ impl op_add_4 { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 4usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/option_arg.out b/ops/optimizer_tests/option_arg.out index 39d47562b8..f00937a745 100644 --- a/ops/optimizer_tests/option_arg.out +++ b/ops/optimizer_tests/option_arg.out @@ -30,7 +30,7 @@ impl op_try_close { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/owned_string.out b/ops/optimizer_tests/owned_string.out index f8b195b2fb..d186e5108b 100644 --- a/ops/optimizer_tests/owned_string.out +++ b/ops/optimizer_tests/owned_string.out @@ -40,7 +40,7 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/param_mut_binding_warning.out b/ops/optimizer_tests/param_mut_binding_warning.out index 98dc6b2b91..5435b21db6 100644 --- a/ops/optimizer_tests/param_mut_binding_warning.out +++ b/ops/optimizer_tests/param_mut_binding_warning.out @@ -30,7 +30,7 @@ impl op_read_sync { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/raw_ptr.out b/ops/optimizer_tests/raw_ptr.out index 678ce50152..a1bacbfc83 100644 --- a/ops/optimizer_tests/raw_ptr.out +++ b/ops/optimizer_tests/raw_ptr.out @@ -51,7 +51,7 @@ impl op_ffi_ptr_of { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 
2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/serde_v8_value.out b/ops/optimizer_tests/serde_v8_value.out index d0f8dacdfc..1a3d1ed31c 100644 --- a/ops/optimizer_tests/serde_v8_value.out +++ b/ops/optimizer_tests/serde_v8_value.out @@ -40,7 +40,7 @@ impl op_is_proxy { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/strings.out b/ops/optimizer_tests/strings.out index 3238bfc427..a1e684caf3 100644 --- a/ops/optimizer_tests/strings.out +++ b/ops/optimizer_tests/strings.out @@ -40,7 +40,7 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/strings_result.out b/ops/optimizer_tests/strings_result.out index 8b2e2acef2..46e27e7629 100644 --- a/ops/optimizer_tests/strings_result.out +++ b/ops/optimizer_tests/strings_result.out @@ -30,7 +30,7 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/u64_result.out b/ops/optimizer_tests/u64_result.out index 02d25686a8..46ccd53e1a 100644 --- a/ops/optimizer_tests/u64_result.out +++ b/ops/optimizer_tests/u64_result.out @@ -30,7 +30,7 @@ impl op_bench_now { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/uint8array.out b/ops/optimizer_tests/uint8array.out index 93fa40e1f2..31915d2fed 100644 --- a/ops/optimizer_tests/uint8array.out +++ b/ops/optimizer_tests/uint8array.out @@ -40,7 +40,7 @@ impl op_import_spki_x25519 { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_result.out b/ops/optimizer_tests/unit_result.out index 354a2e3b94..cab67c0ea9 100644 
--- a/ops/optimizer_tests/unit_result.out +++ b/ops/optimizer_tests/unit_result.out @@ -40,7 +40,7 @@ impl op_unit_result { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_result2.out b/ops/optimizer_tests/unit_result2.out index 721229121b..3d84797911 100644 --- a/ops/optimizer_tests/unit_result2.out +++ b/ops/optimizer_tests/unit_result2.out @@ -40,7 +40,7 @@ impl op_set_nodelay { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_ret.out b/ops/optimizer_tests/unit_ret.out index 7d0f63dc88..523ae6504d 100644 --- a/ops/optimizer_tests/unit_ret.out +++ b/ops/optimizer_tests/unit_ret.out @@ -40,7 +40,7 @@ impl op_unit { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/wasm_op.out b/ops/optimizer_tests/wasm_op.out index 0196f45481..5a8996cd03 100644 --- a/ops/optimizer_tests/wasm_op.out +++ b/ops/optimizer_tests/wasm_op.out @@ -40,7 +40,7 @@ impl op_wasm { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 58f292e8f9..5a7196ea8f 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.107.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -29,8 +29,12 @@ name = "deno_runtime" path = "lib.rs" [[example]] -name = "hello_runtime" -path = "examples/hello_runtime.rs" +name = "extension_with_esm" +path = "examples/extension_with_esm/main.rs" + +[[example]] +name = "extension_with_ops" +path = "examples/extension_with_ops/main.rs" [build-dependencies] deno_ast.workspace = true @@ -41,7 +45,7 @@ deno_core.workspace = 
true deno_crypto.workspace = true deno_fetch.workspace = true deno_ffi.workspace = true -deno_fs.workspace = true +deno_fs = { workspace = true, features = ["sync_fs"] } deno_http.workspace = true deno_io.workspace = true deno_net.workspace = true @@ -67,7 +71,7 @@ deno_core.workspace = true deno_crypto.workspace = true deno_fetch.workspace = true deno_ffi.workspace = true -deno_fs.workspace = true +deno_fs = { workspace = true, features = ["sync_fs"] } deno_http.workspace = true deno_io.workspace = true deno_kv.workspace = true @@ -80,6 +84,7 @@ deno_web.workspace = true deno_webidl.workspace = true deno_websocket.workspace = true deno_webstorage.workspace = true +fastwebsockets.workspace = true atty.workspace = true console_static_text.workspace = true diff --git a/runtime/build.rs b/runtime/build.rs index d47bee9419..f656682a1d 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -18,7 +18,7 @@ mod startup_snapshot { use deno_core::Extension; use deno_core::ExtensionFileSource; use deno_core::ModuleCode; - use deno_fs::StdFs; + use deno_http::DefaultHttpPropertyExtractor; use std::path::Path; fn transpile_ts_for_snapshotting( @@ -122,12 +122,16 @@ mod startup_snapshot { } impl deno_node::NodePermissions for Permissions { - fn check_read( + fn check_net_url( &mut self, - _p: &Path, + _url: &deno_core::url::Url, + _api_name: &str, ) -> Result<(), deno_core::error::AnyError> { unreachable!("snapshotting!") } + fn check_read(&self, _p: &Path) -> Result<(), deno_core::error::AnyError> { + unreachable!("snapshotting!") + } } impl deno_net::NetPermissions for Permissions { @@ -218,13 +222,6 @@ mod startup_snapshot { } } - struct SnapshotNodeEnv; - - impl deno_node::NodeEnv for SnapshotNodeEnv { - type P = Permissions; - type Fs = deno_node::RealFs; - } - deno_core::extension!(runtime, deps = [ deno_webidl, @@ -278,12 +275,24 @@ mod startup_snapshot { include_str!("js/99_main.js"), ), }]); + ext.esm_entry_point("ext:runtime_main/js/99_main.js"); + } + ); + + 
#[cfg(feature = "snapshot_from_snapshot")] + deno_core::extension!( + runtime_main, + deps = [runtime], + customizer = |ext: &mut deno_core::ExtensionBuilder| { + eprintln!("I am here!!!"); + ext.esm_entry_point("ext:runtime/90_deno_ns.js"); } ); pub fn create_runtime_snapshot(snapshot_path: PathBuf) { // NOTE(bartlomieju): ordering is important here, keep it in sync with // `runtime/worker.rs`, `runtime/web_worker.rs` and `cli/build.rs`! + let fs = std::sync::Arc::new(deno_fs::RealFs); let extensions: Vec = vec![ deno_webidl::deno_webidl::init_ops_and_esm(), deno_console::deno_console::init_ops_and_esm(), @@ -318,18 +327,17 @@ mod startup_snapshot { false, // No --unstable ), deno_napi::deno_napi::init_ops_and_esm::(), - deno_http::deno_http::init_ops_and_esm(), + deno_http::deno_http::init_ops_and_esm::(), deno_io::deno_io::init_ops_and_esm(Default::default()), - deno_fs::deno_fs::init_ops_and_esm::<_, Permissions>(false, StdFs), + deno_fs::deno_fs::init_ops_and_esm::(false, fs.clone()), runtime::init_ops_and_esm(), // FIXME(bartlomieju): these extensions are specified last, because they // depend on `runtime`, even though it should be other way around - deno_node::deno_node::init_ops_and_esm::(None), - #[cfg(not(feature = "snapshot_from_snapshot"))] + deno_node::deno_node::init_ops_and_esm::(None, fs), runtime_main::init_ops_and_esm(), ]; - create_snapshot(CreateSnapshotOptions { + let output = create_snapshot(CreateSnapshotOptions { cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"), snapshot_path, startup_snapshot: None, @@ -337,6 +345,9 @@ mod startup_snapshot { compression_cb: None, snapshot_module_load_cb: Some(Box::new(transpile_ts_for_snapshotting)), }); + for path in output.files_loaded_during_snapshot { + println!("cargo:rerun-if-changed={}", path.display()); + } } } @@ -357,6 +368,7 @@ fn main() { if env::var_os("DOCS_RS").is_some() { let snapshot_slice = &[]; #[allow(clippy::needless_borrow)] + #[allow(clippy::disallowed_methods)] 
std::fs::write(&runtime_snapshot_path, snapshot_slice).unwrap(); } diff --git a/runtime/clippy.toml b/runtime/clippy.toml new file mode 100644 index 0000000000..53676a90e6 --- /dev/null +++ b/runtime/clippy.toml @@ -0,0 +1,45 @@ +disallowed-methods = [ + { path = "std::env::current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::metadata", reason = 
"File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::env::set_current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::env::temp_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::copy", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::DirBuilder::new", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::OpenOptions::new", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read", reason = 
"File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::rename", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::write", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, +] diff --git a/runtime/examples/extension_with_esm/bootstrap.js b/runtime/examples/extension_with_esm/bootstrap.js new file mode 100644 index 0000000000..759dde9395 --- /dev/null +++ b/runtime/examples/extension_with_esm/bootstrap.js @@ -0,0 +1,5 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +function hello() { + console.log("Hello from extension!"); +} +globalThis.Extension = { hello }; diff --git a/runtime/examples/hello_runtime.js b/runtime/examples/extension_with_esm/main.js similarity index 86% rename from runtime/examples/hello_runtime.js rename to runtime/examples/extension_with_esm/main.js index 066fa21d66..5b079d8d89 100644 --- a/runtime/examples/hello_runtime.js +++ b/runtime/examples/extension_with_esm/main.js @@ -1,3 +1,4 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
console.log("Hello world!"); console.log(Deno); +Extension.hello(); diff --git a/runtime/examples/extension_with_esm/main.rs b/runtime/examples/extension_with_esm/main.rs new file mode 100644 index 0000000000..6b21460a3f --- /dev/null +++ b/runtime/examples/extension_with_esm/main.rs @@ -0,0 +1,36 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use std::path::Path; +use std::rc::Rc; + +use deno_core::error::AnyError; +use deno_core::FsModuleLoader; +use deno_core::ModuleSpecifier; +use deno_runtime::permissions::PermissionsContainer; +use deno_runtime::worker::MainWorker; +use deno_runtime::worker::WorkerOptions; + +deno_core::extension!( + hello_runtime, + esm_entry_point = "ext:hello_runtime/bootstrap.js", + esm = ["bootstrap.js"] +); + +#[tokio::main] +async fn main() -> Result<(), AnyError> { + let js_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("examples/extension_with_esm/main.js"); + let main_module = ModuleSpecifier::from_file_path(js_path).unwrap(); + let mut worker = MainWorker::bootstrap_from_options( + main_module.clone(), + PermissionsContainer::allow_all(), + WorkerOptions { + module_loader: Rc::new(FsModuleLoader), + extensions: vec![hello_runtime::init_ops_and_esm()], + ..Default::default() + }, + ); + worker.execute_main_module(&main_module).await?; + worker.run_event_loop(false).await?; + Ok(()) +} diff --git a/runtime/examples/extension_with_ops/main.js b/runtime/examples/extension_with_ops/main.js new file mode 100644 index 0000000000..042573c080 --- /dev/null +++ b/runtime/examples/extension_with_ops/main.js @@ -0,0 +1,2 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+Deno[Deno.internal].core.ops.op_hello("World"); diff --git a/runtime/examples/extension_with_ops/main.rs b/runtime/examples/extension_with_ops/main.rs new file mode 100644 index 0000000000..1feb4ba279 --- /dev/null +++ b/runtime/examples/extension_with_ops/main.rs @@ -0,0 +1,38 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use std::path::Path; +use std::rc::Rc; + +use deno_core::error::AnyError; +use deno_core::op; +use deno_core::FsModuleLoader; +use deno_core::ModuleSpecifier; +use deno_runtime::permissions::PermissionsContainer; +use deno_runtime::worker::MainWorker; +use deno_runtime::worker::WorkerOptions; + +deno_core::extension!(hello_runtime, ops = [op_hello]); + +#[op] +fn op_hello(text: &str) { + println!("Hello {}!", text); +} + +#[tokio::main] +async fn main() -> Result<(), AnyError> { + let js_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("examples/extension_with_ops/main.js"); + let main_module = ModuleSpecifier::from_file_path(js_path).unwrap(); + let mut worker = MainWorker::bootstrap_from_options( + main_module.clone(), + PermissionsContainer::allow_all(), + WorkerOptions { + module_loader: Rc::new(FsModuleLoader), + extensions: vec![hello_runtime::init_ops()], + ..Default::default() + }, + ); + worker.execute_main_module(&main_module).await?; + worker.run_event_loop(false).await?; + Ok(()) +} diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs deleted file mode 100644 index f44c774fbb..0000000000 --- a/runtime/examples/hello_runtime.rs +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::FsModuleLoader; -use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; -use deno_runtime::deno_web::BlobStore; -use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::worker::MainWorker; -use deno_runtime::worker::WorkerOptions; -use deno_runtime::BootstrapOptions; -use std::path::Path; -use std::rc::Rc; -use std::sync::Arc; - -fn get_error_class_name(e: &AnyError) -> &'static str { - deno_runtime::errors::get_error_class_name(e).unwrap_or("Error") -} - -#[tokio::main] -async fn main() -> Result<(), AnyError> { - let module_loader = Rc::new(FsModuleLoader); - let create_web_worker_cb = Arc::new(|_| { - todo!("Web workers are not supported in the example"); - }); - let web_worker_event_cb = Arc::new(|_| { - todo!("Web workers are not supported in the example"); - }); - - let options = WorkerOptions { - bootstrap: BootstrapOptions::default(), - extensions: vec![], - startup_snapshot: None, - unsafely_ignore_certificate_errors: None, - root_cert_store: None, - seed: None, - source_map_getter: None, - format_js_error_fn: None, - web_worker_preload_module_cb: web_worker_event_cb.clone(), - web_worker_pre_execute_module_cb: web_worker_event_cb, - create_web_worker_cb, - maybe_inspector_server: None, - should_break_on_first_statement: false, - should_wait_for_inspector_session: false, - module_loader, - npm_resolver: None, - get_error_class_fn: Some(&get_error_class_name), - cache_storage_dir: None, - origin_storage_dir: None, - blob_store: BlobStore::default(), - broadcast_channel: InMemoryBroadcastChannel::default(), - shared_array_buffer_store: None, - compiled_wasm_module_store: None, - stdio: Default::default(), - }; - - let js_path = - Path::new(env!("CARGO_MANIFEST_DIR")).join("examples/hello_runtime.js"); - let main_module = deno_core::resolve_path( - &js_path.to_string_lossy(), - &std::env::current_dir().context("Unable to get CWD")?, - )?; - let 
permissions = PermissionsContainer::allow_all(); - - let mut worker = MainWorker::bootstrap_from_options( - main_module.clone(), - permissions, - options, - ); - worker.execute_main_module(&main_module).await?; - worker.run_event_loop(false).await?; - Ok(()) -} diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs index 6852cbcd1e..5dcb96ec77 100644 --- a/runtime/fmt_errors.rs +++ b/runtime/fmt_errors.rs @@ -12,7 +12,7 @@ use std::fmt::Write as _; /// Compares all properties of JsError, except for JsError::cause. /// This function is used to detect that 2 JsError objects in a JsError::cause /// chain are identical, ie. there is a recursive cause. -/// 02_console.js, which also detects recursive causes, can use JS object +/// 01_console.js, which also detects recursive causes, can use JS object /// comparisons to compare errors. We don't have access to JS object identity in /// format_js_error(). fn errors_are_equal_without_cause(a: &JsError, b: &JsError) -> bool { diff --git a/runtime/fs_util.rs b/runtime/fs_util.rs index a29a57b397..204b0e4e85 100644 --- a/runtime/fs_util.rs +++ b/runtime/fs_util.rs @@ -3,32 +3,17 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; pub use deno_core::normalize_path; -use std::env::current_dir; -use std::io::Error; use std::path::Path; use std::path::PathBuf; -/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. 
-pub fn canonicalize_path(path: &Path) -> Result { - let mut canonicalized_path = path.canonicalize()?; - if cfg!(windows) { - canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - } - Ok(canonicalized_path) -} - #[inline] pub fn resolve_from_cwd(path: &Path) -> Result { if path.is_absolute() { Ok(normalize_path(path)) } else { - let cwd = - current_dir().context("Failed to get current working directory")?; + #[allow(clippy::disallowed_methods)] + let cwd = std::env::current_dir() + .context("Failed to get current working directory")?; Ok(normalize_path(cwd.join(path))) } } @@ -37,21 +22,26 @@ pub fn resolve_from_cwd(path: &Path) -> Result { mod tests { use super::*; + fn current_dir() -> PathBuf { + #[allow(clippy::disallowed_methods)] + std::env::current_dir().unwrap() + } + #[test] fn resolve_from_cwd_child() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a")); } #[test] fn resolve_from_cwd_dot() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd); } #[test] fn resolve_from_cwd_parent() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd); } @@ -75,7 +65,7 @@ mod tests { #[test] fn resolve_from_cwd_absolute() { let expected = Path::new("a"); - let cwd = current_dir().unwrap(); + let cwd = current_dir(); let absolute_expected = cwd.join(expected); assert_eq!(resolve_from_cwd(expected).unwrap(), absolute_expected); } diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs index d65e813cb6..1a67068964 100644 --- a/runtime/inspector_server.rs +++ b/runtime/inspector_server.rs @@ -1,7 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use core::convert::Infallible as Never; // Alias for the future `!` type. 
-use deno_core::error::AnyError; +// Alias for the future `!` type. +use core::convert::Infallible as Never; use deno_core::futures::channel::mpsc; use deno_core::futures::channel::mpsc::UnboundedReceiver; use deno_core::futures::channel::mpsc::UnboundedSender; @@ -15,11 +15,13 @@ use deno_core::futures::task::Poll; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::task::spawn; use deno_core::InspectorMsg; use deno_core::InspectorSessionProxy; use deno_core::JsRuntime; -use deno_websocket::tokio_tungstenite::tungstenite; -use deno_websocket::tokio_tungstenite::WebSocketStream; +use fastwebsockets::Frame; +use fastwebsockets::OpCode; +use fastwebsockets::WebSocket; use std::cell::RefCell; use std::collections::HashMap; use std::convert::Infallible; @@ -108,7 +110,7 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - tokio::task::spawn_local(fut); + deno_core::task::spawn(fut); } } @@ -145,35 +147,27 @@ fn handle_ws_request( let info = maybe_inspector_info.unwrap(); info.new_session_tx.clone() }; - - let resp = tungstenite::handshake::server::create_response(&req) - .map(|resp| resp.map(|_| hyper::Body::empty())) - .or_else(|e| match e { - tungstenite::error::Error::HttpFormat(http_error) => Err(http_error), - _ => http::Response::builder() - .status(http::StatusCode::BAD_REQUEST) - .body("Not a valid Websocket Request".into()), - })?; - let (parts, _) = req.into_parts(); - let req = http::Request::from_parts(parts, body); + let mut req = http::Request::from_parts(parts, body); + + let (resp, fut) = match fastwebsockets::upgrade::upgrade(&mut req) { + Ok(e) => e, + _ => { + return http::Response::builder() + .status(http::StatusCode::BAD_REQUEST) + .body("Not a valid Websocket Request".into()); + } + }; // spawn a task that will wait for websocket connection and then pump messages between // the socket and inspector proxy - tokio::task::spawn_local(async move { - let upgrade_result = 
hyper::upgrade::on(req).await; - let upgraded = if let Ok(u) = upgrade_result { - u + spawn(async move { + let websocket = if let Ok(w) = fut.await { + w } else { eprintln!("Inspector server failed to upgrade to WS connection"); return; }; - let websocket = WebSocketStream::from_raw_socket( - upgraded, - tungstenite::protocol::Role::Server, - None, - ) - .await; // The 'outbound' channel carries messages sent to the websocket. let (outbound_tx, outbound_rx) = mpsc::unbounded(); @@ -324,37 +318,36 @@ async fn server( /// 'futures' crate, therefore they can't participate in Tokio's cooperative /// task yielding. async fn pump_websocket_messages( - websocket: WebSocketStream, + mut websocket: WebSocket, inbound_tx: UnboundedSender, - outbound_rx: UnboundedReceiver, + mut outbound_rx: UnboundedReceiver, ) { - let (websocket_tx, websocket_rx) = websocket.split(); - - let outbound_pump = outbound_rx - .map(|msg| tungstenite::Message::text(msg.content)) - .map(Ok) - .forward(websocket_tx) - .map_err(|_| ()); - - let inbound_pump = async move { - let _result = websocket_rx - .map_err(AnyError::from) - .map_ok(|msg| { - // Messages that cannot be converted to strings are ignored. - if let Ok(msg_text) = msg.into_text() { - let _ = inbound_tx.unbounded_send(msg_text); + 'pump: loop { + tokio::select! { + Some(msg) = outbound_rx.next() => { + let msg = Frame::text(msg.content.into_bytes()); + let _ = websocket.write_frame(msg).await; } - }) - .try_collect::<()>() - .await; - - // Users don't care if there was an error coming from debugger, - // just about the fact that debugger did disconnect. 
- eprintln!("Debugger session ended"); - - Ok(()) - }; - let _ = future::try_join(outbound_pump, inbound_pump).await; + Ok(msg) = websocket.read_frame() => { + match msg.opcode { + OpCode::Text => { + if let Ok(s) = String::from_utf8(msg.payload) { + let _ = inbound_tx.unbounded_send(s); + } + } + OpCode::Close => { + // Users don't care if there was an error coming from debugger, + // just about the fact that debugger did disconnect. + eprintln!("Debugger session ended"); + break 'pump; + } + _ => { + // Ignore other messages. + } + } + } + } + } } /// Inspector information that is sent from the isolate thread to the server diff --git a/runtime/js/06_util.js b/runtime/js/06_util.js index db4564e32d..971957b7ec 100644 --- a/runtime/js/06_util.js +++ b/runtime/js/06_util.js @@ -5,21 +5,30 @@ const { Promise, SafeArrayIterator, } = primordials; -let logDebug = false; + +// WARNING: Keep this in sync with Rust (search for LogLevel) +const LogLevel = { + Error: 1, + Warn: 2, + Info: 3, + Debug: 4, +}; + +let logLevel = 3; let logSource = "JS"; -function setLogDebug(debug, source) { - logDebug = debug; +function setLogLevel(level, source) { + logLevel = level; if (source) { logSource = source; } } function log(...args) { - if (logDebug) { + if (logLevel >= LogLevel.Debug) { // if we destructure `console` off `globalThis` too early, we don't bind to // the right console, therefore we don't log anything out. 
- globalThis.console.log( + globalThis.console.error( `DEBUG ${logSource} -`, ...new SafeArrayIterator(args), ); @@ -80,6 +89,6 @@ export { log, nonEnumerable, readOnly, - setLogDebug, + setLogLevel, writable, }; diff --git a/runtime/js/11_workers.js b/runtime/js/11_workers.js index f8ed122b81..e046900053 100644 --- a/runtime/js/11_workers.js +++ b/runtime/js/11_workers.js @@ -4,10 +4,11 @@ const core = globalThis.Deno.core; const ops = core.ops; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, Error, ObjectPrototypeIsPrototypeOf, - StringPrototypeStartsWith, String, + StringPrototypeStartsWith, SymbolIterator, SymbolToStringTag, } = primordials; @@ -192,8 +193,9 @@ class Worker extends EventTarget { const event = new MessageEvent("message", { cancelable: false, data: message, - ports: transferables.filter((t) => - ObjectPrototypeIsPrototypeOf(MessagePortPrototype, t) + ports: ArrayPrototypeFilter( + transferables, + (t) => ObjectPrototypeIsPrototypeOf(MessagePortPrototype, t), ), }); this.dispatchEvent(event); @@ -202,7 +204,7 @@ class Worker extends EventTarget { postMessage(message, transferOrOptions = {}) { const prefix = "Failed to execute 'postMessage' on 'MessagePort'"; - webidl.requiredArguments(arguments.length, 1, { prefix }); + webidl.requiredArguments(arguments.length, 1, prefix); message = webidl.converters.any(message); let options; if ( @@ -212,16 +214,15 @@ class Worker extends EventTarget { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; diff --git a/runtime/js/30_os.js b/runtime/js/30_os.js index 3af5803ad6..a5a55e19bd 100644 --- a/runtime/js/30_os.js +++ b/runtime/js/30_os.js @@ -6,10 +6,12 @@ import { Event, 
EventTarget } from "ext:deno_web/02_event.js"; const primordials = globalThis.__bootstrap.primordials; const { Error, + FunctionPrototypeBind, SymbolFor, } = primordials; -const windowDispatchEvent = EventTarget.prototype.dispatchEvent.bind( +const windowDispatchEvent = FunctionPrototypeBind( + EventTarget.prototype.dispatchEvent, globalThis, ); diff --git a/runtime/js/40_process.js b/runtime/js/40_process.js index 2a5ac86bf2..664a4b303d 100644 --- a/runtime/js/40_process.js +++ b/runtime/js/40_process.js @@ -200,6 +200,7 @@ function collectOutput(readableStream) { class ChildProcess { #rid; #waitPromiseId; + #waitComplete = false; #unrefed = false; #pid; @@ -268,8 +269,8 @@ class ChildProcess { const waitPromise = core.opAsync("op_spawn_wait", this.#rid); this.#waitPromiseId = waitPromise[promiseIdSymbol]; this.#status = PromisePrototypeThen(waitPromise, (res) => { - this.#rid = null; signal?.[abortSignal.remove](onAbort); + this.#waitComplete = true; return res; }); } @@ -317,10 +318,10 @@ class ChildProcess { } kill(signo = "SIGTERM") { - if (this.#rid === null) { + if (this.#waitComplete) { throw new TypeError("Child process has already terminated."); } - ops.op_kill(this.#pid, signo, "Deno.Child.kill()"); + ops.op_spawn_kill(this.#rid, signo); } ref() { diff --git a/runtime/js/90_deno_ns.js b/runtime/js/90_deno_ns.js index bb6ba3b08d..3507a98a3d 100644 --- a/runtime/js/90_deno_ns.js +++ b/runtime/js/90_deno_ns.js @@ -4,7 +4,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as timers from "ext:deno_web/02_timers.js"; import * as httpClient from "ext:deno_fetch/22_http_client.js"; -import * as console from "ext:deno_console/02_console.js"; +import * as console from "ext:deno_console/01_console.js"; import * as ffi from "ext:deno_ffi/00_ffi.js"; import * as net from "ext:deno_net/01_net.js"; import * as tls from "ext:deno_net/02_tls.js"; diff --git a/runtime/js/98_global_scope.js b/runtime/js/98_global_scope.js index 911db65cb8..1084f5c248 
100644 --- a/runtime/js/98_global_scope.js +++ b/runtime/js/98_global_scope.js @@ -13,7 +13,7 @@ import * as event from "ext:deno_web/02_event.js"; import * as timers from "ext:deno_web/02_timers.js"; import * as base64 from "ext:deno_web/05_base64.js"; import * as encoding from "ext:deno_web/08_text_encoding.js"; -import * as console from "ext:deno_console/02_console.js"; +import * as console from "ext:deno_console/01_console.js"; import * as caches from "ext:deno_cache/01_cache.js"; import * as compression from "ext:deno_web/14_compression.js"; import * as worker from "ext:runtime/11_workers.js"; diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index fa16cc1f40..8fd9a6bd94 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -12,27 +12,28 @@ const ops = core.ops; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, ArrayPrototypeIndexOf, + ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeShift, ArrayPrototypeSplice, - ArrayPrototypeMap, DateNow, Error, ErrorPrototype, - FunctionPrototypeCall, FunctionPrototypeBind, + FunctionPrototypeCall, ObjectAssign, - ObjectDefineProperty, ObjectDefineProperties, + ObjectDefineProperty, ObjectFreeze, ObjectPrototypeIsPrototypeOf, ObjectSetPrototypeOf, + PromisePrototypeThen, PromiseResolve, + SafeWeakMap, Symbol, SymbolIterator, - PromisePrototypeThen, - SafeWeakMap, TypeError, WeakMapPrototypeDelete, WeakMapPrototypeGet, @@ -44,12 +45,14 @@ import * as location from "ext:deno_web/12_location.js"; import * as version from "ext:runtime/01_version.ts"; import * as os from "ext:runtime/30_os.js"; import * as timers from "ext:deno_web/02_timers.js"; -import * as colors from "ext:deno_console/01_colors.js"; import { + getDefaultInspectOptions, + getNoColor, inspectArgs, quoteString, + setNoColor, wrapConsole, -} from "ext:deno_console/02_console.js"; +} from "ext:deno_console/01_console.js"; import * as performance 
from "ext:deno_web/15_performance.js"; import * as url from "ext:deno_url/00_url.js"; import * as fetch from "ext:deno_fetch/26_fetch.js"; @@ -99,7 +102,7 @@ function workerClose() { function postMessage(message, transferOrOptions = {}) { const prefix = "Failed to execute 'postMessage' on 'DedicatedWorkerGlobalScope'"; - webidl.requiredArguments(arguments.length, 1, { prefix }); + webidl.requiredArguments(arguments.length, 1, prefix); message = webidl.converters.any(message); let options; if ( @@ -109,16 +112,15 @@ function postMessage(message, transferOrOptions = {}) { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; @@ -146,8 +148,10 @@ async function pollForMessages() { const msgEvent = new event.MessageEvent("message", { cancelable: false, data: message, - ports: transferables.filter((t) => - ObjectPrototypeIsPrototypeOf(messagePort.MessagePortPrototype, t) + ports: ArrayPrototypeFilter( + transferables, + (t) => + ObjectPrototypeIsPrototypeOf(messagePort.MessagePortPrototype, t), ), }); @@ -218,12 +222,12 @@ function formatException(error) { return null; } else if (typeof error == "string") { return `Uncaught ${ - inspectArgs([quoteString(error)], { - colors: !colors.getNoColor(), + inspectArgs([quoteString(error, getDefaultInspectOptions())], { + colors: !getNoColor(), }) }`; } else { - return `Uncaught ${inspectArgs([error], { colors: !colors.getNoColor() })}`; + return `Uncaught ${inspectArgs([error], { colors: !getNoColor() })}`; } } @@ -295,7 +299,7 @@ function runtimeStart( v8Version, tsVersion, target, - debugFlag, + logLevel, noColor, isTty, source, @@ -311,8 +315,8 @@ function runtimeStart( tsVersion, ); core.setBuildInfo(target); - 
util.setLogDebug(debugFlag, source); - colors.setNoColor(noColor || !isTty); + util.setLogLevel(logLevel, source); + setNoColor(noColor || !isTty); // deno-lint-ignore prefer-primordials Error.prepareStackTrace = core.prepareStackTrace; } @@ -342,7 +346,8 @@ function promiseRejectCallback(type, promise, reason) { } return !!globalThis_.onunhandledrejection || - event.listenerCount(globalThis_, "unhandledrejection") > 0; + event.listenerCount(globalThis_, "unhandledrejection") > 0 || + typeof internals.nodeProcessUnhandledRejectionCallback !== "undefined"; } function promiseRejectMacrotaskCallback() { @@ -379,6 +384,15 @@ function promiseRejectMacrotaskCallback() { globalThis_.dispatchEvent(rejectionEvent); globalThis_.removeEventListener("error", errorEventCb); + // If event was not yet prevented, try handing it off to Node compat layer + // (if it was initialized) + if ( + !rejectionEvent.defaultPrevented && + typeof internals.nodeProcessUnhandledRejectionCallback !== "undefined" + ) { + internals.nodeProcessUnhandledRejectionCallback(rejectionEvent); + } + // If event was not prevented (or "unhandledrejection" listeners didn't // throw) we will let Rust side handle it. 
if (rejectionEvent.defaultPrevented) { @@ -414,7 +428,7 @@ function bootstrapMainRuntime(runtimeOptions) { const { 0: args, 1: cpuCount, - 2: debugFlag, + 2: logLevel, 3: denoVersion, 4: locale, 5: location_, @@ -423,12 +437,11 @@ function bootstrapMainRuntime(runtimeOptions) { 8: tsVersion, 9: unstableFlag, 10: pid, - 11: ppid, - 12: target, - 13: v8Version, - 14: userAgent, - 15: inspectFlag, - // 16: enableTestingFeaturesFlag + 11: target, + 12: v8Version, + 13: userAgent, + 14: inspectFlag, + // 15: enableTestingFeaturesFlag } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -482,7 +495,7 @@ function bootstrapMainRuntime(runtimeOptions) { v8Version, tsVersion, target, - debugFlag, + logLevel, noColor, isTty, ); @@ -491,9 +504,16 @@ function bootstrapMainRuntime(runtimeOptions) { setUserAgent(userAgent); setLanguage(locale); + let ppid = undefined; ObjectDefineProperties(finalDenoNs, { pid: util.readOnly(pid), - ppid: util.readOnly(ppid), + ppid: util.getterOnly(() => { + // lazy because it's expensive + if (ppid === undefined) { + ppid = ops.op_ppid(); + } + return ppid; + }), noColor: util.readOnly(noColor), args: util.readOnly(ObjectFreeze(args)), mainModule: util.getterOnly(opMainModule), @@ -522,7 +542,7 @@ function bootstrapWorkerRuntime( const { 0: args, 1: cpuCount, - 2: debugFlag, + 2: logLevel, 3: denoVersion, 4: locale, 5: location_, @@ -531,12 +551,11 @@ function bootstrapWorkerRuntime( 8: tsVersion, 9: unstableFlag, 10: pid, - // 11: ppid, - 12: target, - 13: v8Version, - // 14: userAgent, - // 15: inspectFlag, - 16: enableTestingFeaturesFlag, + 11: target, + 12: v8Version, + // 13: userAgent, + // 14: inspectFlag, + 15: enableTestingFeaturesFlag, } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -591,7 +610,7 @@ function bootstrapWorkerRuntime( v8Version, tsVersion, target, - debugFlag, + logLevel, noColor, isTty, internalName ?? 
name, diff --git a/runtime/lib.rs b/runtime/lib.rs index 6745c4a565..1e307f492a 100644 --- a/runtime/lib.rs +++ b/runtime/lib.rs @@ -35,9 +35,4 @@ pub mod worker; mod worker_bootstrap; pub use worker_bootstrap::BootstrapOptions; - -pub struct RuntimeNodeEnv; -impl deno_node::NodeEnv for RuntimeNodeEnv { - type P = permissions::PermissionsContainer; - type Fs = deno_node::RealFs; -} +pub use worker_bootstrap::WorkerLogLevel; diff --git a/runtime/ops/fs_events.rs b/runtime/ops/fs_events.rs index 27e76b3d34..2668431ebf 100644 --- a/runtime/ops/fs_events.rs +++ b/runtime/ops/fs_events.rs @@ -31,9 +31,6 @@ use tokio::sync::mpsc; deno_core::extension!( deno_fs_events, ops = [op_fs_events_open, op_fs_events_poll], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); struct FsEventsResource { diff --git a/runtime/ops/http.rs b/runtime/ops/http.rs index 767fc3ae01..eb27112570 100644 --- a/runtime/ops/http.rs +++ b/runtime/ops/http.rs @@ -30,9 +30,6 @@ use tokio::net::UnixStream; deno_core::extension!( deno_http_runtime, ops = [op_http_start, op_http_upgrade], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index 70a1263018..bbadee993a 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -48,21 +48,18 @@ deno_core::extension!( state = |state, options| { state.put::(options.exit_code); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - } ); deno_core::extension!( deno_os_worker, ops_fn = deno_ops, middleware = |op| match op.name { - "op_exit" | "op_set_exit_code" => op.disable(), + "op_exit" | "op_set_exit_code" => deno_core::OpDecl { + v8_fn_ptr: deno_core::op_void_sync::v8_fn_ptr as _, + ..op + }, _ => op, }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - } ); #[op] @@ -336,6 +333,7 @@ fn rss() -> usize { (out, idx) } + 
#[allow(clippy::disallowed_methods)] let statm_content = if let Ok(c) = std::fs::read_to_string("/proc/self/statm") { c @@ -382,6 +380,53 @@ fn rss() -> usize { task_info.resident_size as usize } +#[cfg(target_os = "openbsd")] +fn rss() -> usize { + // Uses OpenBSD's KERN_PROC_PID sysctl(2) + // to retrieve information about the current + // process, part of which is the RSS (p_vm_rssize) + + // SAFETY: libc call (get PID of own process) + let pid = unsafe { libc::getpid() }; + // SAFETY: libc call (get system page size) + let pagesize = unsafe { libc::sysconf(libc::_SC_PAGESIZE) } as usize; + // KERN_PROC_PID returns a struct libc::kinfo_proc + let mut kinfoproc = std::mem::MaybeUninit::::uninit(); + let mut size = std::mem::size_of_val(&kinfoproc) as libc::size_t; + let mut mib = [ + libc::CTL_KERN, + libc::KERN_PROC, + libc::KERN_PROC_PID, + pid, + // mib is an array of integers, size is of type size_t + // conversion is safe, because the size of a libc::kinfo_proc + // structure will not exceed i32::MAX + size.try_into().unwrap(), + 1, + ]; + // SAFETY: libc call, mib has been statically initialized, + // kinfoproc is a valid pointer to a libc::kinfo_proc struct + let res = unsafe { + libc::sysctl( + mib.as_mut_ptr(), + mib.len() as _, + kinfoproc.as_mut_ptr() as *mut libc::c_void, + &mut size, + std::ptr::null_mut(), + 0, + ) + }; + + if res == 0 { + // SAFETY: sysctl returns 0 on success and kinfoproc is initialized + // p_vm_rssize contains size in pages -> multiply with pagesize to + // get size in bytes. 
+ pagesize * unsafe { (*kinfoproc.as_mut_ptr()).p_vm_rssize as usize } + } else { + 0 + } +} + #[cfg(windows)] fn rss() -> usize { use winapi::shared::minwindef::DWORD; diff --git a/runtime/ops/os/sys_info.rs b/runtime/ops/os/sys_info.rs index 1a9358dc0b..795e6bb0af 100644 --- a/runtime/ops/os/sys_info.rs +++ b/runtime/ops/os/sys_info.rs @@ -48,6 +48,7 @@ pub fn loadavg() -> LoadAvg { pub fn os_release() -> String { #[cfg(target_os = "linux")] { + #[allow(clippy::disallowed_methods)] match std::fs::read_to_string("/proc/sys/kernel/osrelease") { Ok(mut s) => { s.pop(); // pop '\n' diff --git a/runtime/ops/permissions.rs b/runtime/ops/permissions.rs index 6f7b98a304..663b1d2409 100644 --- a/runtime/ops/permissions.rs +++ b/runtime/ops/permissions.rs @@ -18,9 +18,6 @@ deno_core::extension!( op_revoke_permission, op_request_permission, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[derive(Deserialize)] diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index cf8740255d..44429fdab7 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -2,6 +2,7 @@ use super::check_unstable; use crate::permissions::PermissionsContainer; +use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::serde_json; @@ -12,10 +13,10 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ZeroCopyBuf; +use deno_io::fs::FileResource; use deno_io::ChildStderrResource; use deno_io::ChildStdinResource; use deno_io::ChildStdoutResource; -use deno_io::StdFileResource; use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; @@ -93,7 +94,9 @@ impl StdioOrRid { ) -> Result { match &self { StdioOrRid::Stdio(val) => Ok(val.as_stdio()), - StdioOrRid::Rid(rid) => StdFileResource::as_stdio(state, *rid), + StdioOrRid::Rid(rid) => { + FileResource::with_file(state, *rid, |file| Ok(file.as_stdio()?)) + } } } } @@ -104,16 +107,16 @@ 
deno_core::extension!( op_spawn_child, op_spawn_wait, op_spawn_sync, + op_spawn_kill, deprecated::op_run, deprecated::op_run_status, deprecated::op_kill, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); -struct ChildResource(tokio::process::Child); +/// Second member stores the pid separately from the RefCell. It's needed for +/// `op_spawn_kill`, where the RefCell is borrowed mutably by `op_spawn_wait`. +struct ChildResource(RefCell, u32); impl Resource for ChildResource { fn name(&self) -> Cow { @@ -300,7 +303,9 @@ fn spawn_child( .take() .map(|stderr| state.resource_table.add(ChildStderrResource::from(stderr))); - let child_rid = state.resource_table.add(ChildResource(child)); + let child_rid = state + .resource_table + .add(ChildResource(RefCell::new(child), pid)); Ok(Child { rid: child_rid, @@ -326,17 +331,18 @@ async fn op_spawn_wait( state: Rc>, rid: ResourceId, ) -> Result { + #![allow(clippy::await_holding_refcell_ref)] let resource = state .borrow_mut() .resource_table - .take::(rid)?; - Rc::try_unwrap(resource) - .ok() - .unwrap() - .0 - .wait() - .await? 
- .try_into() + .get::(rid)?; + let result = resource.0.try_borrow_mut()?.wait().await?.try_into(); + state + .borrow_mut() + .resource_table + .close(rid) + .expect("shouldn't have closed until now"); + result } #[op] @@ -364,6 +370,19 @@ fn op_spawn_sync( }) } +#[op] +fn op_spawn_kill( + state: &mut OpState, + rid: ResourceId, + signal: String, +) -> Result<(), AnyError> { + if let Ok(child_resource) = state.resource_table.get::(rid) { + deprecated::kill(child_resource.1 as i32, &signal)?; + return Ok(()); + } + Err(type_error("Child process has already terminated.")) +} + mod deprecated { use super::*; @@ -575,7 +594,6 @@ mod deprecated { #[cfg(not(unix))] pub fn kill(pid: i32, signal: &str) -> Result<(), AnyError> { - use deno_core::error::type_error; use std::io::Error; use std::io::ErrorKind::NotFound; use winapi::shared::minwindef::DWORD; diff --git a/runtime/ops/runtime.rs b/runtime/ops/runtime.rs index 8802f9cd6c..3f60c74379 100644 --- a/runtime/ops/runtime.rs +++ b/runtime/ops/runtime.rs @@ -8,14 +8,11 @@ use deno_core::OpState; deno_core::extension!( deno_runtime, - ops = [op_main_module], + ops = [op_main_module, op_ppid], options = { main_module: ModuleSpecifier }, state = |state, options| { state.put::(options.main_module); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] @@ -31,7 +28,10 @@ fn op_main_module(state: &mut OpState) -> Result { Ok(main_path) } -pub fn ppid() -> i64 { +/// This is an op instead of being done at initialization time because +/// it's expensive to retreive the ppid on Windows. 
+#[op] +pub fn op_ppid() -> i64 { #[cfg(windows)] { // Adopted from rustup: diff --git a/runtime/ops/signal.rs b/runtime/ops/signal.rs index 93e1cfef28..934192c777 100644 --- a/runtime/ops/signal.rs +++ b/runtime/ops/signal.rs @@ -32,9 +32,6 @@ use tokio::signal::windows::CtrlC; deno_core::extension!( deno_signal, ops = [op_signal_bind, op_signal_unbind, op_signal_poll], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[cfg(unix)] @@ -113,6 +110,7 @@ pub fn signal_str_to_int(s: &str) -> Result { "SIGQUIT" => Ok(3), "SIGILL" => Ok(4), "SIGTRAP" => Ok(5), + "SIGIOT" => Ok(6), "SIGABRT" => Ok(6), "SIGEMT" => Ok(7), "SIGFPE" => Ok(8), @@ -193,6 +191,7 @@ pub fn signal_str_to_int(s: &str) -> Result { "SIGQUIT" => Ok(3), "SIGILL" => Ok(4), "SIGTRAP" => Ok(5), + "SIGIOT" => Ok(6), "SIGABRT" => Ok(6), "SIGBUS" => Ok(7), "SIGFPE" => Ok(8), @@ -269,6 +268,7 @@ pub fn signal_str_to_int(s: &str) -> Result { "SIGQUIT" => Ok(3), "SIGILL" => Ok(4), "SIGTRAP" => Ok(5), + "SIGIOT" => Ok(6), "SIGABRT" => Ok(6), "SIGEMT" => Ok(7), "SIGFPE" => Ok(8), diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index 3146f22e22..b4e4d73400 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -1,13 +1,42 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::io::Error; +use std::rc::Rc; + +use deno_core::error::resource_unavailable; use deno_core::error::AnyError; use deno_core::op; use deno_core::OpState; -use deno_io::StdFileResource; -use std::io::Error; +use deno_core::Resource; +use deno_io::fs::FileResource; +#[cfg(unix)] +use deno_core::ResourceId; #[cfg(unix)] use nix::sys::termios; +#[cfg(unix)] +use std::cell::RefCell; +#[cfg(unix)] +use std::collections::HashMap; + +#[cfg(unix)] +#[derive(Default, Clone)] +struct TtyModeStore(Rc>>); + +#[cfg(unix)] +impl TtyModeStore { + pub fn get(&self, id: ResourceId) -> Option { + self.0.borrow().get(&id).map(ToOwned::to_owned) + } + + pub fn take(&self, id: ResourceId) -> Option { + self.0.borrow_mut().remove(&id) + } + + pub fn set(&self, id: ResourceId, mode: termios::Termios) { + self.0.borrow_mut().insert(id, mode); + } +} #[cfg(windows)] use deno_core::error::custom_error; @@ -17,13 +46,14 @@ use winapi::shared::minwindef::DWORD; use winapi::um::wincon; #[cfg(windows)] -fn get_windows_handle( - f: &std::fs::File, +fn get_fd_from_resource( + resource: Rc, ) -> Result { - use std::os::windows::io::AsRawHandle; use winapi::um::handleapi; - let handle = f.as_raw_handle(); + let Some(handle) = resource.backing_fd() else { + return Err(resource_unavailable()); + }; if handle == handleapi::INVALID_HANDLE_VALUE { return Err(Error::last_os_error().into()); } else if handle.is_null() { @@ -32,11 +62,22 @@ fn get_windows_handle( Ok(handle) } +#[cfg(not(windows))] +fn get_fd_from_resource( + resource: Rc, +) -> Result { + match resource.backing_fd() { + Some(fd) => Ok(fd), + None => Err(resource_unavailable()), + } +} + deno_core::extension!( deno_tty, ops = [op_stdin_set_raw, op_isatty, op_console_size], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); + state = |state| { + #[cfg(unix)] + state.put(TtyModeStore::default()); }, ); @@ -75,23 +116,15 @@ fn op_stdin_set_raw( // Copyright (c) 2019 Timon. MIT license. 
#[cfg(windows)] { - use std::os::windows::io::AsRawHandle; use winapi::shared::minwindef::FALSE; use winapi::um::consoleapi; - use winapi::um::handleapi; if cbreak { return Err(deno_core::error::not_supported()); } - StdFileResource::with_file(state, rid, move |std_file| { - let handle = std_file.as_raw_handle(); - - if handle == handleapi::INVALID_HANDLE_VALUE { - return Err(Error::last_os_error().into()); - } else if handle.is_null() { - return Err(custom_error("ReferenceError", "null handle")); - } + FileResource::with_resource(state, rid, move |resource| { + let handle = get_fd_from_resource(resource)?; let mut original_mode: DWORD = 0; // SAFETY: winapi call if unsafe { consoleapi::GetConsoleMode(handle, &mut original_mode) } @@ -116,55 +149,49 @@ fn op_stdin_set_raw( } #[cfg(unix)] { - use std::os::unix::io::AsRawFd; + let tty_mode_store = state.borrow::().clone(); + let previous_mode = tty_mode_store.get(rid); - StdFileResource::with_file_and_metadata( - state, - rid, - move |std_file, meta_data| { - let raw_fd = std_file.as_raw_fd(); + FileResource::with_resource(state, rid, move |resource| { + let raw_fd = get_fd_from_resource(resource)?; - if is_raw { - let mut raw = { - let mut meta_data = meta_data.lock(); - let maybe_tty_mode = &mut meta_data.tty.mode; - if maybe_tty_mode.is_none() { - // Save original mode. 
- let original_mode = termios::tcgetattr(raw_fd)?; - maybe_tty_mode.replace(original_mode); - } - maybe_tty_mode.clone().unwrap() - }; - - raw.input_flags &= !(termios::InputFlags::BRKINT - | termios::InputFlags::ICRNL - | termios::InputFlags::INPCK - | termios::InputFlags::ISTRIP - | termios::InputFlags::IXON); - - raw.control_flags |= termios::ControlFlags::CS8; - - raw.local_flags &= !(termios::LocalFlags::ECHO - | termios::LocalFlags::ICANON - | termios::LocalFlags::IEXTEN); - if !cbreak { - raw.local_flags &= !(termios::LocalFlags::ISIG); - } - raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = - 1; - raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = - 0; - termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?; - } else { - // Try restore saved mode. - if let Some(mode) = meta_data.lock().tty.mode.take() { - termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?; + if is_raw { + let mut raw = match previous_mode { + Some(mode) => mode, + None => { + // Save original mode. + let original_mode = termios::tcgetattr(raw_fd)?; + tty_mode_store.set(rid, original_mode.clone()); + original_mode } + }; + + raw.input_flags &= !(termios::InputFlags::BRKINT + | termios::InputFlags::ICRNL + | termios::InputFlags::INPCK + | termios::InputFlags::ISTRIP + | termios::InputFlags::IXON); + + raw.control_flags |= termios::ControlFlags::CS8; + + raw.local_flags &= !(termios::LocalFlags::ECHO + | termios::LocalFlags::ICANON + | termios::LocalFlags::IEXTEN); + if !cbreak { + raw.local_flags &= !(termios::LocalFlags::ISIG); } + raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = 1; + raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = 0; + termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?; + } else { + // Try restore saved mode. 
+ if let Some(mode) = tty_mode_store.take(rid) { + termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?; + } + } - Ok(()) - }, - ) + Ok(()) + }) } } @@ -174,13 +201,14 @@ fn op_isatty( rid: u32, out: &mut [u8], ) -> Result<(), AnyError> { - StdFileResource::with_file(state, rid, move |std_file| { + FileResource::with_resource(state, rid, move |resource| { + let raw_fd = get_fd_from_resource(resource)?; #[cfg(windows)] { use winapi::shared::minwindef::FALSE; use winapi::um::consoleapi; - let handle = get_windows_handle(std_file)?; + let handle = raw_fd; let mut test_mode: DWORD = 0; // If I cannot get mode out of console, it is not a console. // TODO(bartlomieju): @@ -193,8 +221,6 @@ fn op_isatty( } #[cfg(unix)] { - use std::os::unix::io::AsRawFd; - let raw_fd = std_file.as_raw_fd(); // TODO(bartlomieju): #[allow(clippy::undocumented_unsafe_blocks)] { @@ -215,8 +241,9 @@ fn op_console_size( result: &mut [u32], rid: u32, ) -> Result<(), AnyError> { - StdFileResource::with_file(state, rid, move |std_file| { - let size = console_size(std_file)?; + FileResource::with_resource(state, rid, move |resource| { + let fd = get_fd_from_resource(resource)?; + let size = console_size_from_fd(fd)?; result[0] = size.cols; result[1] = size.rows; Ok(()) @@ -249,40 +276,50 @@ pub fn console_size( { use std::os::windows::io::AsRawHandle; let handle = std_file.as_raw_handle(); - - // SAFETY: winapi calls - unsafe { - let mut bufinfo: winapi::um::wincon::CONSOLE_SCREEN_BUFFER_INFO = - std::mem::zeroed(); - - if winapi::um::wincon::GetConsoleScreenBufferInfo(handle, &mut bufinfo) - == 0 - { - return Err(Error::last_os_error()); - } - Ok(ConsoleSize { - cols: bufinfo.dwSize.X as u32, - rows: bufinfo.dwSize.Y as u32, - }) - } + console_size_from_fd(handle) } - #[cfg(unix)] { use std::os::unix::io::AsRawFd; - let fd = std_file.as_raw_fd(); - // SAFETY: libc calls - unsafe { - let mut size: libc::winsize = std::mem::zeroed(); - if libc::ioctl(fd, libc::TIOCGWINSZ, &mut size as *mut 
_) != 0 { - return Err(Error::last_os_error()); - } - Ok(ConsoleSize { - cols: size.ws_col as u32, - rows: size.ws_row as u32, - }) + console_size_from_fd(fd) + } +} + +#[cfg(windows)] +fn console_size_from_fd( + handle: std::os::windows::io::RawHandle, +) -> Result { + // SAFETY: winapi calls + unsafe { + let mut bufinfo: winapi::um::wincon::CONSOLE_SCREEN_BUFFER_INFO = + std::mem::zeroed(); + + if winapi::um::wincon::GetConsoleScreenBufferInfo(handle, &mut bufinfo) == 0 + { + return Err(Error::last_os_error()); } + Ok(ConsoleSize { + cols: bufinfo.dwSize.X as u32, + rows: bufinfo.dwSize.Y as u32, + }) + } +} + +#[cfg(not(windows))] +fn console_size_from_fd( + fd: std::os::unix::prelude::RawFd, +) -> Result { + // SAFETY: libc calls + unsafe { + let mut size: libc::winsize = std::mem::zeroed(); + if libc::ioctl(fd, libc::TIOCGWINSZ, &mut size as *mut _) != 0 { + return Err(Error::last_os_error()); + } + Ok(ConsoleSize { + cols: size.ws_col as u32, + rows: size.ws_row as u32, + }) } } diff --git a/runtime/ops/web_worker.rs b/runtime/ops/web_worker.rs index 7952a03f26..e62642fdd6 100644 --- a/runtime/ops/web_worker.rs +++ b/runtime/ops/web_worker.rs @@ -25,9 +25,6 @@ deno_core::extension!( op_worker_get_type, op_worker_sync_fetch, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs index 2049d5ab85..ba5f325d63 100644 --- a/runtime/ops/web_worker/sync_fetch.rs +++ b/runtime/ops/web_worker/sync_fetch.rs @@ -8,7 +8,6 @@ use deno_core::op; use deno_core::url::Url; use deno_core::OpState; use deno_fetch::data_url::DataUrl; -use deno_fetch::reqwest; use deno_web::BlobStore; use deno_websocket::DomExceptionNetworkError; use hyper::body::Bytes; @@ -41,7 +40,7 @@ pub fn op_worker_sync_fetch( let handle = state.borrow::().clone(); assert_eq!(handle.worker_type, WebWorkerType::Classic); - let client = state.borrow::().clone(); + let 
client = deno_fetch::get_or_create_client_from_state(state)?; // TODO(andreubotella) It's not good to throw an exception related to blob // URLs when none of the script URLs use the blob scheme. diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index d5285ec890..f96ae38e8a 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -119,9 +119,6 @@ deno_core::extension!( FormatJsErrorFnHolder(options.format_js_error_fn); state.put::(format_js_error_fn_holder); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[derive(Deserialize)] diff --git a/runtime/permissions/mod.rs b/runtime/permissions/mod.rs index b15750313e..e3ddd8cf49 100644 --- a/runtime/permissions/mod.rs +++ b/runtime/permissions/mod.rs @@ -13,10 +13,12 @@ use deno_core::serde::Deserializer; use deno_core::serde::Serialize; use deno_core::serde_json; use deno_core::url; +use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_core::OpState; use log; use once_cell::sync::Lazy; +use std::borrow::Cow; use std::collections::HashSet; use std::fmt; use std::hash::Hash; @@ -871,8 +873,8 @@ impl UnaryPermission { .ok_or_else(|| uri_error("Missing host"))? 
.to_string(); let display_host = match url.port() { - None => hostname.clone(), - Some(port) => format!("{hostname}:{port}"), + None => Cow::Borrowed(&hostname), + Some(port) => Cow::Owned(format!("{hostname}:{port}")), }; let host = &(&hostname, url.port_or_known_default()); let (result, prompted, is_allow_all) = self.query(Some(host)).check( @@ -1872,7 +1874,16 @@ impl PermissionsContainer { impl deno_node::NodePermissions for PermissionsContainer { #[inline(always)] - fn check_read(&mut self, path: &Path) -> Result<(), AnyError> { + fn check_net_url( + &mut self, + url: &Url, + api_name: &str, + ) -> Result<(), AnyError> { + self.0.lock().net.check_url(url, Some(api_name)) + } + + #[inline(always)] + fn check_read(&self, path: &Path) -> Result<(), AnyError> { self.0.lock().read.check(path, None) } } diff --git a/runtime/tokio_util.rs b/runtime/tokio_util.rs index a4db5e33fa..1245a5b8eb 100644 --- a/runtime/tokio_util.rs +++ b/runtime/tokio_util.rs @@ -1,9 +1,41 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::fmt::Debug; +use std::str::FromStr; + +use deno_core::task::MaskFutureAsSend; + +/// Default configuration for tokio. In the future, this method may have different defaults +/// depending on the platform and/or CPU layout. 
+const fn tokio_configuration() -> (u32, u32, usize) { + (61, 31, 1024) +} + +fn tokio_env(name: &'static str, default: T) -> T +where + ::Err: Debug, +{ + match std::env::var(name) { + Ok(value) => value.parse().unwrap(), + Err(_) => default, + } +} pub fn create_basic_runtime() -> tokio::runtime::Runtime { + let (event_interval, global_queue_interval, max_io_events_per_tick) = + tokio_configuration(); + tokio::runtime::Builder::new_current_thread() .enable_io() .enable_time() + .event_interval(tokio_env("DENO_TOKIO_EVENT_INTERVAL", event_interval)) + .global_queue_interval(tokio_env( + "DENO_TOKIO_GLOBAL_QUEUE_INTERVAL", + global_queue_interval, + )) + .max_io_events_per_tick(tokio_env( + "DENO_TOKIO_MAX_IO_EVENTS_PER_TICK", + max_io_events_per_tick, + )) // This limits the number of threads for blocking operations (like for // synchronous fs ops) or CPU bound tasks like when we run dprint in // parallel for deno fmt. @@ -14,11 +46,26 @@ pub fn create_basic_runtime() -> tokio::runtime::Runtime { .unwrap() } -pub fn run_local(future: F) -> R +#[inline(always)] +pub fn create_and_run_current_thread(future: F) -> R where - F: std::future::Future, + F: std::future::Future + 'static, + R: Send + 'static, { let rt = create_basic_runtime(); - let local = tokio::task::LocalSet::new(); - local.block_on(&rt, future) + + // Since this is the main future, we want to box it in debug mode because it tends to be fairly + // large and the compiler won't optimize repeated copies. We also make this runtime factory + // function #[inline(always)] to avoid holding the unboxed, unused future on the stack. 
+ + #[cfg(debug_assertions)] + // SAFETY: this this is guaranteed to be running on a current-thread executor + let future = Box::pin(unsafe { MaskFutureAsSend::new(future) }); + + #[cfg(not(debug_assertions))] + // SAFETY: this this is guaranteed to be running on a current-thread executor + let future = unsafe { MaskFutureAsSend::new(future) }; + + let join_handle = rt.spawn(future); + rt.block_on(join_handle).unwrap().into_inner() } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 0d743cfc62..36f9718b51 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -3,7 +3,8 @@ use crate::colors; use crate::inspector_server::InspectorServer; use crate::ops; use crate::permissions::PermissionsContainer; -use crate::tokio_util::run_local; +use crate::tokio_util::create_and_run_current_thread; +use crate::worker::init_runtime_module_map; use crate::worker::FormatJsErrorFn; use crate::BootstrapOptions; use deno_broadcast_channel::InMemoryBroadcastChannel; @@ -34,11 +35,11 @@ use deno_core::RuntimeOptions; use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; -use deno_fs::StdFs; +use deno_fs::FileSystem; +use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_node::RequireNpmResolver; -use deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use deno_web::create_entangled_message_port; use deno_web::BlobStore; use deno_web::MessagePort; @@ -330,10 +331,11 @@ pub struct WebWorkerOptions { pub extensions: Vec, pub startup_snapshot: Option, pub unsafely_ignore_certificate_errors: Option>, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub seed: Option, + pub fs: Arc, pub module_loader: Rc, - pub npm_resolver: Option>, + pub npm_resolver: Option>, pub create_web_worker_cb: Arc, pub preload_module_cb: Arc, pub pre_execute_module_cb: Arc, @@ -407,7 +409,7 @@ impl WebWorker { deno_fetch::deno_fetch::init_ops::( 
deno_fetch::Options { user_agent: options.bootstrap.user_agent.clone(), - root_cert_store: options.root_cert_store.clone(), + root_cert_store_provider: options.root_cert_store_provider.clone(), unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -418,7 +420,7 @@ impl WebWorker { deno_cache::deno_cache::init_ops::(create_cache), deno_websocket::deno_websocket::init_ops::( options.bootstrap.user_agent.clone(), - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), options.unsafely_ignore_certificate_errors.clone(), ), deno_webstorage::deno_webstorage::init_ops(None).disable(), @@ -429,7 +431,7 @@ impl WebWorker { ), deno_ffi::deno_ffi::init_ops::(unstable), deno_net::deno_net::init_ops::( - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), unstable, options.unsafely_ignore_certificate_errors.clone(), ), @@ -439,11 +441,15 @@ impl WebWorker { unstable, ), deno_napi::deno_napi::init_ops::(), - deno_http::deno_http::init_ops(), + deno_http::deno_http::init_ops::(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), - deno_node::deno_node::init_ops::( + deno_fs::deno_fs::init_ops::( + unstable, + options.fs.clone(), + ), + deno_node::deno_node::init_ops::( options.npm_resolver, + options.fs, ), // Runtime ops that are always initialized for WebWorkers ops::web_worker::deno_web_worker::init_ops(), @@ -490,6 +496,7 @@ impl WebWorker { inspector: options.maybe_inspector_server.is_some(), ..Default::default() }); + init_runtime_module_map(&mut js_runtime); if let Some(server) = options.maybe_inspector_server.clone() { server.register_inspector( @@ -833,5 +840,5 @@ pub fn run_web_worker( debug!("Worker thread shuts down {}", &name); result }; - run_local(fut) + create_and_run_current_thread(fut) } diff --git a/runtime/worker.rs b/runtime/worker.rs index 14abd12b55..10375818d0 100644 --- a/runtime/worker.rs +++ 
b/runtime/worker.rs @@ -30,11 +30,11 @@ use deno_core::RuntimeOptions; use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; -use deno_fs::StdFs; +use deno_fs::FileSystem; +use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_node::RequireNpmResolver; -use deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use deno_web::BlobStore; use log::debug; @@ -57,6 +57,17 @@ impl ExitCode { self.0.store(code, Relaxed); } } + +/// Clear extension modules from the module map, except preserve `ext:deno_node` +/// modules as `node:` specifiers. +pub fn init_runtime_module_map(js_runtime: &mut JsRuntime) { + js_runtime.clear_module_map( + deno_node::SUPPORTED_BUILTIN_NODE_MODULES + .iter() + .map(|p| (p.ext_specifier, p.specifier)), + ); +} + /// This worker is created and used by almost all /// subcommands in Deno executable. /// @@ -84,17 +95,22 @@ pub struct WorkerOptions { /// V8 snapshot that should be loaded on startup. pub startup_snapshot: Option, + + /// Optional isolate creation parameters, such as heap limits. + pub create_params: Option, + pub unsafely_ignore_certificate_errors: Option>, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub seed: Option, + pub fs: Arc, /// Implementation of `ModuleLoader` which will be /// called when V8 requests to load ES modules. /// /// If not provided runtime will error if code being /// executed tries to load modules. 
pub module_loader: Rc, - pub npm_resolver: Option>, + pub npm_resolver: Option>, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, pub web_worker_preload_module_cb: Arc, @@ -149,6 +165,7 @@ impl Default for WorkerOptions { create_web_worker_cb: Arc::new(|_| { unimplemented!("web workers are not supported") }), + fs: Arc::new(deno_fs::RealFs), module_loader: Rc::new(FsModuleLoader), seed: None, unsafely_ignore_certificate_errors: Default::default(), @@ -163,11 +180,12 @@ impl Default for WorkerOptions { cache_storage_dir: Default::default(), broadcast_channel: Default::default(), source_map_getter: Default::default(), - root_cert_store: Default::default(), + root_cert_store_provider: Default::default(), npm_resolver: Default::default(), blob_store: Default::default(), extensions: Default::default(), startup_snapshot: Default::default(), + create_params: Default::default(), bootstrap: Default::default(), stdio: Default::default(), } @@ -227,7 +245,7 @@ impl MainWorker { deno_fetch::deno_fetch::init_ops::( deno_fetch::Options { user_agent: options.bootstrap.user_agent.clone(), - root_cert_store: options.root_cert_store.clone(), + root_cert_store_provider: options.root_cert_store_provider.clone(), unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -238,7 +256,7 @@ impl MainWorker { deno_cache::deno_cache::init_ops::(create_cache), deno_websocket::deno_websocket::init_ops::( options.bootstrap.user_agent.clone(), - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), options.unsafely_ignore_certificate_errors.clone(), ), deno_webstorage::deno_webstorage::init_ops( @@ -251,7 +269,7 @@ impl MainWorker { ), deno_ffi::deno_ffi::init_ops::(unstable), deno_net::deno_net::init_ops::( - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), unstable, options.unsafely_ignore_certificate_errors.clone(), ), @@ -263,11 +281,15 @@ impl MainWorker { 
unstable, ), deno_napi::deno_napi::init_ops::(), - deno_http::deno_http::init_ops(), + deno_http::deno_http::init_ops::(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), - deno_node::deno_node::init_ops::( + deno_fs::deno_fs::init_ops::( + unstable, + options.fs.clone(), + ), + deno_node::deno_node::init_ops::( options.npm_resolver, + options.fs, ), // Ops from this crate ops::runtime::deno_runtime::init_ops(main_module.clone()), @@ -304,6 +326,7 @@ impl MainWorker { let mut js_runtime = JsRuntime::new(RuntimeOptions { module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(startup_snapshot), + create_params: options.create_params, source_map_getter: options.source_map_getter, get_error_class_fn: options.get_error_class_fn, shared_array_buffer_store: options.shared_array_buffer_store.clone(), @@ -313,6 +336,7 @@ impl MainWorker { is_main: true, ..Default::default() }); + init_runtime_module_map(&mut js_runtime); if let Some(server) = options.maybe_inspector_server.clone() { server.register_inspector( diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 09725122cf..9627281a68 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -5,7 +5,35 @@ use deno_core::ModuleSpecifier; use std::thread; use crate::colors; -use crate::ops::runtime::ppid; + +/// The log level to use when printing diagnostic log messages, warnings, +/// or errors in the worker. +/// +/// Note: This is disconnected with the log crate's log level and the Rust code +/// in this crate will respect that value instead. To specify that, use +/// `log::set_max_level`. +#[derive(Debug, Default, Clone, Copy)] +pub enum WorkerLogLevel { + // WARNING: Ensure this is kept in sync with + // the JS values (search for LogLevel). 
+ Error = 1, + Warn = 2, + #[default] + Info = 3, + Debug = 4, +} + +impl From for WorkerLogLevel { + fn from(value: log::Level) -> Self { + match value { + log::Level::Error => WorkerLogLevel::Error, + log::Level::Warn => WorkerLogLevel::Warn, + log::Level::Info => WorkerLogLevel::Info, + log::Level::Debug => WorkerLogLevel::Debug, + log::Level::Trace => WorkerLogLevel::Debug, + } + } +} /// Common bootstrap options for MainWorker & WebWorker #[derive(Clone)] @@ -13,7 +41,7 @@ pub struct BootstrapOptions { /// Sets `Deno.args` in JS runtime. pub args: Vec, pub cpu_count: usize, - pub debug_flag: bool, + pub log_level: WorkerLogLevel, pub enable_testing_features: bool, pub locale: String, pub location: Option, @@ -45,7 +73,7 @@ impl Default for BootstrapOptions { no_color: !colors::use_color(), is_tty: colors::is_tty(), enable_testing_features: Default::default(), - debug_flag: Default::default(), + log_level: Default::default(), ts_version: Default::default(), locale: "en".to_string(), location: Default::default(), @@ -61,7 +89,7 @@ impl BootstrapOptions { &self, scope: &mut v8::HandleScope<'s>, ) -> v8::Local<'s, v8::Array> { - let array = v8::Array::new(scope, 17); + let array = v8::Array::new(scope, 16); { let args = v8::Array::new(scope, self.args.len() as i32); @@ -78,7 +106,7 @@ impl BootstrapOptions { } { - let val = v8::Boolean::new(scope, self.debug_flag); + let val = v8::Integer::new(scope, self.log_level as i32); array.set_index(scope, 2, val.into()); } @@ -142,18 +170,13 @@ impl BootstrapOptions { array.set_index(scope, 10, val.into()); } - { - let val = v8::Integer::new(scope, ppid() as i32); - array.set_index(scope, 11, val.into()); - } - { let val = v8::String::new_external_onebyte_static( scope, env!("TARGET").as_bytes(), ) .unwrap(); - array.set_index(scope, 12, val.into()); + array.set_index(scope, 11, val.into()); } { @@ -163,7 +186,7 @@ impl BootstrapOptions { v8::NewStringType::Normal, ) .unwrap(); - array.set_index(scope, 13, val.into()); + 
array.set_index(scope, 12, val.into()); } { @@ -173,17 +196,17 @@ impl BootstrapOptions { v8::NewStringType::Normal, ) .unwrap(); - array.set_index(scope, 14, val.into()); + array.set_index(scope, 13, val.into()); } { let val = v8::Boolean::new(scope, self.inspect); - array.set_index(scope, 15, val.into()); + array.set_index(scope, 14, val.into()); } { let val = v8::Boolean::new(scope, self.enable_testing_features); - array.set_index(scope, 16, val.into()); + array.set_index(scope, 15, val.into()); } array diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 434cca4575..f332202964 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.68.2" +channel = "1.69.0" components = ["rustfmt", "clippy"] diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index 530b938b63..6d04e5ea44 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.92.0" +version = "0.100.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/serde_v8/de.rs b/serde_v8/de.rs index d593ffbc56..edb1263ebb 100644 --- a/serde_v8/de.rs +++ b/serde_v8/de.rs @@ -4,6 +4,7 @@ use serde::de::Visitor; use serde::de::{self}; use serde::Deserialize; +use crate::error::value_to_type_str; use crate::error::Error; use crate::error::Result; use crate::keys::v8_struct_key; @@ -84,7 +85,7 @@ macro_rules! deserialize_signed { } else if let Some(x) = self.input.to_big_int(self.scope) { x.i64_value().0 as $t } else { - return Err(Error::ExpectedInteger); + return Err(Error::ExpectedInteger(value_to_type_str(self.input))); }, ) } @@ -107,7 +108,7 @@ macro_rules! 
deserialize_unsigned { } else if let Some(x) = self.input.to_big_int(self.scope) { x.u64_value().0 as $t } else { - return Err(Error::ExpectedInteger); + return Err(Error::ExpectedInteger(value_to_type_str(self.input))); }, ) } @@ -188,7 +189,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> } else if let Some(x) = self.input.to_big_int(self.scope) { bigint_to_f64(x) } else { - return Err(Error::ExpectedNumber); + return Err(Error::ExpectedNumber(value_to_type_str(self.input))); }, ) } @@ -216,7 +217,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> let string = to_utf8(v8_string, self.scope); visitor.visit_string(string) } else { - Err(Error::ExpectedString) + Err(Error::ExpectedString(value_to_type_str(self.input))) } } @@ -268,7 +269,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> V: Visitor<'de>, { let arr = v8::Local::::try_from(self.input) - .map_err(|_| Error::ExpectedArray)?; + .map_err(|_| Error::ExpectedArray(value_to_type_str(self.input)))?; visitor.visit_seq(SeqAccess::new(arr.into(), self.scope, 0..arr.length())) } @@ -308,7 +309,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> { // Assume object, then get_own_property_names let obj = v8::Local::::try_from(self.input) - .map_err(|_| Error::ExpectedObject)?; + .map_err(|_| Error::ExpectedObject(value_to_type_str(self.input)))?; if v8::Local::::try_from(self.input).is_ok() { let pairs_array = v8::Local::::try_from(self.input) @@ -363,7 +364,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> _ => { // Regular struct let obj = v8::Local::::try_from(self.input) - .or(Err(Error::ExpectedObject))?; + .map_err(|_| Error::ExpectedObject(value_to_type_str(self.input)))?; // Fields names are a hint and must be inferred when not provided if fields.is_empty() { @@ -409,7 +410,8 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> let tag = { let prop_names = obj.get_own_property_names(self.scope, Default::default()); - let prop_names = prop_names.ok_or(Error::ExpectedEnum)?; + let prop_names = 
prop_names + .ok_or_else(|| Error::ExpectedEnum(value_to_type_str(self.input)))?; let prop_names_len = prop_names.length(); if prop_names_len != 1 { return Err(Error::LengthMismatch(prop_names_len as usize, 1)); @@ -424,8 +426,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> payload, }) } else { - // TODO: improve error - Err(Error::ExpectedEnum) + Err(Error::ExpectedEnum(value_to_type_str(self.input))) } } diff --git a/serde_v8/error.rs b/serde_v8/error.rs index 94ac3c0a54..16d7882b70 100644 --- a/serde_v8/error.rs +++ b/serde_v8/error.rs @@ -9,30 +9,41 @@ pub enum Error { #[error("{0}")] Message(String), - #[error("serde_v8 error: invalid type, expected: boolean")] - ExpectedBoolean, - #[error("serde_v8 error: invalid type, expected: integer")] - ExpectedInteger, - #[error("serde_v8 error: invalid type, expected: number")] - ExpectedNumber, - #[error("serde_v8 error: invalid type, expected: string")] - ExpectedString, - #[error("serde_v8 error: invalid type, expected: array")] - ExpectedArray, - #[error("serde_v8 error: invalid type, expected: map")] - ExpectedMap, - #[error("serde_v8 error: invalid type, expected: enum")] - ExpectedEnum, - #[error("serde_v8 error: invalid type, expected: object")] - ExpectedObject, - #[error("serde_v8 error: invalid type, expected: buffer")] - ExpectedBuffer, - #[error("serde_v8 error: invalid type, expected: detachable")] - ExpectedDetachable, - #[error("serde_v8 error: invalid type, expected: external")] - ExpectedExternal, - #[error("serde_v8 error: invalid type, expected: bigint")] - ExpectedBigInt, + #[error("serde_v8 error: invalid type; expected: boolean, got: {0}")] + ExpectedBoolean(&'static str), + + #[error("serde_v8 error: invalid type; expected: integer, got: {0}")] + ExpectedInteger(&'static str), + + #[error("serde_v8 error: invalid type; expected: number, got: {0}")] + ExpectedNumber(&'static str), + + #[error("serde_v8 error: invalid type; expected: string, got: {0}")] + ExpectedString(&'static str), + + 
#[error("serde_v8 error: invalid type; expected: array, got: {0}")] + ExpectedArray(&'static str), + + #[error("serde_v8 error: invalid type; expected: map, got: {0}")] + ExpectedMap(&'static str), + + #[error("serde_v8 error: invalid type; expected: enum, got: {0}")] + ExpectedEnum(&'static str), + + #[error("serde_v8 error: invalid type; expected: object, got: {0}")] + ExpectedObject(&'static str), + + #[error("serde_v8 error: invalid type; expected: buffer, got: {0}")] + ExpectedBuffer(&'static str), + + #[error("serde_v8 error: invalid type; expected: detachable, got: {0}")] + ExpectedDetachable(&'static str), + + #[error("serde_v8 error: invalid type; expected: external, got: {0}")] + ExpectedExternal(&'static str), + + #[error("serde_v8 error: invalid type; expected: bigint, got: {0}")] + ExpectedBigInt(&'static str), #[error("serde_v8 error: invalid type, expected: utf8")] ExpectedUtf8, @@ -44,6 +55,9 @@ pub enum Error { #[error("serde_v8 error: length mismatch, got: {0}, expected: {1}")] LengthMismatch(usize, usize), + + #[error("serde_v8 error: can't create slice from resizable ArrayBuffer")] + ResizableBackingStoreNotSupported, } impl serde::ser::Error for Error { @@ -57,3 +71,89 @@ impl serde::de::Error for Error { Error::Message(msg.to_string()) } } + +pub(crate) fn value_to_type_str(value: v8::Local) -> &'static str { + if value.is_module_namespace_object() { + "Module" + } else if value.is_wasm_module_object() { + "WASM module" + } else if value.is_wasm_memory_object() { + "WASM memory object" + } else if value.is_proxy() { + "Proxy" + } else if value.is_shared_array_buffer() { + "SharedArrayBuffer" + } else if value.is_data_view() { + "DataView" + } else if value.is_big_uint64_array() { + "BigUint64Array" + } else if value.is_big_int64_array() { + "BigInt64Array" + } else if value.is_float64_array() { + "Float64Array" + } else if value.is_float32_array() { + "Float32Array" + } else if value.is_int32_array() { + "Int32Array" + } else if 
value.is_uint32_array() { + "Uint32Array" + } else if value.is_int16_array() { + "Int16Array" + } else if value.is_uint16_array() { + "Uint16Array" + } else if value.is_int8_array() { + "Int8Array" + } else if value.is_uint8_clamped_array() { + "Uint8ClampedArray" + } else if value.is_uint8_array() { + "Uint8Array" + } else if value.is_typed_array() { + "TypedArray" + } else if value.is_array_buffer_view() { + "ArrayBufferView" + } else if value.is_array_buffer() { + "ArrayBuffer" + } else if value.is_weak_set() { + "WeakSet" + } else if value.is_weak_map() { + "WeakMap" + } else if value.is_set_iterator() { + "Set Iterator" + } else if value.is_map_iterator() { + "Map Iterator" + } else if value.is_set() { + "Set" + } else if value.is_map() { + "Map" + } else if value.is_promise() { + "Promise" + } else if value.is_generator_function() { + "Generator function" + } else if value.is_async_function() { + "Async function" + } else if value.is_reg_exp() { + "RegExp" + } else if value.is_date() { + "Date" + } else if value.is_number() { + "Number" + } else if value.is_boolean() { + "Boolean" + } else if value.is_big_int() { + "bigint" + } else if value.is_array() { + "array" + } else if value.is_function() { + "function" + } else if value.is_symbol() { + "symbol" + } else if value.is_string() { + "string" + } else if value.is_null() { + "null" + } else if value.is_undefined() { + "undefined" + } else { + "unknown" + } +} diff --git a/serde_v8/magic/bigint.rs b/serde_v8/magic/bigint.rs index 69828747ff..330803daf8 100644 --- a/serde_v8/magic/bigint.rs +++ b/serde_v8/magic/bigint.rs @@ -5,6 +5,7 @@ use smallvec::SmallVec; use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; @@ -42,7 +43,7 @@ impl FromV8 for BigInt { value: v8::Local, ) -> Result { let v8bigint = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedBigInt)?; + .map_err(|_| 
Error::ExpectedBigInt(value_to_type_str(value)))?; let word_count = v8bigint.word_count(); let mut words: SmallVec<[u64; 1]> = smallvec![0u64; word_count]; let (sign_bit, _words) = v8bigint.to_words_array(&mut words); diff --git a/serde_v8/magic/bytestring.rs b/serde_v8/magic/bytestring.rs index 77771698f5..3baa704e5f 100644 --- a/serde_v8/magic/bytestring.rs +++ b/serde_v8/magic/bytestring.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; use smallvec::SmallVec; @@ -49,7 +50,7 @@ impl FromV8 for ByteString { value: v8::Local, ) -> Result { let v8str = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedString)?; + .map_err(|_| Error::ExpectedString(value_to_type_str(value)))?; if !v8str.contains_only_onebyte() { return Err(Error::ExpectedLatin1); } diff --git a/serde_v8/magic/detached_buffer.rs b/serde_v8/magic/detached_buffer.rs index 7ee4dfb214..bc4b3de677 100644 --- a/serde_v8/magic/detached_buffer.rs +++ b/serde_v8/magic/detached_buffer.rs @@ -8,6 +8,7 @@ use super::transl8::FromV8; use super::transl8::ToV8; use super::v8slice::to_ranged_buffer; use super::v8slice::V8Slice; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; // A buffer that detaches when deserialized from JS @@ -57,10 +58,10 @@ impl FromV8 for DetachedBuffer { scope: &mut v8::HandleScope, value: v8::Local, ) -> Result { - let (b, range) = - to_ranged_buffer(scope, value).or(Err(crate::Error::ExpectedBuffer))?; + let (b, range) = to_ranged_buffer(scope, value) + .map_err(|_| crate::Error::ExpectedBuffer(value_to_type_str(value)))?; if !b.is_detachable() { - return Err(crate::Error::ExpectedDetachable); + return Err(crate::Error::ExpectedDetachable(value_to_type_str(value))); } let store = b.get_backing_store(); b.detach(None); // Detach diff --git 
a/serde_v8/magic/external_pointer.rs b/serde_v8/magic/external_pointer.rs index fca6028d67..e22e41a010 100644 --- a/serde_v8/magic/external_pointer.rs +++ b/serde_v8/magic/external_pointer.rs @@ -2,6 +2,8 @@ use std::ffi::c_void; +use crate::error::value_to_type_str; + use super::transl8::impl_magic; use super::transl8::FromV8; use super::transl8::ToV8; @@ -38,7 +40,7 @@ impl FromV8 for ExternalPointer { } else if let Ok(external) = v8::Local::::try_from(value) { Ok(ExternalPointer(external.value())) } else { - Err(crate::Error::ExpectedExternal) + Err(crate::Error::ExpectedExternal(value_to_type_str(value))) } } } diff --git a/serde_v8/magic/string_or_buffer.rs b/serde_v8/magic/string_or_buffer.rs index 04ce08be22..8b1a06dbc2 100644 --- a/serde_v8/magic/string_or_buffer.rs +++ b/serde_v8/magic/string_or_buffer.rs @@ -2,6 +2,7 @@ use super::buffer::ZeroCopyBuf; use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; use std::ops::Deref; @@ -73,7 +74,7 @@ impl FromV8 for StringOrBuffer { } else if let Ok(s) = crate::from_v8(scope, value) { return Ok(Self::String(s)); } - Err(Error::ExpectedBuffer) + Err(Error::ExpectedBuffer(value_to_type_str(value))) } } diff --git a/serde_v8/magic/u16string.rs b/serde_v8/magic/u16string.rs index 57e3fd0f66..04d742da96 100644 --- a/serde_v8/magic/u16string.rs +++ b/serde_v8/magic/u16string.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::error::value_to_type_str; use crate::Error; use super::transl8::impl_magic; @@ -37,7 +38,7 @@ impl FromV8 for U16String { value: v8::Local, ) -> Result { let v8str = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedString)?; + .map_err(|_| Error::ExpectedString(value_to_type_str(value)))?; let len = v8str.length(); let mut buffer = Vec::with_capacity(len); #[allow(clippy::uninit_vec)] diff --git a/serde_v8/magic/v8slice.rs b/serde_v8/magic/v8slice.rs index 384ccf5c52..2b103f1c96 100644 --- a/serde_v8/magic/v8slice.rs +++ b/serde_v8/magic/v8slice.rs @@ -5,6 +5,8 @@ use std::ops::DerefMut; use std::ops::Range; use std::rc::Rc; +use crate::error::value_to_type_str; + use super::rawbytes; use super::transl8::FromV8; @@ -29,20 +31,6 @@ pub struct V8Slice { unsafe impl Send for V8Slice {} impl V8Slice { - pub fn from_buffer( - buffer: v8::Local, - range: Range, - ) -> Result { - let store = buffer.get_backing_store(); - if store.is_shared() { - return Err(v8::DataError::BadType { - actual: "shared ArrayBufferView", - expected: "non-shared ArrayBufferView", - }); - } - Ok(Self { store, range }) - } - fn as_slice(&self) -> &[u8] { // SAFETY: v8::SharedRef is similar to Arc<[u8]>, // it points to a fixed continuous slice of bytes on the heap. 
@@ -89,9 +77,19 @@ impl FromV8 for V8Slice { scope: &mut v8::HandleScope, value: v8::Local, ) -> Result { - to_ranged_buffer(scope, value) - .and_then(|(b, r)| Self::from_buffer(b, r)) - .map_err(|_| crate::Error::ExpectedBuffer) + match to_ranged_buffer(scope, value) { + Ok((b, range)) => { + let store = b.get_backing_store(); + if store.is_resizable_by_user_javascript() { + Err(crate::Error::ResizableBackingStoreNotSupported) + } else if store.is_shared() { + Err(crate::Error::ExpectedBuffer(value_to_type_str(value))) + } else { + Ok(V8Slice { store, range }) + } + } + Err(_) => Err(crate::Error::ExpectedBuffer(value_to_type_str(value))), + } } } diff --git a/serde_v8/tests/de.rs b/serde_v8/tests/de.rs index eae30f5404..4c5cf72836 100644 --- a/serde_v8/tests/de.rs +++ b/serde_v8/tests/de.rs @@ -265,6 +265,16 @@ fn de_buffers() { assert_eq!(&*buf, &[0x68, 0x65, 0x6C, 0x6C, 0x6F]); }, ); + + dedo("(new ArrayBuffer(4))", |scope, v| { + let buf: ZeroCopyBuf = serde_v8::from_v8(scope, v).unwrap(); + assert_eq!(&*buf, &[0x0, 0x0, 0x0, 0x0]); + }); + + dedo("(new ArrayBuffer(8, { maxByteLength: 16}))", |scope, v| { + let result: Result = serde_v8::from_v8(scope, v); + matches!(result, Err(Error::ResizableBackingStoreNotSupported)); + }); } // Structs @@ -408,7 +418,7 @@ detest!( ); defail!(defail_struct, MathOp, "123", |e| e - == Err(Error::ExpectedObject)); + == Err(Error::ExpectedObject("Number"))); #[derive(Eq, PartialEq, Debug, Deserialize)] pub struct SomeThing { diff --git a/test_napi/.gitignore b/test_napi/.gitignore index 6fdcc4a662..54de1ef345 100644 --- a/test_napi/.gitignore +++ b/test_napi/.gitignore @@ -1,4 +1,7 @@ package-lock.json # Test generated artifacts -.swc \ No newline at end of file +.swc +*.dylib +*.so +*.dll diff --git a/test_napi/bigint_test.js b/test_napi/bigint_test.js new file mode 100644 index 0000000000..8d05f957d3 --- /dev/null +++ b/test_napi/bigint_test.js @@ -0,0 +1,63 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. 
MIT license. + +import { assertEquals, assertThrows, loadTestLibrary } from "./common.js"; + +const bi = loadTestLibrary(); + +Deno.test("cases", function () { + const cases = [ + 0n, + -0n, + 1n, + -1n, + 100n, + 2121n, + -1233n, + 986583n, + -976675n, + 98765432213456789876546896323445679887645323232436587988766545658n, + -4350987086545760976737453646576078997096876957864353245245769809n, + ]; + + for (const num of cases) { + if (num > -(2n ** 63n) && num < 2n ** 63n) { + assertEquals(bi.testInt64(num), num); + assertEquals(bi.isLossless(num, true), true); + } else { + assertEquals(bi.isLossless(num, true), false); + } + + if (num >= 0 && num < 2n ** 64n) { + assertEquals(bi.testUint64(num), num); + assertEquals(bi.isLossless(num, false), true); + } else { + assertEquals(bi.isLossless(num, false), false); + } + + assertEquals(bi.testWords(num), num); + } +}); + +Deno.test( + // TODO(bartlomieju): fix this test + { ignore: true }, + function tooBigBigInt() { + assertThrows( + () => bi.createTooBigBigInt(), + Error, + "Invalid argument", + ); + }, +); + +Deno.test( + // TODO(bartlomieju): fix this test + { ignore: true }, + function exceptionForwarding() { + assertThrows( + () => bi.makeBigIntWordsThrow(), + Error, + "Maximum BigInt size exceeded", + ); + }, +); diff --git a/test_napi/common.js b/test_napi/common.js index 09378918f1..ce9b2544b1 100644 --- a/test_napi/common.js +++ b/test_napi/common.js @@ -9,17 +9,19 @@ export { export { fromFileUrl } from "../test_util/std/path/mod.ts"; const targetDir = Deno.execPath().replace(/[^\/\\]+$/, ""); -const [libPrefix, libSuffix] = { +export const [libPrefix, libSuffix] = { darwin: ["lib", "dylib"], linux: ["lib", "so"], windows: ["", "dll"], }[Deno.build.os]; +const ops = Deno[Deno.internal].core.ops; + export function loadTestLibrary() { const specifier = `${targetDir}/${libPrefix}test_napi.${libSuffix}`; // Internal, used in ext/node - return Deno[Deno.internal].core.ops.op_napi_open(specifier, { + return 
ops.op_napi_open(specifier, { Buffer: {}, }); } diff --git a/test_napi/init_test.js b/test_napi/init_test.js new file mode 100644 index 0000000000..633fdbb615 --- /dev/null +++ b/test_napi/init_test.js @@ -0,0 +1,14 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import { assert, libSuffix } from "./common.js"; + +const ops = Deno[Deno.internal].core.ops; + +Deno.test("ctr initialization (napi_module_register)", { + ignore: Deno.build.os == "windows", +}, function () { + const path = new URL(`./module.${libSuffix}`, import.meta.url).pathname; + const obj = ops.op_napi_open(path, {}); + assert(obj != null); + assert(typeof obj === "object"); +}); diff --git a/test_napi/make_callback_test.js b/test_napi/make_callback_test.js new file mode 100644 index 0000000000..63ab18810d --- /dev/null +++ b/test_napi/make_callback_test.js @@ -0,0 +1,53 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import { assertEquals, loadTestLibrary } from "./common.js"; + +const mc = loadTestLibrary(); + +Deno.test("napi makeCallback1", function () { + const resource = {}; + + let callCount = 0; + function cb() { + callCount++; + assertEquals(arguments.length, 0); + assertEquals(this, globalThis); + return 42; + } + assertEquals(mc.makeCallback(resource, globalThis, cb), 42); + assertEquals(callCount, 1); +}); + +Deno.test("napi makeCallback2", function () { + const resource = {}; + + let callCount = 0; + function cb(x) { + callCount++; + assertEquals(arguments.length, 1); + assertEquals(this, globalThis); + assertEquals(x, 1337); + return 42; + } + assertEquals(mc.makeCallback(resource, globalThis, cb, 1337), 42); + assertEquals(callCount, 1); +}); + +Deno.test("napi makeCallback3", function () { + const resource = {}; + + let callCount = 0; + + function multiArgFunc(arg1, arg2, arg3) { + callCount++; + assertEquals(arg1, 1); + assertEquals(arg2, 2); + assertEquals(arg3, 3); + return 42; + } + assertEquals( + 
mc.makeCallback(resource, globalThis, multiArgFunc, 1, 2, 3), + 42, + ); + assertEquals(callCount, 1); +}); diff --git a/test_napi/module.c b/test_napi/module.c new file mode 100644 index 0000000000..4548aa37fb --- /dev/null +++ b/test_napi/module.c @@ -0,0 +1,68 @@ +typedef struct napi_module { + int nm_version; + unsigned int nm_flags; + const char* nm_filename; + void* nm_register_func; + const char* nm_modname; + void* nm_priv; + void* reserved[4]; +} napi_module; + +#ifdef _WIN32 +#define NAPI_EXTERN __declspec(dllexport) +#define NAPI_CDECL __cdecl +#else +#define NAPI_EXTERN __attribute__((visibility("default"))) +#define NAPI_CDECL +#endif + +NAPI_EXTERN void NAPI_CDECL +napi_module_register(napi_module* mod); + +#if defined(_MSC_VER) +#if defined(__cplusplus) +#define NAPI_C_CTOR(fn) \ + static void NAPI_CDECL fn(void); \ + namespace { \ + struct fn##_ { \ + fn##_() { fn(); } \ + } fn##_v_; \ + } \ + static void NAPI_CDECL fn(void) +#else // !defined(__cplusplus) +#pragma section(".CRT$XCU", read) +// The NAPI_C_CTOR macro defines a function fn that is called during CRT +// initialization. +// C does not support dynamic initialization of static variables and this code +// simulates C++ behavior. Exporting the function pointer prevents it from being +// optimized. 
See for details: +// https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-initialization?view=msvc-170 +#define NAPI_C_CTOR(fn) \ + static void NAPI_CDECL fn(void); \ + __declspec(dllexport, allocate(".CRT$XCU")) void(NAPI_CDECL * fn##_)(void) = \ + fn; \ + static void NAPI_CDECL fn(void) +#endif // defined(__cplusplus) +#else +#define NAPI_C_CTOR(fn) \ + static void fn(void) __attribute__((constructor)); \ + static void fn(void) +#endif + +#define NAPI_MODULE_TEST(modname, regfunc) \ + static napi_module _module = { \ + 1, \ + 0, \ + __FILE__, \ + regfunc, \ + #modname, \ + 0, \ + {0}, \ + }; \ + NAPI_C_CTOR(_register_##modname) { napi_module_register(&_module); } \ + +void* init(void* env __attribute__((unused)), void* exports) { + return exports; +} + +NAPI_MODULE_TEST(TEST_NAPI_MODULE_NAME, init) diff --git a/test_napi/object_wrap_test.js b/test_napi/object_wrap_test.js index ae64821ead..3466c39e4b 100644 --- a/test_napi/object_wrap_test.js +++ b/test_napi/object_wrap_test.js @@ -1,6 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-import { assertEquals, loadTestLibrary } from "./common.js"; +import { assert, assertEquals, loadTestLibrary } from "./common.js"; const objectWrap = loadTestLibrary(); @@ -16,3 +16,26 @@ Deno.test("napi object wrap new", function () { assertEquals(obj.get_value(), 10); assertEquals(objectWrap.NapiObject.factory(), 64); }); + +Deno.test("napi bind finalizer", function () { + const obj = {}; + objectWrap.test_bind_finalizer(obj); +}); + +Deno.test("napi external finalizer", function () { + let obj = objectWrap.test_external_finalizer(); + assert(obj); + obj = null; +}); + +Deno.test("napi external buffer", function () { + let buf = objectWrap.test_external_buffer(); + assertEquals(buf, new Uint8Array([1, 2, 3])); + buf = null; +}); + +Deno.test("napi external arraybuffer", function () { + let buf = objectWrap.test_external_arraybuffer(); + assertEquals(new Uint8Array(buf), new Uint8Array([1, 2, 3])); + buf = null; +}); diff --git a/test_napi/src/async.rs b/test_napi/src/async.rs index 51e6edac9e..970d34ce19 100644 --- a/test_napi/src/async.rs +++ b/test_napi/src/async.rs @@ -49,7 +49,6 @@ unsafe extern "C" fn complete( ptr::null(), &mut _result )); - assert_napi_ok!(napi_delete_reference(env, baton.func)); assert_napi_ok!(napi_delete_async_work(env, baton.task)); } @@ -73,7 +72,7 @@ extern "C" fn test_async_work( &mut resource_name, )); - let mut async_work: napi_async_work = ptr::null_mut(); + let async_work: napi_async_work = ptr::null_mut(); let mut func: napi_ref = ptr::null_mut(); assert_napi_ok!(napi_create_reference(env, args[0], 1, &mut func)); @@ -82,6 +81,8 @@ extern "C" fn test_async_work( func, task: async_work, }); + let mut async_work = baton.task; + let baton_ptr = Box::into_raw(baton) as *mut c_void; assert_napi_ok!(napi_create_async_work( env, @@ -89,9 +90,12 @@ extern "C" fn test_async_work( resource_name, Some(execute), Some(complete), - Box::into_raw(baton) as *mut c_void, + baton_ptr, &mut async_work, )); + let mut baton = unsafe { 
Box::from_raw(baton_ptr as *mut Baton) }; + baton.task = async_work; + Box::into_raw(baton); assert_napi_ok!(napi_queue_async_work(env, async_work)); ptr::null_mut() diff --git a/test_napi/src/bigint.rs b/test_napi/src/bigint.rs new file mode 100644 index 0000000000..e901e342ae --- /dev/null +++ b/test_napi/src/bigint.rs @@ -0,0 +1,205 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::assert_napi_ok; +use crate::cstr; +use crate::napi_get_callback_info; +use crate::napi_new_property; +use napi_sys::Status::napi_pending_exception; +use napi_sys::ValueType::napi_bigint; +use napi_sys::*; +use std::ptr; + +extern "C" fn is_lossless( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, argc, _) = napi_get_callback_info!(env, info, 2); + assert_eq!(argc, 2); + + let mut is_signed = false; + assert_napi_ok!(napi_get_value_bool(env, args[1], &mut is_signed)); + + let mut lossless = false; + + if is_signed { + let mut input: i64 = 0; + assert_napi_ok!(napi_get_value_bigint_int64( + env, + args[0], + &mut input, + &mut lossless + )); + } else { + let mut input: u64 = 0; + assert_napi_ok!(napi_get_value_bigint_uint64( + env, + args[0], + &mut input, + &mut lossless + )); + } + + let mut output: napi_value = ptr::null_mut(); + assert_napi_ok!(napi_get_boolean(env, lossless, &mut output)); + + output +} + +extern "C" fn test_int64( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, _argc, _) = napi_get_callback_info!(env, info, 2); + + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[0], &mut ty)); + assert_eq!(ty, napi_bigint); + + let mut input: i64 = 0; + let mut lossless = false; + assert_napi_ok!(napi_get_value_bigint_int64( + env, + args[0], + &mut input, + &mut lossless + )); + + let mut output: napi_value = ptr::null_mut(); + assert_napi_ok!(napi_create_bigint_int64(env, input, &mut output)); + + output +} + +extern "C" fn test_uint64( + env: napi_env, + info: 
napi_callback_info, +) -> napi_value { + let (args, _argc, _) = napi_get_callback_info!(env, info, 2); + + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[0], &mut ty)); + assert_eq!(ty, napi_bigint); + + let mut input: u64 = 0; + let mut lossless = false; + assert_napi_ok!(napi_get_value_bigint_uint64( + env, + args[0], + &mut input, + &mut lossless + )); + + let mut output: napi_value = ptr::null_mut(); + assert_napi_ok!(napi_create_bigint_uint64(env, input, &mut output)); + + output +} + +extern "C" fn test_words( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, _argc, _) = napi_get_callback_info!(env, info, 1); + + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[0], &mut ty)); + assert_eq!(ty, napi_bigint); + + let mut expected_work_count = 0; + assert_napi_ok!(napi_get_value_bigint_words( + env, + args[0], + ptr::null_mut(), + &mut expected_work_count, + ptr::null_mut() + )); + + let mut sign_bit = 0; + let mut word_count: usize = 10; + let mut words: Vec = Vec::with_capacity(10); + + assert_napi_ok!(napi_get_value_bigint_words( + env, + args[0], + &mut sign_bit, + &mut word_count, + words.as_mut_ptr(), + )); + + assert_eq!(word_count, expected_work_count); + let mut output: napi_value = ptr::null_mut(); + + assert_napi_ok!(napi_create_bigint_words( + env, + sign_bit, + word_count, + words.as_ptr(), + &mut output, + )); + output +} + +extern "C" fn create_too_big_big_int( + env: napi_env, + _info: napi_callback_info, +) -> napi_value { + let sign_bit = 0; + let word_count = usize::MAX; + let words: Vec = Vec::with_capacity(10); + + let mut output: napi_value = ptr::null_mut(); + let result = unsafe { + napi_create_bigint_words( + env, + sign_bit, + word_count, + words.as_ptr(), + &mut output, + ) + }; + assert_eq!(result, 1); + + output +} + +extern "C" fn make_big_int_words_throw( + env: napi_env, + _info: napi_callback_info, +) -> napi_value { + let words: Vec = Vec::with_capacity(10); + let mut output = 
ptr::null_mut(); + + let status = unsafe { + napi_create_bigint_words(env, 0, usize::MAX, words.as_ptr(), &mut output) + }; + + if status != napi_pending_exception { + unsafe { + napi_throw_error( + env, + ptr::null_mut(), + cstr!("Expected status 'napi_pending_exception'"), + ) + }; + } + + ptr::null_mut() +} + +pub fn init(env: napi_env, exports: napi_value) { + let properties = &[ + napi_new_property!(env, "isLossless", is_lossless), + napi_new_property!(env, "testInt64", test_int64), + napi_new_property!(env, "testUint64", test_uint64), + napi_new_property!(env, "testWords", test_words), + napi_new_property!(env, "createTooBigBigInt", create_too_big_big_int), + napi_new_property!(env, "makeBigIntWordsThrow", make_big_int_words_throw), + ]; + + assert_napi_ok!(napi_define_properties( + env, + exports, + properties.len(), + properties.as_ptr() + )); +} diff --git a/test_napi/src/callback.rs b/test_napi/src/callback.rs index c863eb7a60..ae917e7b13 100644 --- a/test_napi/src/callback.rs +++ b/test_napi/src/callback.rs @@ -5,6 +5,7 @@ use crate::napi_get_callback_info; use crate::napi_new_property; use napi_sys::ValueType::napi_function; use napi_sys::ValueType::napi_object; +use napi_sys::ValueType::napi_undefined; use napi_sys::*; use std::ptr; @@ -13,7 +14,9 @@ extern "C" fn test_callback_run( env: napi_env, info: napi_callback_info, ) -> napi_value { - let (args, argc, _) = napi_get_callback_info!(env, info, 2); + // We want to have argv with size 4, even though the callback will have + // only two arguments. We'll assert that the remaining two args are undefined. 
+ let (args, argc, _) = napi_get_callback_info!(env, info, 4); assert_eq!(argc, 2); let mut ty = -1; @@ -24,6 +27,14 @@ extern "C" fn test_callback_run( assert_napi_ok!(napi_typeof(env, args[1], &mut ty)); assert_eq!(ty, napi_object); + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[2], &mut ty)); + assert_eq!(ty, napi_undefined); + + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[3], &mut ty)); + assert_eq!(ty, napi_undefined); + let mut len = 0; assert_napi_ok!(napi_get_array_length(env, args[1], &mut len)); diff --git a/test_napi/src/finalizer.rs b/test_napi/src/finalizer.rs new file mode 100644 index 0000000000..538f9599ef --- /dev/null +++ b/test_napi/src/finalizer.rs @@ -0,0 +1,141 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::assert_napi_ok; +use crate::napi_get_callback_info; +use crate::napi_new_property; +use napi_sys::ValueType::napi_object; +use napi_sys::*; +use std::ptr; + +unsafe extern "C" fn finalize_cb( + _env: napi_env, + data: *mut ::std::os::raw::c_void, + hint: *mut ::std::os::raw::c_void, +) { + assert!(data.is_null()); + assert!(hint.is_null()); +} + +extern "C" fn test_bind_finalizer( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, argc, _) = napi_get_callback_info!(env, info, 1); + assert_eq!(argc, 1); + + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[0], &mut ty)); + assert_eq!(ty, napi_object); + + let obj = args[0]; + unsafe { + napi_add_finalizer( + env, + obj, + ptr::null_mut(), + Some(finalize_cb), + ptr::null_mut(), + ptr::null_mut(), + ) + }; + obj +} + +struct Thing { + _allocation: Vec, +} + +unsafe extern "C" fn finalize_cb_drop( + _env: napi_env, + data: *mut ::std::os::raw::c_void, + hint: *mut ::std::os::raw::c_void, +) { + let _ = Box::from_raw(data as *mut Thing); + assert!(hint.is_null()); +} + +extern "C" fn test_external_finalizer( + env: napi_env, + _: napi_callback_info, +) -> napi_value { + let data = 
Box::into_raw(Box::new(Thing { + _allocation: vec![1, 2, 3], + })); + + let mut result = ptr::null_mut(); + assert_napi_ok!(napi_create_external( + env, + data as _, + Some(finalize_cb_drop), + ptr::null_mut(), + &mut result + )); + result +} + +unsafe extern "C" fn finalize_cb_vec( + _env: napi_env, + data: *mut ::std::os::raw::c_void, + hint: *mut ::std::os::raw::c_void, +) { + let _ = Vec::from_raw_parts(data as *mut u8, 3, 3); + assert!(hint.is_null()); +} + +extern "C" fn test_external_buffer( + env: napi_env, + _: napi_callback_info, +) -> napi_value { + let mut result = ptr::null_mut(); + let buf: Vec = vec![1, 2, 3]; + assert_napi_ok!(napi_create_external_buffer( + env, + 3, + buf.as_ptr() as _, + Some(finalize_cb_vec), + ptr::null_mut(), + &mut result + )); + std::mem::forget(buf); + + result +} + +extern "C" fn test_external_arraybuffer( + env: napi_env, + _: napi_callback_info, +) -> napi_value { + let mut result = ptr::null_mut(); + let buf: Vec = vec![1, 2, 3]; + assert_napi_ok!(napi_create_external_arraybuffer( + env, + buf.as_ptr() as _, + 3, + Some(finalize_cb_vec), + ptr::null_mut(), + &mut result + )); + std::mem::forget(buf); + + result +} + +pub fn init(env: napi_env, exports: napi_value) { + let properties = &[ + napi_new_property!(env, "test_bind_finalizer", test_bind_finalizer), + napi_new_property!(env, "test_external_finalizer", test_external_finalizer), + napi_new_property!(env, "test_external_buffer", test_external_buffer), + napi_new_property!( + env, + "test_external_arraybuffer", + test_external_arraybuffer + ), + ]; + + assert_napi_ok!(napi_define_properties( + env, + exports, + properties.len(), + properties.as_ptr() + )); +} diff --git a/test_napi/src/lib.rs b/test_napi/src/lib.rs index dba9f65a5c..9342656fda 100644 --- a/test_napi/src/lib.rs +++ b/test_napi/src/lib.rs @@ -9,11 +9,14 @@ use napi_sys::*; pub mod array; pub mod arraybuffer; pub mod r#async; +pub mod bigint; pub mod callback; pub mod coerce; pub mod date; pub mod env; 
pub mod error; +pub mod finalizer; +pub mod make_callback; pub mod mem; pub mod numbers; pub mod object_wrap; @@ -21,6 +24,7 @@ pub mod primitives; pub mod promise; pub mod properties; pub mod strings; +pub mod symbol; pub mod tsfn; pub mod typedarray; @@ -146,6 +150,7 @@ unsafe extern "C" fn napi_register_module_v1( array::init(env, exports); env::init(env, exports); error::init(env, exports); + finalizer::init(env, exports); primitives::init(env, exports); properties::init(env, exports); promise::init(env, exports); @@ -156,6 +161,9 @@ unsafe extern "C" fn napi_register_module_v1( date::init(env, exports); tsfn::init(env, exports); mem::init(env, exports); + bigint::init(env, exports); + symbol::init(env, exports); + make_callback::init(env, exports); init_cleanup_hook(env, exports); diff --git a/test_napi/src/make_callback.rs b/test_napi/src/make_callback.rs new file mode 100644 index 0000000000..c8d2b3342a --- /dev/null +++ b/test_napi/src/make_callback.rs @@ -0,0 +1,85 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use crate::assert_napi_ok; +use crate::cstr; +use napi_sys::ValueType::napi_function; +use napi_sys::*; +use std::ptr; + +extern "C" fn make_callback( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + const MAX_ARGUMENTS: usize = 10; + const RESERVED_ARGUMENTS: usize = 3; + + let mut args = [std::ptr::null_mut(); MAX_ARGUMENTS]; + let mut argc = MAX_ARGUMENTS; + assert_napi_ok!(napi_get_cb_info( + env, + info, + &mut argc, + args.as_mut_ptr(), + ptr::null_mut(), + ptr::null_mut(), + )); + + assert!(argc > 0); + let resource = args[0]; + let recv = args[1]; + let func = args[2]; + + let mut argv: Vec = Vec::new(); + argv.resize(MAX_ARGUMENTS - RESERVED_ARGUMENTS, ptr::null_mut()); + for i in RESERVED_ARGUMENTS..argc { + argv[i - RESERVED_ARGUMENTS] = args[i]; + } + + let mut func_type: napi_valuetype = -1; + assert_napi_ok!(napi_typeof(env, func, &mut func_type)); + + let mut resource_name = ptr::null_mut(); + assert_napi_ok!(napi_create_string_utf8( + env, + cstr!("test"), + usize::MAX, + &mut resource_name + )); + + let mut context: napi_async_context = ptr::null_mut(); + assert_napi_ok!(napi_async_init(env, resource, resource_name, &mut context)); + + let mut result = ptr::null_mut(); + assert_eq!(func_type, napi_function); + assert_napi_ok!(napi_make_callback( + env, + context, + recv, + func, + argc - RESERVED_ARGUMENTS, + argv.as_mut_ptr(), + &mut result + )); + + assert_napi_ok!(napi_async_destroy(env, context)); + result +} + +pub fn init(env: napi_env, exports: napi_value) { + let mut fn_: napi_value = ptr::null_mut(); + + assert_napi_ok!(napi_create_function( + env, + ptr::null_mut(), + usize::MAX, + Some(make_callback), + ptr::null_mut(), + &mut fn_, + )); + assert_napi_ok!(napi_set_named_property( + env, + exports, + cstr!("makeCallback"), + fn_ + )); +} diff --git a/test_napi/src/symbol.rs b/test_napi/src/symbol.rs new file mode 100644 index 0000000000..5f404d5904 --- /dev/null +++ b/test_napi/src/symbol.rs @@ -0,0 +1,39 @@ +// 
Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::assert_napi_ok; +use crate::napi_get_callback_info; +use crate::napi_new_property; +use napi_sys::ValueType::napi_string; +use napi_sys::*; + +extern "C" fn symbol_new( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, argc, _) = napi_get_callback_info!(env, info, 1); + + let mut description: napi_value = std::ptr::null_mut(); + + if argc >= 1 { + let mut ty = -1; + assert_napi_ok!(napi_typeof(env, args[0], &mut ty)); + assert_eq!(ty, napi_string); + description = args[0]; + } + + let mut symbol: napi_value = std::ptr::null_mut(); + assert_napi_ok!(napi_create_symbol(env, description, &mut symbol)); + + symbol +} + +pub fn init(env: napi_env, exports: napi_value) { + let properties = &[napi_new_property!(env, "symbolNew", symbol_new)]; + + assert_napi_ok!(napi_define_properties( + env, + exports, + properties.len(), + properties.as_ptr() + )); +} diff --git a/test_napi/symbol_test.js b/test_napi/symbol_test.js new file mode 100644 index 0000000000..00c3edda22 --- /dev/null +++ b/test_napi/symbol_test.js @@ -0,0 +1,49 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +import { assert, assertEquals, loadTestLibrary } from "./common.js"; + +const testSymbol = loadTestLibrary(); + +Deno.test("napi symbol1", () => { + const sym = testSymbol.symbolNew("test"); + assertEquals(sym.toString(), "Symbol(test)"); + + const myObj = {}; + const fooSym = testSymbol.symbolNew("foo"); + const otherSym = testSymbol.symbolNew("bar"); + myObj.foo = "bar"; + myObj[fooSym] = "baz"; + myObj[otherSym] = "bing"; + assertEquals(myObj.foo, "bar"); + assertEquals(myObj[fooSym], "baz"); + assertEquals(myObj[otherSym], "bing"); +}); + +Deno.test("napi symbol2", () => { + const sym = testSymbol.symbolNew("test"); + assertEquals(sym.toString(), "Symbol(test)"); + + const myObj = {}; + const fooSym = testSymbol.symbolNew("foo"); + myObj.foo = "bar"; + myObj[fooSym] = "baz"; + + assertEquals(Object.keys(myObj), ["foo"]); + assertEquals(Object.getOwnPropertyNames(myObj), ["foo"]); + assertEquals(Object.getOwnPropertySymbols(myObj), [fooSym]); +}); + +Deno.test("napi symbol3", () => { + assert(testSymbol.symbolNew() !== testSymbol.symbolNew()); + assert(testSymbol.symbolNew("foo") !== testSymbol.symbolNew("foo")); + assert(testSymbol.symbolNew("foo") !== testSymbol.symbolNew("bar")); + + const foo1 = testSymbol.symbolNew("foo"); + const foo2 = testSymbol.symbolNew("foo"); + const object = { + [foo1]: 1, + [foo2]: 2, + }; + assertEquals(object[foo1], 1); + assertEquals(object[foo2], 2); +}); diff --git a/test_napi/tests/napi_tests.rs b/test_napi/tests/napi_tests.rs index 747f6aa276..c3ce285e07 100644 --- a/test_napi/tests/napi_tests.rs +++ b/test_napi/tests/napi_tests.rs @@ -18,6 +18,35 @@ fn build() { } let build_plugin_output = build_plugin.output().unwrap(); assert!(build_plugin_output.status.success()); + + // cc module.c -undefined dynamic_lookup -shared -Wl,-no_fixup_chains -dynamic -o module.dylib + #[cfg(not(target_os = "windows"))] + { + let out = if cfg!(target_os = "macos") { + "module.dylib" + } else { + "module.so" + }; + + let mut cc = 
Command::new("cc"); + + #[cfg(not(target_os = "macos"))] + let c_module = cc.arg("module.c").arg("-shared").arg("-o").arg(out); + + #[cfg(target_os = "macos")] + let c_module = { + cc.arg("module.c") + .arg("-undefined") + .arg("dynamic_lookup") + .arg("-shared") + .arg("-Wl,-no_fixup_chains") + .arg("-dynamic") + .arg("-o") + .arg(out) + }; + let c_module_output = c_module.output().unwrap(); + assert!(c_module_output.status.success()); + } } #[test] @@ -26,6 +55,7 @@ fn napi_tests() { let output = deno_cmd() .current_dir(test_util::napi_tests_path()) + .env("RUST_BACKTRACE", "1") .arg("test") .arg("--allow-read") .arg("--allow-env") @@ -39,6 +69,7 @@ fn napi_tests() { let stderr = std::str::from_utf8(&output.stderr).unwrap(); if !output.status.success() { + eprintln!("exit code {:?}", output.status.code()); println!("stdout {stdout}"); println!("stderr {stderr}"); } diff --git a/test_napi/typedarray_test.js b/test_napi/typedarray_test.js index f9b3466264..7a60a3ab4b 100644 --- a/test_napi/typedarray_test.js +++ b/test_napi/typedarray_test.js @@ -28,9 +28,12 @@ Deno.test("napi typedarray float64", function () { assertEquals(Math.round(10 * doubleResult[2]) / 10, -6.6); }); -Deno.test("napi typedarray external", function () { - assertEquals( - new Uint8Array(typedarray.test_external()), - new Uint8Array([0, 1, 2, 3]), - ); -}); +// TODO(bartlomieju): this test causes segfaults when used with jemalloc. +// Node documentation provides a hint that this function is not supported by +// other runtime like electron. 
+// Deno.test("napi typedarray external", function () { +// assertEquals( +// new Uint8Array(typedarray.test_external()), +// new Uint8Array([0, 1, 2, 3]), +// ); +// }); diff --git a/test_util/Cargo.toml b/test_util/Cargo.toml index cb1ea46cc2..e4a992b51e 100644 --- a/test_util/Cargo.toml +++ b/test_util/Cargo.toml @@ -19,8 +19,10 @@ async-stream = "0.3.3" atty.workspace = true base64.workspace = true console_static_text.workspace = true +fastwebsockets = { workspace = true, features = ["upgrade"] } flate2.workspace = true futures.workspace = true +glob.workspace = true hyper = { workspace = true, features = ["server", "http1", "http2", "runtime"] } lazy-regex.workspace = true lsp-types.workspace = true @@ -40,7 +42,6 @@ tar.workspace = true tempfile.workspace = true tokio.workspace = true tokio-rustls.workspace = true -tokio-tungstenite.workspace = true url.workspace = true [target.'cfg(unix)'.dependencies] diff --git a/test_util/src/assertions.rs b/test_util/src/assertions.rs index a004530b6e..29066ded08 100644 --- a/test_util/src/assertions.rs +++ b/test_util/src/assertions.rs @@ -1,5 +1,15 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +#[macro_export] +macro_rules! assert_starts_with { + ($string:expr, $($test:expr),+) => { + let string = $string; // This might be a function call or something + if !($(string.starts_with($test))||+) { + panic!("{:?} does not start with {:?}", string, [$($test),+]); + } + } +} + #[macro_export] macro_rules! assert_ends_with { ($left:expr, $right:expr $(,)?) 
=> { diff --git a/test_util/src/builders.rs b/test_util/src/builders.rs index a5f192b73a..a000e5bcd3 100644 --- a/test_util/src/builders.rs +++ b/test_util/src/builders.rs @@ -341,6 +341,7 @@ impl TestCommandBuilder { )) } + #[track_caller] pub fn run(&self) -> TestCommandOutput { fn read_pipe_to_string(mut pipe: os_pipe::PipeReader) -> String { let mut output = String::new(); @@ -393,7 +394,7 @@ impl TestCommandBuilder { (Some(combined_reader), None) }; - let mut process = command.spawn().unwrap(); + let mut process = command.spawn().expect("Failed spawning command"); if let Some(input) = &self.stdin { let mut p_stdin = process.stdin.take().unwrap(); diff --git a/test_util/src/factory.rs b/test_util/src/factory.rs new file mode 100644 index 0000000000..f11d774d43 --- /dev/null +++ b/test_util/src/factory.rs @@ -0,0 +1,99 @@ +use std::collections::HashSet; +use std::path::PathBuf; + +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use glob::glob; + +/// Generate a unit test factory verified and backed by a glob. +#[macro_export] +macro_rules! unit_test_factory { + ($test_fn:ident, $base:literal, $glob:literal, [ $( $test:ident $(= $($path:ident)/+)? ),+ $(,)? ]) => { + #[test] + fn check_test_glob() { + $crate::factory::check_test_glob($base, $glob, [ $( ( stringify!($test), stringify!( $( $($path)/+ )? ) ) ),+ ].as_slice()); + } + + $( + #[allow(non_snake_case)] + #[test] + fn $test() { + $test_fn($crate::factory::get_path(stringify!($test), stringify!( $( $($path)/+ )?))) + } + )+ + }; + (__test__ $($prefix:ident)* $test:ident) => { + #[allow(non_snake_case)] + #[test] + fn $test() { + $test_fn(stringify!($($prefix)/+ $test)) + } + }; +} + +pub fn get_path(test: &'static str, path: &'static str) -> String { + if path.is_empty() { + test.to_owned() + } else { + path.replace(' ', "") + } +} + +/// Validate that the glob matches the list of tests specified. 
+pub fn check_test_glob( + base: &'static str, + glob_pattern: &'static str, + files: &[(&'static str, &'static str)], +) { + let base_dir = PathBuf::from(base) + .canonicalize() + .unwrap() + .to_string_lossy() + // Strip Windows slashes + .replace('\\', "/"); + let mut found = HashSet::new(); + let mut list = vec![]; + for file in glob(&format!("{}/{}", base, glob_pattern)) + .expect("Failed to read test path") + { + let mut file = file + .expect("Invalid file from glob") + .canonicalize() + .unwrap(); + file.set_extension(""); + let name = file.file_name().unwrap().to_string_lossy(); + // Strip windows slashes + let file = file.to_string_lossy().replace('\\', "/"); + let file = file + .strip_prefix(&base_dir) + .expect("File {file} did not start with {base_dir} prefix"); + let file = file.strip_prefix('/').unwrap().to_owned(); + if file.contains('/') { + list.push(format!("{}={}", name, file)) + } else { + list.push(file.clone()); + } + found.insert(file); + } + + let mut error = false; + for (test, path) in files { + // Remove spaces from the macro + let path = if path.is_empty() { + (*test).to_owned() + } else { + path.replace(' ', "") + }; + if found.contains(&path) { + found.remove(&path); + } else { + error = true; + } + } + + if error || !found.is_empty() { + panic!( + "Glob did not match provided list of files. Expected: \n[\n {}\n]", + list.join(",\n ") + ); + } +} diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs index 6a6614ad0a..f88092ad9e 100644 --- a/test_util/src/lib.rs +++ b/test_util/src/lib.rs @@ -2,6 +2,7 @@ // Usage: provide a port as argument to run hyper_hello benchmark server // otherwise this starts multiple servers on many ports for test endpoints. 
use anyhow::anyhow; +use futures::Future; use futures::FutureExt; use futures::Stream; use futures::StreamExt; @@ -9,6 +10,7 @@ use hyper::header::HeaderValue; use hyper::server::Server; use hyper::service::make_service_fn; use hyper::service::service_fn; +use hyper::upgrade::Upgraded; use hyper::Body; use hyper::Request; use hyper::Response; @@ -49,11 +51,11 @@ use tokio::net::TcpListener; use tokio::net::TcpStream; use tokio_rustls::rustls; use tokio_rustls::TlsAcceptor; -use tokio_tungstenite::accept_async; use url::Url; pub mod assertions; mod builders; +pub mod factory; pub mod lsp; mod npm; pub mod pty; @@ -79,8 +81,10 @@ const TLS_CLIENT_AUTH_PORT: u16 = 4552; const BASIC_AUTH_REDIRECT_PORT: u16 = 4554; const TLS_PORT: u16 = 4557; const HTTPS_PORT: u16 = 5545; -const H1_ONLY_PORT: u16 = 5546; -const H2_ONLY_PORT: u16 = 5547; +const H1_ONLY_TLS_PORT: u16 = 5546; +const H2_ONLY_TLS_PORT: u16 = 5547; +const H1_ONLY_PORT: u16 = 5548; +const H2_ONLY_PORT: u16 = 5549; const HTTPS_CLIENT_AUTH_PORT: u16 = 5552; const WS_PORT: u16 = 4242; const WSS_PORT: u16 = 4243; @@ -302,69 +306,128 @@ async fn basic_auth_redirect( Ok(resp) } +async fn echo_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + loop { + let frame = ws.read_frame().await.unwrap(); + match frame.opcode { + fastwebsockets::OpCode::Close => break, + fastwebsockets::OpCode::Text | fastwebsockets::OpCode::Binary => { + ws.write_frame(frame).await.unwrap(); + } + _ => {} + } + } + + Ok(()) +} + +type WsHandler = + fn( + fastwebsockets::WebSocket, + ) -> Pin> + Send>>; + +fn spawn_ws_server(stream: S, handler: WsHandler) +where + S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, +{ + let srv_fn = service_fn(move |mut req: Request| async move { + let (response, upgrade_fut) = fastwebsockets::upgrade::upgrade(&mut req) + .map_err(|e| anyhow!("Error upgrading websocket connection: {}", e))?; 
+ + tokio::spawn(async move { + let ws = upgrade_fut + .await + .map_err(|e| anyhow!("Error upgrading websocket connection: {}", e)) + .unwrap(); + + if let Err(e) = handler(ws).await { + eprintln!("Error in websocket connection: {}", e); + } + }); + + Ok::<_, anyhow::Error>(response) + }); + + tokio::spawn(async move { + let conn_fut = hyper::server::conn::Http::new() + .serve_connection(stream, srv_fn) + .with_upgrades(); + + if let Err(e) = conn_fut.await { + eprintln!("websocket server error: {e:?}"); + } + }); +} + async fn run_ws_server(addr: &SocketAddr) { let listener = TcpListener::bind(addr).await.unwrap(); println!("ready: ws"); // Eye catcher for HttpServerCount while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream_fut = accept_async(stream); - - let ws_stream = ws_stream_fut.await; - if let Ok(ws_stream) = ws_stream { - let (tx, rx) = ws_stream.split(); - rx.forward(tx) - .map(|result| { - if let Err(e) = result { - println!("websocket server error: {e:?}"); - } - }) - .await; - } - }); + spawn_ws_server(stream, |ws| Box::pin(echo_websocket_handler(ws))); } } +async fn ping_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + use fastwebsockets::Frame; + use fastwebsockets::OpCode; + + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + for i in 0..9 { + ws.write_frame(Frame::new(true, OpCode::Ping, None, vec![])) + .await + .unwrap(); + + let frame = ws.read_frame().await.unwrap(); + assert_eq!(frame.opcode, OpCode::Pong); + assert!(frame.payload.is_empty()); + + ws.write_frame(Frame::text(format!("hello {}", i).as_bytes().to_vec())) + .await + .unwrap(); + + let frame = ws.read_frame().await.unwrap(); + assert_eq!(frame.opcode, OpCode::Text); + assert_eq!(frame.payload, format!("hello {}", i).as_bytes()); + } + + ws.write_frame(fastwebsockets::Frame::close(1000, b"")) + .await + .unwrap(); + + Ok(()) +} + async fn run_ws_ping_server(addr: &SocketAddr) { let 
listener = TcpListener::bind(addr).await.unwrap(); println!("ready: ws"); // Eye catcher for HttpServerCount while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream = accept_async(stream).await; - use futures::SinkExt; - use tokio_tungstenite::tungstenite::Message; - if let Ok(mut ws_stream) = ws_stream { - for i in 0..9 { - ws_stream.send(Message::Ping(vec![])).await.unwrap(); - - let msg = ws_stream.next().await.unwrap().unwrap(); - assert_eq!(msg, Message::Pong(vec![])); - - ws_stream - .send(Message::Text(format!("hello {}", i))) - .await - .unwrap(); - - let msg = ws_stream.next().await.unwrap().unwrap(); - assert_eq!(msg, Message::Text(format!("hello {}", i))); - } - - ws_stream.close(None).await.unwrap(); - } - }); + spawn_ws_server(stream, |ws| Box::pin(ping_websocket_handler(ws))); } } +async fn close_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + ws.write_frame(fastwebsockets::Frame::close_raw(vec![])) + .await + .unwrap(); + + Ok(()) +} + async fn run_ws_close_server(addr: &SocketAddr) { let listener = TcpListener::bind(addr).await.unwrap(); while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream_fut = accept_async(stream); - - let ws_stream = ws_stream_fut.await; - if let Ok(mut ws_stream) = ws_stream { - ws_stream.close(None).await.unwrap(); - } - }); + spawn_ws_server(stream, |ws| Box::pin(close_websocket_handler(ws))); } } @@ -428,11 +491,11 @@ async fn get_tls_config( let mut config = rustls::ServerConfig::builder() .with_safe_defaults() - .with_client_cert_verifier( + .with_client_cert_verifier(Arc::new( rustls::server::AllowAnyAnonymousOrAuthenticatedClient::new( root_cert_store, ), - ) + )) .with_single_cert(certs, PrivateKey(key)) .map_err(|e| anyhow!("Error setting cert: {:?}", e)) .unwrap(); @@ -471,18 +534,9 @@ async fn run_wss_server(addr: 
&SocketAddr) { tokio::spawn(async move { match acceptor.accept(stream).await { Ok(tls_stream) => { - let ws_stream_fut = accept_async(tls_stream); - let ws_stream = ws_stream_fut.await; - if let Ok(ws_stream) = ws_stream { - let (tx, rx) = ws_stream.split(); - rx.forward(tx) - .map(|result| { - if let Err(e) = result { - println!("Websocket server error: {e:?}"); - } - }) - .await; - } + spawn_ws_server(tls_stream, |ws| { + Box::pin(echo_websocket_handler(ws)) + }); } Err(e) => { eprintln!("TLS accept error: {e:?}"); @@ -673,7 +727,10 @@ async fn main_server( req: Request, ) -> Result, hyper::http::Error> { return match (req.method(), req.uri().path()) { - (&hyper::Method::POST, "/echo_server") => { + ( + &hyper::Method::POST | &hyper::Method::PATCH | &hyper::Method::PUT, + "/echo_server", + ) => { let (parts, body) = req.into_parts(); let mut response = Response::new(body); @@ -681,16 +738,7 @@ async fn main_server( *response.status_mut() = StatusCode::from_bytes(status.as_bytes()).unwrap(); } - if let Some(content_type) = parts.headers.get("content-type") { - response - .headers_mut() - .insert("content-type", content_type.clone()); - } - if let Some(user_agent) = parts.headers.get("user-agent") { - response - .headers_mut() - .insert("user-agent", user_agent.clone()); - } + response.headers_mut().extend(parts.headers); Ok(response) } (&hyper::Method::POST, "/echo_multipart_file") => { @@ -1037,6 +1085,11 @@ async fn main_server( )); Ok(res) } + (_, "/search_params") => { + let query = req.uri().query().map(|s| s.to_string()); + let res = Response::new(Body::from(query.unwrap_or_default())); + Ok(res) + } _ => { let mut file_path = testdata_path(); file_path.push(&req.uri().path()[1..]); @@ -1344,8 +1397,9 @@ async fn wrap_main_https_server() { } } -async fn wrap_https_h1_only_server() { - let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], H1_ONLY_PORT)); +async fn wrap_https_h1_only_tls_server() { + let main_server_https_addr = + 
SocketAddr::from(([127, 0, 0, 1], H1_ONLY_TLS_PORT)); let cert_file = "tls/localhost.crt"; let key_file = "tls/localhost.key"; let ca_cert_file = "tls/RootCA.pem"; @@ -1389,8 +1443,9 @@ async fn wrap_https_h1_only_server() { } } -async fn wrap_https_h2_only_server() { - let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], H2_ONLY_PORT)); +async fn wrap_https_h2_only_tls_server() { + let main_server_https_addr = + SocketAddr::from(([127, 0, 0, 1], H2_ONLY_TLS_PORT)); let cert_file = "tls/localhost.crt"; let key_file = "tls/localhost.key"; let ca_cert_file = "tls/RootCA.pem"; @@ -1434,6 +1489,28 @@ async fn wrap_https_h2_only_server() { } } +async fn wrap_https_h1_only_server() { + let main_server_http_addr = SocketAddr::from(([127, 0, 0, 1], H1_ONLY_PORT)); + + let main_server_http_svc = + make_service_fn(|_| async { Ok::<_, Infallible>(service_fn(main_server)) }); + let main_server_http = Server::bind(&main_server_http_addr) + .http1_only(true) + .serve(main_server_http_svc); + let _ = main_server_http.await; +} + +async fn wrap_https_h2_only_server() { + let main_server_http_addr = SocketAddr::from(([127, 0, 0, 1], H2_ONLY_PORT)); + + let main_server_http_svc = + make_service_fn(|_| async { Ok::<_, Infallible>(service_fn(main_server)) }); + let main_server_http = Server::bind(&main_server_http_addr) + .http2_only(true) + .serve(main_server_http_svc); + let _ = main_server_http.await; +} + async fn wrap_client_auth_https_server() { let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], HTTPS_CLIENT_AUTH_PORT)); @@ -1522,6 +1599,8 @@ pub async fn run_all_servers() { let client_auth_server_https_fut = wrap_client_auth_https_server(); let main_server_fut = wrap_main_server(); let main_server_https_fut = wrap_main_https_server(); + let h1_only_server_tls_fut = wrap_https_h1_only_tls_server(); + let h2_only_server_tls_fut = wrap_https_h2_only_tls_server(); let h1_only_server_fut = wrap_https_h1_only_server(); let h2_only_server_fut = 
wrap_https_h2_only_server(); @@ -1543,6 +1622,8 @@ pub async fn run_all_servers() { main_server_fut, main_server_https_fut, client_auth_server_https_fut, + h1_only_server_tls_fut, + h2_only_server_tls_fut, h1_only_server_fut, h2_only_server_fut ) diff --git a/test_util/src/lsp.rs b/test_util/src/lsp.rs index 3e9d0a80bb..831df28e9e 100644 --- a/test_util/src/lsp.rs +++ b/test_util/src/lsp.rs @@ -87,6 +87,12 @@ impl<'a> From<&'a [u8]> for LspMessage { } } +#[derive(Debug, Deserialize)] +struct DiagnosticBatchNotificationParams { + batch_index: usize, + messages_len: usize, +} + fn read_message(reader: &mut R) -> Result>> where R: io::Read + io::BufRead, @@ -174,6 +180,25 @@ impl LspStdoutReader { cvar.wait(&mut msg_queue); } } + + pub fn read_latest_message( + &mut self, + mut get_match: impl FnMut(&LspMessage) -> Option, + ) -> R { + let (msg_queue, cvar) = &*self.pending_messages; + let mut msg_queue = msg_queue.lock(); + loop { + for i in (0..msg_queue.len()).rev() { + let msg = &msg_queue[i]; + if let Some(result) = get_match(msg) { + let msg = msg_queue.remove(i); + self.read_messages.push(msg); + return result; + } + } + cvar.wait(&mut msg_queue); + } + } } pub struct InitializeParamsBuilder { @@ -378,6 +403,12 @@ impl InitializeParamsBuilder { self } + pub fn set_preload_limit(&mut self, arg: usize) -> &mut Self { + let options = self.initialization_options_mut(); + options.insert("documentPreloadLimit".to_string(), arg.into()); + self + } + pub fn set_tls_certificate(&mut self, value: impl AsRef) -> &mut Self { let options = self.initialization_options_mut(); options.insert( @@ -439,6 +470,7 @@ pub struct LspClientBuilder { print_stderr: bool, deno_exe: PathBuf, context: Option, + use_diagnostic_sync: bool, } impl LspClientBuilder { @@ -448,6 +480,7 @@ impl LspClientBuilder { print_stderr: false, deno_exe: deno_exe_path(), context: None, + use_diagnostic_sync: true, } } @@ -464,6 +497,13 @@ impl LspClientBuilder { self } + /// Whether to use the 
synchronization messages to better sync diagnostics + /// between the test client and server. + pub fn use_diagnostic_sync(&mut self, value: bool) -> &mut Self { + self.use_diagnostic_sync = value; + self + } + pub fn set_test_context(&mut self, test_context: &TestContext) -> &mut Self { self.context = Some(test_context.clone()); self @@ -479,6 +519,11 @@ impl LspClientBuilder { command .env("DENO_DIR", deno_dir.path()) .env("NPM_CONFIG_REGISTRY", npm_registry_url()) + // turn on diagnostic synchronization communication + .env( + "DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG", + if self.use_diagnostic_sync { "1" } else { "" }, + ) .arg("lsp") .stdin(Stdio::piped()) .stdout(Stdio::piped()); @@ -504,7 +549,6 @@ impl LspClientBuilder { .unwrap_or_else(|| TestContextBuilder::new().build()), writer, deno_dir, - diagnosable_open_file_count: 0, }) } } @@ -517,7 +561,6 @@ pub struct LspClient { writer: io::BufWriter, deno_dir: TempDir, context: TestContext, - diagnosable_open_file_count: usize, } impl Drop for LspClient { @@ -603,20 +646,6 @@ impl LspClient { } pub fn did_open_raw(&mut self, params: Value) { - let text_doc = params - .as_object() - .unwrap() - .get("textDocument") - .unwrap() - .as_object() - .unwrap(); - if matches!( - text_doc.get("languageId").unwrap().as_str().unwrap(), - "typescript" | "javascript" - ) { - self.diagnosable_open_file_count += 1; - } - self.write_notification("textDocument/didOpen", params); } @@ -626,11 +655,46 @@ impl LspClient { self.write_response(id, result); } + fn get_latest_diagnostic_batch_index(&mut self) -> usize { + let result = self + .write_request("deno/internalLatestDiagnosticBatchIndex", json!(null)); + result.as_u64().unwrap() as usize + } + + /// Reads the latest diagnostics. 
It's assumed that pub fn read_diagnostics(&mut self) -> CollectedDiagnostics { - let mut all_diagnostics = Vec::new(); - for _ in 0..self.diagnosable_open_file_count { - all_diagnostics.extend(read_diagnostics(self).0); + // ask the server what the latest diagnostic batch index is + let latest_diagnostic_batch_index = + self.get_latest_diagnostic_batch_index(); + + // now wait for three (deno, lint, and typescript diagnostics) batch + // notification messages for that index + let mut read = 0; + let mut total_messages_len = 0; + while read < 3 { + let (method, response) = + self.read_notification::(); + assert_eq!(method, "deno/internalTestDiagnosticBatch"); + let response = response.unwrap(); + if response.batch_index == latest_diagnostic_batch_index { + read += 1; + total_messages_len += response.messages_len; + } } + + // now read the latest diagnostic messages + let mut all_diagnostics = Vec::with_capacity(total_messages_len); + let mut seen_files = HashSet::new(); + for _ in 0..total_messages_len { + let (method, response) = + self.read_latest_notification::(); + assert_eq!(method, "textDocument/publishDiagnostics"); + let response = response.unwrap(); + if seen_files.insert(response.uri.to_string()) { + all_diagnostics.push(response); + } + } + CollectedDiagnostics(all_diagnostics) } @@ -662,6 +726,19 @@ impl LspClient { }) } + pub fn read_latest_notification(&mut self) -> (String, Option) + where + R: de::DeserializeOwned, + { + self.reader.read_latest_message(|msg| match msg { + LspMessage::Notification(method, maybe_params) => { + let params = serde_json::from_value(maybe_params.clone()?).ok()?; + Some((method.to_string(), params)) + } + _ => None, + }) + } + pub fn read_notification_with_method( &mut self, expected_method: &str, @@ -813,35 +890,29 @@ impl LspClient { } #[derive(Debug, Clone)] -pub struct CollectedDiagnostics(pub Vec); +pub struct CollectedDiagnostics(Vec); impl CollectedDiagnostics { /// Gets the diagnostics that the editor will see after 
all the publishes. - pub fn viewed(&self) -> Vec { + pub fn all(&self) -> Vec { self - .viewed_messages() + .all_messages() .into_iter() .flat_map(|m| m.diagnostics) .collect() } /// Gets the messages that the editor will see after all the publishes. - pub fn viewed_messages(&self) -> Vec { - // go over the publishes in reverse order in order to get - // the final messages that will be shown in the editor - let mut messages = Vec::new(); - let mut had_specifier = HashSet::new(); - for message in self.0.iter().rev() { - if had_specifier.insert(message.uri.clone()) { - messages.insert(0, message.clone()); - } - } - messages + pub fn all_messages(&self) -> Vec { + self.0.clone() } - pub fn with_source(&self, source: &str) -> lsp::PublishDiagnosticsParams { + pub fn messages_with_source( + &self, + source: &str, + ) -> lsp::PublishDiagnosticsParams { self - .viewed_messages() + .all_messages() .iter() .find(|p| { p.diagnostics @@ -852,14 +923,14 @@ impl CollectedDiagnostics { .unwrap() } - pub fn with_file_and_source( + pub fn messages_with_file_and_source( &self, specifier: &str, source: &str, ) -> lsp::PublishDiagnosticsParams { let specifier = Url::parse(specifier).unwrap(); self - .viewed_messages() + .all_messages() .iter() .find(|p| { p.uri == specifier @@ -873,18 +944,6 @@ impl CollectedDiagnostics { } } -fn read_diagnostics(client: &mut LspClient) -> CollectedDiagnostics { - // diagnostics come in batches of three unless they're cancelled - let mut diagnostics = vec![]; - for _ in 0..3 { - let (method, response) = - client.read_notification::(); - assert_eq!(method, "textDocument/publishDiagnostics"); - diagnostics.push(response.unwrap()); - } - CollectedDiagnostics(diagnostics) -} - #[cfg(test)] mod tests { use super::*; diff --git a/test_util/src/npm.rs b/test_util/src/npm.rs index 9600b0bce5..18949ad44a 100644 --- a/test_util/src/npm.rs +++ b/test_util/src/npm.rs @@ -136,6 +136,30 @@ fn get_npm_package(package_name: &str) -> Result> { let mut version_info: 
serde_json::Map = serde_json::from_str(&package_json_text)?; version_info.insert("dist".to_string(), dist.into()); + + if let Some(maybe_optional_deps) = version_info.get("optionalDependencies") + { + if let Some(optional_deps) = maybe_optional_deps.as_object() { + if let Some(maybe_deps) = version_info.get("dependencies") { + if let Some(deps) = maybe_deps.as_object() { + let mut cloned_deps = deps.to_owned(); + for (key, value) in optional_deps { + cloned_deps.insert(key.to_string(), value.to_owned()); + } + version_info.insert( + "dependencies".to_string(), + serde_json::to_value(cloned_deps).unwrap(), + ); + } + } else { + version_info.insert( + "dependencies".to_string(), + serde_json::to_value(optional_deps).unwrap(), + ); + } + } + } + versions.insert(version.clone(), version_info.into()); let version = semver::Version::parse(&version)?; if version.cmp(&latest_version).is_gt() { diff --git a/test_util/src/temp_dir.rs b/test_util/src/temp_dir.rs index db3c246dc5..f66bf1398b 100644 --- a/test_util/src/temp_dir.rs +++ b/test_util/src/temp_dir.rs @@ -58,6 +58,14 @@ impl TempDir { fs::create_dir_all(self.path().join(path)).unwrap(); } + pub fn remove_file(&self, path: impl AsRef) { + fs::remove_file(self.path().join(path)).unwrap(); + } + + pub fn remove_dir_all(&self, path: impl AsRef) { + fs::remove_dir_all(self.path().join(path)).unwrap(); + } + pub fn read_to_string(&self, path: impl AsRef) -> String { let file_path = self.path().join(path); fs::read_to_string(&file_path) @@ -72,4 +80,40 @@ impl TempDir { pub fn write(&self, path: impl AsRef, text: impl AsRef) { fs::write(self.path().join(path), text.as_ref()).unwrap(); } + + pub fn symlink_dir( + &self, + oldpath: impl AsRef, + newpath: impl AsRef, + ) { + #[cfg(unix)] + { + use std::os::unix::fs::symlink; + symlink(self.path().join(oldpath), self.path().join(newpath)).unwrap(); + } + #[cfg(not(unix))] + { + use std::os::windows::fs::symlink_dir; + symlink_dir(self.path().join(oldpath), 
self.path().join(newpath)) + .unwrap(); + } + } + + pub fn symlink_file( + &self, + oldpath: impl AsRef, + newpath: impl AsRef, + ) { + #[cfg(unix)] + { + use std::os::unix::fs::symlink; + symlink(self.path().join(oldpath), self.path().join(newpath)).unwrap(); + } + #[cfg(not(unix))] + { + use std::os::windows::fs::symlink_file; + symlink_file(self.path().join(oldpath), self.path().join(newpath)) + .unwrap(); + } + } } diff --git a/third_party b/third_party index fef5eaa2e3..fd270b7927 160000 --- a/third_party +++ b/third_party @@ -1 +1 @@ -Subproject commit fef5eaa2e364db431cfbf8089afdd81f71fd46d2 +Subproject commit fd270b79276bb2bed365f3fb2e4ba6acaff3234b diff --git a/tools/build_bench.ts b/tools/build_bench.ts new file mode 100755 index 0000000000..dbbe029677 --- /dev/null +++ b/tools/build_bench.ts @@ -0,0 +1,136 @@ +#!/usr/bin/env -S deno run --unstable --allow-env --allow-read --allow-write --allow-run +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import $ from "https://deno.land/x/dax@0.32.0/mod.ts"; + +if (Deno.args.length === 0) { + $.log( + "Usage: build_bench [-v] [--profile release|debug] commit1 [commit2 [comment3...]]", + ); + Deno.exit(1); +} + +const args = Deno.args.slice(); +let verbose = false; +if (args[0] == "-v") { + args.shift(); + verbose = true; +} + +let profile = "release"; +if (args[0] == "--profile") { + args.shift(); + profile = args.shift(); +} + +function exit(msg: string) { + $.logError(msg); + Deno.exit(1); +} + +// Make sure the .git dir exists +const gitDir = Deno.cwd() + "/.git"; +await Deno.stat(gitDir); + +async function runCommand(human: string, cmd) { + if (verbose) { + const out = await cmd.noThrow(); + if (out.code != 0) { + exit(human); + } + } else { + const out = await cmd.stdout("piped").stderr("piped").noThrow(); + if (out.code != 0) { + $.logLight("stdout"); + $.logGroup(); + $.log(out.stdout); + $.logGroupEnd(); + $.logLight("stderr"); + $.logGroup(); + $.log(out.stderr); + 
$.logGroupEnd(); + exit(human); + } + } +} + +async function buildGitCommit(progress, commit) { + const tempDir = $.path(await Deno.makeTempDir()); + + const gitInfo = + await $`git log --pretty=oneline --abbrev-commit -n1 ${commit}`.stdout( + "piped", + ).stderr("piped").noThrow(); + if (gitInfo.code != 0) { + $.log(gitInfo.stdout); + $.log(gitInfo.stderr); + exit(`Failed to get git info for commit ${commit}`); + } + + const hash = gitInfo.stdout.split(" ")[0]; + progress.message(`${commit} is ${hash}`); + + progress.message(`clone ${hash}`); + await runCommand( + `Failed to clone commit ${commit}`, + $`git clone ${gitDir} ${tempDir}`, + ); + + progress.message(`reset ${hash}`); + await runCommand( + `Failed to reset commit ${commit}`, + $`git reset --hard ${hash}`.cwd(tempDir), + ); + + progress.message(`build ${hash} (please wait)`); + const now = Date.now(); + const interval = setInterval(() => { + const elapsed = Math.round((Date.now() - now) / 1000); + progress.message(`build ${hash} (${elapsed}s)`); + }, 100); + try { + if (profile === "debug") { + await runCommand( + `Failed to build commit ${commit}`, + $`cargo build`.cwd(tempDir), + ); + } else { + await runCommand( + `Failed to build commit ${commit}`, + $`cargo build --profile ${profile}`.cwd(tempDir), + ); + } + } finally { + clearInterval(interval); + } + const elapsed = Math.round((Date.now() - now) / 1000); + + let file; + if (profile === "release") { + file = `deno-${hash}`; + } else { + file = `deno-${profile}-${hash}`; + } + progress.message(`copy ${hash}`); + await tempDir.join("target").join(profile).join("deno").copyFile(file); + + progress.message(`cleanup ${hash}`); + await tempDir.remove({ recursive: true }); + + progress.message("done"); + $.log(`Built ./${file} (${commit}) in ${elapsed}s: ${gitInfo.stdout}`); +} + +const promises = []; +for (const arg of args) { + if (verbose) { + promises.push(buildGitCommit({ message() {} }, arg)); + } else { + const progress = $.progress(`${arg}`); + 
promises.push(progress.with(async () => { + await buildGitCommit(progress, arg); + })); + } +} + +await Promise.all(promises); diff --git a/tools/core_import_map.json b/tools/core_import_map.json new file mode 100644 index 0000000000..c4c4b99f2a --- /dev/null +++ b/tools/core_import_map.json @@ -0,0 +1,244 @@ +{ + "imports": { + "ext:deno_broadcast_channel/01_broadcast_channel.js": "../ext/broadcast_channel/01_broadcast_channel.js", + "ext:deno_cache/01_cache.js": "../ext/cache/01_cache.js", + "ext:deno_console/01_console.js": "../ext/console/01_console.js", + "ext:deno_crypto/00_crypto.js": "../ext/crypto/00_crypto.js", + "ext:deno_fetch/20_headers.js": "../ext/fetch/20_headers.js", + "ext:deno_fetch/21_formdata.js": "../ext/fetch/21_formdata.js", + "ext:deno_fetch/22_body.js": "../ext/fetch/22_body.js", + "ext:deno_fetch/22_http_client.js": "../ext/fetch/22_http_client.js", + "ext:deno_fetch/23_request.js": "../ext/fetch/23_request.js", + "ext:deno_fetch/23_response.js": "../ext/fetch/23_response.js", + "ext:deno_fetch/26_fetch.js": "../ext/fetch/26_fetch.js", + "ext:deno_ffi/00_ffi.js": "../ext/ffi/00_ffi.js", + "ext:deno_fs/30_fs.js": "../ext/fs/30_fs.js", + "ext:deno_http/00_serve.js": "../ext/http/00_serve.js", + "ext:deno_http/01_http.js": "../ext/http/01_http.js", + "ext:deno_io/12_io.js": "../ext/io/12_io.js", + "ext:deno_kv/01_db.ts": "../ext/kv/01_db.ts", + "ext:deno_net/01_net.js": "../ext/net/01_net.js", + "ext:deno_net/02_tls.js": "../ext/net/02_tls.js", + "ext:deno_node/_events.d.ts": "../ext/node/polyfills/_events.d.ts", + "ext:deno_node/_fs/_fs_close.ts": "../ext/node/polyfills/_fs/_fs_close.ts", + "ext:deno_node/_fs/_fs_common.ts": "../ext/node/polyfills/_fs/_fs_common.ts", + "ext:deno_node/_fs/_fs_constants.ts": "../ext/node/polyfills/_fs/_fs_constants.ts", + "ext:deno_node/_fs/_fs_dir.ts": "../ext/node/polyfills/_fs/_fs_dir.ts", + "ext:deno_node/_fs/_fs_dirent.ts": "../ext/node/polyfills/_fs/_fs_dirent.ts", + "ext:deno_node/_fs/_fs_exists.ts": 
"../ext/node/polyfills/_fs/_fs_exists.ts", + "ext:deno_node/_fs/_fs_lstat.ts": "../ext/node/polyfills/_fs/_fs_lstat.ts", + "ext:deno_node/_fs/_fs_mkdir.ts": "../ext/node/polyfills/_fs/_fs_mkdir.ts", + "ext:deno_node/_fs/_fs_open.ts": "../ext/node/polyfills/_fs/_fs_open.ts", + "ext:deno_node/_fs/_fs_read.ts": "../ext/node/polyfills/_fs/_fs_read.ts", + "ext:deno_node/_fs/_fs_stat.ts": "../ext/node/polyfills/_fs/_fs_stat.ts", + "ext:deno_node/_fs/_fs_watch.ts": "../ext/node/polyfills/_fs/_fs_watch.ts", + "ext:deno_node/_fs/_fs_write.mjs": "../ext/node/polyfills/_fs/_fs_write.mjs", + "ext:deno_node/_fs/_fs_writev.mjs": "../ext/node/polyfills/_fs/_fs_writev.mjs", + "ext:deno_node/_global.d.ts": "../ext/node/polyfills/_global.d.ts", + "ext:deno_node/_http_agent.mjs": "../ext/node/polyfills/_http_agent.mjs", + "ext:deno_node/_http_common.ts": "../ext/node/polyfills/_http_common.ts", + "ext:deno_node/_http_outgoing.ts": "../ext/node/polyfills/_http_outgoing.ts", + "ext:deno_node/_next_tick.ts": "../ext/node/polyfills/_next_tick.ts", + "ext:deno_node/_process/exiting.ts": "../ext/node/polyfills/_process/exiting.ts", + "ext:deno_node/_process/process.ts": "../ext/node/polyfills/_process/process.ts", + "ext:deno_node/_process/streams.mjs": "../ext/node/polyfills/_process/streams.mjs", + "ext:deno_node/_readline_shared_types.d.ts": "../ext/node/polyfills/_readline_shared_types.d.ts", + "ext:deno_node/_stream.d.ts": "../ext/node/polyfills/_stream.d.ts", + "ext:deno_node/_stream.mjs": "../ext/node/polyfills/_stream.mjs", + "ext:deno_node/_tls_common.ts": "../ext/node/polyfills/_tls_common.ts", + "ext:deno_node/_util/asserts.ts": "../ext/node/polyfills/_util/asserts.ts", + "ext:deno_node/_util/async.ts": "../ext/node/polyfills/_util/async.ts", + "ext:deno_node/_util/os.ts": "../ext/node/polyfills/_util/os.ts", + "ext:deno_node/_utils.ts": "../ext/node/polyfills/_utils.ts", + "ext:deno_node/_zlib_binding.mjs": "../ext/node/polyfills/_zlib_binding.mjs", + 
"ext:deno_node/00_globals.js": "../ext/node/polyfills/00_globals.js", + "ext:deno_node/01_require.js": "../ext/node/polyfills/01_require.js", + "ext:deno_node/assert.ts": "../ext/node/polyfills/assert.ts", + "ext:deno_node/assert/strict.ts": "../ext/node/polyfills/assert/strict.ts", + "ext:deno_node/async_hooks.ts": "../ext/node/polyfills/async_hooks.ts", + "ext:deno_node/buffer.ts": "../ext/node/polyfills/buffer.ts", + "ext:deno_node/child_process.ts": "../ext/node/polyfills/child_process.ts", + "ext:deno_node/cluster.ts": "../ext/node/polyfills/cluster.ts", + "ext:deno_node/console.ts": "../ext/node/polyfills/console.ts", + "ext:deno_node/constants.ts": "../ext/node/polyfills/constants.ts", + "ext:deno_node/crypto.ts": "../ext/node/polyfills/crypto.ts", + "ext:deno_node/dgram.ts": "../ext/node/polyfills/dgram.ts", + "ext:deno_node/diagnostics_channel.ts": "../ext/node/polyfills/diagnostics_channel.ts", + "ext:deno_node/dns.ts": "../ext/node/polyfills/dns.ts", + "ext:deno_node/dns/promises.ts": "../ext/node/polyfills/dns/promises.ts", + "ext:deno_node/domain.ts": "../ext/node/polyfills/domain.ts", + "ext:deno_node/events.ts": "../ext/node/polyfills/events.ts", + "ext:deno_node/fs.ts": "../ext/node/polyfills/fs.ts", + "ext:deno_node/fs/promises.ts": "../ext/node/polyfills/fs/promises.ts", + "ext:deno_node/http.ts": "../ext/node/polyfills/http.ts", + "ext:deno_node/http2.ts": "../ext/node/polyfills/http2.ts", + "ext:deno_node/https.ts": "../ext/node/polyfills/https.ts", + "ext:deno_node/inspector.ts": "../ext/node/polyfills/inspector.ts", + "ext:deno_node/internal_binding/_libuv_winerror.ts": "../ext/node/polyfills/internal_binding/_libuv_winerror.ts", + "ext:deno_node/internal_binding/_listen.ts": "../ext/node/polyfills/internal_binding/_listen.ts", + "ext:deno_node/internal_binding/_node.ts": "../ext/node/polyfills/internal_binding/_node.ts", + "ext:deno_node/internal_binding/_timingSafeEqual.ts": "../ext/node/polyfills/internal_binding/_timingSafeEqual.ts", + 
"ext:deno_node/internal_binding/_utils.ts": "../ext/node/polyfills/internal_binding/_utils.ts", + "ext:deno_node/internal_binding/ares.ts": "../ext/node/polyfills/internal_binding/ares.ts", + "ext:deno_node/internal_binding/async_wrap.ts": "../ext/node/polyfills/internal_binding/async_wrap.ts", + "ext:deno_node/internal_binding/buffer.ts": "../ext/node/polyfills/internal_binding/buffer.ts", + "ext:deno_node/internal_binding/cares_wrap.ts": "../ext/node/polyfills/internal_binding/cares_wrap.ts", + "ext:deno_node/internal_binding/connection_wrap.ts": "../ext/node/polyfills/internal_binding/connection_wrap.ts", + "ext:deno_node/internal_binding/constants.ts": "../ext/node/polyfills/internal_binding/constants.ts", + "ext:deno_node/internal_binding/crypto.ts": "../ext/node/polyfills/internal_binding/crypto.ts", + "ext:deno_node/internal_binding/handle_wrap.ts": "../ext/node/polyfills/internal_binding/handle_wrap.ts", + "ext:deno_node/internal_binding/mod.ts": "../ext/node/polyfills/internal_binding/mod.ts", + "ext:deno_node/internal_binding/pipe_wrap.ts": "../ext/node/polyfills/internal_binding/pipe_wrap.ts", + "ext:deno_node/internal_binding/stream_wrap.ts": "../ext/node/polyfills/internal_binding/stream_wrap.ts", + "ext:deno_node/internal_binding/string_decoder.ts": "../ext/node/polyfills/internal_binding/string_decoder.ts", + "ext:deno_node/internal_binding/symbols.ts": "../ext/node/polyfills/internal_binding/symbols.ts", + "ext:deno_node/internal_binding/tcp_wrap.ts": "../ext/node/polyfills/internal_binding/tcp_wrap.ts", + "ext:deno_node/internal_binding/types.ts": "../ext/node/polyfills/internal_binding/types.ts", + "ext:deno_node/internal_binding/udp_wrap.ts": "../ext/node/polyfills/internal_binding/udp_wrap.ts", + "ext:deno_node/internal_binding/util.ts": "../ext/node/polyfills/internal_binding/util.ts", + "ext:deno_node/internal_binding/uv.ts": "../ext/node/polyfills/internal_binding/uv.ts", + "ext:deno_node/internal/assert.mjs": 
"../ext/node/polyfills/internal/assert.mjs", + "ext:deno_node/internal/async_hooks.ts": "../ext/node/polyfills/internal/async_hooks.ts", + "ext:deno_node/internal/buffer.mjs": "../ext/node/polyfills/internal/buffer.mjs", + "ext:deno_node/internal/child_process.ts": "../ext/node/polyfills/internal/child_process.ts", + "ext:deno_node/internal/cli_table.ts": "../ext/node/polyfills/internal/cli_table.ts", + "ext:deno_node/internal/constants.ts": "../ext/node/polyfills/internal/constants.ts", + "ext:deno_node/internal/crypto/_keys.ts": "../ext/node/polyfills/internal/crypto/_keys.ts", + "ext:deno_node/internal/crypto/_randomBytes.ts": "../ext/node/polyfills/internal/crypto/_randomBytes.ts", + "ext:deno_node/internal/crypto/_randomFill.ts": "../ext/node/polyfills/internal/crypto/_randomFill.ts", + "ext:deno_node/internal/crypto/_randomInt.ts": "../ext/node/polyfills/internal/crypto/_randomInt.ts", + "ext:deno_node/internal/crypto/certificate.ts": "../ext/node/polyfills/internal/crypto/certificate.ts", + "ext:deno_node/internal/crypto/cipher.ts": "../ext/node/polyfills/internal/crypto/cipher.ts", + "ext:deno_node/internal/crypto/constants.ts": "../ext/node/polyfills/internal/crypto/constants.ts", + "ext:deno_node/internal/crypto/diffiehellman.ts": "../ext/node/polyfills/internal/crypto/diffiehellman.ts", + "ext:deno_node/internal/crypto/hash.ts": "../ext/node/polyfills/internal/crypto/hash.ts", + "ext:deno_node/internal/crypto/hkdf.ts": "../ext/node/polyfills/internal/crypto/hkdf.ts", + "ext:deno_node/internal/crypto/keygen.ts": "../ext/node/polyfills/internal/crypto/keygen.ts", + "ext:deno_node/internal/crypto/keys.ts": "../ext/node/polyfills/internal/crypto/keys.ts", + "ext:deno_node/internal/crypto/pbkdf2.ts": "../ext/node/polyfills/internal/crypto/pbkdf2.ts", + "ext:deno_node/internal/crypto/random.ts": "../ext/node/polyfills/internal/crypto/random.ts", + "ext:deno_node/internal/crypto/scrypt.ts": "../ext/node/polyfills/internal/crypto/scrypt.ts", + 
"ext:deno_node/internal/crypto/sig.ts": "../ext/node/polyfills/internal/crypto/sig.ts", + "ext:deno_node/internal/crypto/types.ts": "../ext/node/polyfills/internal/crypto/types.ts", + "ext:deno_node/internal/crypto/util.ts": "../ext/node/polyfills/internal/crypto/util.ts", + "ext:deno_node/internal/crypto/x509.ts": "../ext/node/polyfills/internal/crypto/x509.ts", + "ext:deno_node/internal/dgram.ts": "../ext/node/polyfills/internal/dgram.ts", + "ext:deno_node/internal/dns/promises.ts": "../ext/node/polyfills/internal/dns/promises.ts", + "ext:deno_node/internal/dns/utils.ts": "../ext/node/polyfills/internal/dns/utils.ts", + "ext:deno_node/internal/error_codes.ts": "../ext/node/polyfills/internal/error_codes.ts", + "ext:deno_node/internal/errors.ts": "../ext/node/polyfills/internal/errors.ts", + "ext:deno_node/internal/event_target.mjs": "../ext/node/polyfills/internal/event_target.mjs", + "ext:deno_node/internal/fixed_queue.ts": "../ext/node/polyfills/internal/fixed_queue.ts", + "ext:deno_node/internal/fs/utils.mjs": "../ext/node/polyfills/internal/fs/utils.mjs", + "ext:deno_node/internal/hide_stack_frames.ts": "../ext/node/polyfills/internal/hide_stack_frames.ts", + "ext:deno_node/internal/http.ts": "../ext/node/polyfills/internal/http.ts", + "ext:deno_node/internal/net.ts": "../ext/node/polyfills/internal/net.ts", + "ext:deno_node/internal/normalize_encoding.mjs": "../ext/node/polyfills/internal/normalize_encoding.mjs", + "ext:deno_node/internal/options.ts": "../ext/node/polyfills/internal/options.ts", + "ext:deno_node/internal/primordials.mjs": "../ext/node/polyfills/internal/primordials.mjs", + "ext:deno_node/internal/process/per_thread.mjs": "../ext/node/polyfills/internal/process/per_thread.mjs", + "ext:deno_node/internal/querystring.ts": "../ext/node/polyfills/internal/querystring.ts", + "ext:deno_node/internal/readline/callbacks.mjs": "../ext/node/polyfills/internal/readline/callbacks.mjs", + "ext:deno_node/internal/readline/emitKeypressEvents.mjs": 
"../ext/node/polyfills/internal/readline/emitKeypressEvents.mjs", + "ext:deno_node/internal/readline/interface.mjs": "../ext/node/polyfills/internal/readline/interface.mjs", + "ext:deno_node/internal/readline/promises.mjs": "../ext/node/polyfills/internal/readline/promises.mjs", + "ext:deno_node/internal/readline/symbols.mjs": "../ext/node/polyfills/internal/readline/symbols.mjs", + "ext:deno_node/internal/readline/utils.mjs": "../ext/node/polyfills/internal/readline/utils.mjs", + "ext:deno_node/internal/streams/add-abort-signal.mjs": "../ext/node/polyfills/internal/streams/add-abort-signal.mjs", + "ext:deno_node/internal/streams/buffer_list.mjs": "../ext/node/polyfills/internal/streams/buffer_list.mjs", + "ext:deno_node/internal/streams/destroy.mjs": "../ext/node/polyfills/internal/streams/destroy.mjs", + "ext:deno_node/internal/streams/duplex.mjs": "../ext/node/polyfills/internal/streams/duplex.mjs", + "ext:deno_node/internal/streams/end-of-stream.mjs": "../ext/node/polyfills/internal/streams/end-of-stream.mjs", + "ext:deno_node/internal/streams/lazy_transform.mjs": "../ext/node/polyfills/internal/streams/lazy_transform.mjs", + "ext:deno_node/internal/streams/passthrough.mjs": "../ext/node/polyfills/internal/streams/passthrough.mjs", + "ext:deno_node/internal/streams/readable.mjs": "../ext/node/polyfills/internal/streams/readable.mjs", + "ext:deno_node/internal/streams/state.mjs": "../ext/node/polyfills/internal/streams/state.mjs", + "ext:deno_node/internal/streams/transform.mjs": "../ext/node/polyfills/internal/streams/transform.mjs", + "ext:deno_node/internal/streams/utils.mjs": "../ext/node/polyfills/internal/streams/utils.mjs", + "ext:deno_node/internal/streams/writable.mjs": "../ext/node/polyfills/internal/streams/writable.mjs", + "ext:deno_node/internal/test/binding.ts": "../ext/node/polyfills/internal/test/binding.ts", + "ext:deno_node/internal/timers.mjs": "../ext/node/polyfills/internal/timers.mjs", + "ext:deno_node/internal/url.ts": 
"../ext/node/polyfills/internal/url.ts", + "ext:deno_node/internal/util.mjs": "../ext/node/polyfills/internal/util.mjs", + "ext:deno_node/internal/util/debuglog.ts": "../ext/node/polyfills/internal/util/debuglog.ts", + "ext:deno_node/internal/util/inspect.mjs": "../ext/node/polyfills/internal/util/inspect.mjs", + "ext:deno_node/internal/util/types.ts": "../ext/node/polyfills/internal/util/types.ts", + "ext:deno_node/internal/validators.mjs": "../ext/node/polyfills/internal/validators.mjs", + "ext:deno_node/net.ts": "../ext/node/polyfills/net.ts", + "ext:deno_node/os.ts": "../ext/node/polyfills/os.ts", + "ext:deno_node/path.ts": "../ext/node/polyfills/path.ts", + "ext:deno_node/path/_constants.ts": "../ext/node/polyfills/path/_constants.ts", + "ext:deno_node/path/_interface.ts": "../ext/node/polyfills/path/_interface.ts", + "ext:deno_node/path/_posix.ts": "../ext/node/polyfills/path/_posix.ts", + "ext:deno_node/path/_util.ts": "../ext/node/polyfills/path/_util.ts", + "ext:deno_node/path/_win32.ts": "../ext/node/polyfills/path/_win32.ts", + "ext:deno_node/path/mod.ts": "../ext/node/polyfills/path/mod.ts", + "ext:deno_node/path/posix.ts": "../ext/node/polyfills/path/posix.ts", + "ext:deno_node/path/separator.ts": "../ext/node/polyfills/path/separator.ts", + "ext:deno_node/path/win32.ts": "../ext/node/polyfills/path/win32.ts", + "ext:deno_node/perf_hooks.ts": "../ext/node/polyfills/perf_hooks.ts", + "ext:deno_node/process.ts": "../ext/node/polyfills/process.ts", + "ext:deno_node/punycode.ts": "../ext/node/polyfills/punycode.ts", + "ext:deno_node/querystring.ts": "../ext/node/polyfills/querystring.ts", + "ext:deno_node/readline.ts": "../ext/node/polyfills/readline.ts", + "ext:deno_node/readline/promises.ts": "../ext/node/polyfills/readline/promises.ts", + "ext:deno_node/repl.ts": "../ext/node/polyfills/repl.ts", + "ext:deno_node/stream.ts": "../ext/node/polyfills/stream.ts", + "ext:deno_node/stream/consumers.mjs": "../ext/node/polyfills/stream/consumers.mjs", + 
"ext:deno_node/stream/promises.mjs": "../ext/node/polyfills/stream/promises.mjs", + "ext:deno_node/stream/web.ts": "../ext/node/polyfills/stream/web.ts", + "ext:deno_node/string_decoder.ts": "../ext/node/polyfills/string_decoder.ts", + "ext:deno_node/sys.ts": "../ext/node/polyfills/sys.ts", + "ext:deno_node/timers.ts": "../ext/node/polyfills/timers.ts", + "ext:deno_node/timers/promises.ts": "../ext/node/polyfills/timers/promises.ts", + "ext:deno_node/tls.ts": "../ext/node/polyfills/tls.ts", + "ext:deno_node/tty.ts": "../ext/node/polyfills/tty.ts", + "ext:deno_node/url.ts": "../ext/node/polyfills/url.ts", + "ext:deno_node/util.ts": "../ext/node/polyfills/util.ts", + "ext:deno_node/util/types.ts": "../ext/node/polyfills/util/types.ts", + "ext:deno_node/v8.ts": "../ext/node/polyfills/v8.ts", + "ext:deno_node/vm.ts": "../ext/node/polyfills/vm.ts", + "ext:deno_node/wasi.ts": "../ext/node/polyfills/wasi.ts", + "ext:deno_node/worker_threads.ts": "../ext/node/polyfills/worker_threads.ts", + "ext:deno_node/zlib.ts": "../ext/node/polyfills/zlib.ts", + "ext:deno_url/00_url.js": "../ext/url/00_url.js", + "ext:deno_url/01_urlpattern.js": "../ext/url/01_urlpattern.js", + "ext:deno_web/00_infra.js": "../ext/web/00_infra.js", + "ext:deno_web/01_dom_exception.js": "../ext/web/01_dom_exception.js", + "ext:deno_web/01_mimesniff.js": "../ext/web/01_mimesniff.js", + "ext:deno_web/02_event.js": "../ext/web/02_event.js", + "ext:deno_web/02_structured_clone.js": "../ext/web/02_structured_clone.js", + "ext:deno_web/02_timers.js": "../ext/web/02_timers.js", + "ext:deno_web/03_abort_signal.js": "../ext/web/03_abort_signal.js", + "ext:deno_web/04_global_interfaces.js": "../ext/web/04_global_interfaces.js", + "ext:deno_web/05_base64.js": "../ext/web/05_base64.js", + "ext:deno_web/06_streams.js": "../ext/web/06_streams.js", + "ext:deno_web/08_text_encoding.js": "../ext/web/08_text_encoding.js", + "ext:deno_web/09_file.js": "../ext/web/09_file.js", + "ext:deno_web/10_filereader.js": 
"../ext/web/10_filereader.js", + "ext:deno_web/12_location.js": "../ext/web/12_location.js", + "ext:deno_web/13_message_port.js": "../ext/web/13_message_port.js", + "ext:deno_web/14_compression.js": "../ext/web/14_compression.js", + "ext:deno_web/15_performance.js": "../ext/web/15_performance.js", + "ext:deno_webidl/00_webidl.js": "../ext/webidl/00_webidl.js", + "ext:deno_websocket/01_websocket.js": "../ext/websocket/01_websocket.js", + "ext:deno_websocket/02_websocketstream.js": "../ext/websocket/02_websocketstream.js", + "ext:deno_webstorage/01_webstorage.js": "../ext/webstorage/01_webstorage.js", + "ext:runtime/01_errors.js": "../runtime/js/01_errors.js", + "ext:runtime/01_version.ts": "../runtime/js/01_version.ts", + "ext:runtime/06_util.js": "../runtime/js/06_util.js", + "ext:runtime/10_permissions.js": "../runtime/js/10_permissions.js", + "ext:runtime/11_workers.js": "../runtime/js/11_workers.js", + "ext:runtime/13_buffer.js": "../runtime/js/13_buffer.js", + "ext:runtime/30_os.js": "../runtime/js/30_os.js", + "ext:runtime/40_fs_events.js": "../runtime/js/40_fs_events.js", + "ext:runtime/40_http.js": "../runtime/js/40_http.js", + "ext:runtime/40_process.js": "../runtime/js/40_process.js", + "ext:runtime/40_signals.js": "../runtime/js/40_signals.js", + "ext:runtime/40_tty.js": "../runtime/js/40_tty.js", + "ext:runtime/41_prompt.js": "../runtime/js/41_prompt.js", + "ext:runtime/90_deno_ns.js": "../runtime/js/90_deno_ns.js", + "ext:runtime/98_global_scope.js": "../runtime/js/98_global_scope.js", + "ext:deno_node/_util/std_fmt_colors.ts": "../ext/node/polyfills/_util/std_fmt_colors.ts" + } +} diff --git a/tools/lint.js b/tools/lint.js index bdaa015621..c88b0e933c 100755 --- a/tools/lint.js +++ b/tools/lint.js @@ -55,6 +55,7 @@ async function dlint() { ":!:cli/tests/testdata/lint/**", ":!:cli/tests/testdata/run/**", ":!:cli/tests/testdata/tsc/**", + ":!:cli/tests/testdata/test/glob/**", ":!:cli/tsc/*typescript.js", ":!:cli/tsc/compiler.d.ts", ":!:test_util/wpt/**", 
diff --git a/tools/node_compat/README.md b/tools/node_compat/README.md index 08fcc077b4..29a7ef7399 100644 --- a/tools/node_compat/README.md +++ b/tools/node_compat/README.md @@ -11,7 +11,7 @@ Node.js compat testing in Deno repository. - This script sets up the Node.js compat tests. - `//tools/node_comapt/versions/` - Node.js source tarballs and extracted test cases are stored here. -- `//cli/tests/node_compat/config.json` +- `//cli/tests/node_compat/config.jsonc` - This json file stores the settings about which Node.js compat test to run with Deno. - `//cli/tests/node_compat/test` @@ -19,24 +19,33 @@ Node.js compat testing in Deno repository. ## Steps to add new test cases from Node.js test cases -1. Update `tests` property of `//cli/tests/node_compat/config.json`. For - example, if you want to add `test/paralles/test-foo.js` from Node.js test +1. Update `tests` property of `//cli/tests/node_compat/config.jsonc`. For + example, if you want to add `test/parallel/test-foo.js` from Node.js test cases, then add `test-foo.js` entry in `tests.parallel` array property in - `config.json` -1. Run `./tools/node_compat/setup.ts` + `config.jsonc` +1. Run `deno task setup` in `tools/node_compat` dir. The above command copies the updated items from Node.js tarball to the Deno source tree. Ideally Deno should pass the Node.js compat tests without modification, but if -you need to modify it, then add that item in `ignore` perperty of `config.json`. -Then `setup.ts` doesn't overwrite the modified Node.js test cases anymore. +you need to modify it, then add that item in `ignore` perperty of +`config.jsonc`. Then `setup.ts` doesn't overwrite the modified Node.js test +cases anymore. If the test needs to be ignored in particular platform, then add them in -`${platform}Ignore` property of `config.json` +`${platform}Ignore` property of `config.jsonc` ## Run Node.js test cases Node.js compat tests are run as part of `cargo test` command. 
If you want to run only the Node.js compat test cases you can use the command -`cargo test node_compat`. +`cargo test node_compat`. If you want to run specific tests you can use the +command `deno task test` (in `tools/node_comapt` dir). For example, if you want +to run all test files which contains `buffer` in filename you can use the +command: + +```shellsession +/path/to/deno/tools/node_compat +$ deno task test buffer +``` diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index ae29d72e59..cb0da2d8af 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -1,9 +1,9 @@ # Remaining Node Tests -NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. +NOTE: This file should not be manually edited. Please edit `cli/tests/node_compat/config.json` and run `deno task setup` in `tools/node_compat` dir instead. -Total: 2924 +Total: 2934 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -88,7 +88,6 @@ Total: 2924 - [internet/test-dns-cares-domains.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-dns-cares-domains.js) - [internet/test-dns-txt-sigsegv.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-dns-txt-sigsegv.js) - [internet/test-http-dns-fail.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-http-dns-fail.js) -- [internet/test-http-https-default-ports.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-http-https-default-ports.js) - [internet/test-http2-issue-32922.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-http2-issue-32922.js) - [internet/test-https-issue-43963.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-https-issue-43963.js) - 
[internet/test-inspector-help-page.js](https://github.com/nodejs/node/tree/v18.12.1/test/internet/test-inspector-help-page.js) @@ -281,6 +280,7 @@ Total: 2924 - [parallel/test-child-process-exec-encoding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-encoding.js) - [parallel/test-child-process-exec-std-encoding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-std-encoding.js) - [parallel/test-child-process-exec-timeout-expire.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-expire.js) +- [parallel/test-child-process-exec-timeout-kill.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-kill.js) - [parallel/test-child-process-exec-timeout-not-expired.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-not-expired.js) - [parallel/test-child-process-execFile-promisified-abortController.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-execFile-promisified-abortController.js) - [parallel/test-child-process-execfile.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-execfile.js) @@ -358,6 +358,7 @@ Total: 2924 - [parallel/test-cli-syntax-eval.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cli-syntax-eval.js) - [parallel/test-cli-syntax-piped-bad.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cli-syntax-piped-bad.js) - [parallel/test-cli-syntax-piped-good.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cli-syntax-piped-good.js) +- [parallel/test-client-request-destroy.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-client-request-destroy.js) - [parallel/test-cluster-accept-fail.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cluster-accept-fail.js) - 
[parallel/test-cluster-advanced-serialization.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cluster-advanced-serialization.js) - [parallel/test-cluster-basic.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-cluster-basic.js) @@ -473,9 +474,7 @@ Total: 2924 - [parallel/test-crypto-dh-modp2.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-modp2.js) - [parallel/test-crypto-dh-odd-key.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-odd-key.js) - [parallel/test-crypto-dh-padding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-padding.js) -- [parallel/test-crypto-dh-shared.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-shared.js) - [parallel/test-crypto-dh-stateless.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-stateless.js) -- [parallel/test-crypto-dh.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh.js) - [parallel/test-crypto-domain.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domain.js) - [parallel/test-crypto-domains.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domains.js) - [parallel/test-crypto-ecb.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-ecb.js) @@ -504,7 +503,6 @@ Total: 2924 - [parallel/test-crypto-scrypt.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-scrypt.js) - [parallel/test-crypto-secure-heap.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-secure-heap.js) - [parallel/test-crypto-sign-verify.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-sign-verify.js) -- [parallel/test-crypto-stream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-stream.js) - 
[parallel/test-crypto-subtle-zero-length.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-subtle-zero-length.js) - [parallel/test-crypto-verify-failure.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-verify-failure.js) - [parallel/test-crypto-webcrypto-aes-decrypt-tag-too-small.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-webcrypto-aes-decrypt-tag-too-small.js) @@ -1085,6 +1083,7 @@ Total: 2924 - [parallel/test-http-no-content-length.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-no-content-length.js) - [parallel/test-http-no-read-no-dump.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-no-read-no-dump.js) - [parallel/test-http-nodelay.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-nodelay.js) +- [parallel/test-http-outgoing-buffer.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-buffer.js) - [parallel/test-http-outgoing-destroy.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-destroy.js) - [parallel/test-http-outgoing-destroyed.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-destroyed.js) - [parallel/test-http-outgoing-end-cork.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-end-cork.js) @@ -1095,6 +1094,7 @@ Total: 2924 - [parallel/test-http-outgoing-finished.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-finished.js) - [parallel/test-http-outgoing-first-chunk-singlebyte-encoding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-first-chunk-singlebyte-encoding.js) - [parallel/test-http-outgoing-message-capture-rejection.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-message-capture-rejection.js) +- 
[parallel/test-http-outgoing-message-inheritance.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-message-inheritance.js) - [parallel/test-http-outgoing-message-write-callback.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-message-write-callback.js) - [parallel/test-http-outgoing-properties.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-properties.js) - [parallel/test-http-outgoing-proto.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-http-outgoing-proto.js) @@ -1815,6 +1815,7 @@ Total: 2924 - [parallel/test-process-env.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-env.js) - [parallel/test-process-euid-egid.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-euid-egid.js) - [parallel/test-process-exception-capture-errors.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-errors.js) +- [parallel/test-process-exception-capture-should-abort-on-uncaught-setflagsfromstring.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-should-abort-on-uncaught-setflagsfromstring.js) - [parallel/test-process-exception-capture-should-abort-on-uncaught.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-should-abort-on-uncaught.js) - [parallel/test-process-exception-capture.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture.js) - [parallel/test-process-exec-argv.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exec-argv.js) @@ -2372,6 +2373,7 @@ Total: 2924 - [parallel/test-unhandled-exception-rethrow-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-unhandled-exception-rethrow-error.js) - 
[parallel/test-unicode-node-options.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-unicode-node-options.js) - [parallel/test-url-null-char.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-url-null-char.js) +- [parallel/test-url-parse-format.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-url-parse-format.js) - [parallel/test-utf8-scripts.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-utf8-scripts.js) - [parallel/test-util-callbackify.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-util-callbackify.js) - [parallel/test-util-emit-experimental-warning.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-util-emit-experimental-warning.js) @@ -2496,10 +2498,13 @@ Total: 2924 - [parallel/test-webcrypto-wrap-unwrap.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webcrypto-wrap-unwrap.js) - [parallel/test-webstream-encoding-inspect.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webstream-encoding-inspect.js) - [parallel/test-webstream-readablestream-pipeto.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webstream-readablestream-pipeto.js) +- [parallel/test-whatwg-encoding-custom-fatal-streaming.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js) - [parallel/test-whatwg-encoding-custom-internals.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-internals.js) - [parallel/test-whatwg-encoding-custom-interop.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-interop.js) - [parallel/test-whatwg-encoding-custom-textdecoder-api-invalid-label.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-api-invalid-label.js) +- 
[parallel/test-whatwg-encoding-custom-textdecoder-fatal.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js) - [parallel/test-whatwg-encoding-custom-textdecoder-invalid-arg.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-invalid-arg.js) +- [parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js) - [parallel/test-whatwg-encoding-custom-textdecoder.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder.js) - [parallel/test-whatwg-events-event-constructors.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-events-event-constructors.js) - [parallel/test-whatwg-events-eventtarget-this-of-listener.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-events-eventtarget-this-of-listener.js) @@ -2507,6 +2512,9 @@ Total: 2924 - [parallel/test-whatwg-readablebytestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-readablebytestream.js) - [parallel/test-whatwg-readablestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-readablestream.js) - [parallel/test-whatwg-transformstream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-transformstream.js) +- [parallel/test-whatwg-url-custom-domainto.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-domainto.js) +- [parallel/test-whatwg-url-custom-inspect.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-inspect.js) +- [parallel/test-whatwg-url-custom-parsing.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-parsing.js) - 
[parallel/test-whatwg-url-custom-properties.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-properties.js) - [parallel/test-whatwg-url-custom-searchparams-append.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-append.js) - [parallel/test-whatwg-url-custom-searchparams-constructor.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-constructor.js) @@ -2523,7 +2531,9 @@ Total: 2924 - [parallel/test-whatwg-url-custom-searchparams-stringifier.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js) - [parallel/test-whatwg-url-custom-searchparams-values.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-values.js) - [parallel/test-whatwg-url-custom-searchparams.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams.js) +- [parallel/test-whatwg-url-custom-setters.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-setters.js) - [parallel/test-whatwg-url-invalidthis.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-invalidthis.js) +- [parallel/test-whatwg-url-toascii.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-toascii.js) - [parallel/test-whatwg-webstreams-adapters-streambase.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-streambase.js) - [parallel/test-whatwg-webstreams-adapters-to-readablestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-to-readablestream.js) - [parallel/test-whatwg-webstreams-adapters-to-readablewritablepair.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-to-readablewritablepair.js) diff --git a/tools/node_compat/deno.json 
b/tools/node_compat/deno.json new file mode 100644 index 0000000000..6e9c891a03 --- /dev/null +++ b/tools/node_compat/deno.json @@ -0,0 +1,6 @@ +{ + "tasks": { + "setup": "deno run --allow-read --allow-write ./setup.ts", + "test": "deno test -A ../../cli/tests/node_compat/test.ts --" + } +} diff --git a/tools/node_compat/node b/tools/node_compat/node new file mode 160000 index 0000000000..d0d9c1ba9d --- /dev/null +++ b/tools/node_compat/node @@ -0,0 +1 @@ +Subproject commit d0d9c1ba9d3facf1086438e21d6d329c599e5a3b diff --git a/tools/node_compat/setup.ts b/tools/node_compat/setup.ts index c8fd6a8e09..132acfe116 100755 --- a/tools/node_compat/setup.ts +++ b/tools/node_compat/setup.ts @@ -1,23 +1,11 @@ -#!/usr/bin/env -S deno run --allow-read=. --allow-write=. --allow-net=nodejs.org +#!/usr/bin/env -S deno run --allow-read=. --allow-write=. --allow-run=git // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -/** This script downloads Node.js source tarball, extracts it and copies the - * test files according to the config file `cli/tests/node_compat/config.json` - */ +/** This copies the test files according to the config file `cli/tests/node_compat/config.jsonc` */ -import { Foras, gunzip } from "https://deno.land/x/denoflate@2.0.2/deno/mod.ts"; -import { Untar } from "../../test_util/std/archive/untar.ts"; import { walk } from "../../test_util/std/fs/walk.ts"; -import { - dirname, - fromFileUrl, - join, - sep, -} from "../../test_util/std/path/mod.ts"; +import { sep } from "../../test_util/std/path/mod.ts"; import { ensureFile } from "../../test_util/std/fs/ensure_file.ts"; -import { Buffer } from "../../test_util/std/io/buffer.ts"; -import { copy } from "../../test_util/std/streams/copy.ts"; -import { readAll } from "../../test_util/std/streams/read_all.ts"; import { writeAll } from "../../test_util/std/streams/write_all.ts"; import { withoutAll } from "../../test_util/std/collections/without_all.ts"; import { relative } from 
"../../test_util/std/path/posix.ts"; @@ -27,8 +15,6 @@ import { config, ignoreList } from "../../cli/tests/node_compat/common.ts"; const encoder = new TextEncoder(); const NODE_VERSION = config.nodeVersion; -const NODE_NAME = "node-v" + NODE_VERSION; -const NODE_ARCHIVE_NAME = `${NODE_NAME}.tar.gz`; const NODE_IGNORED_TEST_DIRS = [ "addons", @@ -51,25 +37,17 @@ const NODE_IGNORED_TEST_DIRS = [ "wpt", ]; -const NODE_TARBALL_URL = - `https://nodejs.org/dist/v${NODE_VERSION}/${NODE_ARCHIVE_NAME}`; -const NODE_VERSIONS_ROOT = new URL("versions/", import.meta.url); -const NODE_TARBALL_LOCAL_URL = new URL(NODE_ARCHIVE_NAME, NODE_VERSIONS_ROOT); -// local dir url where we copy the node tests -const NODE_LOCAL_ROOT_URL = new URL(NODE_NAME, NODE_VERSIONS_ROOT); -const NODE_LOCAL_TEST_URL = new URL(NODE_NAME + "/test/", NODE_VERSIONS_ROOT); +const VENDORED_NODE_TEST = new URL("node/test/", import.meta.url); const NODE_COMPAT_TEST_DEST_URL = new URL( "../../cli/tests/node_compat/test/", import.meta.url, ); -Foras.initSyncBundledOnce(); - async function getNodeTests(): Promise { const paths: string[] = []; - const rootPath = NODE_LOCAL_TEST_URL.href.slice(7); + const rootPath = VENDORED_NODE_TEST.href.slice(7); for await ( - const item of walk(NODE_LOCAL_TEST_URL, { exts: [".js"] }) + const item of walk(VENDORED_NODE_TEST, { exts: [".js"] }) ) { const path = relative(rootPath, item.path); if (NODE_IGNORED_TEST_DIRS.every((dir) => !path.startsWith(dir))) { @@ -98,7 +76,7 @@ async function updateToDo() { await file.write(encoder.encode(` # Remaining Node Tests -NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. +NOTE: This file should not be manually edited. Please edit \`cli/tests/node_compat/config.json\` and run \`deno task setup\` in \`tools/node_compat\` dir instead. 
Total: ${missingTests.length} @@ -125,33 +103,6 @@ async function clearTests() { } } -async function decompressTests() { - console.log(`Decompressing ${NODE_ARCHIVE_NAME}...`); - - const compressedFile = await Deno.open(NODE_TARBALL_LOCAL_URL); - - const buffer = new Buffer(gunzip(await readAll(compressedFile))); - compressedFile.close(); - - const tar = new Untar(buffer); - const outFolder = dirname(fromFileUrl(NODE_TARBALL_LOCAL_URL)); - const testsFolder = `${NODE_NAME}/test`; - - for await (const entry of tar) { - if (entry.type !== "file") continue; - if (!entry.fileName.startsWith(testsFolder)) continue; - const path = join(outFolder, entry.fileName); - await ensureFile(path); - const file = await Deno.open(path, { - create: true, - truncate: true, - write: true, - }); - await copy(entry, file); - file.close(); - } -} - /** Checks if file has entry in config.json */ function hasEntry(file: string, suite: string) { return Array.isArray(config.tests[suite]) && @@ -161,12 +112,12 @@ function hasEntry(file: string, suite: string) { async function copyTests() { console.log("Copying test files..."); - for await (const entry of walk(NODE_LOCAL_TEST_URL, { skip: ignoreList })) { + for await (const entry of walk(VENDORED_NODE_TEST, { skip: ignoreList })) { const fragments = entry.path.split(sep); // suite is the directory name after test/. 
For example, if the file is - // "node-v18.12.1/test/fixtures/policy/main.mjs" + // "node_comapt/node/test/fixtures/policy/main.mjs" // then suite is "fixtures/policy" - const suite = fragments.slice(fragments.indexOf(NODE_NAME) + 2, -1) + const suite = fragments.slice(fragments.indexOf("node_compat") + 3, -1) .join("/"); if (!hasEntry(entry.name, suite)) { continue; @@ -180,8 +131,9 @@ async function copyTests() { write: true, }); const srcFile = await Deno.open( - new URL(`${suite}/${entry.name}`, NODE_LOCAL_TEST_URL), + new URL(`${suite}/${entry.name}`, VENDORED_NODE_TEST), ); + // Add header to js files if (dest.pathname.endsWith("js")) { await writeAll( destFile, @@ -190,7 +142,7 @@ async function copyTests() { // Copyright Joyent and Node contributors. All rights reserved. MIT license. // Taken from Node ${NODE_VERSION} -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually +// This file is automatically generated by \`tools/node_compat/setup.ts\`. Do not modify this file manually. 
`), ); @@ -199,44 +151,19 @@ async function copyTests() { } } -/** Downloads Node tarball */ -async function downloadFile() { - console.log( - `Downloading ${NODE_TARBALL_URL} in "${NODE_TARBALL_LOCAL_URL}" ...`, - ); - const response = await fetch(NODE_TARBALL_URL); - if (!response.ok) { - throw new Error(`Request failed with status ${response.status}`); - } - await ensureFile(NODE_TARBALL_LOCAL_URL); - const file = await Deno.open(NODE_TARBALL_LOCAL_URL, { - truncate: true, - write: true, - create: true, - }); - await response.body.pipeTo(file.writable); -} - // main -try { - Deno.lstatSync(NODE_TARBALL_LOCAL_URL); -} catch (e) { - if (!(e instanceof Deno.errors.NotFound)) { - throw e; - } - await downloadFile(); -} - -try { - Deno.lstatSync(NODE_LOCAL_ROOT_URL); -} catch (e) { - if (!(e instanceof Deno.errors.NotFound)) { - throw e; - } - await decompressTests(); -} - await clearTests(); await copyTests(); await updateToDo(); + +if (Deno.args[0] === "--check") { + const cmd = new Deno.Command("git", { args: ["status", "-s"] }); + const { stdout } = await cmd.output(); + + if (stdout.length > 0) { + console.log("The following files have been changed:"); + console.log(new TextDecoder().decode(stdout)); + Deno.exit(1); + } +} diff --git a/tools/release/01_bump_crate_versions.ts b/tools/release/01_bump_crate_versions.ts index d9c67a8170..c709ccc809 100755 --- a/tools/release/01_bump_crate_versions.ts +++ b/tools/release/01_bump_crate_versions.ts @@ -8,6 +8,8 @@ const repo = workspace.repo; const cliCrate = workspace.getCliCrate(); const originalCliVersion = cliCrate.version; +await bumpCiCacheVersion(); + // increment the cli version if (Deno.args.some((a) => a === "--patch")) { await cliCrate.increment("patch"); @@ -110,3 +112,25 @@ async function updateStdVersion() { text.replace(versionRe, `std@${newStdVersion}`), ); } + +async function bumpCiCacheVersion() { + const generateScript = workspace.repo.folderPath.join( + ".github/workflows/ci.generate.ts", + ); + 
const fileText = generateScript.readTextSync(); + const cacheVersionRegex = /const cacheVersion = ([0-9]+);/; + const version = fileText.match(cacheVersionRegex)?.[1]; + if (version == null) { + throw new Error("Could not find cache version in text."); + } + const toVersion = parseInt(version, 10) + 1; + $.logStep(`Bumping cache version from ${version} to ${toVersion}...`); + const newText = fileText.replace( + cacheVersionRegex, + `const cacheVersion = ${toVersion};`, + ); + generateScript.writeTextSync(newText); + + // run the script + await $`${generateScript}`; +} diff --git a/tools/release/release_doc_template.md b/tools/release/release_doc_template.md index fe36d16777..c0eb9e2140 100644 --- a/tools/release/release_doc_template.md +++ b/tools/release/release_doc_template.md @@ -5,7 +5,7 @@ - Forks and local clones of [`denoland/deno`](https://github.com/denoland/deno/), [`denoland/deno_std`](https://github.com/denoland/deno_std/), - [`denoland/dotland`](https://github.com/denoland/dotland/), + [`denoland/dotcom`](https://github.com/denoland/dotcom/), [`denoland/deno_docker`](https://github.com/denoland/deno_docker/) [`denoland/manual`](https://github.com/denoland/manual/) @@ -163,15 +163,14 @@ verify on GitHub that everything looks correct. - [ ] Publish the release on Github - [ ] Run the - https://github.com/denoland/dotland/actions/workflows/update_versions.yml + https://github.com/denoland/dotcom/actions/workflows/update_versions.yml workflow. - [ ] This should open a PR. Review and merge it.
Failure Steps - 1. Update https://github.com/denoland/dotland/blob/main/versions.json - manually. + 1. Update https://github.com/denoland/dotcom/blob/main/versions.json manually. 2. Open a PR and merge.
diff --git a/tools/wpt.ts b/tools/wpt.ts index a3426c5b87..fe2a350b27 100755 --- a/tools/wpt.ts +++ b/tools/wpt.ts @@ -145,6 +145,18 @@ interface TestToRun { expectation: boolean | string[]; } +function getTestTimeout(test: TestToRun) { + if (Deno.env.get("CI")) { + // Don't give expected failures the full time + if (test.expectation === false) { + return { long: 60_000, default: 10_000 }; + } + return { long: 4 * 60_000, default: 4 * 60_000 }; + } + + return { long: 60_000, default: 10_000 }; +} + async function run() { const startTime = new Date().getTime(); assert(Array.isArray(rest), "filter must be array"); @@ -154,11 +166,11 @@ async function run() { expectation, ); assertAllExpectationsHaveTests(expectation, tests, rest); - console.log(`Going to run ${tests.length} test files.`); + const cores = navigator.hardwareConcurrency; + console.log(`Going to run ${tests.length} test files on ${cores} cores.`); const results = await runWithTestUtil(false, async () => { const results: { test: TestToRun; result: TestResult }[] = []; - const cores = navigator.hardwareConcurrency; const inParallel = !(cores === 1 || tests.length === 1); // ideally we would parallelize all tests, but we ran into some flakiness // on the CI, so here we're partitioning based on the start of the test path @@ -174,9 +186,7 @@ async function run() { test.options, inParallel ? () => {} : createReportTestCase(test.expectation), inspectBrk, - Deno.env.get("CI") - ? 
{ long: 4 * 60_000, default: 4 * 60_000 } - : { long: 60_000, default: 10_000 }, + getTestTimeout(test), ); results.push({ test, result }); if (inParallel) { @@ -755,6 +765,11 @@ function discoverTestsToRun( function partitionTests(tests: TestToRun[]): TestToRun[][] { const testsByKey: { [key: string]: TestToRun[] } = {}; for (const test of tests) { + // Run all WebCryptoAPI tests in parallel + if (test.path.includes("/WebCryptoAPI")) { + testsByKey[test.path] = [test]; + continue; + } // Paths looks like: /fetch/corb/img-html-correctly-labeled.sub-ref.html const key = test.path.split("/")[1]; if (!(key in testsByKey)) { diff --git a/tools/wpt/certs/cacert.key b/tools/wpt/certs/cacert.key index 67fefc486c..372ccca449 100644 --- a/tools/wpt/certs/cacert.key +++ b/tools/wpt/certs/cacert.key @@ -1,30 +1,30 @@ -----BEGIN ENCRYPTED PRIVATE KEY----- -MIIFHzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQIiXlaXg3yauACAggA -MB0GCWCGSAFlAwQBKgQQMqGs3Ec13AqQkSvVLb47vgSCBNAhdAsbb4/SpmbL02DB -QX6g6owDLutgUL3KrkYFipTo9jWoakzrJaNuGnz/uRZP3tTD2XqjvH5ILs7BBZ/w -iMGOnjy+ZkRE361MJgMo+9199Ge/GqHhhxp/j0DziJNq4CEpWFcBP+fSRCjpMQzX -6lVrf39XpfsT9n+m2deEGHxMLoxhiaF9uxCAAaEK7E/l9ifCusbOGeWQkFvP61xT -mmotO8jOzb1bQ7OPjuHkW5PEjgsTSBXOP7tvsJi7M7OoiP0saliZHM76n3V88SKS -aPkMfBxLimSpzW98H03ATZOvKJv0TLBJngukMZVj2aJjhw/+ojuoHbAYTyZ+vysp -QGy1vdPf8H59ORBt2Bb97SVFaKK2/ZhIF/u+KSKMUfbyOL0vh8I3BkQQX22s7YER -TmWf563s+viMfE3GkHcIK+8uzkCGAj2DJNdXrnYTZz4rfOwe9SkH2H3yDxJW12wm -iX7gwthyqQ7oRKSYxUBOuKSa/xAom2p7OyeNUukBgvMa+afNtGouGzp57cnSZXQ3 -aNDECWBXwd5bhS0tZ4XBaEGaBAY4jV+EI/WVdpVx/xPaWsHHTsbX9n1he0Da+HtJ -9otEJe+9Cmv1xkthvH7PaXzI1KbWSZqzCfZ/HkDHs8fsrDl0WIZV/+Z1LDlLvmNp -3ldd8mO4Ai2qdIVesLzfRSmQ3r5zTA36v747H5nwwcWbbPg+RJdfdV1Hl79kZSqr -xzferKE8js5mGu7zHsMfHr8pupZrtjBGjeZfdOuPnDUFw1IhsljIAKH2lrCWgxHQ -+WmpkAbKtJayCzqo8IUpB2TLcgfvRyJG994bmzoEjYQlW980pVv77K+44mziaRFy -BvBsSE2jqD+QlQNcFT0E+F9S/peLLL0M0G7u7fwrYXjtumioLszCrIqSxpFDqjo1 -C1ZbX+NBi5fKoTwz0ZaT1ps4wLZH4Eu/qZQU6u4CEum8q8oSACPAHAX2QcqaYcoz 
-QQX0KiWIweWZDsw+A/VPiyQbSnFL1O/vhDDy67H0HfbRkad2/H2JU6SQAwL7/7b5 -pHNmvC5ypLm1gdua7Nv1jg6Z7+oOvTc65mzXbuf2Ig5ZhwUIhHtn9U2kZHndlK3y -6032Ty7s98wO4hXyp72+C1hFF+laUnPJoM09y6fdD84o5x4ddRM26WaaJHdmxhzm -VvVeY4ui5LRFNheTLpRfv2NKpT7FY6KyfM1Rx4hc4D2oU0rG7LQd9PwyrIltwhwe -mEC/qbo4RKfjdGoX2PKkBYDuMI1EuGVWcUHt+CS9cExOsA/Bt/mF1yZuFlBYZRqk -VqDjs7SUYvlEVUksx0lTcoQ0oJkGSBhuNaaWVHe6cJBgADBBFedzK8u4Sxx2QgFM -SwZGTx8UWuA3LNyYtMxER957joUPvrpYIkuBW6cGB3At5YZmYuKt6lmR+wEXl1Ew -aWcUjBUDjLYZ/OVqJ3ZWzCkJh2ZQLfj9M1zPRiJp59ThHMF8hFliot6BLi4H5MGs -Tvc8OF8b8ZaCgfGIUmdI3Gr0o5IcPO516n/HOXgn8hx9kQsVJckRAGpDQZXW3B3M -iZHppcLhqIPleV+0VwjDdWuwGMVNzMTlV8RV/N/pJmDR8DUiSKfeSCSUP9mYkBNb -yjhQG3W27ATKHJx3Dm9mVpoEtA== +MIIFHzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQIQ+W03vdK1q8CAggA +MB0GCWCGSAFlAwQBKgQQvAinKAExkO2ZesObnF6FMgSCBNCclHCzJ7wkLCCLn6oU +GGNpNORoeiteSIr+2OHluK9in3YFoS9sXGDQrsb56wVbQIisxWFFGRES9ds+KW3n +Ll38AutYQ5yArIFqWO83f80zybnrtynTluCtV6s3JZGhgSpYXUJ0v+sNwRuGILvb +BxJYB4RFkSCyne629JScEthMoaVvjNbOvcpRmIcO1WdRCsf3EEuZPHExhNnQxUAU +Yw8eNN17kc/e9Wjl4W65gfehcwKJRsWs+tD4zmYUKFTyETtbuNyntR5n8Lil3abV +lgENx1/XOTan/AvYRv3DqoLyt6p/lp4F4FtQQlylCjg+rZaovP7NC4s22bBnoQPx +QbbdOx+v1WeCquezG3VzgMiw6//wSfqOCy1TWmOWghcORolYSzQ7KUY0jHhiIo0y +IkxBgMd4XefYXaCCPdtY8O2pJKQzaqY9p8cIzBe41X/PuACUNSTh1Kt1oBGCBQbA +Ox1mT8wUBM7ETU96TX87g+AtbTW8LB9fxf0PW3cieEc4QPxciN1RZQQOKw/qU9wx +MQj9nlyGn3c6LK11mZ8piz6wD6/dCZAocGndmRA5qELDrGihAGBvHmpLM08vgH2B +BKtv3Is9/v54ATUvGBtXKkYOmK4fONUyMJoyIbZPbifZBs1PUVm0qcHF2VnamdCr +EB4Ea/p49G1SuOZN7WtmrJR7fedoappa7+vlUU4cCTw5XIsFInLJ4xQe+BhCnQKs +Gm3ewR3ferrC8TeiExiYShVVPb3n0Qr5Mm/9SHCutux4a4/Q8a09MNB1+Inh+tH7 +sKyFQS6IettSMZ2FtO+23nd1BM1VbGkxaLVVah3Lcho4byYAhdLo3wRqFLq+QI5G +AYZHrK+WlL1vcXh1Ba+H5aeNUV/fJHy4jMe78VzYb1Enuj8RhhbqlvRlnySfMt+N +SYaVrWIioWzAkUDAZsTZqywn+TWP12zMb1bA3S8zX02PY1/YFfhfYDn0ME+tAVPv +JxdwS6z6x2KHN5WEabKYlFGc+F9bAgW/KtWXBSzPiUENB5oq7fRKiZSjIHXqaSBO +kJ5FcqTwj2b6hgD4KYAAs1qOflmvkq5Yxc9zOYWB+avPE0FUFP/QV+65RHtau8G5 
+OdWGWs/tzcEG3GAsIWBCoSCe4YKJsUGoi19Aa/i/k+z1OKvl41LXwIEyS79S6snG +vrCm3BpQMtvE2tMJ1f2n230v7PyG/JF/UuW41F7jUg40E2ULwzCBTCmiaWKD4iYp +nGIorbt9bHo7P1OJAryVGvI6tRyPeShPhjzb9DSWqWOCsfJ7HapiOQb0e+83N1Uv +H7lWz9WMEzp/ewnd6qQTpIFZBmQ3oKxSQ6agvHBmMrpvW/kpH+Oa40dE+QP5L0zU +n7Mv5j1q03lA6dO5Fa/2BR0AiTnYE+Yn5c/llDYpbFZam6s+3e9/64nFTOApDW5l +XfgXOy2KlQOziuz+8rjOmMFmVYRkfBKtjzuhRIERGFKZntsXQbAGM3EKGdusA1mz +AcXbp17gMpEBI24AYjo0Hu6XYUTGeXsbqJBOnIAg0JslbbLlRMzKdRVINKKLXLdX +TUunm9tmOSwL4o22WX/D3iUx0IiSOdFfq8FMTEmAPStfoBTCxyEPfVPYz9w0+trW +JvvPmdH0mTZp8G3UmkgBSPk1dw== -----END ENCRYPTED PRIVATE KEY----- diff --git a/tools/wpt/certs/cacert.pem b/tools/wpt/certs/cacert.pem index 97a283387c..9bc42cc1ba 100644 --- a/tools/wpt/certs/cacert.pem +++ b/tools/wpt/certs/cacert.pem @@ -1,178 +1,71 @@ -----BEGIN CERTIFICATE----- -MIIg6DCCH9CgAwIBAgIDAsJPMA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl -Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMjA2MDEyMjI4MzJaFw0yMzA2MDEyMjI4MzJa +MIIM3jCCC8agAwIBAgIDDdH2MA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl +Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMzA2MDIwMjQ1MTdaFw0yNDA2MDEwMjQ1MTda MB0xGzAZBgNVBAMMEndlYi1wbGF0Zm9ybS10ZXN0czCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAOoO+MDm5HkntqGMufTd/3UJy1doKUimlrTWIbYfdHKH -jWhIpAXxH0CJqIgy574mU9j8FigBLUJQNu2XxxQ2O4HKz9H62JnuXQ9zO7LvwJ1r -9Lt837rm71lMqPtx/4XOFcEqE2ij9FuKX1Adhy3bSAY2JX9joGK8OQLmGZw/cQ+X -YUZoeViLaChWryU/zYkGy3a12RnzjJhQtwX0EAHSuGKLi1+uJSd57skRvpSmbp5f -OSZ4q4ZZDm9Eiy6UGRyR9dzXvnchyR9n2orGk68xEho/tTEAwP+gVinJ2516bYnN -8N6pXML/+20Z1REi2CtglJX527tCRjZ8GXR/MdgjXSMCAwEAAaOCHi8wgh4rMAwG -A1UdEwQFMAMBAf8wHQYDVR0OBBYEFGGs85y2SLkKyp8vUZcMjFvWcxP7MEcGA1Ud -IwRAMD6AFGGs85y2SLkKyp8vUZcMjFvWcxP7oSGkHzAdMRswGQYDVQQDDBJ3ZWIt -cGxhdGZvcm0tdGVzdHOCAwLCTzALBgNVHQ8EBAMCAgQwEwYDVR0lBAwwCgYIKwYB -BQUHAwEwgh2PBgNVHREEgh2GMIIdgoIRd2ViLXBsYXRmb3JtLnRlc3SCFW9wMy53 -ZWItcGxhdGZvcm0udGVzdIIVb3A0LndlYi1wbGF0Zm9ybS50ZXN0ghVvcDcud2Vi -LXBsYXRmb3JtLnRlc3SCFW9wNi53ZWItcGxhdGZvcm0udGVzdIIVb3AxLndlYi1w -bGF0Zm9ybS50ZXN0ghVvcDIud2ViLXBsYXRmb3JtLnRlc3SCFXd3dy53ZWItcGxh 
-dGZvcm0udGVzdIIVbm90LXdlYi1wbGF0Zm9ybS50ZXN0ghVvcDkud2ViLXBsYXRm -b3JtLnRlc3SCFW9wNS53ZWItcGxhdGZvcm0udGVzdIIVb3A4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDI0LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDg5LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDMyLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDIxLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQzLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDQ1LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQ5LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDY4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQ0LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDE5LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDc3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDIwLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQ3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDE1LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDI1LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDkyLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDYzLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDc5LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDk3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDUwLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDY3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDExLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDg1LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDUzLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDM3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDM1LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDYyLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDY5LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQyLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDI3LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDk0LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDE0LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDkxLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDgzLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDk1LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDMxLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDk5LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDg2LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDU1LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDU4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDIzLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDUyLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDIyLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDUxLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDYwLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDQwLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDkzLndlYi1wbGF0Zm9ybS50ZXN0ghZ3d3cyLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDgxLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDkwLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDE2LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDc4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDI2LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDE3LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDYxLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDc1LndlYi1wbGF0Zm9y 
-bS50ZXN0ghZvcDczLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDc2LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDM4LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDcyLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDgwLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDY0LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDQ4LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDMzLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDU0LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDEzLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDM2LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDU3LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDEwLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDg4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDcwLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDM0LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDM5LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDY2LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDU2LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDk4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDcxLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDQ2LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDk2LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDQxLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDI5LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDU5LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDEyLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDY1LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDMwLndlYi1wbGF0Zm9ybS50ZXN0ghZvcDgyLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDg0LndlYi1wbGF0Zm9ybS50ZXN0ghZ3d3cxLndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDg3LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDE4LndlYi1wbGF0Zm9y -bS50ZXN0ghZvcDc0LndlYi1wbGF0Zm9ybS50ZXN0ghZvcDI4LndlYi1wbGF0Zm9y -bS50ZXN0ghlvcDIubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghl3d3cud3d3LndlYi1w -bGF0Zm9ybS50ZXN0ghlvcDMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghlvcDgubm90 -LXdlYi1wbGF0Zm9ybS50ZXN0ghlvcDEubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghlv -cDcubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghlvcDUubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghlvcDkubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghlvcDYubm90LXdlYi1wbGF0 -Zm9ybS50ZXN0ghl3d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghlvcDQubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDMwLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3Az -OC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNjIubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDU0Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A2MS5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wMjYubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDkyLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNzMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDUyLm5vdC13ZWItcGxhdGZv 
-cm0udGVzdIIab3A0My5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMTAubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDM2Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3Ay -NS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wOTcubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDc1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4Mi5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wNDYubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDk5Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A5OC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wMzMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDY0Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3AyOS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNzkubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDIzLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4 -My5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNzYubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDc4Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A3Ny5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wODUubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDg2Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gnd3dy53d3cyLndlYi1wbGF0Zm9ybS50ZXN0ghpvcDcwLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIad3d3MS53d3cud2ViLXBsYXRmb3JtLnRlc3SCGm9wMTEubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDQ1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4 -OS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNDAubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDU1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AyMi5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wNjYubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDUwLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A0Ny5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wODEubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDgwLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A5MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNjAubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDEyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A0 -OS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wODgubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDQxLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AyMS5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wNTcubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDI0Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A1MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNjMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDE2Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIad3d3Mi53d3cud2ViLXBsYXRmb3JtLnRlc3SCGm9wMTcubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDM5Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A0 
-OC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMzcubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDk1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A2OC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wOTYubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDk0Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AzNS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNjkubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDkzLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3AzMi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGnd3dy53d3cxLndl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDU5Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A2 -Ny5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMTkubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDc0Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A0Mi5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wMTQubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDI3Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIad3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNTgubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDM0Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A1Ni5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGnd3dzEubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDE1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A5 -MC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMjgubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDUzLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AxOC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wNDQubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDg0Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A4Ny5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNjUubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDMxLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3AyMC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMTMubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ght3d3cxLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCG3d3 -dzEud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIbd3d3Mi53d3cyLndlYi1wbGF0Zm9y -bS50ZXN0ght3d3cyLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCHXd3dy53d3cubm90 -LXdlYi1wbGF0Zm9ybS50ZXN0gh53d3cud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRl -c3SCHnhuLS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdIIed3d3Mi53d3cubm90 -LXdlYi1wbGF0Zm9ybS50ZXN0gh53d3cud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRl -c3SCHnd3dzEud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIfd3d3Mi53d3cxLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIfd3d3MS53d3cxLm5vdC13ZWItcGxhdGZvcm0u -dGVzdIIfd3d3Mi53d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIfd3d3MS53d3cy -Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIieG4tLWx2ZS02bGFkLm5vdC13ZWItcGxh 
-dGZvcm0udGVzdIIieG4tLWx2ZS02bGFkLnd3dy53ZWItcGxhdGZvcm0udGVzdIIi -d3d3LnhuLS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdIIjd3d3Mi54bi0tbHZl -LTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCI3d3dzEueG4tLWx2ZS02bGFkLndlYi1w -bGF0Zm9ybS50ZXN0giN4bi0tbHZlLTZsYWQud3d3MS53ZWItcGxhdGZvcm0udGVz -dIIjeG4tLWx2ZS02bGFkLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCJnd3dy54bi0t -bHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0giZ4bi0tbHZlLTZsYWQud3d3 -Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIneG4tLWx2ZS02bGFkLnd3dzIubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0gid4bi0tbHZlLTZsYWQud3d3MS5ub3Qtd2ViLXBsYXRm -b3JtLnRlc3SCJ3d3dzEueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVz -dIInd3d3Mi54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0gil4bi0t -bjhqNmRzNTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIreG4tLWx2ZS02 -bGFkLnhuLS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdIIteG4tLW44ajZkczUz -bHd3a3JxaHYyOGEud3d3LndlYi1wbGF0Zm9ybS50ZXN0gi13d3cueG4tLW44ajZk -czUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCLXhuLS1uOGo2ZHM1M2x3 -d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdIIueG4tLW44ajZkczUzbHd3 -a3JxaHYyOGEud3d3MS53ZWItcGxhdGZvcm0udGVzdIIud3d3MS54bi0tbjhqNmRz -NTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIueG4tLW44ajZkczUzbHd3 -a3JxaHYyOGEud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIud3d3Mi54bi0tbjhqNmRz -NTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIveG4tLWx2ZS02bGFkLnhu -LS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMXhuLS1uOGo2ZHM1M2x3 -d2tycWh2MjhhLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMXd3dy54bi0tbjhq -NmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMnhuLS1uOGo2 -ZHM1M2x3d2tycWh2MjhhLnd3dzIubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjJ3d3cx -LnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdIIy -d3d3Mi54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRl -c3SCMnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dzEubm90LXdlYi1wbGF0Zm9y -bS50ZXN0gjZ4bi0tbHZlLTZsYWQueG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2Vi -LXBsYXRmb3JtLnRlc3SCNnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1sdmUt -NmxhZC53ZWItcGxhdGZvcm0udGVzdII6eG4tLW44ajZkczUzbHd3a3JxaHYyOGEu -eG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdII6eG4tLWx2ZS02bGFk 
-LnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdIJB -eG4tLW44ajZkczUzbHd3a3JxaHYyOGEueG4tLW44ajZkczUzbHd3a3JxaHYyOGEu -d2ViLXBsYXRmb3JtLnRlc3SCRXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1u -OGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdDANBgkqhkiG -9w0BAQsFAAOCAQEA41QFhD/a2Ug71UoYtiM2Kgz9ujeEPpfxXItUVj+QrgAXeRZe -VE1B1xqxuy0QopG7vDuo02tstWcBZON8lr0x7xEheBP7jOdAFA0LBIbOwTcPBVlT -N/dzIpn1E6cIucIYoAtf2KFemEeZFeGAf0J1DhVOt4+luXa15Snl9ScQML5uVmIs -n7RGBnSt3yKgsUXzFujhv9T+ZG7jsSw5240OWjK7kqN8z9Ny3FuF1hd5vSTsvvfl -be6acO8LtCDxODYIfo1TMdvoT+UyCMKkuSmFpGtqXzGbg929l04AvnfPhu/1R1R0 -/TqZNh0FA3rtKtLBxdmh9VdvQZlhiOVK1J9FUA== +BQADggEPADCCAQoCggEBAKvZZxnQbYFxul9QFZvEzyaUpOy/maaeSbsEKTXcmMV+ +Op1pBFEIsVKfW09a2V0yIPNeNJt9ezGR12klitPi6HbtENWRIHwH7pEl99QElYSt +yVNcN2dy8bn8p94xUshjMV3sPzBCjMUaAu1FQtYiSeCw+XG4vh74uOYGV6EiIATQ +Xj6J10aLNBGMrOlbqYLbB5KIHIIX3HIlxOS1VyVUJANhv8+m89ykyy/eWjmexbeK +tXhqc98iopV/2Okl2K6AAjjPMwV+5HKzDBGKS5MOmnzqCSEjkQbCSwQgAy758DeN +KuWWd2OwfaudYt3wprnIcT8AJW8Sp1aGSQmeJYmywbkCAwEAAaOCCiUwggohMAwG +A1UdEwQFMAMBAf8wHQYDVR0OBBYEFGbZlmcsD7pGcpPSrjXntECqiONKMEcGA1Ud +IwRAMD6AFGbZlmcsD7pGcpPSrjXntECqiONKoSGkHzAdMRswGQYDVQQDDBJ3ZWIt +cGxhdGZvcm0tdGVzdHOCAw3R9jALBgNVHQ8EBAMCAgQwEwYDVR0lBAwwCgYIKwYB +BQUHAwEwggmFBgNVHREEggl8MIIJeIIRd2ViLXBsYXRmb3JtLnRlc3SCFW5vdC13 +ZWItcGxhdGZvcm0udGVzdIIVd3d3LndlYi1wbGF0Zm9ybS50ZXN0ghZ3d3cxLndl +Yi1wbGF0Zm9ybS50ZXN0ghZ3d3cyLndlYi1wbGF0Zm9ybS50ZXN0ghl3d3cubm90 +LXdlYi1wbGF0Zm9ybS50ZXN0ghl3d3cud3d3LndlYi1wbGF0Zm9ybS50ZXN0ghp3 +d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIad3d3MS5ub3Qtd2ViLXBsYXRmb3Jt +LnRlc3SCGnd3dy53d3cxLndlYi1wbGF0Zm9ybS50ZXN0ghp3d3cxLnd3dy53ZWIt +cGxhdGZvcm0udGVzdIIad3d3Mi53d3cud2ViLXBsYXRmb3JtLnRlc3SCGnd3dy53 +d3cyLndlYi1wbGF0Zm9ybS50ZXN0ght3d3cyLnd3dzEud2ViLXBsYXRmb3JtLnRl +c3SCG3d3dzIud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIbd3d3MS53d3cxLndlYi1w +bGF0Zm9ybS50ZXN0ght3d3cxLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCHXd3dy53 +d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0gh53d3cud3d3Mi5ub3Qtd2ViLXBsYXRm 
+b3JtLnRlc3SCHnd3dy53d3cxLm5vdC13ZWItcGxhdGZvcm0udGVzdIIeeG4tLWx2 +ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0gh53d3cxLnd3dy5ub3Qtd2ViLXBsYXRm +b3JtLnRlc3SCHnd3dzIud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIfd3d3Mi53 +d3cxLm5vdC13ZWItcGxhdGZvcm0udGVzdIIfd3d3MS53d3cyLm5vdC13ZWItcGxh +dGZvcm0udGVzdIIfd3d3MS53d3cxLm5vdC13ZWItcGxhdGZvcm0udGVzdIIfd3d3 +Mi53d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIid3d3LnhuLS1sdmUtNmxhZC53 +ZWItcGxhdGZvcm0udGVzdIIieG4tLWx2ZS02bGFkLnd3dy53ZWItcGxhdGZvcm0u +dGVzdIIieG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdIIjeG4tLWx2 +ZS02bGFkLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCI3huLS1sdmUtNmxhZC53d3cx +LndlYi1wbGF0Zm9ybS50ZXN0giN3d3cyLnhuLS1sdmUtNmxhZC53ZWItcGxhdGZv +cm0udGVzdIIjd3d3MS54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCJnd3 +dy54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0giZ4bi0tbHZlLTZs +YWQud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIneG4tLWx2ZS02bGFkLnd3dzIu +bm90LXdlYi1wbGF0Zm9ybS50ZXN0gid4bi0tbHZlLTZsYWQud3d3MS5ub3Qtd2Vi +LXBsYXRmb3JtLnRlc3SCJ3d3dzEueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZv +cm0udGVzdIInd3d3Mi54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0 +gil4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIreG4t +LWx2ZS02bGFkLnhuLS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdIItd3d3Lnhu +LS1uOGo2ZHM1M2x3d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gi14bi0tbjhq +NmRzNTNsd3drcnFodjI4YS53d3cud2ViLXBsYXRmb3JtLnRlc3SCLXhuLS1uOGo2 +ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdIIud3d3Mi54bi0t +bjhqNmRzNTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIud3d3MS54bi0t +bjhqNmRzNTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIueG4tLW44ajZk +czUzbHd3a3JxaHYyOGEud3d3MS53ZWItcGxhdGZvcm0udGVzdIIueG4tLW44ajZk +czUzbHd3a3JxaHYyOGEud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIveG4tLWx2ZS02 +bGFkLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMXd3dy54bi0t +bjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMXhuLS1u +OGo2ZHM1M2x3d2tycWh2MjhhLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCMnd3 +dzEueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9ybS50ZXN0 +gjJ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cxLm5vdC13ZWItcGxhdGZvcm0u 
+dGVzdIIyd3d3Mi54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRm +b3JtLnRlc3SCMnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnd3dzIubm90LXdlYi1w +bGF0Zm9ybS50ZXN0gjZ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS54bi0tbHZlLTZs +YWQud2ViLXBsYXRmb3JtLnRlc3SCNnhuLS1sdmUtNmxhZC54bi0tbjhqNmRzNTNs +d3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdII6eG4tLWx2ZS02bGFkLnhuLS1u +OGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdII6eG4tLW44 +ajZkczUzbHd3a3JxaHYyOGEueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0u +dGVzdIJBeG4tLW44ajZkczUzbHd3a3JxaHYyOGEueG4tLW44ajZkczUzbHd3a3Jx +aHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCRXhuLS1uOGo2ZHM1M2x3d2tycWh2Mjhh +LnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0udGVzdDAN +BgkqhkiG9w0BAQsFAAOCAQEApKeUP4UaaT1TxI3DMdNMRIAHRt3v5/CiPKWrntfW +gfBTXPvM+WcYl1n+x4dZ2UL2r4PFmUT1MSaotx4TC4Tu80NoHyFZYDpWtBrkjd35 +M8niW02Fq0j4vjWL3mJFjUQPfQq5GFw5jZXSuTT6FHfXG5YNT1nVTMVwKu0sX7y/ +2eNddJIr2LbEOLRgyYPr2PtmYPSe60rSiRXRWCgd9xYnRTnkt/6bjOy9qzWT9owj +JZAUou5L5q2hUxXbpi5eV5x907KHTkH7SumQVKBY61Mhz6ULxS08KWOeFWC6y4FX +HmUM54620yFqretTloDEw1XS0tgyGHgR4RqouL2vnFsmJg== -----END CERTIFICATE----- diff --git a/tools/wpt/certs/web-platform.test.key b/tools/wpt/certs/web-platform.test.key index 3d0f3ce0a9..661a9ba87d 100644 --- a/tools/wpt/certs/web-platform.test.key +++ b/tools/wpt/certs/web-platform.test.key @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDcVQzzTtqXFIAH -fNi6S9Wj2s6BmBmITFWUjETrulYKmIhFTwCY3x7Bt5j4CmVqxEx5C3XZTRuD0T/4 -ZZ4SCTly6LpnHfIf1RX2qS1YU/D3MCbUfqAigtstW4S8xaz48TRUBV5KpIrBVN85 -2efelS6BQicUMWMXjxQe+Z70FuQ5SSNe2KxkfROTVg/zOWYefcgvGPOTkfUrdVF/ -AZB3FZIyEFj/6By7hmCu+mtdjLFMTQL8RJFhsta5YgPsaJb+5vCCnho4v2SAW0O/ -Ak8pTdvVjINUMgkc3ljddBzqutDjaC5HVvzjDj49GCsBEYtCjQTLlibX/HpriGmy -eO3mSxdLAgMBAAECggEBANMR7uwOeD0O+G8z3HLrPuhPLlDbomn9pmY90bB4uwA/ -ixvKSOoppE8zUKkdXHZhQlka73SR92/Zvwx4XGsuxwgbhqZN+6se+IFpI/VfxJEo -Yapi+4jUDp9u8sPcIo4Ak36B/ghHhF0crxO+ee+yvfA5rTIc9a6V9FKQEwQGAfAV -J1+IW2rRjAlVxHCcYY3Hf5Ov3dKtr7/KRso8Pvaj1yBbliF70x4RYgcVcjF4w491 
-tErjvfNMXTKVigY6zRUVizzkDug2V+kMmku3SmyjJ51VnnxFD4n2JWkZubxMXfu+ -aKXdcJVlWeOVWRWhT4pw+qNKqHABtZ2AqwbPHlkavIECgYEA8c8EfrbZCDlrrevl -suvvAHcA4UqwHPiFw1spwRkiHxym+FT5H6LQUxxdUyxo94nQbnLWaK6FjUVZKH0K -gqvl4ibcZAsPsfeCWSd2aHe//VzCtg6dG2bam3sKgq2oU/fP5VBV0ytK1VaZjc+Z -GbLp040Y54xyI9bLvHnbPYf0gWECgYEA6UNd4iHAKwwCe4MYWplEqxTUQTc2PbFP -Bp1gM2+WYApZqy+DS/dH6qZ1GJ7Uz8k3uhKkN2ZFpP3WlgTVLDHqeReKkSUDIf6E -RJ2wZSYBCSmOMjm77XIj37RgNmLa2AefaTvf+C4BgaOyy+TkOeUJYX065+ByFg9s -OTDooWtK3CsCgYBcff8OKQqjJrOVdyIJak2lM9/WTSdILZecvgYeomp21BLZ8kbI -XxKod7UK13XfGYvsc2in6fmTq+8KmamnifEIrCyo58I9JJtrVrjoBMYfN5gS40wc -6x67UjkW+y3j0GppIG1ztJ+PMuzRXZMGTXU7QkFAcGxIYSTVFxdRVeukgQKBgQDo -J/4mc8TYkwldeCoi8WwggYiGp7a6S+AFC1iVmLenOmXoNE+OqbvP1H+yU1Imw3NN -JASAhq9F+MU7Ze/sHnnorv9gWd/2U2O9DtxiryJUxiEZ59AcEH+Xw+QF/PkmDOAh -UlAkp1Uke5IVz+c9n7BocMTRxtaFEjB58y10il5lJQKBgDFo344NEGF7klwFbP3/ -ZJl4QYyWaH3PGaIa4yCrO0Or76L/OQwZy5z7CCFD5p/4MoaqGH/p7KLj+JFrtgPn -Y0MSIYubWafLG1hXjxXQlZqP+OZUJ+C0aMEU45h2uYmSBngo58CbYl1e+OyMt02Z -7I/0TK++5Iudhf0ZgxtlNGKa +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDbQdpYdpYNdw21 +21aAjI/AbN8jRxLjD3KaahP7xCvMdu0gBSbVkr4OYUMckzf+xWPU6w3YZ8CUsIc/ +NQD6j3dC1Iz3x5+0HpFfqoWx22bkzeIM7/X98njsmlv47TBEmvMKD9rz3yHGMdi9 +MmzdQohQBnFoidCV/AWqeKEOlxPYhzdTghbv7FMhD4I7Hz3OOvfmsrwEWTcvP03/ +1V46J4h2oILYELglzS5dCQFEjAbd8SIJlT+YMjWMZXzHgxI+UNJFgcbNUEkR/3v5 +8YGrdVmneI5UjVs1ok8uo/ey9OaJSwWOCrWXasglMDkU9J8JWoce/Q8PIlOGdJCs ++bR92pEvAgMBAAECggEAZrwwhwrpvF1weynPGmTCZrOcynbfDq2KUXgq7Ok6bI+9 +BCflzmT30N0aCSWiMypiYL3SuvfCcOlWNfOsBbt7ckEN3HwZBNjd3SmVg4T+bW9a +4Bz/8oHOz250cesRltPT1X8gZOzEco2gtOYPxI0MOhMKyKTqq5xvOzVSofeAQRAb +7bpNucHS3TP6I50jpav1K1w+PIAbMkRyC/hgITp1sGvx/N/OVLRQ22Jd7jCn9umy +f4UYTpvgHChOm5JDFRqH4zRwntKxbcZhYs/WpUxtyKCW8us9IYVp1tebOtnlY918 +boX2Aq+4CohLgI0x67rt2kDG+actL9slrJI3jqVCAQKBgQD+sGV02GT8MidTFTU3 +mMTeL7SaBC1u1JQV4YyAIKY0Bk8S1Oux+wGIXVYl4VINgn63vtkEbKpSEjLIYIzM +E6KOmwguza1mvZ+vJSoglKnh0dC/I/USJLndAigYYiOSl/mGkK9IKMB1zrGmR03R 
+v0bZAX2I9Zbvynvccztt0MycwQKBgQDcYsSi/eOakVjEV9b0z9u3alna93s/4Y7O +NPCh3NwkGIlweOi1B/5BVCNbTLIz+9Q93UvBW7b7/JQvt9HWdjJf1EFXAz5PM5ip +pAyxtggIYdtu2zW/Gxc8e+chWrZQugoZWZK+oBH5BH4wjtI7SpHqj/RK48WwTjnR +Eo/sbv157wKBgFFZfH6O4+qeBmuGOaJRfIhPiosrTGu7ILXAfkUqqIuzfCxSsBoA +R6QL1AlzZ+cCyIDeR6kfIGPohin0lORWXTTZlgqFDZ7kcI3b/BG+CmkjwF5dGNk7 +u9Y46x+msSAQxNXTfvk4cwjuBVZMLjIRu4py7GsDrrtW3Ks0b0YLTF/BAoGAa/jH +tcMFe3iyMJ+IZLBhSN8F3s1YyNdNC6HMMsDt6ZFL5JqYB/k+i+sY12Yf+G/sb9K3 +hqfUYmhAgZBhcdy+mUx4JpUkNdFlfdctkPNJxDGNPCaRkmtHWw6pEiJLKAm9YOYN +iu8JXyLgYBHY1cuW6YBVg0tMUzBACzo44PEPpmkCgYEAhAroOZaSByBkVKtHKX8D +/j+MUbNyY3GZN81byLpJQ+whFLtNCfkC6YL7zchhnX1S5nX0kZTGzNP2rEI5y2+a +5sK5t3lXrY+aLRPI+gTXe3jxJBDfLp58HhZMGVlLBxx3tTqFzw0KyPPU6IGUgo68 +tgkF4hAUb2u0WO/Vvmc2xfo= -----END PRIVATE KEY----- diff --git a/tools/wpt/certs/web-platform.test.pem b/tools/wpt/certs/web-platform.test.pem index 5a763c764f..0e48d23e22 100644 --- a/tools/wpt/certs/web-platform.test.pem +++ b/tools/wpt/certs/web-platform.test.pem @@ -1,240 +1,133 @@ Certificate: Data: Version: 3 (0x2) - Serial Number: 180816 (0x2c250) + Serial Number: 905719 (0xdd1f7) Signature Algorithm: sha256WithRSAEncryption Issuer: CN=web-platform-tests Validity - Not Before: Jun 1 22:28:32 2022 GMT - Not After : Jun 1 22:28:32 2023 GMT + Not Before: Jun 2 02:45:17 2023 GMT + Not After : Jun 1 02:45:17 2024 GMT Subject: CN=web-platform.test Subject Public Key Info: Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) + RSA Public-Key: (2048 bit) Modulus: - 00:dc:55:0c:f3:4e:da:97:14:80:07:7c:d8:ba:4b: - d5:a3:da:ce:81:98:19:88:4c:55:94:8c:44:eb:ba: - 56:0a:98:88:45:4f:00:98:df:1e:c1:b7:98:f8:0a: - 65:6a:c4:4c:79:0b:75:d9:4d:1b:83:d1:3f:f8:65: - 9e:12:09:39:72:e8:ba:67:1d:f2:1f:d5:15:f6:a9: - 2d:58:53:f0:f7:30:26:d4:7e:a0:22:82:db:2d:5b: - 84:bc:c5:ac:f8:f1:34:54:05:5e:4a:a4:8a:c1:54: - df:39:d9:e7:de:95:2e:81:42:27:14:31:63:17:8f: - 14:1e:f9:9e:f4:16:e4:39:49:23:5e:d8:ac:64:7d: - 13:93:56:0f:f3:39:66:1e:7d:c8:2f:18:f3:93:91: - 
f5:2b:75:51:7f:01:90:77:15:92:32:10:58:ff:e8: - 1c:bb:86:60:ae:fa:6b:5d:8c:b1:4c:4d:02:fc:44: - 91:61:b2:d6:b9:62:03:ec:68:96:fe:e6:f0:82:9e: - 1a:38:bf:64:80:5b:43:bf:02:4f:29:4d:db:d5:8c: - 83:54:32:09:1c:de:58:dd:74:1c:ea:ba:d0:e3:68: - 2e:47:56:fc:e3:0e:3e:3d:18:2b:01:11:8b:42:8d: - 04:cb:96:26:d7:fc:7a:6b:88:69:b2:78:ed:e6:4b: - 17:4b + 00:db:41:da:58:76:96:0d:77:0d:b5:db:56:80:8c: + 8f:c0:6c:df:23:47:12:e3:0f:72:9a:6a:13:fb:c4: + 2b:cc:76:ed:20:05:26:d5:92:be:0e:61:43:1c:93: + 37:fe:c5:63:d4:eb:0d:d8:67:c0:94:b0:87:3f:35: + 00:fa:8f:77:42:d4:8c:f7:c7:9f:b4:1e:91:5f:aa: + 85:b1:db:66:e4:cd:e2:0c:ef:f5:fd:f2:78:ec:9a: + 5b:f8:ed:30:44:9a:f3:0a:0f:da:f3:df:21:c6:31: + d8:bd:32:6c:dd:42:88:50:06:71:68:89:d0:95:fc: + 05:aa:78:a1:0e:97:13:d8:87:37:53:82:16:ef:ec: + 53:21:0f:82:3b:1f:3d:ce:3a:f7:e6:b2:bc:04:59: + 37:2f:3f:4d:ff:d5:5e:3a:27:88:76:a0:82:d8:10: + b8:25:cd:2e:5d:09:01:44:8c:06:dd:f1:22:09:95: + 3f:98:32:35:8c:65:7c:c7:83:12:3e:50:d2:45:81: + c6:cd:50:49:11:ff:7b:f9:f1:81:ab:75:59:a7:78: + 8e:54:8d:5b:35:a2:4f:2e:a3:f7:b2:f4:e6:89:4b: + 05:8e:0a:b5:97:6a:c8:25:30:39:14:f4:9f:09:5a: + 87:1e:fd:0f:0f:22:53:86:74:90:ac:f9:b4:7d:da: + 91:2f Exponent: 65537 (0x10001) X509v3 extensions: X509v3 Basic Constraints: CA:FALSE X509v3 Subject Key Identifier: - 2D:00:F5:6F:CF:D8:44:52:54:9E:09:2E:F7:5B:46:88:BC:F6:52:48 + AF:6B:74:D4:D8:85:48:A2:50:B9:E3:9C:2D:1A:91:A9:94:75:60:A5 X509v3 Authority Key Identifier: - keyid:61:AC:F3:9C:B6:48:B9:0A:CA:9F:2F:51:97:0C:8C:5B:D6:73:13:FB + keyid:66:D9:96:67:2C:0F:BA:46:72:93:D2:AE:35:E7:B4:40:AA:88:E3:4A X509v3 Key Usage: Digital Signature, Non Repudiation, Key Encipherment X509v3 Extended Key Usage: TLS Web Server Authentication X509v3 Subject Alternative Name: - DNS:web-platform.test, DNS:op3.web-platform.test, DNS:op4.web-platform.test, DNS:op7.web-platform.test, DNS:op6.web-platform.test, DNS:op1.web-platform.test, DNS:op2.web-platform.test, DNS:www.web-platform.test, DNS:not-web-platform.test, DNS:op9.web-platform.test, 
DNS:op5.web-platform.test, DNS:op8.web-platform.test, DNS:op24.web-platform.test, DNS:op89.web-platform.test, DNS:op32.web-platform.test, DNS:op21.web-platform.test, DNS:op43.web-platform.test, DNS:op45.web-platform.test, DNS:op49.web-platform.test, DNS:op68.web-platform.test, DNS:op44.web-platform.test, DNS:op19.web-platform.test, DNS:op77.web-platform.test, DNS:op20.web-platform.test, DNS:op47.web-platform.test, DNS:op15.web-platform.test, DNS:op25.web-platform.test, DNS:op92.web-platform.test, DNS:op63.web-platform.test, DNS:op79.web-platform.test, DNS:op97.web-platform.test, DNS:op50.web-platform.test, DNS:op67.web-platform.test, DNS:op11.web-platform.test, DNS:op85.web-platform.test, DNS:op53.web-platform.test, DNS:op37.web-platform.test, DNS:op35.web-platform.test, DNS:op62.web-platform.test, DNS:op69.web-platform.test, DNS:op42.web-platform.test, DNS:op27.web-platform.test, DNS:op94.web-platform.test, DNS:op14.web-platform.test, DNS:op91.web-platform.test, DNS:op83.web-platform.test, DNS:op95.web-platform.test, DNS:op31.web-platform.test, DNS:op99.web-platform.test, DNS:op86.web-platform.test, DNS:op55.web-platform.test, DNS:op58.web-platform.test, DNS:op23.web-platform.test, DNS:op52.web-platform.test, DNS:op22.web-platform.test, DNS:op51.web-platform.test, DNS:op60.web-platform.test, DNS:op40.web-platform.test, DNS:op93.web-platform.test, DNS:www2.web-platform.test, DNS:op81.web-platform.test, DNS:op90.web-platform.test, DNS:op16.web-platform.test, DNS:op78.web-platform.test, DNS:op26.web-platform.test, DNS:op17.web-platform.test, DNS:op61.web-platform.test, DNS:op75.web-platform.test, DNS:op73.web-platform.test, DNS:op76.web-platform.test, DNS:op38.web-platform.test, DNS:op72.web-platform.test, DNS:op80.web-platform.test, DNS:op64.web-platform.test, DNS:op48.web-platform.test, DNS:op33.web-platform.test, DNS:op54.web-platform.test, DNS:op13.web-platform.test, DNS:op36.web-platform.test, DNS:op57.web-platform.test, DNS:op10.web-platform.test, 
DNS:op88.web-platform.test, DNS:op70.web-platform.test, DNS:op34.web-platform.test, DNS:op39.web-platform.test, DNS:op66.web-platform.test, DNS:op56.web-platform.test, DNS:op98.web-platform.test, DNS:op71.web-platform.test, DNS:op46.web-platform.test, DNS:op96.web-platform.test, DNS:op41.web-platform.test, DNS:op29.web-platform.test, DNS:op59.web-platform.test, DNS:op12.web-platform.test, DNS:op65.web-platform.test, DNS:op30.web-platform.test, DNS:op82.web-platform.test, DNS:op84.web-platform.test, DNS:www1.web-platform.test, DNS:op87.web-platform.test, DNS:op18.web-platform.test, DNS:op74.web-platform.test, DNS:op28.web-platform.test, DNS:op2.not-web-platform.test, DNS:www.www.web-platform.test, DNS:op3.not-web-platform.test, DNS:op8.not-web-platform.test, DNS:op1.not-web-platform.test, DNS:op7.not-web-platform.test, DNS:op5.not-web-platform.test, DNS:op9.not-web-platform.test, DNS:op6.not-web-platform.test, DNS:www.not-web-platform.test, DNS:op4.not-web-platform.test, DNS:op30.not-web-platform.test, DNS:op38.not-web-platform.test, DNS:op62.not-web-platform.test, DNS:op54.not-web-platform.test, DNS:op61.not-web-platform.test, DNS:op26.not-web-platform.test, DNS:op92.not-web-platform.test, DNS:op71.not-web-platform.test, DNS:op73.not-web-platform.test, DNS:op52.not-web-platform.test, DNS:op43.not-web-platform.test, DNS:op10.not-web-platform.test, DNS:op36.not-web-platform.test, DNS:op25.not-web-platform.test, DNS:op97.not-web-platform.test, DNS:op75.not-web-platform.test, DNS:op82.not-web-platform.test, DNS:op46.not-web-platform.test, DNS:op99.not-web-platform.test, DNS:op98.not-web-platform.test, DNS:op33.not-web-platform.test, DNS:op64.not-web-platform.test, DNS:op29.not-web-platform.test, DNS:op79.not-web-platform.test, DNS:op23.not-web-platform.test, DNS:op83.not-web-platform.test, DNS:op76.not-web-platform.test, DNS:op78.not-web-platform.test, DNS:op77.not-web-platform.test, DNS:op85.not-web-platform.test, DNS:op86.not-web-platform.test, 
DNS:op72.not-web-platform.test, DNS:www.www2.web-platform.test, DNS:op70.not-web-platform.test, DNS:www1.www.web-platform.test, DNS:op11.not-web-platform.test, DNS:op45.not-web-platform.test, DNS:op89.not-web-platform.test, DNS:op40.not-web-platform.test, DNS:op55.not-web-platform.test, DNS:op22.not-web-platform.test, DNS:op66.not-web-platform.test, DNS:op50.not-web-platform.test, DNS:op47.not-web-platform.test, DNS:op81.not-web-platform.test, DNS:op80.not-web-platform.test, DNS:op91.not-web-platform.test, DNS:op60.not-web-platform.test, DNS:op12.not-web-platform.test, DNS:op49.not-web-platform.test, DNS:op88.not-web-platform.test, DNS:op41.not-web-platform.test, DNS:op21.not-web-platform.test, DNS:op57.not-web-platform.test, DNS:op24.not-web-platform.test, DNS:op51.not-web-platform.test, DNS:op63.not-web-platform.test, DNS:op16.not-web-platform.test, DNS:www2.www.web-platform.test, DNS:op17.not-web-platform.test, DNS:op39.not-web-platform.test, DNS:op48.not-web-platform.test, DNS:op37.not-web-platform.test, DNS:op95.not-web-platform.test, DNS:op68.not-web-platform.test, DNS:op96.not-web-platform.test, DNS:op94.not-web-platform.test, DNS:op35.not-web-platform.test, DNS:op69.not-web-platform.test, DNS:op93.not-web-platform.test, DNS:op32.not-web-platform.test, DNS:www.www1.web-platform.test, DNS:op59.not-web-platform.test, DNS:op67.not-web-platform.test, DNS:op19.not-web-platform.test, DNS:op74.not-web-platform.test, DNS:op42.not-web-platform.test, DNS:op14.not-web-platform.test, DNS:op27.not-web-platform.test, DNS:www2.not-web-platform.test, DNS:op58.not-web-platform.test, DNS:op34.not-web-platform.test, DNS:op56.not-web-platform.test, DNS:www1.not-web-platform.test, DNS:op15.not-web-platform.test, DNS:op90.not-web-platform.test, DNS:op28.not-web-platform.test, DNS:op53.not-web-platform.test, DNS:op18.not-web-platform.test, DNS:op44.not-web-platform.test, DNS:op84.not-web-platform.test, DNS:op87.not-web-platform.test, DNS:op65.not-web-platform.test, 
DNS:op31.not-web-platform.test, DNS:op20.not-web-platform.test, DNS:op13.not-web-platform.test, DNS:www1.www1.web-platform.test, DNS:www1.www2.web-platform.test, DNS:www2.www2.web-platform.test, DNS:www2.www1.web-platform.test, DNS:www.www.not-web-platform.test, DNS:www.www1.not-web-platform.test, DNS:xn--lve-6lad.web-platform.test, DNS:www2.www.not-web-platform.test, DNS:www.www2.not-web-platform.test, DNS:www1.www.not-web-platform.test, DNS:www2.www1.not-web-platform.test, DNS:www1.www1.not-web-platform.test, DNS:www2.www2.not-web-platform.test, DNS:www1.www2.not-web-platform.test, DNS:xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.www.web-platform.test, DNS:www.xn--lve-6lad.web-platform.test, DNS:www2.xn--lve-6lad.web-platform.test, DNS:www1.xn--lve-6lad.web-platform.test, DNS:xn--lve-6lad.www1.web-platform.test, DNS:xn--lve-6lad.www2.web-platform.test, DNS:www.xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.www.not-web-platform.test, DNS:xn--lve-6lad.www2.not-web-platform.test, DNS:xn--lve-6lad.www1.not-web-platform.test, DNS:www1.xn--lve-6lad.not-web-platform.test, DNS:www2.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www.web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www.not-web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.not-web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.not-web-platform.test, 
DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test + DNS:web-platform.test, DNS:not-web-platform.test, DNS:www.web-platform.test, DNS:www1.web-platform.test, DNS:www2.web-platform.test, DNS:www.not-web-platform.test, DNS:www.www.web-platform.test, DNS:www2.not-web-platform.test, DNS:www1.not-web-platform.test, DNS:www.www1.web-platform.test, DNS:www1.www.web-platform.test, DNS:www2.www.web-platform.test, DNS:www.www2.web-platform.test, DNS:www2.www1.web-platform.test, DNS:www2.www2.web-platform.test, DNS:www1.www1.web-platform.test, DNS:www1.www2.web-platform.test, DNS:www.www.not-web-platform.test, DNS:www.www2.not-web-platform.test, DNS:www.www1.not-web-platform.test, DNS:xn--lve-6lad.web-platform.test, DNS:www1.www.not-web-platform.test, DNS:www2.www.not-web-platform.test, DNS:www2.www1.not-web-platform.test, DNS:www1.www2.not-web-platform.test, DNS:www1.www1.not-web-platform.test, DNS:www2.www2.not-web-platform.test, DNS:www.xn--lve-6lad.web-platform.test, DNS:xn--lve-6lad.www.web-platform.test, DNS:xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.www2.web-platform.test, DNS:xn--lve-6lad.www1.web-platform.test, DNS:www2.xn--lve-6lad.web-platform.test, DNS:www1.xn--lve-6lad.web-platform.test, DNS:www.xn--lve-6lad.not-web-platform.test, DNS:xn--lve-6lad.www.not-web-platform.test, DNS:xn--lve-6lad.www2.not-web-platform.test, DNS:xn--lve-6lad.www1.not-web-platform.test, DNS:www1.xn--lve-6lad.not-web-platform.test, DNS:www2.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.web-platform.test, 
DNS:xn--n8j6ds53lwwkrqhv28a.www.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.web-platform.test, DNS:xn--lve-6lad.xn--lve-6lad.not-web-platform.test, DNS:www.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www.not-web-platform.test, DNS:www1.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www1.not-web-platform.test, DNS:www2.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.www2.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.web-platform.test, DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--lve-6lad.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--lve-6lad.not-web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.web-platform.test, DNS:xn--n8j6ds53lwwkrqhv28a.xn--n8j6ds53lwwkrqhv28a.not-web-platform.test Signature Algorithm: sha256WithRSAEncryption - b2:47:7b:f2:69:31:9b:16:42:fb:e0:ae:0b:d6:f9:7d:ae:c4: - f9:93:4e:8e:1c:35:ba:e3:b0:4e:54:11:74:b2:6c:91:88:f1: - 03:aa:a1:5d:7f:06:2c:64:ec:5e:d0:47:72:92:4c:0c:ef:d6: - 70:54:76:dc:a8:42:62:d1:e6:98:e6:fd:0e:2c:0d:8d:7e:36: - 2e:b3:9d:68:65:77:dd:74:e8:ed:0b:f5:60:f8:86:90:6c:2c: - 32:f6:9f:a6:47:20:75:4f:d9:a2:9a:e5:a0:41:2b:ea:44:78: - 27:62:14:63:98:66:49:40:c2:46:ac:82:c1:a8:67:18:d9:ff: - 74:ca:58:98:0b:c3:fa:46:7c:ea:28:9e:12:0f:ef:c3:b2:9e: - d8:99:12:7f:ae:8f:b1:f4:12:92:44:df:63:a3:ce:e1:ab:fd: - 34:8b:a4:7a:66:39:2a:76:25:4a:46:da:76:fc:05:92:57:f6: - fd:89:d6:89:27:b5:e9:ce:4d:1c:08:54:72:ff:31:63:da:2b: - 7d:08:28:91:15:37:0e:63:fb:a7:b2:ee:52:47:30:58:0c:be: - 7c:6e:2b:13:64:fe:e1:8d:5a:e4:f6:56:54:78:f3:d0:57:b2: - 3d:68:32:2f:c4:7c:e6:b4:b8:66:01:c1:2b:f6:00:09:4f:2d: - 28:24:07:38 + 
20:b8:e5:97:9b:08:4c:bf:34:a6:4c:4b:7c:97:b6:93:5d:29: + 1a:f0:65:66:19:8b:77:5e:7b:e5:33:96:68:31:2e:40:5d:47: + 47:56:ba:75:18:3a:e8:8a:12:8b:2f:a7:30:f0:91:82:6c:88: + c3:31:7d:07:aa:9b:78:98:4c:c7:fd:e9:28:89:c0:57:2c:ed: + 28:2a:79:b7:6d:bf:7a:6f:15:e4:ae:4e:1c:56:1e:27:78:5a: + a5:7f:78:b6:e8:d8:38:79:cc:32:53:9c:0e:8f:fe:a2:c2:82: + 27:b1:d3:f9:ee:10:9b:5a:42:26:3d:ee:05:b3:15:9f:40:ce: + 8d:6a:4f:97:3b:20:37:6e:ff:65:ca:18:85:d3:77:0e:4e:fe: + b0:71:61:91:22:28:9e:d5:17:13:8b:85:1a:8b:9b:17:ff:bd: + b5:fd:81:c5:2d:de:40:cd:47:c3:a5:9e:eb:70:b3:e2:57:8a: + 46:02:22:41:a5:5f:71:30:3e:65:30:46:70:4d:67:49:3a:11: + f0:03:5c:6b:a5:ae:ca:73:b0:35:91:89:63:9a:65:c9:65:ff: + 43:59:5b:af:b6:b9:2e:35:d6:47:26:d1:d4:7b:22:c1:79:28: + cb:8c:a3:d2:d3:1d:b6:dc:39:de:f1:ba:b0:10:91:bd:16:e9: + 53:cd:77:e4 -----BEGIN CERTIFICATE----- -MIIgvDCCH6SgAwIBAgIDAsJQMA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl -Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMjA2MDEyMjI4MzJaFw0yMzA2MDEyMjI4MzJa +MIIMsjCCC5qgAwIBAgIDDdH3MA0GCSqGSIb3DQEBCwUAMB0xGzAZBgNVBAMMEndl +Yi1wbGF0Zm9ybS10ZXN0czAeFw0yMzA2MDIwMjQ1MTdaFw0yNDA2MDEwMjQ1MTda MBwxGjAYBgNVBAMMEXdlYi1wbGF0Zm9ybS50ZXN0MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEA3FUM807alxSAB3zYukvVo9rOgZgZiExVlIxE67pWCpiI -RU8AmN8ewbeY+AplasRMeQt12U0bg9E/+GWeEgk5cui6Zx3yH9UV9qktWFPw9zAm -1H6gIoLbLVuEvMWs+PE0VAVeSqSKwVTfOdnn3pUugUInFDFjF48UHvme9BbkOUkj -XtisZH0Tk1YP8zlmHn3ILxjzk5H1K3VRfwGQdxWSMhBY/+gcu4ZgrvprXYyxTE0C -/ESRYbLWuWID7GiW/ubwgp4aOL9kgFtDvwJPKU3b1YyDVDIJHN5Y3XQc6rrQ42gu -R1b84w4+PRgrARGLQo0Ey5Ym1/x6a4hpsnjt5ksXSwIDAQABo4IeBDCCHgAwCQYD -VR0TBAIwADAdBgNVHQ4EFgQULQD1b8/YRFJUngku91tGiLz2UkgwHwYDVR0jBBgw -FoAUYazznLZIuQrKny9RlwyMW9ZzE/swCwYDVR0PBAQDAgXgMBMGA1UdJQQMMAoG -CCsGAQUFBwMBMIIdjwYDVR0RBIIdhjCCHYKCEXdlYi1wbGF0Zm9ybS50ZXN0ghVv -cDMud2ViLXBsYXRmb3JtLnRlc3SCFW9wNC53ZWItcGxhdGZvcm0udGVzdIIVb3A3 -LndlYi1wbGF0Zm9ybS50ZXN0ghVvcDYud2ViLXBsYXRmb3JtLnRlc3SCFW9wMS53 -ZWItcGxhdGZvcm0udGVzdIIVb3AyLndlYi1wbGF0Zm9ybS50ZXN0ghV3d3cud2Vi 
-LXBsYXRmb3JtLnRlc3SCFW5vdC13ZWItcGxhdGZvcm0udGVzdIIVb3A5LndlYi1w -bGF0Zm9ybS50ZXN0ghVvcDUud2ViLXBsYXRmb3JtLnRlc3SCFW9wOC53ZWItcGxh -dGZvcm0udGVzdIIWb3AyNC53ZWItcGxhdGZvcm0udGVzdIIWb3A4OS53ZWItcGxh -dGZvcm0udGVzdIIWb3AzMi53ZWItcGxhdGZvcm0udGVzdIIWb3AyMS53ZWItcGxh -dGZvcm0udGVzdIIWb3A0My53ZWItcGxhdGZvcm0udGVzdIIWb3A0NS53ZWItcGxh -dGZvcm0udGVzdIIWb3A0OS53ZWItcGxhdGZvcm0udGVzdIIWb3A2OC53ZWItcGxh -dGZvcm0udGVzdIIWb3A0NC53ZWItcGxhdGZvcm0udGVzdIIWb3AxOS53ZWItcGxh -dGZvcm0udGVzdIIWb3A3Ny53ZWItcGxhdGZvcm0udGVzdIIWb3AyMC53ZWItcGxh -dGZvcm0udGVzdIIWb3A0Ny53ZWItcGxhdGZvcm0udGVzdIIWb3AxNS53ZWItcGxh -dGZvcm0udGVzdIIWb3AyNS53ZWItcGxhdGZvcm0udGVzdIIWb3A5Mi53ZWItcGxh -dGZvcm0udGVzdIIWb3A2My53ZWItcGxhdGZvcm0udGVzdIIWb3A3OS53ZWItcGxh -dGZvcm0udGVzdIIWb3A5Ny53ZWItcGxhdGZvcm0udGVzdIIWb3A1MC53ZWItcGxh -dGZvcm0udGVzdIIWb3A2Ny53ZWItcGxhdGZvcm0udGVzdIIWb3AxMS53ZWItcGxh -dGZvcm0udGVzdIIWb3A4NS53ZWItcGxhdGZvcm0udGVzdIIWb3A1My53ZWItcGxh -dGZvcm0udGVzdIIWb3AzNy53ZWItcGxhdGZvcm0udGVzdIIWb3AzNS53ZWItcGxh -dGZvcm0udGVzdIIWb3A2Mi53ZWItcGxhdGZvcm0udGVzdIIWb3A2OS53ZWItcGxh -dGZvcm0udGVzdIIWb3A0Mi53ZWItcGxhdGZvcm0udGVzdIIWb3AyNy53ZWItcGxh -dGZvcm0udGVzdIIWb3A5NC53ZWItcGxhdGZvcm0udGVzdIIWb3AxNC53ZWItcGxh -dGZvcm0udGVzdIIWb3A5MS53ZWItcGxhdGZvcm0udGVzdIIWb3A4My53ZWItcGxh -dGZvcm0udGVzdIIWb3A5NS53ZWItcGxhdGZvcm0udGVzdIIWb3AzMS53ZWItcGxh -dGZvcm0udGVzdIIWb3A5OS53ZWItcGxhdGZvcm0udGVzdIIWb3A4Ni53ZWItcGxh -dGZvcm0udGVzdIIWb3A1NS53ZWItcGxhdGZvcm0udGVzdIIWb3A1OC53ZWItcGxh -dGZvcm0udGVzdIIWb3AyMy53ZWItcGxhdGZvcm0udGVzdIIWb3A1Mi53ZWItcGxh -dGZvcm0udGVzdIIWb3AyMi53ZWItcGxhdGZvcm0udGVzdIIWb3A1MS53ZWItcGxh -dGZvcm0udGVzdIIWb3A2MC53ZWItcGxhdGZvcm0udGVzdIIWb3A0MC53ZWItcGxh -dGZvcm0udGVzdIIWb3A5My53ZWItcGxhdGZvcm0udGVzdIIWd3d3Mi53ZWItcGxh -dGZvcm0udGVzdIIWb3A4MS53ZWItcGxhdGZvcm0udGVzdIIWb3A5MC53ZWItcGxh -dGZvcm0udGVzdIIWb3AxNi53ZWItcGxhdGZvcm0udGVzdIIWb3A3OC53ZWItcGxh -dGZvcm0udGVzdIIWb3AyNi53ZWItcGxhdGZvcm0udGVzdIIWb3AxNy53ZWItcGxh -dGZvcm0udGVzdIIWb3A2MS53ZWItcGxhdGZvcm0udGVzdIIWb3A3NS53ZWItcGxh 
-dGZvcm0udGVzdIIWb3A3My53ZWItcGxhdGZvcm0udGVzdIIWb3A3Ni53ZWItcGxh -dGZvcm0udGVzdIIWb3AzOC53ZWItcGxhdGZvcm0udGVzdIIWb3A3Mi53ZWItcGxh -dGZvcm0udGVzdIIWb3A4MC53ZWItcGxhdGZvcm0udGVzdIIWb3A2NC53ZWItcGxh -dGZvcm0udGVzdIIWb3A0OC53ZWItcGxhdGZvcm0udGVzdIIWb3AzMy53ZWItcGxh -dGZvcm0udGVzdIIWb3A1NC53ZWItcGxhdGZvcm0udGVzdIIWb3AxMy53ZWItcGxh -dGZvcm0udGVzdIIWb3AzNi53ZWItcGxhdGZvcm0udGVzdIIWb3A1Ny53ZWItcGxh -dGZvcm0udGVzdIIWb3AxMC53ZWItcGxhdGZvcm0udGVzdIIWb3A4OC53ZWItcGxh -dGZvcm0udGVzdIIWb3A3MC53ZWItcGxhdGZvcm0udGVzdIIWb3AzNC53ZWItcGxh -dGZvcm0udGVzdIIWb3AzOS53ZWItcGxhdGZvcm0udGVzdIIWb3A2Ni53ZWItcGxh -dGZvcm0udGVzdIIWb3A1Ni53ZWItcGxhdGZvcm0udGVzdIIWb3A5OC53ZWItcGxh -dGZvcm0udGVzdIIWb3A3MS53ZWItcGxhdGZvcm0udGVzdIIWb3A0Ni53ZWItcGxh -dGZvcm0udGVzdIIWb3A5Ni53ZWItcGxhdGZvcm0udGVzdIIWb3A0MS53ZWItcGxh -dGZvcm0udGVzdIIWb3AyOS53ZWItcGxhdGZvcm0udGVzdIIWb3A1OS53ZWItcGxh -dGZvcm0udGVzdIIWb3AxMi53ZWItcGxhdGZvcm0udGVzdIIWb3A2NS53ZWItcGxh -dGZvcm0udGVzdIIWb3AzMC53ZWItcGxhdGZvcm0udGVzdIIWb3A4Mi53ZWItcGxh -dGZvcm0udGVzdIIWb3A4NC53ZWItcGxhdGZvcm0udGVzdIIWd3d3MS53ZWItcGxh -dGZvcm0udGVzdIIWb3A4Ny53ZWItcGxhdGZvcm0udGVzdIIWb3AxOC53ZWItcGxh -dGZvcm0udGVzdIIWb3A3NC53ZWItcGxhdGZvcm0udGVzdIIWb3AyOC53ZWItcGxh -dGZvcm0udGVzdIIZb3AyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIZd3d3Lnd3dy53 -ZWItcGxhdGZvcm0udGVzdIIZb3AzLm5vdC13ZWItcGxhdGZvcm0udGVzdIIZb3A4 -Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIZb3AxLm5vdC13ZWItcGxhdGZvcm0udGVz -dIIZb3A3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIZb3A1Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIZb3A5Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIZb3A2Lm5vdC13ZWIt -cGxhdGZvcm0udGVzdIIZd3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIZb3A0Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AzMC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wMzgubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDYyLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A1NC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNjEubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDI2Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A5 -Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNzEubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDczLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A1Mi5ub3Qtd2ViLXBs 
-YXRmb3JtLnRlc3SCGm9wNDMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDEwLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AzNi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wMjUubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDk3Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A3NS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wODIubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDQ2Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A5 -OS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wOTgubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDMzLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A2NC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wMjkubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDc5Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AyMy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wODMubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDc2Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A3OC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNzcubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDg1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4 -Ni5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNzIubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghp3d3cud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIab3A3MC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGnd3dzEud3d3LndlYi1wbGF0Zm9ybS50ZXN0ghpvcDExLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3A0NS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wODkubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDQwLm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A1NS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMjIubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDY2Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A1 -MC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNDcubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDgxLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4MC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wOTEubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDYwLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AxMi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNDkubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDg4Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A0MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMjEubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDU3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3Ay -NC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNTEubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDYzLm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AxNi5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGnd3dzIud3d3LndlYi1wbGF0Zm9ybS50ZXN0ghpvcDE3Lm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AzOS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC 
-Gm9wNDgubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDM3Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A5NS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNjgubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDk2Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A5 -NC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMzUubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDY5Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A5My5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wMzIubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghp3d3cud3d3 -MS53ZWItcGxhdGZvcm0udGVzdIIab3A1OS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wNjcubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDE5Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A3NC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wNDIubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDE0Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3Ay -Ny5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGnd3dzIubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDU4Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AzNC5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wNTYubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghp3d3cxLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIab3AxNS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC -Gm9wOTAubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDI4Lm5vdC13ZWItcGxhdGZv -cm0udGVzdIIab3A1My5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wMTgubm90LXdl -Yi1wbGF0Zm9ybS50ZXN0ghpvcDQ0Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3A4 -NC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGm9wODcubm90LXdlYi1wbGF0Zm9ybS50 -ZXN0ghpvcDY1Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIab3AzMS5ub3Qtd2ViLXBs -YXRmb3JtLnRlc3SCGm9wMjAubm90LXdlYi1wbGF0Zm9ybS50ZXN0ghpvcDEzLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIbd3d3MS53d3cxLndlYi1wbGF0Zm9ybS50ZXN0 -ght3d3cxLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCG3d3dzIud3d3Mi53ZWItcGxh -dGZvcm0udGVzdIIbd3d3Mi53d3cxLndlYi1wbGF0Zm9ybS50ZXN0gh13d3cud3d3 -Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIed3d3Lnd3dzEubm90LXdlYi1wbGF0Zm9y -bS50ZXN0gh54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCHnd3dzIud3d3 -Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIed3d3Lnd3dzIubm90LXdlYi1wbGF0Zm9y -bS50ZXN0gh53d3cxLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzIud3d3 -MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzEud3d3MS5ub3Qtd2ViLXBsYXRm -b3JtLnRlc3SCH3d3dzIud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzEu -d3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCInhuLS1sdmUtNmxhZC5ub3Qtd2Vi 
-LXBsYXRmb3JtLnRlc3SCInhuLS1sdmUtNmxhZC53d3cud2ViLXBsYXRmb3JtLnRl -c3SCInd3dy54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCI3d3dzIueG4t -LWx2ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0giN3d3cxLnhuLS1sdmUtNmxhZC53 -ZWItcGxhdGZvcm0udGVzdIIjeG4tLWx2ZS02bGFkLnd3dzEud2ViLXBsYXRmb3Jt -LnRlc3SCI3huLS1sdmUtNmxhZC53d3cyLndlYi1wbGF0Zm9ybS50ZXN0giZ3d3cu -eG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdIImeG4tLWx2ZS02bGFk -Lnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCJ3huLS1sdmUtNmxhZC53d3cyLm5v -dC13ZWItcGxhdGZvcm0udGVzdIIneG4tLWx2ZS02bGFkLnd3dzEubm90LXdlYi1w -bGF0Zm9ybS50ZXN0gid3d3cxLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3Jt -LnRlc3SCJ3d3dzIueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdIIp -eG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCK3huLS1s -dmUtNmxhZC54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCLXhuLS1uOGo2 -ZHM1M2x3d2tycWh2MjhhLnd3dy53ZWItcGxhdGZvcm0udGVzdIItd3d3LnhuLS1u -OGo2ZHM1M2x3d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gi14bi0tbjhqNmRz -NTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1uOGo2ZHM1 -M2x3d2tycWh2MjhhLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCLnd3dzEueG4tLW44 -ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1uOGo2ZHM1 -M2x3d2tycWh2MjhhLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCLnd3dzIueG4tLW44 -ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCL3huLS1sdmUtNmxh -ZC54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF4bi0tbjhqNmRz -NTNsd3drcnFodjI4YS53d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF3d3cueG4t -LW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjJ4bi0t -bjhqNmRzNTNsd3drcnFodjI4YS53d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIy -d3d3MS54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRl -c3SCMnd3dzIueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9y -bS50ZXN0gjJ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cxLm5vdC13ZWItcGxh -dGZvcm0udGVzdII2eG4tLWx2ZS02bGFkLnhuLS1uOGo2ZHM1M2x3d2tycWh2Mjhh -LndlYi1wbGF0Zm9ybS50ZXN0gjZ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS54bi0t -bHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCOnhuLS1uOGo2ZHM1M2x3d2tycWh2 -MjhhLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCOnhuLS1sdmUt 
-NmxhZC54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRl -c3SCQXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1uOGo2ZHM1M2x3d2tycWh2 -MjhhLndlYi1wbGF0Zm9ybS50ZXN0gkV4bi0tbjhqNmRzNTNsd3drcnFodjI4YS54 -bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3QwDQYJ -KoZIhvcNAQELBQADggEBALJHe/JpMZsWQvvgrgvW+X2uxPmTTo4cNbrjsE5UEXSy -bJGI8QOqoV1/Bixk7F7QR3KSTAzv1nBUdtyoQmLR5pjm/Q4sDY1+Ni6znWhld910 -6O0L9WD4hpBsLDL2n6ZHIHVP2aKa5aBBK+pEeCdiFGOYZklAwkasgsGoZxjZ/3TK -WJgLw/pGfOoonhIP78OyntiZEn+uj7H0EpJE32OjzuGr/TSLpHpmOSp2JUpG2nb8 -BZJX9v2J1okntenOTRwIVHL/MWPaK30IKJEVNw5j+6ey7lJHMFgMvnxuKxNk/uGN -WuT2VlR489BXsj1oMi/EfOa0uGYBwSv2AAlPLSgkBzg= +AAOCAQ8AMIIBCgKCAQEA20HaWHaWDXcNtdtWgIyPwGzfI0cS4w9ymmoT+8QrzHbt +IAUm1ZK+DmFDHJM3/sVj1OsN2GfAlLCHPzUA+o93QtSM98eftB6RX6qFsdtm5M3i +DO/1/fJ47Jpb+O0wRJrzCg/a898hxjHYvTJs3UKIUAZxaInQlfwFqnihDpcT2Ic3 +U4IW7+xTIQ+COx89zjr35rK8BFk3Lz9N/9VeOieIdqCC2BC4Jc0uXQkBRIwG3fEi +CZU/mDI1jGV8x4MSPlDSRYHGzVBJEf97+fGBq3VZp3iOVI1bNaJPLqP3svTmiUsF +jgq1l2rIJTA5FPSfCVqHHv0PDyJThnSQrPm0fdqRLwIDAQABo4IJ+jCCCfYwCQYD +VR0TBAIwADAdBgNVHQ4EFgQUr2t01NiFSKJQueOcLRqRqZR1YKUwHwYDVR0jBBgw +FoAUZtmWZywPukZyk9KuNee0QKqI40owCwYDVR0PBAQDAgXgMBMGA1UdJQQMMAoG +CCsGAQUFBwMBMIIJhQYDVR0RBIIJfDCCCXiCEXdlYi1wbGF0Zm9ybS50ZXN0ghVu +b3Qtd2ViLXBsYXRmb3JtLnRlc3SCFXd3dy53ZWItcGxhdGZvcm0udGVzdIIWd3d3 +MS53ZWItcGxhdGZvcm0udGVzdIIWd3d3Mi53ZWItcGxhdGZvcm0udGVzdIIZd3d3 +Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIZd3d3Lnd3dy53ZWItcGxhdGZvcm0udGVz +dIIad3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCGnd3dzEubm90LXdlYi1wbGF0 +Zm9ybS50ZXN0ghp3d3cud3d3MS53ZWItcGxhdGZvcm0udGVzdIIad3d3MS53d3cu +d2ViLXBsYXRmb3JtLnRlc3SCGnd3dzIud3d3LndlYi1wbGF0Zm9ybS50ZXN0ghp3 +d3cud3d3Mi53ZWItcGxhdGZvcm0udGVzdIIbd3d3Mi53d3cxLndlYi1wbGF0Zm9y +bS50ZXN0ght3d3cyLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCG3d3dzEud3d3MS53 +ZWItcGxhdGZvcm0udGVzdIIbd3d3MS53d3cyLndlYi1wbGF0Zm9ybS50ZXN0gh13 +d3cud3d3Lm5vdC13ZWItcGxhdGZvcm0udGVzdIIed3d3Lnd3dzIubm90LXdlYi1w +bGF0Zm9ybS50ZXN0gh53d3cud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCHnhu 
+LS1sdmUtNmxhZC53ZWItcGxhdGZvcm0udGVzdIIed3d3MS53d3cubm90LXdlYi1w +bGF0Zm9ybS50ZXN0gh53d3cyLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3 +dzIud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCH3d3dzEud3d3Mi5ub3Qtd2Vi +LXBsYXRmb3JtLnRlc3SCH3d3dzEud3d3MS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SC +H3d3dzIud3d3Mi5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCInd3dy54bi0tbHZlLTZs +YWQud2ViLXBsYXRmb3JtLnRlc3SCInhuLS1sdmUtNmxhZC53d3cud2ViLXBsYXRm +b3JtLnRlc3SCInhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCI3hu +LS1sdmUtNmxhZC53d3cyLndlYi1wbGF0Zm9ybS50ZXN0giN4bi0tbHZlLTZsYWQu +d3d3MS53ZWItcGxhdGZvcm0udGVzdIIjd3d3Mi54bi0tbHZlLTZsYWQud2ViLXBs +YXRmb3JtLnRlc3SCI3d3dzEueG4tLWx2ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0 +giZ3d3cueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0udGVzdIImeG4tLWx2 +ZS02bGFkLnd3dy5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCJ3huLS1sdmUtNmxhZC53 +d3cyLm5vdC13ZWItcGxhdGZvcm0udGVzdIIneG4tLWx2ZS02bGFkLnd3dzEubm90 +LXdlYi1wbGF0Zm9ybS50ZXN0gid3d3cxLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBs +YXRmb3JtLnRlc3SCJ3d3dzIueG4tLWx2ZS02bGFkLm5vdC13ZWItcGxhdGZvcm0u +dGVzdIIpeG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SC +K3huLS1sdmUtNmxhZC54bi0tbHZlLTZsYWQud2ViLXBsYXRmb3JtLnRlc3SCLXd3 +dy54bi0tbjhqNmRzNTNsd3drcnFodjI4YS53ZWItcGxhdGZvcm0udGVzdIIteG4t +LW44ajZkczUzbHd3a3JxaHYyOGEud3d3LndlYi1wbGF0Zm9ybS50ZXN0gi14bi0t +bjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCLnd3dzIu +eG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCLnd3dzEu +eG4tLW44ajZkczUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1u +OGo2ZHM1M2x3d2tycWh2MjhhLnd3dzEud2ViLXBsYXRmb3JtLnRlc3SCLnhuLS1u +OGo2ZHM1M2x3d2tycWh2MjhhLnd3dzIud2ViLXBsYXRmb3JtLnRlc3SCL3huLS1s +dmUtNmxhZC54bi0tbHZlLTZsYWQubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF3d3cu +eG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1wbGF0Zm9ybS50ZXN0gjF4 +bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cubm90LXdlYi1wbGF0Zm9ybS50ZXN0 +gjJ3d3cxLnhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLm5vdC13ZWItcGxhdGZvcm0u +dGVzdIIyeG4tLW44ajZkczUzbHd3a3JxaHYyOGEud3d3MS5ub3Qtd2ViLXBsYXRm +b3JtLnRlc3SCMnd3dzIueG4tLW44ajZkczUzbHd3a3JxaHYyOGEubm90LXdlYi1w 
+bGF0Zm9ybS50ZXN0gjJ4bi0tbjhqNmRzNTNsd3drcnFodjI4YS53d3cyLm5vdC13 +ZWItcGxhdGZvcm0udGVzdII2eG4tLW44ajZkczUzbHd3a3JxaHYyOGEueG4tLWx2 +ZS02bGFkLndlYi1wbGF0Zm9ybS50ZXN0gjZ4bi0tbHZlLTZsYWQueG4tLW44ajZk +czUzbHd3a3JxaHYyOGEud2ViLXBsYXRmb3JtLnRlc3SCOnhuLS1sdmUtNmxhZC54 +bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRlc3SCOnhu +LS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1sdmUtNmxhZC5ub3Qtd2ViLXBsYXRm +b3JtLnRlc3SCQXhuLS1uOGo2ZHM1M2x3d2tycWh2MjhhLnhuLS1uOGo2ZHM1M2x3 +d2tycWh2MjhhLndlYi1wbGF0Zm9ybS50ZXN0gkV4bi0tbjhqNmRzNTNsd3drcnFo +djI4YS54bi0tbjhqNmRzNTNsd3drcnFodjI4YS5ub3Qtd2ViLXBsYXRmb3JtLnRl +c3QwDQYJKoZIhvcNAQELBQADggEBACC45ZebCEy/NKZMS3yXtpNdKRrwZWYZi3de +e+UzlmgxLkBdR0dWunUYOuiKEosvpzDwkYJsiMMxfQeqm3iYTMf96SiJwFcs7Sgq +ebdtv3pvFeSuThxWHid4WqV/eLbo2Dh5zDJTnA6P/qLCgiex0/nuEJtaQiY97gWz +FZ9Azo1qT5c7IDdu/2XKGIXTdw5O/rBxYZEiKJ7VFxOLhRqLmxf/vbX9gcUt3kDN +R8Olnutws+JXikYCIkGlX3EwPmUwRnBNZ0k6EfADXGulrspzsDWRiWOaZcll/0NZ +W6+2uS411kcm0dR7IsF5KMuMo9LTHbbcOd7xurAQkb0W6VPNd+Q= -----END CERTIFICATE----- diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 8f45901836..ed29c43e4c 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -1159,12 +1159,84 @@ "Good parameters: X448 bits (pkcs8, buffer(72), {name: X448}, false, [deriveBits])", "Good parameters: X448 bits (jwk, object(crv, d, x, kty), {name: X448}, false, [deriveBits])" ], - "okp_importKey_failures_Ed25519.https.any.html": false, - "okp_importKey_failures_Ed25519.https.any.worker.html": false, + "okp_importKey_failures_Ed25519.https.any.html": [ + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify, verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify, verify])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: 
Ed25519}, false, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign, sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])" + ], + "okp_importKey_failures_Ed25519.https.any.worker.html": [ + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify, verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify, verify])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign, sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])" + ], "okp_importKey_failures_Ed448.https.any.html": false, "okp_importKey_failures_Ed448.https.any.worker.html": false, - "okp_importKey_failures_X25519.https.any.html": false, - "okp_importKey_failures_X25519.https.any.worker.html": false, + "okp_importKey_failures_X25519.https.any.html": [ + "Bad key length: importKey(raw, {name: X25519}, true, [])", + "Bad key length: importKey(raw, {name: X25519}, false, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, true, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, false, [])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Bad key length: 
importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])" + ], + "okp_importKey_failures_X25519.https.any.worker.html": [ + "Bad key length: importKey(raw, {name: X25519}, true, [])", + "Bad key length: importKey(raw, {name: X25519}, false, [])", + "Bad key length: importKey(jwk (public) , 
{name: X25519}, true, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, false, [])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, 
[deriveKey, deriveBits, deriveKey, deriveBits])" + ], "okp_importKey_failures_X448.https.any.html": false, "okp_importKey_failures_X448.https.any.worker.html": false }, @@ -1843,14 +1915,8 @@ "bad-buffers-and-views.any.worker.html": true, "construct-byob-request.any.html": true, "construct-byob-request.any.worker.html": true, - "general.any.html": [ - "ReadableStream with byte source: Respond to multiple pull() by separate enqueue()", - "ReadableStream with byte source: enqueue() discards auto-allocated BYOB request" - ], - "general.any.worker.html": [ - "ReadableStream with byte source: Respond to multiple pull() by separate enqueue()", - "ReadableStream with byte source: enqueue() discards auto-allocated BYOB request" - ], + "general.any.html": true, + "general.any.worker.html": true, "non-transferable-buffers.any.html": true, "non-transferable-buffers.any.worker.html": true, "enqueue-with-detached-buffer.window.html": false, @@ -1862,10 +1928,7 @@ "enqueue-with-detached-buffer.any.worker.html": true }, "readable-streams": { - "async-iterator.any.html": [ - "next() that succeeds; return()", - "next() that succeeds; return() [no awaiting]" - ], + "async-iterator.any.html": true, "bad-strategies.any.html": true, "bad-strategies.any.worker.html": true, "bad-underlying-sources.any.html": true, @@ -1892,10 +1955,7 @@ "tee.any.worker.html": true, "templated.any.html": true, "templated.any.worker.html": true, - "async-iterator.any.worker.html": [ - "next() that succeeds; return()", - "next() that succeeds; return() [no awaiting]" - ], + "async-iterator.any.worker.html": true, "cross-realm-crash.window.html": false }, "transform-streams": { @@ -1929,12 +1989,8 @@ "bad-underlying-sinks.any.worker.html": true, "byte-length-queuing-strategy.any.html": true, "byte-length-queuing-strategy.any.worker.html": true, - "close.any.html": [ - "when close is called on a WritableStream in waiting state, ready should be fulfilled immediately even if close takes a long time" - ], - 
"close.any.worker.html": [ - "when close is called on a WritableStream in waiting state, ready should be fulfilled immediately even if close takes a long time" - ], + "close.any.html": true, + "close.any.worker.html": true, "constructor.any.html": true, "constructor.any.worker.html": true, "count-queuing-strategy.any.html": true, @@ -3729,7 +3785,9 @@ ], "request-referrer.any.html": false, "request-referrer.any.worker.html": false, - "response-null-body.any.html": true + "response-null-body.any.html": { + "ignore": true + } }, "response": { "json.any.html": true, @@ -5480,18 +5538,9 @@ "Serializing a non-serializable platform object fails", "An object whose interface is deleted from the global must still deserialize", "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" + "Transferring OOB TypedArray throws" ], "structured-clone.any.worker.html": [ "Blob basic", @@ -5517,18 +5566,9 @@ "Serializing a non-serializable platform object fails", "An object whose interface is deleted from the global must still deserialize", "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - 
"Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" + "Transferring OOB TypedArray throws" ] }, "dynamic-markup-insertion": { @@ -5659,95 +5699,8 @@ }, "infrastructure": { "safe-passing-of-structured-data": { - "messagechannel.any.html": [ - "Blob basic", - "Blob unpaired high surrogate (invalid utf-8)", - "Blob unpaired low surrogate (invalid utf-8)", - "Blob paired surrogates (invalid utf-8)", - "Blob empty", - "Blob NUL", - "Array Blob object, Blob basic", - "Array Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Array Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Array Blob object, Blob paired surrogates (invalid utf-8)", - "Array Blob object, Blob empty", - "Array Blob object, Blob NUL", - "Array Blob object, two Blobs", - "Object Blob object, Blob basic", - "Object Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Object Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Object Blob object, Blob paired surrogates (invalid utf-8)", - "Object Blob object, Blob empty", - "Object Blob object, Blob NUL", - "File basic", - "FileList empty", - "Array FileList object, FileList empty", - "Object FileList object, FileList empty", - "ImageData 1x1 transparent black", - "ImageData 1x1 non-transparent non-black", - "Array ImageData object, ImageData 1x1 transparent black", - "Array ImageData object, ImageData 1x1 non-transparent non-black", - "Object ImageData object, ImageData 1x1 transparent black", - "Object ImageData object, ImageData 1x1 non-transparent non-black", - "ImageBitmap 1x1 transparent black", - "ImageBitmap 1x1 non-transparent non-black", - "Array ImageBitmap object, ImageBitmap 1x1 transparent black", - "Array ImageBitmap object, ImageBitmap 1x1 transparent non-black", - "Object ImageBitmap object, ImageBitmap 1x1 transparent black", - "Object ImageBitmap object, ImageBitmap 1x1 transparent non-black", - "Serializing a 
non-serializable platform object fails", - "An object whose interface is deleted from the global must still deserialize", - "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", - "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", - "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" - ], - "messagechannel.any.worker.html": [ - "Blob basic", - "Blob unpaired high surrogate (invalid utf-8)", - "Blob unpaired low surrogate (invalid utf-8)", - "Blob paired surrogates (invalid utf-8)", - "Blob empty", - "Blob NUL", - "Array Blob object, Blob basic", - "Array Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Array Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Array Blob object, Blob paired surrogates (invalid utf-8)", - "Array Blob object, Blob empty", - "Array Blob object, Blob NUL", - "Array Blob object, two Blobs", - "Object Blob object, Blob basic", - "Object Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Object Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Object Blob object, Blob paired surrogates (invalid utf-8)", - "Object Blob object, Blob empty", - "Object Blob object, Blob NUL", - "File basic", - "Serializing a non-serializable platform object fails", - "An object whose interface is deleted from the global must still deserialize", - "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", - "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView 
throws", - "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" - ], + "messagechannel.any.html": false, + "messagechannel.any.worker.html": false, "shared-array-buffers": { "no-coop-coep.https.any.html": false, "no-coop-coep.https.any.worker.html": false, @@ -9190,4 +9143,4 @@ "media-sniff.window.html": false } } -} \ No newline at end of file +}