From c3c2b379669b17e5fdcbe5e62662404ca22c71c6 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Tue, 12 Nov 2024 19:54:47 +0900 Subject: [PATCH 01/18] fix(ext/node): add autoSelectFamily option to net.createConnection (#26661) --- ext/node/polyfills/internal/errors.ts | 26 +- ext/node/polyfills/internal/net.ts | 1 + ext/node/polyfills/internal_binding/uv.ts | 2 + ext/node/polyfills/net.ts | 490 +++++++++++++++++- tests/node_compat/config.jsonc | 2 + tests/node_compat/runner/TODO.md | 1 - tests/node_compat/test/common/index.js | 1 + .../parallel/test-net-autoselectfamily.js | 312 +++++++++++ tests/unit_node/http2_test.ts | 5 + 9 files changed, 824 insertions(+), 16 deletions(-) create mode 100644 tests/node_compat/test/parallel/test-net-autoselectfamily.js diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 5a3d4437a1..962ca86e92 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -18,7 +18,7 @@ */ import { primordials } from "ext:core/mod.js"; -const { JSONStringify, SymbolFor } = primordials; +const { JSONStringify, SafeArrayIterator, SymbolFor } = primordials; import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; import { @@ -1874,6 +1874,11 @@ export class ERR_SOCKET_CLOSED extends NodeError { super("ERR_SOCKET_CLOSED", `Socket is closed`); } } +export class ERR_SOCKET_CONNECTION_TIMEOUT extends NodeError { + constructor() { + super("ERR_SOCKET_CONNECTION_TIMEOUT", `Socket connection timeout`); + } +} export class ERR_SOCKET_DGRAM_IS_CONNECTED extends NodeError { constructor() { super("ERR_SOCKET_DGRAM_IS_CONNECTED", `Already connected`); @@ -2633,11 +2638,30 @@ export function aggregateTwoErrors( } return innerError || outerError; } + +export class NodeAggregateError extends AggregateError { + code: string; + constructor(errors, message) { + super(new SafeArrayIterator(errors), message); + this.code = errors[0]?.code; + } + + get [kIsNodeError]() { + return true; + } + + // deno-lint-ignore adjacent-overload-signatures + get ["constructor"]() { + return AggregateError; + } +} + codes.ERR_IPC_CHANNEL_CLOSED = ERR_IPC_CHANNEL_CLOSED; codes.ERR_INVALID_ARG_TYPE = ERR_INVALID_ARG_TYPE; codes.ERR_INVALID_ARG_VALUE = ERR_INVALID_ARG_VALUE; codes.ERR_OUT_OF_RANGE = ERR_OUT_OF_RANGE; codes.ERR_SOCKET_BAD_PORT = ERR_SOCKET_BAD_PORT; +codes.ERR_SOCKET_CONNECTION_TIMEOUT = ERR_SOCKET_CONNECTION_TIMEOUT; codes.ERR_BUFFER_OUT_OF_BOUNDS = ERR_BUFFER_OUT_OF_BOUNDS; codes.ERR_UNKNOWN_ENCODING = ERR_UNKNOWN_ENCODING; codes.ERR_PARSE_ARGS_INVALID_OPTION_VALUE = ERR_PARSE_ARGS_INVALID_OPTION_VALUE; diff --git a/ext/node/polyfills/internal/net.ts b/ext/node/polyfills/internal/net.ts index 144612626f..a3dcb3ed21 100644 --- a/ext/node/polyfills/internal/net.ts +++ b/ext/node/polyfills/internal/net.ts @@ -95,4 +95,5 @@ export function makeSyncWrite(fd: number) { }; } +export const kReinitializeHandle = Symbol("kReinitializeHandle"); export const normalizedArgsSymbol = Symbol("normalizedArgs"); diff --git a/ext/node/polyfills/internal_binding/uv.ts b/ext/node/polyfills/internal_binding/uv.ts index aa468a0a58..6cd70a7e85 100644 --- a/ext/node/polyfills/internal_binding/uv.ts +++ b/ext/node/polyfills/internal_binding/uv.ts @@ -530,10 +530,12 @@ export function mapSysErrnoToUvErrno(sysErrno: number): number { export const UV_EAI_MEMORY = codeMap.get("EAI_MEMORY")!; export const UV_EBADF = codeMap.get("EBADF")!; +export const 
UV_ECANCELED = codeMap.get("ECANCELED")!; export const UV_EEXIST = codeMap.get("EEXIST"); export const UV_EINVAL = codeMap.get("EINVAL")!; export const UV_ENOENT = codeMap.get("ENOENT"); export const UV_ENOTSOCK = codeMap.get("ENOTSOCK")!; +export const UV_ETIMEDOUT = codeMap.get("ETIMEDOUT")!; export const UV_UNKNOWN = codeMap.get("UNKNOWN")!; export function errname(errno: number): string { diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts index 48e1d0de87..2b01125190 100644 --- a/ext/node/polyfills/net.ts +++ b/ext/node/polyfills/net.ts @@ -31,6 +31,7 @@ import { isIP, isIPv4, isIPv6, + kReinitializeHandle, normalizedArgsSymbol, } from "ext:deno_node/internal/net.ts"; import { Duplex } from "node:stream"; @@ -50,9 +51,11 @@ import { ERR_SERVER_ALREADY_LISTEN, ERR_SERVER_NOT_RUNNING, ERR_SOCKET_CLOSED, + ERR_SOCKET_CONNECTION_TIMEOUT, errnoException, exceptionWithHostPort, genericNodeError, + NodeAggregateError, uvExceptionWithHostPort, } from "ext:deno_node/internal/errors.ts"; import type { ErrnoException } from "ext:deno_node/internal/errors.ts"; @@ -80,6 +83,7 @@ import { Buffer } from "node:buffer"; import type { LookupOneOptions } from "ext:deno_node/internal/dns/utils.ts"; import { validateAbortSignal, + validateBoolean, validateFunction, validateInt32, validateNumber, @@ -100,13 +104,25 @@ import { ShutdownWrap } from "ext:deno_node/internal_binding/stream_wrap.ts"; import { assert } from "ext:deno_node/_util/asserts.ts"; import { isWindows } from "ext:deno_node/_util/os.ts"; import { ADDRCONFIG, lookup as dnsLookup } from "node:dns"; -import { codeMap } from "ext:deno_node/internal_binding/uv.ts"; +import { + codeMap, + UV_ECANCELED, + UV_ETIMEDOUT, +} from "ext:deno_node/internal_binding/uv.ts"; import { guessHandleType } from "ext:deno_node/internal_binding/util.ts"; import { debuglog } from "ext:deno_node/internal/util/debuglog.ts"; import type { DuplexOptions } from "ext:deno_node/_stream.d.ts"; import type { BufferEncoding } from "ext:deno_node/_global.d.ts"; import type { Abortable } from "ext:deno_node/_events.d.ts"; import { channel } from "node:diagnostics_channel"; +import { primordials } from "ext:core/mod.js"; + +const { + ArrayPrototypeIncludes, + ArrayPrototypePush, + FunctionPrototypeBind, + MathMax, +} = primordials; let debug = debuglog("net", (fn) => { debug = fn; @@ -120,6 +136,9 @@ const kBytesWritten = Symbol("kBytesWritten"); const DEFAULT_IPV4_ADDR = "0.0.0.0"; const DEFAULT_IPV6_ADDR = "::"; +let autoSelectFamilyDefault = true; +let autoSelectFamilyAttemptTimeoutDefault = 250; + type Handle = TCP | Pipe; interface HandleOptions { @@ -214,6 +233,8 @@ interface TcpSocketConnectOptions extends ConnectOptions { hints?: number; family?: number; lookup?: LookupFunction; + autoSelectFamily?: boolean | undefined; + autoSelectFamilyAttemptTimeout?: number | undefined; } interface IpcSocketConnectOptions extends ConnectOptions { @@ -316,12 +337,6 @@ export function _normalizeArgs(args: unknown[]): NormalizedArgs { return arr; } -function _isTCPConnectWrap( - req: TCPConnectWrap | PipeConnectWrap, -): req is TCPConnectWrap { - return "localAddress" in req && "localPort" in req; -} - function _afterConnect( status: number, // deno-lint-ignore no-explicit-any @@ -372,7 +387,7 @@ function _afterConnect( socket.connecting = false; let details; - if (_isTCPConnectWrap(req)) { + if (req.localAddress && req.localPort) { details = req.localAddress + ":" + req.localPort; } @@ -384,7 +399,7 @@ function _afterConnect( details, ); - if (_isTCPConnectWrap(req)) 
{ + if (details) { ex.localAddress = req.localAddress; ex.localPort = req.localPort; } @@ -393,6 +408,107 @@ function _afterConnect( } } +function _createConnectionError(req, status) { + let details; + + if (req.localAddress && req.localPort) { + details = req.localAddress + ":" + req.localPort; + } + + const ex = exceptionWithHostPort( + status, + "connect", + req.address, + req.port, + details, + ); + if (details) { + ex.localAddress = req.localAddress; + ex.localPort = req.localPort; + } + + return ex; +} + +function _afterConnectMultiple( + context, + current, + status, + handle, + req, + readable, + writable, +) { + debug( + "connect/multiple: connection attempt to %s:%s completed with status %s", + req.address, + req.port, + status, + ); + + // Make sure another connection is not spawned + clearTimeout(context[kTimeout]); + + // One of the connection has completed and correctly dispatched but after timeout, ignore this one + if (status === 0 && current !== context.current - 1) { + debug( + "connect/multiple: ignoring successful but timedout connection to %s:%s", + req.address, + req.port, + ); + handle.close(); + return; + } + + const self = context.socket; + + // Some error occurred, add to the list of exceptions + if (status !== 0) { + const ex = _createConnectionError(req, status); + ArrayPrototypePush(context.errors, ex); + + self.emit( + "connectionAttemptFailed", + req.address, + req.port, + req.addressType, + ex, + ); + + // Try the next address, unless we were aborted + if (context.socket.connecting) { + _internalConnectMultiple(context, status === UV_ECANCELED); + } + + return; + } + + _afterConnect(status, self._handle, req, readable, writable); +} + +function _internalConnectMultipleTimeout(context, req, handle) { + debug( + "connect/multiple: connection to %s:%s timed out", + req.address, + req.port, + ); + context.socket.emit( + "connectionAttemptTimeout", + req.address, + req.port, + req.addressType, + ); + + req.oncomplete = undefined; + ArrayPrototypePush(context.errors, _createConnectionError(req, UV_ETIMEDOUT)); + handle.close(); + + // Try the next address, unless we were aborted + if (context.socket.connecting) { + _internalConnectMultiple(context); + } +} + function _checkBindError(err: number, port: number, handle: TCP) { // EADDRINUSE may not be reported until we call `listen()` or `connect()`. // To complicate matters, a failed `bind()` followed by `listen()` or `connect()` @@ -495,6 +611,131 @@ function _internalConnect( } } +function _internalConnectMultiple(context, canceled?: boolean) { + clearTimeout(context[kTimeout]); + const self = context.socket; + + // We were requested to abort. 
Stop all operations + if (self._aborted) { + return; + } + + // All connections have been tried without success, destroy with error + if (canceled || context.current === context.addresses.length) { + if (context.errors.length === 0) { + self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT()); + return; + } + + self.destroy(new NodeAggregateError(context.errors)); + return; + } + + assert(self.connecting); + + const current = context.current++; + + if (current > 0) { + self[kReinitializeHandle](new TCP(TCPConstants.SOCKET)); + } + + const { localPort, port, flags } = context; + const { address, family: addressType } = context.addresses[current]; + let localAddress; + let err; + + if (localPort) { + if (addressType === 4) { + localAddress = DEFAULT_IPV4_ADDR; + err = self._handle.bind(localAddress, localPort); + } else { // addressType === 6 + localAddress = DEFAULT_IPV6_ADDR; + err = self._handle.bind6(localAddress, localPort, flags); + } + + debug( + "connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)", + localAddress, + localPort, + addressType, + ); + + err = _checkBindError(err, localPort, self._handle); + if (err) { + ArrayPrototypePush( + context.errors, + exceptionWithHostPort(err, "bind", localAddress, localPort), + ); + _internalConnectMultiple(context); + return; + } + } + + debug( + "connect/multiple: attempting to connect to %s:%d (addressType: %d)", + address, + port, + addressType, + ); + self.emit("connectionAttempt", address, port, addressType); + + const req = new TCPConnectWrap(); + req.oncomplete = FunctionPrototypeBind( + _afterConnectMultiple, + undefined, + context, + current, + ); + req.address = address; + req.port = port; + req.localAddress = localAddress; + req.localPort = localPort; + req.addressType = addressType; + + ArrayPrototypePush( + self.autoSelectFamilyAttemptedAddresses, + `${address}:${port}`, + ); + + if (addressType === 4) { + err = self._handle.connect(req, address, port); + } else { + err = self._handle.connect6(req, address, port); + } + + if (err) { + const sockname = self._getsockname(); + let details; + + if (sockname) { + details = sockname.address + ":" + sockname.port; + } + + const ex = exceptionWithHostPort(err, "connect", address, port, details); + ArrayPrototypePush(context.errors, ex); + + self.emit("connectionAttemptFailed", address, port, addressType, ex); + _internalConnectMultiple(context); + return; + } + + if (current < context.addresses.length - 1) { + debug( + "connect/multiple: setting the attempt timeout to %d ms", + context.timeout, + ); + + // If the attempt has not returned an error, start the connection timer + context[kTimeout] = setTimeout( + _internalConnectMultipleTimeout, + context.timeout, + context, + req, + self._handle, + ); + } +} + // Provide a better error message when we call end() as a result // of the other side sending a FIN. The standard "write after end" // is overly vague, and makes it seem like the user's code is to blame. 
@@ -597,7 +838,7 @@ function _lookupAndConnect( ) { const { localAddress, localPort } = options; const host = options.host || "localhost"; - let { port } = options; + let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options; if (localAddress && !isIP(localAddress)) { throw new ERR_INVALID_IP_ADDRESS(localAddress); @@ -621,6 +862,22 @@ function _lookupAndConnect( port |= 0; + if (autoSelectFamily != null) { + validateBoolean(autoSelectFamily, "options.autoSelectFamily"); + } else { + autoSelectFamily = autoSelectFamilyDefault; + } + + if (autoSelectFamilyAttemptTimeout !== undefined) { + validateInt32(autoSelectFamilyAttemptTimeout); + + if (autoSelectFamilyAttemptTimeout < 10) { + autoSelectFamilyAttemptTimeout = 10; + } + } else { + autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault; + } + // If host is an IP, skip performing a lookup const addressType = isIP(host); if (addressType) { @@ -649,6 +906,7 @@ function _lookupAndConnect( const dnsOpts = { family: options.family, hints: options.hints || 0, + all: false, }; if ( @@ -665,6 +923,31 @@ function _lookupAndConnect( self._host = host; const lookup = options.lookup || dnsLookup; + if ( + dnsOpts.family !== 4 && dnsOpts.family !== 6 && !localAddress && + autoSelectFamily + ) { + debug("connect: autodetecting"); + + dnsOpts.all = true; + defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () { + _lookupAndConnectMultiple( + self, + asyncIdSymbol, + lookup, + host, + options, + dnsOpts, + port, + localAddress, + localPort, + autoSelectFamilyAttemptTimeout, + ); + }); + + return; + } + defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () { lookup( host, @@ -719,6 +1002,143 @@ function _lookupAndConnect( }); } +function _lookupAndConnectMultiple( + self: Socket, + asyncIdSymbol: number, + // deno-lint-ignore no-explicit-any + lookup: any, + host: string, + options: TcpSocketConnectOptions, + dnsopts, + port: number, + localAddress: string, + localPort: number, + timeout: number | undefined, +) { + defaultTriggerAsyncIdScope(self[asyncIdSymbol], function emitLookup() { + lookup(host, dnsopts, function emitLookup(err, addresses) { + // It's possible we were destroyed while looking this up. + // XXX it would be great if we could cancel the promise returned by + // the look up. + if (!self.connecting) { + return; + } else if (err) { + self.emit("lookup", err, undefined, undefined, host); + + // net.createConnection() creates a net.Socket object and immediately + // calls net.Socket.connect() on it (that's us). There are no event + // listeners registered yet so defer the error event to the next tick. + nextTick(_connectErrorNT, self, err); + return; + } + + // Filter addresses by only keeping the one which are either IPv4 or IPV6. + // The first valid address determines which group has preference on the + // alternate family sorting which happens later. + const validAddresses = [[], []]; + const validIps = [[], []]; + let destinations; + for (let i = 0, l = addresses.length; i < l; i++) { + const address = addresses[i]; + const { address: ip, family: addressType } = address; + self.emit("lookup", err, ip, addressType, host); + // It's possible we were destroyed while looking this up. + if (!self.connecting) { + return; + } + if (isIP(ip) && (addressType === 4 || addressType === 6)) { + destinations ||= addressType === 6 ? 
{ 6: 0, 4: 1 } : { 4: 0, 6: 1 }; + + const destination = destinations[addressType]; + + // Only try an address once + if (!ArrayPrototypeIncludes(validIps[destination], ip)) { + ArrayPrototypePush(validAddresses[destination], address); + ArrayPrototypePush(validIps[destination], ip); + } + } + } + + // When no AAAA or A records are available, fail on the first one + if (!validAddresses[0].length && !validAddresses[1].length) { + const { address: firstIp, family: firstAddressType } = addresses[0]; + + if (!isIP(firstIp)) { + err = new ERR_INVALID_IP_ADDRESS(firstIp); + nextTick(_connectErrorNT, self, err); + } else if (firstAddressType !== 4 && firstAddressType !== 6) { + err = new ERR_INVALID_ADDRESS_FAMILY( + firstAddressType, + options.host, + options.port, + ); + nextTick(_connectErrorNT, self, err); + } + + return; + } + + // Sort addresses alternating families + const toAttempt = []; + for ( + let i = 0, + l = MathMax(validAddresses[0].length, validAddresses[1].length); + i < l; + i++ + ) { + if (i in validAddresses[0]) { + ArrayPrototypePush(toAttempt, validAddresses[0][i]); + } + if (i in validAddresses[1]) { + ArrayPrototypePush(toAttempt, validAddresses[1][i]); + } + } + + if (toAttempt.length === 1) { + debug( + "connect/multiple: only one address found, switching back to single connection", + ); + const { address: ip, family: addressType } = toAttempt[0]; + + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[asyncIdSymbol], + _internalConnect, + self, + ip, + port, + addressType, + localAddress, + localPort, + ); + + return; + } + + self.autoSelectFamilyAttemptedAddresses = []; + debug("connect/multiple: will try the following addresses", toAttempt); + + const context = { + socket: self, + addresses: toAttempt, + current: 0, + port, + localPort, + timeout, + [kTimeout]: null, + errors: [], + }; + + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[asyncIdSymbol], + _internalConnectMultiple, + context, + ); + }); + }); +} + function _afterShutdown(this: ShutdownWrap) { // deno-lint-ignore no-explicit-any const self: any = this.handle[ownerSymbol]; @@ -777,6 +1197,7 @@ export class Socket extends Duplex { _host: string | null = null; // deno-lint-ignore no-explicit-any _parent: any = null; + autoSelectFamilyAttemptedAddresses: AddressInfo[] | undefined = undefined; constructor(options: SocketOptions | number) { if (typeof options === "number") { @@ -1546,6 +1967,16 @@ export class Socket extends Duplex { set _handle(v: Handle | null) { this[kHandle] = v; } + + // deno-lint-ignore no-explicit-any + [kReinitializeHandle](handle: any) { + this._handle?.close(); + + this._handle = handle; + this._handle[ownerSymbol] = this; + + _initSocketHandle(this); + } } export const Stream = Socket; @@ -1593,6 +2024,33 @@ export function connect(...args: unknown[]) { export const createConnection = connect; +/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamily */ +export function getDefaultAutoSelectFamily() { + return autoSelectFamilyDefault; +} + +/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamily */ +export function setDefaultAutoSelectFamily(value: boolean) { + validateBoolean(value, "value"); + autoSelectFamilyDefault = value; +} + +/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamilyattempttimeout */ +export function getDefaultAutoSelectFamilyAttemptTimeout() { + return autoSelectFamilyAttemptTimeoutDefault; +} + +/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamilyattempttimeout 
*/ +export function setDefaultAutoSelectFamilyAttemptTimeout(value: number) { + validateInt32(value, "value", 1); + + if (value < 10) { + value = 10; + } + + autoSelectFamilyAttemptTimeoutDefault = value; +} + export interface ListenOptions extends Abortable { fd?: number; port?: number | undefined; @@ -2478,15 +2936,19 @@ export { BlockList, isIP, isIPv4, isIPv6, SocketAddress }; export default { _createServerHandle, _normalizeArgs, - isIP, - isIPv4, - isIPv6, BlockList, - SocketAddress, connect, createConnection, createServer, + getDefaultAutoSelectFamily, + getDefaultAutoSelectFamilyAttemptTimeout, + isIP, + isIPv4, + isIPv6, Server, + setDefaultAutoSelectFamily, + setDefaultAutoSelectFamilyAttemptTimeout, Socket, + SocketAddress, Stream, }; diff --git a/tests/node_compat/config.jsonc b/tests/node_compat/config.jsonc index 16951d9ede..664adaedfa 100644 --- a/tests/node_compat/config.jsonc +++ b/tests/node_compat/config.jsonc @@ -77,6 +77,7 @@ "test-fs-rmdir-recursive.js", "test-fs-write-file.js", "test-http-url.parse-https.request.js", + "test-net-autoselectfamily.js", "test-net-better-error-messages-path.js", "test-net-connect-buffer.js", "test-net-connect-buffer2.js", @@ -404,6 +405,7 @@ "test-http-url.parse-only-support-http-https-protocol.js", "test-icu-transcode.js", "test-net-access-byteswritten.js", + "test-net-autoselectfamily.js", "test-net-better-error-messages-listen-path.js", "test-net-better-error-messages-path.js", "test-net-better-error-messages-port-hostname.js", diff --git a/tests/node_compat/runner/TODO.md b/tests/node_compat/runner/TODO.md index 231a4f62c9..27c2ef3e78 100644 --- a/tests/node_compat/runner/TODO.md +++ b/tests/node_compat/runner/TODO.md @@ -1767,7 +1767,6 @@ NOTE: This file should not be manually edited. Please edit `tests/node_compat/co - [parallel/test-net-autoselectfamily-commandline-option.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-commandline-option.js) - [parallel/test-net-autoselectfamily-default.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-default.js) - [parallel/test-net-autoselectfamily-ipv4first.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-ipv4first.js) -- [parallel/test-net-autoselectfamily.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily.js) - [parallel/test-net-better-error-messages-listen.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-better-error-messages-listen.js) - [parallel/test-net-binary.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-binary.js) - [parallel/test-net-bind-twice.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-bind-twice.js) diff --git a/tests/node_compat/test/common/index.js b/tests/node_compat/test/common/index.js index d2165aecd0..d358ffce5b 100644 --- a/tests/node_compat/test/common/index.js +++ b/tests/node_compat/test/common/index.js @@ -473,6 +473,7 @@ const pwdCommand = isWindows ? 
module.exports = { allowGlobals, + defaultAutoSelectFamilyAttemptTimeout: 2500, expectsError, expectWarning, getArrayBufferViews, diff --git a/tests/node_compat/test/parallel/test-net-autoselectfamily.js b/tests/node_compat/test/parallel/test-net-autoselectfamily.js new file mode 100644 index 0000000000..3b520e6c80 --- /dev/null +++ b/tests/node_compat/test/parallel/test-net-autoselectfamily.js @@ -0,0 +1,312 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +// Copyright Joyent and Node contributors. All rights reserved. MIT license. +// Taken from Node 18.12.1 +// This file is automatically generated by `tests/node_compat/runner/setup.ts`. Do not modify this file manually. + +'use strict'; + +const common = require('../common'); +const { parseDNSPacket, writeDNSPacket } = require('../common/dns'); + +const assert = require('assert'); +const dgram = require('dgram'); +const { Resolver } = require('dns'); +const { createConnection, createServer } = require('net'); + +// Test that happy eyeballs algorithm is properly implemented. + +// Purposely not using setDefaultAutoSelectFamilyAttemptTimeout here to test the +// parameter is correctly used in options. +// +// Some of the machines in the CI need more time to establish connection +const autoSelectFamilyAttemptTimeout = common.defaultAutoSelectFamilyAttemptTimeout; + +function _lookup(resolver, hostname, options, cb) { + resolver.resolve(hostname, 'ANY', (err, replies) => { + assert.notStrictEqual(options.family, 4); + + if (err) { + return cb(err); + } + + const hosts = replies + .map((r) => ({ address: r.address, family: r.type === 'AAAA' ? 6 : 4 })) + .sort((a, b) => b.family - a.family); + + if (options.all === true) { + return cb(null, hosts); + } + + return cb(null, hosts[0].address, hosts[0].family); + }); +} + +function createDnsServer(ipv6Addrs, ipv4Addrs, cb) { + if (!Array.isArray(ipv6Addrs)) { + ipv6Addrs = [ipv6Addrs]; + } + + if (!Array.isArray(ipv4Addrs)) { + ipv4Addrs = [ipv4Addrs]; + } + + // Create a DNS server which replies with a AAAA and a A record for the same host + const socket = dgram.createSocket('udp4'); + + // TODO(kt3k): We use common.mustCallAtLeast instead of common.mustCall + // because Deno sends multiple requests to the DNS server. + // This can be addressed if Deno.resolveDns supports ANY record type. 
+ // See https://github.com/denoland/deno/issues/14492 + socket.on('message', common.mustCallAtLeast((msg, { address, port }) => { + const parsed = parseDNSPacket(msg); + const domain = parsed.questions[0].domain; + assert.strictEqual(domain, 'example.org'); + + socket.send(writeDNSPacket({ + id: parsed.id, + questions: parsed.questions, + answers: [ + ...ipv6Addrs.map((address) => ({ type: 'AAAA', address, ttl: 123, domain: 'example.org' })), + ...ipv4Addrs.map((address) => ({ type: 'A', address, ttl: 123, domain: 'example.org' })), + ] + }), port, address); + })); + + socket.bind(0, () => { + const resolver = new Resolver(); + resolver.setServers([`127.0.0.1:${socket.address().port}`]); + + cb({ dnsServer: socket, lookup: _lookup.bind(null, resolver) }); + }); +} + +// Test that IPV4 is reached if IPV6 is not reachable +{ + createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) { + const ipv4Server = createServer((socket) => { + socket.on('data', common.mustCall(() => { + socket.write('response-ipv4'); + socket.end(); + })); + }); + + ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => { + const port = ipv4Server.address().port; + + const connection = createConnection({ + host: 'example.org', + port: port, + lookup, + autoSelectFamily: true, + autoSelectFamilyAttemptTimeout, + }); + + let response = ''; + connection.setEncoding('utf-8'); + + connection.on('ready', common.mustCall(() => { + assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, [`::1:${port}`, `127.0.0.1:${port}`]); + })); + + connection.on('data', (chunk) => { + response += chunk; + }); + + connection.on('end', common.mustCall(() => { + assert.strictEqual(response, 'response-ipv4'); + ipv4Server.close(); + dnsServer.close(); + })); + + connection.write('request'); + })); + })); +} + +// Test that only the last successful connection is established. +{ + createDnsServer( + ['2606:4700::6810:85e5', '2606:4700::6810:84e5', "::1"], + // TODO(kt3k): Comment out ipv4 addresses to make the test pass faster. + // Enable this when Deno.connect() call becomes cancellable. 
+ // See https://github.com/denoland/deno/issues/26819 + // ['104.20.22.46', '104.20.23.46', '127.0.0.1'], + ['127.0.0.1'], + common.mustCall(function({ dnsServer, lookup }) { + const ipv4Server = createServer((socket) => { + socket.on('data', common.mustCall(() => { + socket.write('response-ipv4'); + socket.end(); + })); + }); + + ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => { + const port = ipv4Server.address().port; + + const connection = createConnection({ + host: 'example.org', + port: port, + lookup, + autoSelectFamily: true, + autoSelectFamilyAttemptTimeout, + }); + + let response = ''; + connection.setEncoding('utf-8'); + + connection.on('ready', common.mustCall(() => { + assert.deepStrictEqual( + connection.autoSelectFamilyAttemptedAddresses, + [ + `2606:4700::6810:85e5:${port}`, + `104.20.22.46:${port}`, + `2606:4700::6810:84e5:${port}`, + `104.20.23.46:${port}`, + `::1:${port}`, + `127.0.0.1:${port}`, + ] + ); + })); + + connection.on('data', (chunk) => { + response += chunk; + }); + + connection.on('end', common.mustCall(() => { + assert.strictEqual(response, 'response-ipv4'); + ipv4Server.close(); + dnsServer.close(); + })); + + connection.write('request'); + })); + }) + ); +} + +// Test that IPV4 is NOT reached if IPV6 is reachable +if (common.hasIPv6) { + createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) { + const ipv4Server = createServer((socket) => { + socket.on('data', common.mustNotCall(() => { + socket.write('response-ipv4'); + socket.end(); + })); + }); + + const ipv6Server = createServer((socket) => { + socket.on('data', common.mustCall(() => { + socket.write('response-ipv6'); + socket.end(); + })); + }); + + ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => { + const port = ipv4Server.address().port; + + ipv6Server.listen(port, '::1', common.mustCall(() => { + const connection = createConnection({ + host: 'example.org', + port, + lookup, + autoSelectFamily: true, + autoSelectFamilyAttemptTimeout, + }); + + let response = ''; + connection.setEncoding('utf-8'); + + connection.on('ready', common.mustCall(() => { + assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, [`::1:${port}`]); + })); + + connection.on('data', (chunk) => { + response += chunk; + }); + + connection.on('end', common.mustCall(() => { + assert.strictEqual(response, 'response-ipv6'); + ipv4Server.close(); + ipv6Server.close(); + dnsServer.close(); + })); + + connection.write('request'); + })); + })); + })); +} + +// Test that when all errors are returned when no connections succeeded +{ + createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) { + const connection = createConnection({ + host: 'example.org', + port: 10, + lookup, + autoSelectFamily: true, + autoSelectFamilyAttemptTimeout, + }); + + connection.on('ready', common.mustNotCall()); + connection.on('error', common.mustCall((error) => { + assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, ['::1:10', '127.0.0.1:10']); + assert.strictEqual(error.constructor.name, 'AggregateError'); + assert.strictEqual(error.errors.length, 2); + + const errors = error.errors.map((e) => e.message); + assert.ok(errors.includes('connect ECONNREFUSED 127.0.0.1:10')); + + if (common.hasIPv6) { + assert.ok(errors.includes('connect ECONNREFUSED ::1:10')); + } + + dnsServer.close(); + })); + })); +} + +// Test that the option can be disabled +{ + createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) { + const ipv4Server = 
createServer((socket) => { + socket.on('data', common.mustCall(() => { + socket.write('response-ipv4'); + socket.end(); + })); + }); + + ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => { + const port = ipv4Server.address().port; + + const connection = createConnection({ + host: 'example.org', + port, + lookup, + autoSelectFamily: false, + }); + + connection.on('ready', common.mustNotCall()); + connection.on('error', common.mustCall((error) => { + assert.strictEqual(connection.autoSelectFamilyAttemptedAddresses, undefined); + + if (common.hasIPv6) { + assert.strictEqual(error.code, 'ECONNREFUSED'); + assert.strictEqual(error.message, `connect ECONNREFUSED ::1:${port}`); + } else if (error.code === 'EAFNOSUPPORT') { + assert.strictEqual(error.message, `connect EAFNOSUPPORT ::1:${port} - Local (undefined:undefined)`); + } else if (error.code === 'EUNATCH') { + assert.strictEqual(error.message, `connect EUNATCH ::1:${port} - Local (:::0)`); + } else { + assert.strictEqual(error.code, 'EADDRNOTAVAIL'); + assert.strictEqual(error.message, `connect EADDRNOTAVAIL ::1:${port} - Local (:::0)`); + } + + ipv4Server.close(); + dnsServer.close(); + })); + })); + })); +} diff --git a/tests/unit_node/http2_test.ts b/tests/unit_node/http2_test.ts index 7473a487ad..c540c90f7e 100644 --- a/tests/unit_node/http2_test.ts +++ b/tests/unit_node/http2_test.ts @@ -10,6 +10,11 @@ import * as net from "node:net"; import { assert, assertEquals } from "@std/assert"; import { curlRequest } from "../unit/test_util.ts"; +// Increase the timeout for the auto select family to avoid flakiness +net.setDefaultAutoSelectFamilyAttemptTimeout( + net.getDefaultAutoSelectFamilyAttemptTimeout() * 30, +); + for (const url of ["http://localhost:4246", "https://localhost:4247"]) { Deno.test(`[node/http2 client] ${url}`, { ignore: Deno.build.os === "windows", From 3b99f6833cd3354da20785fdcf01e7409e610175 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 12 Nov 2024 17:10:07 +0530 Subject: [PATCH 02/18] fix(ext/websocket): initialize `error` attribute of WebSocket ErrorEvent (#26796) Fixes https://github.com/denoland/deno/issues/26216 Not required by the spec but Discord.js depends on it, see https://github.com/denoland/deno/issues/26216#issuecomment-2466060306 --- ext/websocket/01_websocket.js | 6 +++++- tests/unit/websocket_test.ts | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 58f4773101..468999b95d 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -28,6 +28,7 @@ const { ArrayPrototypePush, ArrayPrototypeShift, ArrayPrototypeSome, + Error, ErrorPrototypeToString, ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, @@ -488,8 +489,11 @@ class WebSocket extends EventTarget { /* error */ this[_readyState] = CLOSED; + const message = op_ws_get_error(rid); + const error = new Error(message); const errorEv = new ErrorEvent("error", { - message: op_ws_get_error(rid), + error, + message, }); this.dispatchEvent(errorEv); diff --git a/tests/unit/websocket_test.ts b/tests/unit/websocket_test.ts index 7db876b177..3aafe8da22 100644 --- a/tests/unit/websocket_test.ts +++ b/tests/unit/websocket_test.ts @@ -453,7 +453,8 @@ Deno.test("invalid server", async () => { const { promise, resolve } = Promise.withResolvers(); const ws = new WebSocket("ws://localhost:2121"); let err = false; - ws.onerror = () => { + ws.onerror = (e) => { + assert("error" in e); err = true; }; ws.onclose = () => { From 
7d326c269cfb8463d530ee5dd4bcabb7499e1ba7 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Tue, 12 Nov 2024 13:15:32 +0000 Subject: [PATCH 03/18] fix(lsp): skip code action edits that can't be converted (#26831) --- cli/lsp/analysis.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 98215855c9..683a59c219 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -10,6 +10,7 @@ use super::tsc; use super::urls::url_to_uri; use crate::args::jsr_url; +use crate::lsp::logging::lsp_warn; use crate::lsp::search::PackageSearchApi; use crate::tools::lint::CliLinter; use crate::util::path::relative_specifier; @@ -747,8 +748,14 @@ pub fn ts_changes_to_edit( ) -> Result, AnyError> { let mut text_document_edits = Vec::new(); for change in changes { - let text_document_edit = change.to_text_document_edit(language_server)?; - text_document_edits.push(text_document_edit); + let edit = match change.to_text_document_edit(language_server) { + Ok(e) => e, + Err(err) => { + lsp_warn!("Couldn't covert text document edit: {:#}", err); + continue; + } + }; + text_document_edits.push(edit); } Ok(Some(lsp::WorkspaceEdit { changes: None, From 7179bdcc772577e44566acded5b165946d73fe4b Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 12 Nov 2024 21:25:49 +0530 Subject: [PATCH 04/18] fix(ext/node): handle `--allow-sys=inspector` (#26836) `op_inspector_open` checks for "inspector" as one of the allowed sys value. --- runtime/permissions/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index 6480f4bf58..3aa5aabb3d 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -1461,7 +1461,7 @@ pub struct SysDescriptor(String); impl SysDescriptor { pub fn parse(kind: String) -> Result { match kind.as_str() { - "hostname" | "osRelease" | "osUptime" | "loadavg" + "hostname" | "inspector" | "osRelease" | "osUptime" | "loadavg" | "networkInterfaces" | "systemMemoryInfo" | "uid" | "gid" | "cpus" | "homedir" | "getegid" | "statfs" | "getPriority" | "setPriority" | "userInfo" => Ok(Self(kind)), From 15b6baff33bb2405b174c5eaa919f9219421d513 Mon Sep 17 00:00:00 2001 From: "Soc Virnyl S. 
Estela" Date: Wed, 13 Nov 2024 00:25:59 +0800 Subject: [PATCH 05/18] chore: update zeromq to 0.4.1 (#26811) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes: #26810 --------- Signed-off-by: Soc Virnyl Estela Co-authored-by: Divy Srivastava Co-authored-by: Bartek Iwańczuk --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b86b8acd94..6f7799bac8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8485,9 +8485,9 @@ dependencies = [ [[package]] name = "zeromq" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb0560d00172817b7f7c2265060783519c475702ae290b154115ca75e976d4d0" +checksum = "6a4528179201f6eecf211961a7d3276faa61554c82651ecc66387f68fc3004bd" dependencies = [ "async-trait", "asynchronous-codec", diff --git a/Cargo.toml b/Cargo.toml index d83ca2e413..e372e542bb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -204,7 +204,7 @@ webpki-root-certs = "0.26.5" webpki-roots = "0.26" which = "4.2.5" yoke = { version = "0.7.4", features = ["derive"] } -zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] } +zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] } zstd = "=0.12.4" # crypto From c371b2a492c60f47ce6b96b4df129c5d01706e1b Mon Sep 17 00:00:00 2001 From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> Date: Tue, 12 Nov 2024 09:23:39 -0800 Subject: [PATCH 06/18] fix(install): re-setup bin entries after running lifecycle scripts (#26752) Fixes #26677 Some packages (like supabase) declare bin entries that don't exist until lifecycle scripts are run. For instance, the lifecycle script downloads a binary file which serves as a bin entrypoint. Unfortunately you can't just defer setting up the bin entries until after lifecycle scripts have run, because the scripts may rely on them. I looked into this, and PNPM just re-links bin entries after running lifecycle scripts. I think that's about the best we can do as well. Note that we'll only re-setup bin entries for packages whose lifecycle scripts we run. This should limit the performance cost, as typically a given project will not have many lifecycle scripts (and of those, many of them probably don't have bin entries to set up). 
--- .../managed/resolvers/common/bin_entries.rs | 139 ++++++++++++++---- .../resolvers/common/lifecycle_scripts.rs | 57 +++++-- cli/npm/managed/resolvers/local.rs | 29 +++- .../1.0.0/install.mjs | 3 + .../1.0.0/package.json | 10 ++ .../__test__.jsonc | 29 ++++ .../all_at_once_install.out | 4 + .../install_warn.out | 10 ++ .../install_warn_windows.out | 9 ++ .../package.json | 8 + .../run_testbin.out | 2 + 11 files changed, 253 insertions(+), 47 deletions(-) create mode 100644 tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/install.mjs create mode 100644 tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/package.json create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/__test__.jsonc create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/all_at_once_install.out create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/install_warn.out create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/install_warn_windows.out create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/package.json create mode 100644 tests/specs/npm/bin_entry_created_by_lifecycle/run_testbin.out diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs index 4524ce8326..e4a1845689 100644 --- a/cli/npm/managed/resolvers/common/bin_entries.rs +++ b/cli/npm/managed/resolvers/common/bin_entries.rs @@ -18,6 +18,7 @@ pub struct BinEntries<'a> { seen_names: HashMap<&'a str, &'a NpmPackageId>, /// The bin entries entries: Vec<(&'a NpmResolutionPackage, PathBuf)>, + sorted: bool, } /// Returns the name of the default binary for the given package. @@ -31,6 +32,20 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str { .map_or(package.id.nv.name.as_str(), |(_, name)| name) } +pub fn warn_missing_entrypoint( + bin_name: &str, + package_path: &Path, + entrypoint: &Path, +) { + log::warn!( + "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.", + deno_terminal::colors::yellow("Warning"), + bin_name, + package_path.display(), + entrypoint.display() + ); +} + impl<'a> BinEntries<'a> { pub fn new() -> Self { Self::default() @@ -42,6 +57,7 @@ impl<'a> BinEntries<'a> { package: &'a NpmResolutionPackage, package_path: PathBuf, ) { + self.sorted = false; // check for a new collision, if we haven't already // found one match package.bin.as_ref().unwrap() { @@ -79,16 +95,21 @@ impl<'a> BinEntries<'a> { &str, // bin name &str, // bin script ) -> Result<(), AnyError>, + mut filter: impl FnMut(&NpmResolutionPackage) -> bool, ) -> Result<(), AnyError> { - if !self.collisions.is_empty() { + if !self.collisions.is_empty() && !self.sorted { // walking the dependency tree to find out the depth of each package // is sort of expensive, so we only do it if there's a collision sort_by_depth(snapshot, &mut self.entries, &mut self.collisions); + self.sorted = true; } let mut seen = HashSet::new(); for (package, package_path) in &self.entries { + if !filter(package) { + continue; + } if let Some(bin_entries) = &package.bin { match bin_entries { deno_npm::registry::NpmPackageVersionBinEntry::String(script) => { @@ -118,8 +139,8 @@ impl<'a> BinEntries<'a> { } /// Collect the bin entries into a vec of (name, script path) - pub fn into_bin_files( - mut self, + pub fn collect_bin_files( + &mut self, snapshot: &NpmResolutionSnapshot, ) -> Vec<(String, PathBuf)> { let mut bins = Vec::new(); @@ -131,17 +152,18 @@ impl<'a> BinEntries<'a> { bins.push((name.to_string(), package_path.join(script))); Ok(()) }, 
+ |_| true, ) .unwrap(); bins } - /// Finish setting up the bin entries, writing the necessary files - /// to disk. - pub fn finish( + fn set_up_entries_filtered( mut self, snapshot: &NpmResolutionSnapshot, bin_node_modules_dir_path: &Path, + filter: impl FnMut(&NpmResolutionPackage) -> bool, + mut handler: impl FnMut(&EntrySetupOutcome<'_>), ) -> Result<(), AnyError> { if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { std::fs::create_dir_all(bin_node_modules_dir_path).with_context( @@ -160,18 +182,54 @@ impl<'a> BinEntries<'a> { Ok(()) }, |package, package_path, name, script| { - set_up_bin_entry( + let outcome = set_up_bin_entry( package, name, script, package_path, bin_node_modules_dir_path, - ) + )?; + handler(&outcome); + Ok(()) }, + filter, )?; Ok(()) } + + /// Finish setting up the bin entries, writing the necessary files + /// to disk. + pub fn finish( + self, + snapshot: &NpmResolutionSnapshot, + bin_node_modules_dir_path: &Path, + handler: impl FnMut(&EntrySetupOutcome<'_>), + ) -> Result<(), AnyError> { + self.set_up_entries_filtered( + snapshot, + bin_node_modules_dir_path, + |_| true, + handler, + ) + } + + /// Finish setting up the bin entries, writing the necessary files + /// to disk. + pub fn finish_only( + self, + snapshot: &NpmResolutionSnapshot, + bin_node_modules_dir_path: &Path, + handler: impl FnMut(&EntrySetupOutcome<'_>), + only: &HashSet<&NpmPackageId>, + ) -> Result<(), AnyError> { + self.set_up_entries_filtered( + snapshot, + bin_node_modules_dir_path, + |package| only.contains(&package.id), + handler, + ) + } } // walk the dependency tree to find out the depth of each package @@ -233,16 +291,17 @@ fn sort_by_depth( }); } -pub fn set_up_bin_entry( - package: &NpmResolutionPackage, - bin_name: &str, +pub fn set_up_bin_entry<'a>( + package: &'a NpmResolutionPackage, + bin_name: &'a str, #[allow(unused_variables)] bin_script: &str, - #[allow(unused_variables)] package_path: &Path, + #[allow(unused_variables)] package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { +) -> Result, AnyError> { #[cfg(windows)] { set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; + Ok(EntrySetupOutcome::Success) } #[cfg(unix)] { @@ -252,9 +311,8 @@ pub fn set_up_bin_entry( bin_script, package_path, bin_node_modules_dir_path, - )?; + ) } - Ok(()) } #[cfg(windows)] @@ -301,14 +359,39 @@ fn make_executable_if_exists(path: &Path) -> Result { Ok(true) } +pub enum EntrySetupOutcome<'a> { + #[cfg_attr(windows, allow(dead_code))] + MissingEntrypoint { + bin_name: &'a str, + package_path: &'a Path, + entrypoint: PathBuf, + package: &'a NpmResolutionPackage, + }, + Success, +} + +impl<'a> EntrySetupOutcome<'a> { + pub fn warn_if_failed(&self) { + match self { + EntrySetupOutcome::MissingEntrypoint { + bin_name, + package_path, + entrypoint, + .. 
+ } => warn_missing_entrypoint(bin_name, package_path, entrypoint), + EntrySetupOutcome::Success => {} + } + } +} + #[cfg(unix)] -fn symlink_bin_entry( - _package: &NpmResolutionPackage, - bin_name: &str, +fn symlink_bin_entry<'a>( + package: &'a NpmResolutionPackage, + bin_name: &'a str, bin_script: &str, - package_path: &Path, + package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { +) -> Result, AnyError> { use std::io; use std::os::unix::fs::symlink; let link = bin_node_modules_dir_path.join(bin_name); @@ -318,14 +401,12 @@ fn symlink_bin_entry( format!("Can't set up '{}' bin at {}", bin_name, original.display()) })?; if !found { - log::warn!( - "{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.", - deno_terminal::colors::yellow("Warning"), + return Ok(EntrySetupOutcome::MissingEntrypoint { bin_name, - package_path.display(), - original.display() - ); - return Ok(()); + package_path, + entrypoint: original, + package, + }); } let original_relative = @@ -348,7 +429,7 @@ fn symlink_bin_entry( original_relative.display() ) })?; - return Ok(()); + return Ok(EntrySetupOutcome::Success); } return Err(err).with_context(|| { format!( @@ -359,5 +440,5 @@ fn symlink_bin_entry( }); } - Ok(()) + Ok(EntrySetupOutcome::Success) } diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs index 5735f52482..5c5755c819 100644 --- a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -10,6 +10,7 @@ use deno_runtime::deno_io::FromRawIoHandle; use deno_semver::package::PackageNv; use deno_semver::Version; use std::borrow::Cow; +use std::collections::HashSet; use std::rc::Rc; use std::path::Path; @@ -61,7 +62,7 @@ impl<'a> LifecycleScripts<'a> { } } -fn has_lifecycle_scripts( +pub fn has_lifecycle_scripts( package: &NpmResolutionPackage, package_path: &Path, ) -> bool { @@ -83,7 +84,7 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool { } impl<'a> LifecycleScripts<'a> { - fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { + pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { if !self.strategy.can_run_scripts() { return false; } @@ -98,6 +99,9 @@ impl<'a> LifecycleScripts<'a> { PackagesAllowedScripts::None => false, } } + pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool { + self.strategy.has_run(package) + } /// Register a package for running lifecycle scripts, if applicable. /// /// `package_path` is the path containing the package's code (its root dir). 
@@ -110,12 +114,12 @@ impl<'a> LifecycleScripts<'a> { ) { if has_lifecycle_scripts(package, &package_path) { if self.can_run_scripts(&package.id.nv) { - if !self.strategy.has_run(package) { + if !self.has_run_scripts(package) { self .packages_with_scripts .push((package, package_path.into_owned())); } - } else if !self.strategy.has_run(package) + } else if !self.has_run_scripts(package) && (self.config.explicit_install || !self.strategy.has_warned(package)) { // Skip adding `esbuild` as it is known that it can work properly without lifecycle script @@ -149,22 +153,32 @@ impl<'a> LifecycleScripts<'a> { self, snapshot: &NpmResolutionSnapshot, packages: &[NpmResolutionPackage], - root_node_modules_dir_path: Option<&Path>, + root_node_modules_dir_path: &Path, progress_bar: &ProgressBar, ) -> Result<(), AnyError> { self.warn_not_run_scripts()?; let get_package_path = |p: &NpmResolutionPackage| self.strategy.package_path(p); let mut failed_packages = Vec::new(); + let mut bin_entries = BinEntries::new(); if !self.packages_with_scripts.is_empty() { + let package_ids = self + .packages_with_scripts + .iter() + .map(|(p, _)| &p.id) + .collect::>(); // get custom commands for each bin available in the node_modules dir (essentially // the scripts that are in `node_modules/.bin`) - let base = - resolve_baseline_custom_commands(snapshot, packages, get_package_path)?; + let base = resolve_baseline_custom_commands( + &mut bin_entries, + snapshot, + packages, + get_package_path, + )?; let init_cwd = &self.config.initial_cwd; let process_state = crate::npm::managed::npm_process_state( snapshot.as_valid_serialized(), - root_node_modules_dir_path, + Some(root_node_modules_dir_path), ); let mut env_vars = crate::task_runner::real_env_vars(); @@ -221,7 +235,7 @@ impl<'a> LifecycleScripts<'a> { custom_commands: custom_commands.clone(), init_cwd, argv: &[], - root_node_modules_dir: root_node_modules_dir_path, + root_node_modules_dir: Some(root_node_modules_dir_path), stdio: Some(crate::task_runner::TaskIo { stderr: TaskStdio::piped(), stdout: TaskStdio::piped(), @@ -262,6 +276,17 @@ impl<'a> LifecycleScripts<'a> { } self.strategy.did_run_scripts(package)?; } + + // re-set up bin entries for the packages which we've run scripts for. + // lifecycle scripts can create files that are linked to by bin entries, + // and the only reliable way to handle this is to re-link bin entries + // (this is what PNPM does as well) + bin_entries.finish_only( + snapshot, + &root_node_modules_dir_path.join(".bin"), + |outcome| outcome.warn_if_failed(), + &package_ids, + )?; } if failed_packages.is_empty() { Ok(()) @@ -281,9 +306,10 @@ impl<'a> LifecycleScripts<'a> { // take in all (non copy) packages from snapshot, // and resolve the set of available binaries to create // custom commands available to the task runner -fn resolve_baseline_custom_commands( - snapshot: &NpmResolutionSnapshot, - packages: &[NpmResolutionPackage], +fn resolve_baseline_custom_commands<'a>( + bin_entries: &mut BinEntries<'a>, + snapshot: &'a NpmResolutionSnapshot, + packages: &'a [NpmResolutionPackage], get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, ) -> Result { let mut custom_commands = crate::task_runner::TaskCustomCommands::new(); @@ -306,6 +332,7 @@ fn resolve_baseline_custom_commands( // doing it for packages that are set up already. // realistically, scripts won't be run very often so it probably isn't too big of an issue. 
resolve_custom_commands_from_packages( + bin_entries, custom_commands, snapshot, packages, @@ -320,12 +347,12 @@ fn resolve_custom_commands_from_packages< 'a, P: IntoIterator, >( + bin_entries: &mut BinEntries<'a>, mut commands: crate::task_runner::TaskCustomCommands, snapshot: &'a NpmResolutionSnapshot, packages: P, get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf, ) -> Result { - let mut bin_entries = BinEntries::new(); for package in packages { let package_path = get_package_path(package); @@ -333,7 +360,7 @@ fn resolve_custom_commands_from_packages< bin_entries.add(package, package_path); } } - let bins = bin_entries.into_bin_files(snapshot); + let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot); for (bin_name, script_path) in bins { commands.insert( bin_name.clone(), @@ -356,7 +383,9 @@ fn resolve_custom_commands_from_deps( snapshot: &NpmResolutionSnapshot, get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, ) -> Result { + let mut bin_entries = BinEntries::new(); resolve_custom_commands_from_packages( + &mut bin_entries, baseline, snapshot, package diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index eddb0dc9b6..50c5bd2689 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -55,6 +55,7 @@ use crate::util::progress_bar::ProgressMessagePrompt; use super::super::cache::NpmCache; use super::super::cache::TarballCache; use super::super::resolution::NpmResolution; +use super::common::bin_entries; use super::common::NpmPackageFsResolver; use super::common::RegistryReadPermissionChecker; @@ -329,8 +330,7 @@ async fn sync_resolution_with_fs( let mut cache_futures = FuturesUnordered::new(); let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> = HashMap::with_capacity(package_partitions.packages.len()); - let bin_entries = - Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new())); + let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new())); let mut lifecycle_scripts = super::common::lifecycle_scripts::LifecycleScripts::new( lifecycle_scripts, @@ -658,7 +658,28 @@ async fn sync_resolution_with_fs( // 7. Set up `node_modules/.bin` entries for packages that need it. { let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut()); - bin_entries.finish(snapshot, &bin_node_modules_dir_path)?; + bin_entries.finish( + snapshot, + &bin_node_modules_dir_path, + |setup_outcome| { + match setup_outcome { + bin_entries::EntrySetupOutcome::MissingEntrypoint { + package, + package_path, + .. + } if super::common::lifecycle_scripts::has_lifecycle_scripts( + package, + package_path, + ) && lifecycle_scripts.can_run_scripts(&package.id.nv) + && !lifecycle_scripts.has_run_scripts(package) => + { + // ignore, it might get fixed when the lifecycle scripts run. + // if not, we'll warn then + } + outcome => outcome.warn_if_failed(), + } + }, + )?; } // 8. 
Create symlinks for the workspace packages @@ -708,7 +729,7 @@ async fn sync_resolution_with_fs( .finish( snapshot, &package_partitions.packages, - Some(root_node_modules_dir_path), + root_node_modules_dir_path, progress_bar, ) .await?; diff --git a/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/install.mjs b/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/install.mjs new file mode 100644 index 0000000000..31020fcdf9 --- /dev/null +++ b/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/install.mjs @@ -0,0 +1,3 @@ +import * as fs from "node:fs"; + +fs.writeFileSync("./testbin.js", "#!/usr/bin/env node\nconsole.log('run testbin');"); \ No newline at end of file diff --git a/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/package.json b/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/package.json new file mode 100644 index 0000000000..ad8dea002e --- /dev/null +++ b/tests/registry/npm/@denotest/bin-created-by-lifecycle/1.0.0/package.json @@ -0,0 +1,10 @@ +{ + "name": "@denotest/bin-created-by-lifecycle", + "version": "1.0.0", + "scripts": { + "install": "node install.mjs" + }, + "bin": { + "testbin": "testbin.js" + } +} \ No newline at end of file diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/__test__.jsonc b/tests/specs/npm/bin_entry_created_by_lifecycle/__test__.jsonc new file mode 100644 index 0000000000..665aec823d --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/__test__.jsonc @@ -0,0 +1,29 @@ +{ + "tempDir": true, + "tests": { + "all_at_once": { + "steps": [ + { + "args": "install --allow-scripts", + "output": "all_at_once_install.out" + }, + { "args": "task run-testbin", "output": "run_testbin.out" } + ] + }, + "separate_steps": { + "steps": [ + { "if": "unix", "args": "install", "output": "install_warn.out" }, + { + "if": "windows", + "args": "install", + "output": "install_warn_windows.out" + }, + { + "args": "install --allow-scripts", + "output": "Initialize @denotest/bin-created-by-lifecycle@1.0.0: running 'install' script\n" + }, + { "args": "task run-testbin", "output": "run_testbin.out" } + ] + } + } +} diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/all_at_once_install.out b/tests/specs/npm/bin_entry_created_by_lifecycle/all_at_once_install.out new file mode 100644 index 0000000000..bfaba3caf2 --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/all_at_once_install.out @@ -0,0 +1,4 @@ +Download http://localhost:4260/@denotest%2fbin-created-by-lifecycle +Download http://localhost:4260/@denotest/bin-created-by-lifecycle/1.0.0.tgz +Initialize @denotest/bin-created-by-lifecycle@1.0.0 +Initialize @denotest/bin-created-by-lifecycle@1.0.0: running 'install' script diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn.out b/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn.out new file mode 100644 index 0000000000..864a3f6f51 --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn.out @@ -0,0 +1,10 @@ +Download http://localhost:4260/@denotest%2fbin-created-by-lifecycle +Download http://localhost:4260/@denotest/bin-created-by-lifecycle/1.0.0.tgz +Initialize @denotest/bin-created-by-lifecycle@1.0.0 +Warning Trying to set up 'testbin' bin for "[WILDCARD]bin-created-by-lifecycle", but the entry point "[WILDCARD]testbin.js" doesn't exist. 
+Warning The following packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed: +┠─ npm:@denotest/bin-created-by-lifecycle@1.0.0 +┃ +┠─ This may cause the packages to not work correctly. +┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: + deno install --allow-scripts=npm:@denotest/bin-created-by-lifecycle@1.0.0 diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn_windows.out b/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn_windows.out new file mode 100644 index 0000000000..6838088735 --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/install_warn_windows.out @@ -0,0 +1,9 @@ +Download http://localhost:4260/@denotest%2fbin-created-by-lifecycle +Download http://localhost:4260/@denotest/bin-created-by-lifecycle/1.0.0.tgz +Initialize @denotest/bin-created-by-lifecycle@1.0.0 +Warning The following packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed: +┠─ npm:@denotest/bin-created-by-lifecycle@1.0.0 +┃ +┠─ This may cause the packages to not work correctly. +┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: + deno install --allow-scripts=npm:@denotest/bin-created-by-lifecycle@1.0.0 diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/package.json b/tests/specs/npm/bin_entry_created_by_lifecycle/package.json new file mode 100644 index 0000000000..9a8941ed9c --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/package.json @@ -0,0 +1,8 @@ +{ + "dependencies": { + "@denotest/bin-created-by-lifecycle": "1.0.0" + }, + "scripts": { + "run-testbin": "testbin" + } +} diff --git a/tests/specs/npm/bin_entry_created_by_lifecycle/run_testbin.out b/tests/specs/npm/bin_entry_created_by_lifecycle/run_testbin.out new file mode 100644 index 0000000000..a03f8bc58e --- /dev/null +++ b/tests/specs/npm/bin_entry_created_by_lifecycle/run_testbin.out @@ -0,0 +1,2 @@ +Task run-testbin testbin +run testbin From 01f3451869f56dc0651d8cd30bf8540aeb462c04 Mon Sep 17 00:00:00 2001 From: Richard Carson Date: Tue, 12 Nov 2024 12:49:49 -0500 Subject: [PATCH 07/18] chore: make fields public on `PermissionDeniedError` and `deno_kv::KvConfig` (#26798) A few small changes to avoid needing unsafe mem transmutes to instantiate the extensions --------- Signed-off-by: Richard Carson --- ext/kv/config.rs | 21 +++++++++++---------- runtime/permissions/lib.rs | 4 ++-- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/ext/kv/config.rs b/ext/kv/config.rs index 6e2e2c3a1f..7166bcbcc2 100644 --- a/ext/kv/config.rs +++ b/ext/kv/config.rs @@ -1,16 +1,17 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+#[derive(Clone, Copy, Debug)] pub struct KvConfig { - pub(crate) max_write_key_size_bytes: usize, - pub(crate) max_read_key_size_bytes: usize, - pub(crate) max_value_size_bytes: usize, - pub(crate) max_read_ranges: usize, - pub(crate) max_read_entries: usize, - pub(crate) max_checks: usize, - pub(crate) max_mutations: usize, - pub(crate) max_watched_keys: usize, - pub(crate) max_total_mutation_size_bytes: usize, - pub(crate) max_total_key_size_bytes: usize, + pub max_write_key_size_bytes: usize, + pub max_read_key_size_bytes: usize, + pub max_value_size_bytes: usize, + pub max_read_ranges: usize, + pub max_read_entries: usize, + pub max_checks: usize, + pub max_mutations: usize, + pub max_watched_keys: usize, + pub max_total_mutation_size_bytes: usize, + pub max_total_key_size_bytes: usize, } impl KvConfig { diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index 3aa5aabb3d..71ef7d2289 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -40,8 +40,8 @@ pub use prompter::PromptResponse; #[derive(Debug, thiserror::Error)] #[error("Requires {access}, {}", format_permission_error(.name))] pub struct PermissionDeniedError { - access: String, - name: &'static str, + pub access: String, + pub name: &'static str, } fn format_permission_error(name: &'static str) -> String { From 119910f3395cf073b7acf6a31c207daf597917f1 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Tue, 12 Nov 2024 17:14:19 -0500 Subject: [PATCH 08/18] fix(permissions): say to use --allow-run instead of --allow-all (#26842) For https://github.com/denoland/deno/issues/26839 --- runtime/ops/process.rs | 19 +++++++++++-------- tests/specs/run/ld_preload/env_arg.out | 4 ++-- tests/specs/run/ld_preload/env_arg.ts | 4 ++-- .../run/ld_preload/set_with_allow_env.out | 4 ++-- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index de3141f1f9..ee2f660dcc 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -756,14 +756,17 @@ fn check_run_permission( if !env_var_names.is_empty() { // we don't allow users to launch subprocesses with any LD_ or DYLD_* // env vars set because this allows executing code (ex. LD_PRELOAD) - return Err(CheckRunPermissionError::Other(deno_core::error::custom_error( - "NotCapable", - format!( - "Requires --allow-all permissions to spawn subprocess with {} environment variable{}.", - env_var_names.join(", "), - if env_var_names.len() != 1 { "s" } else { "" } - ) - ))); + return Err(CheckRunPermissionError::Other( + deno_core::error::custom_error( + "NotCapable", + format!( + "Requires --allow-run permissions to spawn subprocess with {0} environment variable{1}. Alternatively, spawn with {2} environment variable{1} unset.", + env_var_names.join(", "), + if env_var_names.len() != 1 { "s" } else { "" }, + if env_var_names.len() != 1 { "these" } else { "the" } + ), + ), + )); } permissions.check_run(cmd, api_name)?; } diff --git a/tests/specs/run/ld_preload/env_arg.out b/tests/specs/run/ld_preload/env_arg.out index 945737e65b..d87a1115c6 100644 --- a/tests/specs/run/ld_preload/env_arg.out +++ b/tests/specs/run/ld_preload/env_arg.out @@ -1,8 +1,8 @@ -NotCapable: Requires --allow-all permissions to spawn subprocess with LD_PRELOAD environment variable. +NotCapable: Requires --allow-run permissions to spawn subprocess with LD_PRELOAD environment variable. Alternatively, spawn with the environment variable unset. 
[WILDCARD] name: "NotCapable" } -NotCapable: Requires --allow-all permissions to spawn subprocess with LD_PRELOAD environment variable. +NotCapable: Requires --allow-run permissions to spawn subprocess with LD_PRELOAD environment variable. Alternatively, spawn with the environment variable unset. [WILDCARD] name: "NotCapable" } diff --git a/tests/specs/run/ld_preload/env_arg.ts b/tests/specs/run/ld_preload/env_arg.ts index d7ca1073df..fe043d56dc 100644 --- a/tests/specs/run/ld_preload/env_arg.ts +++ b/tests/specs/run/ld_preload/env_arg.ts @@ -1,5 +1,5 @@ try { - new Deno.Command("echo", { + new Deno.Command("curl", { env: { "LD_PRELOAD": "./libpreload.so", }, @@ -10,7 +10,7 @@ try { try { Deno.run({ - cmd: ["echo"], + cmd: ["curl"], env: { "LD_PRELOAD": "./libpreload.so", }, diff --git a/tests/specs/run/ld_preload/set_with_allow_env.out b/tests/specs/run/ld_preload/set_with_allow_env.out index f89582d6c8..570515fc00 100644 --- a/tests/specs/run/ld_preload/set_with_allow_env.out +++ b/tests/specs/run/ld_preload/set_with_allow_env.out @@ -1,8 +1,8 @@ -NotCapable: Requires --allow-all permissions to spawn subprocess with LD_PRELOAD environment variable. +NotCapable: Requires --allow-run permissions to spawn subprocess with LD_PRELOAD environment variable. Alternatively, spawn with the environment variable unset. [WILDCARD] name: "NotCapable" } -NotCapable: Requires --allow-all permissions to spawn subprocess with DYLD_FALLBACK_LIBRARY_PATH, LD_PRELOAD environment variables. +NotCapable: Requires --allow-run permissions to spawn subprocess with DYLD_FALLBACK_LIBRARY_PATH, LD_PRELOAD environment variables. Alternatively, spawn with these environment variables unset. [WILDCARD] name: "NotCapable" } From 43812ee8ff0eb2584c7beb18639da14d96d06817 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 13 Nov 2024 08:02:09 +0530 Subject: [PATCH 09/18] fix(ext/node): process.getBuiltinModule (#26833) Closes https://github.com/denoland/deno/issues/26832 --- ext/node/polyfills/01_require.js | 20 +++++++++++++++++++- ext/node/polyfills/process.ts | 14 ++++++++++++-- tests/unit_node/process_test.ts | 6 ++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index d818bb5727..0d267ca444 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -1233,6 +1233,24 @@ function isBuiltin(moduleName) { !StringPrototypeStartsWith(moduleName, "internal/"); } +function getBuiltinModule(id) { + if (!isBuiltin(id)) { + return undefined; + } + + if (StringPrototypeStartsWith(id, "node:")) { + // Slice 'node:' prefix + id = StringPrototypeSlice(id, 5); + } + + const mod = loadNativeModule(id, id); + if (mod) { + return mod.exports; + } + + return undefined; +} + Module.isBuiltin = isBuiltin; Module.createRequire = createRequire; @@ -1327,7 +1345,7 @@ export function register(_specifier, _parentUrl, _options) { return undefined; } -export { builtinModules, createRequire, isBuiltin, Module }; +export { builtinModules, createRequire, getBuiltinModule, isBuiltin, Module }; export const _cache = Module._cache; export const _extensions = Module._extensions; export const _findPath = Module._findPath; diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index 2605fa6d1a..bf626e4100 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -15,7 +15,7 @@ import { import { warnNotImplemented } from "ext:deno_node/_utils.ts"; import { EventEmitter } from "node:events"; 
-import Module from "node:module"; +import Module, { getBuiltinModule } from "node:module"; import { report } from "ext:deno_node/internal/process/report.ts"; import { validateString } from "ext:deno_node/internal/validators.mjs"; import { @@ -38,7 +38,15 @@ import { versions, } from "ext:deno_node/_process/process.ts"; import { _exiting } from "ext:deno_node/_process/exiting.ts"; -export { _nextTick as nextTick, chdir, cwd, env, version, versions }; +export { + _nextTick as nextTick, + chdir, + cwd, + env, + getBuiltinModule, + version, + versions, +}; import { createWritableStdioStream, initStdin, @@ -728,6 +736,8 @@ Process.prototype.getegid = getegid; /** This method is removed on Windows */ Process.prototype.geteuid = geteuid; +Process.prototype.getBuiltinModule = getBuiltinModule; + // TODO(kt3k): Implement this when we added -e option to node compat mode Process.prototype._eval = undefined; diff --git a/tests/unit_node/process_test.ts b/tests/unit_node/process_test.ts index f9138c8f08..49de2dce1d 100644 --- a/tests/unit_node/process_test.ts +++ b/tests/unit_node/process_test.ts @@ -1152,3 +1152,9 @@ Deno.test("process.stdout.columns writable", () => { process.stdout.columns = 80; assertEquals(process.stdout.columns, 80); }); + +Deno.test("getBuiltinModule", () => { + assert(process.getBuiltinModule("fs")); + assert(process.getBuiltinModule("node:fs")); + assertEquals(process.getBuiltinModule("something"), undefined); +}); From 7becd83a3828b35331d0fcb82c64146e520f154b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Czerniawski?= <33061335+lczerniawski@users.noreply.github.com> Date: Wed, 13 Nov 2024 05:35:04 +0100 Subject: [PATCH 10/18] feat(ext/fs): add ctime to Deno.stats and use it in node compat layer (#24801) This PR fixes #24453, by introducing a ctime (using ctime for UNIX and ChangeTime for Windows) to Deno.stats. Co-authored-by: Yoshiya Hinosawa --- cli/standalone/virtual_fs.rs | 3 +++ cli/tsc/dts/lib.deno.ns.d.ts | 7 ++++-- ext/fs/30_fs.js | 6 +++-- ext/fs/in_memory_fs.rs | 2 ++ ext/fs/ops.rs | 4 ++++ ext/fs/std_fs.rs | 34 ++++++++++++++++++++++++---- ext/io/fs.rs | 12 ++++++++++ ext/node/polyfills/_fs/_fs_stat.ts | 10 ++++---- tests/unit/stat_test.ts | 18 +++++++++++++-- tests/unit_node/_fs/_fs_stat_test.ts | 7 ++++++ 10 files changed, 88 insertions(+), 15 deletions(-) diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs index 0ae00accbf..26bb0db75f 100644 --- a/cli/standalone/virtual_fs.rs +++ b/cli/standalone/virtual_fs.rs @@ -350,6 +350,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: 0, dev: 0, @@ -372,6 +373,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: file.len, dev: 0, @@ -394,6 +396,7 @@ impl<'a> VfsEntryRef<'a> { atime: None, birthtime: None, mtime: None, + ctime: None, blksize: 0, size: 0, dev: 0, diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 6e0e84b687..8179e4223c 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -2971,6 +2971,10 @@ declare namespace Deno { * field from `stat` on Mac/BSD and `ftCreationTime` on Windows. This may * not be available on all platforms. */ birthtime: Date | null; + /** The last change time of the file. This corresponds to the `ctime` + * field from `stat` on Mac/BSD and `ChangeTime` on Windows. This may + * not be available on all platforms. */ + ctime: Date | null; /** ID of the device containing the file. 
*/ dev: number; /** Inode number. @@ -2979,8 +2983,7 @@ declare namespace Deno { ino: number | null; /** The underlying raw `st_mode` bits that contain the standard Unix * permissions for this file/directory. - * - * _Linux/Mac OS only._ */ + */ mode: number | null; /** Number of hard links pointing to this file. * diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index c8e19ac758..40513e7e02 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -346,9 +346,10 @@ const { 0: statStruct, 1: statBuf } = createByteStruct({ mtime: "date", atime: "date", birthtime: "date", + ctime: "date", dev: "u64", ino: "?u64", - mode: "?u64", + mode: "u64", nlink: "?u64", uid: "?u64", gid: "?u64", @@ -377,9 +378,10 @@ function parseFileInfo(response) { birthtime: response.birthtimeSet === true ? new Date(response.birthtime) : null, + ctime: response.ctimeSet === true ? new Date(response.ctime) : null, dev: response.dev, + mode: response.mode, ino: unix ? response.ino : null, - mode: unix ? response.mode : null, nlink: unix ? response.nlink : null, uid: unix ? response.uid : null, gid: unix ? response.gid : null, diff --git a/ext/fs/in_memory_fs.rs b/ext/fs/in_memory_fs.rs index e29b9d50c6..34b77836d9 100644 --- a/ext/fs/in_memory_fs.rs +++ b/ext/fs/in_memory_fs.rs @@ -229,6 +229,7 @@ impl FileSystem for InMemoryFs { mtime: None, atime: None, birthtime: None, + ctime: None, dev: 0, ino: 0, mode: 0, @@ -251,6 +252,7 @@ impl FileSystem for InMemoryFs { mtime: None, atime: None, birthtime: None, + ctime: None, dev: 0, ino: 0, mode: 0, diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 9b76b49e61..3d0d96ce66 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -1795,6 +1795,8 @@ create_struct_writer! { atime: u64, birthtime_set: bool, birthtime: u64, + ctime_set: bool, + ctime: u64, // Following are only valid under Unix. 
dev: u64, ino: u64, @@ -1826,6 +1828,8 @@ impl From for SerializableStat { atime: stat.atime.unwrap_or(0), birthtime_set: stat.birthtime.is_some(), birthtime: stat.birthtime.unwrap_or(0), + ctime_set: stat.ctime.is_some(), + ctime: stat.ctime.unwrap_or(0), dev: stat.dev, ino: stat.ino, diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 1a83c97c53..73439d9bab 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -821,24 +821,46 @@ fn stat_extra( Ok(info.dwVolumeSerialNumber as u64) } + const WINDOWS_TICK: i64 = 10_000; // 100-nanosecond intervals in a millisecond + const SEC_TO_UNIX_EPOCH: i64 = 11_644_473_600; // Seconds between Windows epoch and Unix epoch + + fn windows_time_to_unix_time_msec(windows_time: &i64) -> i64 { + let milliseconds_since_windows_epoch = windows_time / WINDOWS_TICK; + milliseconds_since_windows_epoch - SEC_TO_UNIX_EPOCH * 1000 + } + use windows_sys::Wdk::Storage::FileSystem::FILE_ALL_INFORMATION; + use windows_sys::Win32::Foundation::NTSTATUS; unsafe fn query_file_information( handle: winapi::shared::ntdef::HANDLE, - ) -> std::io::Result { + ) -> Result { use windows_sys::Wdk::Storage::FileSystem::NtQueryInformationFile; + use windows_sys::Win32::Foundation::RtlNtStatusToDosError; + use windows_sys::Win32::Foundation::ERROR_MORE_DATA; + use windows_sys::Win32::System::IO::IO_STATUS_BLOCK; let mut info = std::mem::MaybeUninit::::zeroed(); + let mut io_status_block = + std::mem::MaybeUninit::::zeroed(); let status = NtQueryInformationFile( handle as _, - std::ptr::null_mut(), + io_status_block.as_mut_ptr(), info.as_mut_ptr() as *mut _, std::mem::size_of::() as _, 18, /* FileAllInformation */ ); if status < 0 { - return Err(std::io::Error::last_os_error()); + let converted_status = RtlNtStatusToDosError(status); + + // If error more data is returned, then it means that the buffer is too small to get full filename information + // to have that we should retry. However, since we only use BasicInformation and StandardInformation, it is fine to ignore it + // since struct is populated with other data anyway. 
+ // https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/nf-ntifs-ntqueryinformationfile#remarksdd + if converted_status != ERROR_MORE_DATA { + return Err(converted_status as NTSTATUS); + } } Ok(info.assume_init()) @@ -862,10 +884,13 @@ fn stat_extra( } let result = get_dev(file_handle); - CloseHandle(file_handle); fsstat.dev = result?; if let Ok(file_info) = query_file_information(file_handle) { + fsstat.ctime = Some(windows_time_to_unix_time_msec( + &file_info.BasicInformation.ChangeTime, + ) as u64); + if file_info.BasicInformation.FileAttributes & winapi::um::winnt::FILE_ATTRIBUTE_REPARSE_POINT != 0 @@ -898,6 +923,7 @@ fn stat_extra( } } + CloseHandle(file_handle); Ok(()) } } diff --git a/ext/io/fs.rs b/ext/io/fs.rs index 8854265209..7ef02315ba 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -94,6 +94,7 @@ pub struct FsStat { pub mtime: Option, pub atime: Option, pub birthtime: Option, + pub ctime: Option, pub dev: u64, pub ino: u64, @@ -153,6 +154,16 @@ impl FsStat { } } + #[inline(always)] + fn get_ctime(ctime_or_0: i64) -> Option { + if ctime_or_0 > 0 { + // ctime return seconds since epoch, but we need milliseconds + return Some(ctime_or_0 as u64 * 1000); + } + + None + } + Self { is_file: metadata.is_file(), is_directory: metadata.is_dir(), @@ -162,6 +173,7 @@ impl FsStat { mtime: to_msec(metadata.modified()), atime: to_msec(metadata.accessed()), birthtime: to_msec(metadata.created()), + ctime: get_ctime(unix_or_zero!(ctime)), dev: unix_or_zero!(dev), ino: unix_or_zero!(ino), diff --git a/ext/node/polyfills/_fs/_fs_stat.ts b/ext/node/polyfills/_fs/_fs_stat.ts index d00c81ffb6..507cb05eaf 100644 --- a/ext/node/polyfills/_fs/_fs_stat.ts +++ b/ext/node/polyfills/_fs/_fs_stat.ts @@ -290,8 +290,8 @@ export function convertFileInfoToStats(origin: Deno.FileInfo): Stats { isFIFO: () => false, isCharacterDevice: () => false, isSocket: () => false, - ctime: origin.mtime, - ctimeMs: origin.mtime?.getTime() || null, + ctime: origin.ctime, + ctimeMs: origin.ctime?.getTime() || null, }); return stats; @@ -336,9 +336,9 @@ export function convertFileInfoToBigIntStats( isFIFO: () => false, isCharacterDevice: () => false, isSocket: () => false, - ctime: origin.mtime, - ctimeMs: origin.mtime ? BigInt(origin.mtime.getTime()) : null, - ctimeNs: origin.mtime ? BigInt(origin.mtime.getTime()) * 1000000n : null, + ctime: origin.ctime, + ctimeMs: origin.ctime ? BigInt(origin.ctime.getTime()) : null, + ctimeNs: origin.ctime ? BigInt(origin.ctime.getTime()) * 1000000n : null, }); return stats; } diff --git a/tests/unit/stat_test.ts b/tests/unit/stat_test.ts index 59831a069f..0609035b41 100644 --- a/tests/unit/stat_test.ts +++ b/tests/unit/stat_test.ts @@ -31,6 +31,13 @@ Deno.test( assert( tempInfo.birthtime === null || now - tempInfo.birthtime.valueOf() < 1000, ); + assert(tempInfo.ctime !== null && now - tempInfo.ctime.valueOf() < 1000); + const mode = tempInfo.mode! 
& 0o777; + if (Deno.build.os === "windows") { + assertEquals(mode, 0o666); + } else { + assertEquals(mode, 0o600); + } const readmeInfoByUrl = Deno.statSync(pathToAbsoluteFileUrl("README.md")); assert(readmeInfoByUrl.isFile); @@ -65,6 +72,10 @@ Deno.test( tempInfoByUrl.birthtime === null || now - tempInfoByUrl.birthtime.valueOf() < 1000, ); + assert( + tempInfoByUrl.ctime !== null && + now - tempInfoByUrl.ctime.valueOf() < 1000, + ); Deno.removeSync(tempFile, { recursive: true }); Deno.removeSync(tempFileForUrl, { recursive: true }); @@ -171,6 +182,7 @@ Deno.test( assert( tempInfo.birthtime === null || now - tempInfo.birthtime.valueOf() < 1000, ); + assert(tempInfo.ctime !== null && now - tempInfo.ctime.valueOf() < 1000); const tempFileForUrl = await Deno.makeTempFile(); const tempInfoByUrl = await Deno.stat( @@ -191,7 +203,10 @@ Deno.test( tempInfoByUrl.birthtime === null || now - tempInfoByUrl.birthtime.valueOf() < 1000, ); - + assert( + tempInfoByUrl.ctime !== null && + now - tempInfoByUrl.ctime.valueOf() < 1000, + ); Deno.removeSync(tempFile, { recursive: true }); Deno.removeSync(tempFileForUrl, { recursive: true }); }, @@ -271,7 +286,6 @@ Deno.test( const s = Deno.statSync(filename); assert(s.dev !== 0); assert(s.ino === null); - assert(s.mode === null); assert(s.nlink === null); assert(s.uid === null); assert(s.gid === null); diff --git a/tests/unit_node/_fs/_fs_stat_test.ts b/tests/unit_node/_fs/_fs_stat_test.ts index 02c620e2dc..e42aa34a9a 100644 --- a/tests/unit_node/_fs/_fs_stat_test.ts +++ b/tests/unit_node/_fs/_fs_stat_test.ts @@ -18,9 +18,11 @@ export function assertStats(actual: Stats, expected: Deno.FileInfo) { assertEquals(actual.atime?.getTime(), expected.atime?.getTime()); assertEquals(actual.mtime?.getTime(), expected.mtime?.getTime()); assertEquals(actual.birthtime?.getTime(), expected.birthtime?.getTime()); + assertEquals(actual.ctime?.getTime(), expected.ctime?.getTime()); assertEquals(actual.atimeMs ?? undefined, expected.atime?.getTime()); assertEquals(actual.mtimeMs ?? undefined, expected.mtime?.getTime()); assertEquals(actual.birthtimeMs ?? undefined, expected.birthtime?.getTime()); + assertEquals(actual.ctimeMs ?? undefined, expected.ctime?.getTime()); assertEquals(actual.isFile(), expected.isFile); assertEquals(actual.isDirectory(), expected.isDirectory); assertEquals(actual.isSymbolicLink(), expected.isSymlink); @@ -49,6 +51,7 @@ export function assertStatsBigInt( assertEquals(actual.atime?.getTime(), expected.atime?.getTime()); assertEquals(actual.mtime?.getTime(), expected.mtime?.getTime()); assertEquals(actual.birthtime?.getTime(), expected.birthtime?.getTime()); + assertEquals(actual.ctime?.getTime(), expected.ctime?.getTime()); assertEquals( actual.atimeMs === null ? undefined : Number(actual.atimeMs), expected.atime?.getTime(), @@ -61,6 +64,10 @@ export function assertStatsBigInt( actual.birthtimeMs === null ? undefined : Number(actual.birthtimeMs), expected.birthtime?.getTime(), ); + assertEquals( + actual.ctimeMs === null ? undefined : Number(actual.ctimeMs), + expected.ctime?.getTime(), + ); assertEquals(actual.atimeNs === null, actual.atime === null); assertEquals(actual.mtimeNs === null, actual.mtime === null); assertEquals(actual.birthtimeNs === null, actual.birthtime === null); From aa546189be730163ee5370029e4dfdb3b454ab96 Mon Sep 17 00:00:00 2001 From: snek Date: Wed, 13 Nov 2024 11:38:46 +0100 Subject: [PATCH 11/18] feat: OpenTelemetry Tracing API and Exporting (#26710) Initial import of OTEL code supporting tracing. Metrics soon to come. 
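As a rough illustration of what the new unstable tracing surface allows (a sketch only, not part of this patch; it assumes the `otel` unstable feature is enabled so that `Deno.tracing` is exposed, per the declarations added to `cli/tsc/dts/lib.deno.unstable.d.ts` below):

```ts
// Sketch: create a span for a unit of work with the unstable API.
// `Span` enters itself as the "current" span on construction and, since it
// implements Disposable, `using` ends it when the scope exits.
function handleRequest(req: Request): Response {
  using span = new Deno.tracing.Span("handle-request", "server");
  span.setAttribute("http.method", req.method);
  // Spans constructed inside this scope pick up `span` as their parent
  // via Span.current().
  return new Response("ok");
}
```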
Implements APIs for https://jsr.io/@deno/otel so that code using OpenTelemetry.js just works tm. There is still a lot of work to do with configuration and adding built-in tracing to core APIs, which will come in followup PRs. --------- Co-authored-by: Luca Casonato --- Cargo.lock | 324 ++++++++-- Cargo.toml | 6 + cli/args/mod.rs | 18 + cli/factory.rs | 1 + cli/standalone/binary.rs | 3 + cli/standalone/mod.rs | 1 + cli/tsc/dts/lib.deno.unstable.d.ts | 102 ++++ cli/worker.rs | 6 + ext/http/00_serve.ts | 141 +++-- runtime/Cargo.toml | 7 + runtime/js/90_deno_ns.js | 19 +- runtime/js/99_main.js | 14 +- runtime/js/telemetry.js | 395 +++++++++++++ runtime/lib.rs | 16 +- runtime/ops/mod.rs | 1 + runtime/ops/os/mod.rs | 2 + runtime/ops/otel.rs | 686 ++++++++++++++++++++++ runtime/shared.rs | 1 + runtime/snapshot.rs | 1 + runtime/web_worker.rs | 3 + runtime/worker.rs | 3 + runtime/worker_bootstrap.rs | 11 + tests/specs/cli/otel_basic/__test__.jsonc | 4 + tests/specs/cli/otel_basic/child.ts | 20 + tests/specs/cli/otel_basic/deno.json | 4 + tests/specs/cli/otel_basic/main.ts | 76 +++ tools/core_import_map.json | 1 + 27 files changed, 1742 insertions(+), 124 deletions(-) create mode 100644 runtime/js/telemetry.js create mode 100644 runtime/ops/otel.rs create mode 100644 tests/specs/cli/otel_basic/__test__.jsonc create mode 100644 tests/specs/cli/otel_basic/child.ts create mode 100644 tests/specs/cli/otel_basic/deno.json create mode 100644 tests/specs/cli/otel_basic/main.ts diff --git a/Cargo.lock b/Cargo.lock index 6f7799bac8..00c1f0736a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -347,6 +347,53 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +[[package]] +name = "axum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", +] + [[package]] name = "backtrace" version = "0.3.73" @@ -1118,7 +1165,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -1207,7 +1254,7 @@ dependencies = [ "http-body-util", "hyper-util", "import_map", - "indexmap", + "indexmap 2.3.0", "jsonc-parser", "junction", "lazy-regex", @@ -1363,7 +1410,7 @@ dependencies = [ "base32", "deno_media_type", "deno_path_util", - "indexmap", + "indexmap 2.3.0", "log", "once_cell", "parking_lot", @@ -1398,7 +1445,7 @@ dependencies = [ "glob", "ignore", "import_map", - "indexmap", + "indexmap 2.3.0", "jsonc-parser", "log", "percent-encoding", @@ -1519,7 +1566,7 @@ dependencies = [ 
"handlebars", "html-escape", "import_map", - "indexmap", + "indexmap 2.3.0", "itoa", "lazy_static", "regex", @@ -1619,7 +1666,7 @@ dependencies = [ "encoding_rs", "futures", "import_map", - "indexmap", + "indexmap 2.3.0", "log", "monch", "once_cell", @@ -1715,7 +1762,7 @@ dependencies = [ "http-body-util", "log", "num-bigint", - "prost", + "prost 0.11.9", "prost-build", "rand", "rusqlite", @@ -1851,7 +1898,7 @@ dependencies = [ "hyper 1.4.1", "hyper-util", "idna 0.3.0", - "indexmap", + "indexmap 2.3.0", "ipnetwork", "k256", "lazy-regex", @@ -1941,7 +1988,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cbc4c4d3eb0960b58e8f43f9fc2d3f620fcac9a03cd85203e08db5b04e83c1f" dependencies = [ "deno_semver", - "indexmap", + "indexmap 2.3.0", "serde", "serde_json", "thiserror", @@ -1997,6 +2044,7 @@ dependencies = [ name = "deno_runtime" version = "0.186.0" dependencies = [ + "async-trait", "color-print", "deno_ast", "deno_broadcast_channel", @@ -2042,7 +2090,13 @@ dependencies = [ "notify", "ntapi", "once_cell", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", "percent-encoding", + "pin-project", "regex", "rustyline", "same-file", @@ -2268,7 +2322,7 @@ dependencies = [ "chrono", "futures", "num-bigint", - "prost", + "prost 0.11.9", "serde", "uuid", ] @@ -2288,7 +2342,7 @@ dependencies = [ "futures", "http 1.1.0", "log", - "prost", + "prost 0.11.9", "rand", "serde", "serde_json", @@ -2548,8 +2602,8 @@ checksum = "f3ab0dd2bedc109d25f0d21afb09b7d329f6c6fa83b095daf31d2d967e091548" dependencies = [ "anyhow", "bumpalo", - "hashbrown", - "indexmap", + "hashbrown 0.14.5", + "indexmap 2.3.0", "rustc-hash 1.1.0", "serde", "unicode-width", @@ -2755,7 +2809,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48cede2bb1b07dd598d269f973792c43e0cd92686d3b452bd6e01d7a8eb01211" dependencies = [ "debug-ignore", - "indexmap", + "indexmap 2.3.0", "log", "thiserror", "zerocopy", @@ -3392,7 +3446,7 @@ checksum = "9c08c1f623a8d0b722b8b99f821eb0ba672a1618f0d3b16ddbee1cedd2dd8557" dependencies = [ "bitflags 2.6.0", "gpu-descriptor-types", - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -3436,7 +3490,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap", + "indexmap 2.3.0", "slab", "tokio", "tokio-util", @@ -3455,7 +3509,7 @@ dependencies = [ "futures-sink", "futures-util", "http 1.1.0", - "indexmap", + "indexmap 2.3.0", "slab", "tokio", "tokio-util", @@ -3468,7 +3522,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8588661a8607108a5ca69cab034063441a0413a0b041c13618a7dd348021ef6f" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", "serde", ] @@ -3487,6 +3541,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.5" @@ -3503,7 +3563,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", ] [[package]] @@ -3666,7 +3726,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a9de2bdef6354361892492bab5e316b2d78a0ee9971db4d36da9b1eb0e11999" dependencies = [ - "hashbrown", 
+ "hashbrown 0.14.5", "new_debug_unreachable", "once_cell", "phf", @@ -3820,6 +3880,19 @@ dependencies = [ "webpki-roots", ] +[[package]] +name = "hyper-timeout" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +dependencies = [ + "hyper 1.4.1", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.7" @@ -3908,7 +3981,7 @@ version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "351a787decc56f38d65d16d32687265045d6d6a4531b4a0e1b649def3590354e" dependencies = [ - "indexmap", + "indexmap 2.3.0", "log", "percent-encoding", "serde", @@ -3917,6 +3990,16 @@ dependencies = [ "url", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.3.0" @@ -3924,7 +4007,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.5", "serde", ] @@ -4406,6 +4489,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "md-5" version = "0.10.6" @@ -4533,7 +4622,7 @@ dependencies = [ "bitflags 2.6.0", "codespan-reporting", "hexf-parse", - "indexmap", + "indexmap 2.3.0", "log", "num-traits", "rustc-hash 1.1.0", @@ -4837,6 +4926,93 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "opentelemetry" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f3cebff57f7dbd1255b44d8bddc2cebeb0ea677dbaa2e25a3070a91b318f660" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", +] + +[[package]] +name = "opentelemetry-http" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a8a7f5f6ba7c1b286c2fbca0454eaba116f63bbe69ed250b642d36fbb04d80" +dependencies = [ + "async-trait", + "bytes", + "http 1.1.0", + "opentelemetry", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" +dependencies = [ + "async-trait", + "futures-core", + "http 1.1.0", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost 0.13.3", + "serde_json", + "thiserror", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" +dependencies = [ + "hex", + "opentelemetry", + "opentelemetry_sdk", + "prost 0.13.3", + "serde", + "tonic", +] + +[[package]] +name = 
"opentelemetry-semantic-conventions" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc1b6902ff63b32ef6c489e8048c5e253e2e4a803ea3ea7e783914536eb15c52" + +[[package]] +name = "opentelemetry_sdk" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b742c1cae4693792cc564e58d75a2a0ba29421a34a85b50da92efa89ecb2bc" +dependencies = [ + "async-trait", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "once_cell", + "opentelemetry", + "percent-encoding", + "rand", + "serde_json", + "thiserror", + "tracing", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -5062,7 +5238,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 2.3.0", ] [[package]] @@ -5339,7 +5515,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.11.9", +] + +[[package]] +name = "prost" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +dependencies = [ + "bytes", + "prost-derive 0.13.3", ] [[package]] @@ -5356,7 +5542,7 @@ dependencies = [ "multimap", "petgraph", "prettyplease 0.1.25", - "prost", + "prost 0.11.9", "prost-types", "regex", "syn 1.0.109", @@ -5377,13 +5563,26 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "prost-derive" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +dependencies = [ + "anyhow", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.72", +] + [[package]] name = "prost-types" version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" dependencies = [ - "prost", + "prost 0.11.9", ] [[package]] @@ -5439,7 +5638,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1a341ae463320e9f8f34adda49c8a85d81d4e8f34cce4397fb0350481552224" dependencies = [ "chrono", - "indexmap", + "indexmap 2.3.0", "quick-xml", "strip-ansi-escapes", "thiserror", @@ -5801,7 +6000,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" dependencies = [ "countme", - "hashbrown", + "hashbrown 0.14.5", "memoffset", "rustc-hash 1.1.0", "text-size", @@ -6206,7 +6405,7 @@ version = "1.0.122" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" dependencies = [ - "indexmap", + "indexmap 2.3.0", "itoa", "memchr", "ryu", @@ -6603,7 +6802,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc8bd3075d1c6964010333fae9ddcd91ad422a4f8eb8b3206a9b2b6afb4209e" dependencies = [ "bumpalo", - "hashbrown", + "hashbrown 0.14.5", "ptr_meta", "rustc-hash 1.1.0", "triomphe", @@ -6629,7 +6828,7 @@ checksum = "c77c112c218a09635d99a45802a81b4f341d6c28c81076aa2c29ba3bcd9151a9" dependencies = [ "anyhow", "crc", - "indexmap", + "indexmap 2.3.0", "is-macro", "once_cell", "parking_lot", @@ -6699,7 +6898,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4740e53eaf68b101203c1df0937d5161a29f3c13bceed0836ddfe245b72dd000" dependencies = [ "anyhow", - "indexmap", + "indexmap 2.3.0", "serde", "serde_json", "swc_cached", @@ -6811,7 +7010,7 @@ checksum = "65f21494e75d0bd8ef42010b47cabab9caaed8f2207570e809f6f4eb51a710d1" dependencies = [ "better_scoped_tls", "bitflags 2.6.0", - "indexmap", + "indexmap 2.3.0", "once_cell", "phf", "rustc-hash 1.1.0", @@ -6859,7 +7058,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98d8447ea20ef76958a8240feef95743702485a84331e6df5bdbe7e383c87838" dependencies = [ "dashmap", - "indexmap", + "indexmap 2.3.0", "once_cell", "petgraph", "rustc-hash 1.1.0", @@ -6904,7 +7103,7 @@ checksum = "76c76d8b9792ce51401d38da0fa62158d61f6d80d16d68fe5b03ce4bf5fba383" dependencies = [ "base64 0.21.7", "dashmap", - "indexmap", + "indexmap 2.3.0", "once_cell", "serde", "sha1", @@ -6944,7 +7143,7 @@ version = "0.134.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "029eec7dd485923a75b5a45befd04510288870250270292fc2c1b3a9e7547408" dependencies = [ - "indexmap", + "indexmap 2.3.0", "num_cpus", "once_cell", "rustc-hash 1.1.0", @@ -6989,7 +7188,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357e2c97bb51431d65080f25b436bc4e2fc1a7f64a643bc21a8353e478dc799f" dependencies = [ - "indexmap", + "indexmap 2.3.0", "petgraph", "rustc-hash 1.1.0", "swc_common", @@ -7210,7 +7409,7 @@ dependencies = [ "os_pipe", "parking_lot", "pretty_assertions", - "prost", + "prost 0.11.9", "prost-build", "regex", "reqwest", @@ -7402,9 +7601,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", @@ -7422,7 +7621,7 @@ dependencies = [ "futures-io", "futures-sink", "futures-util", - "hashbrown", + "hashbrown 0.14.5", "pin-project-lite", "slab", "tokio", @@ -7438,6 +7637,36 @@ dependencies = [ "serde", ] +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.22.1", + "bytes", + "h2 0.4.4", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.4.1", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost 0.13.3", + "socket2", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.4.13" @@ -7446,11 +7675,16 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", + "indexmap 1.9.3", "pin-project", "pin-project-lite", + "rand", + "slab", "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -7763,7 +7997,7 @@ checksum = "97599c400fc79925922b58303e98fcb8fa88f573379a08ddb652e72cbd2e70f6" dependencies = [ "bitflags 2.6.0", "encoding_rs", - "indexmap", + "indexmap 2.3.0", "num-bigint", "serde", "thiserror", @@ -7972,7 +8206,7 @@ dependencies = [ "cfg_aliases", "codespan-reporting", "document-features", - "indexmap", + "indexmap 2.3.0", "log", "naga", 
"once_cell", @@ -8521,7 +8755,7 @@ dependencies = [ "crossbeam-utils", "displaydoc", "flate2", - "indexmap", + "indexmap 2.3.0", "memchr", "thiserror", ] diff --git a/Cargo.toml b/Cargo.toml index e372e542bb..50e41145bf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -207,6 +207,12 @@ yoke = { version = "0.7.4", features = ["derive"] } zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] } zstd = "=0.12.4" +opentelemetry = "0.27.0" +opentelemetry-http = "0.27.0" +opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] } +opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] } +opentelemetry_sdk = "0.27.0" + # crypto hkdf = "0.12.3" rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node diff --git a/cli/args/mod.rs b/cli/args/mod.rs index e19025f8b1..3aaf2bd438 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -27,6 +27,7 @@ use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; use deno_path_util::normalize_path; +use deno_runtime::ops::otel::OtelConfig; use deno_semver::npm::NpmPackageReqReference; use import_map::resolve_import_map_value_from_specifier; @@ -1129,6 +1130,23 @@ impl CliOptions { } } + pub fn otel_config(&self) -> Option { + if self + .flags + .unstable_config + .features + .contains(&String::from("otel")) + { + Some(OtelConfig { + runtime_name: Cow::Borrowed("deno"), + runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno), + ..Default::default() + }) + } else { + None + } + } + pub fn env_file_name(&self) -> Option<&String> { self.flags.env_file.as_ref() } diff --git a/cli/factory.rs b/cli/factory.rs index 4a36c75ba2..417f771a30 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -939,6 +939,7 @@ impl CliFactory { StorageKeyResolver::from_options(cli_options), cli_options.sub_command().clone(), self.create_cli_main_worker_options()?, + self.cli_options()?.otel_config(), )) } diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 9e26512268..960aad1578 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -47,6 +47,7 @@ use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::RealFs; use deno_runtime::deno_io::fs::FsError; use deno_runtime::deno_node::PackageJson; +use deno_runtime::ops::otel::OtelConfig; use deno_semver::npm::NpmVersionReqParseError; use deno_semver::package::PackageReq; use deno_semver::Version; @@ -185,6 +186,7 @@ pub struct Metadata { pub entrypoint_key: String, pub node_modules: Option, pub unstable_config: UnstableConfig, + pub otel_config: Option, // None means disabled. } fn write_binary_bytes( @@ -722,6 +724,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { sloppy_imports: cli_options.unstable_sloppy_imports(), features: cli_options.unstable_features(), }, + otel_config: cli_options.otel_config(), }; write_binary_bytes( diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 85610f4c20..bb0ab423dd 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -800,6 +800,7 @@ pub async fn run(data: StandaloneData) -> Result { serve_port: None, serve_host: None, }, + metadata.otel_config, ); // Initialize v8 once from the main thread. 
diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index 973a09d92a..6234268c39 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1225,6 +1225,108 @@ declare namespace Deno { export {}; // only export exports } + /** + * @category Telemetry + * @experimental + */ + export namespace tracing { + /** + * Whether tracing is enabled. + * @category Telemetry + * @experimental + */ + export const enabled: boolean; + + /** + * Allowed attribute type. + * @category Telemetry + * @experimental + */ + export type AttributeValue = string | number | boolean | bigint; + + /** + * A tracing span. + * @category Telemetry + * @experimental + */ + export class Span implements Disposable { + readonly traceId: string; + readonly spanId: string; + readonly parentSpanId: string; + readonly kind: string; + readonly name: string; + readonly startTime: number; + readonly endTime: number; + readonly status: null | { code: 1 } | { code: 2; message: string }; + readonly attributes: Record; + readonly traceFlags: number; + + /** + * Construct a new Span and enter it as the "current" span. + */ + constructor( + name: string, + kind?: "internal" | "server" | "client" | "producer" | "consumer", + ); + + /** + * Set an attribute on this span. + */ + setAttribute( + name: string, + value: AttributeValue, + ): void; + + /** + * Enter this span as the "current" span. + */ + enter(): void; + + /** + * Exit this span as the "current" span and restore the previous one. + */ + exit(): void; + + /** + * End this span, and exit it as the "current" span. + */ + end(): void; + + [Symbol.dispose](): void; + + /** + * Get the "current" span, if one exists. + */ + static current(): Span | undefined | null; + } + + /** + * A SpanExporter compatible with OpenTelemetry.js + * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_sdk_trace_base.SpanExporter.html + * @category Telemetry + * @experimental + */ + export class SpanExporter {} + + /** + * A ContextManager compatible with OpenTelemetry.js + * https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_api.ContextManager.html + * @category Telemetry + * @experimental + */ + export class ContextManager {} + + export {}; // only export exports + } + + /** + * @category Telemetry + * @experimental + */ + export namespace metrics { + export {}; // only export exports + } + export {}; // only export exports } diff --git a/cli/worker.rs b/cli/worker.rs index baacd681a1..402644a42c 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -30,6 +30,7 @@ use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::inspector_server::InspectorServer; +use deno_runtime::ops::otel::OtelConfig; use deno_runtime::ops::process::NpmProcessStateProviderRc; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::web_worker::WebWorker; @@ -142,6 +143,7 @@ struct SharedWorkerState { storage_key_resolver: StorageKeyResolver, options: CliMainWorkerOptions, subcommand: DenoSubcommand, + otel_config: Option, // `None` means OpenTelemetry is disabled. 
} impl SharedWorkerState { @@ -405,6 +407,7 @@ impl CliMainWorkerFactory { storage_key_resolver: StorageKeyResolver, subcommand: DenoSubcommand, options: CliMainWorkerOptions, + otel_config: Option, ) -> Self { Self { shared: Arc::new(SharedWorkerState { @@ -427,6 +430,7 @@ impl CliMainWorkerFactory { storage_key_resolver, options, subcommand, + otel_config, }), } } @@ -576,6 +580,7 @@ impl CliMainWorkerFactory { mode, serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), + otel_config: shared.otel_config.clone(), }, extensions: custom_extensions, startup_snapshot: crate::js::deno_isolate_init(), @@ -775,6 +780,7 @@ fn create_web_worker_callback( mode: WorkerExecutionMode::Worker, serve_port: shared.options.serve_port, serve_host: shared.options.serve_host.clone(), + otel_config: shared.otel_config.clone(), }, extensions: vec![], startup_snapshot: crate::js::deno_isolate_init(), diff --git a/ext/http/00_serve.ts b/ext/http/00_serve.ts index 7bf83e49c3..fcdb87d092 100644 --- a/ext/http/00_serve.ts +++ b/ext/http/00_serve.ts @@ -42,6 +42,10 @@ const { Uint8Array, Promise, } = primordials; +const { + getAsyncContext, + setAsyncContext, +} = core; import { InnerBody } from "ext:deno_fetch/22_body.js"; import { Event } from "ext:deno_web/02_event.js"; @@ -397,8 +401,10 @@ class CallbackContext { /** @type {Promise | undefined} */ closing; listener; + asyncContext; constructor(signal, args, listener) { + this.asyncContext = getAsyncContext(); // The abort signal triggers a non-graceful shutdown signal?.addEventListener( "abort", @@ -508,82 +514,89 @@ function fastSyncResponseOrStream( */ function mapToCallback(context, callback, onError) { return async function (req) { - // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback - // 500 error. - let innerRequest; - let response; + const asyncContext = getAsyncContext(); + setAsyncContext(context.asyncContext); + try { - innerRequest = new InnerRequest(req, context); - const request = fromInnerRequest(innerRequest, "immutable"); - innerRequest.request = request; - response = await callback( - request, - new ServeHandlerInfo(innerRequest), - ); - - // Throwing Error if the handler return value is not a Response class - if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) { - throw new TypeError( - "Return value from serve handler must be a response or a promise resolving to a response", - ); - } - - if (response.type === "error") { - throw new TypeError( - "Return value from serve handler must not be an error response (like Response.error())", - ); - } - - if (response.bodyUsed) { - throw new TypeError( - "The body of the Response returned from the serve handler has already been consumed", - ); - } - } catch (error) { + // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback + // 500 error. 
+ let innerRequest; + let response; try { - response = await onError(error); + innerRequest = new InnerRequest(req, context); + const request = fromInnerRequest(innerRequest, "immutable"); + innerRequest.request = request; + response = await callback( + request, + new ServeHandlerInfo(innerRequest), + ); + + // Throwing Error if the handler return value is not a Response class if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) { throw new TypeError( - "Return value from onError handler must be a response or a promise resolving to a response", + "Return value from serve handler must be a response or a promise resolving to a response", + ); + } + + if (response.type === "error") { + throw new TypeError( + "Return value from serve handler must not be an error response (like Response.error())", + ); + } + + if (response.bodyUsed) { + throw new TypeError( + "The body of the Response returned from the serve handler has already been consumed", ); } } catch (error) { - // deno-lint-ignore no-console - console.error("Exception in onError while handling exception", error); - response = internalServerError(); + try { + response = await onError(error); + if (!ObjectPrototypeIsPrototypeOf(ResponsePrototype, response)) { + throw new TypeError( + "Return value from onError handler must be a response or a promise resolving to a response", + ); + } + } catch (error) { + // deno-lint-ignore no-console + console.error("Exception in onError while handling exception", error); + response = internalServerError(); + } } - } - const inner = toInnerResponse(response); - if (innerRequest?.[_upgraded]) { - // We're done here as the connection has been upgraded during the callback and no longer requires servicing. - if (response !== UPGRADE_RESPONSE_SENTINEL) { - // deno-lint-ignore no-console - console.error("Upgrade response was not returned from callback"); - context.close(); + const inner = toInnerResponse(response); + if (innerRequest?.[_upgraded]) { + // We're done here as the connection has been upgraded during the callback and no longer requires servicing. + if (response !== UPGRADE_RESPONSE_SENTINEL) { + // deno-lint-ignore no-console + console.error("Upgrade response was not returned from callback"); + context.close(); + } + innerRequest?.[_upgraded](); + return; } - innerRequest?.[_upgraded](); - return; - } - // Did everything shut down while we were waiting? - if (context.closed) { - // We're shutting down, so this status shouldn't make it back to the client but "Service Unavailable" seems appropriate - innerRequest?.close(); - op_http_set_promise_complete(req, 503); - return; - } - - const status = inner.status; - const headers = inner.headerList; - if (headers && headers.length > 0) { - if (headers.length == 1) { - op_http_set_response_header(req, headers[0][0], headers[0][1]); - } else { - op_http_set_response_headers(req, headers); + // Did everything shut down while we were waiting? 
+ if (context.closed) { + // We're shutting down, so this status shouldn't make it back to the client but "Service Unavailable" seems appropriate + innerRequest?.close(); + op_http_set_promise_complete(req, 503); + return; } - } - fastSyncResponseOrStream(req, inner.body, status, innerRequest); + const status = inner.status; + const headers = inner.headerList; + if (headers && headers.length > 0) { + if (headers.length == 1) { + op_http_set_response_header(req, headers[0][0], headers[0][1]); + } else { + op_http_set_response_headers(req, headers); + } + } + + fastSyncResponseOrStream(req, inner.body, status, innerRequest); + } finally { + setAsyncContext(asyncContext); + } }; } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index ba236de149..b59cd14fa9 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -100,6 +100,7 @@ deno_websocket.workspace = true deno_webstorage.workspace = true node_resolver = { workspace = true, features = ["sync"] } +async-trait.workspace = true color-print.workspace = true dlopen2.workspace = true encoding_rs.workspace = true @@ -114,7 +115,13 @@ log.workspace = true netif = "0.1.6" notify.workspace = true once_cell.workspace = true +opentelemetry.workspace = true +opentelemetry-http.workspace = true +opentelemetry-otlp.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry_sdk.workspace = true percent-encoding.workspace = true +pin-project.workspace = true regex.workspace = true rustyline = { workspace = true, features = ["custom-bindings"] } same-file = "1.0.6" diff --git a/runtime/js/90_deno_ns.js b/runtime/js/90_deno_ns.js index fd2ac00f20..11f618ce27 100644 --- a/runtime/js/90_deno_ns.js +++ b/runtime/js/90_deno_ns.js @@ -29,6 +29,7 @@ import * as tty from "ext:runtime/40_tty.js"; import * as kv from "ext:deno_kv/01_db.ts"; import * as cron from "ext:deno_cron/01_cron.ts"; import * as webgpuSurface from "ext:deno_webgpu/02_surface.js"; +import * as telemetry from "ext:runtime/telemetry.js"; const denoNs = { Process: process.Process, @@ -134,7 +135,7 @@ const denoNs = { createHttpClient: httpClient.createHttpClient, }; -// NOTE(bartlomieju): keep IDs in sync with `cli/main.rs` +// NOTE(bartlomieju): keep IDs in sync with `runtime/lib.rs` const unstableIds = { broadcastChannel: 1, cron: 2, @@ -143,11 +144,12 @@ const unstableIds = { http: 5, kv: 6, net: 7, - process: 8, - temporal: 9, - unsafeProto: 10, - webgpu: 11, - workerOptions: 12, + otel: 8, + process: 9, + temporal: 10, + unsafeProto: 11, + webgpu: 12, + workerOptions: 13, }; const denoNsUnstableById = { __proto__: null }; @@ -181,4 +183,9 @@ denoNsUnstableById[unstableIds.webgpu] = { // denoNsUnstableById[unstableIds.workerOptions] = { __proto__: null } +denoNsUnstableById[unstableIds.otel] = { + tracing: telemetry.tracing, + metrics: telemetry.metrics, +}; + export { denoNs, denoNsUnstableById, unstableIds }; diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 6ddaa1335e..2da5c5398c 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -86,6 +86,8 @@ import { workerRuntimeGlobalProperties, } from "ext:runtime/98_global_scope_worker.js"; import { SymbolDispose, SymbolMetadata } from "ext:deno_web/00_infra.js"; +import { bootstrap as bootstrapOtel } from "ext:runtime/telemetry.js"; + // deno-lint-ignore prefer-primordials if (Symbol.metadata) { throw "V8 supports Symbol.metadata now, no need to shim it"; @@ -573,6 +575,7 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) { 10: serveHost, 11: serveIsMain, 12: 
serveWorkerCount, + 13: otelConfig, } = runtimeOptions; if (mode === executionModes.serve) { @@ -673,9 +676,10 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) { }); ObjectSetPrototypeOf(globalThis, Window.prototype); + bootstrapOtel(otelConfig); + if (inspectFlag) { - const consoleFromDeno = globalThis.console; - core.wrapConsole(consoleFromDeno, core.v8Console); + core.wrapConsole(globalThis.console, core.v8Console); } event.defineEventHandler(globalThis, "error"); @@ -855,6 +859,7 @@ function bootstrapWorkerRuntime( 5: hasNodeModulesDir, 6: argv0, 7: nodeDebug, + 13: otelConfig, } = runtimeOptions; performance.setTimeOrigin(); @@ -882,8 +887,9 @@ function bootstrapWorkerRuntime( } ObjectSetPrototypeOf(globalThis, DedicatedWorkerGlobalScope.prototype); - const consoleFromDeno = globalThis.console; - core.wrapConsole(consoleFromDeno, core.v8Console); + bootstrapOtel(otelConfig); + + core.wrapConsole(globalThis.console, core.v8Console); event.defineEventHandler(self, "message"); event.defineEventHandler(self, "error", undefined, true); diff --git a/runtime/js/telemetry.js b/runtime/js/telemetry.js new file mode 100644 index 0000000000..e9eb51f7ca --- /dev/null +++ b/runtime/js/telemetry.js @@ -0,0 +1,395 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +import { core, primordials } from "ext:core/mod.js"; +import { + op_otel_log, + op_otel_span_attribute, + op_otel_span_attribute2, + op_otel_span_attribute3, + op_otel_span_continue, + op_otel_span_flush, + op_otel_span_start, +} from "ext:core/ops"; +import { Console } from "ext:deno_console/01_console.js"; +import { performance } from "ext:deno_web/15_performance.js"; + +const { + SymbolDispose, + MathRandom, + Array, + ObjectEntries, + SafeMap, + ReflectApply, + SymbolFor, + Error, +} = primordials; +const { AsyncVariable, setAsyncContext } = core; + +const CURRENT = new AsyncVariable(); +let TRACING_ENABLED = false; + +const SPAN_ID_BYTES = 8; +const TRACE_ID_BYTES = 16; + +const TRACE_FLAG_SAMPLED = 1 << 0; + +const hexSliceLookupTable = (function () { + const alphabet = "0123456789abcdef"; + const table = new Array(256); + for (let i = 0; i < 16; ++i) { + const i16 = i * 16; + for (let j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j]; + } + } + return table; +})(); + +function generateId(bytes) { + let out = ""; + for (let i = 0; i < bytes / 4; i += 1) { + const r32 = (MathRandom() * 2 ** 32) >>> 0; + out += hexSliceLookupTable[(r32 >> 24) & 0xff]; + out += hexSliceLookupTable[(r32 >> 16) & 0xff]; + out += hexSliceLookupTable[(r32 >> 8) & 0xff]; + out += hexSliceLookupTable[r32 & 0xff]; + } + return out; +} + +function submit(span) { + if (!(span.traceFlags & TRACE_FLAG_SAMPLED)) return; + + op_otel_span_start( + span.traceId, + span.spanId, + span.parentSpanId ?? "", + span.kind, + span.name, + span.startTime, + span.endTime, + ); + + if (span.status !== null && span.status.code !== 0) { + op_otel_span_continue(span.code, span.message ?? 
""); + } + + const attributes = ObjectEntries(span.attributes); + let i = 0; + while (i < attributes.length) { + if (i + 2 < attributes.length) { + op_otel_span_attribute3( + attributes.length, + attributes[i][0], + attributes[i][1], + attributes[i + 1][0], + attributes[i + 1][1], + attributes[i + 2][0], + attributes[i + 2][1], + ); + i += 3; + } else if (i + 1 < attributes.length) { + op_otel_span_attribute2( + attributes.length, + attributes[i][0], + attributes[i][1], + attributes[i + 1][0], + attributes[i + 1][1], + ); + i += 2; + } else { + op_otel_span_attribute( + attributes.length, + attributes[i][0], + attributes[i][1], + ); + i += 1; + } + } + + op_otel_span_flush(); +} + +const now = () => (performance.timeOrigin + performance.now()) / 1000; + +const INVALID_SPAN_ID = "0000000000000000"; +const INVALID_TRACE_ID = "00000000000000000000000000000000"; +const NO_ASYNC_CONTEXT = {}; + +class Span { + traceId; + spanId; + parentSpanId; + kind; + name; + startTime; + endTime; + status = null; + attributes = { __proto__: null }; + traceFlags = TRACE_FLAG_SAMPLED; + + enabled = TRACING_ENABLED; + #asyncContext = NO_ASYNC_CONTEXT; + + constructor(name, kind = "internal") { + if (!this.enabled) { + this.traceId = INVALID_TRACE_ID; + this.spanId = INVALID_SPAN_ID; + this.parentSpanId = INVALID_SPAN_ID; + return; + } + + this.startTime = now(); + + this.spanId = generateId(SPAN_ID_BYTES); + + let traceId; + let parentSpanId; + const parent = Span.current(); + if (parent) { + if (parent.spanId !== undefined) { + parentSpanId = parent.spanId; + traceId = parent.traceId; + } else { + const context = parent.spanContext(); + parentSpanId = context.spanId; + traceId = context.traceId; + } + } + if ( + traceId && traceId !== INVALID_TRACE_ID && parentSpanId && + parentSpanId !== INVALID_SPAN_ID + ) { + this.traceId = traceId; + this.parentSpanId = parentSpanId; + } else { + this.traceId = generateId(TRACE_ID_BYTES); + this.parentSpanId = INVALID_SPAN_ID; + } + + this.name = name; + + switch (kind) { + case "internal": + this.kind = 0; + break; + case "server": + this.kind = 1; + break; + case "client": + this.kind = 2; + break; + case "producer": + this.kind = 3; + break; + case "consumer": + this.kind = 4; + break; + default: + throw new Error(`Invalid span kind: ${kind}`); + } + + this.enter(); + } + + // helper function to match otel js api + spanContext() { + return { + traceId: this.traceId, + spanId: this.spanId, + traceFlags: this.traceFlags, + }; + } + + setAttribute(name, value) { + if (!this.enabled) return; + this.attributes[name] = value; + } + + enter() { + if (!this.enabled) return; + const context = (CURRENT.get() || ROOT_CONTEXT).setValue(SPAN_KEY, this); + this.#asyncContext = CURRENT.enter(context); + } + + exit() { + if (!this.enabled || this.#asyncContext === NO_ASYNC_CONTEXT) return; + setAsyncContext(this.#asyncContext); + this.#asyncContext = NO_ASYNC_CONTEXT; + } + + end() { + if (!this.enabled || this.endTime !== undefined) return; + this.exit(); + this.endTime = now(); + submit(this); + } + + [SymbolDispose]() { + this.end(); + } + + static current() { + return CURRENT.get()?.getValue(SPAN_KEY); + } +} + +function hrToSecs(hr) { + return ((hr[0] * 1e3 + hr[1] / 1e6) / 1000); +} + +// Exporter compatible with opentelemetry js library +class SpanExporter { + export(spans, resultCallback) { + try { + for (let i = 0; i < spans.length; i += 1) { + const span = spans[i]; + const context = span.spanContext(); + submit({ + spanId: context.spanId, + traceId: context.traceId, + 
traceFlags: context.traceFlags, + name: span.name, + kind: span.kind, + parentSpanId: span.parentSpanId, + startTime: hrToSecs(span.startTime), + endTime: hrToSecs(span.endTime), + status: span.status, + attributes: span.attributes, + }); + } + resultCallback({ code: 0 }); + } catch (error) { + resultCallback({ code: 1, error }); + } + } + + async shutdown() {} + + async forceFlush() {} +} + +// SPAN_KEY matches symbol in otel-js library +const SPAN_KEY = SymbolFor("OpenTelemetry Context Key SPAN"); + +// Context tracker compatible with otel-js api +class Context { + #data = new SafeMap(); + + constructor(data) { + this.#data = data ? new SafeMap(data) : new SafeMap(); + } + + getValue(key) { + return this.#data.get(key); + } + + setValue(key, value) { + const c = new Context(this.#data); + c.#data.set(key, value); + return c; + } + + deleteValue(key) { + const c = new Context(this.#data); + c.#data.delete(key); + return c; + } +} + +const ROOT_CONTEXT = new Context(); + +// Context manager for opentelemetry js library +class ContextManager { + active() { + return CURRENT.get() ?? ROOT_CONTEXT; + } + + with(context, fn, thisArg, ...args) { + const ctx = CURRENT.enter(context); + try { + return ReflectApply(fn, thisArg, args); + } finally { + setAsyncContext(ctx); + } + } + + bind(context, f) { + return (...args) => { + const ctx = CURRENT.enter(context); + try { + return ReflectApply(f, thisArg, args); + } finally { + setAsyncContext(ctx); + } + }; + } + + enable() { + return this; + } + + disable() { + return this; + } +} + +function otelLog(message, level) { + let traceId = ""; + let spanId = ""; + let traceFlags = 0; + const span = Span.current(); + if (span) { + if (span.spanId !== undefined) { + spanId = span.spanId; + traceId = span.traceId; + traceFlags = span.traceFlags; + } else { + const context = span.spanContext(); + spanId = context.spanId; + traceId = context.traceId; + traceFlags = context.traceFlags; + } + } + return op_otel_log(message, level, traceId, spanId, traceFlags); +} + +const otelConsoleConfig = { + ignore: 0, + capture: 1, + replace: 2, +}; + +export function bootstrap(config) { + if (config.length === 0) return; + const { 0: consoleConfig } = config; + + TRACING_ENABLED = true; + + switch (consoleConfig) { + case otelConsoleConfig.capture: + core.wrapConsole(globalThis.console, new Console(otelLog)); + break; + case otelConsoleConfig.replace: + ObjectDefineProperty( + globalThis, + "console", + core.propNonEnumerable(new Console(otelLog)), + ); + break; + default: + break; + } +} + +export const tracing = { + get enabled() { + return TRACING_ENABLED; + }, + Span, + SpanExporter, + ContextManager, +}; + +// TODO(devsnek): implement metrics +export const metrics = {}; diff --git a/runtime/lib.rs b/runtime/lib.rs index f0b1129ce3..21b61e1c05 100644 --- a/runtime/lib.rs +++ b/runtime/lib.rs @@ -99,18 +99,24 @@ pub static UNSTABLE_GRANULAR_FLAGS: &[UnstableGranularFlag] = &[ show_in_help: true, id: 7, }, + UnstableGranularFlag { + name: "otel", + help_text: "Enable unstable OpenTelemetry features", + show_in_help: false, + id: 8, + }, // TODO(bartlomieju): consider removing it UnstableGranularFlag { name: ops::process::UNSTABLE_FEATURE_NAME, help_text: "Enable unstable process APIs", show_in_help: false, - id: 8, + id: 9, }, UnstableGranularFlag { name: "temporal", help_text: "Enable unstable Temporal API", show_in_help: true, - id: 9, + id: 10, }, UnstableGranularFlag { name: "unsafe-proto", @@ -118,19 +124,19 @@ pub static UNSTABLE_GRANULAR_FLAGS: 
&[UnstableGranularFlag] = &[ show_in_help: true, // This number is used directly in the JS code. Search // for "unstableIds" to see where it's used. - id: 10, + id: 11, }, UnstableGranularFlag { name: deno_webgpu::UNSTABLE_FEATURE_NAME, help_text: "Enable unstable `WebGPU` APIs", show_in_help: true, - id: 11, + id: 12, }, UnstableGranularFlag { name: ops::worker_host::UNSTABLE_FEATURE_NAME, help_text: "Enable unstable Web Worker APIs", show_in_help: true, - id: 12, + id: 13, }, ]; diff --git a/runtime/ops/mod.rs b/runtime/ops/mod.rs index 67065b901b..c2e402f33c 100644 --- a/runtime/ops/mod.rs +++ b/runtime/ops/mod.rs @@ -4,6 +4,7 @@ pub mod bootstrap; pub mod fs_events; pub mod http; pub mod os; +pub mod otel; pub mod permissions; pub mod process; pub mod runtime; diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index 9bee9d8234..790962f38d 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -186,6 +186,8 @@ fn op_get_exit_code(state: &mut OpState) -> i32 { #[op2(fast)] fn op_exit(state: &mut OpState) { + crate::ops::otel::otel_drop_state(state); + let code = state.borrow::().get(); std::process::exit(code) } diff --git a/runtime/ops/otel.rs b/runtime/ops/otel.rs new file mode 100644 index 0000000000..6a4750acc2 --- /dev/null +++ b/runtime/ops/otel.rs @@ -0,0 +1,686 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use crate::tokio_util::create_basic_runtime; +use deno_core::anyhow::anyhow; +use deno_core::anyhow::{self}; +use deno_core::futures::channel::mpsc; +use deno_core::futures::channel::mpsc::UnboundedSender; +use deno_core::futures::future::BoxFuture; +use deno_core::futures::stream; +use deno_core::futures::Stream; +use deno_core::futures::StreamExt; +use deno_core::op2; +use deno_core::v8; +use deno_core::OpState; +use once_cell::sync::Lazy; +use opentelemetry::logs::Severity; +use opentelemetry::trace::SpanContext; +use opentelemetry::trace::SpanId; +use opentelemetry::trace::SpanKind; +use opentelemetry::trace::Status as SpanStatus; +use opentelemetry::trace::TraceFlags; +use opentelemetry::trace::TraceId; +use opentelemetry::InstrumentationScope; +use opentelemetry::Key; +use opentelemetry::KeyValue; +use opentelemetry::StringValue; +use opentelemetry::Value; +use opentelemetry_otlp::HttpExporterBuilder; +use opentelemetry_otlp::Protocol; +use opentelemetry_otlp::WithExportConfig; +use opentelemetry_otlp::WithHttpConfig; +use opentelemetry_sdk::export::trace::SpanData; +use opentelemetry_sdk::logs::BatchLogProcessor; +use opentelemetry_sdk::logs::LogProcessor as LogProcessorTrait; +use opentelemetry_sdk::logs::LogRecord; +use opentelemetry_sdk::trace::BatchSpanProcessor; +use opentelemetry_sdk::trace::SpanProcessor as SpanProcessorTrait; +use opentelemetry_sdk::Resource; +use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_NAME; +use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_VERSION; +use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_LANGUAGE; +use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_NAME; +use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_VERSION; +use serde::Deserialize; +use serde::Serialize; +use std::borrow::Cow; +use std::env; +use std::fmt::Debug; +use std::pin::Pin; +use std::task::Context; +use std::task::Poll; +use std::thread; +use std::time::Duration; +use std::time::SystemTime; + +type SpanProcessor = BatchSpanProcessor; +type LogProcessor = BatchLogProcessor; + +deno_core::extension!( + deno_otel, + ops = [op_otel_log, 
op_otel_span_start, op_otel_span_continue, op_otel_span_attribute, op_otel_span_attribute2, op_otel_span_attribute3, op_otel_span_flush],
+  options = {
+    otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled.
+  },
+  state = |state, options| {
+    if let Some(otel_config) = options.otel_config {
+      otel_create_globals(otel_config, state).unwrap();
+    }
+  }
+);
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct OtelConfig {
+  pub runtime_name: Cow<'static, str>,
+  pub runtime_version: Cow<'static, str>,
+  pub console: OtelConsoleConfig,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+#[repr(u8)]
+pub enum OtelConsoleConfig {
+  Ignore = 0,
+  Capture = 1,
+  Replace = 2,
+}
+
+impl Default for OtelConfig {
+  fn default() -> Self {
+    Self {
+      runtime_name: Cow::Borrowed(env!("CARGO_PKG_NAME")),
+      runtime_version: Cow::Borrowed(env!("CARGO_PKG_VERSION")),
+      console: OtelConsoleConfig::Capture,
+    }
+  }
+}
+
+static OTEL_SHARED_RUNTIME_SPAWN_TASK_TX: Lazy<
+  UnboundedSender<BoxFuture<'static, ()>>,
+> = Lazy::new(otel_create_shared_runtime);
+
+fn otel_create_shared_runtime() -> UnboundedSender<BoxFuture<'static, ()>> {
+  let (spawn_task_tx, mut spawn_task_rx) =
+    mpsc::unbounded::<BoxFuture<'static, ()>>();
+
+  thread::spawn(move || {
+    let rt = create_basic_runtime();
+    rt.block_on(async move {
+      while let Some(task) = spawn_task_rx.next().await {
+        tokio::spawn(task);
+      }
+    });
+  });
+
+  spawn_task_tx
+}
+
+#[derive(Clone, Copy)]
+struct OtelSharedRuntime;
+
+impl hyper::rt::Executor<BoxFuture<'static, ()>> for OtelSharedRuntime {
+  fn execute(&self, fut: BoxFuture<'static, ()>) {
+    (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
+      .unbounded_send(fut)
+      .expect("failed to send task to shared OpenTelemetry runtime");
+  }
+}
+
+impl opentelemetry_sdk::runtime::Runtime for OtelSharedRuntime {
+  type Interval = Pin<Box<dyn Stream<Item = ()> + Send + 'static>>;
+  type Delay = Pin<Box<tokio::time::Sleep>>;
+
+  fn interval(&self, period: Duration) -> Self::Interval {
+    stream::repeat(())
+      .then(move |_| tokio::time::sleep(period))
+      .boxed()
+  }
+
+  fn spawn(&self, future: BoxFuture<'static, ()>) {
+    (*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
+      .unbounded_send(future)
+      .expect("failed to send task to shared OpenTelemetry runtime");
+  }
+
+  fn delay(&self, duration: Duration) -> Self::Delay {
+    Box::pin(tokio::time::sleep(duration))
+  }
+}
+
+impl opentelemetry_sdk::runtime::RuntimeChannel for OtelSharedRuntime {
+  type Receiver<T: Debug + Send> = BatchMessageChannelReceiver<T>;
+  type Sender<T: Debug + Send> = BatchMessageChannelSender<T>;
+
+  fn batch_message_channel<T: Debug + Send>(
+    &self,
+    capacity: usize,
+  ) -> (Self::Sender<T>, Self::Receiver<T>) {
+    let (batch_tx, batch_rx) = tokio::sync::mpsc::channel::<T>(capacity);
+    (batch_tx.into(), batch_rx.into())
+  }
+}
+
+#[derive(Debug)]
+pub struct BatchMessageChannelSender<T: Debug> {
+  sender: tokio::sync::mpsc::Sender<T>,
+}
+
+impl<T: Debug> From<tokio::sync::mpsc::Sender<T>>
+  for BatchMessageChannelSender<T>
+{
+  fn from(sender: tokio::sync::mpsc::Sender<T>) -> Self {
+    Self { sender }
+  }
+}
+
+impl<T: Debug + Send> opentelemetry_sdk::runtime::TrySend
+  for BatchMessageChannelSender<T>
+{
+  type Message = T;
+
+  fn try_send(
+    &self,
+    item: Self::Message,
+  ) -> Result<(), opentelemetry_sdk::runtime::TrySendError> {
+    self.sender.try_send(item).map_err(|err| match err {
+      tokio::sync::mpsc::error::TrySendError::Full(_) => {
+        opentelemetry_sdk::runtime::TrySendError::ChannelFull
+      }
+      tokio::sync::mpsc::error::TrySendError::Closed(_) => {
+        opentelemetry_sdk::runtime::TrySendError::ChannelClosed
+      }
+    })
+  }
+}
+
+pub struct BatchMessageChannelReceiver<T> {
+  receiver: tokio::sync::mpsc::Receiver<T>,
+}
+
+impl<T> From<tokio::sync::mpsc::Receiver<T>>
+  for BatchMessageChannelReceiver<T>
+{
+  fn from(receiver: tokio::sync::mpsc::Receiver<T>) -> Self {
+    Self { receiver }
+  }
+}
+
+impl<T> Stream for BatchMessageChannelReceiver<T> {
+  type Item = T;
+
+  fn poll_next(
+    mut self: Pin<&mut Self>,
+    cx: &mut Context<'_>,
+  ) -> Poll<Option<Self::Item>> {
+    self.receiver.poll_recv(cx)
+  }
+}
+
+mod hyper_client {
+  use http_body_util::BodyExt;
+  use http_body_util::Full;
+  use hyper::body::Body as HttpBody;
+  use hyper::body::Frame;
+  use hyper_util::client::legacy::connect::HttpConnector;
+  use hyper_util::client::legacy::Client;
+  use opentelemetry_http::Bytes;
+  use opentelemetry_http::HttpError;
+  use opentelemetry_http::Request;
+  use opentelemetry_http::Response;
+  use opentelemetry_http::ResponseExt;
+  use std::fmt::Debug;
+  use std::pin::Pin;
+  use std::task::Poll;
+  use std::task::{self};
+
+  use super::OtelSharedRuntime;
+
+  // same as opentelemetry_http::HyperClient except it uses OtelSharedRuntime
+  #[derive(Debug, Clone)]
+  pub struct HyperClient {
+    inner: Client<HttpConnector, Body>,
+  }
+
+  impl HyperClient {
+    pub fn new() -> Self {
+      Self {
+        inner: Client::builder(OtelSharedRuntime).build(HttpConnector::new()),
+      }
+    }
+  }
+
+  #[async_trait::async_trait]
+  impl opentelemetry_http::HttpClient for HyperClient {
+    async fn send(
+      &self,
+      request: Request<Vec<u8>>,
+    ) -> Result<Response<Bytes>, HttpError> {
+      let (parts, body) = request.into_parts();
+      let request = Request::from_parts(parts, Body(Full::from(body)));
+      let mut response = self.inner.request(request).await?;
+      let headers = std::mem::take(response.headers_mut());
+
+      let mut http_response = Response::builder()
+        .status(response.status())
+        .body(response.into_body().collect().await?.to_bytes())?;
+      *http_response.headers_mut() = headers;
+
+      Ok(http_response.error_for_status()?)
+    }
+  }
+
+  #[pin_project::pin_project]
+  pub struct Body(#[pin] Full<Bytes>);
+
+  impl HttpBody for Body {
+    type Data = Bytes;
+    type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
+
+    #[inline]
+    fn poll_frame(
+      self: Pin<&mut Self>,
+      cx: &mut task::Context<'_>,
+    ) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
+      self.project().0.poll_frame(cx).map_err(Into::into)
+    }
+
+    #[inline]
+    fn is_end_stream(&self) -> bool {
+      self.0.is_end_stream()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> hyper::body::SizeHint {
+      self.0.size_hint()
+    }
+  }
+}
+
+fn otel_create_globals(
+  config: OtelConfig,
+  op_state: &mut OpState,
+) -> anyhow::Result<()> {
+  // Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_*
+  // crates don't do this automatically.
+  // TODO(piscisaureus): enable GRPC support.
+  let protocol = match env::var("OTEL_EXPORTER_OTLP_PROTOCOL").as_deref() {
+    Ok("http/protobuf") => Protocol::HttpBinary,
+    Ok("http/json") => Protocol::HttpJson,
+    Ok("") | Err(env::VarError::NotPresent) => {
+      return Ok(());
+    }
+    Ok(protocol) => {
+      return Err(anyhow!(
+        "Env var OTEL_EXPORTER_OTLP_PROTOCOL specifies an unsupported protocol: {}",
+        protocol
+      ));
+    }
+    Err(err) => {
+      return Err(anyhow!(
+        "Failed to read env var OTEL_EXPORTER_OTLP_PROTOCOL: {}",
+        err
+      ))
+    }
+  };
+
+  // Define the resource attributes that will be attached to all log records.
+  // These attributes are sourced as follows (in order of precedence):
+  // * The `service.name` attribute from the `OTEL_SERVICE_NAME` env var.
+  // * Additional attributes from the `OTEL_RESOURCE_ATTRIBUTES` env var.
+  // * Default attribute values defined here.
+  // TODO(piscisaureus): add more default attributes (e.g. script path).
+  let mut resource = Resource::default();
+
+  // Add the runtime name and version to the resource attributes. Also override
+  // the `telemetry.sdk` attributes to include the Deno runtime.
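+  //
+  // Illustrative example (hypothetical values): running
+  //   OTEL_SERVICE_NAME=my-service \
+  //   OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
+  //   OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
+  //   deno run -A --unstable-otel main.ts
+  // would attach `service.name=my-service` to every exported span and log
+  // record, alongside the runtime and `telemetry.sdk` attributes merged below.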
+  resource = resource.merge(&Resource::new(vec![
+    KeyValue::new(PROCESS_RUNTIME_NAME, config.runtime_name),
+    KeyValue::new(PROCESS_RUNTIME_VERSION, config.runtime_version.clone()),
+    KeyValue::new(
+      TELEMETRY_SDK_LANGUAGE,
+      format!(
+        "deno-{}",
+        resource.get(Key::new(TELEMETRY_SDK_LANGUAGE)).unwrap()
+      ),
+    ),
+    KeyValue::new(
+      TELEMETRY_SDK_NAME,
+      format!(
+        "deno-{}",
+        resource.get(Key::new(TELEMETRY_SDK_NAME)).unwrap()
+      ),
+    ),
+    KeyValue::new(
+      TELEMETRY_SDK_VERSION,
+      format!(
+        "{}-{}",
+        config.runtime_version,
+        resource.get(Key::new(TELEMETRY_SDK_VERSION)).unwrap()
+      ),
+    ),
+  ]));
+
+  // The OTLP endpoint is automatically picked up from the
+  // `OTEL_EXPORTER_OTLP_ENDPOINT` environment variable. Additional headers can
+  // be specified using `OTEL_EXPORTER_OTLP_HEADERS`.
+
+  let client = hyper_client::HyperClient::new();
+
+  let span_exporter = HttpExporterBuilder::default()
+    .with_http_client(client.clone())
+    .with_protocol(protocol)
+    .build_span_exporter()?;
+  let mut span_processor =
+    BatchSpanProcessor::builder(span_exporter, OtelSharedRuntime).build();
+  span_processor.set_resource(&resource);
+  op_state.put::<SpanProcessor>(span_processor);
+
+  let log_exporter = HttpExporterBuilder::default()
+    .with_http_client(client)
+    .with_protocol(protocol)
+    .build_log_exporter()?;
+  let log_processor =
+    BatchLogProcessor::builder(log_exporter, OtelSharedRuntime).build();
+  log_processor.set_resource(&resource);
+  op_state.put::<LogProcessor>(log_processor);
+
+  Ok(())
+}
+
+/// This function is called by the runtime whenever it is about to call
+/// `os::process::exit()`, to ensure that all OpenTelemetry logs are properly
+/// flushed before the process terminates.
+pub fn otel_drop_state(state: &mut OpState) {
+  if let Some(processor) = state.try_take::<SpanProcessor>() {
+    let _ = processor.force_flush();
+    drop(processor);
+  }
+  if let Some(processor) = state.try_take::<LogProcessor>() {
+    let _ = processor.force_flush();
+    drop(processor);
+  }
+}
+
+#[op2(fast)]
+fn op_otel_log(
+  state: &mut OpState,
+  #[string] message: String,
+  #[smi] level: i32,
+  #[string] trace_id: &str,
+  #[string] span_id: &str,
+  #[smi] trace_flags: u8,
+) {
+  let Some(logger) = state.try_borrow::<LogProcessor>() else {
+    log::error!("op_otel_log: OpenTelemetry Logger not available");
+    return;
+  };
+
+  // Convert the integer log level that ext/console uses to the corresponding
+  // OpenTelemetry log severity.
+  let severity = match level {
+    ..=0 => Severity::Debug,
+    1 => Severity::Info,
+    2 => Severity::Warn,
+    3..
=> Severity::Error, + }; + + let mut log_record = LogRecord::default(); + + log_record.observed_timestamp = Some(SystemTime::now()); + log_record.body = Some(message.into()); + log_record.severity_number = Some(severity); + log_record.severity_text = Some(severity.name()); + if let (Ok(trace_id), Ok(span_id)) = + (TraceId::from_hex(trace_id), SpanId::from_hex(span_id)) + { + let span_context = SpanContext::new( + trace_id, + span_id, + TraceFlags::new(trace_flags), + false, + Default::default(), + ); + log_record.trace_context = Some((&span_context).into()); + } + logger.emit( + &mut log_record, + &InstrumentationScope::builder("deno").build(), + ); +} + +struct TemporarySpan(SpanData); + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_span_start<'s>( + scope: &mut v8::HandleScope<'s>, + state: &mut OpState, + trace_id: v8::Local<'s, v8::Value>, + span_id: v8::Local<'s, v8::Value>, + parent_span_id: v8::Local<'s, v8::Value>, + #[smi] span_kind: u8, + name: v8::Local<'s, v8::Value>, + start_time: f64, + end_time: f64, +) -> Result<(), anyhow::Error> { + if let Some(temporary_span) = state.try_take::() { + let Some(span_processor) = state.try_borrow::() else { + return Ok(()); + }; + span_processor.on_end(temporary_span.0); + }; + + let trace_id = { + let x = v8::ValueView::new(scope, trace_id.try_cast()?); + match x.data() { + v8::ValueViewData::OneByte(bytes) => { + TraceId::from_hex(&String::from_utf8_lossy(bytes))? + } + _ => return Err(anyhow!("invalid trace_id")), + } + }; + + let span_id = { + let x = v8::ValueView::new(scope, span_id.try_cast()?); + match x.data() { + v8::ValueViewData::OneByte(bytes) => { + SpanId::from_hex(&String::from_utf8_lossy(bytes))? + } + _ => return Err(anyhow!("invalid span_id")), + } + }; + + let parent_span_id = { + let x = v8::ValueView::new(scope, parent_span_id.try_cast()?); + match x.data() { + v8::ValueViewData::OneByte(bytes) => { + let s = String::from_utf8_lossy(bytes); + if s.is_empty() { + SpanId::INVALID + } else { + SpanId::from_hex(&s)? 
+ } + } + _ => return Err(anyhow!("invalid parent_span_id")), + } + }; + + let name = { + let x = v8::ValueView::new(scope, name.try_cast()?); + match x.data() { + v8::ValueViewData::OneByte(bytes) => { + String::from_utf8_lossy(bytes).into_owned() + } + v8::ValueViewData::TwoByte(bytes) => String::from_utf16_lossy(bytes), + } + }; + + let temporary_span = TemporarySpan(SpanData { + span_context: SpanContext::new( + trace_id, + span_id, + TraceFlags::SAMPLED, + false, + Default::default(), + ), + parent_span_id, + span_kind: match span_kind { + 0 => SpanKind::Internal, + 1 => SpanKind::Server, + 2 => SpanKind::Client, + 3 => SpanKind::Producer, + 4 => SpanKind::Consumer, + _ => return Err(anyhow!("invalid span kind")), + }, + name: Cow::Owned(name), + start_time: SystemTime::UNIX_EPOCH + .checked_add(std::time::Duration::from_secs_f64(start_time)) + .ok_or_else(|| anyhow!("invalid start time"))?, + end_time: SystemTime::UNIX_EPOCH + .checked_add(std::time::Duration::from_secs_f64(end_time)) + .ok_or_else(|| anyhow!("invalid start time"))?, + attributes: Vec::new(), + dropped_attributes_count: 0, + events: Default::default(), + links: Default::default(), + status: SpanStatus::Unset, + instrumentation_scope: InstrumentationScope::builder("deno").build(), + }); + state.put(temporary_span); + + Ok(()) +} + +#[op2(fast)] +fn op_otel_span_continue( + state: &mut OpState, + #[smi] status: u8, + #[string] error_description: Cow<'_, str>, +) { + if let Some(temporary_span) = state.try_borrow_mut::() { + temporary_span.0.status = match status { + 0 => SpanStatus::Unset, + 1 => SpanStatus::Ok, + 2 => SpanStatus::Error { + description: Cow::Owned(error_description.into_owned()), + }, + _ => return, + }; + } +} + +macro_rules! attr { + ($scope:ident, $temporary_span:ident, $name:ident, $value:ident) => { + let name = if let Ok(name) = $name.try_cast() { + let view = v8::ValueView::new($scope, name); + match view.data() { + v8::ValueViewData::OneByte(bytes) => { + Some(String::from_utf8_lossy(bytes).into_owned()) + } + v8::ValueViewData::TwoByte(bytes) => { + Some(String::from_utf16_lossy(bytes)) + } + } + } else { + None + }; + let value = if let Ok(string) = $value.try_cast::() { + Some(Value::String(StringValue::from({ + let x = v8::ValueView::new($scope, string); + match x.data() { + v8::ValueViewData::OneByte(bytes) => { + String::from_utf8_lossy(bytes).into_owned() + } + v8::ValueViewData::TwoByte(bytes) => String::from_utf16_lossy(bytes), + } + }))) + } else if let Ok(number) = $value.try_cast::() { + Some(Value::F64(number.value())) + } else if let Ok(boolean) = $value.try_cast::() { + Some(Value::Bool(boolean.is_true())) + } else if let Ok(bigint) = $value.try_cast::() { + let (i64_value, _lossless) = bigint.i64_value(); + Some(Value::I64(i64_value)) + } else { + None + }; + if let (Some(name), Some(value)) = (name, value) { + $temporary_span + .0 + .attributes + .push(KeyValue::new(name, value)); + } else { + $temporary_span.0.dropped_attributes_count += 1; + } + }; +} + +#[op2(fast)] +fn op_otel_span_attribute<'s>( + scope: &mut v8::HandleScope<'s>, + state: &mut OpState, + #[smi] capacity: u32, + key: v8::Local<'s, v8::Value>, + value: v8::Local<'s, v8::Value>, +) { + if let Some(temporary_span) = state.try_borrow_mut::() { + temporary_span.0.attributes.reserve_exact( + (capacity as usize) - temporary_span.0.attributes.capacity(), + ); + attr!(scope, temporary_span, key, value); + } +} + +#[op2(fast)] +fn op_otel_span_attribute2<'s>( + scope: &mut v8::HandleScope<'s>, + state: &mut OpState, 
+ #[smi] capacity: u32, + key1: v8::Local<'s, v8::Value>, + value1: v8::Local<'s, v8::Value>, + key2: v8::Local<'s, v8::Value>, + value2: v8::Local<'s, v8::Value>, +) { + if let Some(temporary_span) = state.try_borrow_mut::() { + temporary_span.0.attributes.reserve_exact( + (capacity as usize) - temporary_span.0.attributes.capacity(), + ); + attr!(scope, temporary_span, key1, value1); + attr!(scope, temporary_span, key2, value2); + } +} + +#[allow(clippy::too_many_arguments)] +#[op2(fast)] +fn op_otel_span_attribute3<'s>( + scope: &mut v8::HandleScope<'s>, + state: &mut OpState, + #[smi] capacity: u32, + key1: v8::Local<'s, v8::Value>, + value1: v8::Local<'s, v8::Value>, + key2: v8::Local<'s, v8::Value>, + value2: v8::Local<'s, v8::Value>, + key3: v8::Local<'s, v8::Value>, + value3: v8::Local<'s, v8::Value>, +) { + if let Some(temporary_span) = state.try_borrow_mut::() { + temporary_span.0.attributes.reserve_exact( + (capacity as usize) - temporary_span.0.attributes.capacity(), + ); + attr!(scope, temporary_span, key1, value1); + attr!(scope, temporary_span, key2, value2); + attr!(scope, temporary_span, key3, value3); + } +} + +#[op2(fast)] +fn op_otel_span_flush(state: &mut OpState) { + let Some(temporary_span) = state.try_take::() else { + return; + }; + + let Some(span_processor) = state.try_borrow::() else { + return; + }; + + span_processor.on_end(temporary_span.0); +} diff --git a/runtime/shared.rs b/runtime/shared.rs index f7d76f67a7..c05f352f1c 100644 --- a/runtime/shared.rs +++ b/runtime/shared.rs @@ -47,6 +47,7 @@ extension!(runtime, "40_signals.js", "40_tty.js", "41_prompt.js", + "telemetry.js", "90_deno_ns.js", "98_global_scope_shared.js", "98_global_scope_window.js", diff --git a/runtime/snapshot.rs b/runtime/snapshot.rs index 251ee5f41c..bb9bf9166f 100644 --- a/runtime/snapshot.rs +++ b/runtime/snapshot.rs @@ -312,6 +312,7 @@ pub fn create_runtime_snapshot( ), ops::fs_events::deno_fs_events::init_ops(), ops::os::deno_os::init_ops(Default::default()), + ops::otel::deno_otel::init_ops(None), ops::permissions::deno_permissions::init_ops(), ops::process::deno_process::init_ops(None), ops::signal::deno_signal::init_ops(), diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 61e5c77029..d81c82c501 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -505,6 +505,9 @@ impl WebWorker { ), ops::fs_events::deno_fs_events::init_ops_and_esm(), ops::os::deno_os_worker::init_ops_and_esm(), + ops::otel::deno_otel::init_ops_and_esm( + options.bootstrap.otel_config.clone(), + ), ops::permissions::deno_permissions::init_ops_and_esm(), ops::process::deno_process::init_ops_and_esm( services.npm_process_state_provider, diff --git a/runtime/worker.rs b/runtime/worker.rs index 88a61fa938..82df755faf 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -422,6 +422,9 @@ impl MainWorker { ), ops::fs_events::deno_fs_events::init_ops_and_esm(), ops::os::deno_os::init_ops_and_esm(exit_code.clone()), + ops::otel::deno_otel::init_ops_and_esm( + options.bootstrap.otel_config.clone(), + ), ops::permissions::deno_permissions::init_ops_and_esm(), ops::process::deno_process::init_ops_and_esm( services.npm_process_state_provider, diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 3f3c25c5ea..dc989a1c0f 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -1,5 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use crate::ops::otel::OtelConfig; use deno_core::v8; use deno_core::ModuleSpecifier; use serde::Serialize; @@ -118,6 +119,8 @@ pub struct BootstrapOptions { // Used by `deno serve` pub serve_port: Option, pub serve_host: Option, + // OpenTelemetry output options. If `None`, OpenTelemetry is disabled. + pub otel_config: Option, } impl Default for BootstrapOptions { @@ -152,6 +155,7 @@ impl Default for BootstrapOptions { mode: WorkerExecutionMode::None, serve_port: Default::default(), serve_host: Default::default(), + otel_config: None, } } } @@ -193,6 +197,8 @@ struct BootstrapV8<'a>( Option, // serve worker count Option, + // OTEL config + Box<[u8]>, ); impl BootstrapOptions { @@ -219,6 +225,11 @@ impl BootstrapOptions { self.serve_host.as_deref(), serve_is_main, serve_worker_count, + if let Some(otel_config) = self.otel_config.as_ref() { + Box::new([otel_config.console as u8]) + } else { + Box::new([]) + }, ); bootstrap.serialize(ser).unwrap() diff --git a/tests/specs/cli/otel_basic/__test__.jsonc b/tests/specs/cli/otel_basic/__test__.jsonc new file mode 100644 index 0000000000..a9d4fff049 --- /dev/null +++ b/tests/specs/cli/otel_basic/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A main.ts", + "output": "processed\n" +} diff --git a/tests/specs/cli/otel_basic/child.ts b/tests/specs/cli/otel_basic/child.ts new file mode 100644 index 0000000000..72cffd9f0b --- /dev/null +++ b/tests/specs/cli/otel_basic/child.ts @@ -0,0 +1,20 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +async function inner() { + using _span = new Deno.tracing.Span("inner span"); + console.log("log 1"); + await 1; + console.log("log 2"); +} + +Deno.serve({ + port: 0, + onListen({ port }) { + console.log(port.toString()); + }, + handler: async (_req) => { + using _span = new Deno.tracing.Span("outer span"); + await inner(); + return new Response(null, { status: 200 }); + }, +}); diff --git a/tests/specs/cli/otel_basic/deno.json b/tests/specs/cli/otel_basic/deno.json new file mode 100644 index 0000000000..105514e133 --- /dev/null +++ b/tests/specs/cli/otel_basic/deno.json @@ -0,0 +1,4 @@ +{ + "lock": false, + "importMap": "../../../../import_map.json" +} diff --git a/tests/specs/cli/otel_basic/main.ts b/tests/specs/cli/otel_basic/main.ts new file mode 100644 index 0000000000..66ef5c79cc --- /dev/null +++ b/tests/specs/cli/otel_basic/main.ts @@ -0,0 +1,76 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+ +import { assert, assertEquals } from "@std/assert"; +import { TextLineStream } from "@std/streams/text-line-stream"; + +const logs = []; +const spans = []; +let child: Deno.ChildProcess; + +Deno.serve( + { + port: 0, + async onListen({ port }) { + const command = new Deno.Command(Deno.execPath(), { + args: ["run", "-A", "--unstable-otel", "child.ts"], + env: { + OTEL_EXPORTER_OTLP_PROTOCOL: "http/json", + OTEL_EXPORTER_OTLP_ENDPOINT: `http://localhost:${port}`, + OTEL_BSP_SCHEDULE_DELAY: "10", + OTEL_BLRP_SCHEDULE_DELAY: "10", + }, + stdin: "piped", + stdout: "piped", + stderr: "inherit", + }); + child = command.spawn(); + const lines = child.stdout + .pipeThrough(new TextDecoderStream()) + .pipeThrough(new TextLineStream()) + .getReader(); + const line = await lines.read(); + await fetch(`http://localhost:${line.value}/`); + }, + async handler(req) { + try { + const body = await req.json(); + if (body.resourceLogs) { + logs.push(...body.resourceLogs[0].scopeLogs[0].logRecords); + } + if (body.resourceSpans) { + spans.push(...body.resourceSpans[0].scopeSpans[0].spans); + } + + if (logs.length > 2 && spans.length > 1) { + child.kill(); + + const inner = spans.find((s) => s.name === "inner span"); + const outer = spans.find((s) => s.name === "outer span"); + + assertEquals(inner.traceId, outer.traceId); + assertEquals(inner.parentSpanId, outer.spanId); + + assertEquals(logs[1].body.stringValue, "log 1\n"); + assertEquals(logs[1].traceId, inner.traceId); + assertEquals(logs[1].spanId, inner.spanId); + + assertEquals(logs[2].body.stringValue, "log 2\n"); + assertEquals(logs[2].traceId, inner.traceId); + assertEquals(logs[2].spanId, inner.spanId); + + console.log("processed"); + Deno.exit(0); + } + + return Response.json({ partialSuccess: {} }, { status: 200 }); + } catch (e) { + console.error(e); + Deno.exit(1); + } + }, + }, +); + +setTimeout(() => { + assert(false, "test did not finish in time"); +}, 10e3); diff --git a/tools/core_import_map.json b/tools/core_import_map.json index aae4e63a45..0811672b16 100644 --- a/tools/core_import_map.json +++ b/tools/core_import_map.json @@ -247,6 +247,7 @@ "ext:runtime/41_prompt.js": "../runtime/js/41_prompt.js", "ext:runtime/90_deno_ns.js": "../runtime/js/90_deno_ns.js", "ext:runtime/98_global_scope.js": "../runtime/js/98_global_scope.js", + "ext:runtime/telemetry.js": "../runtime/js/telemetry.js", "ext:deno_node/_util/std_fmt_colors.ts": "../ext/node/polyfills/_util/std_fmt_colors.ts", "@std/archive": "../tests/util/std/archive/mod.ts", "@std/archive/tar": "../tests/util/std/archive/tar.ts", From ef4646278efef85a379b1e0f38c028320d07e3aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 13 Nov 2024 12:04:30 +0000 Subject: [PATCH 12/18] feat: upgrade V8 to 13.0 (#26851) --- Cargo.lock | 53 ++++++++++++----------------------------------------- Cargo.toml | 2 +- 2 files changed, 13 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 00c1f0736a..0345685569 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -482,29 +482,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bindgen" -version = "0.69.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" -dependencies = [ - "bitflags 2.6.0", - "cexpr", - "clang-sys", - "itertools 0.10.5", - "lazy_static", - "lazycell", - "log", - "prettyplease 0.2.17", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.72", - "which 4.4.2", -] - [[package]] 
name = "bindgen" version = "0.70.1" @@ -1465,9 +1442,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.318.0" +version = "0.319.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10cae2393219ff9278123f7b24799cdfab37c7d6561b69ca06ced115cac92111" +checksum = "e9dbb841f9850534320d8927dce53ca8d64bafbab5576c2a98f03f9e08534215" dependencies = [ "anyhow", "bincode", @@ -1497,9 +1474,9 @@ dependencies = [ [[package]] name = "deno_core_icudata" -version = "0.0.73" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1" +checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695" [[package]] name = "deno_cron" @@ -1968,9 +1945,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.194.0" +version = "0.195.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f760b492bd638c1dc3e992d11672c259fbe9a233162099a8347591c9e22d0391" +checksum = "797f348c38c07a5398bf790b280077c698e13fb49252f61ca6f6c5c616060292" dependencies = [ "proc-macro-rules", "proc-macro2", @@ -4275,12 +4252,6 @@ dependencies = [ "spin", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.153" @@ -4372,7 +4343,7 @@ version = "1.48.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca8dfd1a173826d193e3b955e07c22765829890f62c677a59c4a410cb4f47c01" dependencies = [ - "bindgen 0.70.1", + "bindgen", "libloading 0.8.5", ] @@ -6437,9 +6408,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.227.0" +version = "0.228.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a8294c2223c53bed343be8b80564ece4dc0d03b643b06fa86c4ccc0e064eda0" +checksum = "bfe23e75c9a167f4e9d67a90d9fcaa622d1eec9aecad526c270e99a92f6915ff" dependencies = [ "num-bigint", "serde", @@ -7974,11 +7945,11 @@ dependencies = [ [[package]] name = "v8" -version = "0.106.0" +version = "130.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a381badc47c6f15acb5fe0b5b40234162349ed9d4e4fd7c83a7f5547c0fc69c5" +checksum = "c23b5c2caff00209b03a716609b275acae94b02dd3b63c4648e7232a84a8402f" dependencies = [ - "bindgen 0.69.4", + "bindgen", "bitflags 2.6.0", "fslock", "gzip-header", diff --git a/Cargo.toml b/Cargo.toml index 50e41145bf..de8b07cd00 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.43.3", features = ["transpiling"] } -deno_core = { version = "0.318.0" } +deno_core = { version = "0.319.0" } deno_bench_util = { version = "0.171.0", path = "./bench_util" } deno_lockfile = "=0.23.1" From 9331e2cef044945a91d53ea4cdc829d304157e0d Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Wed, 13 Nov 2024 14:23:36 +0100 Subject: [PATCH 13/18] chore: update denokv_* (#26850) --- Cargo.lock | 86 +++++++++++++++++------------------------------------- Cargo.toml | 10 +++---- 2 files changed, 31 insertions(+), 65 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0345685569..8d84073a88 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -493,7 +493,7 @@ dependencies = [ "clang-sys", "itertools 0.13.0", "log", - "prettyplease 0.2.17", + "prettyplease", "proc-macro2", "quote", "regex", @@ -1739,7 +1739,7 
@@ dependencies = [ "http-body-util", "log", "num-bigint", - "prost 0.11.9", + "prost", "prost-build", "rand", "rusqlite", @@ -2290,25 +2290,25 @@ dependencies = [ [[package]] name = "denokv_proto" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f629c77d2bf59e2e2a07fd7b14bfffae352a3813fbdcb801e72205741fb7625c" +checksum = "f7ba1f99ed11a9c11e868a8521b1f71a7e1aba785d7f42ea9ecbdc01146c89ec" dependencies = [ "anyhow", "async-trait", "chrono", "futures", "num-bigint", - "prost 0.11.9", + "prost", "serde", "uuid", ] [[package]] name = "denokv_remote" -version = "0.8.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d57717b5123e8d1ec5f52973a67f98e3621274d362d18b245038967b402082df" +checksum = "08ed833073189e8f6d03155fe3b05a024e75e29d8a28a4c2e9ec3b5c925e727b" dependencies = [ "anyhow", "async-stream", @@ -2319,7 +2319,7 @@ dependencies = [ "futures", "http 1.1.0", "log", - "prost 0.11.9", + "prost", "rand", "serde", "serde_json", @@ -2331,9 +2331,9 @@ dependencies = [ [[package]] name = "denokv_sqlite" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c4f5719e2bf698ec4f39fe29d91b62ff06a4b4a37ee481ffb8658d140fed986" +checksum = "9b790f01d1302d53a0c3cbd27de88a06b3abd64ec8ab8673924e490541c7c713" dependencies = [ "anyhow", "async-stream", @@ -4936,7 +4936,7 @@ dependencies = [ "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", - "prost 0.13.3", + "prost", "serde_json", "thiserror", "tokio", @@ -4953,7 +4953,7 @@ dependencies = [ "hex", "opentelemetry", "opentelemetry_sdk", - "prost 0.13.3", + "prost", "serde", "tonic", ] @@ -5388,16 +5388,6 @@ dependencies = [ "yaml_parser", ] -[[package]] -name = "prettyplease" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" -dependencies = [ - "proc-macro2", - "syn 1.0.109", -] - [[package]] name = "prettyplease" version = "0.2.17" @@ -5479,16 +5469,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43d84d1d7a6ac92673717f9f6d1518374ef257669c24ebc5ac25d5033828be58" -[[package]] -name = "prost" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" -dependencies = [ - "bytes", - "prost-derive 0.11.9", -] - [[package]] name = "prost" version = "0.13.3" @@ -5496,42 +5476,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" dependencies = [ "bytes", - "prost-derive 0.13.3", + "prost-derive", ] [[package]] name = "prost-build" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" dependencies = [ "bytes", - "heck 0.4.1", - "itertools 0.10.5", - "lazy_static", + "heck 0.5.0", + "itertools 0.13.0", "log", "multimap", + "once_cell", "petgraph", - "prettyplease 0.1.25", - "prost 0.11.9", + "prettyplease", + "prost", "prost-types", "regex", - "syn 1.0.109", + "syn 2.0.72", "tempfile", - "which 4.4.2", -] - -[[package]] -name = "prost-derive" -version = "0.11.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" -dependencies = [ - "anyhow", - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", ] [[package]] @@ -5549,11 +5515,11 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.11.9" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" dependencies = [ - "prost 0.11.9", + "prost", ] [[package]] @@ -7380,7 +7346,7 @@ dependencies = [ "os_pipe", "parking_lot", "pretty_assertions", - "prost 0.11.9", + "prost", "prost-build", "regex", "reqwest", @@ -7628,7 +7594,7 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project", - "prost 0.13.3", + "prost", "socket2", "tokio", "tokio-stream", diff --git a/Cargo.toml b/Cargo.toml index de8b07cd00..6a8855629c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,10 +60,10 @@ deno_terminal = "0.2.0" napi_sym = { version = "0.107.0", path = "./ext/napi/sym" } test_util = { package = "test_server", path = "./tests/util/server" } -denokv_proto = "0.8.1" -denokv_remote = "0.8.1" +denokv_proto = "0.8.4" +denokv_remote = "0.8.4" # denokv_sqlite brings in bundled sqlite if we don't disable the default features -denokv_sqlite = { default-features = false, version = "0.8.2" } +denokv_sqlite = { default-features = false, version = "0.8.4" } # exts deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" } @@ -157,8 +157,8 @@ percent-encoding = "2.3.0" phf = { version = "0.11", features = ["macros"] } pin-project = "1.0.11" # don't pin because they yank crates from cargo pretty_assertions = "=1.4.0" -prost = "0.11" -prost-build = "0.11" +prost = "0.13" +prost-build = "0.13" rand = "=0.8.5" regex = "^1.7.0" reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955 From 7d9ba09f5a4464072476b8992e43f5e5c30bde3a Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 13 Nov 2024 19:47:01 +0530 Subject: [PATCH 14/18] fix(ext/node): use ERR_NOT_IMPLEMENTED for notImplemented (#26853) --- ext/node/polyfills/_utils.ts | 4 ++-- ext/node/polyfills/internal/errors.ts | 10 ++++++++++ tests/unit_node/perf_hooks_test.ts | 15 ++++++++++++++- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/ext/node/polyfills/_utils.ts b/ext/node/polyfills/_utils.ts index b50c113e14..79d84e00f0 100644 --- a/ext/node/polyfills/_utils.ts +++ b/ext/node/polyfills/_utils.ts @@ -17,6 +17,7 @@ const { import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js"; import { errorMap } from "ext:deno_node/internal_binding/uv.ts"; import { codes } from "ext:deno_node/internal/error_codes.ts"; +import { ERR_NOT_IMPLEMENTED } from "ext:deno_node/internal/errors.ts"; export type BinaryEncodings = "binary"; @@ -34,8 +35,7 @@ export type TextEncodings = export type Encodings = BinaryEncodings | TextEncodings; export function notImplemented(msg: string): never { - const message = msg ? 
`Not implemented: ${msg}` : "Not implemented"; - throw new Error(message); + throw new ERR_NOT_IMPLEMENTED(msg); } export function warnNotImplemented(msg?: string) { diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 962ca86e92..61b53fa968 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -2390,6 +2390,15 @@ export class ERR_INVALID_RETURN_VALUE extends NodeTypeError { } } +export class ERR_NOT_IMPLEMENTED extends NodeError { + constructor(message?: string) { + super( + "ERR_NOT_IMPLEMENTED", + message ? `Not implemented: ${message}` : "Not implemented", + ); + } +} + export class ERR_INVALID_URL extends NodeTypeError { input: string; constructor(input: string) { @@ -2862,6 +2871,7 @@ export default { ERR_INVALID_SYNC_FORK_INPUT, ERR_INVALID_THIS, ERR_INVALID_TUPLE, + ERR_NOT_IMPLEMENTED, ERR_INVALID_URI, ERR_INVALID_URL, ERR_INVALID_URL_SCHEME, diff --git a/tests/unit_node/perf_hooks_test.ts b/tests/unit_node/perf_hooks_test.ts index d5b9000410..8247f9fd3a 100644 --- a/tests/unit_node/perf_hooks_test.ts +++ b/tests/unit_node/perf_hooks_test.ts @@ -1,6 +1,10 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. import * as perfHooks from "node:perf_hooks"; -import { performance, PerformanceObserver } from "node:perf_hooks"; +import { + monitorEventLoopDelay, + performance, + PerformanceObserver, +} from "node:perf_hooks"; import { assertEquals, assertThrows } from "@std/assert"; Deno.test({ @@ -68,3 +72,12 @@ Deno.test("[perf_hooks]: eventLoopUtilization", () => { assertEquals(typeof obj.active, "number"); assertEquals(typeof obj.utilization, "number"); }); + +Deno.test("[perf_hooks]: monitorEventLoopDelay", () => { + const e = assertThrows(() => { + monitorEventLoopDelay({ resolution: 1 }); + }); + + // deno-lint-ignore no-explicit-any + assertEquals((e as any).code, "ERR_NOT_IMPLEMENTED"); +}); From 6a4c6d83bacf5f03628a494778a30bce970f7cbc Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 13 Nov 2024 20:07:45 +0530 Subject: [PATCH 15/18] fix(ext/node): zlib.crc32() (#26856) Fixes https://github.com/denoland/deno/issues/26845 --- ext/node/lib.rs | 1 + ext/node/ops/zlib/mod.rs | 10 +++++++ ext/node/polyfills/zlib.ts | 53 ++++++++++++++++++++++++++++++++++++ tests/unit_node/zlib_test.ts | 5 ++++ 4 files changed, 69 insertions(+) diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 9ca21e9941..6d320b92c1 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -345,6 +345,7 @@ deno_core::extension!(deno_node, ops::zlib::op_zlib_write, ops::zlib::op_zlib_init, ops::zlib::op_zlib_reset, + ops::zlib::op_zlib_crc32, ops::zlib::brotli::op_brotli_compress, ops::zlib::brotli::op_brotli_compress_async, ops::zlib::brotli::op_create_brotli_compress, diff --git a/ext/node/ops/zlib/mod.rs b/ext/node/ops/zlib/mod.rs index e75ef050d2..991c0925d2 100644 --- a/ext/node/ops/zlib/mod.rs +++ b/ext/node/ops/zlib/mod.rs @@ -1,6 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use deno_core::op2; +use libc::c_ulong; use std::borrow::Cow; use std::cell::RefCell; use zlib::*; @@ -381,6 +382,15 @@ pub fn op_zlib_close_if_pending( Ok(()) } +#[op2(fast)] +#[smi] +pub fn op_zlib_crc32(#[buffer] data: &[u8], #[smi] value: u32) -> u32 { + // SAFETY: `data` is a valid buffer. 
+ unsafe { + zlib::crc32(value as c_ulong, data.as_ptr(), data.len() as u32) as u32 + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/ext/node/polyfills/zlib.ts b/ext/node/polyfills/zlib.ts index 3fe5f8bbd9..6e5d02b5be 100644 --- a/ext/node/polyfills/zlib.ts +++ b/ext/node/polyfills/zlib.ts @@ -40,6 +40,58 @@ import { createBrotliCompress, createBrotliDecompress, } from "ext:deno_node/_brotli.js"; +import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import { validateUint32 } from "ext:deno_node/internal/validators.mjs"; +import { op_zlib_crc32 } from "ext:core/ops"; +import { core, primordials } from "ext:core/mod.js"; +import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; +const { + Uint8Array, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + DataViewPrototypeGetBuffer, + DataViewPrototypeGetByteLength, + DataViewPrototypeGetByteOffset, +} = primordials; +const { isTypedArray, isDataView } = core; + +const enc = new TextEncoder(); +const toU8 = (input) => { + if (typeof input === "string") { + return enc.encode(input); + } + + if (isTypedArray(input)) { + return new Uint8Array( + TypedArrayPrototypeGetBuffer(input), + TypedArrayPrototypeGetByteOffset(input), + TypedArrayPrototypeGetByteLength(input), + ); + } else if (isDataView(input)) { + return new Uint8Array( + DataViewPrototypeGetBuffer(input), + DataViewPrototypeGetByteOffset(input), + DataViewPrototypeGetByteLength(input), + ); + } + + return input; +}; + +export function crc32(data, value = 0) { + if (typeof data !== "string" && !isArrayBufferView(data)) { + throw new ERR_INVALID_ARG_TYPE("data", [ + "Buffer", + "TypedArray", + "DataView", + "string", + ], data); + } + validateUint32(value, "value"); + + return op_zlib_crc32(toU8(data), value); +} export class Options { constructor() { @@ -87,6 +139,7 @@ export default { BrotliOptions, codes, constants, + crc32, createBrotliCompress, createBrotliDecompress, createDeflate, diff --git a/tests/unit_node/zlib_test.ts b/tests/unit_node/zlib_test.ts index 0eff95445d..de2d2450d1 100644 --- a/tests/unit_node/zlib_test.ts +++ b/tests/unit_node/zlib_test.ts @@ -7,6 +7,7 @@ import { brotliCompressSync, brotliDecompressSync, constants, + crc32, createBrotliCompress, createBrotliDecompress, createDeflate, @@ -225,3 +226,7 @@ Deno.test("gzip() and gzipSync() accept ArrayBuffer", async () => { const outputSync = gzipSync(buf); assert(outputSync instanceof Buffer); }); + +Deno.test("crc32()", () => { + assertEquals(crc32("hello world"), 222957957); +}); From f091d1ad69b4e5217ae3272b641171781a372c4f Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 13 Nov 2024 10:10:09 -0500 Subject: [PATCH 16/18] feat(node): stabilize detecting if CJS via `"type": "commonjs"` in a package.json (#26439) This will respect `"type": "commonjs"` in a package.json to determine if `.js`/`.jsx`/`.ts`/.tsx` files are CJS or ESM. If the file is found to be ESM it will be loaded as ESM though. 
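
For illustration, with a hypothetical package.json and script like the
following (not taken from the tests in this change), `deno run main.js` now
resolves `main.js` as CommonJS because of the `"type": "commonjs"` field, so
`require` and `module.exports` behave as they do in Node:

    // package.json (sibling of main.js)
    { "type": "commonjs" }

    // main.js - treated as CJS, so the CommonJS wrapper variables exist
    const path = require("node:path");
    module.exports.base = (p) => path.basename(p);
    console.log(module.exports.base("/tmp/file.txt")); // prints "file.txt"

As noted above, a file that uses `import`/`export` syntax is still loaded as
ESM even when this field is set.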
--- cli/args/flags.rs | 2 - cli/args/mod.rs | 70 ++++- cli/emit.rs | 8 +- cli/factory.rs | 25 +- cli/graph_util.rs | 79 ++++- cli/lsp/analysis.rs | 37 ++- cli/lsp/completions.rs | 22 +- cli/lsp/config.rs | 12 + cli/lsp/diagnostics.rs | 1 + cli/lsp/documents.rs | 136 +++++++-- cli/lsp/language_server.rs | 40 ++- cli/lsp/repl.rs | 2 +- cli/lsp/resolver.rs | 287 ++++++++++------- cli/lsp/tsc.rs | 34 +-- cli/module_loader.rs | 79 +++-- cli/node.rs | 12 +- cli/resolver.rs | 289 +++++++++++------- cli/schemas/config-file.v1.json | 1 - cli/standalone/binary.rs | 1 - cli/standalone/mod.rs | 40 ++- cli/tools/coverage/mod.rs | 2 +- cli/tools/installer.rs | 1 + cli/tools/jupyter/mod.rs | 2 +- cli/tools/lint/mod.rs | 2 +- cli/tools/lint/rules/no_sloppy_imports.rs | 1 + cli/tools/repl/session.rs | 17 +- cli/tools/run/hmr.rs | 7 - cli/tsc/99_main_compiler.js | 41 +-- cli/tsc/mod.rs | 78 +++-- cli/worker.rs | 2 + ext/node/lib.rs | 7 +- ext/node/ops/require.rs | 44 +-- ext/node/polyfills/01_require.js | 31 +- ext/node/polyfills/process.ts | 2 +- resolvers/node/package_json.rs | 4 +- resolvers/node/resolution.rs | 23 +- runtime/fmt_errors.rs | 7 +- runtime/web_worker.rs | 7 + tests/integration/lsp_tests.rs | 49 +++ tests/node_compat/package.json | 3 + tests/node_compat/test/common/package.json | 1 - tests/node_compat/test/fixtures/package.json | 1 - tests/node_compat/test/internet/package.json | 1 - tests/node_compat/test/parallel/package.json | 1 - .../node_compat/test/pseudo-tty/package.json | 1 - tests/node_compat/test/pummel/package.json | 1 - .../node_compat/test/sequential/package.json | 1 - .../1.0.0/install.js | 12 + .../1.0.0/package.json | 7 + .../install-no-ext/1.0.0/install/check.js | 1 + .../install-no-ext/1.0.0/install/index.js | 1 + .../install-no-ext/1.0.0/install/output.js | 1 + .../install-no-ext/1.0.0/package.json | 7 + tests/specs/compile/detect_cjs/deno.json | 5 - .../__test__.jsonc | 0 .../{detect_cjs => package_json_type}/add.js | 0 .../compile.out | 0 .../{detect_cjs => package_json_type}/main.js | 0 .../output.out | 0 .../package.json | 0 .../subtract.ts | 0 .../eval/pkg_json_type_cjs/__test__.jsonc | 4 + .../specs/eval/pkg_json_type_cjs/package.json | 3 + .../__test__.jsonc | 5 + .../output.out | 4 + .../package.json | 5 + .../scripts_install_no_ext/__test__.jsonc | 5 + .../install/scripts_install_no_ext/output.out | 4 + .../scripts_install_no_ext/package.json | 5 + tests/specs/mod.rs | 20 ++ tests/specs/npm/dual_cjs_esm/__test__.jsonc | 4 - .../dual_cjs_esm/cjs_referrer/__test__.jsonc | 14 + .../npm/dual_cjs_esm/cjs_referrer/check.out | 8 + .../npm/dual_cjs_esm/cjs_referrer/main.cts | 4 + .../npm/dual_cjs_esm/cjs_referrer/main.out | 4 + .../dual_cjs_esm/cjs_referrer/package.json | 5 + .../dual_cjs_esm/esm_referrer/__test__.jsonc | 4 + .../{dual_cjs_esm => esm_referrer}/main.out | 0 .../{dual_cjs_esm => esm_referrer}/main.ts | 0 .../ts_referrer_type_cjs/__test__.jsonc | 14 + .../ts_referrer_type_cjs/check.out | 8 + .../ts_referrer_type_cjs/main.out | 4 + .../dual_cjs_esm/ts_referrer_type_cjs/main.ts | 4 + .../ts_referrer_type_cjs/package.json | 6 + .../__test__.jsonc | 5 +- .../foo/config.js | 0 .../foo/package.json | 5 + .../main.out | 0 .../{permissions_outside_package => }/main.ts | 2 +- .../permissions_outside_package/package.json | 2 + .../foo/package.json | 4 - .../specs/run/import_common_js/__test__.jsonc | 20 +- tests/specs/run/import_common_js/a.js | 7 - .../run/import_common_js/exports_error.out | 7 +- tests/specs/run/import_common_js/index.cjs | 2 - 
tests/specs/run/import_common_js/index.out | 2 +- tests/specs/run/import_common_js/main.out | 2 - .../run/import_common_js/module_error.out | 7 +- .../node_modules/foo/index.mjs | 1 - .../run/import_common_js/require_error.out | 7 +- .../npm_pkg_requires_esm_js/__test__.jsonc | 5 + .../specs/run/npm_pkg_requires_esm_js/file.js | 1 + .../npm_pkg_requires_esm_js/logs_require.js | 1 + .../specs/run/npm_pkg_requires_esm_js/main.js | 5 + .../node_modules/package/index.js | 3 + .../node_modules/package/package.json | 4 + .../run/npm_pkg_requires_esm_js/output.out | 12 + .../run/npm_pkg_requires_esm_js/package.json | 0 .../commonjs/basic/deno.jsonc | 5 - .../commonjs/basic/main_mix.out | 7 +- .../package_json_type/commonjs/jsx/deno.jsonc | 5 +- .../run/package_json_type/none/deno.jsonc | 5 - .../run/package_json_type/none/main_cjs.out | 7 +- tests/specs/run/require_esm/main.out | 11 +- tests/specs/run/stdin_type_cjs/__test__.jsonc | 5 + tests/specs/run/stdin_type_cjs/package.json | 3 + .../run/stdin_type_cjs/stdin_read_all.ts | 1 + tests/specs/schema.json | 3 + tests/util/server/src/servers/mod.rs | 22 +- 119 files changed, 1244 insertions(+), 616 deletions(-) create mode 100644 tests/node_compat/package.json delete mode 100644 tests/node_compat/test/common/package.json delete mode 100644 tests/node_compat/test/fixtures/package.json delete mode 100644 tests/node_compat/test/internet/package.json delete mode 100644 tests/node_compat/test/parallel/package.json delete mode 100644 tests/node_compat/test/pseudo-tty/package.json delete mode 100644 tests/node_compat/test/pummel/package.json delete mode 100644 tests/node_compat/test/sequential/package.json create mode 100644 tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/install.js create mode 100644 tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/package.json create mode 100644 tests/registry/npm/@denotest/install-no-ext/1.0.0/install/check.js create mode 100644 tests/registry/npm/@denotest/install-no-ext/1.0.0/install/index.js create mode 100644 tests/registry/npm/@denotest/install-no-ext/1.0.0/install/output.js create mode 100644 tests/registry/npm/@denotest/install-no-ext/1.0.0/package.json delete mode 100644 tests/specs/compile/detect_cjs/deno.json rename tests/specs/compile/{detect_cjs => package_json_type}/__test__.jsonc (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/add.js (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/compile.out (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/main.js (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/output.out (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/package.json (100%) rename tests/specs/compile/{detect_cjs => package_json_type}/subtract.ts (100%) create mode 100644 tests/specs/eval/pkg_json_type_cjs/__test__.jsonc create mode 100644 tests/specs/eval/pkg_json_type_cjs/package.json create mode 100644 tests/specs/install/scripts_install_launch_cjs_temp_dir/__test__.jsonc create mode 100644 tests/specs/install/scripts_install_launch_cjs_temp_dir/output.out create mode 100644 tests/specs/install/scripts_install_launch_cjs_temp_dir/package.json create mode 100644 tests/specs/install/scripts_install_no_ext/__test__.jsonc create mode 100644 tests/specs/install/scripts_install_no_ext/output.out create mode 100644 tests/specs/install/scripts_install_no_ext/package.json delete mode 100644 tests/specs/npm/dual_cjs_esm/__test__.jsonc create mode 100644 
tests/specs/npm/dual_cjs_esm/cjs_referrer/__test__.jsonc create mode 100644 tests/specs/npm/dual_cjs_esm/cjs_referrer/check.out create mode 100644 tests/specs/npm/dual_cjs_esm/cjs_referrer/main.cts create mode 100644 tests/specs/npm/dual_cjs_esm/cjs_referrer/main.out create mode 100644 tests/specs/npm/dual_cjs_esm/cjs_referrer/package.json create mode 100644 tests/specs/npm/dual_cjs_esm/esm_referrer/__test__.jsonc rename tests/specs/npm/dual_cjs_esm/{dual_cjs_esm => esm_referrer}/main.out (100%) rename tests/specs/npm/dual_cjs_esm/{dual_cjs_esm => esm_referrer}/main.ts (100%) create mode 100644 tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/__test__.jsonc create mode 100644 tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/check.out create mode 100644 tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.out create mode 100644 tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.ts create mode 100644 tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/package.json rename tests/specs/npm/permissions_outside_package/{permissions_outside_package => }/foo/config.js (100%) create mode 100644 tests/specs/npm/permissions_outside_package/foo/package.json rename tests/specs/npm/permissions_outside_package/{permissions_outside_package => }/main.out (100%) rename tests/specs/npm/permissions_outside_package/{permissions_outside_package => }/main.ts (64%) create mode 100644 tests/specs/npm/permissions_outside_package/package.json delete mode 100644 tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/package.json delete mode 100644 tests/specs/run/import_common_js/a.js create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/__test__.jsonc create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/file.js create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/logs_require.js create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/main.js create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/index.js create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/package.json create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/output.out create mode 100644 tests/specs/run/npm_pkg_requires_esm_js/package.json delete mode 100644 tests/specs/run/package_json_type/commonjs/basic/deno.jsonc delete mode 100644 tests/specs/run/package_json_type/none/deno.jsonc create mode 100644 tests/specs/run/stdin_type_cjs/__test__.jsonc create mode 100644 tests/specs/run/stdin_type_cjs/package.json create mode 100644 tests/specs/run/stdin_type_cjs/stdin_read_all.ts diff --git a/cli/args/flags.rs b/cli/args/flags.rs index eb77971748..37f5899372 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -576,7 +576,6 @@ pub struct UnstableConfig { // TODO(bartlomieju): remove in Deno 2.5 pub legacy_flag_enabled: bool, // --unstable pub bare_node_builtins: bool, - pub detect_cjs: bool, pub sloppy_imports: bool, pub features: Vec, // --unstabe-kv --unstable-cron } @@ -5720,7 +5719,6 @@ fn unstable_args_parse( flags.unstable_config.bare_node_builtins = matches.get_flag("unstable-bare-node-builtins"); - flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs"); flags.unstable_config.sloppy_imports = matches.get_flag("unstable-sloppy-imports"); diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 3aaf2bd438..5e5bae87da 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -7,6 +7,7 @@ mod import_map; mod lockfile; mod package_json; +use deno_ast::MediaType; use deno_ast::SourceMapOption; use 
deno_config::deno_json::NodeModulesDirMode; use deno_config::workspace::CreateResolverOptions; @@ -34,7 +35,6 @@ use import_map::resolve_import_map_value_from_specifier; pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::ConfigFile; pub use deno_config::deno_json::FmtOptionsConfig; -pub use deno_config::deno_json::JsxImportSourceConfig; pub use deno_config::deno_json::LintRulesConfig; pub use deno_config::deno_json::ProseWrap; pub use deno_config::deno_json::TsConfig; @@ -1155,21 +1155,34 @@ impl CliOptions { self .main_module_cell .get_or_init(|| { - let main_module = match &self.flags.subcommand { + Ok(match &self.flags.subcommand { DenoSubcommand::Compile(compile_flags) => { resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())? } DenoSubcommand::Eval(_) => { - resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())? } DenoSubcommand::Repl(_) => { - resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())? } DenoSubcommand::Run(run_flags) => { if run_flags.is_stdin() { - resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())? + resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())? } else { - resolve_url_or_path(&run_flags.script, self.initial_cwd())? + let url = + resolve_url_or_path(&run_flags.script, self.initial_cwd())?; + if self.is_node_main() + && url.scheme() == "file" + && MediaType::from_specifier(&url) == MediaType::Unknown + { + try_resolve_node_binary_main_entrypoint( + &run_flags.script, + self.initial_cwd(), + )? + .unwrap_or(url) + } else { + url + } } } DenoSubcommand::Serve(run_flags) => { @@ -1178,9 +1191,7 @@ impl CliOptions { _ => { bail!("No main module.") } - }; - - Ok(main_module) + }) }) .as_ref() .map_err(|err| deno_core::anyhow::anyhow!("{}", err)) @@ -1229,7 +1240,7 @@ impl CliOptions { // This is triggered via a secret environment variable which is used // for functionality like child_process.fork. Users should NOT depend // on this functionality. - pub fn is_npm_main(&self) -> bool { + pub fn is_node_main(&self) -> bool { NPM_PROCESS_STATE.is_some() } @@ -1607,9 +1618,11 @@ impl CliOptions { || self.workspace().has_unstable("bare-node-builtins") } - pub fn unstable_detect_cjs(&self) -> bool { - self.flags.unstable_config.detect_cjs - || self.workspace().has_unstable("detect-cjs") + pub fn detect_cjs(&self) -> bool { + // only enabled when there's a package.json in order to not have a + // perf penalty for non-npm Deno projects of searching for the closest + // package.json beside each module + self.workspace().package_jsons().next().is_some() || self.is_node_main() } fn byonm_enabled(&self) -> bool { @@ -1673,7 +1686,6 @@ impl CliOptions { "byonm", "bare-node-builtins", "fmt-component", - "detect-cjs", ]) .collect(); @@ -1811,6 +1823,36 @@ fn resolve_node_modules_folder( Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) } +fn try_resolve_node_binary_main_entrypoint( + specifier: &str, + initial_cwd: &Path, +) -> Result, AnyError> { + // node allows running files at paths without a `.js` extension + // or at directories with an index.js file + let path = deno_core::normalize_path(initial_cwd.join(specifier)); + if path.is_dir() { + let index_file = path.join("index.js"); + Ok(if index_file.is_file() { + Some(deno_path_util::url_from_file_path(&index_file)?) 
+ } else { + None + }) + } else { + let path = path.with_extension( + path + .extension() + .and_then(|s| s.to_str()) + .map(|s| format!("{}.js", s)) + .unwrap_or("js".to_string()), + ); + if path.is_file() { + Ok(Some(deno_path_util::url_from_file_path(&path)?)) + } else { + Ok(None) + } + } +} + fn resolve_import_map_specifier( maybe_import_map_path: Option<&str>, maybe_config_file: Option<&ConfigFile>, diff --git a/cli/emit.rs b/cli/emit.rs index 8c4f2091cf..3cd23b7abb 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -181,7 +181,6 @@ impl Emitter { pub async fn load_and_emit_for_hmr( &self, specifier: &ModuleSpecifier, - module_kind: deno_ast::ModuleKind, ) -> Result { let media_type = MediaType::from_specifier(specifier); let source_code = tokio::fs::read_to_string( @@ -203,11 +202,16 @@ impl Emitter { // this statement is probably wrong) let mut options = self.transpile_and_emit_options.1.clone(); options.source_map = SourceMapOption::None; + let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( + specifier, + media_type, + parsed_source.compute_is_script(), + )?; let transpiled_source = parsed_source .transpile( &self.transpile_and_emit_options.0, &deno_ast::TranspileModuleOptions { - module_kind: Some(module_kind), + module_kind: Some(ModuleKind::from_is_cjs(is_cjs)), }, &options, )? diff --git a/cli/factory.rs b/cli/factory.rs index 417f771a30..5cb2dd7b3a 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -42,12 +42,12 @@ use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::resolver::CjsTracker; -use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; -use crate::resolver::CliGraphResolver; -use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliNodeResolver; +use crate::resolver::CliResolver; +use crate::resolver::CliResolverOptions; use crate::resolver::CliSloppyImportsResolver; +use crate::resolver::IsCjsResolverOptions; use crate::resolver::NpmModuleLoader; use crate::resolver::SloppyImportsCachedFs; use crate::standalone::DenoCompileBinaryWriter; @@ -201,7 +201,7 @@ struct CliFactoryServices { parsed_source_cache: Deferred>, permission_desc_parser: Deferred>, pkg_json_resolver: Deferred>, - resolver: Deferred>, + resolver: Deferred>, root_cert_store_provider: Deferred>, root_permissions_container: Deferred, sloppy_imports_resolver: Deferred>>, @@ -523,14 +523,14 @@ impl CliFactory { .await } - pub async fn resolver(&self) -> Result<&Arc, AnyError> { + pub async fn resolver(&self) -> Result<&Arc, AnyError> { self .services .resolver .get_or_try_init_async( async { let cli_options = self.cli_options()?; - Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions { + Ok(Arc::new(CliResolver::new(CliResolverOptions { sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(), node_resolver: Some(self.cli_node_resolver().await?.clone()), npm_resolver: if cli_options.no_npm() { @@ -541,9 +541,6 @@ impl CliFactory { workspace_resolver: self.workspace_resolver().await?.clone(), bare_node_builtins_enabled: cli_options .unstable_bare_node_builtins(), - maybe_jsx_import_source_config: cli_options - .workspace() - .to_maybe_jsx_import_source_config()?, maybe_vendor_dir: cli_options.vendor_dir_path(), }))) } @@ -652,7 +649,6 @@ impl CliFactory { self.cjs_tracker()?.clone(), self.fs().clone(), Some(self.parsed_source_cache().clone()), - self.cli_options()?.is_npm_main(), ); Ok(Arc::new(NodeCodeTranslator::new( @@ -706,6 +702,7 @@ impl 
CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(ModuleGraphBuilder::new( self.caches()?.clone(), + self.cjs_tracker()?.clone(), cli_options.clone(), self.file_fetcher()?.clone(), self.fs().clone(), @@ -794,8 +791,9 @@ impl CliFactory { Ok(Arc::new(CjsTracker::new( self.in_npm_pkg_checker()?.clone(), self.pkg_json_resolver().clone(), - CjsTrackerOptions { - unstable_detect_cjs: options.unstable_detect_cjs(), + IsCjsResolverOptions { + detect_cjs: options.detect_cjs(), + is_node_main: options.is_node_main(), }, ))) }) @@ -809,7 +807,6 @@ impl CliFactory { .cli_node_resolver .get_or_try_init_async(async { Ok(Arc::new(CliNodeResolver::new( - self.cjs_tracker()?.clone(), self.fs().clone(), self.in_npm_pkg_checker()?.clone(), self.node_resolver().await?.clone(), @@ -950,10 +947,8 @@ impl CliFactory { let create_hmr_runner = if cli_options.has_hmr() { let watcher_communicator = self.watcher_communicator.clone().unwrap(); let emitter = self.emitter()?.clone(); - let cjs_tracker = self.cjs_tracker()?.clone(); let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| { Box::new(HmrRunner::new( - cjs_tracker.clone(), emitter.clone(), session, watcher_communicator.clone(), diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 46257cf785..3f48449bc5 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -13,16 +13,19 @@ use crate::colors; use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; use crate::npm::CliNpmResolver; -use crate::resolver::CliGraphResolver; +use crate::resolver::CjsTracker; +use crate::resolver::CliResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::workspace::JsrPackageConfig; use deno_core::anyhow::bail; use deno_graph::source::LoaderChecksum; +use deno_graph::source::ResolutionMode; use deno_graph::FillFromLockfileOptions; use deno_graph::JsrLoadError; use deno_graph::ModuleLoadError; @@ -379,6 +382,7 @@ pub struct BuildFastCheckGraphOptions<'a> { pub struct ModuleGraphBuilder { caches: Arc, + cjs_tracker: Arc, cli_options: Arc, file_fetcher: Arc, fs: Arc, @@ -389,7 +393,7 @@ pub struct ModuleGraphBuilder { module_info_cache: Arc, npm_resolver: Arc, parsed_source_cache: Arc, - resolver: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, } @@ -397,6 +401,7 @@ impl ModuleGraphBuilder { #[allow(clippy::too_many_arguments)] pub fn new( caches: Arc, + cjs_tracker: Arc, cli_options: Arc, file_fetcher: Arc, fs: Arc, @@ -407,11 +412,12 @@ impl ModuleGraphBuilder { module_info_cache: Arc, npm_resolver: Arc, parsed_source_cache: Arc, - resolver: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, ) -> Self { Self { caches, + cjs_tracker, cli_options, file_fetcher, fs, @@ -518,7 +524,7 @@ impl ModuleGraphBuilder { None => MutLoaderRef::Owned(self.create_graph_loader()), }; let cli_resolver = &self.resolver; - let graph_resolver = cli_resolver.as_graph_resolver(); + let graph_resolver = self.create_graph_resolver()?; let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); let maybe_file_watcher_reporter = self .maybe_file_watcher_reporter @@ -543,7 +549,7 @@ impl ModuleGraphBuilder { npm_resolver: Some(&graph_npm_resolver), module_analyzer: &analyzer, reporter: maybe_file_watcher_reporter, - resolver: 
Some(graph_resolver), + resolver: Some(&graph_resolver), locker: locker.as_mut().map(|l| l as _), }, ) @@ -666,7 +672,7 @@ impl ModuleGraphBuilder { }; let parser = self.parsed_source_cache.as_capturing_parser(); let cli_resolver = &self.resolver; - let graph_resolver = cli_resolver.as_graph_resolver(); + let graph_resolver = self.create_graph_resolver()?; let graph_npm_resolver = cli_resolver.create_graph_npm_resolver(); graph.build_fast_check_type_graph( @@ -675,7 +681,7 @@ impl ModuleGraphBuilder { fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_dts: false, jsr_url_provider: &CliJsrUrlProvider, - resolver: Some(graph_resolver), + resolver: Some(&graph_resolver), npm_resolver: Some(&graph_npm_resolver), workspace_fast_check: options.workspace_fast_check, }, @@ -739,6 +745,18 @@ impl ModuleGraphBuilder { }, ) } + + fn create_graph_resolver(&self) -> Result { + let jsx_import_source_config = self + .cli_options + .workspace() + .to_maybe_jsx_import_source_config()?; + Ok(CliGraphResolver { + cjs_tracker: &self.cjs_tracker, + resolver: &self.resolver, + jsx_import_source_config, + }) + } } /// Adds more explanatory information to a resolution error. @@ -1143,6 +1161,53 @@ fn format_deno_graph_error(err: &dyn Error) -> String { message } +#[derive(Debug)] +struct CliGraphResolver<'a> { + cjs_tracker: &'a CjsTracker, + resolver: &'a CliResolver, + jsx_import_source_config: Option, +} + +impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> { + fn default_jsx_import_source(&self) -> Option { + self + .jsx_import_source_config + .as_ref() + .and_then(|c| c.default_specifier.clone()) + } + + fn default_jsx_import_source_types(&self) -> Option { + self + .jsx_import_source_config + .as_ref() + .and_then(|c| c.default_types_specifier.clone()) + } + + fn jsx_import_source_module(&self) -> &str { + self + .jsx_import_source_config + .as_ref() + .map(|c| c.module.as_str()) + .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) + } + + fn resolve( + &self, + raw_specifier: &str, + referrer_range: &deno_graph::Range, + mode: ResolutionMode, + ) -> Result { + self.resolver.resolve( + raw_specifier, + referrer_range, + self + .cjs_tracker + .get_referrer_kind(&referrer_range.specifier), + mode, + ) + } +} + #[cfg(test)] mod test { use std::sync::Arc; diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 683a59c219..9f26de70cb 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -39,6 +39,7 @@ use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; use deno_semver::Version; use import_map::ImportMap; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Regex; use std::borrow::Cow; @@ -467,6 +468,7 @@ impl<'a> TsResponseImportMapper<'a> { &self, specifier: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> Option { let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier); let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain( @@ -477,7 +479,7 @@ impl<'a> TsResponseImportMapper<'a> { for specifier in specifiers { if let Some(specifier) = self .resolver - .as_graph_resolver(Some(&self.file_referrer)) + .as_cli_resolver(Some(&self.file_referrer)) .resolve( &specifier, &deno_graph::Range { @@ -485,6 +487,7 @@ impl<'a> TsResponseImportMapper<'a> { start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, ResolutionMode::Types, ) .ok() @@ -507,10 +510,11 @@ impl<'a> TsResponseImportMapper<'a> { &self, specifier_text: 
&str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> bool { self .resolver - .as_graph_resolver(Some(&self.file_referrer)) + .as_cli_resolver(Some(&self.file_referrer)) .resolve( specifier_text, &deno_graph::Range { @@ -518,6 +522,7 @@ impl<'a> TsResponseImportMapper<'a> { start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, deno_graph::source::ResolutionMode::Types, ) .is_ok() @@ -586,6 +591,7 @@ fn try_reverse_map_package_json_exports( /// like an import and rewrite the import specifier to include the extension pub fn fix_ts_import_changes( referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, changes: &[tsc::FileTextChanges], language_server: &language_server::Inner, ) -> Result, AnyError> { @@ -602,8 +608,8 @@ pub fn fix_ts_import_changes( if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) { let specifier = captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); - if let Some(new_specifier) = - import_mapper.check_unresolved_specifier(specifier, referrer) + if let Some(new_specifier) = import_mapper + .check_unresolved_specifier(specifier, referrer, referrer_kind) { line.replace(specifier, &new_specifier) } else { @@ -633,6 +639,7 @@ pub fn fix_ts_import_changes( /// resolution by Deno (includes the extension). fn fix_ts_import_action<'a>( referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, action: &'a tsc::CodeFixAction, language_server: &language_server::Inner, ) -> Option> { @@ -652,7 +659,7 @@ fn fix_ts_import_action<'a>( }; let import_mapper = language_server.get_ts_response_import_mapper(referrer); if let Some(new_specifier) = - import_mapper.check_unresolved_specifier(specifier, referrer) + import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind) { let description = action.description.replace(specifier, &new_specifier); let changes = action @@ -683,7 +690,7 @@ fn fix_ts_import_action<'a>( fix_id: None, fix_all_description: None, })) - } else if !import_mapper.is_valid_import(specifier, referrer) { + } else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) { None } else { Some(Cow::Borrowed(action)) @@ -1017,6 +1024,7 @@ impl CodeActionCollection { pub fn add_ts_fix_action( &mut self, specifier: &ModuleSpecifier, + specifier_kind: NodeModuleKind, action: &tsc::CodeFixAction, diagnostic: &lsp::Diagnostic, language_server: &language_server::Inner, @@ -1034,7 +1042,8 @@ impl CodeActionCollection { "The action returned from TypeScript is unsupported.", )); } - let Some(action) = fix_ts_import_action(specifier, action, language_server) + let Some(action) = + fix_ts_import_action(specifier, specifier_kind, action, language_server) else { return Ok(()); }; @@ -1276,6 +1285,9 @@ impl CodeActionCollection { import_start_from_specifier(document, i) })?; let referrer = document.specifier(); + let referrer_kind = language_server + .is_cjs_resolver + .get_doc_module_kind(document); let file_referrer = document.file_referrer(); let config_data = language_server .config @@ -1298,10 +1310,11 @@ impl CodeActionCollection { if !config_data.byonm { return None; } - if !language_server - .resolver - .is_bare_package_json_dep(&dep_key, referrer) - { + if !language_server.resolver.is_bare_package_json_dep( + &dep_key, + referrer, + referrer_kind, + ) { return None; } NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()? 
@@ -1320,7 +1333,7 @@ impl CodeActionCollection { } if language_server .resolver - .npm_to_file_url(&npm_ref, document.specifier(), file_referrer) + .npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer) .is_some() { // The package import has types. diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index 1590743b2b..3ee8ae93e4 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -9,6 +9,7 @@ use super::jsr::CliJsrSearchApi; use super::lsp_custom; use super::npm::CliNpmSearchApi; use super::registries::ModuleRegistry; +use super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; use super::search::PackageSearchApi; use super::tsc; @@ -35,6 +36,7 @@ use deno_semver::package::PackageNv; use import_map::ImportMap; use indexmap::IndexSet; use lsp_types::CompletionList; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Regex; use tower_lsp::lsp_types as lsp; @@ -159,15 +161,17 @@ pub async fn get_import_completions( jsr_search_api: &CliJsrSearchApi, npm_search_api: &CliNpmSearchApi, documents: &Documents, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, maybe_import_map: Option<&ImportMap>, ) -> Option { let document = documents.get(specifier)?; + let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document); let file_referrer = document.file_referrer(); let (text, _, range) = document.get_maybe_dependency(position)?; let range = to_narrow_lsp_range(document.text_info(), &range); let resolved = resolver - .as_graph_resolver(file_referrer) + .as_cli_resolver(file_referrer) .resolve( &text, &Range { @@ -175,6 +179,7 @@ pub async fn get_import_completions( start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + specifier_kind, ResolutionMode::Execution, ) .ok(); @@ -201,7 +206,7 @@ pub async fn get_import_completions( // completions for import map specifiers Some(lsp::CompletionResponse::List(completion_list)) } else if let Some(completion_list) = - get_local_completions(specifier, &text, &range, resolver) + get_local_completions(specifier, specifier_kind, &text, &range, resolver) { // completions for local relative modules Some(lsp::CompletionResponse::List(completion_list)) @@ -355,24 +360,26 @@ fn get_import_map_completions( /// Return local completions that are relative to the base specifier. 
fn get_local_completions( - base: &ModuleSpecifier, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, text: &str, range: &lsp::Range, resolver: &LspResolver, ) -> Option { - if base.scheme() != "file" { + if referrer.scheme() != "file" { return None; } let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1]; let resolved_parent = resolver - .as_graph_resolver(Some(base)) + .as_cli_resolver(Some(referrer)) .resolve( parent, &Range { - specifier: base.clone(), + specifier: referrer.clone(), start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, ResolutionMode::Execution, ) .ok()?; @@ -385,7 +392,7 @@ fn get_local_completions( let de = de.ok()?; let label = de.path().file_name()?.to_string_lossy().to_string(); let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?; - if entry_specifier == *base { + if entry_specifier == *referrer { return None; } let full_text = format!("{parent}{label}"); @@ -905,6 +912,7 @@ mod tests { ModuleSpecifier::from_file_path(file_c).expect("could not create"); let actual = get_local_completions( &specifier, + NodeModuleKind::Esm, "./", &lsp::Range { start: lsp::Position { diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 34bf64446d..ea77e36bcf 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -4,6 +4,7 @@ use deno_ast::MediaType; use deno_config::deno_json::DenoJsonCache; use deno_config::deno_json::FmtConfig; use deno_config::deno_json::FmtOptionsConfig; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::deno_json::LintConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::TestConfig; @@ -1654,6 +1655,17 @@ impl ConfigData { self.member_dir.maybe_pkg_json() } + pub fn maybe_jsx_import_source_config( + &self, + ) -> Option { + self + .member_dir + .workspace + .to_maybe_jsx_import_source_config() + .ok() + .flatten() + } + pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool { specifier.as_str().starts_with(self.scope.as_str()) || self diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 83c00d27ed..e4fb82e58d 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1707,6 +1707,7 @@ mod tests { documents: Arc::new(documents), assets: Default::default(), config: Arc::new(config), + is_cjs_resolver: Default::default(), resolver, }, ) diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index ce13c32157..b62fa85533 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -3,7 +3,9 @@ use super::cache::calculate_fs_version; use super::cache::LspCache; use super::config::Config; +use super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; +use super::resolver::SingleReferrerGraphResolver; use super::testing::TestCollector; use super::testing::TestModule; use super::text::LineIndex; @@ -33,6 +35,7 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use indexmap::IndexMap; use indexmap::IndexSet; +use node_resolver::NodeModuleKind; use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; @@ -293,6 +296,8 @@ pub struct Document { /// Contains the last-known-good set of dependencies from parsing the module. config: Arc, dependencies: Arc>, + /// If this is maybe a CJS script and maybe not an ES module. + is_script: Option, // TODO(nayeemrmn): This is unused, use it for scope attribution for remote // modules. 
file_referrer: Option, @@ -323,6 +328,7 @@ impl Document { maybe_lsp_version: Option, maybe_language_id: Option, maybe_headers: Option>, + is_cjs_resolver: &LspIsCjsResolver, resolver: Arc, config: Arc, cache: &Arc, @@ -342,6 +348,7 @@ impl Document { maybe_headers.as_ref(), media_type, file_referrer.as_ref(), + is_cjs_resolver, &resolver, ) } else { @@ -367,6 +374,7 @@ impl Document { file_referrer.as_ref(), ), file_referrer, + is_script: maybe_module.as_ref().map(|m| m.is_script), maybe_types_dependency, line_index, maybe_language_id, @@ -388,6 +396,7 @@ impl Document { fn with_new_config( &self, + is_cjs_resolver: &LspIsCjsResolver, resolver: Arc, config: Arc, ) -> Arc { @@ -399,6 +408,7 @@ impl Document { let dependencies; let maybe_types_dependency; let maybe_parsed_source; + let is_script; let maybe_test_module_fut; if media_type != self.media_type { let parsed_source_result = @@ -408,6 +418,7 @@ impl Document { &parsed_source_result, self.maybe_headers.as_ref(), self.file_referrer.as_ref(), + is_cjs_resolver, &resolver, ) .ok(); @@ -415,6 +426,7 @@ impl Document { .as_ref() .map(|m| Arc::new(m.dependencies.clone())) .unwrap_or_default(); + is_script = maybe_module.as_ref().map(|m| m.is_script); maybe_types_dependency = maybe_module .as_ref() .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); @@ -422,10 +434,19 @@ impl Document { maybe_test_module_fut = get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); } else { - let graph_resolver = - resolver.as_graph_resolver(self.file_referrer.as_ref()); + let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref()); let npm_resolver = resolver.create_graph_npm_resolver(self.file_referrer.as_ref()); + let config_data = resolver.as_config_data(self.file_referrer.as_ref()); + let jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); + let resolver = SingleReferrerGraphResolver { + valid_referrer: &self.specifier, + referrer_kind: is_cjs_resolver + .get_lsp_referrer_kind(&self.specifier, self.is_script), + cli_resolver, + jsx_import_source_config: jsx_import_source_config.as_ref(), + }; dependencies = Arc::new( self .dependencies @@ -436,7 +457,7 @@ impl Document { d.with_new_resolver( s, &CliJsrUrlProvider, - Some(graph_resolver), + Some(&resolver), Some(&npm_resolver), ), ) @@ -446,10 +467,11 @@ impl Document { maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| { Arc::new(d.with_new_resolver( &CliJsrUrlProvider, - Some(graph_resolver), + Some(&resolver), Some(&npm_resolver), )) }); + is_script = self.is_script; maybe_parsed_source = self.maybe_parsed_source().cloned(); maybe_test_module_fut = self .maybe_test_module_fut @@ -461,6 +483,7 @@ impl Document { // updated properties dependencies, file_referrer: self.file_referrer.clone(), + is_script, maybe_types_dependency, maybe_navigation_tree: Mutex::new(None), // maintain - this should all be copies/clones @@ -485,6 +508,7 @@ impl Document { fn with_change( &self, + is_cjs_resolver: &LspIsCjsResolver, version: i32, changes: Vec, ) -> Result, AnyError> { @@ -518,6 +542,7 @@ impl Document { self.maybe_headers.as_ref(), media_type, self.file_referrer.as_ref(), + is_cjs_resolver, self.resolver.as_ref(), ) } else { @@ -541,6 +566,7 @@ impl Document { get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config); Ok(Arc::new(Self { config: self.config.clone(), + is_script: maybe_module.as_ref().map(|m| m.is_script), specifier: self.specifier.clone(), file_referrer: self.file_referrer.clone(), 
maybe_fs_version: self.maybe_fs_version.clone(), @@ -575,6 +601,7 @@ impl Document { ), maybe_language_id: self.maybe_language_id, dependencies: self.dependencies.clone(), + is_script: self.is_script, maybe_types_dependency: self.maybe_types_dependency.clone(), text: self.text.clone(), text_info_cell: once_cell::sync::OnceCell::new(), @@ -602,6 +629,7 @@ impl Document { ), maybe_language_id: self.maybe_language_id, dependencies: self.dependencies.clone(), + is_script: self.is_script, maybe_types_dependency: self.maybe_types_dependency.clone(), text: self.text.clone(), text_info_cell: once_cell::sync::OnceCell::new(), @@ -650,6 +678,13 @@ impl Document { }) } + /// If this is maybe a CJS script and maybe not an ES module. + /// + /// Use `LspIsCjsResolver` to determine for sure. + pub fn is_script(&self) -> Option { + self.is_script + } + pub fn line_index(&self) -> Arc { self.line_index.clone() } @@ -797,6 +832,7 @@ impl FileSystemDocuments { pub fn get( &self, specifier: &ModuleSpecifier, + is_cjs_resolver: &LspIsCjsResolver, resolver: &Arc, config: &Arc, cache: &Arc, @@ -820,7 +856,14 @@ impl FileSystemDocuments { }; if dirty { // attempt to update the file on the file system - self.refresh_document(specifier, resolver, config, cache, file_referrer) + self.refresh_document( + specifier, + is_cjs_resolver, + resolver, + config, + cache, + file_referrer, + ) } else { old_doc } @@ -831,6 +874,7 @@ impl FileSystemDocuments { fn refresh_document( &self, specifier: &ModuleSpecifier, + is_cjs_resolver: &LspIsCjsResolver, resolver: &Arc, config: &Arc, cache: &Arc, @@ -847,6 +891,7 @@ impl FileSystemDocuments { None, None, None, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -863,6 +908,7 @@ impl FileSystemDocuments { None, None, None, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -890,6 +936,7 @@ impl FileSystemDocuments { None, None, maybe_headers, + is_cjs_resolver, resolver.clone(), config.clone(), cache, @@ -930,6 +977,11 @@ pub struct Documents { /// The DENO_DIR that the documents looks for non-file based modules. cache: Arc, config: Arc, + /// Resolver for detecting if a document is CJS or ESM. + is_cjs_resolver: Arc, + /// A resolver that takes into account currently loaded import map and JSX + /// settings. + resolver: Arc, /// A flag that indicates that stated data is potentially invalid and needs to /// be recalculated before being considered valid. dirty: bool, @@ -937,9 +989,6 @@ pub struct Documents { open_docs: HashMap>, /// Documents stored on the file system. file_system_docs: Arc, - /// A resolver that takes into account currently loaded import map and JSX - /// settings. - resolver: Arc, /// The npm package requirements found in npm specifiers. npm_reqs_by_scope: Arc, BTreeSet>>, @@ -970,6 +1019,7 @@ impl Documents { // the cache for remote modules here in order to get the // x-typescript-types? 
None, + &self.is_cjs_resolver, self.resolver.clone(), self.config.clone(), &self.cache, @@ -1004,7 +1054,7 @@ impl Documents { )) })?; self.dirty = true; - let doc = doc.with_change(version, changes)?; + let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?; self.open_docs.insert(doc.specifier().clone(), doc.clone()); Ok(doc) } @@ -1133,6 +1183,7 @@ impl Documents { if let Some(old_doc) = old_doc { self.file_system_docs.get( specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1157,6 +1208,7 @@ impl Documents { } else { self.file_system_docs.get( &specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1215,12 +1267,15 @@ impl Documents { referrer: &ModuleSpecifier, file_referrer: Option<&ModuleSpecifier>, ) -> Vec> { - let document = self.get(referrer); - let file_referrer = document + let referrer_doc = self.get(referrer); + let file_referrer = referrer_doc .as_ref() .and_then(|d| d.file_referrer()) .or(file_referrer); - let dependencies = document.as_ref().map(|d| d.dependencies()); + let dependencies = referrer_doc.as_ref().map(|d| d.dependencies()); + let referrer_kind = self + .is_cjs_resolver + .get_maybe_doc_module_kind(referrer, referrer_doc.as_deref()); let mut results = Vec::new(); for raw_specifier in raw_specifiers { if raw_specifier.starts_with("asset:") { @@ -1237,31 +1292,35 @@ impl Documents { results.push(self.resolve_dependency( specifier, referrer, + referrer_kind, file_referrer, )); } else if let Some(specifier) = dep.maybe_code.maybe_specifier() { results.push(self.resolve_dependency( specifier, referrer, + referrer_kind, file_referrer, )); } else { results.push(None); } } else if let Ok(specifier) = - self.resolver.as_graph_resolver(file_referrer).resolve( + self.resolver.as_cli_resolver(file_referrer).resolve( raw_specifier, &deno_graph::Range { specifier: referrer.clone(), start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + referrer_kind, ResolutionMode::Types, ) { results.push(self.resolve_dependency( &specifier, referrer, + referrer_kind, file_referrer, )); } else { @@ -1280,7 +1339,11 @@ impl Documents { ) { self.config = Arc::new(config.clone()); self.cache = Arc::new(cache.clone()); + self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache)); self.resolver = resolver.clone(); + + node_resolver::PackageJsonThreadLocalCache::clear(); + { let fs_docs = &self.file_system_docs; // Clean up non-existent documents. 
@@ -1300,14 +1363,21 @@ impl Documents { if !config.specifier_enabled(doc.specifier()) { continue; } - *doc = doc.with_new_config(self.resolver.clone(), self.config.clone()); + *doc = doc.with_new_config( + &self.is_cjs_resolver, + self.resolver.clone(), + self.config.clone(), + ); } for mut doc in self.file_system_docs.docs.iter_mut() { if !config.specifier_enabled(doc.specifier()) { continue; } - *doc.value_mut() = - doc.with_new_config(self.resolver.clone(), self.config.clone()); + *doc.value_mut() = doc.with_new_config( + &self.is_cjs_resolver, + self.resolver.clone(), + self.config.clone(), + ); } self.open_docs = open_docs; let mut preload_count = 0; @@ -1324,6 +1394,7 @@ impl Documents { { fs_docs.refresh_document( specifier, + &self.is_cjs_resolver, &self.resolver, &self.config, &self.cache, @@ -1409,6 +1480,7 @@ impl Documents { &self, specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, file_referrer: Option<&ModuleSpecifier>, ) -> Option<(ModuleSpecifier, MediaType)> { if let Some(module_name) = specifier.as_str().strip_prefix("node:") { @@ -1422,10 +1494,12 @@ impl Documents { let mut specifier = specifier.clone(); let mut media_type = None; if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { - let (s, mt) = - self - .resolver - .npm_to_file_url(&npm_ref, referrer, file_referrer)?; + let (s, mt) = self.resolver.npm_to_file_url( + &npm_ref, + referrer, + referrer_kind, + file_referrer, + )?; specifier = s; media_type = Some(mt); } @@ -1435,7 +1509,8 @@ impl Documents { return Some((specifier, media_type)); }; if let Some(types) = doc.maybe_types_dependency().maybe_specifier() { - self.resolve_dependency(types, &specifier, file_referrer) + let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc); + self.resolve_dependency(types, &specifier, specifier_kind, file_referrer) } else { Some((doc.specifier().clone(), doc.media_type())) } @@ -1503,6 +1578,7 @@ fn parse_and_analyze_module( maybe_headers: Option<&HashMap>, media_type: MediaType, file_referrer: Option<&ModuleSpecifier>, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, ) -> (Option, Option) { let parsed_source_result = parse_source(specifier.clone(), text, media_type); @@ -1511,6 +1587,7 @@ fn parse_and_analyze_module( &parsed_source_result, maybe_headers, file_referrer, + is_cjs_resolver, resolver, ); (Some(parsed_source_result), Some(module_result)) @@ -1536,11 +1613,26 @@ fn analyze_module( parsed_source_result: &ParsedSourceResult, maybe_headers: Option<&HashMap>, file_referrer: Option<&ModuleSpecifier>, + is_cjs_resolver: &LspIsCjsResolver, resolver: &LspResolver, ) -> ModuleResult { match parsed_source_result { Ok(parsed_source) => { let npm_resolver = resolver.create_graph_npm_resolver(file_referrer); + let cli_resolver = resolver.as_cli_resolver(file_referrer); + let config_data = resolver.as_config_data(file_referrer); + let valid_referrer = specifier.clone(); + let jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); + let resolver = SingleReferrerGraphResolver { + valid_referrer: &valid_referrer, + referrer_kind: is_cjs_resolver.get_lsp_referrer_kind( + &specifier, + Some(parsed_source.compute_is_script()), + ), + cli_resolver, + jsx_import_source_config: jsx_import_source_config.as_ref(), + }; Ok(deno_graph::parse_module_from_ast( deno_graph::ParseModuleFromAstOptions { graph_kind: deno_graph::GraphKind::TypesOnly, @@ -1551,7 +1643,7 @@ fn analyze_module( // dynamic imports like 
import(`./dir/${something}`) in the LSP file_system: &deno_graph::source::NullFileSystem, jsr_url_provider: &CliJsrUrlProvider, - maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)), + maybe_resolver: Some(&resolver), maybe_npm_resolver: Some(&npm_resolver), }, )) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 2554fa34b1..b2bd72416a 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -22,6 +22,7 @@ use deno_semver::jsr::JsrPackageReqReference; use indexmap::Equivalent; use indexmap::IndexSet; use log::error; +use node_resolver::NodeModuleKind; use serde::Deserialize; use serde_json::from_value; use std::collections::BTreeMap; @@ -77,6 +78,7 @@ use super::parent_process_checker; use super::performance::Performance; use super::refactor; use super::registries::ModuleRegistry; +use super::resolver::LspIsCjsResolver; use super::resolver::LspResolver; use super::testing; use super::text; @@ -144,6 +146,7 @@ pub struct StateSnapshot { pub project_version: usize, pub assets: AssetsSnapshot, pub config: Arc, + pub is_cjs_resolver: Arc, pub documents: Arc, pub resolver: Arc, } @@ -203,6 +206,7 @@ pub struct Inner { pub documents: Documents, http_client_provider: Arc, initial_cwd: PathBuf, + pub is_cjs_resolver: Arc, jsr_search_api: CliJsrSearchApi, /// Handles module registries, which allow discovery of modules module_registry: ModuleRegistry, @@ -480,6 +484,7 @@ impl Inner { let initial_cwd = std::env::current_dir().unwrap_or_else(|_| { panic!("Could not resolve current working directory") }); + let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache)); Self { assets, @@ -491,6 +496,7 @@ impl Inner { documents, http_client_provider, initial_cwd: initial_cwd.clone(), + is_cjs_resolver, jsr_search_api, project_version: 0, task_queue: Default::default(), @@ -601,6 +607,7 @@ impl Inner { project_version: self.project_version, assets: self.assets.snapshot(), config: Arc::new(self.config.clone()), + is_cjs_resolver: self.is_cjs_resolver.clone(), documents: Arc::new(self.documents.clone()), resolver: self.resolver.snapshot(), }) @@ -622,6 +629,7 @@ impl Inner { } }); self.cache = LspCache::new(global_cache_url); + self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache)); let deno_dir = self.cache.deno_dir(); let workspace_settings = self.config.workspace_settings(); let maybe_root_path = self @@ -982,7 +990,7 @@ impl Inner { spawn(async move { let specifier = { let inner = ls.inner.read().await; - let resolver = inner.resolver.as_graph_resolver(Some(&referrer)); + let resolver = inner.resolver.as_cli_resolver(Some(&referrer)); let Ok(specifier) = resolver.resolve( &specifier, &deno_graph::Range { @@ -990,6 +998,7 @@ impl Inner { start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + NodeModuleKind::Esm, deno_graph::source::ResolutionMode::Types, ) else { return; @@ -1622,6 +1631,10 @@ impl Inner { let file_diagnostics = self .diagnostics_server .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version()); + let specifier_kind = asset_or_doc + .document() + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm); let mut includes_no_cache = false; for diagnostic in &fixable_diagnostics { match diagnostic.source.as_deref() { @@ -1660,7 +1673,13 @@ impl Inner { .await; for action in actions { code_actions - .add_ts_fix_action(&specifier, &action, diagnostic, self) + .add_ts_fix_action( + &specifier, + specifier_kind, + &action, + diagnostic, + self, + ) 
.map_err(|err| { error!("Unable to convert fix: {:#}", err); LspError::internal_error() @@ -1806,10 +1825,9 @@ impl Inner { error!("Unable to decode code action data: {:#}", err); LspError::invalid_params("The CodeAction's data is invalid.") })?; - let scope = self - .get_asset_or_document(&code_action_data.specifier) - .ok() - .and_then(|d| d.scope().cloned()); + let maybe_asset_or_doc = + self.get_asset_or_document(&code_action_data.specifier).ok(); + let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned()); let combined_code_actions = self .ts_server .get_combined_code_fix( @@ -1836,6 +1854,11 @@ impl Inner { let changes = if code_action_data.fix_id == "fixMissingImport" { fix_ts_import_changes( &code_action_data.specifier, + maybe_asset_or_doc + .as_ref() + .and_then(|d| d.document()) + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm), &combined_code_actions.changes, self, ) @@ -1889,6 +1912,10 @@ impl Inner { if kind_suffix == ".rewrite.function.returnType" { refactor_edit_info.edits = fix_ts_import_changes( &action_data.specifier, + asset_or_doc + .document() + .map(|d| self.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm), &refactor_edit_info.edits, self, ) @@ -2238,6 +2265,7 @@ impl Inner { &self.jsr_search_api, &self.npm_search_api, &self.documents, + &self.is_cjs_resolver, self.resolver.as_ref(), self .config diff --git a/cli/lsp/repl.rs b/cli/lsp/repl.rs index fa5809045e..b4aaa8cd0d 100644 --- a/cli/lsp/repl.rs +++ b/cli/lsp/repl.rs @@ -263,7 +263,7 @@ impl ReplLanguageServer { } fn get_document_uri(&self) -> Uri { - uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str()) + uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str()) .unwrap() } } diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index f5df24d575..37f63b912c 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,16 +2,18 @@ use dashmap::DashMap; use deno_ast::MediaType; -use deno_ast::ParsedSource; use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; +use deno_config::deno_json::JsxImportSourceConfig; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; use deno_core::url::Url; -use deno_graph::source::Resolver; +use deno_graph::source::ResolutionMode; use deno_graph::GraphImport; use deno_graph::ModuleSpecifier; +use deno_graph::Range; use deno_npm::NpmSystemInfo; +use deno_path_util::url_from_directory_path; use deno_path_util::url_to_file_path; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; @@ -24,6 +26,7 @@ use deno_semver::package::PackageReq; use indexmap::IndexMap; use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageChecker; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use std::borrow::Cow; use std::collections::BTreeMap; @@ -33,6 +36,7 @@ use std::collections::HashSet; use std::sync::Arc; use super::cache::LspCache; +use super::documents::Document; use super::jsr::JsrCacheResolver; use crate::args::create_default_npmrc; use crate::args::CacheSetting; @@ -53,21 +57,20 @@ use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; -use crate::resolver::CjsTracker; -use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; -use crate::resolver::CliGraphResolver; -use 
crate::resolver::CliGraphResolverOptions; use crate::resolver::CliNodeResolver; +use crate::resolver::CliResolver; +use crate::resolver::CliResolverOptions; +use crate::resolver::IsCjsResolver; use crate::resolver::WorkerCliNpmGraphResolver; use crate::tsc::into_specifier_and_media_type; +use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; #[derive(Debug, Clone)] struct LspScopeResolver { - cjs_tracker: Option>, - graph_resolver: Arc, + resolver: Arc, jsr_resolver: Option>, npm_resolver: Option>, node_resolver: Option>, @@ -81,8 +84,7 @@ struct LspScopeResolver { impl Default for LspScopeResolver { fn default() -> Self { Self { - cjs_tracker: None, - graph_resolver: create_graph_resolver(None, None, None), + resolver: create_cli_resolver(None, None, None), jsr_resolver: None, npm_resolver: None, node_resolver: None, @@ -103,7 +105,6 @@ impl LspScopeResolver { ) -> Self { let mut npm_resolver = None; let mut node_resolver = None; - let mut lsp_cjs_tracker = None; let fs = Arc::new(deno_fs::RealFs); let pkg_json_resolver = Arc::new(PackageJsonResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), @@ -118,14 +119,7 @@ impl LspScopeResolver { .await; if let Some(npm_resolver) = &npm_resolver { let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); - let cjs_tracker = create_cjs_tracker( - in_npm_pkg_checker.clone(), - pkg_json_resolver.clone(), - ); - lsp_cjs_tracker = - Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); node_resolver = Some(create_node_resolver( - cjs_tracker, fs.clone(), in_npm_pkg_checker, npm_resolver, @@ -133,7 +127,7 @@ impl LspScopeResolver { )); } } - let graph_resolver = create_graph_resolver( + let cli_resolver = create_cli_resolver( config_data.map(|d| d.as_ref()), npm_resolver.as_ref(), node_resolver.as_ref(), @@ -146,7 +140,9 @@ impl LspScopeResolver { cache.for_specifier(config_data.map(|d| d.scope.as_ref())), config_data.and_then(|d| d.lockfile.clone()), ))); - let npm_graph_resolver = graph_resolver.create_graph_npm_resolver(); + let npm_graph_resolver = cli_resolver.create_graph_npm_resolver(); + let maybe_jsx_import_source_config = + config_data.and_then(|d| d.maybe_jsx_import_source_config()); let graph_imports = config_data .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok()) .map(|imports| { @@ -154,11 +150,18 @@ impl LspScopeResolver { imports .into_iter() .map(|(referrer, imports)| { + let resolver = SingleReferrerGraphResolver { + valid_referrer: &referrer, + referrer_kind: NodeModuleKind::Esm, + cli_resolver: &cli_resolver, + jsx_import_source_config: maybe_jsx_import_source_config + .as_ref(), + }; let graph_import = GraphImport::new( &referrer, imports, &CliJsrUrlProvider, - Some(graph_resolver.as_ref()), + Some(&resolver), Some(&npm_graph_resolver), ); (referrer, graph_import) @@ -182,6 +185,8 @@ impl LspScopeResolver { .resolve_req_reference( &req_ref, &referrer, + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + NodeModuleKind::Esm, NodeResolutionMode::Types, ) .ok()?, @@ -195,8 +200,7 @@ impl LspScopeResolver { let package_json_deps_by_resolution = Arc::new(package_json_deps_by_resolution.unwrap_or_default()); Self { - cjs_tracker: lsp_cjs_tracker, - graph_resolver, + resolver: cli_resolver, jsr_resolver, npm_resolver, node_resolver, @@ -216,30 +220,22 @@ impl LspScopeResolver { deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), )); let mut 
node_resolver = None; - let mut lsp_cjs_tracker = None; if let Some(npm_resolver) = &npm_resolver { let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); - let cjs_tracker = create_cjs_tracker( - in_npm_pkg_checker.clone(), - pkg_json_resolver.clone(), - ); - lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); node_resolver = Some(create_node_resolver( - cjs_tracker, fs, in_npm_pkg_checker, npm_resolver, pkg_json_resolver.clone(), )); } - let graph_resolver = create_graph_resolver( + let graph_resolver = create_cli_resolver( self.config_data.as_deref(), npm_resolver.as_ref(), node_resolver.as_ref(), ); Arc::new(Self { - cjs_tracker: lsp_cjs_tracker, - graph_resolver, + resolver: graph_resolver, jsr_resolver: self.jsr_resolver.clone(), npm_resolver, node_resolver, @@ -334,12 +330,12 @@ impl LspResolver { } } - pub fn as_graph_resolver( + pub fn as_cli_resolver( &self, file_referrer: Option<&ModuleSpecifier>, - ) -> &dyn Resolver { + ) -> &CliResolver { let resolver = self.get_scope_resolver(file_referrer); - resolver.graph_resolver.as_ref() + resolver.resolver.as_ref() } pub fn create_graph_npm_resolver( @@ -347,15 +343,15 @@ impl LspResolver { file_referrer: Option<&ModuleSpecifier>, ) -> WorkerCliNpmGraphResolver { let resolver = self.get_scope_resolver(file_referrer); - resolver.graph_resolver.create_graph_npm_resolver() + resolver.resolver.create_graph_npm_resolver() } - pub fn maybe_cjs_tracker( + pub fn as_config_data( &self, file_referrer: Option<&ModuleSpecifier>, - ) -> Option<&Arc> { + ) -> Option<&Arc> { let resolver = self.get_scope_resolver(file_referrer); - resolver.cjs_tracker.as_ref() + resolver.config_data.as_ref() } pub fn maybe_node_resolver( @@ -429,13 +425,19 @@ impl LspResolver { &self, req_ref: &NpmPackageReqReference, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, file_referrer: Option<&ModuleSpecifier>, ) -> Option<(ModuleSpecifier, MediaType)> { let resolver = self.get_scope_resolver(file_referrer); let node_resolver = resolver.node_resolver.as_ref()?; Some(into_specifier_and_media_type(Some( node_resolver - .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) + .resolve_req_reference( + req_ref, + referrer, + referrer_kind, + NodeResolutionMode::Types, + ) .ok()?, ))) } @@ -478,6 +480,7 @@ impl LspResolver { &self, specifier_text: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, ) -> bool { let resolver = self.get_scope_resolver(Some(referrer)); let Some(node_resolver) = resolver.node_resolver.as_ref() else { @@ -487,6 +490,7 @@ impl LspResolver { .resolve_if_for_npm_pkg( specifier_text, referrer, + referrer_kind, NodeResolutionMode::Types, ) .ok() @@ -615,21 +619,6 @@ async fn create_npm_resolver( Some(create_cli_npm_resolver_for_lsp(options).await) } -fn create_cjs_tracker( - in_npm_pkg_checker: Arc, - pkg_json_resolver: Arc, -) -> Arc { - Arc::new(CjsTracker::new( - in_npm_pkg_checker, - pkg_json_resolver, - CjsTrackerOptions { - // todo(dsherret): support in the lsp by stabilizing the feature - // so that we don't have to pipe the config in here - unstable_detect_cjs: false, - }, - )) -} - fn create_in_npm_pkg_checker( npm_resolver: &Arc, ) -> Arc { @@ -649,7 +638,6 @@ fn create_in_npm_pkg_checker( } fn create_node_resolver( - cjs_tracker: Arc, fs: Arc, in_npm_pkg_checker: Arc, npm_resolver: &Arc, @@ -662,7 +650,6 @@ fn create_node_resolver( pkg_json_resolver.clone(), )); Arc::new(CliNodeResolver::new( - cjs_tracker.clone(), fs, in_npm_pkg_checker, node_resolver_inner, @@ 
-670,13 +657,12 @@ fn create_node_resolver( )) } -fn create_graph_resolver( +fn create_cli_resolver( config_data: Option<&ConfigData>, npm_resolver: Option<&Arc>, node_resolver: Option<&Arc>, -) -> Arc { - let workspace = config_data.map(|d| &d.member_dir.workspace); - Arc::new(CliGraphResolver::new(CliGraphResolverOptions { +) -> Arc { + Arc::new(CliResolver::new(CliResolverOptions { node_resolver: node_resolver.cloned(), npm_resolver: npm_resolver.cloned(), workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else( @@ -691,9 +677,6 @@ fn create_graph_resolver( )) }, ), - maybe_jsx_import_source_config: workspace.and_then(|workspace| { - workspace.to_maybe_jsx_import_source_config().ok().flatten() - }), maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()), bare_node_builtins_enabled: config_data .is_some_and(|d| d.unstable.contains("bare-node-builtins")), @@ -726,6 +709,141 @@ impl std::fmt::Debug for RedirectResolver { } } +#[derive(Debug)] +pub struct LspIsCjsResolver { + inner: IsCjsResolver, +} + +impl Default for LspIsCjsResolver { + fn default() -> Self { + LspIsCjsResolver::new(&Default::default()) + } +} + +impl LspIsCjsResolver { + pub fn new(cache: &LspCache) -> Self { + #[derive(Debug)] + struct LspInNpmPackageChecker { + global_cache_dir: ModuleSpecifier, + } + + impl LspInNpmPackageChecker { + pub fn new(cache: &LspCache) -> Self { + let npm_folder_path = cache.deno_dir().npm_folder_path(); + Self { + global_cache_dir: url_from_directory_path( + &canonicalize_path_maybe_not_exists(&npm_folder_path) + .unwrap_or(npm_folder_path), + ) + .unwrap_or_else(|_| { + ModuleSpecifier::parse("file:///invalid/").unwrap() + }), + } + } + } + + impl InNpmPackageChecker for LspInNpmPackageChecker { + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + if specifier.scheme() != "file" { + return false; + } + if specifier + .as_str() + .starts_with(self.global_cache_dir.as_str()) + { + return true; + } + specifier.as_str().contains("/node_modules/") + } + } + + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + + LspIsCjsResolver { + inner: IsCjsResolver::new( + Arc::new(LspInNpmPackageChecker::new(cache)), + pkg_json_resolver, + crate::resolver::IsCjsResolverOptions { + detect_cjs: true, + is_node_main: false, + }, + ), + } + } + + pub fn get_maybe_doc_module_kind( + &self, + specifier: &ModuleSpecifier, + maybe_document: Option<&Document>, + ) -> NodeModuleKind { + self.get_lsp_referrer_kind( + specifier, + maybe_document.and_then(|d| d.is_script()), + ) + } + + pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind { + self.get_lsp_referrer_kind(document.specifier(), document.is_script()) + } + + pub fn get_lsp_referrer_kind( + &self, + specifier: &ModuleSpecifier, + is_script: Option, + ) -> NodeModuleKind { + self.inner.get_lsp_referrer_kind(specifier, is_script) + } +} + +#[derive(Debug)] +pub struct SingleReferrerGraphResolver<'a> { + pub valid_referrer: &'a ModuleSpecifier, + pub referrer_kind: NodeModuleKind, + pub cli_resolver: &'a CliResolver, + pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>, +} + +impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> { + fn default_jsx_import_source(&self) -> Option { + self + .jsx_import_source_config + .and_then(|c| c.default_specifier.clone()) + } + + fn default_jsx_import_source_types(&self) -> Option { + self + 
.jsx_import_source_config + .and_then(|c| c.default_types_specifier.clone()) + } + + fn jsx_import_source_module(&self) -> &str { + self + .jsx_import_source_config + .map(|c| c.module.as_str()) + .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) + } + + fn resolve( + &self, + specifier_text: &str, + referrer_range: &Range, + mode: ResolutionMode, + ) -> Result { + // this resolver assumes it will only be used with a single referrer + // with the provided referrer kind + debug_assert_eq!(referrer_range.specifier, *self.valid_referrer); + self.cli_resolver.resolve( + specifier_text, + referrer_range, + self.referrer_kind, + mode, + ) + } +} + impl RedirectResolver { fn new( cache: Arc, @@ -842,45 +960,6 @@ impl RedirectResolver { } } -#[derive(Debug)] -pub struct LspCjsTracker { - cjs_tracker: Arc, -} - -impl LspCjsTracker { - pub fn new(cjs_tracker: Arc) -> Self { - Self { cjs_tracker } - } - - pub fn is_cjs( - &self, - specifier: &ModuleSpecifier, - media_type: MediaType, - maybe_parsed_source: Option<&ParsedSource>, - ) -> bool { - if let Some(module_kind) = - self.cjs_tracker.get_known_kind(specifier, media_type) - { - module_kind.is_cjs() - } else { - let maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script()); - maybe_is_script - .and_then(|is_script| { - self - .cjs_tracker - .is_cjs_with_known_is_script(specifier, media_type, is_script) - .ok() - }) - .unwrap_or_else(|| { - self - .cjs_tracker - .is_maybe_cjs(specifier, media_type) - .unwrap_or(false) - }) - } - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 6f63ced5be..c9b24176ad 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -69,6 +69,7 @@ use indexmap::IndexMap; use indexmap::IndexSet; use lazy_regex::lazy_regex; use log::error; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -4401,25 +4402,15 @@ fn op_load<'s>( None } else { let asset_or_document = state.get_asset_or_document(&specifier); - asset_or_document.map(|doc| { - let maybe_cjs_tracker = state - .state_snapshot - .resolver - .maybe_cjs_tracker(Some(&specifier)); - LoadResponse { - data: doc.text(), - script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), - version: state.script_version(&specifier), - is_cjs: maybe_cjs_tracker - .map(|t| { - t.is_cjs( - &specifier, - doc.media_type(), - doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()), - ) - }) - .unwrap_or(false), - } + asset_or_document.map(|doc| LoadResponse { + data: doc.text(), + script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), + version: state.script_version(&specifier), + is_cjs: doc + .document() + .map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d)) + .unwrap_or(NodeModuleKind::Esm) + == NodeModuleKind::Cjs, }) }; @@ -4662,6 +4653,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { let (types, _) = documents.resolve_dependency( types, specifier, + state + .state_snapshot + .is_cjs_resolver + .get_doc_module_kind(doc), doc.file_referrer(), )?; let types_doc = documents.get_or_load(&types, doc.file_referrer())?; @@ -5544,6 +5539,7 @@ mod tests { documents: Arc::new(documents), assets: Default::default(), config: Arc::new(config), + is_cjs_resolver: Default::default(), resolver, }); let performance = Arc::new(Performance::default()); diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 43c9e1aa07..f9c974d77e 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -27,8 +27,8 @@ use crate::node; use 
crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; use crate::resolver::CjsTracker; -use crate::resolver::CliGraphResolver; use crate::resolver::CliNodeResolver; +use crate::resolver::CliResolver; use crate::resolver::ModuleCodeStringSource; use crate::resolver::NotSupportedKindInNpmError; use crate::resolver::NpmModuleLoader; @@ -60,7 +60,6 @@ use deno_core::RequestedModuleType; use deno_core::ResolutionKind; use deno_core::SourceCodeCacheInfo; use deno_graph::source::ResolutionMode; -use deno_graph::source::Resolver; use deno_graph::GraphKind; use deno_graph::JsModule; use deno_graph::JsonModule; @@ -73,6 +72,7 @@ use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::errors::ClosestPkgJsonError; use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionMode; @@ -206,7 +206,6 @@ struct SharedCliModuleLoaderState { lib_worker: TsTypeLib, initial_cwd: PathBuf, is_inspecting: bool, - is_npm_main: bool, is_repl: bool, cjs_tracker: Arc, code_cache: Option>, @@ -220,7 +219,7 @@ struct SharedCliModuleLoaderState { npm_resolver: Arc, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, - resolver: Arc, + resolver: Arc, } pub struct CliModuleLoaderFactory { @@ -243,7 +242,7 @@ impl CliModuleLoaderFactory { npm_resolver: Arc, npm_module_loader: NpmModuleLoader, parsed_source_cache: Arc, - resolver: Arc, + resolver: Arc, ) -> Self { Self { shared: Arc::new(SharedCliModuleLoaderState { @@ -252,7 +251,6 @@ impl CliModuleLoaderFactory { lib_worker: options.ts_type_lib_worker(), initial_cwd: options.initial_cwd().to_path_buf(), is_inspecting: options.is_inspecting(), - is_npm_main: options.is_npm_main(), is_repl: matches!( options.sub_command(), DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) @@ -286,7 +284,6 @@ impl CliModuleLoaderFactory { Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { lib, is_worker, - is_npm_main: self.shared.is_npm_main, parent_permissions, permissions, graph_container: graph_container.clone(), @@ -295,13 +292,14 @@ impl CliModuleLoaderFactory { parsed_source_cache: self.shared.parsed_source_cache.clone(), shared: self.shared.clone(), }))); - let node_require_loader = Rc::new(CliNodeRequireLoader::new( - self.shared.emitter.clone(), - self.shared.fs.clone(), + let node_require_loader = Rc::new(CliNodeRequireLoader { + cjs_tracker: self.shared.cjs_tracker.clone(), + emitter: self.shared.emitter.clone(), + fs: self.shared.fs.clone(), graph_container, - self.shared.in_npm_pkg_checker.clone(), - self.shared.npm_resolver.clone(), - )); + in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(), + npm_resolver: self.shared.npm_resolver.clone(), + }); CreateModuleLoaderResult { module_loader, node_require_loader, @@ -343,7 +341,6 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { struct CliModuleLoaderInner { lib: TsTypeLib, - is_npm_main: bool, is_worker: bool, /// The initial set of permissions used to resolve the static imports in the /// worker. These are "allow all" for main worker, and parent thread @@ -450,7 +447,7 @@ impl let referrer = if referrer.is_empty() && self.shared.is_repl { // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL // and `Deno.core.evalContext` API. 
Ideally we should always have a referrer filled - "./$deno$repl.ts" + "./$deno$repl.mts" } else { referrer }; @@ -478,7 +475,12 @@ impl self .shared .node_resolver - .resolve(raw_specifier, referrer, NodeResolutionMode::Execution)? + .resolve( + raw_specifier, + referrer, + self.shared.cjs_tracker.get_referrer_kind(referrer), + NodeResolutionMode::Execution, + )? .into_url(), ); } @@ -508,6 +510,7 @@ impl start: deno_graph::Position::zeroed(), end: deno_graph::Position::zeroed(), }, + self.shared.cjs_tracker.get_referrer_kind(referrer), ResolutionMode::Execution, )?), }; @@ -518,6 +521,7 @@ impl return self.shared.node_resolver.resolve_req_reference( &reference, referrer, + self.shared.cjs_tracker.get_referrer_kind(referrer), NodeResolutionMode::Execution, ); } @@ -538,6 +542,7 @@ impl &package_folder, module.nv_reference.sub_path(), Some(referrer), + self.shared.cjs_tracker.get_referrer_kind(referrer), NodeResolutionMode::Execution, ) .with_context(|| { @@ -668,14 +673,11 @@ impl is_script, .. })) => { - // todo(dsherret): revert in https://github.com/denoland/deno/pull/26439 - if self.is_npm_main && *is_script - || self.shared.cjs_tracker.is_cjs_with_known_is_script( - specifier, - *media_type, - *is_script, - )? - { + if self.shared.cjs_tracker.is_cjs_with_known_is_script( + specifier, + *media_type, + *is_script, + )? { return Ok(Some(CodeOrDeferredEmit::Cjs { specifier, media_type: *media_type, @@ -1031,6 +1033,7 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { #[derive(Debug)] struct CliNodeRequireLoader { + cjs_tracker: Arc, emitter: Arc, fs: Arc, graph_container: TGraphContainer, @@ -1038,26 +1041,6 @@ struct CliNodeRequireLoader { npm_resolver: Arc, } -impl - CliNodeRequireLoader -{ - pub fn new( - emitter: Arc, - fs: Arc, - graph_container: TGraphContainer, - in_npm_pkg_checker: Arc, - npm_resolver: Arc, - ) -> Self { - Self { - emitter, - fs, - graph_container, - in_npm_pkg_checker, - npm_resolver, - } - } -} - impl NodeRequireLoader for CliNodeRequireLoader { @@ -1103,4 +1086,12 @@ impl NodeRequireLoader Ok(text) } } + + fn is_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + ) -> Result { + let media_type = MediaType::from_specifier(specifier); + self.cjs_tracker.is_maybe_cjs(specifier, media_type) + } } diff --git a/cli/node.rs b/cli/node.rs index 1d410a726a..8235745a91 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -62,10 +62,6 @@ pub struct CliCjsCodeAnalyzer { cjs_tracker: Arc, fs: deno_fs::FileSystemRc, parsed_source_cache: Option>, - // todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439 - // For example, this does not properly handle if cjs analysis was already done - // and has been cached. - is_npm_main: bool, } impl CliCjsCodeAnalyzer { @@ -74,14 +70,12 @@ impl CliCjsCodeAnalyzer { cjs_tracker: Arc, fs: deno_fs::FileSystemRc, parsed_source_cache: Option>, - is_npm_main: bool, ) -> Self { Self { cache, cjs_tracker, fs, parsed_source_cache, - is_npm_main, } } @@ -106,9 +100,7 @@ impl CliCjsCodeAnalyzer { } let cjs_tracker = self.cjs_tracker.clone(); - let is_npm_main = self.is_npm_main; - let is_maybe_cjs = - cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main; + let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?; let analysis = if is_maybe_cjs { let maybe_parsed_source = self .parsed_source_cache @@ -135,7 +127,7 @@ impl CliCjsCodeAnalyzer { parsed_source.specifier(), media_type, is_script, - )? 
|| is_script && is_npm_main; + )?; if is_cjs { let analysis = parsed_source.analyze_cjs(); Ok(CliCjsAnalysis::Cjs { diff --git a/cli/resolver.rs b/cli/resolver.rs index 710b975093..786e5d0dbc 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -4,7 +4,6 @@ use async_trait::async_trait; use dashmap::DashMap; use dashmap::DashSet; use deno_ast::MediaType; -use deno_ast::ModuleKind; use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; @@ -17,9 +16,7 @@ use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; use deno_graph::source::ResolutionMode; use deno_graph::source::ResolveError; -use deno_graph::source::Resolver; use deno_graph::source::UnknownBuiltInNodeModuleError; -use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE; use deno_graph::NpmLoadError; use deno_graph::NpmResolvePkgReqsResult; use deno_npm::resolution::NpmResolutionError; @@ -52,7 +49,6 @@ use std::path::PathBuf; use std::sync::Arc; use thiserror::Error; -use crate::args::JsxImportSourceConfig; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; @@ -108,7 +104,6 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { #[derive(Debug)] pub struct CliNodeResolver { - cjs_tracker: Arc, fs: Arc, in_npm_pkg_checker: Arc, node_resolver: Arc, @@ -117,14 +112,12 @@ pub struct CliNodeResolver { impl CliNodeResolver { pub fn new( - cjs_tracker: Arc, fs: Arc, in_npm_pkg_checker: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { - cjs_tracker, fs, in_npm_pkg_checker, node_resolver, @@ -140,9 +133,11 @@ impl CliNodeResolver { &self, specifier: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result, AnyError> { - let resolution_result = self.resolve(specifier, referrer, mode); + let resolution_result = + self.resolve(specifier, referrer, referrer_kind, mode); match resolution_result { Ok(res) => Ok(Some(res)), Err(err) => { @@ -213,35 +208,26 @@ impl CliNodeResolver { &self, specifier: &str, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result { - let referrer_kind = if self - .cjs_tracker - .is_maybe_cjs(referrer, MediaType::from_specifier(referrer)) - .map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))? 
- { - NodeModuleKind::Cjs - } else { - NodeModuleKind::Esm - }; - - let res = - self - .node_resolver - .resolve(specifier, referrer, referrer_kind, mode)?; - Ok(res) + self + .node_resolver + .resolve(specifier, referrer, referrer_kind, mode) } pub fn resolve_req_reference( &self, req_ref: &NpmPackageReqReference, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result { self.resolve_req_with_sub_path( req_ref.req(), req_ref.sub_path(), referrer, + referrer_kind, mode, ) } @@ -251,6 +237,7 @@ impl CliNodeResolver { req: &PackageReq, sub_path: Option<&str>, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result { let package_folder = self @@ -260,6 +247,7 @@ impl CliNodeResolver { &package_folder, sub_path, Some(referrer), + referrer_kind, mode, ); match resolution_result { @@ -284,12 +272,14 @@ impl CliNodeResolver { package_folder: &Path, sub_path: Option<&str>, maybe_referrer: Option<&ModuleSpecifier>, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result { self.node_resolver.resolve_package_subpath_from_deno_module( package_folder, sub_path, maybe_referrer, + referrer_kind, mode, ) } @@ -419,10 +409,6 @@ impl NpmModuleLoader { } } -pub struct CjsTrackerOptions { - pub unstable_detect_cjs: bool, -} - /// Keeps track of what module specifiers were resolved as CJS. /// /// Modules that are `.js` or `.ts` are only known to be CJS or @@ -430,22 +416,22 @@ pub struct CjsTrackerOptions { /// will be "maybe CJS" until they're loaded. #[derive(Debug)] pub struct CjsTracker { - in_npm_pkg_checker: Arc, - pkg_json_resolver: Arc, - unstable_detect_cjs: bool, - known: DashMap, + is_cjs_resolver: IsCjsResolver, + known: DashMap, } impl CjsTracker { pub fn new( in_npm_pkg_checker: Arc, pkg_json_resolver: Arc, - options: CjsTrackerOptions, + options: IsCjsResolverOptions, ) -> Self { Self { - in_npm_pkg_checker, - pkg_json_resolver, - unstable_detect_cjs: options.unstable_detect_cjs, + is_cjs_resolver: IsCjsResolver::new( + in_npm_pkg_checker, + pkg_json_resolver, + options, + ), known: Default::default(), } } @@ -485,47 +471,90 @@ impl CjsTracker { .get_known_kind_with_is_script(specifier, media_type, is_script) { Some(kind) => kind, - None => self.check_based_on_pkg_json(specifier)?, + None => self.is_cjs_resolver.check_based_on_pkg_json(specifier)?, }; - Ok(kind.is_cjs()) + Ok(kind == NodeModuleKind::Cjs) } pub fn get_known_kind( &self, specifier: &ModuleSpecifier, media_type: MediaType, - ) -> Option { + ) -> Option { self.get_known_kind_with_is_script(specifier, media_type, None) } + pub fn get_referrer_kind( + &self, + specifier: &ModuleSpecifier, + ) -> NodeModuleKind { + if specifier.scheme() != "file" { + return NodeModuleKind::Esm; + } + self + .get_known_kind(specifier, MediaType::from_specifier(specifier)) + .unwrap_or(NodeModuleKind::Esm) + } + fn get_known_kind_with_is_script( &self, specifier: &ModuleSpecifier, media_type: MediaType, is_script: Option, - ) -> Option { - if specifier.scheme() != "file" { - return Some(ModuleKind::Esm); - } + ) -> Option { + self.is_cjs_resolver.get_known_kind_with_is_script( + specifier, + media_type, + is_script, + &self.known, + ) + } +} - match media_type { - MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm), - MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs), +#[derive(Debug)] +pub struct IsCjsResolverOptions { + pub detect_cjs: bool, + pub is_node_main: bool, +} + +#[derive(Debug)] +pub struct 
IsCjsResolver { + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, + options: IsCjsResolverOptions, +} + +impl IsCjsResolver { + pub fn new( + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, + options: IsCjsResolverOptions, + ) -> Self { + Self { + in_npm_pkg_checker, + pkg_json_resolver, + options, + } + } + + pub fn get_lsp_referrer_kind( + &self, + specifier: &ModuleSpecifier, + is_script: Option, + ) -> NodeModuleKind { + if specifier.scheme() != "file" { + return NodeModuleKind::Esm; + } + match MediaType::from_specifier(specifier) { + MediaType::Mts | MediaType::Mjs | MediaType::Dmts => NodeModuleKind::Esm, + MediaType::Cjs | MediaType::Cts | MediaType::Dcts => NodeModuleKind::Cjs, MediaType::Dts => { // dts files are always determined based on the package.json because // they contain imports/exports even when considered CJS - if let Some(value) = self.known.get(specifier).map(|v| *v) { - Some(value) - } else { - let value = self.check_based_on_pkg_json(specifier).ok(); - if let Some(value) = value { - self.known.insert(specifier.clone(), value); - } - Some(value.unwrap_or(ModuleKind::Esm)) - } + self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm) } MediaType::Wasm | - MediaType::Json => Some(ModuleKind::Esm), + MediaType::Json => NodeModuleKind::Esm, MediaType::JavaScript | MediaType::Jsx | MediaType::TypeScript @@ -534,18 +563,63 @@ impl CjsTracker { | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => { - if let Some(value) = self.known.get(specifier).map(|v| *v) { - if value.is_cjs() && is_script == Some(false) { + match is_script { + Some(true) => self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm), + Some(false) | None => NodeModuleKind::Esm, + } + } + } + } + + fn get_known_kind_with_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: Option, + known_cache: &DashMap, + ) -> Option { + if specifier.scheme() != "file" { + return Some(NodeModuleKind::Esm); + } + + match media_type { + MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(NodeModuleKind::Esm), + MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(NodeModuleKind::Cjs), + MediaType::Dts => { + // dts files are always determined based on the package.json because + // they contain imports/exports even when considered CJS + if let Some(value) = known_cache.get(specifier).map(|v| *v) { + Some(value) + } else { + let value = self.check_based_on_pkg_json(specifier).ok(); + if let Some(value) = value { + known_cache.insert(specifier.clone(), value); + } + Some(value.unwrap_or(NodeModuleKind::Esm)) + } + } + MediaType::Wasm | + MediaType::Json => Some(NodeModuleKind::Esm), + MediaType::JavaScript + | MediaType::Jsx + | MediaType::TypeScript + | MediaType::Tsx + // treat these as unknown + | MediaType::Css + | MediaType::SourceMap + | MediaType::Unknown => { + if let Some(value) = known_cache.get(specifier).map(|v| *v) { + if value == NodeModuleKind::Cjs && is_script == Some(false) { // we now know this is actually esm - self.known.insert(specifier.clone(), ModuleKind::Esm); - Some(ModuleKind::Esm) + known_cache.insert(specifier.clone(), NodeModuleKind::Esm); + Some(NodeModuleKind::Esm) } else { Some(value) } } else if is_script == Some(false) { // we know this is esm - self.known.insert(specifier.clone(), ModuleKind::Esm); - Some(ModuleKind::Esm) + known_cache.insert(specifier.clone(), NodeModuleKind::Esm); + Some(NodeModuleKind::Esm) } else { None } @@ -556,27 +630,38 @@ impl CjsTracker { fn check_based_on_pkg_json( &self, 
specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { if self.in_npm_pkg_checker.in_npm_package(specifier) { if let Some(pkg_json) = self.pkg_json_resolver.get_closest_package_json(specifier)? { let is_file_location_cjs = pkg_json.typ != "module"; - Ok(ModuleKind::from_is_cjs(is_file_location_cjs)) + Ok(if is_file_location_cjs { + NodeModuleKind::Cjs + } else { + NodeModuleKind::Esm + }) } else { - Ok(ModuleKind::Cjs) + Ok(NodeModuleKind::Cjs) } - } else if self.unstable_detect_cjs { + } else if self.options.detect_cjs || self.options.is_node_main { if let Some(pkg_json) = self.pkg_json_resolver.get_closest_package_json(specifier)? { - let is_cjs_type = pkg_json.typ == "commonjs"; - Ok(ModuleKind::from_is_cjs(is_cjs_type)) + let is_cjs_type = pkg_json.typ == "commonjs" + || self.options.is_node_main && pkg_json.typ == "none"; + Ok(if is_cjs_type { + NodeModuleKind::Cjs + } else { + NodeModuleKind::Esm + }) + } else if self.options.is_node_main { + Ok(NodeModuleKind::Cjs) } else { - Ok(ModuleKind::Esm) + Ok(NodeModuleKind::Esm) } } else { - Ok(ModuleKind::Esm) + Ok(NodeModuleKind::Esm) } } } @@ -587,48 +672,33 @@ pub type CliSloppyImportsResolver = /// A resolver that takes care of resolution, taking into account loaded /// import map, JSX settings. #[derive(Debug)] -pub struct CliGraphResolver { +pub struct CliResolver { node_resolver: Option>, npm_resolver: Option>, sloppy_imports_resolver: Option>, workspace_resolver: Arc, - maybe_default_jsx_import_source: Option, - maybe_default_jsx_import_source_types: Option, - maybe_jsx_import_source_module: Option, maybe_vendor_specifier: Option, found_package_json_dep_flag: AtomicFlag, bare_node_builtins_enabled: bool, warned_pkgs: DashSet, } -pub struct CliGraphResolverOptions<'a> { +pub struct CliResolverOptions<'a> { pub node_resolver: Option>, pub npm_resolver: Option>, pub sloppy_imports_resolver: Option>, pub workspace_resolver: Arc, pub bare_node_builtins_enabled: bool, - pub maybe_jsx_import_source_config: Option, pub maybe_vendor_dir: Option<&'a PathBuf>, } -impl CliGraphResolver { - pub fn new(options: CliGraphResolverOptions) -> Self { +impl CliResolver { + pub fn new(options: CliResolverOptions) -> Self { Self { node_resolver: options.node_resolver, npm_resolver: options.npm_resolver, sloppy_imports_resolver: options.sloppy_imports_resolver, workspace_resolver: options.workspace_resolver, - maybe_default_jsx_import_source: options - .maybe_jsx_import_source_config - .as_ref() - .and_then(|c| c.default_specifier.clone()), - maybe_default_jsx_import_source_types: options - .maybe_jsx_import_source_config - .as_ref() - .and_then(|c| c.default_types_specifier.clone()), - maybe_jsx_import_source_module: options - .maybe_jsx_import_source_config - .map(|c| c.module), maybe_vendor_specifier: options .maybe_vendor_dir .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()), @@ -638,10 +708,6 @@ impl CliGraphResolver { } } - pub fn as_graph_resolver(&self) -> &dyn Resolver { - self - } - pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver { WorkerCliNpmGraphResolver { npm_resolver: self.npm_resolver.as_ref(), @@ -649,28 +715,12 @@ impl CliGraphResolver { bare_node_builtins_enabled: self.bare_node_builtins_enabled, } } -} -impl Resolver for CliGraphResolver { - fn default_jsx_import_source(&self) -> Option { - self.maybe_default_jsx_import_source.clone() - } - - fn default_jsx_import_source_types(&self) -> Option { - self.maybe_default_jsx_import_source_types.clone() - } - - fn jsx_import_source_module(&self) -> &str 
{ - self - .maybe_jsx_import_source_module - .as_deref() - .unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE) - } - - fn resolve( + pub fn resolve( &self, raw_specifier: &str, referrer_range: &deno_graph::Range, + referrer_kind: NodeModuleKind, mode: ResolutionMode, ) -> Result { fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode { @@ -686,7 +736,7 @@ impl Resolver for CliGraphResolver { if let Some(node_resolver) = self.node_resolver.as_ref() { if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) { return node_resolver - .resolve(raw_specifier, referrer, to_node_mode(mode)) + .resolve(raw_specifier, referrer, referrer_kind, to_node_mode(mode)) .map(|res| res.into_url()) .map_err(|e| ResolveError::Other(e.into())); } @@ -759,6 +809,7 @@ impl Resolver for CliGraphResolver { pkg_json.dir_path(), sub_path.as_deref(), Some(referrer), + referrer_kind, to_node_mode(mode), ) .map_err(|e| ResolveError::Other(e.into())), @@ -800,6 +851,7 @@ impl Resolver for CliGraphResolver { pkg_folder, sub_path.as_deref(), Some(referrer), + referrer_kind, to_node_mode(mode), ) .map_err(|e| ResolveError::Other(e.into())) @@ -847,6 +899,7 @@ impl Resolver for CliGraphResolver { pkg_folder, npm_req_ref.sub_path(), Some(referrer), + referrer_kind, to_node_mode(mode), ) .map_err(|e| ResolveError::Other(e.into())); @@ -855,7 +908,12 @@ impl Resolver for CliGraphResolver { // do npm resolution for byonm if is_byonm { return node_resolver - .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode)) + .resolve_req_reference( + &npm_req_ref, + referrer, + referrer_kind, + to_node_mode(mode), + ) .map_err(|err| err.into()); } } @@ -869,7 +927,12 @@ impl Resolver for CliGraphResolver { // If byonm, check if the bare specifier resolves to an npm package if is_byonm && referrer.scheme() == "file" { let maybe_resolution = node_resolver - .resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode)) + .resolve_if_for_npm_pkg( + raw_specifier, + referrer, + referrer_kind, + to_node_mode(mode), + ) .map_err(ResolveError::Other)?; if let Some(res) = maybe_resolution { match res { diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index 27c8499ea2..ed80eb17b1 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -528,7 +528,6 @@ "bare-node-builtins", "byonm", "cron", - "detect-cjs", "ffi", "fs", "fmt-component", diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 960aad1578..b48e1c97ce 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -720,7 +720,6 @@ impl<'a> DenoCompileBinaryWriter<'a> { unstable_config: UnstableConfig { legacy_flag_enabled: false, bare_node_builtins: cli_options.unstable_bare_node_builtins(), - detect_cjs: cli_options.unstable_detect_cjs(), sloppy_imports: cli_options.unstable_sloppy_imports(), features: cli_options.unstable_features(), }, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index bb0ab423dd..15937c7aee 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -45,6 +45,8 @@ use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use import_map::parse_from_json; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use serialization::DenoCompileModuleSource; use std::borrow::Cow; @@ -76,9 +78,9 @@ use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; use 
crate::npm::CreateInNpmPkgCheckerOptions; use crate::resolver::CjsTracker; -use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNodeResolver; +use crate::resolver::IsCjsResolverOptions; use crate::resolver::NpmModuleLoader; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; @@ -146,13 +148,27 @@ impl ModuleLoader for EmbeddedModuleLoader { type_error(format!("Referrer uses invalid specifier: {}", err)) })? }; + let referrer_kind = if self + .shared + .cjs_tracker + .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))? + { + NodeModuleKind::Cjs + } else { + NodeModuleKind::Esm + }; if self.shared.node_resolver.in_npm_package(&referrer) { return Ok( self .shared .node_resolver - .resolve(raw_specifier, &referrer, NodeResolutionMode::Execution)? + .resolve( + raw_specifier, + &referrer, + referrer_kind, + NodeResolutionMode::Execution, + )? .into_url(), ); } @@ -178,6 +194,7 @@ impl ModuleLoader for EmbeddedModuleLoader { pkg_json.dir_path(), sub_path.as_deref(), Some(&referrer), + referrer_kind, NodeResolutionMode::Execution, )?, ), @@ -192,6 +209,7 @@ impl ModuleLoader for EmbeddedModuleLoader { req, sub_path.as_deref(), &referrer, + referrer_kind, NodeResolutionMode::Execution, ) } @@ -211,6 +229,7 @@ impl ModuleLoader for EmbeddedModuleLoader { pkg_folder, sub_path.as_deref(), Some(&referrer), + referrer_kind, NodeResolutionMode::Execution, )?, ) @@ -224,6 +243,7 @@ impl ModuleLoader for EmbeddedModuleLoader { return self.shared.node_resolver.resolve_req_reference( &reference, &referrer, + referrer_kind, NodeResolutionMode::Execution, ); } @@ -250,6 +270,7 @@ impl ModuleLoader for EmbeddedModuleLoader { let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg( raw_specifier, &referrer, + referrer_kind, NodeResolutionMode::Execution, )?; if let Some(res) = maybe_res { @@ -429,6 +450,14 @@ impl NodeRequireLoader for EmbeddedModuleLoader { ) -> Result { Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?) 
} + + fn is_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + ) -> Result { + let media_type = MediaType::from_specifier(specifier); + self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type) + } } struct StandaloneModuleLoaderFactory { @@ -628,14 +657,14 @@ pub async fn run(data: StandaloneData) -> Result { let cjs_tracker = Arc::new(CjsTracker::new( in_npm_pkg_checker.clone(), pkg_json_resolver.clone(), - CjsTrackerOptions { - unstable_detect_cjs: metadata.unstable_config.detect_cjs, + IsCjsResolverOptions { + detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(), + is_node_main: false, }, )); let cache_db = Caches::new(deno_dir_provider.clone()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); let cli_node_resolver = Arc::new(CliNodeResolver::new( - cjs_tracker.clone(), fs.clone(), in_npm_pkg_checker.clone(), node_resolver.clone(), @@ -646,7 +675,6 @@ pub async fn run(data: StandaloneData) -> Result { cjs_tracker.clone(), fs.clone(), None, - false, ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index f593332475..2a554c1335 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -480,7 +480,7 @@ fn filter_coverages( .filter(|e| { let is_internal = e.url.starts_with("ext:") || e.url.ends_with("__anonymous__") - || e.url.ends_with("$deno$test.js") + || e.url.ends_with("$deno$test.mjs") || e.url.ends_with(".snap") || is_supported_test_path(Path::new(e.url.as_str())) || doc_test_re.is_match(e.url.as_str()) diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index ed86e86c79..1655f0a322 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -1396,6 +1396,7 @@ mod tests { .env_clear() // use the deno binary in the target directory .env("PATH", test_util::target_dir()) + .env("RUST_BACKTRACE", "1") .spawn() .unwrap() .wait() diff --git a/cli/tools/jupyter/mod.rs b/cli/tools/jupyter/mod.rs index 0ffd0da1ee..732f95c49f 100644 --- a/cli/tools/jupyter/mod.rs +++ b/cli/tools/jupyter/mod.rs @@ -61,7 +61,7 @@ pub async fn kernel( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; let main_module = - resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd()) + resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd()) .unwrap(); // TODO(bartlomieju): should we run with all permissions? 
let permissions = diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index e096b486ef..d8edf24048 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -63,7 +63,7 @@ pub use rules::LintRuleProvider; const JSON_SCHEMA_VERSION: u8 = 1; -static STDIN_FILE_NAME: &str = "$deno$stdin.ts"; +static STDIN_FILE_NAME: &str = "$deno$stdin.mts"; pub async fn lint( flags: Arc, diff --git a/cli/tools/lint/rules/no_sloppy_imports.rs b/cli/tools/lint/rules/no_sloppy_imports.rs index 2f60875885..94bf9a7c67 100644 --- a/cli/tools/lint/rules/no_sloppy_imports.rs +++ b/cli/tools/lint/rules/no_sloppy_imports.rs @@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule { captures: Default::default(), }; + // fill this and capture the sloppy imports in the resolver deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions { graph_kind: deno_graph::GraphKind::All, specifier: context.specifier().clone(), diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 23b0f11ac5..8e05c4abbc 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -7,7 +7,7 @@ use crate::cdp; use crate::colors; use crate::lsp::ReplLanguageServer; use crate::npm::CliNpmResolver; -use crate::resolver::CliGraphResolver; +use crate::resolver::CliResolver; use crate::tools::test::report_tests; use crate::tools::test::reporters::PrettyTestReporter; use crate::tools::test::reporters::TestReporter; @@ -44,12 +44,12 @@ use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_core::PollEventLoopOptions; use deno_graph::source::ResolutionMode; -use deno_graph::source::Resolver; use deno_graph::Position; use deno_graph::PositionRange; use deno_graph::SpecifierWithRange; use deno_runtime::worker::MainWorker; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::NodeModuleKind; use once_cell::sync::Lazy; use regex::Match; use regex::Regex; @@ -180,7 +180,7 @@ struct ReplJsxState { pub struct ReplSession { npm_resolver: Arc, - resolver: Arc, + resolver: Arc, pub worker: MainWorker, session: LocalInspectorSession, pub context_id: u64, @@ -199,7 +199,7 @@ impl ReplSession { pub async fn initialize( cli_options: &CliOptions, npm_resolver: Arc, - resolver: Arc, + resolver: Arc, mut worker: MainWorker, main_module: ModuleSpecifier, test_event_receiver: TestEventReceiver, @@ -245,7 +245,7 @@ impl ReplSession { assert_ne!(context_id, 0); let referrer = - deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd()) + deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd()) .unwrap(); let cwd_url = @@ -712,7 +712,12 @@ impl ReplSession { .flat_map(|i| { self .resolver - .resolve(i, &referrer_range, ResolutionMode::Execution) + .resolve( + i, + &referrer_range, + NodeModuleKind::Esm, + ResolutionMode::Execution, + ) .ok() .or_else(|| ModuleSpecifier::parse(i).ok()) }) diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs index 6cebedd012..373c207d69 100644 --- a/cli/tools/run/hmr.rs +++ b/cli/tools/run/hmr.rs @@ -4,8 +4,6 @@ use std::collections::HashMap; use std::path::PathBuf; use std::sync::Arc; -use deno_ast::MediaType; -use deno_ast::ModuleKind; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; @@ -18,7 +16,6 @@ use tokio::select; use crate::cdp; use crate::emit::Emitter; -use crate::resolver::CjsTracker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; @@ -63,7 +60,6 @@ pub struct HmrRunner { session: LocalInspectorSession, 
watcher_communicator: Arc, script_ids: HashMap, - cjs_tracker: Arc, emitter: Arc, } @@ -146,7 +142,6 @@ impl crate::worker::HmrRunner for HmrRunner { let source_code = self.emitter.load_and_emit_for_hmr( &module_url, - ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?), ).await?; let mut tries = 1; @@ -179,14 +174,12 @@ impl crate::worker::HmrRunner for HmrRunner { impl HmrRunner { pub fn new( - cjs_tracker: Arc, emitter: Arc, session: LocalInspectorSession, watcher_communicator: Arc, ) -> Self { Self { session, - cjs_tracker, emitter, watcher_communicator, script_ids: HashMap::new(), diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 52c9134dad..68d099253a 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -121,8 +121,8 @@ delete Object.prototype.__proto__; /** @type {Map} */ const sourceFileCache = new Map(); - /** @type {Map} */ - const sourceTextCache = new Map(); + /** @type {Map} */ + const scriptSnapshotCache = new Map(); /** @type {Map} */ const sourceRefCounts = new Map(); @@ -133,9 +133,6 @@ delete Object.prototype.__proto__; /** @type {Map} */ const isNodeSourceFileCache = new Map(); - /** @type {Map} */ - const isCjsCache = new Map(); - // Maps asset specifiers to the first scope that the asset was loaded into. /** @type {Map} */ const assetScopes = new Map(); @@ -210,12 +207,13 @@ delete Object.prototype.__proto__; const mapKey = path + key; let sourceFile = documentRegistrySourceFileCache.get(mapKey); if (!sourceFile || sourceFile.version !== version) { + const isCjs = /** @type {any} */ (scriptSnapshot).isCjs; sourceFile = ts.createLanguageServiceSourceFile( fileName, scriptSnapshot, { ...getCreateSourceFileOptions(sourceFileOptions), - impliedNodeFormat: (isCjsCache.get(fileName) ?? false) + impliedNodeFormat: isCjs ? ts.ModuleKind.CommonJS : ts.ModuleKind.ESNext, // in the lsp we want to be able to show documentation @@ -320,7 +318,7 @@ delete Object.prototype.__proto__; if (lastRequestMethod != "cleanupSemanticCache") { const mapKey = path + key; documentRegistrySourceFileCache.delete(mapKey); - sourceTextCache.delete(path); + scriptSnapshotCache.delete(path); ops.op_release(path); } } else { @@ -624,8 +622,6 @@ delete Object.prototype.__proto__; `"data" is unexpectedly null for "${specifier}".`, ); - isCjsCache.set(specifier, isCjs); - sourceFile = ts.createSourceFile( specifier, data, @@ -699,7 +695,7 @@ delete Object.prototype.__proto__; /** @type {[string, ts.Extension] | undefined} */ const resolved = ops.op_resolve( containingFilePath, - isCjsCache.get(containingFilePath) ?? false, + containingFileMode === ts.ModuleKind.CommonJS, [fileReference.fileName], )?.[0]; if (resolved) { @@ -723,7 +719,14 @@ delete Object.prototype.__proto__; } }); }, - resolveModuleNames(specifiers, base) { + resolveModuleNames( + specifiers, + base, + _reusedNames, + _redirectedReference, + _options, + containingSourceFile, + ) { if (logDebug) { debug(`host.resolveModuleNames()`); debug(` base: ${base}`); @@ -732,7 +735,7 @@ delete Object.prototype.__proto__; /** @type {Array<[string, ts.Extension] | undefined>} */ const resolved = ops.op_resolve( base, - isCjsCache.get(base) ?? 
false, + containingSourceFile?.impliedNodeFormat === ts.ModuleKind.CommonJS, specifiers, ); if (resolved) { @@ -814,19 +817,19 @@ delete Object.prototype.__proto__; return ts.ScriptSnapshot.fromString(sourceFile.text); } } - let sourceText = sourceTextCache.get(specifier); - if (sourceText == undefined) { + let scriptSnapshot = scriptSnapshotCache.get(specifier); + if (scriptSnapshot == undefined) { /** @type {{ data: string, version: string, isCjs: boolean }} */ const fileInfo = ops.op_load(specifier); if (!fileInfo) { return undefined; } - isCjsCache.set(specifier, fileInfo.isCjs); - sourceTextCache.set(specifier, fileInfo.data); + scriptSnapshot = ts.ScriptSnapshot.fromString(fileInfo.data); + scriptSnapshot.isCjs = fileInfo.isCjs; + scriptSnapshotCache.set(specifier, scriptSnapshot); scriptVersionCache.set(specifier, fileInfo.version); - sourceText = fileInfo.data; } - return ts.ScriptSnapshot.fromString(sourceText); + return scriptSnapshot; }, }; @@ -1238,7 +1241,7 @@ delete Object.prototype.__proto__; closed = true; } scriptVersionCache.delete(script); - sourceTextCache.delete(script); + scriptSnapshotCache.delete(script); } if (newConfigsByScope || opened || closed) { diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index dc7fc38f7a..a569061625 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -343,31 +343,36 @@ impl TypeCheckingCjsTracker { media_type: MediaType, code: &Arc, ) -> bool { - if let Some(module_kind) = - self.cjs_tracker.get_known_kind(specifier, media_type) - { - module_kind.is_cjs() - } else { - let maybe_is_script = self - .module_info_cache - .as_module_analyzer() - .analyze_sync(specifier, media_type, code) - .ok() - .map(|info| info.is_script); - maybe_is_script - .and_then(|is_script| { - self - .cjs_tracker - .is_cjs_with_known_is_script(specifier, media_type, is_script) - .ok() - }) - .unwrap_or_else(|| { - self - .cjs_tracker - .is_maybe_cjs(specifier, media_type) - .unwrap_or(false) - }) - } + let maybe_is_script = self + .module_info_cache + .as_module_analyzer() + .analyze_sync(specifier, media_type, code) + .ok() + .map(|info| info.is_script); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + + pub fn is_cjs_with_known_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: bool, + ) -> Result { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) } } @@ -627,8 +632,12 @@ fn op_load_inner( match module { Module::Js(module) => { media_type = module.media_type; - if matches!(media_type, MediaType::Cjs | MediaType::Cts) { - is_cjs = true; + if let Some(npm_state) = &state.maybe_npm { + is_cjs = npm_state.cjs_tracker.is_cjs_with_known_is_script( + specifier, + module.media_type, + module.is_script, + )?; } let source = module .fast_check_module() @@ -737,6 +746,7 @@ fn op_resolve_inner( "Error converting a string module specifier for \"op_resolve\".", )? 
}; + let referrer_module = state.graph.get(&referrer); for specifier in args.specifiers { if specifier.starts_with("node:") { resolved.push(( @@ -752,16 +762,19 @@ fn op_resolve_inner( continue; } - let graph = &state.graph; - let resolved_dep = graph - .get(&referrer) + let resolved_dep = referrer_module .and_then(|m| m.js()) .and_then(|m| m.dependencies_prefer_fast_check().get(&specifier)) .and_then(|d| d.maybe_type.ok().or_else(|| d.maybe_code.ok())); let maybe_result = match resolved_dep { Some(ResolutionResolved { specifier, .. }) => { - resolve_graph_specifier_types(specifier, &referrer, state)? + resolve_graph_specifier_types( + specifier, + &referrer, + referrer_kind, + state, + )? } _ => { match resolve_non_graph_specifier_types( @@ -834,6 +847,7 @@ fn op_resolve_inner( fn resolve_graph_specifier_types( specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, state: &State, ) -> Result, AnyError> { let graph = &state.graph; @@ -886,6 +900,7 @@ fn resolve_graph_specifier_types( &package_folder, module.nv_reference.sub_path(), Some(referrer), + referrer_kind, NodeResolutionMode::Types, ); let maybe_url = match res_result { @@ -965,6 +980,7 @@ fn resolve_non_graph_specifier_types( &package_folder, npm_req_ref.sub_path(), Some(referrer), + referrer_kind, NodeResolutionMode::Types, ); let maybe_url = match res_result { diff --git a/cli/worker.rs b/cli/worker.rs index 402644a42c..83e36b36c2 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -44,6 +44,7 @@ use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use deno_terminal::colors; +use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use tokio::select; @@ -680,6 +681,7 @@ impl CliMainWorkerFactory { package_folder, sub_path, /* referrer */ None, + NodeModuleKind::Esm, NodeResolutionMode::Execution, )?; if specifier diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 6d320b92c1..702c919f47 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -14,6 +14,7 @@ use deno_core::url::Url; #[allow(unused_imports)] use deno_core::v8; use deno_core::v8::ExternalReference; +use node_resolver::errors::ClosestPkgJsonError; use node_resolver::NpmResolverRc; use once_cell::sync::Lazy; @@ -157,6 +158,10 @@ pub trait NodeRequireLoader { ) -> Result, AnyError>; fn load_text_file_lossy(&self, path: &Path) -> Result; + + /// Get if the module kind is maybe CJS and loading should determine + /// if its CJS or ESM. + fn is_maybe_cjs(&self, specifier: &Url) -> Result; } pub static NODE_ENV_VAR_ALLOWLIST: Lazy> = Lazy::new(|| { @@ -385,6 +390,7 @@ deno_core::extension!(deno_node, ops::require::op_require_proxy_path, ops::require::op_require_is_deno_dir_package, ops::require::op_require_resolve_deno_dir, + ops::require::op_require_is_maybe_cjs, ops::require::op_require_is_request_relative, ops::require::op_require_resolve_lookup_paths, ops::require::op_require_try_self_parent_path

<P>,
@@ -398,7 +404,6 @@ deno_core::extension!(deno_node,
    ops::require::op_require_read_file<P>,
    ops::require::op_require_as_file_path,
    ops::require::op_require_resolve_exports<P>,
-   ops::require::op_require_read_closest_package_json<P>,
    ops::require::op_require_read_package_scope<P>,
    ops::require::op_require_package_imports_resolve<P>
, ops::require::op_require_break_on_next_statement, diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 30db8b6293..b7fa8feb20 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -1,16 +1,18 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; use deno_core::v8; use deno_core::JsRuntimeInspector; -use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_fs::FileSystemRc; +use deno_package_json::NodeModuleKind; use deno_package_json::PackageJsonRc; use deno_path_util::normalize_path; +use deno_path_util::url_from_file_path; use deno_path_util::url_to_file_path; -use node_resolver::NodeModuleKind; +use node_resolver::errors::ClosestPkgJsonError; use node_resolver::NodeResolutionMode; use node_resolver::REQUIRE_CONDITIONS; use std::borrow::Cow; @@ -217,17 +219,17 @@ pub fn op_require_resolve_deno_dir( state: &mut OpState, #[string] request: String, #[string] parent_filename: String, -) -> Option { +) -> Result, AnyError> { let resolver = state.borrow::(); - resolver - .resolve_package_folder_from_package( - &request, - &ModuleSpecifier::from_file_path(&parent_filename).unwrap_or_else(|_| { - panic!("Url::from_file_path: [{:?}]", parent_filename) - }), - ) - .ok() - .map(|p| p.to_string_lossy().into_owned()) + Ok( + resolver + .resolve_package_folder_from_package( + &request, + &url_from_file_path(&PathBuf::from(parent_filename))?, + ) + .ok() + .map(|p| p.to_string_lossy().into_owned()), + ) } #[op2(fast)] @@ -564,19 +566,17 @@ where })) } -#[op2] -#[serde] -pub fn op_require_read_closest_package_json
<P>
( +#[op2(fast)] +pub fn op_require_is_maybe_cjs( state: &mut OpState, #[string] filename: String, -) -> Result, node_resolver::errors::ClosestPkgJsonError> -where - P: NodePermissions + 'static, -{ +) -> Result { let filename = PathBuf::from(filename); - // permissions: allow reading the closest package.json files - let pkg_json_resolver = state.borrow::(); - pkg_json_resolver.get_closest_package_json_from_path(&filename) + let Ok(url) = url_from_file_path(&filename) else { + return Ok(false); + }; + let loader = state.borrow::(); + loader.is_maybe_cjs(&url) } #[op2] diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 0d267ca444..083d4e49be 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -11,6 +11,7 @@ import { op_require_can_parse_as_esm, op_require_init_paths, op_require_is_deno_dir_package, + op_require_is_maybe_cjs, op_require_is_request_relative, op_require_node_module_paths, op_require_package_imports_resolve, @@ -19,7 +20,6 @@ import { op_require_path_is_absolute, op_require_path_resolve, op_require_proxy_path, - op_require_read_closest_package_json, op_require_read_file, op_require_read_package_scope, op_require_real_path, @@ -1060,36 +1060,13 @@ Module.prototype._compile = function (content, filename, format) { return result; }; -Module._extensions[".js"] = function (module, filename) { - const content = op_require_read_file(filename); - - let format; - if (StringPrototypeEndsWith(filename, ".js")) { - const pkg = op_require_read_closest_package_json(filename); - if (pkg?.type === "module") { - format = "module"; - } else if (pkg?.type === "commonjs") { - format = "commonjs"; - } - } - - module._compile(content, filename, format); -}; - -Module._extensions[".ts"] = +Module._extensions[".js"] = + Module._extensions[".ts"] = Module._extensions[".jsx"] = Module._extensions[".tsx"] = function (module, filename) { const content = op_require_read_file(filename); - - let format; - const pkg = op_require_read_closest_package_json(filename); - if (pkg?.type === "module") { - format = "module"; - } else if (pkg?.type === "commonjs") { - format = "commonjs"; - } - + const format = op_require_is_maybe_cjs(filename) ? undefined : "module"; module._compile(content, filename, format); }; diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index bf626e4100..647376d5cf 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -919,7 +919,7 @@ Object.defineProperty(argv, "1", { if (Deno.mainModule?.startsWith("file:")) { return pathFromURL(new URL(Deno.mainModule)); } else { - return join(Deno.cwd(), "$deno$node.js"); + return join(Deno.cwd(), "$deno$node.mjs"); } }, }); diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs index 6967779e5d..ae016ebe3e 100644 --- a/resolvers/node/package_json.rs +++ b/resolvers/node/package_json.rs @@ -15,8 +15,8 @@ use crate::errors::CanonicalizingPkgJsonDirError; use crate::errors::ClosestPkgJsonError; use crate::errors::PackageJsonLoadError; -// todo(dsherret): this isn't exactly correct and we should change it to instead -// be created per worker and passed down as a ctor arg to the pkg json resolver +// it would be nice if this was passed down as a ctor arg to the package.json resolver, +// but it's a little bit complicated to do that, so we just maintain a thread local cache thread_local! 
{ static CACHE: RefCell> = RefCell::new(HashMap::new()); } diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs index d44539e978..fcff292425 100644 --- a/resolvers/node/resolution.rs +++ b/resolvers/node/resolution.rs @@ -50,6 +50,15 @@ pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; static TYPES_ONLY_CONDITIONS: &[&str] = &["types"]; +fn conditions_from_module_kind( + kind: NodeModuleKind, +) -> &'static [&'static str] { + match kind { + NodeModuleKind::Esm => DEFAULT_CONDITIONS, + NodeModuleKind::Cjs => REQUIRE_CONDITIONS, + } +} + pub type NodeModuleKind = deno_package_json::NodeModuleKind; #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -166,8 +175,7 @@ impl NodeResolver { specifier, referrer, referrer_kind, - // even though the referrer may be CJS, if we're here that means we're doing ESM resolution - DEFAULT_CONDITIONS, + conditions_from_module_kind(referrer_kind), mode, )?; @@ -299,9 +307,9 @@ impl NodeResolver { package_dir: &Path, package_subpath: Option<&str>, maybe_referrer: Option<&Url>, + referrer_kind: NodeModuleKind, mode: NodeResolutionMode, ) -> Result { - let node_module_kind = NodeModuleKind::Esm; let package_subpath = package_subpath .map(|s| format!("./{s}")) .unwrap_or_else(|| ".".to_string()); @@ -309,8 +317,8 @@ impl NodeResolver { package_dir, &package_subpath, maybe_referrer, - node_module_kind, - DEFAULT_CONDITIONS, + referrer_kind, + conditions_from_module_kind(referrer_kind), mode, )?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and @@ -441,10 +449,7 @@ impl NodeResolver { /* sub path */ ".", maybe_referrer, referrer_kind, - match referrer_kind { - NodeModuleKind::Esm => DEFAULT_CONDITIONS, - NodeModuleKind::Cjs => REQUIRE_CONDITIONS, - }, + conditions_from_module_kind(referrer_kind), NodeResolutionMode::Types, ); if let Ok(resolution) = resolution_result { diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs index 4cd8a06345..28cd702966 100644 --- a/runtime/fmt_errors.rs +++ b/runtime/fmt_errors.rs @@ -310,14 +310,13 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec { { return vec![ FixSuggestion::info_multiline(&[ - cstr!("Deno supports CommonJS modules in .cjs files, or when there's a package.json"), - cstr!("with \"type\": \"commonjs\" option and --unstable-detect-cjs flag is used.") + cstr!("Deno supports CommonJS modules in .cjs files, or when the closest"), + cstr!("package.json has a \"type\": \"commonjs\" option.") ]), FixSuggestion::hint_multiline(&[ "Rewrite this module to ESM,", cstr!("or change the file extension to .cjs,"), - cstr!("or add package.json next to the file with \"type\": \"commonjs\" option"), - cstr!("and pass --unstable-detect-cjs flag."), + cstr!("or add package.json next to the file with \"type\": \"commonjs\" option."), ]), FixSuggestion::docs("https://docs.deno.com/go/commonjs"), ]; diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index d81c82c501..e272291538 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -393,6 +393,13 @@ pub struct WebWorker { maybe_worker_metadata: Option, } +impl Drop for WebWorker { + fn drop(&mut self) { + // clean up the package.json thread local cache + node_resolver::PackageJsonThreadLocalCache::clear(); + } +} + impl WebWorker { pub fn bootstrap_from_options( services: WebWorkerServiceOptions, diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index af5f9de23e..dcef696084 100644 --- 
a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -16139,6 +16139,55 @@ fn lsp_cjs_import_dual() { ); } +#[test] +fn lsp_type_commonjs() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .add_npm_env_vars() + .build(); + let temp_dir = context.temp_dir(); + temp_dir.write("deno.json", r#"{}"#); + temp_dir.write( + "package.json", + r#"{ + "type": "commonjs", + "dependencies": { + "@denotest/dual-cjs-esm": "1" + } +}"#, + ); + context.run_npm("install"); + + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + let main_url = temp_dir.path().join("main.ts").url_file(); + let diagnostics = client.did_open( + json!({ + "textDocument": { + "uri": main_url, + "languageId": "typescript", + "version": 1, + // getKind() should resolve as "cjs" and cause a type checker error + "text": "import mod = require('@denotest/dual-cjs-esm');\nconst kind: 'other' = mod.getKind(); console.log(kind);", + } + }), + ); + assert_eq!( + json!(diagnostics.all()), + json!([{ + "range": { + "start": { "line": 1, "character": 6, }, + "end": { "line": 1, "character": 10, }, + }, + "severity": 1, + "code": 2322, + "source": "deno-ts", + "message": "Type '\"cjs\"' is not assignable to type '\"other\"'.", + }]) + ); +} + #[test] fn lsp_ts_code_fix_any_param() { let context = TestContextBuilder::new().use_temp_cwd().build(); diff --git a/tests/node_compat/package.json b/tests/node_compat/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/node_compat/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/node_compat/test/common/package.json b/tests/node_compat/test/common/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/common/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/fixtures/package.json b/tests/node_compat/test/fixtures/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/fixtures/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/internet/package.json b/tests/node_compat/test/internet/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/internet/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/parallel/package.json b/tests/node_compat/test/parallel/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/parallel/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/pseudo-tty/package.json b/tests/node_compat/test/pseudo-tty/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/pseudo-tty/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/pummel/package.json b/tests/node_compat/test/pummel/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/pummel/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/node_compat/test/sequential/package.json b/tests/node_compat/test/sequential/package.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/tests/node_compat/test/sequential/package.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/install.js b/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/install.js new file mode 100644 index 0000000000..61aadeb839 --- /dev/null +++ 
b/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/install.js @@ -0,0 +1,12 @@ +const tempDir = Deno.makeTempDirSync(); +try { + // should work requiring these because this was launched via a node binary entrypoint + Deno.writeTextFileSync(`${tempDir}/index.js`, "module.exports = require('./other');"); + Deno.writeTextFileSync(`${tempDir}/other.js`, "module.exports = (a, b) => a + b;"); + const add = require(`${tempDir}/index.js`); + if (add(1, 2) !== 3) { + throw new Error("FAILED"); + } +} finally { + Deno.removeSync(tempDir, { recursive: true }); +} diff --git a/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/package.json b/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/package.json new file mode 100644 index 0000000000..c3cf8dc4c3 --- /dev/null +++ b/tests/registry/npm/@denotest/install-launch-cjs-temp-dir/1.0.0/package.json @@ -0,0 +1,7 @@ +{ + "name": "@denotest/install-launch-cjs-temp-dir", + "version": "1.0.0", + "scripts": { + "install": "node install.js" + } +} \ No newline at end of file diff --git a/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/check.js b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/check.js new file mode 100644 index 0000000000..7d55c2481d --- /dev/null +++ b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/check.js @@ -0,0 +1 @@ +require("./output"); diff --git a/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/index.js b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/index.js new file mode 100644 index 0000000000..7d55c2481d --- /dev/null +++ b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/index.js @@ -0,0 +1 @@ +require("./output"); diff --git a/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/output.js b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/output.js new file mode 100644 index 0000000000..69668cd625 --- /dev/null +++ b/tests/registry/npm/@denotest/install-no-ext/1.0.0/install/output.js @@ -0,0 +1 @@ +console.log("SUCCESS"); diff --git a/tests/registry/npm/@denotest/install-no-ext/1.0.0/package.json b/tests/registry/npm/@denotest/install-no-ext/1.0.0/package.json new file mode 100644 index 0000000000..b9abed1f6e --- /dev/null +++ b/tests/registry/npm/@denotest/install-no-ext/1.0.0/package.json @@ -0,0 +1,7 @@ +{ + "name": "@denotest/install-no-ext", + "version": "1.0.0", + "scripts": { + "install": "node install/check && node install" + } +} \ No newline at end of file diff --git a/tests/specs/compile/detect_cjs/deno.json b/tests/specs/compile/detect_cjs/deno.json deleted file mode 100644 index 35f64c86f4..0000000000 --- a/tests/specs/compile/detect_cjs/deno.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "unstable": [ - "detect-cjs" - ] -} diff --git a/tests/specs/compile/detect_cjs/__test__.jsonc b/tests/specs/compile/package_json_type/__test__.jsonc similarity index 100% rename from tests/specs/compile/detect_cjs/__test__.jsonc rename to tests/specs/compile/package_json_type/__test__.jsonc diff --git a/tests/specs/compile/detect_cjs/add.js b/tests/specs/compile/package_json_type/add.js similarity index 100% rename from tests/specs/compile/detect_cjs/add.js rename to tests/specs/compile/package_json_type/add.js diff --git a/tests/specs/compile/detect_cjs/compile.out b/tests/specs/compile/package_json_type/compile.out similarity index 100% rename from tests/specs/compile/detect_cjs/compile.out rename to tests/specs/compile/package_json_type/compile.out diff --git a/tests/specs/compile/detect_cjs/main.js 
b/tests/specs/compile/package_json_type/main.js similarity index 100% rename from tests/specs/compile/detect_cjs/main.js rename to tests/specs/compile/package_json_type/main.js diff --git a/tests/specs/compile/detect_cjs/output.out b/tests/specs/compile/package_json_type/output.out similarity index 100% rename from tests/specs/compile/detect_cjs/output.out rename to tests/specs/compile/package_json_type/output.out diff --git a/tests/specs/compile/detect_cjs/package.json b/tests/specs/compile/package_json_type/package.json similarity index 100% rename from tests/specs/compile/detect_cjs/package.json rename to tests/specs/compile/package_json_type/package.json diff --git a/tests/specs/compile/detect_cjs/subtract.ts b/tests/specs/compile/package_json_type/subtract.ts similarity index 100% rename from tests/specs/compile/detect_cjs/subtract.ts rename to tests/specs/compile/package_json_type/subtract.ts diff --git a/tests/specs/eval/pkg_json_type_cjs/__test__.jsonc b/tests/specs/eval/pkg_json_type_cjs/__test__.jsonc new file mode 100644 index 0000000000..cd3804d773 --- /dev/null +++ b/tests/specs/eval/pkg_json_type_cjs/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "eval console.log(1)", + "output": "1\n" +} diff --git a/tests/specs/eval/pkg_json_type_cjs/package.json b/tests/specs/eval/pkg_json_type_cjs/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/specs/eval/pkg_json_type_cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/specs/install/scripts_install_launch_cjs_temp_dir/__test__.jsonc b/tests/specs/install/scripts_install_launch_cjs_temp_dir/__test__.jsonc new file mode 100644 index 0000000000..087d08eff0 --- /dev/null +++ b/tests/specs/install/scripts_install_launch_cjs_temp_dir/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "tempDir": true, + "args": "install --allow-scripts", + "output": "output.out" +} diff --git a/tests/specs/install/scripts_install_launch_cjs_temp_dir/output.out b/tests/specs/install/scripts_install_launch_cjs_temp_dir/output.out new file mode 100644 index 0000000000..d5f06cc6ea --- /dev/null +++ b/tests/specs/install/scripts_install_launch_cjs_temp_dir/output.out @@ -0,0 +1,4 @@ +Download http://localhost:4260/@denotest%2finstall-launch-cjs-temp-dir +Download http://localhost:4260/@denotest/install-launch-cjs-temp-dir/1.0.0.tgz +Initialize @denotest/install-launch-cjs-temp-dir@1.0.0 +Initialize @denotest/install-launch-cjs-temp-dir@1.0.0: running 'install' script diff --git a/tests/specs/install/scripts_install_launch_cjs_temp_dir/package.json b/tests/specs/install/scripts_install_launch_cjs_temp_dir/package.json new file mode 100644 index 0000000000..71672f9bc1 --- /dev/null +++ b/tests/specs/install/scripts_install_launch_cjs_temp_dir/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "@denotest/install-launch-cjs-temp-dir": "*" + } +} diff --git a/tests/specs/install/scripts_install_no_ext/__test__.jsonc b/tests/specs/install/scripts_install_no_ext/__test__.jsonc new file mode 100644 index 0000000000..087d08eff0 --- /dev/null +++ b/tests/specs/install/scripts_install_no_ext/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "tempDir": true, + "args": "install --allow-scripts", + "output": "output.out" +} diff --git a/tests/specs/install/scripts_install_no_ext/output.out b/tests/specs/install/scripts_install_no_ext/output.out new file mode 100644 index 0000000000..074e978121 --- /dev/null +++ b/tests/specs/install/scripts_install_no_ext/output.out @@ -0,0 +1,4 @@ +Download http://localhost:4260/@denotest%2finstall-no-ext 
+Download http://localhost:4260/@denotest/install-no-ext/1.0.0.tgz +Initialize @denotest/install-no-ext@1.0.0 +Initialize @denotest/install-no-ext@1.0.0: running 'install' script diff --git a/tests/specs/install/scripts_install_no_ext/package.json b/tests/specs/install/scripts_install_no_ext/package.json new file mode 100644 index 0000000000..7ac9ca6b29 --- /dev/null +++ b/tests/specs/install/scripts_install_no_ext/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "@denotest/install-no-ext": "*" + } +} diff --git a/tests/specs/mod.rs b/tests/specs/mod.rs index 34221dd9da..b4c8781d31 100644 --- a/tests/specs/mod.rs +++ b/tests/specs/mod.rs @@ -119,6 +119,9 @@ struct MultiStepMetaData { /// steps. #[serde(default)] pub temp_dir: bool, + /// Whether the temporary directory should be symlinked to another path. + #[serde(default)] + pub symlinked_temp_dir: bool, /// The base environment to use for the test. #[serde(default)] pub base: Option, @@ -142,6 +145,8 @@ struct SingleTestMetaData { #[serde(default)] pub temp_dir: bool, #[serde(default)] + pub symlinked_temp_dir: bool, + #[serde(default)] pub repeat: Option, #[serde(flatten)] pub step: StepMetaData, @@ -155,6 +160,7 @@ impl SingleTestMetaData { base: self.base, cwd: None, temp_dir: self.temp_dir, + symlinked_temp_dir: self.symlinked_temp_dir, repeat: self.repeat, envs: Default::default(), steps: vec![self.step], @@ -330,6 +336,20 @@ fn test_context_from_metadata( builder = builder.cwd(cwd.to_string_lossy()); } + if metadata.symlinked_temp_dir { + // not actually deprecated, we just want to discourage its use + // because it's mostly used for testing purposes locally + #[allow(deprecated)] + { + builder = builder.use_symlinked_temp_dir(); + } + if cfg!(not(debug_assertions)) { + // panic to prevent using this on the CI as CI already uses + // a symlinked temp directory for every test + panic!("Cannot use symlinkedTempDir in release mode"); + } + } + match &metadata.base { // todo(dsherret): add bases in the future as needed Some(base) => panic!("Unknown test base: {}", base), diff --git a/tests/specs/npm/dual_cjs_esm/__test__.jsonc b/tests/specs/npm/dual_cjs_esm/__test__.jsonc deleted file mode 100644 index f2b0d694e3..0000000000 --- a/tests/specs/npm/dual_cjs_esm/__test__.jsonc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "args": "run -A --quiet dual_cjs_esm/main.ts", - "output": "dual_cjs_esm/main.out" -} diff --git a/tests/specs/npm/dual_cjs_esm/cjs_referrer/__test__.jsonc b/tests/specs/npm/dual_cjs_esm/cjs_referrer/__test__.jsonc new file mode 100644 index 0000000000..de2c1a0bc5 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/cjs_referrer/__test__.jsonc @@ -0,0 +1,14 @@ +{ + "tempDir": true, + "tests": { + "check": { + "args": "check --node-modules-dir=auto main.cts", + "output": "check.out", + "exitCode": 1 + }, + "run": { + "args": "run --node-modules-dir=auto --allow-read main.cts", + "output": "main.out" + } + } +} diff --git a/tests/specs/npm/dual_cjs_esm/cjs_referrer/check.out b/tests/specs/npm/dual_cjs_esm/cjs_referrer/check.out new file mode 100644 index 0000000000..267d31fb75 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/cjs_referrer/check.out @@ -0,0 +1,8 @@ +Download http://localhost:4260/@denotest%2fdual-cjs-esm +Download http://localhost:4260/@denotest/dual-cjs-esm/1.0.0.tgz +Initialize @denotest/dual-cjs-esm@1.0.0 +Check file:///[WILDLINE]/main.cts +error: TS2322 [ERROR]: Type '"cjs"' is not assignable to type '"other"'. 
+const kind: "other" = mod.getKind(); + ~~~~ + at file:///[WILDLINE]/main.cts:3:7 diff --git a/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.cts b/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.cts new file mode 100644 index 0000000000..b8dd343f8b --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.cts @@ -0,0 +1,4 @@ +import mod = require("@denotest/dual-cjs-esm"); + +const kind: "other" = mod.getKind(); +console.log(kind); diff --git a/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.out b/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.out new file mode 100644 index 0000000000..62ddbf4793 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/cjs_referrer/main.out @@ -0,0 +1,4 @@ +Download http://localhost:4260/@denotest%2fdual-cjs-esm +Download http://localhost:4260/@denotest/dual-cjs-esm/1.0.0.tgz +Initialize @denotest/dual-cjs-esm@1.0.0 +cjs diff --git a/tests/specs/npm/dual_cjs_esm/cjs_referrer/package.json b/tests/specs/npm/dual_cjs_esm/cjs_referrer/package.json new file mode 100644 index 0000000000..e1b1e1a5f8 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/cjs_referrer/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "@denotest/dual-cjs-esm": "*" + } +} diff --git a/tests/specs/npm/dual_cjs_esm/esm_referrer/__test__.jsonc b/tests/specs/npm/dual_cjs_esm/esm_referrer/__test__.jsonc new file mode 100644 index 0000000000..0ef1472536 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/esm_referrer/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A --quiet main.ts", + "output": "main.out" +} diff --git a/tests/specs/npm/dual_cjs_esm/dual_cjs_esm/main.out b/tests/specs/npm/dual_cjs_esm/esm_referrer/main.out similarity index 100% rename from tests/specs/npm/dual_cjs_esm/dual_cjs_esm/main.out rename to tests/specs/npm/dual_cjs_esm/esm_referrer/main.out diff --git a/tests/specs/npm/dual_cjs_esm/dual_cjs_esm/main.ts b/tests/specs/npm/dual_cjs_esm/esm_referrer/main.ts similarity index 100% rename from tests/specs/npm/dual_cjs_esm/dual_cjs_esm/main.ts rename to tests/specs/npm/dual_cjs_esm/esm_referrer/main.ts diff --git a/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/__test__.jsonc b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/__test__.jsonc new file mode 100644 index 0000000000..cf19217d18 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/__test__.jsonc @@ -0,0 +1,14 @@ +{ + "tempDir": true, + "tests": { + "check": { + "args": "check --node-modules-dir=auto main.ts", + "output": "check.out", + "exitCode": 1 + }, + "run": { + "args": "run --node-modules-dir=auto --allow-read main.ts", + "output": "main.out" + } + } +} diff --git a/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/check.out b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/check.out new file mode 100644 index 0000000000..cbd7740a9f --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/check.out @@ -0,0 +1,8 @@ +Download http://localhost:4260/@denotest%2fdual-cjs-esm +Download http://localhost:4260/@denotest/dual-cjs-esm/1.0.0.tgz +Initialize @denotest/dual-cjs-esm@1.0.0 +Check file:///[WILDLINE]/main.ts +error: TS2322 [ERROR]: Type '"cjs"' is not assignable to type '"other"'. 
+const kind: "other" = mod.getKind(); + ~~~~ + at file:///[WILDLINE]/main.ts:3:7 diff --git a/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.out b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.out new file mode 100644 index 0000000000..62ddbf4793 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.out @@ -0,0 +1,4 @@ +Download http://localhost:4260/@denotest%2fdual-cjs-esm +Download http://localhost:4260/@denotest/dual-cjs-esm/1.0.0.tgz +Initialize @denotest/dual-cjs-esm@1.0.0 +cjs diff --git a/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.ts b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.ts new file mode 100644 index 0000000000..b8dd343f8b --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/main.ts @@ -0,0 +1,4 @@ +import mod = require("@denotest/dual-cjs-esm"); + +const kind: "other" = mod.getKind(); +console.log(kind); diff --git a/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/package.json b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/package.json new file mode 100644 index 0000000000..419d3d9f11 --- /dev/null +++ b/tests/specs/npm/dual_cjs_esm/ts_referrer_type_cjs/package.json @@ -0,0 +1,6 @@ +{ + "type": "commonjs", + "dependencies": { + "@denotest/dual-cjs-esm": "*" + } +} diff --git a/tests/specs/npm/permissions_outside_package/__test__.jsonc b/tests/specs/npm/permissions_outside_package/__test__.jsonc index 56228296b3..d5f6bf4908 100644 --- a/tests/specs/npm/permissions_outside_package/__test__.jsonc +++ b/tests/specs/npm/permissions_outside_package/__test__.jsonc @@ -1,4 +1,5 @@ { - "args": "run --allow-read permissions_outside_package/main.ts", - "output": "permissions_outside_package/main.out" + "tempDir": true, + "args": "run --allow-read --node-modules-dir=none main.ts", + "output": "main.out" } diff --git a/tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/config.js b/tests/specs/npm/permissions_outside_package/foo/config.js similarity index 100% rename from tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/config.js rename to tests/specs/npm/permissions_outside_package/foo/config.js diff --git a/tests/specs/npm/permissions_outside_package/foo/package.json b/tests/specs/npm/permissions_outside_package/foo/package.json new file mode 100644 index 0000000000..95b43077e4 --- /dev/null +++ b/tests/specs/npm/permissions_outside_package/foo/package.json @@ -0,0 +1,5 @@ +{ + "name": "foobar", + "version": "0.0.1", + "type": "commonjs" +} diff --git a/tests/specs/npm/permissions_outside_package/permissions_outside_package/main.out b/tests/specs/npm/permissions_outside_package/main.out similarity index 100% rename from tests/specs/npm/permissions_outside_package/permissions_outside_package/main.out rename to tests/specs/npm/permissions_outside_package/main.out diff --git a/tests/specs/npm/permissions_outside_package/permissions_outside_package/main.ts b/tests/specs/npm/permissions_outside_package/main.ts similarity index 64% rename from tests/specs/npm/permissions_outside_package/permissions_outside_package/main.ts rename to tests/specs/npm/permissions_outside_package/main.ts index 934a3eebcf..a80713fa72 100644 --- a/tests/specs/npm/permissions_outside_package/permissions_outside_package/main.ts +++ b/tests/specs/npm/permissions_outside_package/main.ts @@ -1,5 +1,5 @@ import { loadConfigFile } from "npm:@denotest/permissions-outside-package"; -const fileName = `${Deno.cwd()}/permissions_outside_package/foo/config.js`; +const fileName = 
`${Deno.cwd()}/foo/config.js`; const config = loadConfigFile(fileName); console.log(config); diff --git a/tests/specs/npm/permissions_outside_package/package.json b/tests/specs/npm/permissions_outside_package/package.json new file mode 100644 index 0000000000..2c63c08510 --- /dev/null +++ b/tests/specs/npm/permissions_outside_package/package.json @@ -0,0 +1,2 @@ +{ +} diff --git a/tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/package.json b/tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/package.json deleted file mode 100644 index cc049e6ce9..0000000000 --- a/tests/specs/npm/permissions_outside_package/permissions_outside_package/foo/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "foobar", - "version": "0.0.1" -} diff --git a/tests/specs/run/import_common_js/__test__.jsonc b/tests/specs/run/import_common_js/__test__.jsonc index 6510dbad7b..0602a09baa 100644 --- a/tests/specs/run/import_common_js/__test__.jsonc +++ b/tests/specs/run/import_common_js/__test__.jsonc @@ -1,21 +1,27 @@ { - "steps": [ - { "args": "run -R index.cjs", "output": "index.out" }, - { "args": "run -R main.ts", "output": "main.out" }, - { + "tests": { + "cjs_entrypoint": { + "args": "run -R index.cjs", + "output": "index.out" + }, + "esm_entrypoint": { + "args": "run -R main.ts", + "output": "main.out" + }, + "module_error": { "args": "run module_error.js", "output": "module_error.out", "exitCode": 1 }, - { + "exports_error": { "args": "run exports_error.js", "output": "exports_error.out", "exitCode": 1 }, - { + "require_error": { "args": "run require_error.js", "output": "require_error.out", "exitCode": 1 } - ] + } } diff --git a/tests/specs/run/import_common_js/a.js b/tests/specs/run/import_common_js/a.js deleted file mode 100644 index c465ab588b..0000000000 --- a/tests/specs/run/import_common_js/a.js +++ /dev/null @@ -1,7 +0,0 @@ -function foobar() { - console.log("foobar"); -} - -module.exports = { - foobar, -}; diff --git a/tests/specs/run/import_common_js/exports_error.out b/tests/specs/run/import_common_js/exports_error.out index b979cce5c7..baa44682be 100644 --- a/tests/specs/run/import_common_js/exports_error.out +++ b/tests/specs/run/import_common_js/exports_error.out @@ -3,10 +3,9 @@ Object.defineProperty(exports, "__esModule", { value: true }); ^ at [WILDCARD]exports_error.js:1:23 - info: Deno supports CommonJS modules in .cjs files, or when there's a package.json - with "type": "commonjs" option and --unstable-detect-cjs flag is used. + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. hint: Rewrite this module to ESM, or change the file extension to .cjs, - or add package.json next to the file with "type": "commonjs" option - and pass --unstable-detect-cjs flag. + or add package.json next to the file with "type": "commonjs" option. 
docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/import_common_js/index.cjs b/tests/specs/run/import_common_js/index.cjs index 18caf81e94..0026e237d1 100644 --- a/tests/specs/run/import_common_js/index.cjs +++ b/tests/specs/run/import_common_js/index.cjs @@ -1,9 +1,7 @@ const process = require("process"); -const a = require("./a"); console.log(process.cwd()); module.exports = { cwd: process.cwd, - foobar: a.foobar, }; diff --git a/tests/specs/run/import_common_js/index.out b/tests/specs/run/import_common_js/index.out index 3650631b7a..6a734b9948 100644 --- a/tests/specs/run/import_common_js/index.out +++ b/tests/specs/run/import_common_js/index.out @@ -1 +1 @@ -[WILDCARD]import_common_js +[WILDLINE]import_common_js diff --git a/tests/specs/run/import_common_js/main.out b/tests/specs/run/import_common_js/main.out index 03301b3620..9df3129754 100644 --- a/tests/specs/run/import_common_js/main.out +++ b/tests/specs/run/import_common_js/main.out @@ -1,5 +1,3 @@ hello from foo node module [WILDCARD]import_common_js cjsModule.cwd() [WILDCARD]import_common_js -foobar -cjsModule.foobar() undefined diff --git a/tests/specs/run/import_common_js/module_error.out b/tests/specs/run/import_common_js/module_error.out index 654ee838dd..957b19cb1e 100644 --- a/tests/specs/run/import_common_js/module_error.out +++ b/tests/specs/run/import_common_js/module_error.out @@ -3,10 +3,9 @@ module.exports = { ^ at [WILDCARD]module_error.js:1:1 - info: Deno supports CommonJS modules in .cjs files, or when there's a package.json - with "type": "commonjs" option and --unstable-detect-cjs flag is used. + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. hint: Rewrite this module to ESM, or change the file extension to .cjs, - or add package.json next to the file with "type": "commonjs" option - and pass --unstable-detect-cjs flag. + or add package.json next to the file with "type": "commonjs" option. docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/import_common_js/node_modules/foo/index.mjs b/tests/specs/run/import_common_js/node_modules/foo/index.mjs index cc93554c73..7a11d39ae6 100644 --- a/tests/specs/run/import_common_js/node_modules/foo/index.mjs +++ b/tests/specs/run/import_common_js/node_modules/foo/index.mjs @@ -10,5 +10,4 @@ export default async function () { const cjsModule = await import(url.pathToFileURL(cjsFileToImport)); console.log("cjsModule.cwd()", cjsModule.cwd()); - console.log("cjsModule.foobar()", cjsModule.foobar()); } diff --git a/tests/specs/run/import_common_js/require_error.out b/tests/specs/run/import_common_js/require_error.out index 81ffd6591f..e13db85e8e 100644 --- a/tests/specs/run/import_common_js/require_error.out +++ b/tests/specs/run/import_common_js/require_error.out @@ -3,10 +3,9 @@ const process = require("process"); ^ at [WILDCARD]require_error.js:1:17 - info: Deno supports CommonJS modules in .cjs files, or when there's a package.json - with "type": "commonjs" option and --unstable-detect-cjs flag is used. + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. hint: Rewrite this module to ESM, or change the file extension to .cjs, - or add package.json next to the file with "type": "commonjs" option - and pass --unstable-detect-cjs flag. + or add package.json next to the file with "type": "commonjs" option. 
docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/npm_pkg_requires_esm_js/__test__.jsonc b/tests/specs/run/npm_pkg_requires_esm_js/__test__.jsonc new file mode 100644 index 0000000000..3da8db4048 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "run -A main.js", + "output": "output.out", + "exitCode": 1 +} diff --git a/tests/specs/run/npm_pkg_requires_esm_js/file.js b/tests/specs/run/npm_pkg_requires_esm_js/file.js new file mode 100644 index 0000000000..d9536a69b3 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/file.js @@ -0,0 +1 @@ +console.log(import.meta.url); diff --git a/tests/specs/run/npm_pkg_requires_esm_js/logs_require.js b/tests/specs/run/npm_pkg_requires_esm_js/logs_require.js new file mode 100644 index 0000000000..984e1f3e74 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/logs_require.js @@ -0,0 +1 @@ +console.log(require); diff --git a/tests/specs/run/npm_pkg_requires_esm_js/main.js b/tests/specs/run/npm_pkg_requires_esm_js/main.js new file mode 100644 index 0000000000..3704c8bf68 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/main.js @@ -0,0 +1,5 @@ +import doRequire from "package"; +import path from "node:path"; + +doRequire(path.resolve(import.meta.dirname, "file.js")); +doRequire(path.resolve(import.meta.dirname, "logs_require.js")); diff --git a/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/index.js b/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/index.js new file mode 100644 index 0000000000..5d78723713 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/index.js @@ -0,0 +1,3 @@ +module.exports = (file) => { + return require(file); +}; diff --git a/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/package.json b/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/package.json new file mode 100644 index 0000000000..5723987e9f --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/node_modules/package/package.json @@ -0,0 +1,4 @@ +{ + "name": "package", + "version": "1.0.0" +} \ No newline at end of file diff --git a/tests/specs/run/npm_pkg_requires_esm_js/output.out b/tests/specs/run/npm_pkg_requires_esm_js/output.out new file mode 100644 index 0000000000..2cae7108b0 --- /dev/null +++ b/tests/specs/run/npm_pkg_requires_esm_js/output.out @@ -0,0 +1,12 @@ +file:///[WILDLINE]/file.js +error: Uncaught (in promise) ReferenceError: require is not defined +console.log(require); + ^ + at [WILDCARD] + + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. + hint: Rewrite this module to ESM, + or change the file extension to .cjs, + or add package.json next to the file with "type": "commonjs" option. 
+ docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/npm_pkg_requires_esm_js/package.json b/tests/specs/run/npm_pkg_requires_esm_js/package.json new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/specs/run/package_json_type/commonjs/basic/deno.jsonc b/tests/specs/run/package_json_type/commonjs/basic/deno.jsonc deleted file mode 100644 index 35f64c86f4..0000000000 --- a/tests/specs/run/package_json_type/commonjs/basic/deno.jsonc +++ /dev/null @@ -1,5 +0,0 @@ -{ - "unstable": [ - "detect-cjs" - ] -} diff --git a/tests/specs/run/package_json_type/commonjs/basic/main_mix.out b/tests/specs/run/package_json_type/commonjs/basic/main_mix.out index 78f421644c..65671fd618 100644 --- a/tests/specs/run/package_json_type/commonjs/basic/main_mix.out +++ b/tests/specs/run/package_json_type/commonjs/basic/main_mix.out @@ -4,10 +4,9 @@ console.log(require("./add").add(1, 2)); ^ at file:///[WILDLINE]main_mix.js:[WILDLINE] - info: Deno supports CommonJS modules in .cjs files, or when there's a package.json - with "type": "commonjs" option and --unstable-detect-cjs flag is used. + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. hint: Rewrite this module to ESM, or change the file extension to .cjs, - or add package.json next to the file with "type": "commonjs" option - and pass --unstable-detect-cjs flag. + or add package.json next to the file with "type": "commonjs" option. docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc b/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc index 192ddb98c4..31d05ffb74 100644 --- a/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc +++ b/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc @@ -3,8 +3,5 @@ "compilerOptions": { "jsx": "react-jsx", "jsxImportSource": "react" - }, - "unstable": [ - "detect-cjs" - ] + } } diff --git a/tests/specs/run/package_json_type/none/deno.jsonc b/tests/specs/run/package_json_type/none/deno.jsonc deleted file mode 100644 index 35f64c86f4..0000000000 --- a/tests/specs/run/package_json_type/none/deno.jsonc +++ /dev/null @@ -1,5 +0,0 @@ -{ - "unstable": [ - "detect-cjs" - ] -} diff --git a/tests/specs/run/package_json_type/none/main_cjs.out b/tests/specs/run/package_json_type/none/main_cjs.out index 8d34808fb1..afa5028f4f 100644 --- a/tests/specs/run/package_json_type/none/main_cjs.out +++ b/tests/specs/run/package_json_type/none/main_cjs.out @@ -3,10 +3,9 @@ const { add } = require("./add"); ^ at file:///[WILDLINE] - info: Deno supports CommonJS modules in .cjs files, or when there's a package.json - with "type": "commonjs" option and --unstable-detect-cjs flag is used. + info: Deno supports CommonJS modules in .cjs files, or when the closest + package.json has a "type": "commonjs" option. hint: Rewrite this module to ESM, or change the file extension to .cjs, - or add package.json next to the file with "type": "commonjs" option - and pass --unstable-detect-cjs flag. + or add package.json next to the file with "type": "commonjs" option. 
docs: https://docs.deno.com/go/commonjs diff --git a/tests/specs/run/require_esm/main.out b/tests/specs/run/require_esm/main.out index 57b842b345..4890e1a492 100644 --- a/tests/specs/run/require_esm/main.out +++ b/tests/specs/run/require_esm/main.out @@ -1,13 +1,4 @@ [Module: null prototype] { sync_js: 1 } [Module: null prototype] { sync_mjs: 1 } error: Uncaught (in promise) Error: Top-level await is not allowed in synchronous evaluation - at loadESMFromCJS (node:module:[WILDCARD]) - at Module._compile (node:module:[WILDCARD]) - at Object.Module._extensions..js (node:module:[WILDCARD]) - at Module.load (node:module:[WILDCARD]) - at Function.Module._load (node:module:[WILDCARD]) - at Module.require (node:module:[WILDCARD]) - at require (node:module:[WILDCARD]) - at Object. (file:[WILDCARD]/tests/specs/run/require_esm/main.cjs:[WILDCARD]) - at Object. (file:[WILDCARD]/tests/specs/run/require_esm/main.cjs:[WILDCARD]) - at Module._compile (node:module:[WILDCARD]) + at [WILDCARD] diff --git a/tests/specs/run/stdin_type_cjs/__test__.jsonc b/tests/specs/run/stdin_type_cjs/__test__.jsonc new file mode 100644 index 0000000000..e60af4a803 --- /dev/null +++ b/tests/specs/run/stdin_type_cjs/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "run --quiet -", + "output": "1\n", + "input": "console.log(1)" +} diff --git a/tests/specs/run/stdin_type_cjs/package.json b/tests/specs/run/stdin_type_cjs/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/specs/run/stdin_type_cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/specs/run/stdin_type_cjs/stdin_read_all.ts b/tests/specs/run/stdin_type_cjs/stdin_read_all.ts new file mode 100644 index 0000000000..2ecae40b71 --- /dev/null +++ b/tests/specs/run/stdin_type_cjs/stdin_read_all.ts @@ -0,0 +1 @@ +Deno.stdin.readable.pipeTo(Deno.stdout.writable); diff --git a/tests/specs/schema.json b/tests/specs/schema.json index 8f3953ee44..2b35d9bd7d 100644 --- a/tests/specs/schema.json +++ b/tests/specs/schema.json @@ -36,6 +36,9 @@ "flaky": { "type": "boolean" }, + "symlinkedTempDir": { + "type": "boolean" + }, "if": { "type": "string", "examples": [ diff --git a/tests/util/server/src/servers/mod.rs b/tests/util/server/src/servers/mod.rs index d9adde5420..0b1d99aeb9 100644 --- a/tests/util/server/src/servers/mod.rs +++ b/tests/util/server/src/servers/mod.rs @@ -807,17 +807,17 @@ async fn main_server( (_, "/jsx/jsx-runtime") | (_, "/jsx/jsx-dev-runtime") => { let mut res = Response::new(string_body( r#"export function jsx( - _type, - _props, - _key, - _source, - _self, - ) {} - export const jsxs = jsx; - export const jsxDEV = jsx; - export const Fragment = Symbol("Fragment"); - console.log("imported", import.meta.url); - "#, + _type, + _props, + _key, + _source, + _self, +) {} +export const jsxs = jsx; +export const jsxDEV = jsx; +export const Fragment = Symbol("Fragment"); +console.log("imported", import.meta.url); +"#, )); res.headers_mut().insert( "Content-type", From 6b5cb41545086a7a550c698620f5b7bb19b5524f Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 13 Nov 2024 10:39:40 -0500 Subject: [PATCH 17/18] fix(fmt): error instead of panic on unstable format (#26859) --- cli/tools/fmt.rs | 48 ++++++++++++++++++++++++++++-------------------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index f7f8dabc6f..56b1632cfb 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -790,28 +790,26 @@ fn format_ensure_stable( return Ok(Some(current_text)); } Err(err) 
=> { - panic!( + bail!( concat!( "Formatting succeeded initially, but failed when ensuring a ", "stable format. This indicates a bug in the formatter where ", "the text it produces is not syntactically correct. As a temporary ", - "workaround you can ignore this file ({}).\n\n{:#}" + "workaround you can ignore this file.\n\n{:#}" ), - file_path.display(), err, ) } } count += 1; if count == 5 { - panic!( + bail!( concat!( "Formatting not stable. Bailed after {} tries. This indicates a bug ", - "in the formatter where it formats the file ({}) differently each time. As a ", + "in the formatter where it formats the file differently each time. As a ", "temporary workaround you can ignore this file." ), count, - file_path.display(), ) } } @@ -1215,6 +1213,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool { #[cfg(test)] mod test { + use test_util::assert_starts_with; + use super::*; #[test] @@ -1270,12 +1270,16 @@ mod test { } #[test] - #[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")] fn test_format_ensure_stable_unstable_format() { - format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { - Ok(Some(format!("1{file_text}"))) - }) - .unwrap(); + let err = + format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { + Ok(Some(format!("1{file_text}"))) + }) + .unwrap_err(); + assert_starts_with!( + err.to_string(), + "Formatting not stable. Bailed after 5 tries." + ); } #[test] @@ -1289,16 +1293,20 @@ mod test { } #[test] - #[should_panic(expected = "Formatting succeeded initially, but failed when")] fn test_format_ensure_stable_error_second() { - format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { - if file_text == "1" { - Ok(Some("11".to_string())) - } else { - bail!("Error formatting.") - } - }) - .unwrap(); + let err = + format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| { + if file_text == "1" { + Ok(Some("11".to_string())) + } else { + bail!("Error formatting.") + } + }) + .unwrap_err(); + assert_starts_with!( + err.to_string(), + "Formatting succeeded initially, but failed when" + ); } #[test] From 15fae197482a9ac84e0227200ef8f4d0d4e4bd33 Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Thu, 14 Nov 2024 14:04:32 +0900 Subject: [PATCH 18/18] fix(cli): preserve comments in doc tests (#26828) This commit makes comments in code snippets in JSDoc or markdown preserved when they are executed as tests. In particular, this is needed to get TypeScript special comments such as `@ts-ignore` or `@ts-expect-error` to work correctly. 
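As an illustration (a minimal sketch mirroring the doc_ts_expect_error fixture added in this
patch), a doc test like the following only type-checks if the `@ts-expect-error` comment
survives extraction into the generated pseudo test file:

    /**
     * ```ts
     * // @ts-expect-error: can only add numbers
     * add('1', '2');
     * ```
     */
    export function add(first: number, second: number) {
      return first + second;
    }

Previously the comment was stripped from the extracted snippet, so `deno test --doc` failed
type checking on the intentionally ill-typed call.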
Fixes #26728 --- cli/util/extract.rs | 57 ++++++++++++++++++- .../test/doc_ts_expect_error/__test__.jsonc | 5 ++ tests/specs/test/doc_ts_expect_error/mod.out | 8 +++ tests/specs/test/doc_ts_expect_error/mod.ts | 13 +++++ .../markdown_ts_expect_error/__test__.jsonc | 5 ++ .../test/markdown_ts_expect_error/main.md | 8 +++ .../test/markdown_ts_expect_error/main.out | 6 ++ 7 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 tests/specs/test/doc_ts_expect_error/__test__.jsonc create mode 100644 tests/specs/test/doc_ts_expect_error/mod.out create mode 100644 tests/specs/test/doc_ts_expect_error/mod.ts create mode 100644 tests/specs/test/markdown_ts_expect_error/__test__.jsonc create mode 100644 tests/specs/test/markdown_ts_expect_error/main.md create mode 100644 tests/specs/test/markdown_ts_expect_error/main.out diff --git a/cli/util/extract.rs b/cli/util/extract.rs index f577cbefec..be68202aa1 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -586,7 +586,10 @@ fn generate_pseudo_file( wrap_kind, })); - let source = deno_ast::swc::codegen::to_code(&transformed); + let source = deno_ast::swc::codegen::to_code_with_comments( + Some(&parsed.comments().as_single_threaded()), + &transformed, + ); log::debug!("{}:\n{}", file.specifier, source); @@ -1165,6 +1168,33 @@ Deno.test("file:///main.ts$3-6.ts", async ()=>{ media_type: MediaType::TypeScript, }], }, + // https://github.com/denoland/deno/issues/26728 + Test { + input: Input { + source: r#" +/** + * ```ts + * // @ts-expect-error: can only add numbers + * add('1', '2'); + * ``` + */ +export function add(first: number, second: number) { + return first + second; +} +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { add } from "file:///main.ts"; +Deno.test("file:///main.ts$3-7.ts", async ()=>{ + // @ts-expect-error: can only add numbers + add('1', '2'); +}); +"#, + specifier: "file:///main.ts$3-7.ts", + media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { @@ -1376,6 +1406,31 @@ console.log(Foo); media_type: MediaType::TypeScript, }], }, + // https://github.com/denoland/deno/issues/26728 + Test { + input: Input { + source: r#" +/** + * ```ts + * // @ts-expect-error: can only add numbers + * add('1', '2'); + * ``` + */ +export function add(first: number, second: number) { + return first + second; +} +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { add } from "file:///main.ts"; +// @ts-expect-error: can only add numbers +add('1', '2'); +"#, + specifier: "file:///main.ts$3-7.ts", + media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { diff --git a/tests/specs/test/doc_ts_expect_error/__test__.jsonc b/tests/specs/test/doc_ts_expect_error/__test__.jsonc new file mode 100644 index 0000000000..ba64887a32 --- /dev/null +++ b/tests/specs/test/doc_ts_expect_error/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "test --doc mod.ts", + "exitCode": 0, + "output": "mod.out" +} diff --git a/tests/specs/test/doc_ts_expect_error/mod.out b/tests/specs/test/doc_ts_expect_error/mod.out new file mode 100644 index 0000000000..d464d13d71 --- /dev/null +++ b/tests/specs/test/doc_ts_expect_error/mod.out @@ -0,0 +1,8 @@ +Check [WILDCARD]/mod.ts +Check [WILDCARD]/mod.ts$2-10.ts +running 0 tests from ./mod.ts +running 1 test from ./mod.ts$2-10.ts +[WILDCARD]/mod.ts$2-10.ts ... 
ok ([WILDCARD]ms) + +ok | 1 passed | 0 failed ([WILDCARD]ms) + diff --git a/tests/specs/test/doc_ts_expect_error/mod.ts b/tests/specs/test/doc_ts_expect_error/mod.ts new file mode 100644 index 0000000000..eeace602a9 --- /dev/null +++ b/tests/specs/test/doc_ts_expect_error/mod.ts @@ -0,0 +1,13 @@ +/** + * ```ts + * import { add } from "./mod.ts"; + * + * add(1, 2); + * + * // @ts-expect-error: can only add numbers + * add('1', '2'); + * ``` + */ +export function add(first: number, second: number) { + return first + second; +} diff --git a/tests/specs/test/markdown_ts_expect_error/__test__.jsonc b/tests/specs/test/markdown_ts_expect_error/__test__.jsonc new file mode 100644 index 0000000000..ad73f6df6f --- /dev/null +++ b/tests/specs/test/markdown_ts_expect_error/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "test --doc main.md", + "exitCode": 0, + "output": "main.out" +} diff --git a/tests/specs/test/markdown_ts_expect_error/main.md b/tests/specs/test/markdown_ts_expect_error/main.md new file mode 100644 index 0000000000..9be70e919c --- /dev/null +++ b/tests/specs/test/markdown_ts_expect_error/main.md @@ -0,0 +1,8 @@ +# Documentation + +This test case checks if `@ts-expect-error` comment works as expected. + +```ts +// @ts-expect-error +const a: string = 42; +``` diff --git a/tests/specs/test/markdown_ts_expect_error/main.out b/tests/specs/test/markdown_ts_expect_error/main.out new file mode 100644 index 0000000000..65990cd3f0 --- /dev/null +++ b/tests/specs/test/markdown_ts_expect_error/main.out @@ -0,0 +1,6 @@ +Check [WILDCARD]/main.md$5-9.ts +running 1 test from ./main.md$5-9.ts +[WILDCARD]/main.md$5-9.ts ... ok ([WILDCARD]ms) + +ok | 1 passed | 0 failed ([WILDCARD]ms) +