// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.

// This module is the entry point for the "compiler" isolate, i.e. the one
// that is created when Deno needs to compile TS/WASM to JS.
//
// It provides two functions that should be called by Rust:
//  - `startup`
// This function must be called when creating the isolate
// to properly set up the runtime.
//  - `tsCompilerOnMessage`
// This function must be called when sending a request
// to the compiler.

// Removes the `__proto__` for security reasons. This intentionally makes
// Deno non-compliant with ECMA-262 Annex B.2.2.1
//
delete Object.prototype.__proto__;

((window) => {
  const core = window.Deno.core;

  let logDebug = false;
  let logSource = "JS";

  /** Instructs the host to behave in a legacy fashion, with the legacy
   * pipeline for handling code. Setting the value to `false` will cause the
   * host to behave in the modern way. */
  let legacy = true;
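  // Minimal logging helpers for the compiler isolate: `debug()` prints via
  // `core.print` only after debug logging has been enabled with
  // `setLogDebug()`.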
  function setLogDebug(debug, source) {
    logDebug = debug;
    if (source) {
      logSource = source;
    }
  }

  function debug(...args) {
    if (logDebug) {
      const stringifiedArgs = args.map((arg) => JSON.stringify(arg)).join(" ");
      core.print(`DEBUG ${logSource} - ${stringifiedArgs}\n`);
    }
  }

  class AssertionError extends Error {
    constructor(msg) {
      super(msg);
      this.name = "AssertionError";
    }
  }

  function assert(cond, msg = "Assertion failed.") {
    if (!cond) {
      throw new AssertionError(msg);
    }
  }

  /** @type {Map<string, ts.SourceFile>} */
  const sourceFileCache = new Map();

  /**
   * @param {import("../dts/typescript").DiagnosticRelatedInformation} diagnostic
   */
  function fromRelatedInformation({
    start,
    length,
    file,
    messageText: msgText,
    ...ri
  }) {
    let messageText;
    let messageChain;
    if (typeof msgText === "object") {
      messageChain = msgText;
    } else {
      messageText = msgText;
    }
    if (start !== undefined && length !== undefined && file) {
      const startPos = file.getLineAndCharacterOfPosition(start);
      const sourceLine = file.getFullText().split("\n")[startPos.line];
      const fileName = file.fileName;
      return {
        start: startPos,
        end: file.getLineAndCharacterOfPosition(start + length),
        fileName,
        messageChain,
        messageText,
        sourceLine,
        ...ri,
      };
    } else {
      return {
        messageChain,
        messageText,
        ...ri,
      };
    }
  }
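  // Converts an array of TypeScript diagnostics into plain objects that can
  // be serialized to JSON and handed back to Rust.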
  /**
   * @param {import("../dts/typescript").Diagnostic[]} diagnostics
   */
  function fromTypeScriptDiagnostic(diagnostics) {
    return diagnostics.map(({ relatedInformation: ri, source, ...diag }) => {
      const value = fromRelatedInformation(diag);
      value.relatedInformation = ri
        ? ri.map(fromRelatedInformation)
        : undefined;
      value.source = source;
      return value;
    });
  }

  // We really don't want to depend on JSON dispatch during snapshotting, so
  // this op exchanges strings with Rust as raw byte arrays.
  function getAsset(name) {
    const opId = core.ops()["op_fetch_asset"];
    const sourceCodeBytes = core.dispatch(opId, core.encode(name));
    return core.decode(sourceCodeBytes);
  }

  // Constants used by `normalizeString` and `resolvePath`
  const CHAR_DOT = 46; /* . */
  const CHAR_FORWARD_SLASH = 47; /* / */

  // Using incremental compile APIs requires that all
  // paths must be either relative or absolute. Since
  // analysis in Rust operates on fully resolved URLs,
  // it makes sense to use the same scheme here.
  const ASSETS = "asset:///";
  const OUT_DIR = "deno://";
  const CACHE = "cache:///";

  // This constant is passed to compiler settings when
  // doing incremental compiles. Contents of this
  // file are passed back to Rust and saved to $DENO_DIR.
  const TS_BUILD_INFO = "cache:///tsbuildinfo.json";

  const DEFAULT_COMPILE_OPTIONS = {
    allowJs: false,
    allowNonTsExtensions: true,
    checkJs: false,
    esModuleInterop: true,
    jsx: ts.JsxEmit.React,
    module: ts.ModuleKind.ESNext,
    outDir: OUT_DIR,
    sourceMap: true,
    strict: true,
    removeComments: true,
    target: ts.ScriptTarget.ESNext,
  };

  const CompilerHostTarget = {
    Main: "main",
    Runtime: "runtime",
    Worker: "worker",
  };
  // Warning! The values in this enum are duplicated in `cli/msg.rs`
  // Update carefully!
  const MediaType = {
    0: "JavaScript",
    1: "JSX",
    2: "TypeScript",
    3: "Dts",
    4: "TSX",
    5: "Json",
    6: "Wasm",
    7: "BuildInfo",
    8: "Unknown",
    JavaScript: 0,
    JSX: 1,
    TypeScript: 2,
    Dts: 3,
    TSX: 4,
    Json: 5,
    Wasm: 6,
    BuildInfo: 7,
    Unknown: 8,
  };
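  // Maps a Deno media type (plus the file name, to distinguish `.d.ts` from
  // `.ts`) onto the `ts.Extension` value the TypeScript host expects.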
  function getExtension(fileName, mediaType) {
    switch (mediaType) {
      case MediaType.JavaScript:
        return ts.Extension.Js;
      case MediaType.JSX:
        return ts.Extension.Jsx;
      case MediaType.TypeScript:
        return fileName.endsWith(".d.ts") ? ts.Extension.Dts : ts.Extension.Ts;
      case MediaType.TSX:
        return ts.Extension.Tsx;
      case MediaType.Wasm:
        // Custom marker for Wasm type.
        return ts.Extension.Js;
      case MediaType.Unknown:
      default:
        throw TypeError(
          `Cannot resolve extension for "${fileName}" with mediaType "${
            MediaType[mediaType]
          }".`,
        );
    }
  }

  /** A global cache of module source files that have been loaded.
   * This cache will be rewritten to be populated on compiler startup
   * with files provided from Rust in the request message.
   */
  const SOURCE_FILE_CACHE = new Map();
  /** A map of maps which cache the resolved specifier for each import in a
   * file. This cache is used so the `resolveModuleNames` op is called as few
   * times as possible.
   *
   * First map's key is "referrer" URL ("file:///a/b/c/mod.ts")
   * Second map's key is "raw" import specifier ("./foo.ts")
   * Second map's value is resolved import URL ("file:///a/b/c/foo.ts")
   */
  const RESOLVED_SPECIFIER_CACHE = new Map();

  function parseCompilerOptions(compilerOptions) {
    const { options, errors } = ts.convertCompilerOptionsFromJson(
      compilerOptions,
      "",
      "tsconfig.json",
    );
    return {
      options,
      diagnostics: errors.length ? errors : undefined,
    };
  }
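  // Legacy wrapper around a module source provided by Rust; instances are
  // kept in SOURCE_FILE_CACHE and lazily converted into `ts.SourceFile`
  // objects by the host.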
  class SourceFile {
    constructor(json) {
      this.processed = false;
      Object.assign(this, json);
      this.extension = getExtension(this.url, this.mediaType);
    }

    static addToCache(json) {
      if (SOURCE_FILE_CACHE.has(json.url)) {
        throw new TypeError("SourceFile already exists");
      }
      const sf = new SourceFile(json);
      SOURCE_FILE_CACHE.set(sf.url, sf);
      return sf;
    }

    static getCached(url) {
      return SOURCE_FILE_CACHE.get(url);
    }

    static cacheResolvedUrl(resolvedUrl, rawModuleSpecifier, containingFile) {
      containingFile = containingFile || "";
      let innerCache = RESOLVED_SPECIFIER_CACHE.get(containingFile);
      if (!innerCache) {
        innerCache = new Map();
        RESOLVED_SPECIFIER_CACHE.set(containingFile, innerCache);
      }
      innerCache.set(rawModuleSpecifier, resolvedUrl);
    }

    static getResolvedUrl(moduleSpecifier, containingFile) {
      const containingCache = RESOLVED_SPECIFIER_CACHE.get(containingFile);
      if (containingCache) {
        return containingCache.get(moduleSpecifier);
      }
      return undefined;
    }
  }

  function getAssetInternal(filename) {
    const lastSegment = filename.split("/").pop();
    const url = ts.libMap.has(lastSegment)
      ? ts.libMap.get(lastSegment)
      : lastSegment;
    const sourceFile = SourceFile.getCached(url);
    if (sourceFile) {
      return sourceFile;
    }
    const name = url.includes(".") ? url : `${url}.d.ts`;
    const sourceCode = getAsset(name);
    return SourceFile.addToCache({
      url,
      filename: `${ASSETS}/${name}`,
      mediaType: MediaType.TypeScript,
      versionHash: "1",
      sourceCode,
    });
  }

  /** There was some private state in the legacy host, that is moved out to
   * here which can then be refactored out later. */
  const legacyHostState = {
    buildInfo: "",
    target: CompilerHostTarget.Main,
    writeFile: (_fileName, _data, _sourceFiles) => {},
  };
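  // A single CompilerHost implementation serves both pipelines: when `legacy`
  // is true the methods below read from the JS-side caches populated by the
  // legacy requests, otherwise they call into Rust through ops such as
  // "op_load", "op_emit" and "op_resolve".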
  /** @type {import("../dts/typescript").CompilerHost} */
  const host = {
    fileExists(fileName) {
      debug(`host.fileExists("${fileName}")`);
      return false;
    },
    readFile(specifier) {
      debug(`host.readFile("${specifier}")`);
      if (legacy) {
        if (specifier == TS_BUILD_INFO) {
          return legacyHostState.buildInfo;
        }
        return unreachable();
      } else {
        return core.jsonOpSync("op_load", { specifier }).data;
      }
    },
    getSourceFile(
      specifier,
      languageVersion,
      onError,
      shouldCreateNewSourceFile,
    ) {
      debug(
        `host.getSourceFile("${specifier}", ${
          ts.ScriptTarget[languageVersion]
        })`,
      );
      if (legacy) {
        try {
          assert(!shouldCreateNewSourceFile);
          const sourceFile = specifier.startsWith(ASSETS)
            ? getAssetInternal(specifier)
            : SourceFile.getCached(specifier);
          assert(sourceFile != null);
          if (!sourceFile.tsSourceFile) {
            assert(sourceFile.sourceCode != null);
            const tsSourceFileName = specifier.startsWith(ASSETS)
              ? sourceFile.filename
              : specifier;

            sourceFile.tsSourceFile = ts.createSourceFile(
              tsSourceFileName,
              sourceFile.sourceCode,
              languageVersion,
            );
            sourceFile.tsSourceFile.version = sourceFile.versionHash;
            delete sourceFile.sourceCode;

            // This code is to support transition from the "legacy" compiler
            // to the new one, by populating the new source file cache.
            if (
              !sourceFileCache.has(specifier) && specifier.startsWith(ASSETS)
            ) {
              sourceFileCache.set(specifier, sourceFile.tsSourceFile);
            }
          }
          return sourceFile.tsSourceFile;
        } catch (e) {
          if (onError) {
            onError(String(e));
          } else {
            throw e;
          }
          return undefined;
        }
      } else {
        let sourceFile = sourceFileCache.get(specifier);
        if (sourceFile) {
          return sourceFile;
        }

        /** @type {{ data: string; hash: string; }} */
        const { data, hash } = core.jsonOpSync(
          "op_load",
          { specifier },
        );
        assert(data, `"data" is unexpectedly null for "${specifier}".`);
        sourceFile = ts.createSourceFile(
          specifier,
          data,
          languageVersion,
        );
        sourceFile.moduleName = specifier;
        sourceFile.version = hash;
        sourceFileCache.set(specifier, sourceFile);
        return sourceFile;
      }
    },
    getDefaultLibFileName() {
      if (legacy) {
        switch (legacyHostState.target) {
          case CompilerHostTarget.Main:
          case CompilerHostTarget.Runtime:
            return `${ASSETS}/lib.deno.window.d.ts`;
          case CompilerHostTarget.Worker:
            return `${ASSETS}/lib.deno.worker.d.ts`;
        }
      } else {
        return `${ASSETS}/lib.esnext.d.ts`;
      }
    },
    getDefaultLibLocation() {
      return ASSETS;
    },
    writeFile(fileName, data, _writeByteOrderMark, _onError, sourceFiles) {
      debug(`host.writeFile("${fileName}")`);
      if (legacy) {
        legacyHostState.writeFile(fileName, data, sourceFiles);
      } else {
        let maybeSpecifiers;
        if (sourceFiles) {
          maybeSpecifiers = sourceFiles.map((sf) => sf.moduleName);
          debug(`  specifiers: ${maybeSpecifiers.join(", ")}`);
        }
        return core.jsonOpSync(
          "op_emit",
          { maybeSpecifiers, fileName, data },
        );
      }
    },
    getCurrentDirectory() {
      return CACHE;
    },
    getCanonicalFileName(fileName) {
      return fileName;
    },
    useCaseSensitiveFileNames() {
      return true;
    },
    getNewLine() {
      return "\n";
    },
    resolveModuleNames(specifiers, base) {
      debug(`host.resolveModuleNames()`);
      debug(`  base: ${base}`);
      debug(`  specifiers: ${specifiers.join(", ")}`);
      if (legacy) {
        const resolved = specifiers.map((specifier) => {
          const maybeUrl = SourceFile.getResolvedUrl(specifier, base);

          debug("compiler::host.resolveModuleNames maybeUrl", {
            specifier,
            maybeUrl,
          });

          let sourceFile = undefined;

          if (specifier.startsWith(ASSETS)) {
            sourceFile = getAssetInternal(specifier);
          } else if (typeof maybeUrl !== "undefined") {
            sourceFile = SourceFile.getCached(maybeUrl);
          }

          if (!sourceFile) {
            return undefined;
          }

          return {
            resolvedFileName: sourceFile.url,
            isExternalLibraryImport: specifier.startsWith(ASSETS),
            extension: sourceFile.extension,
          };
        });
        debug(resolved);
        return resolved;
      } else {
        /** @type {Array<[string, import("../dts/typescript").Extension]>} */
        const resolved = core.jsonOpSync("op_resolve", {
          specifiers,
          base,
        });
        return resolved.map(([resolvedFileName, extension]) => ({
          resolvedFileName,
          extension,
          isExternalLibraryImport: false,
        }));
      }
    },
    createHash(data) {
      return core.jsonOpSync("op_create_hash", { data }).hash;
    },
  };
  // This is a hacky way of adding our libs to the libs available in TypeScript,
  // as these are internal APIs of TypeScript which maintain valid libs
  ts.libs.push("deno.ns", "deno.window", "deno.worker", "deno.shared_globals");
  ts.libMap.set("deno.ns", "lib.deno.ns.d.ts");
  ts.libMap.set("deno.web", "lib.deno.web.d.ts");
  ts.libMap.set("deno.fetch", "lib.deno.fetch.d.ts");
  ts.libMap.set("deno.window", "lib.deno.window.d.ts");
  ts.libMap.set("deno.worker", "lib.deno.worker.d.ts");
  ts.libMap.set("deno.shared_globals", "lib.deno.shared_globals.d.ts");
  ts.libMap.set("deno.unstable", "lib.deno.unstable.d.ts");

  // this pre-populates the cache at snapshot time with our library files, so
  // they are available in the future when needed.
  host.getSourceFile(
    `${ASSETS}lib.deno.ns.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.web.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.fetch.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.window.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.worker.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.shared_globals.d.ts`,
    ts.ScriptTarget.ESNext,
  );
  host.getSourceFile(
    `${ASSETS}lib.deno.unstable.d.ts`,
    ts.ScriptTarget.ESNext,
  );

  // We never use this program; it's only created
  // during snapshotting to hydrate and populate
  // source file cache with lib declaration files.
  const _TS_SNAPSHOT_PROGRAM = ts.createProgram({
    rootNames: [`${ASSETS}bootstrap.ts`],
    options: DEFAULT_COMPILE_OPTIONS,
    host,
  });

  // These assets are only loaded during the snapshotting process.
  const SYSTEM_LOADER = getAsset("system_loader.js");
  const SYSTEM_LOADER_ES5 = getAsset("system_loader_es5.js");
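  // Fills the legacy caches (SOURCE_FILE_CACHE / RESOLVED_SPECIFIER_CACHE)
  // from the source file map sent by Rust for a runtime compile request,
  // stripping the virtual `memory://` prefix from resolved specifiers.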
  function buildLocalSourceFileCache(sourceFileMap) {
    for (const entry of Object.values(sourceFileMap)) {
      assert(entry.sourceCode.length > 0);
      SourceFile.addToCache({
        url: entry.url,
        filename: entry.url,
        mediaType: entry.mediaType,
        sourceCode: entry.sourceCode,
        versionHash: entry.versionHash,
      });

      for (const importDesc of entry.imports) {
        let mappedUrl = importDesc.resolvedSpecifier;
        const importedFile = sourceFileMap[importDesc.resolvedSpecifier];
        assert(importedFile);
        const isJsOrJsx = importedFile.mediaType === MediaType.JavaScript ||
          importedFile.mediaType === MediaType.JSX;
        // If JS or JSX perform substitution for types if available
        if (isJsOrJsx) {
          // @deno-types has highest precedence, followed by
          // X-TypeScript-Types header
          if (importDesc.resolvedTypeDirective) {
            mappedUrl = importDesc.resolvedTypeDirective;
          } else if (importedFile.typeHeaders.length > 0) {
            const typeHeaders = importedFile.typeHeaders[0];
            mappedUrl = typeHeaders.resolvedSpecifier;
          } else if (importedFile.typesDirectives.length > 0) {
            const typeDirective = importedFile.typesDirectives[0];
            mappedUrl = typeDirective.resolvedSpecifier;
          }
        }

        mappedUrl = mappedUrl.replace("memory://", "");
        SourceFile.cacheResolvedUrl(mappedUrl, importDesc.specifier, entry.url);
      }
      for (const fileRef of entry.referencedFiles) {
        SourceFile.cacheResolvedUrl(
          fileRef.resolvedSpecifier.replace("memory://", ""),
          fileRef.specifier,
          entry.url,
        );
      }
      for (const fileRef of entry.libDirectives) {
        SourceFile.cacheResolvedUrl(
          fileRef.resolvedSpecifier.replace("memory://", ""),
          fileRef.specifier,
          entry.url,
        );
      }
    }
  }
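  // Same as `buildLocalSourceFileCache()`, but for sources fetched by the CLI
  // (local files and remote modules), so it additionally follows the
  // `redirect` field produced by HTTP redirects.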
  function buildSourceFileCache(sourceFileMap) {
    for (const entry of Object.values(sourceFileMap)) {
      SourceFile.addToCache({
        url: entry.url,
        filename: entry.url,
        mediaType: entry.mediaType,
        sourceCode: entry.sourceCode,
        versionHash: entry.versionHash,
      });

      for (const importDesc of entry.imports) {
        let mappedUrl = importDesc.resolvedSpecifier;
        const importedFile = sourceFileMap[importDesc.resolvedSpecifier];
        // IMPORTANT: due to HTTP redirects we might end up in a situation
        // where the URL points to a file with a completely different URL.
        // In that case we take the value of the `redirect` field and cache
        // the resolved specifier pointing to the value of the redirect.
        // It's not a very elegant solution and should be rethought.
        assert(importedFile);
        if (importedFile.redirect) {
          mappedUrl = importedFile.redirect;
        }
        const isJsOrJsx = importedFile.mediaType === MediaType.JavaScript ||
          importedFile.mediaType === MediaType.JSX;
        // If JS or JSX perform substitution for types if available
        if (isJsOrJsx) {
          // @deno-types has highest precedence, followed by
          // X-TypeScript-Types header
          if (importDesc.resolvedTypeDirective) {
            mappedUrl = importDesc.resolvedTypeDirective;
          } else if (importedFile.typeHeaders.length > 0) {
            const typeHeaders = importedFile.typeHeaders[0];
            mappedUrl = typeHeaders.resolvedSpecifier;
          } else if (importedFile.typesDirectives.length > 0) {
            const typeDirective = importedFile.typesDirectives[0];
            mappedUrl = typeDirective.resolvedSpecifier;
          }
        }

        SourceFile.cacheResolvedUrl(mappedUrl, importDesc.specifier, entry.url);
      }
      for (const fileRef of entry.referencedFiles) {
        SourceFile.cacheResolvedUrl(
          fileRef.resolvedSpecifier,
          fileRef.specifier,
          entry.url,
        );
      }
      for (const fileRef of entry.libDirectives) {
        SourceFile.cacheResolvedUrl(
          fileRef.resolvedSpecifier,
          fileRef.specifier,
          entry.url,
        );
      }
    }
  }

  // Warning! The values in this enum are duplicated in `cli/msg.rs`
  // Update carefully!
  const CompilerRequestType = {
    Compile: 0,
    Bundle: 1,
    RuntimeCompile: 2,
    RuntimeBundle: 3,
    RuntimeTranspile: 4,
  };
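  // Factories for the legacy host's `writeFile` callback; each closure
  // captures the per-request `state` object and records emitted output there
  // (the built bundle, the incremental build info, or the per-file emit map).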
  function createBundleWriteFile(state) {
    return function writeFile(_fileName, data, sourceFiles) {
      assert(sourceFiles != null);
      assert(state.options);
      // we only support single root names for bundles
      assert(state.rootNames.length === 1);
      state.bundleOutput = buildBundle(
        state.rootNames[0],
        data,
        sourceFiles,
        state.options.target ?? ts.ScriptTarget.ESNext,
      );
    };
  }

  function createCompileWriteFile(state) {
    return function writeFile(fileName, data, sourceFiles) {
      const isBuildInfo = fileName === TS_BUILD_INFO;

      if (isBuildInfo) {
        assert(isBuildInfo);
        state.buildInfo = data;
        return;
      }

      assert(sourceFiles);
      assert(sourceFiles.length === 1);
      state.emitMap[fileName] = {
        filename: sourceFiles[0].fileName,
        contents: data,
      };
    };
  }

  function createRuntimeCompileWriteFile(state) {
    return function writeFile(fileName, data, sourceFiles) {
      assert(sourceFiles);
      assert(sourceFiles.length === 1);
      state.emitMap[fileName] = {
        filename: sourceFiles[0].fileName,
        contents: data,
      };
    };
  }

  const IGNORED_DIAGNOSTICS = [
    // TS2306: File 'file:///Users/rld/src/deno/cli/tests/subdir/amd_like.js' is
    // not a module.
    2306,
    // TS1375: 'await' expressions are only allowed at the top level of a file
    // when that file is a module, but this file has no imports or exports.
    // Consider adding an empty 'export {}' to make this file a module.
    1375,
    // TS1103: 'for-await-of' statement is only allowed within an async function
    // or async generator.
    1103,
    // TS2691: An import path cannot end with a '.ts' extension. Consider
    // importing 'bad-module' instead.
    2691,
    // TS5009: Cannot find the common subdirectory path for the input files.
    5009,
    // TS5055: Cannot write file
    // 'http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js'
    // because it would overwrite input file.
    5055,
    // TypeScript is overly opinionated that only CommonJS modules kinds can
    // support JSON imports. Allegedly this was fixed in
    // Microsoft/TypeScript#26825 but that doesn't seem to be working here,
    // so we will ignore complaints about this compiler setting.
    5070,
    // TS7016: Could not find a declaration file for module '...'. '...'
    // implicitly has an 'any' type. This is due to `allowJs` being off by
    // default but importing of a JavaScript module.
    7016,
  ];

  const IGNORED_COMPILE_DIAGNOSTICS = [
    // TS1208: All files must be modules when the '--isolatedModules' flag is
    // provided. We can ignore because we guarantee that all files are
    // modules.
    1208,
  ];

  /** @type {Array<{ key: string, value: number }>} */
  const stats = [];
  let statsStart = 0;
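  // Simple compile-time statistics: `performanceStart()` resets the
  // collection, `performanceProgram()` records program/type-checker counters
  // and phase durations, and `performanceEnd()` adds the total wall-clock
  // time and returns the accumulated entries.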
  function performanceStart() {
    stats.length = 0;
    // TODO(kitsonk) replace with performance.mark() when landed
    statsStart = new Date();
    ts.performance.enable();
  }

  function performanceProgram({ program, fileCount }) {
    if (program) {
      if ("getProgram" in program) {
        program = program.getProgram();
      }
      stats.push({ key: "Files", value: program.getSourceFiles().length });
      stats.push({ key: "Nodes", value: program.getNodeCount() });
      stats.push({ key: "Identifiers", value: program.getIdentifierCount() });
      stats.push({ key: "Symbols", value: program.getSymbolCount() });
      stats.push({ key: "Types", value: program.getTypeCount() });
      stats.push({
        key: "Instantiations",
        value: program.getInstantiationCount(),
      });
    } else if (fileCount != null) {
      stats.push({ key: "Files", value: fileCount });
    }
    const programTime = ts.performance.getDuration("Program");
    const bindTime = ts.performance.getDuration("Bind");
    const checkTime = ts.performance.getDuration("Check");
    const emitTime = ts.performance.getDuration("Emit");
    stats.push({ key: "Parse time", value: programTime });
    stats.push({ key: "Bind time", value: bindTime });
    stats.push({ key: "Check time", value: checkTime });
    stats.push({ key: "Emit time", value: emitTime });
    stats.push({
      key: "Total TS time",
      value: programTime + bindTime + checkTime + emitTime,
    });
  }

  function performanceEnd() {
    const duration = new Date() - statsStart;
    stats.push({ key: "Compile time", value: duration });
    return stats;
  }
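  // Resolves "." and ".." segments out of a path-like string, e.g.
  // normalizeString("a/b/../c") returns "a/c".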
  function normalizeString(path) {
    let res = "";
    let lastSegmentLength = 0;
    let lastSlash = -1;
    let dots = 0;
    let code;
    for (let i = 0, len = path.length; i <= len; ++i) {
      if (i < len) code = path.charCodeAt(i);
      else if (code === CHAR_FORWARD_SLASH) break;
      else code = CHAR_FORWARD_SLASH;

      if (code === CHAR_FORWARD_SLASH) {
        if (lastSlash === i - 1 || dots === 1) {
          // NOOP
        } else if (lastSlash !== i - 1 && dots === 2) {
          if (
            res.length < 2 ||
            lastSegmentLength !== 2 ||
            res.charCodeAt(res.length - 1) !== CHAR_DOT ||
            res.charCodeAt(res.length - 2) !== CHAR_DOT
          ) {
            if (res.length > 2) {
              const lastSlashIndex = res.lastIndexOf("/");
              if (lastSlashIndex === -1) {
                res = "";
                lastSegmentLength = 0;
              } else {
                res = res.slice(0, lastSlashIndex);
                lastSegmentLength = res.length - 1 - res.lastIndexOf("/");
              }
              lastSlash = i;
              dots = 0;
              continue;
            } else if (res.length === 2 || res.length === 1) {
              res = "";
              lastSegmentLength = 0;
              lastSlash = i;
              dots = 0;
              continue;
            }
          }
        } else {
          if (res.length > 0) res += "/" + path.slice(lastSlash + 1, i);
          else res = path.slice(lastSlash + 1, i);
          lastSegmentLength = i - lastSlash - 1;
        }
        lastSlash = i;
        dots = 0;
      } else if (code === CHAR_DOT && dots !== -1) {
        ++dots;
      } else {
        dots = -1;
      }
    }
    return res;
  }
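  // Returns the longest common directory prefix of the given paths, e.g.
  // commonPath(["file:///a/b/c.ts", "file:///a/b/d.ts"]) returns
  // "file:///a/b/".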
  function commonPath(paths, sep = "/") {
    const [first = "", ...remaining] = paths;
    if (first === "" || remaining.length === 0) {
      return first.substring(0, first.lastIndexOf(sep) + 1);
    }
    const parts = first.split(sep);

    let endOfPrefix = parts.length;
    for (const path of remaining) {
      const compare = path.split(sep);
      for (let i = 0; i < endOfPrefix; i++) {
        if (compare[i] !== parts[i]) {
          endOfPrefix = i;
        }
      }

      if (endOfPrefix === 0) {
        return "";
      }
    }
    const prefix = parts.slice(0, endOfPrefix).join(sep);
    return prefix.endsWith(sep) ? prefix : `${prefix}${sep}`;
  }

  let rootExports;
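  // Normalizes the path portion of a URL-like root name, e.g.
  // normalizeUrl("https://example.com/a/b/../mod.ts") returns
  // "https://example.com/a/mod.ts".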
  function normalizeUrl(rootName) {
    const match = /^(\S+:\/{2,3})(.+)$/.exec(rootName);
    if (match) {
      const [, protocol, path] = match;
      return `${protocol}${normalizeString(path)}`;
    } else {
      return rootName;
    }
  }

  function buildBundle(rootName, data, sourceFiles, target) {
    // when outputting to AMD and a single outfile, TypeScript makes up the module
    // specifiers which are used to define the modules, and doesn't expose them
    // publicly, so we have to try to replicate that behavior here
    const sources = sourceFiles.map((sf) => sf.fileName);
    const sharedPath = commonPath(sources);
    rootName = normalizeUrl(rootName)
      .replace(sharedPath, "")
      .replace(/\.\w+$/i, "");
    // If one of the modules requires support for top-level-await, TypeScript will
    // emit the execute function as an async function. When this is the case we
    // need to bubble up the TLA to the instantiation, otherwise we instantiate
    // synchronously.
    const hasTla = data.match(/execute:\sasync\sfunction\s/);
    let instantiate;
    if (rootExports && rootExports.length) {
      instantiate = hasTla
        ? `const __exp = await __instantiate("${rootName}", true);\n`
        : `const __exp = __instantiate("${rootName}", false);\n`;
      for (const rootExport of rootExports) {
        if (rootExport === "default") {
          instantiate += `export default __exp["${rootExport}"];\n`;
        } else {
          instantiate +=
            `export const ${rootExport} = __exp["${rootExport}"];\n`;
        }
      }
    } else {
      instantiate = hasTla
        ? `await __instantiate("${rootName}", true);\n`
        : `__instantiate("${rootName}", false);\n`;
    }
    const es5Bundle = target === ts.ScriptTarget.ES3 ||
      target === ts.ScriptTarget.ES5 ||
      target === ts.ScriptTarget.ES2015 ||
      target === ts.ScriptTarget.ES2016;
    return `${
      es5Bundle ? SYSTEM_LOADER_ES5 : SYSTEM_LOADER
    }\n${data}\n${instantiate}`;
  }

  function setRootExports(program, rootModule) {
    // get a reference to the type checker, this will let us find symbols from
    // the AST.
    const checker = program.getTypeChecker();
    // get a reference to the main source file for the bundle
    const mainSourceFile = program.getSourceFile(rootModule);
    assert(mainSourceFile);
    // retrieve the internal TypeScript symbol for this AST node
    const mainSymbol = checker.getSymbolAtLocation(mainSourceFile);
    if (!mainSymbol) {
      return;
    }
    rootExports = checker
      .getExportsOfModule(mainSymbol)
      // .getExportsOfModule includes type only symbols which are exported from
      // the module, so we need to try to filter those out. While not critical
      // someone looking at the bundle would think there is runtime code behind
      // that when there isn't. There appears to be no clean way of figuring that
      // out, so inspecting SymbolFlags that might be present that are type only
      .filter(
        (sym) =>
          sym.flags & ts.SymbolFlags.Class ||
          !(
            sym.flags & ts.SymbolFlags.Interface ||
            sym.flags & ts.SymbolFlags.TypeLiteral ||
            sym.flags & ts.SymbolFlags.Signature ||
            sym.flags & ts.SymbolFlags.TypeParameter ||
            sym.flags & ts.SymbolFlags.TypeAlias ||
            sym.flags & ts.SymbolFlags.Type ||
            sym.flags & ts.SymbolFlags.Namespace ||
            sym.flags & ts.SymbolFlags.InterfaceExcludes ||
            sym.flags & ts.SymbolFlags.TypeParameterExcludes ||
            sym.flags & ts.SymbolFlags.TypeAliasExcludes
          ),
      )
      .map((sym) => sym.getName());
  }
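  // Handles a `CompilerRequestType.Compile` request: parses the compiler
  // options, primes the legacy caches from `sourceFileMap`, runs an
  // incremental program and returns the emitted files, build info,
  // diagnostics and (optionally) performance stats.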
  function compile({
    buildInfo,
    compilerOptions,
    rootNames,
    target,
    sourceFileMap,
    type,
    performance,
  }) {
    if (performance) {
      performanceStart();
    }
    debug(">>> compile start", { rootNames, type: CompilerRequestType[type] });

    // When a program is emitted, TypeScript will call `writeFile` with
    // each file that needs to be emitted. The Deno compiler host delegates
    // this, to make it easier to perform the right actions, which vary
    // based a lot on the request.
    const state = {
      rootNames,
      emitMap: {},
    };

    let diagnostics = [];

    const { options, diagnostics: diags } = parseCompilerOptions(
      compilerOptions,
    );

    diagnostics = diags.filter(
      ({ code }) => code != 5023 && !IGNORED_DIAGNOSTICS.includes(code),
    );

    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js, compiler_js_error.ts)
    options.allowNonTsExtensions = true;

    legacyHostState.target = target;
    legacyHostState.writeFile = createCompileWriteFile(state);
    legacyHostState.buildInfo = buildInfo;

    buildSourceFileCache(sourceFileMap);
    // if there was a configuration and no diagnostics with it, we will continue
    // to generate the program and possibly emit it.
    if (diagnostics.length === 0) {
      const program = ts.createIncrementalProgram({
        rootNames,
        options,
        host,
      });

      // TODO(bartlomieju): check if this is ok
      diagnostics = [
        ...program.getConfigFileParsingDiagnostics(),
        ...program.getSyntacticDiagnostics(),
        ...program.getOptionsDiagnostics(),
        ...program.getGlobalDiagnostics(),
        ...program.getSemanticDiagnostics(),
      ];
      diagnostics = diagnostics.filter(
        ({ code }) =>
          !IGNORED_DIAGNOSTICS.includes(code) &&
          !IGNORED_COMPILE_DIAGNOSTICS.includes(code),
      );

      // We will only proceed with the emit if there are no diagnostics.
      if (diagnostics.length === 0) {
        const emitResult = program.emit();
        // If `checkJs` is off we still might be compiling entry point JavaScript file
        // (if it has `.ts` imports), but it won't be emitted. In that case we skip
        // assertion.
        if (options.checkJs) {
          assert(
            emitResult.emitSkipped === false,
            "Unexpected skip of the emit.",
          );
        }
        // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
        // without casting.
        diagnostics = emitResult.diagnostics;
      }
      performanceProgram({ program });
    }

    debug("<<< compile end", { rootNames, type: CompilerRequestType[type] });
    const stats = performance ? performanceEnd() : undefined;

    return {
      emitMap: state.emitMap,
      buildInfo: state.buildInfo,
      diagnostics: fromTypeScriptDiagnostic(diagnostics),
      stats,
    };
  }
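  // Handles a `CompilerRequestType.Bundle` request: like `compile()`, but it
  // creates a plain (non-incremental) program, captures the single-file
  // bundle via `createBundleWriteFile()` and returns the bundle text instead
  // of an emit map.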
  function bundle({
    compilerOptions,
    rootNames,
    target,
    sourceFileMap,
    type,
    performance,
  }) {
    if (performance) {
      performanceStart();
    }
    debug(">>> bundle start", {
      rootNames,
      type: CompilerRequestType[type],
    });

    // When a program is emitted, TypeScript will call `writeFile` with
    // each file that needs to be emitted. The Deno compiler host delegates
    // this, to make it easier to perform the right actions, which vary
    // based a lot on the request.
    const state = {
      rootNames,
      bundleOutput: undefined,
    };

    const { options, diagnostics: diags } = parseCompilerOptions(
      compilerOptions,
    );

    let diagnostics = diags.filter(
      ({ code }) => code != 5023 && !IGNORED_DIAGNOSTICS.includes(code),
    );

    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js)
    options.allowNonTsExtensions = true;

    legacyHostState.target = target;
    legacyHostState.writeFile = createBundleWriteFile(state);
    state.options = options;

    buildSourceFileCache(sourceFileMap);
    // if there was a configuration and no diagnostics with it, we will continue
    // to generate the program and possibly emit it.
    if (diagnostics.length === 0) {
      const program = ts.createProgram({
        rootNames,
        options,
        host,
      });

      diagnostics = ts
        .getPreEmitDiagnostics(program)
        .filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code));

      // We will only proceed with the emit if there are no diagnostics.
      if (diagnostics.length === 0) {
        // we only support a single root module when bundling
        assert(rootNames.length === 1);
        setRootExports(program, rootNames[0]);
        const emitResult = program.emit();
        assert(
          emitResult.emitSkipped === false,
          "Unexpected skip of the emit.",
        );
        // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
        // without casting.
        diagnostics = emitResult.diagnostics;
      }
      if (performance) {
        performanceProgram({ program });
      }
    }

    let bundleOutput;

    if (diagnostics.length === 0) {
      assert(state.bundleOutput);
      bundleOutput = state.bundleOutput;
    }

    const stats = performance ? performanceEnd() : undefined;

    const result = {
      bundleOutput,
      diagnostics: fromTypeScriptDiagnostic(diagnostics),
      stats,
    };

    debug("<<< bundle end", {
      rootNames,
      type: CompilerRequestType[type],
    });

    return result;
  }
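  // The following handlers back the runtime compiler API requests
  // (RuntimeCompile, RuntimeBundle and RuntimeTranspile). They work like
  // `compile()`/`bundle()` but take their sources from an in-memory map
  // rather than the module graph fetched by the CLI.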
  function runtimeCompile(request) {
    const { compilerOptions, rootNames, target, sourceFileMap } = request;

    debug(">>> runtime compile start", {
      rootNames,
    });

    // if there are options, convert them into TypeScript compiler options,
    // and resolve any external file references
    const result = parseCompilerOptions(
      compilerOptions,
    );
    const options = result.options;
    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js, compiler_js_error.ts)
    options.allowNonTsExtensions = true;

    buildLocalSourceFileCache(sourceFileMap);

    const state = {
      rootNames,
      emitMap: {},
    };
    legacyHostState.target = target;
    legacyHostState.writeFile = createRuntimeCompileWriteFile(state);
    const program = ts.createProgram({
      rootNames,
      options,
      host,
    });

    const diagnostics = ts
      .getPreEmitDiagnostics(program)
      .filter(({ code }) =>
        !IGNORED_DIAGNOSTICS.includes(code) &&
        !IGNORED_COMPILE_DIAGNOSTICS.includes(code)
      );

    const emitResult = program.emit();
    assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");

    debug("<<< runtime compile finish", {
      rootNames,
      emitMap: Object.keys(state.emitMap),
    });

    const maybeDiagnostics = diagnostics.length
      ? fromTypeScriptDiagnostic(diagnostics)
      : [];

    return {
      diagnostics: maybeDiagnostics,
      emitMap: state.emitMap,
    };
  }

  function runtimeBundle(request) {
    const { compilerOptions, rootNames, target, sourceFileMap } = request;

    debug(">>> runtime bundle start", {
      rootNames,
    });

    // if there are options, convert them into TypeScript compiler options,
    // and resolve any external file references
    const result = parseCompilerOptions(
      compilerOptions,
    );
    const options = result.options;
    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js, compiler_js_error.ts)
    options.allowNonTsExtensions = true;

    buildLocalSourceFileCache(sourceFileMap);

    const state = {
      rootNames,
      bundleOutput: undefined,
    };

    legacyHostState.target = target;
    legacyHostState.writeFile = createBundleWriteFile(state);
    state.options = options;

    const program = ts.createProgram({
      rootNames,
      options,
      host,
    });

    setRootExports(program, rootNames[0]);
    const diagnostics = ts
      .getPreEmitDiagnostics(program)
      .filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code));

    const emitResult = program.emit();

    assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");

    debug("<<< runtime bundle finish", {
      rootNames,
    });

    const maybeDiagnostics = diagnostics.length
      ? fromTypeScriptDiagnostic(diagnostics)
      : [];

    return {
      diagnostics: maybeDiagnostics,
      output: state.bundleOutput,
    };
  }

  function runtimeTranspile(request) {
    const result = {};
    const { sources, compilerOptions } = request;

    const parseResult = parseCompilerOptions(
      compilerOptions,
    );
    const options = parseResult.options;
    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js, compiler_js_error.ts)
    options.allowNonTsExtensions = true;

    for (const [fileName, inputText] of Object.entries(sources)) {
      const { outputText: source, sourceMapText: map } = ts.transpileModule(
        inputText,
        {
          fileName,
          compilerOptions: options,
        },
      );
      result[fileName] = { source, map };
    }
    return result;
  }

  function opCompilerRespond(msg) {
    core.jsonOpSync("op_compiler_respond", msg);
  }
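  // Legacy dispatch entry point: routes a request received from Rust to the
  // matching handler above and sends the result back through
  // `op_compiler_respond`.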
  function tsCompilerOnMessage(msg) {
    const request = msg.data;
    switch (request.type) {
      case CompilerRequestType.Compile: {
        const result = compile(request);
        opCompilerRespond(result);
        break;
      }
      case CompilerRequestType.Bundle: {
        const result = bundle(request);
        opCompilerRespond(result);
        break;
      }
      case CompilerRequestType.RuntimeCompile: {
        const result = runtimeCompile(request);
        opCompilerRespond(result);
        break;
      }
      case CompilerRequestType.RuntimeBundle: {
        const result = runtimeBundle(request);
        opCompilerRespond(result);
        break;
      }
      case CompilerRequestType.RuntimeTranspile: {
        const result = runtimeTranspile(request);
        opCompilerRespond(result);
        break;
      }
      default:
        throw new Error(
          `!!! unhandled CompilerRequestType: ${request.type} (${
            CompilerRequestType[request.type]
          })`,
        );
    }
  }

  /**
   * @typedef {object} Request
   * @property {Record<string, any>} config
   * @property {boolean} debug
   * @property {string[]} rootNames
   */
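  // Illustrative request (the concrete values are only an example):
  //   exec({ config: { checkJs: false }, debug: false,
  //          rootNames: ["file:///main.ts"] });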
  /** The API that is called by Rust when executing a request.
   * @param {Request} request
   */
  function exec({ config, debug: debugFlag, rootNames }) {
    setLogDebug(debugFlag, "TS");
    performanceStart();
    debug(">>> exec start", { rootNames });
    debug(config);

    const { options, errors: configFileParsingDiagnostics } = ts
      .convertCompilerOptionsFromJson(config, "", "tsconfig.json");
    const program = ts.createIncrementalProgram({
      rootNames,
      options,
      host,
      configFileParsingDiagnostics,
    });

    const { diagnostics: emitDiagnostics } = program.emit();

    const diagnostics = [
      ...program.getConfigFileParsingDiagnostics(),
      ...program.getSyntacticDiagnostics(),
      ...program.getOptionsDiagnostics(),
      ...program.getGlobalDiagnostics(),
      ...program.getSemanticDiagnostics(),
      ...emitDiagnostics,
    ].filter(({ code }) =>
      !IGNORED_DIAGNOSTICS.includes(code) &&
      !IGNORED_COMPILE_DIAGNOSTICS.includes(code)
    );
    performanceProgram({ program });

    // TODO(@kitsonk) when legacy stats are removed, convert to just tuples
    let stats = performanceEnd().map(({ key, value }) => [key, value]);
    core.jsonOpSync("op_respond", {
      diagnostics: fromTypeScriptDiagnostic(diagnostics),
      stats,
    });
    debug("<<< exec stop");
  }

  let hasStarted = false;

  /** Start up the runtime environment, setting various flags.
   * @param {{ debugFlag?: boolean; legacyFlag?: boolean; }} msg
   */
  function startup({ debugFlag = false, legacyFlag = true }) {
    if (hasStarted) {
      throw new Error("The compiler runtime already started.");
    }
    hasStarted = true;
    core.ops();
    core.registerErrorClass("Error", Error);
    setLogDebug(!!debugFlag, "TS");
    legacy = legacyFlag;
  }

  globalThis.startup = startup;
  globalThis.exec = exec;
  // TODO(@kitsonk) remove when converted from legacy tsc
  globalThis.tsCompilerOnMessage = tsCompilerOnMessage;
})(this);