
feat(unstable): add JS linting plugin infrastructure (#27416)

This PR extracts the core part of
https://github.com/denoland/deno/pull/27203 to make it easier to review
and land in parts.

It contains:
- The JS plugin code that deserializes and walks the buffer
- The Rust portion that serializes the SWC AST to the buffer format (a bunch
of nodes are still TODOs, but those can land later)
- Basic lint plugin types, without the AST node types, to make this PR
easier to review (a usage sketch follows below)
- Added more code comments to explain the format, etc.


More fixes and changes will be done in follow-up PRs.
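For illustration, a plugin written against the types added here might look
roughly like the sketch below (hedged example: the plugin and rule names are
invented, there is no report() API yet so the rule just logs, and the only
entry point in this PR is the internal test-only hook):

    const plugin = {
      name: "example-plugin",
      rules: {
        "no-foo": {
          create(ctx) {
            // Visitor keys are node type names; append ":exit" for exit callbacks.
            return {
              Identifier(node) {
                if (node.name === "foo") console.log(ctx.id, node.range);
              },
              "Identifier:exit"(_node) {},
            };
          },
          destroy(_ctx) {},
        },
      },
    };

    // Temporary test-only hook wired up in this PR:
    Deno[Deno.internal].runLintPlugin(plugin, "source.ts", "const foo = 1;");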

---------

Co-authored-by: Bartek Iwańczuk <biwanczuk@gmail.com>
Marvin Hagemeister 2024-12-21 00:58:03 +01:00 committed by GitHub
parent 77e1af79bd
commit 26425a137b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 5499 additions and 3 deletions

783
cli/js/40_lint.js Normal file

@ -0,0 +1,783 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// @ts-check
import { core, internals } from "ext:core/mod.js";
const {
op_lint_create_serialized_ast,
} = core.ops;
// Keep in sync with Rust
// These types are expected to be present on every node. Note that this
// isn't set in stone. We could revise this at a future point.
const AST_PROP_TYPE = 0;
const AST_PROP_PARENT = 1;
const AST_PROP_RANGE = 2;
// Keep in sync with Rust
// Each node property is tagged with this enum to denote
// what kind of value it holds.
/** @enum {number} */
const PropFlags = {
/** This is an offset to another node */
Ref: 0,
/** This is an array of offsets to other nodes (like children of a BlockStatement) */
RefArr: 1,
/**
* This is a string id. The actual string needs to be looked up in
* the string table that was included in the message.
*/
String: 2,
/** This value is either 0 = false, or 1 = true */
Bool: 3,
/** No value, it's null */
Null: 4,
/** No value, it's undefined */
Undefined: 5,
};
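// For example, with the layout used below (see findPropOffset), a boolean
// property is encoded as [prop id (u8), PropFlags.Bool (u8), 0 or 1 (u8)],
// while a Ref property is [prop id (u8), PropFlags.Ref (u8), offset (u32)].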
/** @typedef {import("./40_lint_types.d.ts").AstContext} AstContext */
/** @typedef {import("./40_lint_types.d.ts").VisitorFn} VisitorFn */
/** @typedef {import("./40_lint_types.d.ts").CompiledVisitor} CompiledVisitor */
/** @typedef {import("./40_lint_types.d.ts").LintState} LintState */
/** @typedef {import("./40_lint_types.d.ts").RuleContext} RuleContext */
/** @typedef {import("./40_lint_types.d.ts").NodeFacade} NodeFacade */
/** @typedef {import("./40_lint_types.d.ts").LintPlugin} LintPlugin */
/** @typedef {import("./40_lint_types.d.ts").LintReportData} LintReportData */
/** @typedef {import("./40_lint_types.d.ts").TestReportData} TestReportData */
/** @type {LintState} */
const state = {
plugins: [],
installedPlugins: new Set(),
};
/**
* Every rule gets its own instance of this class. This is the main
* API lint rules interact with.
* @implements {RuleContext}
*/
export class Context {
id;
fileName;
/**
* @param {string} id
* @param {string} fileName
*/
constructor(id, fileName) {
this.id = id;
this.fileName = fileName;
}
}
/**
* @param {LintPlugin} plugin
*/
export function installPlugin(plugin) {
if (typeof plugin !== "object") {
throw new Error("Linter plugin must be an object");
}
if (typeof plugin.name !== "string") {
throw new Error("Linter plugin name must be a string");
}
if (typeof plugin.rules !== "object") {
throw new Error("Linter plugin rules must be an object");
}
if (state.installedPlugins.has(plugin.name)) {
throw new Error(`Linter plugin ${plugin.name} has already been registered`);
}
state.plugins.push(plugin);
state.installedPlugins.add(plugin.name);
}
/**
* @param {AstContext} ctx
* @param {number} offset
* @returns {Node | null}
*/
function getNode(ctx, offset) {
if (offset === 0) return null;
const cached = ctx.nodes.get(offset);
if (cached !== undefined) return cached;
const node = new Node(ctx, offset);
ctx.nodes.set(offset, /** @type {*} */ (node));
return node;
}
/**
* Find the offset of a specific property of a specific node. This will
* be used later a lot more for selectors.
* @param {Uint8Array} buf
* @param {number} offset
* @param {number} search
* @returns {number}
*/
function findPropOffset(buf, offset, search) {
// type + parentId + SpanLo + SpanHi
offset += 1 + 4 + 4 + 4;
const propCount = buf[offset];
offset += 1;
for (let i = 0; i < propCount; i++) {
const maybe = offset;
const prop = buf[offset++];
const kind = buf[offset++];
if (prop === search) return maybe;
if (kind === PropFlags.Ref) {
offset += 4;
} else if (kind === PropFlags.RefArr) {
const len = readU32(buf, offset);
offset += 4 + (len * 4);
} else if (kind === PropFlags.String) {
offset += 4;
} else if (kind === PropFlags.Bool) {
offset++;
} else if (kind === PropFlags.Null || kind === PropFlags.Undefined) {
// No value
} else {
offset++;
}
}
return -1;
}
const INTERNAL_CTX = Symbol("ctx");
const INTERNAL_OFFSET = Symbol("offset");
// This class is a facade for all materialized nodes. Instead of creating a
// unique class per AST node, we have one class with getters for every
// possible node property. This allows us to lazily materialize child nodes
// only when they are needed.
class Node {
[INTERNAL_CTX];
[INTERNAL_OFFSET];
/**
* @param {AstContext} ctx
* @param {number} offset
*/
constructor(ctx, offset) {
this[INTERNAL_CTX] = ctx;
this[INTERNAL_OFFSET] = offset;
}
/**
* Logging a class with only getters prints just the class name. This
* makes debugging difficult because you don't see any of the properties.
* For that reason we'll intercept inspection and serialize the node to
* a plain JSON structure which can be logged and allows users to see all
* properties and their values.
*
* This is only expected to be used during development of a rule.
* @param {*} _
* @param {Deno.InspectOptions} options
* @returns {string}
*/
[Symbol.for("Deno.customInspect")](_, options) {
const json = toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]);
return Deno.inspect(json, options);
}
[Symbol.for("Deno.lint.toJsValue")]() {
return toJsValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET]);
}
}
/** @type {Set<number>} */
const appliedGetters = new Set();
/**
* Add getters for all potential properties found in the message.
* @param {AstContext} ctx
*/
function setNodeGetters(ctx) {
if (appliedGetters.size === ctx.strByProp.length) return;
for (let i = 0; i < ctx.strByProp.length; i++) {
const id = ctx.strByProp[i];
if (id === 0 || appliedGetters.has(i)) continue;
appliedGetters.add(i);
const name = getString(ctx.strTable, id);
Object.defineProperty(Node.prototype, name, {
get() {
return readValue(this[INTERNAL_CTX], this[INTERNAL_OFFSET], i);
},
});
}
}
/**
* Serialize a node recursively to plain JSON
* @param {AstContext} ctx
* @param {number} offset
* @returns {*}
*/
function toJsValue(ctx, offset) {
const { buf } = ctx;
/** @type {Record<string, any>} */
const node = {
type: readValue(ctx, offset, AST_PROP_TYPE),
range: readValue(ctx, offset, AST_PROP_RANGE),
};
// type + parentId + SpanLo + SpanHi
offset += 1 + 4 + 4 + 4;
const count = buf[offset++];
for (let i = 0; i < count; i++) {
const prop = buf[offset++];
const kind = buf[offset++];
const name = getString(ctx.strTable, ctx.strByProp[prop]);
if (kind === PropFlags.Ref) {
const v = readU32(buf, offset);
offset += 4;
node[name] = v === 0 ? null : toJsValue(ctx, v);
} else if (kind === PropFlags.RefArr) {
const len = readU32(buf, offset);
offset += 4;
const nodes = new Array(len);
for (let i = 0; i < len; i++) {
const v = readU32(buf, offset);
// Always advance past the u32, even for empty (0) entries.
offset += 4;
if (v === 0) continue;
nodes[i] = toJsValue(ctx, v);
}
node[name] = nodes;
} else if (kind === PropFlags.Bool) {
const v = buf[offset++];
node[name] = v === 1;
} else if (kind === PropFlags.String) {
const v = readU32(buf, offset);
offset += 4;
node[name] = getString(ctx.strTable, v);
} else if (kind === PropFlags.Null) {
node[name] = null;
} else if (kind === PropFlags.Undefined) {
node[name] = undefined;
}
}
return node;
}
/**
* Read a specific property from a node
* @param {AstContext} ctx
* @param {number} offset
* @param {number} search
* @returns {*}
*/
function readValue(ctx, offset, search) {
const { buf } = ctx;
const type = buf[offset];
if (search === AST_PROP_TYPE) {
return getString(ctx.strTable, ctx.strByType[type]);
} else if (search === AST_PROP_RANGE) {
const start = readU32(buf, offset + 1 + 4);
const end = readU32(buf, offset + 1 + 4 + 4);
return [start, end];
} else if (search === AST_PROP_PARENT) {
const pos = readU32(buf, offset + 1);
return getNode(ctx, pos);
}
offset = findPropOffset(ctx.buf, offset, search);
if (offset === -1) return undefined;
const kind = buf[offset + 1];
if (kind === PropFlags.Ref) {
const value = readU32(buf, offset + 2);
return getNode(ctx, value);
} else if (kind === PropFlags.RefArr) {
// Skip the prop id + prop flag bytes before reading the length.
const len = readU32(buf, offset + 2);
offset += 2 + 4;
const nodes = new Array(len);
for (let i = 0; i < len; i++) {
nodes[i] = getNode(ctx, readU32(buf, offset));
offset += 4;
}
return nodes;
} else if (kind === PropFlags.Bool) {
return buf[offset + 2] === 1;
} else if (kind === PropFlags.String) {
const v = readU32(buf, offset + 2);
return getString(ctx.strTable, v);
} else if (kind === PropFlags.Null) {
return null;
} else if (kind === PropFlags.Undefined) {
return undefined;
}
throw new Error(`Unknown prop kind: ${kind}`);
}
const DECODER = new TextDecoder();
/**
* TODO: Check if it's faster to use the `ArrayView` API instead.
* @param {Uint8Array} buf
* @param {number} i
* @returns {number}
*/
function readU32(buf, i) {
return (buf[i] << 24) + (buf[i + 1] << 16) + (buf[i + 2] << 8) +
buf[i + 3];
}
/**
* Get a string by id and error if it wasn't found
* @param {AstContext["strTable"]} strTable
* @param {number} id
* @returns {string}
*/
function getString(strTable, id) {
const name = strTable.get(id);
if (name === undefined) {
throw new Error(`Missing string id: ${id}`);
}
return name;
}
/**
* @param {Uint8Array} buf
* @returns {AstContext}
*/
function createAstContext(buf) {
/** @type {Map<number, string>} */
const strTable = new Map();
// The buffer has a few offsets at the end which allow us to easily
// jump to the relevant sections of the message.
const typeMapOffset = readU32(buf, buf.length - 16);
const propMapOffset = readU32(buf, buf.length - 12);
const strTableOffset = readU32(buf, buf.length - 8);
// Offset of the topmost node in the AST Tree.
const rootOffset = readU32(buf, buf.length - 4);
let offset = strTableOffset;
const stringCount = readU32(buf, offset);
offset += 4;
// TODO(@marvinhagemeister): We could lazily decode the strings on an as-needed basis.
// Not sure if this matters much in practice though.
let id = 0;
for (let i = 0; i < stringCount; i++) {
const len = readU32(buf, offset);
offset += 4;
const strBytes = buf.slice(offset, offset + len);
offset += len;
const s = DECODER.decode(strBytes);
strTable.set(id, s);
id++;
}
if (strTable.size !== stringCount) {
throw new Error(
`Could not deserialize string table. Expected ${stringCount} items, but got ${strTable.size}`,
);
}
offset = typeMapOffset;
const typeCount = readU32(buf, offset);
offset += 4;
const typeByStr = new Map();
const strByType = new Array(typeCount).fill(0);
for (let i = 0; i < typeCount; i++) {
const v = readU32(buf, offset);
offset += 4;
strByType[i] = v;
typeByStr.set(strTable.get(v), i);
}
offset = propMapOffset;
const propCount = readU32(buf, offset);
offset += 4;
const propByStr = new Map();
const strByProp = new Array(propCount).fill(0);
for (let i = 0; i < propCount; i++) {
const v = readU32(buf, offset);
offset += 4;
strByProp[i] = v;
propByStr.set(strTable.get(v), i);
}
/** @type {AstContext} */
const ctx = {
buf,
strTable,
rootOffset,
nodes: new Map(),
strTableOffset,
strByProp,
strByType,
typeByStr,
propByStr,
};
setNodeGetters(ctx);
// DEV ONLY: Enable this to inspect the buffer message
// _dump(ctx);
return ctx;
}
/**
* @param {*} _node
*/
const NOOP = (_node) => {};
/**
* Kick off the actual linting process of JS plugins.
* @param {string} fileName
* @param {Uint8Array} serializedAst
*/
export function runPluginsForFile(fileName, serializedAst) {
const ctx = createAstContext(serializedAst);
/** @type {Map<string, { enter: VisitorFn, exit: VisitorFn}>} */
const bySelector = new Map();
const destroyFns = [];
// Instantiate and merge visitors. This allows us to only traverse
// the AST once instead of per plugin. Whenever we enter or exit a
// node we'll call all visitors that match.
for (let i = 0; i < state.plugins.length; i++) {
const plugin = state.plugins[i];
for (const name of Object.keys(plugin.rules)) {
const rule = plugin.rules[name];
const id = `${plugin.name}/${name}`;
const ctx = new Context(id, fileName);
const visitor = rule.create(ctx);
// deno-lint-ignore guard-for-in
for (let key in visitor) {
const fn = visitor[key];
if (fn === undefined) continue;
// Support enter and exit callbacks on a visitor.
// Exit callbacks are marked by having `:exit` at the end.
let isExit = false;
if (key.endsWith(":exit")) {
isExit = true;
key = key.slice(0, -":exit".length);
}
let info = bySelector.get(key);
if (info === undefined) {
info = { enter: NOOP, exit: NOOP };
bySelector.set(key, info);
}
const prevFn = isExit ? info.exit : info.enter;
/**
* @param {*} node
*/
const wrapped = (node) => {
prevFn(node);
try {
fn(node);
} catch (err) {
throw new Error(`Visitor "${name}" of plugin "${id}" errored`, {
cause: err,
});
}
};
if (isExit) {
info.exit = wrapped;
} else {
info.enter = wrapped;
}
}
if (typeof rule.destroy === "function") {
const destroyFn = rule.destroy.bind(rule);
destroyFns.push(() => {
try {
destroyFn(ctx);
} catch (err) {
throw new Error(`Destroy hook of "${id}" errored`, { cause: err });
}
});
}
}
}
/** @type {CompiledVisitor[]} */
const visitors = [];
for (const [sel, info] of bySelector.entries()) {
// This will make more sense once selectors land as it's faster
// to precompile them once upfront.
// Convert the visiting element name to a number. This number
// is part of the serialized buffer and comparing a single number
// is quicker than comparing strings.
const elemId = ctx.typeByStr.get(sel) ?? -1;
visitors.push({
info,
// Check if we should call this visitor
matcher: (offset) => {
const type = ctx.buf[offset];
return type === elemId;
},
});
}
// Traverse ast with all visitors at the same time to avoid traversing
// multiple times.
try {
traverse(ctx, visitors, ctx.rootOffset);
} finally {
ctx.nodes.clear();
// Optional: Destroy rules
for (let i = 0; i < destroyFns.length; i++) {
destroyFns[i]();
}
}
}
/**
* @param {AstContext} ctx
* @param {CompiledVisitor[]} visitors
* @param {number} offset
*/
function traverse(ctx, visitors, offset) {
// The 0 offset is used to denote an empty/placeholder node
if (offset === 0) return;
const { buf } = ctx;
// Remember the node's start offset. `offset` is advanced below while
// walking the property values, but exit callbacks need the original node.
const originalOffset = offset;
/** @type {VisitorFn[] | null} */
let exits = null;
for (let i = 0; i < visitors.length; i++) {
const v = visitors[i];
if (v.matcher(offset)) {
if (v.info.exit !== NOOP) {
if (exits === null) {
exits = [v.info.exit];
} else {
exits.push(v.info.exit);
}
}
if (v.info.enter !== NOOP) {
const node = /** @type {*} */ (getNode(ctx, offset));
v.info.enter(node);
}
}
}
// Search for node references in the properties of the current node. All
// other properties can be ignored.
try {
// type + parentId + SpanLo + SpanHi
offset += 1 + 4 + 4 + 4;
const propCount = buf[offset];
offset += 1;
for (let i = 0; i < propCount; i++) {
const kind = buf[offset + 1];
offset += 2; // propId + propFlags
if (kind === PropFlags.Ref) {
const next = readU32(buf, offset);
offset += 4;
traverse(ctx, visitors, next);
} else if (kind === PropFlags.RefArr) {
const len = readU32(buf, offset);
offset += 4;
for (let j = 0; j < len; j++) {
const child = readU32(buf, offset);
offset += 4;
traverse(ctx, visitors, child);
}
} else if (kind === PropFlags.String) {
offset += 4;
} else if (kind === PropFlags.Bool) {
offset += 1;
} else if (kind === PropFlags.Null || kind === PropFlags.Undefined) {
// No value
}
}
} finally {
if (exits !== null) {
for (let i = 0; i < exits.length; i++) {
const node = /** @type {*} */ (getNode(ctx, originalOffset));
exits[i](node);
}
}
}
}
/**
* This is a useful debugging helper to display the buffer's contents.
* @param {AstContext} ctx
*/
function _dump(ctx) {
const { buf, strTableOffset, strTable, strByType, strByProp } = ctx;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(strTable);
for (let i = 0; i < strByType.length; i++) {
const v = strByType[i];
// @ts-ignore dump fn
// deno-lint-ignore no-console
if (v > 0) console.log(" > type:", i, getString(ctx.strTable, v), v);
}
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log();
for (let i = 0; i < strByProp.length; i++) {
const v = strByProp[i];
// @ts-ignore dump fn
// deno-lint-ignore no-console
if (v > 0) console.log(" > prop:", i, getString(ctx.strTable, v), v);
}
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log();
let offset = 0;
while (offset < strTableOffset) {
const type = buf[offset];
const name = getString(ctx.strTable, ctx.strByType[type]);
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(`${name}, offset: ${offset}, type: ${type}`);
offset += 1;
const parent = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` parent: ${parent}`);
const start = readU32(buf, offset);
offset += 4;
const end = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` range: ${start} -> ${end}`);
const count = buf[offset++];
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` prop count: ${count}`);
for (let i = 0; i < count; i++) {
const prop = buf[offset++];
const kind = buf[offset++];
const name = getString(ctx.strTable, ctx.strByProp[prop]);
let kindName = "unknown";
for (const k in PropFlags) {
// @ts-ignore dump fn
if (kind === PropFlags[k]) {
kindName = k;
}
}
if (kind === PropFlags.Ref) {
const v = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` ${name}: ${v} (${kindName}, ${prop})`);
} else if (kind === PropFlags.RefArr) {
const len = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` ${name}: Array(${len}) (${kindName}, ${prop})`);
for (let j = 0; j < len; j++) {
const v = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` - ${v} (${prop})`);
}
} else if (kind === PropFlags.Bool) {
const v = buf[offset];
offset += 1;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` ${name}: ${v} (${kindName}, ${prop})`);
} else if (kind === PropFlags.String) {
const v = readU32(buf, offset);
offset += 4;
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(
` ${name}: ${getString(ctx.strTable, v)} (${kindName}, ${prop})`,
);
} else if (kind === PropFlags.Null) {
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` ${name}: null (${kindName}, ${prop})`);
} else if (kind === PropFlags.Undefined) {
// @ts-ignore dump fn
// deno-lint-ignore no-console
console.log(` ${name}: undefined (${kindName}, ${prop})`);
}
}
}
}
// TODO(bartlomieju): this is temporary, until we get plugins plumbed through
// the CLI linter
/**
* @param {LintPlugin} plugin
* @param {string} fileName
* @param {string} sourceText
*/
function runLintPlugin(plugin, fileName, sourceText) {
installPlugin(plugin);
const serializedAst = op_lint_create_serialized_ast(fileName, sourceText);
try {
runPluginsForFile(fileName, serializedAst);
} finally {
// During testing we don't want to keep plugins around
state.installedPlugins.clear();
}
}
// TODO(bartlomieju): this is temporary, until we get plugins plumbed through
// the CLI linter
internals.runLintPlugin = runLintPlugin;

50
cli/js/40_lint_types.d.ts vendored Normal file

@ -0,0 +1,50 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
export interface NodeFacade {
type: string;
range: [number, number];
[key: string]: unknown;
}
export interface AstContext {
buf: Uint8Array;
strTable: Map<number, string>;
strTableOffset: number;
rootOffset: number;
nodes: Map<number, NodeFacade>;
strByType: number[];
strByProp: number[];
typeByStr: Map<string, number>;
propByStr: Map<string, number>;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface RuleContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintRule {
create(ctx: RuleContext): Record<string, (node: unknown) => void>;
destroy?(ctx: RuleContext): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
export interface LintState {
plugins: LintPlugin[];
installedPlugins: Set<string>;
}
export type VisitorFn = (node: unknown) => void;
export interface CompiledVisitor {
matcher: (offset: number) => boolean;
info: { enter: VisitorFn; exit: VisitorFn };
}
export {};

34
cli/ops/lint.rs Normal file

@ -0,0 +1,34 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::op2;
use crate::tools::lint;
deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],);
#[op2]
#[buffer]
fn op_lint_create_serialized_ast(
#[string] file_name: &str,
#[string] source: String,
) -> Result<Vec<u8>, AnyError> {
let file_text = deno_ast::strip_bom(source);
let path = std::env::current_dir()?.join(file_name);
let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| {
generic_error(format!("Failed to parse path as URL: {}", path.display()))
})?;
let media_type = MediaType::from_specifier(&specifier);
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: file_text.into(),
media_type,
capture_tokens: false,
scope_analysis: false,
maybe_syntax: None,
})?;
Ok(lint::serialize_ast_to_buffer(&parsed_source))
}


@ -2,4 +2,5 @@
pub mod bench;
pub mod jupyter;
pub mod lint;
pub mod testing;


@ -0,0 +1,516 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt::Display;
use deno_ast::swc::common::Span;
use deno_ast::swc::common::DUMMY_SP;
use indexmap::IndexMap;
/// Each property has this flag to mark what kind of value it holds.
/// Plain objects and arrays are not supported yet, but could be easily
/// added if needed.
#[derive(Debug, PartialEq)]
pub enum PropFlags {
Ref,
RefArr,
String,
Bool,
Null,
Undefined,
}
impl From<PropFlags> for u8 {
fn from(m: PropFlags) -> u8 {
m as u8
}
}
impl TryFrom<u8> for PropFlags {
type Error = &'static str;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(PropFlags::Ref),
1 => Ok(PropFlags::RefArr),
2 => Ok(PropFlags::String),
3 => Ok(PropFlags::Bool),
4 => Ok(PropFlags::Null),
5 => Ok(PropFlags::Undefined),
_ => Err("Unknown Prop flag"),
}
}
}
const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000;
const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000;
const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000;
const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111;
// TODO: There is probably a native Rust function to do this.
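// (u32::to_be_bytes would produce the same big-endian byte order.)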
pub fn append_u32(result: &mut Vec<u8>, value: u32) {
let v1: u8 = ((value & MASK_U32_1) >> 24) as u8;
let v2: u8 = ((value & MASK_U32_2) >> 16) as u8;
let v3: u8 = ((value & MASK_U32_3) >> 8) as u8;
let v4: u8 = (value & MASK_U32_4) as u8;
result.push(v1);
result.push(v2);
result.push(v3);
result.push(v4);
}
pub fn append_usize(result: &mut Vec<u8>, value: usize) {
let raw = u32::try_from(value).unwrap();
append_u32(result, raw);
}
pub fn write_usize(result: &mut [u8], value: usize, idx: usize) {
let raw = u32::try_from(value).unwrap();
let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8;
let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8;
let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8;
let v4: u8 = (raw & MASK_U32_4) as u8;
result[idx] = v1;
result[idx + 1] = v2;
result[idx + 2] = v3;
result[idx + 3] = v4;
}
#[derive(Debug)]
pub struct StringTable {
id: usize,
table: IndexMap<String, usize>,
}
impl StringTable {
pub fn new() -> Self {
Self {
id: 0,
table: IndexMap::new(),
}
}
pub fn insert(&mut self, s: &str) -> usize {
if let Some(id) = self.table.get(s) {
return *id;
}
let id = self.id;
self.id += 1;
self.table.insert(s.to_string(), id);
id
}
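// For example, inserting "foo", "bar", "foo" returns the ids 0, 1, 0;
// serialize() then writes the entry count (2) followed by each string's
// length and bytes.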
pub fn serialize(&mut self) -> Vec<u8> {
let mut result: Vec<u8> = vec![];
append_u32(&mut result, self.table.len() as u32);
// Assume that it's sorted by id
for (s, _id) in &self.table {
let bytes = s.as_bytes();
append_u32(&mut result, bytes.len() as u32);
result.append(&mut bytes.to_vec());
}
result
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct NodeRef(pub usize);
#[derive(Debug)]
pub struct BoolPos(pub usize);
#[derive(Debug)]
pub struct FieldPos(pub usize);
#[derive(Debug)]
pub struct FieldArrPos(pub usize);
#[derive(Debug)]
pub struct StrPos(pub usize);
#[derive(Debug)]
pub struct UndefPos(pub usize);
#[derive(Debug)]
pub struct NullPos(pub usize);
#[derive(Debug)]
pub enum NodePos {
Bool(BoolPos),
#[allow(dead_code)]
Field(FieldPos),
#[allow(dead_code)]
FieldArr(FieldArrPos),
Str(StrPos),
Undef(UndefPos),
#[allow(dead_code)]
Null(NullPos),
}
pub trait AstBufSerializer<K, P>
where
K: Into<u8> + Display,
P: Into<u8> + Display,
{
fn header(
&mut self,
kind: K,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef;
fn ref_field(&mut self, prop: P) -> FieldPos;
fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
fn str_field(&mut self, prop: P) -> StrPos;
fn bool_field(&mut self, prop: P) -> BoolPos;
fn undefined_field(&mut self, prop: P) -> UndefPos;
#[allow(dead_code)]
fn null_field(&mut self, prop: P) -> NullPos;
fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>);
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>);
fn write_str(&mut self, pos: StrPos, value: &str);
fn write_bool(&mut self, pos: BoolPos, value: bool);
fn serialize(&mut self) -> Vec<u8>;
}
#[derive(Debug)]
pub struct SerializeCtx {
buf: Vec<u8>,
start_buf: NodeRef,
str_table: StringTable,
kind_map: Vec<usize>,
prop_map: Vec<usize>,
}
/// This is the internal context used to allocate and fill the buffer. The point
/// is to be able to write absolute offsets directly in place.
///
/// The typical workflow is to reserve all necessary space for the current
/// node with placeholders for the offsets of the child nodes. Once child
/// nodes have been traversed, we know their offsets and can replace the
/// placeholder values with the actual ones.
impl SerializeCtx {
pub fn new(kind_len: u8, prop_len: u8) -> Self {
let kind_size = kind_len as usize;
let prop_size = prop_len as usize;
let mut ctx = Self {
start_buf: NodeRef(0),
buf: vec![],
str_table: StringTable::new(),
kind_map: vec![0; kind_size + 1],
prop_map: vec![0; prop_size + 1],
};
ctx.str_table.insert("");
// Placeholder node is always 0
ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
ctx.kind_map[0] = 0;
ctx.start_buf = NodeRef(ctx.buf.len());
// Insert default props that are always present
let type_str = ctx.str_table.insert("type");
let parent_str = ctx.str_table.insert("parent");
let range_str = ctx.str_table.insert("range");
// These values are expected to be in this order on the JS side
ctx.prop_map[0] = type_str;
ctx.prop_map[1] = parent_str;
ctx.prop_map[2] = range_str;
ctx
}
/// Allocate a property field header (prop id + prop flags).
fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.buf.len();
let n: u8 = prop.clone().into();
self.buf.push(n);
if let Some(v) = self.prop_map.get::<usize>(n.into()) {
if *v == 0 {
let id = self.str_table.insert(&format!("{prop}"));
self.prop_map[n as usize] = id;
}
}
let flags: u8 = prop_flags.into();
self.buf.push(flags);
offset
}
/// Allocate a field header followed by a u32 value placeholder.
fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field_header(prop, prop_flags);
append_usize(&mut self.buf, 0);
offset
}
fn append_node(
&mut self,
kind: u8,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef {
let offset = self.buf.len();
// Node type fits in a u8
self.buf.push(kind);
// Offset to the parent node. Will be 0 if none exists
append_usize(&mut self.buf, parent.0);
// Span, the start and end location of this node
append_u32(&mut self.buf, span.lo.0);
append_u32(&mut self.buf, span.hi.0);
// Every node has fewer than 10 properties
debug_assert!(prop_count < 10);
self.buf.push(prop_count as u8);
NodeRef(offset)
}
/// Allocate the node header. It's always the same for every node.
/// <type u8>
/// <parent offset u32>
/// <span lo u32>
/// <span high u32>
/// <property count u8> (There is no node with more than 10 properties)
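/// In total the fixed header is 14 bytes (1 + 4 + 4 + 4 + 1); the property
/// data follows immediately afterwards.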
pub fn header<N>(
&mut self,
kind: N,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef
where
N: Into<u8> + Display + Clone,
{
let n: u8 = kind.clone().into();
if let Some(v) = self.kind_map.get::<usize>(n.into()) {
if *v == 0 {
let id = self.str_table.insert(&format!("{kind}"));
self.kind_map[n as usize] = id;
}
}
self.append_node(n, parent, span, prop_count)
}
/// Allocate a reference property that will hold the offset of
/// another node.
pub fn ref_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field(prop, PropFlags::Ref)
}
/// Allocate a property that is a vec of node offsets pointing to other
/// nodes.
pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field(prop, PropFlags::RefArr);
for _ in 0..len {
append_u32(&mut self.buf, 0);
}
offset
}
/// Allocate a property representing a string. Strings are deduplicated
/// in the message and the property will only contain the string id.
pub fn str_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field(prop, PropFlags::String)
}
/// Allocate a bool field
pub fn bool_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
let offset = self.field_header(prop, PropFlags::Bool);
self.buf.push(0);
offset
}
/// Allocate an undefined field
pub fn undefined_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field_header(prop, PropFlags::Undefined)
}
/// Allocate a null field
#[allow(dead_code)]
pub fn null_field<P>(&mut self, prop: P) -> usize
where
P: Into<u8> + Display + Clone,
{
self.field_header(prop, PropFlags::Null)
}
/// Replace the placeholder of a reference field with the actual offset
/// to the node we want to point to.
pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
panic!("Trying to write a ref into a non-ref field")
}
}
write_usize(&mut self.buf, value.0, field_offset + 2);
}
/// Helper for writing optional node offsets
pub fn write_maybe_ref(
&mut self,
field_offset: usize,
value: Option<NodeRef>,
) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
panic!("Trying to write a ref into a non-ref field")
}
}
let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
write_usize(&mut self.buf, ref_value.0, field_offset + 2);
}
/// Write a vec of node offsets into the property. The necessary space
/// has been reserved earlier.
pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
panic!("Trying to write a ref into a non-ref array field")
}
}
let mut offset = field_offset + 2;
write_usize(&mut self.buf, value.len(), offset);
offset += 4;
for item in value {
write_usize(&mut self.buf, item.0, offset);
offset += 4;
}
}
/// Store the string in our string table and save the id of the string
/// in the current field.
pub fn write_str(&mut self, field_offset: usize, value: &str) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
panic!("Trying to write a string into a non-string field")
}
}
let id = self.str_table.insert(value);
write_usize(&mut self.buf, id, field_offset + 2);
}
/// Write a bool to a field.
pub fn write_bool(&mut self, field_offset: usize, value: bool) {
#[cfg(debug_assertions)]
{
let value_kind = self.buf[field_offset + 1];
if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
panic!("Trying to write a bool into a non-bool field")
}
}
self.buf[field_offset + 2] = if value { 1 } else { 0 };
}
/// Serialize all information we have into a buffer that can be sent to JS.
/// It has the following structure:
///
/// <...ast>
/// <string table>
/// <node kind map> <- node kind id maps to string id
/// <node prop map> <- node property id maps to string id
/// <offset kind map>
/// <offset prop map>
/// <offset str table>
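/// <offset root node>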
pub fn serialize(&mut self) -> Vec<u8> {
let mut buf: Vec<u8> = vec![];
// The buffer starts with the serialized AST first, because that
// contains absolute offsets. By putting this at the start of the
// message we don't have to waste time updating any offsets.
buf.append(&mut self.buf);
// Next follows the string table. We'll keep track of the offset
// in the message of where the string table begins
let offset_str_table = buf.len();
// Serialize string table
buf.append(&mut self.str_table.serialize());
// Next, serialize the mappings of kind -> string of encountered
// nodes in the AST. We use this additional lookup table to compress
// the message so that we can save space by using a u8. All nodes of
// JS, TS and JSX together are <200
let offset_kind_map = buf.len();
// Write the total number of entries in the kind -> str mapping table
// TODO: make this a u8
append_usize(&mut buf, self.kind_map.len());
for v in &self.kind_map {
append_usize(&mut buf, *v);
}
// Store offset to prop -> string map. It's the same as with node kind
// as the total number of properties is <120 which allows us to store it
// as u8.
let offset_prop_map = buf.len();
// Write the total number of entries in the prop -> str mapping table
append_usize(&mut buf, self.prop_map.len());
for v in &self.prop_map {
append_usize(&mut buf, *v);
}
// Putting offsets of relevant parts of the buffer at the end. This
// allows us to hop to the relevant part by merely looking at the last
// four values in the message. Each value represents an offset into the
// buffer.
append_usize(&mut buf, offset_kind_map);
append_usize(&mut buf, offset_prop_map);
append_usize(&mut buf, offset_str_table);
append_usize(&mut buf, self.start_buf.0);
buf
}
}


@ -0,0 +1,13 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::ParsedSource;
use swc::serialize_swc_to_buffer;
mod buffer;
mod swc;
mod ts_estree;
pub fn serialize_ast_to_buffer(parsed_source: &ParsedSource) -> Vec<u8> {
// TODO: We could support multiple languages here
serialize_swc_to_buffer(parsed_source)
}

File diff suppressed because it is too large


@ -0,0 +1,513 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use deno_ast::swc::common::Span;
use super::buffer::AstBufSerializer;
use super::buffer::BoolPos;
use super::buffer::FieldArrPos;
use super::buffer::FieldPos;
use super::buffer::NodeRef;
use super::buffer::NullPos;
use super::buffer::SerializeCtx;
use super::buffer::StrPos;
use super::buffer::UndefPos;
#[derive(Debug, Clone, PartialEq)]
pub enum AstNode {
// First node must always be the empty/invalid node
Invalid,
// Typically the root node of the AST
Program,
// Module declarations
ExportAllDeclaration,
ExportDefaultDeclaration,
ExportNamedDeclaration,
ImportDeclaration,
TsExportAssignment,
TsImportEquals,
TsNamespaceExport,
// Decls
ClassDeclaration,
FunctionDeclaration,
TSEnumDeclaration,
TSInterface,
TsModule,
TsTypeAlias,
Using,
VariableDeclaration,
// Statements
BlockStatement,
BreakStatement,
ContinueStatement,
DebuggerStatement,
DoWhileStatement,
EmptyStatement,
ExpressionStatement,
ForInStatement,
ForOfStatement,
ForStatement,
IfStatement,
LabeledStatement,
ReturnStatement,
SwitchCase,
SwitchStatement,
ThrowStatement,
TryStatement,
WhileStatement,
WithStatement,
// Expressions
ArrayExpression,
ArrowFunctionExpression,
AssignmentExpression,
AwaitExpression,
BinaryExpression,
CallExpression,
ChainExpression,
ClassExpression,
ConditionalExpression,
FunctionExpression,
Identifier,
ImportExpression,
LogicalExpression,
MemberExpression,
MetaProp,
NewExpression,
ObjectExpression,
PrivateIdentifier,
SequenceExpression,
Super,
TaggedTemplateExpression,
TemplateLiteral,
ThisExpression,
TSAsExpression,
TsConstAssertion,
TsInstantiation,
TSNonNullExpression,
TSSatisfiesExpression,
TSTypeAssertion,
UnaryExpression,
UpdateExpression,
YieldExpression,
// TODO: TSEsTree uses a single literal node
// Literals
StringLiteral,
Bool,
Null,
NumericLiteral,
BigIntLiteral,
RegExpLiteral,
EmptyExpr,
SpreadElement,
Property,
VariableDeclarator,
CatchClause,
RestElement,
ExportSpecifier,
TemplateElement,
MethodDefinition,
ClassBody,
// Patterns
ArrayPattern,
AssignmentPattern,
ObjectPattern,
// JSX
JSXAttribute,
JSXClosingElement,
JSXClosingFragment,
JSXElement,
JSXEmptyExpression,
JSXExpressionContainer,
JSXFragment,
JSXIdentifier,
JSXMemberExpression,
JSXNamespacedName,
JSXOpeningElement,
JSXOpeningFragment,
JSXSpreadAttribute,
JSXSpreadChild,
JSXText,
TSTypeAnnotation,
TSTypeParameterDeclaration,
TSTypeParameter,
TSTypeParameterInstantiation,
TSEnumMember,
TSInterfaceBody,
TSInterfaceHeritage,
TSTypeReference,
TSThisType,
TSLiteralType,
TSInferType,
TSConditionalType,
TSUnionType,
TSIntersectionType,
TSMappedType,
TSTypeQuery,
TSTupleType,
TSNamedTupleMember,
TSFunctionType,
TsCallSignatureDeclaration,
TSPropertySignature,
TSMethodSignature,
TSIndexSignature,
TSIndexedAccessType,
TSTypeOperator,
TSTypePredicate,
TSImportType,
TSRestType,
TSArrayType,
TSClassImplements,
TSAnyKeyword,
TSBigIntKeyword,
TSBooleanKeyword,
TSIntrinsicKeyword,
TSNeverKeyword,
TSNullKeyword,
TSNumberKeyword,
TSObjectKeyword,
TSStringKeyword,
TSSymbolKeyword,
TSUndefinedKeyword,
TSUnknownKeyword,
TSVoidKeyword,
TSEnumBody, // Last value is used for max value
}
impl Display for AstNode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(self, f)
}
}
impl From<AstNode> for u8 {
fn from(m: AstNode) -> u8 {
m as u8
}
}
#[derive(Debug, Clone)]
pub enum AstProp {
// Base, these three must be in sync with JS. The
// order here for these 3 fields is important.
Type,
Parent,
Range,
// Starting from here the order doesn't matter.
// Following are all possible AST node properties.
Abstract,
Accessibility,
Alternate,
Argument,
Arguments,
Asserts,
Async,
Attributes,
Await,
Block,
Body,
Callee,
Cases,
Children,
CheckType,
ClosingElement,
ClosingFragment,
Computed,
Consequent,
Const,
Constraint,
Cooked,
Declaration,
Declarations,
Declare,
Default,
Definite,
Delegate,
Discriminant,
Elements,
ElementType,
ElementTypes,
ExprName,
Expression,
Expressions,
Exported,
Extends,
ExtendsType,
FalseType,
Finalizer,
Flags,
Generator,
Handler,
Id,
In,
IndexType,
Init,
Initializer,
Implements,
Key,
Kind,
Label,
Left,
Literal,
Local,
Members,
Meta,
Method,
Name,
Namespace,
NameType,
Object,
ObjectType,
OpeningElement,
OpeningFragment,
Operator,
Optional,
Out,
Param,
ParameterName,
Params,
Pattern,
Prefix,
Properties,
Property,
Qualifier,
Quasi,
Quasis,
Raw,
Readonly,
ReturnType,
Right,
SelfClosing,
Shorthand,
Source,
SourceType,
Specifiers,
Static,
SuperClass,
SuperTypeArguments,
Tag,
Tail,
Test,
TrueType,
TypeAnnotation,
TypeArguments,
TypeName,
TypeParameter,
TypeParameters,
Types,
Update,
Value, // Last value is used for max value
}
// TODO: Feels like there should be an easier way to iterate over an
// enum in Rust and lowercase the first letter.
impl Display for AstProp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match self {
AstProp::Parent => "parent",
AstProp::Range => "range",
AstProp::Type => "type",
AstProp::Abstract => "abstract",
AstProp::Accessibility => "accessibility",
AstProp::Alternate => "alternate",
AstProp::Argument => "argument",
AstProp::Arguments => "arguments",
AstProp::Asserts => "asserts",
AstProp::Async => "async",
AstProp::Attributes => "attributes",
AstProp::Await => "await",
AstProp::Block => "block",
AstProp::Body => "body",
AstProp::Callee => "callee",
AstProp::Cases => "cases",
AstProp::Children => "children",
AstProp::CheckType => "checkType",
AstProp::ClosingElement => "closingElement",
AstProp::ClosingFragment => "closingFragment",
AstProp::Computed => "computed",
AstProp::Consequent => "consequent",
AstProp::Const => "const",
AstProp::Constraint => "constraint",
AstProp::Cooked => "cooked",
AstProp::Declaration => "declaration",
AstProp::Declarations => "declarations",
AstProp::Declare => "declare",
AstProp::Default => "default",
AstProp::Definite => "definite",
AstProp::Delegate => "delegate",
AstProp::Discriminant => "discriminant",
AstProp::Elements => "elements",
AstProp::ElementType => "elementType",
AstProp::ElementTypes => "elementTypes",
AstProp::ExprName => "exprName",
AstProp::Expression => "expression",
AstProp::Expressions => "expressions",
AstProp::Exported => "exported",
AstProp::Extends => "extends",
AstProp::ExtendsType => "extendsType",
AstProp::FalseType => "falseType",
AstProp::Finalizer => "finalizer",
AstProp::Flags => "flags",
AstProp::Generator => "generator",
AstProp::Handler => "handler",
AstProp::Id => "id",
AstProp::In => "in",
AstProp::IndexType => "indexType",
AstProp::Init => "init",
AstProp::Initializer => "initializer",
AstProp::Implements => "implements",
AstProp::Key => "key",
AstProp::Kind => "kind",
AstProp::Label => "label",
AstProp::Left => "left",
AstProp::Literal => "literal",
AstProp::Local => "local",
AstProp::Members => "members",
AstProp::Meta => "meta",
AstProp::Method => "method",
AstProp::Name => "name",
AstProp::Namespace => "namespace",
AstProp::NameType => "nameType",
AstProp::Object => "object",
AstProp::ObjectType => "objectType",
AstProp::OpeningElement => "openingElement",
AstProp::OpeningFragment => "openingFragment",
AstProp::Operator => "operator",
AstProp::Optional => "optional",
AstProp::Out => "out",
AstProp::Param => "param",
AstProp::ParameterName => "parameterName",
AstProp::Params => "params",
AstProp::Pattern => "pattern",
AstProp::Prefix => "prefix",
AstProp::Properties => "properties",
AstProp::Property => "property",
AstProp::Qualifier => "qualifier",
AstProp::Quasi => "quasi",
AstProp::Quasis => "quasis",
AstProp::Raw => "raw",
AstProp::Readonly => "readonly",
AstProp::ReturnType => "returnType",
AstProp::Right => "right",
AstProp::SelfClosing => "selfClosing",
AstProp::Shorthand => "shorthand",
AstProp::Source => "source",
AstProp::SourceType => "sourceType",
AstProp::Specifiers => "specifiers",
AstProp::Static => "static",
AstProp::SuperClass => "superClass",
AstProp::SuperTypeArguments => "superTypeArguments",
AstProp::Tag => "tag",
AstProp::Tail => "tail",
AstProp::Test => "test",
AstProp::TrueType => "trueType",
AstProp::TypeAnnotation => "typeAnnotation",
AstProp::TypeArguments => "typeArguments",
AstProp::TypeName => "typeName",
AstProp::TypeParameter => "typeParameter",
AstProp::TypeParameters => "typeParameters",
AstProp::Types => "types",
AstProp::Update => "update",
AstProp::Value => "value",
};
write!(f, "{}", s)
}
}
impl From<AstProp> for u8 {
fn from(m: AstProp) -> u8 {
m as u8
}
}
pub struct TsEsTreeBuilder {
ctx: SerializeCtx,
}
// TODO: Add a builder API to make it easier to convert from different source
// ast formats.
impl TsEsTreeBuilder {
pub fn new() -> Self {
// Max values
// TODO: Maybe there is a rust macro to grab the last enum value?
let kind_count: u8 = AstNode::TSEnumBody.into();
let prop_count: u8 = AstProp::Value.into();
Self {
ctx: SerializeCtx::new(kind_count, prop_count),
}
}
}
impl AstBufSerializer<AstNode, AstProp> for TsEsTreeBuilder {
fn header(
&mut self,
kind: AstNode,
parent: NodeRef,
span: &Span,
prop_count: usize,
) -> NodeRef {
self.ctx.header(kind, parent, span, prop_count)
}
fn ref_field(&mut self, prop: AstProp) -> FieldPos {
FieldPos(self.ctx.ref_field(prop))
}
fn ref_vec_field(&mut self, prop: AstProp, len: usize) -> FieldArrPos {
FieldArrPos(self.ctx.ref_vec_field(prop, len))
}
fn str_field(&mut self, prop: AstProp) -> StrPos {
StrPos(self.ctx.str_field(prop))
}
fn bool_field(&mut self, prop: AstProp) -> BoolPos {
BoolPos(self.ctx.bool_field(prop))
}
fn undefined_field(&mut self, prop: AstProp) -> UndefPos {
UndefPos(self.ctx.undefined_field(prop))
}
fn null_field(&mut self, prop: AstProp) -> NullPos {
NullPos(self.ctx.null_field(prop))
}
fn write_ref(&mut self, pos: FieldPos, value: NodeRef) {
self.ctx.write_ref(pos.0, value);
}
fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>) {
self.ctx.write_maybe_ref(pos.0, value);
}
fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>) {
self.ctx.write_refs(pos.0, value);
}
fn write_str(&mut self, pos: StrPos, value: &str) {
self.ctx.write_str(pos.0, value);
}
fn write_bool(&mut self, pos: BoolPos, value: bool) {
self.ctx.write_bool(pos.0, value);
}
fn serialize(&mut self) -> Vec<u8> {
self.ctx.serialize()
}
}
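For orientation, here is a hedged sketch of how a caller might drive this
builder (not code from this commit; the real SWC-to-buffer conversion lives in
the swc module whose diff is suppressed above, and serialize_break is an
invented helper). Serializing `break;`, i.e. a BreakStatement without a label:

    // Assumes AstBufSerializer, NodeRef and Span are in scope, e.g.
    // use super::buffer::{AstBufSerializer, NodeRef};
    // use deno_ast::swc::common::Span;
    fn serialize_break(
      ctx: &mut TsEsTreeBuilder,
      parent: NodeRef,
      span: &Span,
    ) -> NodeRef {
      // Reserve the node header plus one property slot (`label`)...
      let pos = ctx.header(AstNode::BreakStatement, parent, span, 1);
      let label_pos = ctx.ref_field(AstProp::Label);
      // ...then patch the placeholder; `None` leaves the 0 (null) offset.
      ctx.write_maybe_ref(label_pos, None);
      pos
    }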


@ -51,10 +51,13 @@ use crate::util::fs::canonicalize_path;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
mod ast_buffer;
mod linter;
mod reporters;
mod rules;
// TODO(bartlomieju): remove once we wire plugins through the CLI linter
pub use ast_buffer::serialize_ast_to_buffer;
pub use linter::CliLinter;
pub use linter::CliLinterOptions;
pub use rules::collect_no_slow_type_diagnostics;


@ -616,7 +616,10 @@ async fn configure_main_worker(
WorkerExecutionMode::Test,
specifier.clone(),
permissions_container,
-vec![ops::testing::deno_test::init_ops(worker_sender.sender)],
+vec![
+ops::testing::deno_test::init_ops(worker_sender.sender),
+ops::lint::deno_lint::init_ops(),
+],
Stdio {
stdin: StdioPipe::inherit(),
stdout: StdioPipe::file(worker_sender.stdout),


@ -656,7 +656,8 @@ impl CliMainWorkerFactory {
"40_test_common.js",
"40_test.js",
"40_bench.js",
-"40_jupyter.js"
+"40_jupyter.js",
+"40_lint.js"
);
}


@ -526,6 +526,9 @@ const NOT_IMPORTED_OPS = [
// Used in jupyter API
"op_base64_encode",
// Used in the lint API
"op_lint_create_serialized_ast",
// Related to `Deno.test()` API
"op_test_event_step_result_failed",
"op_test_event_step_result_ignored",


@ -52,6 +52,7 @@ util::unit_test_factory!(
kv_queue_test,
kv_queue_undelivered_test,
link_test,
lint_plugin_test,
make_temp_test,
message_channel_test,
mkdir_test,


@ -0,0 +1,557 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { assertEquals } from "./test_util.ts";
// TODO(@marvinhagemeister) Remove once we land "official" types
export interface LintReportData {
// deno-lint-ignore no-explicit-any
node: any;
message: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintContext {
id: string;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
// deno-lint-ignore no-explicit-any
type LintVisitor = Record<string, (node: any) => void>;
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintRule {
create(ctx: LintContext): LintVisitor;
destroy?(): void;
}
// TODO(@marvinhagemeister) Remove once we land "official" types
interface LintPlugin {
name: string;
rules: Record<string, LintRule>;
}
function runLintPlugin(plugin: LintPlugin, fileName: string, source: string) {
// deno-lint-ignore no-explicit-any
return (Deno as any)[(Deno as any).internal].runLintPlugin(
plugin,
fileName,
source,
);
}
function testPlugin(
source: string,
rule: LintRule,
) {
const plugin = {
name: "test-plugin",
rules: {
testRule: rule,
},
};
return runLintPlugin(plugin, "source.tsx", source);
}
function testVisit(source: string, ...selectors: string[]): string[] {
const log: string[] = [];
testPlugin(source, {
create() {
const visitor: LintVisitor = {};
for (const s of selectors) {
visitor[s] = () => log.push(s);
}
return visitor;
},
});
return log;
}
function testLintNode(source: string, ...selectors: string[]) {
// deno-lint-ignore no-explicit-any
const log: any[] = [];
testPlugin(source, {
create() {
const visitor: LintVisitor = {};
for (const s of selectors) {
visitor[s] = (node) => {
log.push(node[Symbol.for("Deno.lint.toJsValue")]());
};
}
return visitor;
},
});
return log;
}
Deno.test("Plugin - visitor enter/exit", () => {
const enter = testVisit("foo", "Identifier");
assertEquals(enter, ["Identifier"]);
const exit = testVisit("foo", "Identifier:exit");
assertEquals(exit, ["Identifier:exit"]);
const both = testVisit("foo", "Identifier", "Identifier:exit");
assertEquals(both, ["Identifier", "Identifier:exit"]);
});
Deno.test("Plugin - Program", () => {
const node = testLintNode("", "Program");
assertEquals(node[0], {
type: "Program",
sourceType: "script",
range: [1, 1],
body: [],
});
});
Deno.test("Plugin - BlockStatement", () => {
const node = testLintNode("{ foo; }", "BlockStatement");
assertEquals(node[0], {
type: "BlockStatement",
range: [1, 9],
body: [{
type: "ExpressionStatement",
range: [3, 7],
expression: {
type: "Identifier",
name: "foo",
range: [3, 6],
},
}],
});
});
Deno.test("Plugin - BreakStatement", () => {
let node = testLintNode("break;", "BreakStatement");
assertEquals(node[0], {
type: "BreakStatement",
range: [1, 7],
label: null,
});
node = testLintNode("break foo;", "BreakStatement");
assertEquals(node[0], {
type: "BreakStatement",
range: [1, 11],
label: {
type: "Identifier",
range: [7, 10],
name: "foo",
},
});
});
Deno.test("Plugin - ContinueStatement", () => {
let node = testLintNode("continue;", "ContinueStatement");
assertEquals(node[0], {
type: "ContinueStatement",
range: [1, 10],
label: null,
});
node = testLintNode("continue foo;", "ContinueStatement");
assertEquals(node[0], {
type: "ContinueStatement",
range: [1, 14],
label: {
type: "Identifier",
range: [10, 13],
name: "foo",
},
});
});
Deno.test("Plugin - DebuggerStatement", () => {
const node = testLintNode("debugger;", "DebuggerStatement");
assertEquals(node[0], {
type: "DebuggerStatement",
range: [1, 10],
});
});
Deno.test("Plugin - DoWhileStatement", () => {
const node = testLintNode("do {} while (foo);", "DoWhileStatement");
assertEquals(node[0], {
type: "DoWhileStatement",
range: [1, 19],
test: {
type: "Identifier",
range: [14, 17],
name: "foo",
},
body: {
type: "BlockStatement",
range: [4, 6],
body: [],
},
});
});
Deno.test("Plugin - ExpressionStatement", () => {
const node = testLintNode("foo;", "ExpressionStatement");
assertEquals(node[0], {
type: "ExpressionStatement",
range: [1, 5],
expression: {
type: "Identifier",
range: [1, 4],
name: "foo",
},
});
});
Deno.test("Plugin - ForInStatement", () => {
const node = testLintNode("for (a in b) {}", "ForInStatement");
assertEquals(node[0], {
type: "ForInStatement",
range: [1, 16],
left: {
type: "Identifier",
range: [6, 7],
name: "a",
},
right: {
type: "Identifier",
range: [11, 12],
name: "b",
},
body: {
type: "BlockStatement",
range: [14, 16],
body: [],
},
});
});
Deno.test("Plugin - ForOfStatement", () => {
let node = testLintNode("for (a of b) {}", "ForOfStatement");
assertEquals(node[0], {
type: "ForOfStatement",
range: [1, 16],
await: false,
left: {
type: "Identifier",
range: [6, 7],
name: "a",
},
right: {
type: "Identifier",
range: [11, 12],
name: "b",
},
body: {
type: "BlockStatement",
range: [14, 16],
body: [],
},
});
node = testLintNode("for await (a of b) {}", "ForOfStatement");
assertEquals(node[0], {
type: "ForOfStatement",
range: [1, 22],
await: true,
left: {
type: "Identifier",
range: [12, 13],
name: "a",
},
right: {
type: "Identifier",
range: [17, 18],
name: "b",
},
body: {
type: "BlockStatement",
range: [20, 22],
body: [],
},
});
});
Deno.test("Plugin - ForStatement", () => {
let node = testLintNode("for (;;) {}", "ForStatement");
assertEquals(node[0], {
type: "ForStatement",
range: [1, 12],
init: null,
test: null,
update: null,
body: {
type: "BlockStatement",
range: [10, 12],
body: [],
},
});
node = testLintNode("for (a; b; c) {}", "ForStatement");
assertEquals(node[0], {
type: "ForStatement",
range: [1, 17],
init: {
type: "Identifier",
range: [6, 7],
name: "a",
},
test: {
type: "Identifier",
range: [9, 10],
name: "b",
},
update: {
type: "Identifier",
range: [12, 13],
name: "c",
},
body: {
type: "BlockStatement",
range: [15, 17],
body: [],
},
});
});
Deno.test("Plugin - IfStatement", () => {
let node = testLintNode("if (foo) {}", "IfStatement");
assertEquals(node[0], {
type: "IfStatement",
range: [1, 12],
test: {
type: "Identifier",
name: "foo",
range: [5, 8],
},
consequent: {
type: "BlockStatement",
range: [10, 12],
body: [],
},
alternate: null,
});
node = testLintNode("if (foo) {} else {}", "IfStatement");
assertEquals(node[0], {
type: "IfStatement",
range: [1, 20],
test: {
type: "Identifier",
name: "foo",
range: [5, 8],
},
consequent: {
type: "BlockStatement",
range: [10, 12],
body: [],
},
alternate: {
type: "BlockStatement",
range: [18, 20],
body: [],
},
});
});
Deno.test("Plugin - LabeledStatement", () => {
const node = testLintNode("foo: {};", "LabeledStatement");
assertEquals(node[0], {
type: "LabeledStatement",
range: [1, 8],
label: {
type: "Identifier",
name: "foo",
range: [1, 4],
},
body: {
type: "BlockStatement",
range: [6, 8],
body: [],
},
});
});
Deno.test("Plugin - ReturnStatement", () => {
let node = testLintNode("return", "ReturnStatement");
assertEquals(node[0], {
type: "ReturnStatement",
range: [1, 7],
argument: null,
});
node = testLintNode("return foo;", "ReturnStatement");
assertEquals(node[0], {
type: "ReturnStatement",
range: [1, 12],
argument: {
type: "Identifier",
name: "foo",
range: [8, 11],
},
});
});
Deno.test("Plugin - SwitchStatement", () => {
const node = testLintNode(
`switch (foo) {
case foo:
case bar:
break;
default:
{}
}`,
"SwitchStatement",
);
assertEquals(node[0], {
type: "SwitchStatement",
range: [1, 94],
discriminant: {
type: "Identifier",
range: [9, 12],
name: "foo",
},
cases: [
{
type: "SwitchCase",
range: [22, 31],
test: {
type: "Identifier",
range: [27, 30],
name: "foo",
},
consequent: [],
},
{
type: "SwitchCase",
range: [38, 62],
test: {
type: "Identifier",
range: [43, 46],
name: "bar",
},
consequent: [
{
type: "BreakStatement",
label: null,
range: [56, 62],
},
],
},
{
type: "SwitchCase",
range: [69, 88],
test: null,
consequent: [
{
type: "BlockStatement",
range: [86, 88],
body: [],
},
],
},
],
});
});
Deno.test("Plugin - ThrowStatement", () => {
const node = testLintNode("throw foo;", "ThrowStatement");
assertEquals(node[0], {
type: "ThrowStatement",
range: [1, 11],
argument: {
type: "Identifier",
range: [7, 10],
name: "foo",
},
});
});
Deno.test("Plugin - TryStatement", () => {
let node = testLintNode("try {} catch {};", "TryStatement");
assertEquals(node[0], {
type: "TryStatement",
range: [1, 16],
block: {
type: "BlockStatement",
range: [5, 7],
body: [],
},
handler: {
type: "CatchClause",
range: [8, 16],
param: null,
body: {
type: "BlockStatement",
range: [14, 16],
body: [],
},
},
finalizer: null,
});
node = testLintNode("try {} catch (e) {};", "TryStatement");
assertEquals(node[0], {
type: "TryStatement",
range: [1, 20],
block: {
type: "BlockStatement",
range: [5, 7],
body: [],
},
handler: {
type: "CatchClause",
range: [8, 20],
param: {
type: "Identifier",
range: [15, 16],
name: "e",
},
body: {
type: "BlockStatement",
range: [18, 20],
body: [],
},
},
finalizer: null,
});
node = testLintNode("try {} finally {};", "TryStatement");
assertEquals(node[0], {
type: "TryStatement",
range: [1, 18],
block: {
type: "BlockStatement",
range: [5, 7],
body: [],
},
handler: null,
finalizer: {
type: "BlockStatement",
range: [16, 18],
body: [],
},
});
});
Deno.test("Plugin - WhileStatement", () => {
const node = testLintNode("while (foo) {}", "WhileStatement");
assertEquals(node[0], {
type: "WhileStatement",
range: [1, 15],
test: {
type: "Identifier",
range: [8, 11],
name: "foo",
},
body: {
type: "BlockStatement",
range: [13, 15],
body: [],
},
});
});


@ -1,6 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-const EXPECTED_OP_COUNT = 12;
+const EXPECTED_OP_COUNT = 13;
Deno.test(function checkExposedOps() {
// @ts-ignore TS doesn't allow to index with symbol