mirror of https://github.com/denoland/deno.git

chore: more debug logging and avoid allocating strings in ts logging when not debug (#16689)

David Sherret, 2022-11-23 13:34:44 -05:00 (committed via GitHub)
parent cbf4fa143f
commit beaa0d8867
6 changed files with 55 additions and 20 deletions
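
The TypeScript-side half of the title comes down to eager argument evaluation: in a call like debug(`host.fileExists("${specifier}")`), the interpolated string is built before debug() ever runs, so every host call pays for the allocation even when debug logging is off. The commit wraps those calls in an `if (logDebug)` guard; the Rust side just adds log::debug! markers at the start of each cache open and around module-load preparation (the log crate's macros already skip formatting when the level is disabled). A minimal, hypothetical sketch of why the call-site guard is needed even though the debug helper checks the flag itself (stand-in names, not the compiler host's actual plumbing):

// Stand-ins for the host's logDebug flag and debug() helper.
let logDebug = false;

function debug(...args) {
  // The helper checks the flag, but its arguments were already evaluated
  // by the caller before this function was entered.
  if (logDebug) {
    console.error("DEBUG TS -", ...args);
  }
}

// Count how often the interpolated expression actually runs.
let evaluations = 0;
function specifierForLog() {
  evaluations++;
  return "file:///main.ts";
}

// Unguarded: the template literal is built even though nothing is logged.
debug(`host.fileExists("${specifierForLog()}")`);
console.log(evaluations); // 1

// Guarded, as in this commit: the string is never built when logDebug is false.
if (logDebug) {
  debug(`host.fileExists("${specifierForLog()}")`);
}
console.log(evaluations); // still 1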

cli/cache/check.rs

@@ -17,6 +17,7 @@ pub struct TypeCheckCache(Option<Connection>);
 impl TypeCheckCache {
   pub fn new(db_file_path: &Path) -> Self {
+    log::debug!("Loading type check cache.");
     match Self::try_new(db_file_path) {
       Ok(cache) => cache,
       Err(err) => {

cli/cache/incremental.rs

@@ -164,6 +164,7 @@ struct SqlIncrementalCache {
 impl SqlIncrementalCache {
   pub fn new(db_file_path: &Path, state_hash: u64) -> Result<Self, AnyError> {
+    log::debug!("Loading incremental cache.");
     let conn = Connection::open(db_file_path)?;
     Self::from_connection(conn, state_hash, crate::version::deno())
   }

cli/cache/node.rs

@@ -142,6 +142,7 @@ impl NodeAnalysisCacheInner {
     db_file_path: Option<&Path>,
     version: String,
   ) -> Result<Self, AnyError> {
+    log::debug!("Opening node analysis cache.");
     let conn = match db_file_path {
       Some(path) => Connection::open(path)?,
       None => Connection::open_in_memory()?,

cli/cache/parsed_source.rs

@@ -143,6 +143,7 @@ impl ParsedSourceCacheModuleAnalyzer {
     cli_version: String,
     sources: ParsedSourceCacheSources,
   ) -> Result<Self, AnyError> {
+    log::debug!("Loading cached module analyzer.");
     let conn = match db_file_path {
       Some(path) => Connection::open(path)?,
       None => Connection::open_in_memory()?,

cli/proc_state.rs

@@ -292,6 +292,7 @@ impl ProcState {
     dynamic_permissions: Permissions,
     reload_on_watch: bool,
   ) -> Result<(), AnyError> {
+    log::debug!("Preparing module load.");
     let _pb_clear_guard = self.progress_bar.clear_guard();
     let has_root_npm_specifier = roots.iter().any(|r| {
@@ -375,6 +376,7 @@ impl ProcState {
     };
     let analyzer = self.parsed_source_cache.as_analyzer();
+    log::debug!("Creating module graph.");
     let graph = create_graph(
       roots.clone(),
       &mut loader,
@@ -423,6 +425,7 @@ impl ProcState {
     // type check if necessary
     if self.options.type_check_mode() != TypeCheckMode::None {
+      log::debug!("Type checking.");
       let maybe_config_specifier = self.options.maybe_config_file_specifier();
       let roots = roots.clone();
       let options = check::CheckOptions {
@@ -464,6 +467,8 @@ impl ProcState {
       g.write()?;
     }
+    log::debug!("Prepared module load.");
+
     Ok(())
   }

cli/tsc/99_main_compiler.js

@@ -480,12 +480,16 @@ delete Object.prototype.__proto__;
    * @type {ts.CompilerHost & ts.LanguageServiceHost} */
   const host = {
     fileExists(specifier) {
-      debug(`host.fileExists("${specifier}")`);
+      if (logDebug) {
+        debug(`host.fileExists("${specifier}")`);
+      }
       specifier = normalizedToOriginalMap.get(specifier) ?? specifier;
       return ops.op_exists({ specifier });
     },
     readFile(specifier) {
-      debug(`host.readFile("${specifier}")`);
+      if (logDebug) {
+        debug(`host.readFile("${specifier}")`);
+      }
       return ops.op_load({ specifier }).data;
     },
     getCancellationToken() {
@@ -499,11 +503,13 @@ delete Object.prototype.__proto__;
       _shouldCreateNewSourceFile,
     ) {
       const createOptions = getCreateSourceFileOptions(languageVersion);
-      debug(
-        `host.getSourceFile("${specifier}", ${
-          ts.ScriptTarget[createOptions.languageVersion]
-        })`,
-      );
+      if (logDebug) {
+        debug(
+          `host.getSourceFile("${specifier}", ${
+            ts.ScriptTarget[createOptions.languageVersion]
+          })`,
+        );
+      }
       // Needs the original specifier
       specifier = normalizedToOriginalMap.get(specifier) ?? specifier;
@@ -546,13 +552,17 @@ delete Object.prototype.__proto__;
       return ASSETS;
     },
     writeFile(fileName, data, _writeByteOrderMark, _onError, _sourceFiles) {
-      debug(`host.writeFile("${fileName}")`);
+      if (logDebug) {
+        debug(`host.writeFile("${fileName}")`);
+      }
       return ops.op_emit(
         { fileName, data },
       );
     },
     getCurrentDirectory() {
-      debug(`host.getCurrentDirectory()`);
+      if (logDebug) {
+        debug(`host.getCurrentDirectory()`);
+      }
       return cwd ?? ops.op_cwd();
     },
     getCanonicalFileName(fileName) {
@@ -609,9 +619,11 @@ delete Object.prototype.__proto__;
       });
     },
     resolveModuleNames(specifiers, base) {
-      debug(`host.resolveModuleNames()`);
-      debug(`  base: ${base}`);
-      debug(`  specifiers: ${specifiers.join(", ")}`);
+      if (logDebug) {
+        debug(`host.resolveModuleNames()`);
+        debug(`  base: ${base}`);
+        debug(`  specifiers: ${specifiers.join(", ")}`);
+      }
       /** @type {Array<[string, ts.Extension] | undefined>} */
       const resolved = ops.op_resolve({
         specifiers,
@@ -646,11 +658,15 @@ delete Object.prototype.__proto__;
     // LanguageServiceHost
     getCompilationSettings() {
-      debug("host.getCompilationSettings()");
+      if (logDebug) {
+        debug("host.getCompilationSettings()");
+      }
       return compilationSettings;
     },
     getScriptFileNames() {
-      debug("host.getScriptFileNames()");
+      if (logDebug) {
+        debug("host.getScriptFileNames()");
+      }
       // tsc requests the script file names multiple times even though it can't
       // possibly have changed, so we will memoize it on a per request basis.
       if (scriptFileNamesCache) {
@@ -659,7 +675,9 @@ delete Object.prototype.__proto__;
       return scriptFileNamesCache = ops.op_script_names();
     },
     getScriptVersion(specifier) {
-      debug(`host.getScriptVersion("${specifier}")`);
+      if (logDebug) {
+        debug(`host.getScriptVersion("${specifier}")`);
+      }
       const sourceFile = sourceFileCache.get(specifier);
       if (sourceFile) {
         return sourceFile.version ?? "1";
@@ -674,7 +692,9 @@ delete Object.prototype.__proto__;
       return scriptVersion;
     },
     getScriptSnapshot(specifier) {
-      debug(`host.getScriptSnapshot("${specifier}")`);
+      if (logDebug) {
+        debug(`host.getScriptSnapshot("${specifier}")`);
+      }
       const sourceFile = sourceFileCache.get(specifier);
       if (sourceFile) {
         return {
@@ -807,8 +827,10 @@ delete Object.prototype.__proto__;
   setLogDebug(debugFlag, "TS");
   performanceStart();
-  debug(">>> exec start", { rootNames });
-  debug(config);
+  if (logDebug) {
+    debug(">>> exec start", { rootNames });
+    debug(config);
+  }
   rootNames.forEach(checkNormalizedPath);
@@ -877,7 +899,9 @@ delete Object.prototype.__proto__;
    * @param {LanguageServerRequest} request
    */
   function serverRequest({ id, ...request }) {
-    debug(`serverRequest()`, { id, ...request });
+    if (logDebug) {
+      debug(`serverRequest()`, { id, ...request });
+    }
     // reset all memoized source files names
     scriptFileNamesCache = undefined;
@@ -1000,7 +1024,9 @@ delete Object.prototype.__proto__;
         );
       }
       case "getCompletionDetails": {
-        debug("request", request);
+        if (logDebug) {
+          debug("request", request);
+        }
         return respond(
           id,
           languageService.getCompletionEntryDetails(
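
Unrelated to the logging change itself, the context lines in the last hunks show the per-request memoization the guard pattern sits next to: getScriptFileNames() caches the result of ops.op_script_names() and serverRequest() clears that cache before handling each request. A rough stand-alone sketch of that shape (stand-in names; only the memoization logic mirrors the diff):

// Per-request memoization of an expensive lookup, reset at request boundaries.
let scriptFileNamesCache = undefined;

function opScriptNames() {
  // Stand-in for ops.op_script_names(), which calls into Rust in the real host.
  return ["file:///main.ts", "file:///util.ts"];
}

function getScriptFileNames() {
  if (scriptFileNamesCache) {
    return scriptFileNamesCache;
  }
  return scriptFileNamesCache = opScriptNames();
}

function serverRequest(request) {
  // Reset the memoized script file names before handling the request,
  // so repeated lookups within one request reuse a single result.
  scriptFileNamesCache = undefined;
  // ... dispatch on the request kind ...
  return getScriptFileNames();
}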