diff --git a/Cargo.lock b/Cargo.lock index 49a668dee7570f..fc41d3ce38b3ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1337,6 +1337,10 @@ dependencies = [ "notify", "once_cell", "open", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", "p256", "pathdiff", "percent-encoding", @@ -1367,7 +1371,10 @@ dependencies = [ "thiserror 2.0.3", "tokio", "tokio-util", + "tower 0.5.2", "tracing", + "tracing-opentelemetry", + "tracing-subscriber", "twox-hash", "typed-arena", "uuid", @@ -5558,21 +5565,22 @@ checksum = "bc1b6902ff63b32ef6c489e8048c5e253e2e4a803ea3ea7e783914536eb15c52" [[package]] name = "opentelemetry_sdk" -version = "0.27.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b742c1cae4693792cc564e58d75a2a0ba29421a34a85b50da92efa89ecb2bc" +checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" dependencies = [ "async-trait", "futures-channel", "futures-executor", "futures-util", "glob", - "once_cell", "opentelemetry", "percent-encoding", "rand", "serde_json", "thiserror 1.0.64", + "tokio", + "tokio-stream", "tracing", ] @@ -8395,9 +8403,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -8407,9 +8415,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", @@ -8418,9 +8426,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -8437,11 +8445,29 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", diff --git a/Cargo.toml b/Cargo.toml index ca32b029dfbe1c..a2db6eef8ebfe6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -233,7 +233,7 @@ opentelemetry = "0.27.0" opentelemetry-http = "0.27.0" opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] } opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] } 
-opentelemetry_sdk = "0.27.0" +opentelemetry_sdk = { version = "0.27.0", features = ["rt-tokio", "trace"] } # crypto hkdf = "0.12.3" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 54de3db642ffab..548f34505fe4a3 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -140,6 +140,10 @@ monch.workspace = true notify.workspace = true once_cell.workspace = true open = "5.0.1" +opentelemetry.workspace = true +opentelemetry-otlp.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry_sdk.workspace = true p256.workspace = true pathdiff = "0.2.1" percent-encoding.workspace = true @@ -165,8 +169,11 @@ text_lines = "=0.6.0" thiserror.workspace = true tokio.workspace = true tokio-util.workspace = true +tower.workspace = true tower-lsp.workspace = true -tracing = { version = "0.1", features = ["log", "default"] } +tracing = { version = "0.1", features = ["log"] } +tracing-opentelemetry = "0.28.0" +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } twox-hash.workspace = true typed-arena = "=2.0.2" uuid = { workspace = true, features = ["serde"] } diff --git a/cli/lib/util/logger.rs b/cli/lib/util/logger.rs index b280dc22ed6fcb..7f9f9d91684c0e 100644 --- a/cli/lib/util/logger.rs +++ b/cli/lib/util/logger.rs @@ -91,6 +91,7 @@ pub fn init< .filter_module("swc_ecma_parser", log::LevelFilter::Error) // Suppress span lifecycle logs since they are too verbose .filter_module("tracing::span", log::LevelFilter::Off) + .filter_module("tower_lsp", log::LevelFilter::Trace) // for deno_compile, this is too verbose .filter_module("editpe", log::LevelFilter::Error) .format(|buf, record| { diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index 7309d984b40bd5..d61cfe895ed39f 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -150,6 +150,7 @@ fn to_narrow_lsp_range( /// completion response, which will be valid import completions for the specific /// context. #[allow(clippy::too_many_arguments)] +#[tracing::instrument(skip_all)] pub async fn get_import_completions( specifier: &ModuleSpecifier, position: &lsp::Position, diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index e4570031fb7884..1f5f5c1dc76fbd 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -619,6 +619,9 @@ pub struct WorkspaceSettings { #[serde(default)] pub typescript: LanguageWorkspaceSettings, + + #[serde(default)] + pub tracing: Option, } impl Default for WorkspaceSettings { @@ -645,6 +648,7 @@ impl Default for WorkspaceSettings { unstable: Default::default(), javascript: Default::default(), typescript: Default::default(), + tracing: Default::default(), } } } @@ -2286,8 +2290,9 @@ mod tests { suggestion_actions: SuggestionActionsSettings { enabled: true }, update_imports_on_file_move: UpdateImportsOnFileMoveOptions { enabled: UpdateImportsOnFileMoveEnabled::Prompt - } + }, }, + tracing: Default::default() } ); } diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 012cdd1e452e87..8e586cbdebbf56 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -225,6 +225,8 @@ pub struct Inner { /// Set to `self.config.settings.enable_settings_hash()` after /// refreshing `self.workspace_files`. workspace_files_hash: u64, + + _tracing: Option, } impl LanguageServer { @@ -244,6 +246,7 @@ impl LanguageServer { /// Similar to `deno install --entrypoint` on the command line, where modules will be cached /// in the Deno cache, including any of their dependencies. 
+ #[tracing::instrument(skip_all)] pub async fn cache( &self, specifiers: Vec, @@ -419,6 +422,7 @@ impl LanguageServer { } } + #[tracing::instrument(skip_all)] pub async fn refresh_configuration(&self) { let (folders, capable) = { let inner = self.inner.read().await; @@ -510,11 +514,13 @@ impl Inner { url_map: Default::default(), workspace_files: Default::default(), workspace_files_hash: 0, + _tracing: Default::default(), } } /// Searches assets and documents for the provided /// specifier erroring if it doesn't exist. + #[tracing::instrument(skip_all)] pub fn get_asset_or_document( &self, specifier: &ModuleSpecifier, @@ -541,6 +547,7 @@ impl Inner { } } + #[tracing::instrument(skip_all)] pub async fn get_navigation_tree( &self, specifier: &ModuleSpecifier, @@ -602,6 +609,7 @@ impl Inner { } } + #[tracing::instrument(skip_all)] pub fn snapshot(&self) -> Arc { Arc::new(StateSnapshot { project_version: self.project_version, @@ -612,6 +620,37 @@ impl Inner { }) } + pub fn update_tracing(&mut self) { + let tracing = + self + .config + .workspace_settings() + .tracing + .clone() + .or_else(|| { + std::env::var("DENO_LSP_TRACE").ok().map(|_| { + super::trace::TracingConfig { + enable: true, + ..Default::default() + } + .into() + }) + }); + self._tracing = tracing.and_then(|conf| { + if !conf.enabled() { + return None; + } + lsp_log!("Initializing tracing subscriber: {:#?}", conf); + let config = conf.into(); + super::trace::init_tracing_subscriber(&config) + .inspect_err(|e| { + lsp_warn!("Error initializing tracing subscriber: {e:#}"); + }) + .ok() + }); + } + + #[tracing::instrument(skip_all)] pub async fn update_global_cache(&mut self) { let mark = self.performance.mark("lsp.update_global_cache"); let maybe_cache = self.config.workspace_settings().cache.as_ref(); @@ -669,6 +708,7 @@ impl Inner { self.performance.measure(mark); } + #[tracing::instrument(skip_all)] pub fn update_cache(&mut self) { let mark = self.performance.mark("lsp.update_cache"); self.cache.update_config(&self.config); @@ -778,6 +818,7 @@ impl Inner { return Err(tower_lsp::jsonrpc::Error::internal_error()); }; + self.update_tracing(); self.update_debug_flag(); if capabilities.code_action_provider.is_some() { @@ -797,6 +838,7 @@ impl Inner { }) } + #[tracing::instrument(skip_all)] fn walk_workspace(config: &Config) -> (IndexSet, bool) { if !config.workspace_capable() { log::debug!("Skipped workspace walk due to client incapability."); @@ -956,6 +998,7 @@ impl Inner { self.workspace_files_hash = enable_settings_hash; } + #[tracing::instrument(skip_all)] async fn refresh_config_tree(&mut self) { let file_fetcher = CliFileFetcher::new( self.cache.global().clone(), @@ -1011,6 +1054,7 @@ impl Inner { } } + #[tracing::instrument(skip_all)] async fn refresh_resolver(&mut self) { self.resolver = Arc::new( LspResolver::from_config( @@ -1022,6 +1066,7 @@ impl Inner { ); } + #[tracing::instrument(skip_all)] async fn refresh_documents_config(&mut self) { self.documents.update_config( &self.config, @@ -1037,6 +1082,7 @@ impl Inner { self.project_changed([], true); } + #[tracing::instrument(skip_all)] async fn did_open(&mut self, params: DidOpenTextDocumentParams) { let mark = self.performance.mark_with_args("lsp.did_open", ¶ms); let Some(scheme) = params.text_document.uri.scheme() else { @@ -1086,11 +1132,13 @@ impl Inner { self.performance.measure(mark); } + #[tracing::instrument(skip_all)] async fn did_change(&mut self, params: DidChangeTextDocumentParams) { let mark = self.performance.mark_with_args("lsp.did_change", ¶ms); let 
specifier = self .url_map .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); + eprintln!("did_change: {specifier}"); match self.documents.change( &specifier, params.text_document.version, @@ -1123,6 +1171,7 @@ impl Inner { self.performance.measure(mark); } + #[tracing::instrument(skip_all)] fn did_save(&mut self, params: DidSaveTextDocumentParams) { let _mark = self.performance.measure_scope("lsp.did_save"); let specifier = self @@ -1151,6 +1200,7 @@ impl Inner { })); } + #[tracing::instrument(skip_all)] async fn refresh_dep_info(&mut self) { let dep_info_by_scope = self.documents.dep_info_by_scope(); let resolver = self.resolver.clone(); @@ -1162,6 +1212,7 @@ impl Inner { .ok(); } + #[tracing::instrument(skip_all)] async fn did_close(&mut self, params: DidCloseTextDocumentParams) { let mark = self.performance.mark_with_args("lsp.did_close", ¶ms); let Some(scheme) = params.text_document.uri.scheme() else { @@ -1188,6 +1239,8 @@ impl Inner { self.performance.measure(mark); } + #[tracing::instrument(skip_all)] + async fn did_change_configuration( &mut self, params: DidChangeConfigurationParams, @@ -1206,6 +1259,7 @@ impl Inner { self.config.set_workspace_settings(settings, vec![]); } }; + // self.update_tracing(); self.update_debug_flag(); self.update_global_cache().await; self.refresh_workspace_files(); @@ -1218,6 +1272,8 @@ impl Inner { self.send_testing_update(); } + #[tracing::instrument(skip(self))] + async fn did_change_watched_files( &mut self, params: DidChangeWatchedFilesParams, @@ -1303,6 +1359,8 @@ impl Inner { self.performance.measure(mark); } + #[tracing::instrument(skip_all)] + async fn document_symbol( &self, params: DocumentSymbolParams, @@ -1345,6 +1403,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn formatting( &self, params: DocumentFormattingParams, @@ -1458,6 +1518,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn hover(&self, params: HoverParams) -> LspResult> { let specifier = self.url_map.uri_to_specifier( ¶ms.text_document_position_params.text_document.uri, @@ -1544,6 +1606,8 @@ impl Inner { Ok(hover) } + #[tracing::instrument(skip_all)] + fn resolution_to_hover_text( &self, resolution: &Resolution, @@ -1586,6 +1650,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn code_action( &self, params: CodeActionParams, @@ -1805,6 +1871,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn code_action_resolve( &self, params: CodeAction, @@ -1958,6 +2026,8 @@ impl Inner { ) } + #[tracing::instrument(skip_all)] + async fn code_lens( &self, params: CodeLensParams, @@ -2023,6 +2093,8 @@ impl Inner { Ok(Some(code_lenses)) } + #[tracing::instrument(skip_all)] + async fn code_lens_resolve( &self, code_lens: CodeLens, @@ -2046,6 +2118,8 @@ impl Inner { result } + #[tracing::instrument(skip_all)] + async fn document_highlight( &self, params: DocumentHighlightParams, @@ -2090,6 +2164,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn references( &self, params: ReferenceParams, @@ -2146,6 +2222,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn goto_definition( &self, params: GotoDefinitionParams, @@ -2185,6 +2263,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn goto_type_definition( &self, params: GotoTypeDefinitionParams, @@ -2231,6 +2311,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn completion( &self, params: CompletionParams, @@ -2343,6 +2425,8 @@ impl Inner { Ok(response) } + 
#[tracing::instrument(skip_all)] + async fn completion_resolve( &self, params: CompletionItem, @@ -2424,6 +2508,8 @@ impl Inner { Ok(completion_item) } + #[tracing::instrument(skip_all)] + async fn goto_implementation( &self, params: GotoImplementationParams, @@ -2473,6 +2559,8 @@ impl Inner { Ok(result) } + #[tracing::instrument(skip_all)] + async fn folding_range( &self, params: FoldingRangeParams, @@ -2520,6 +2608,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn incoming_calls( &self, params: CallHierarchyIncomingCallsParams, @@ -2569,6 +2659,8 @@ impl Inner { Ok(Some(resolved_items)) } + #[tracing::instrument(skip_all)] + async fn outgoing_calls( &self, params: CallHierarchyOutgoingCallsParams, @@ -2616,6 +2708,8 @@ impl Inner { Ok(Some(resolved_items)) } + #[tracing::instrument(skip_all)] + async fn prepare_call_hierarchy( &self, params: CallHierarchyPrepareParams, @@ -2680,6 +2774,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn rename( &self, params: RenameParams, @@ -2727,6 +2823,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn selection_range( &self, params: SelectionRangeParams, @@ -2765,6 +2863,8 @@ impl Inner { Ok(Some(selection_ranges)) } + #[tracing::instrument(skip_all)] + async fn semantic_tokens_full( &self, params: SemanticTokensParams, @@ -2818,6 +2918,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn semantic_tokens_range( &self, params: SemanticTokensRangeParams, @@ -2869,6 +2971,8 @@ impl Inner { Ok(response) } + #[tracing::instrument(skip_all)] + async fn signature_help( &self, params: SignatureHelpParams, @@ -2921,6 +3025,8 @@ impl Inner { } } + #[tracing::instrument(skip_all)] + async fn will_rename_files( &self, params: RenameFilesParams, @@ -2973,6 +3079,8 @@ impl Inner { file_text_changes_to_workspace_edit(&changes, self) } + #[tracing::instrument(skip_all)] + async fn symbol( &self, params: WorkspaceSymbolParams, @@ -3012,6 +3120,8 @@ impl Inner { Ok(maybe_symbol_information) } + #[tracing::instrument(skip_all)] + fn project_changed<'a>( &mut self, modified_scripts: impl IntoIterator, @@ -3033,6 +3143,7 @@ impl Inner { ); } + #[tracing::instrument(skip_all)] fn send_diagnostics_update(&self) { let snapshot = DiagnosticServerUpdateMessage { snapshot: self.snapshot(), @@ -3587,6 +3698,8 @@ impl Inner { registrations } + #[tracing::instrument(skip_all)] + fn prepare_cache( &mut self, specifiers: Vec, @@ -3665,6 +3778,8 @@ impl Inner { }) } + #[tracing::instrument(skip_all)] + async fn post_cache(&mut self) { self.resolver.did_cache(); self.refresh_dep_info().await; @@ -3675,6 +3790,8 @@ impl Inner { self.send_testing_update(); } + #[tracing::instrument(skip_all)] + fn pre_did_change_workspace_folders( &mut self, params: DidChangeWorkspaceFoldersParams, @@ -3703,6 +3820,8 @@ impl Inner { self.config.set_workspace_folders(workspace_folders); } + #[tracing::instrument(skip_all)] + async fn post_did_change_workspace_folders(&mut self) { self.refresh_workspace_files(); self.refresh_config_tree().await; diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs index 6b5d17798bee56..4eea59f21204f7 100644 --- a/cli/lsp/mod.rs +++ b/cli/lsp/mod.rs @@ -36,6 +36,7 @@ mod search; mod semantic_tokens; mod testing; mod text; +mod trace; mod tsc; mod urls; diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index f8c38d97ea46a6..ccdbae7256b1ed 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -613,6 +613,7 @@ FetchPermissionsOptionRef::AllowAll, /// For a string 
specifier from the client, provide a set of completions, if /// any, for the specifier. + #[tracing::instrument(skip_all)] pub async fn get_completions( &self, text: &str, diff --git a/cli/lsp/repl.rs b/cli/lsp/repl.rs index a30427312d3741..c682b7b05e302e 100644 --- a/cli/lsp/repl.rs +++ b/cli/lsp/repl.rs @@ -352,5 +352,6 @@ pub fn get_repl_workspace_settings() -> WorkspaceSettings { }, ..Default::default() }, + tracing: Default::default(), } } diff --git a/cli/lsp/trace.rs b/cli/lsp/trace.rs new file mode 100644 index 00000000000000..8384a846386dd1 --- /dev/null +++ b/cli/lsp/trace.rs @@ -0,0 +1,158 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::fmt; + +use deno_core::anyhow; +use opentelemetry::trace::TracerProvider; +use opentelemetry::KeyValue; +use opentelemetry_otlp::WithExportConfig; +use opentelemetry_sdk::Resource; +use opentelemetry_semantic_conventions::resource::SERVICE_NAME; +use serde::Deserialize; +use serde::Serialize; +use tracing::level_filters::LevelFilter; +use tracing_opentelemetry::OpenTelemetryLayer; +use tracing_subscriber::fmt::format::FmtSpan; +use tracing_subscriber::layer::SubscriberExt; + +use crate::lsp::logging::lsp_debug; + +pub(crate) fn make_tracer( + endpoint: Option<&str>, +) -> Result { + let endpoint = endpoint.unwrap_or("http://localhost:4317"); + let exporter = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_endpoint(endpoint) + .build()?; + let provider = opentelemetry_sdk::trace::Builder::default() + .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + .with_resource(Resource::new(vec![KeyValue::new(SERVICE_NAME, "deno-lsp")])) + .build(); + opentelemetry::global::set_tracer_provider(provider.clone()); + Ok(provider.tracer("deno-lsp-tracer")) +} + +pub(crate) struct TracingGuard( + // #[allow(dead_code)] tracing::dispatcher::DefaultGuard, + (), +); + +impl fmt::Debug for TracingGuard { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("TracingGuard").finish() + } +} + +impl Drop for TracingGuard { + fn drop(&mut self) { + lsp_debug!("Shutting down tracing"); + tokio::task::spawn_blocking(|| { + opentelemetry::global::shutdown_tracer_provider() + }); + } +} + +#[derive( + Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Copy, Default, +)] +#[serde(rename_all = "camelCase")] +pub(crate) enum TracingCollector { + #[default] + OpenTelemetry, + Logging, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)] +#[serde(default, rename_all = "camelCase")] +pub(crate) struct TracingConfig { + /// Enable tracing. + pub(crate) enable: bool, + + /// The collector to use. Defaults to `OpenTelemetry`. + /// If `Logging` is used, the collected traces will be written to stderr. + pub(crate) collector: TracingCollector, + + /// The filter to use. Defaults to `INFO`. + pub(crate) filter: Option, + + /// The endpoint to use for the OpenTelemetry collector. 
+ pub(crate) collector_endpoint: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] +#[serde(untagged)] +pub(crate) enum TracingConfigOrEnabled { + Config(TracingConfig), + Enabled(bool), +} + +impl From for TracingConfigOrEnabled { + fn from(value: TracingConfig) -> Self { + TracingConfigOrEnabled::Config(value) + } +} + +impl From for TracingConfig { + fn from(value: TracingConfigOrEnabled) -> Self { + match value { + TracingConfigOrEnabled::Config(config) => config, + TracingConfigOrEnabled::Enabled(enabled) => TracingConfig { + enable: enabled, + ..Default::default() + }, + } + } +} + +impl TracingConfigOrEnabled { + pub(crate) fn enabled(&self) -> bool { + match self { + TracingConfigOrEnabled::Config(config) => config.enable, + TracingConfigOrEnabled::Enabled(enabled) => *enabled, + } + } +} + +pub(crate) fn init_tracing_subscriber( + config: &TracingConfig, +) -> Result { + if !config.enable { + return Err(anyhow::anyhow!("Tracing is not enabled")); + } + let filter = tracing_subscriber::EnvFilter::builder() + .with_default_directive(LevelFilter::INFO.into()); + let filter = if let Some(directive) = config.filter.as_ref() { + filter.parse(directive)? + } else { + filter.with_env_var("DENO_LSP_TRACE").from_env()? + }; + let open_telemetry_layer = match config.collector { + TracingCollector::OpenTelemetry => Some(OpenTelemetryLayer::new( + make_tracer(config.collector_endpoint.as_deref())?, + )), + _ => None, + }; + let logging_layer = match config.collector { + TracingCollector::Logging => Some( + tracing_subscriber::fmt::layer() + .with_writer(std::io::stderr) + // Include span events in the log output. + // Without this, only events get logged (and at the moment we have none). + .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE), + ), + _ => None, + }; + + tracing::subscriber::set_global_default( + tracing_subscriber::registry() + .with(filter) + .with(logging_layer) + .with(open_telemetry_layer), + ) + .unwrap(); + // let guard = + // .set_global_default(); + let guard = (); + Ok(TracingGuard(guard)) +} diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index e523e0b31fc35d..eb87c4b06bdf2f 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -6,6 +6,7 @@ use std::collections::BTreeMap; use std::collections::HashMap; use std::collections::HashSet; use std::convert::Infallible; +use std::ffi::c_void; use std::net::SocketAddr; use std::ops::Range; use std::path::Path; @@ -64,6 +65,7 @@ use tokio_util::sync::CancellationToken; use tower_lsp::jsonrpc::Error as LspError; use tower_lsp::jsonrpc::Result as LspResult; use tower_lsp::lsp_types as lsp; +use tracing_opentelemetry::OpenTelemetrySpanExt; use super::analysis::CodeActionData; use super::code_lens; @@ -123,6 +125,7 @@ type Request = ( oneshot::Sender>, CancellationToken, Option, + Option, ); #[derive(Debug, Clone, Copy, Serialize_repr)] @@ -466,6 +469,7 @@ impl TsServer { } } + #[tracing::instrument(skip_all)] pub async fn get_diagnostics( &self, snapshot: Arc, @@ -526,6 +530,7 @@ impl TsServer { Ok((diagnostics_map, ambient_modules_by_scope)) } + #[tracing::instrument(skip_all)] pub async fn cleanup_semantic_cache(&self, snapshot: Arc) { for scope in snapshot .config @@ -547,6 +552,7 @@ impl TsServer { } } + #[tracing::instrument(skip_all)] pub async fn find_references( &self, snapshot: Arc, @@ -596,6 +602,7 @@ impl TsServer { Ok(Some(all_symbols.into_iter().collect())) } + #[tracing::instrument(skip_all)] pub async fn get_navigation_tree( &self, snapshot: Arc, @@ -608,6 +615,7 @@ impl TsServer { 
self.request(snapshot, req, scope).await } + #[tracing::instrument(skip_all)] pub async fn get_supported_code_fixes( &self, snapshot: Arc, @@ -619,6 +627,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_quick_info( &self, snapshot: Arc, @@ -637,6 +646,7 @@ impl TsServer { } #[allow(clippy::too_many_arguments)] + #[tracing::instrument(skip_all)] pub async fn get_code_fixes( &self, snapshot: Arc, @@ -678,6 +688,7 @@ impl TsServer { } #[allow(clippy::too_many_arguments)] + #[tracing::instrument(skip_all)] pub async fn get_applicable_refactors( &self, snapshot: Arc, @@ -706,6 +717,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_combined_code_fix( &self, snapshot: Arc, @@ -737,6 +749,7 @@ impl TsServer { } #[allow(clippy::too_many_arguments)] + #[tracing::instrument(skip_all)] pub async fn get_edits_for_refactor( &self, snapshot: Arc, @@ -769,6 +782,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_edits_for_file_rename( &self, snapshot: Arc, @@ -819,6 +833,7 @@ impl TsServer { Ok(all_changes.into_iter().collect()) } + #[tracing::instrument(skip_all)] pub async fn get_document_highlights( &self, snapshot: Arc, @@ -841,6 +856,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_definition( &self, snapshot: Arc, @@ -867,6 +883,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_type_definition( &self, snapshot: Arc, @@ -893,6 +910,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_completions( &self, snapshot: Arc, @@ -919,6 +937,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_completion_details( &self, snapshot: Arc, @@ -945,6 +964,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_implementations( &self, snapshot: Arc, @@ -994,6 +1014,7 @@ impl TsServer { Ok(Some(all_locations.into_iter().collect())) } + #[tracing::instrument(skip_all)] pub async fn get_outlining_spans( &self, snapshot: Arc, @@ -1009,6 +1030,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn provide_call_hierarchy_incoming_calls( &self, snapshot: Arc, @@ -1052,6 +1074,7 @@ impl TsServer { Ok(all_calls.into_iter().collect()) } + #[tracing::instrument(skip_all)] pub async fn provide_call_hierarchy_outgoing_calls( &self, snapshot: Arc, @@ -1078,6 +1101,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn prepare_call_hierarchy( &self, snapshot: Arc, @@ -1112,6 +1136,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn find_rename_locations( &self, snapshot: Arc, @@ -1164,6 +1189,7 @@ impl TsServer { Ok(Some(all_locations.into_iter().collect())) } + #[tracing::instrument(skip_all)] pub async fn get_smart_selection_range( &self, snapshot: Arc, @@ -1181,6 +1207,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_encoded_semantic_classifications( &self, snapshot: Arc, @@ -1202,6 +1229,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_signature_help_items( &self, snapshot: Arc, @@ -1221,6 +1249,7 @@ impl TsServer { }) } + #[tracing::instrument(skip_all)] pub async fn get_navigate_to_items( &self, snapshot: Arc, @@ -1264,6 +1293,7 @@ impl TsServer { Ok(all_items.into_iter().collect()) } + #[tracing::instrument(skip_all)] pub async fn provide_inlay_hints( &self, snapshot: Arc, @@ -1312,6 +1342,8 @@ impl TsServer { where R: de::DeserializeOwned, { + let context = tracing::Span::current().context(); + // 
When an LSP request is cancelled by the client, the future this is being // executed under and any local variables here will be dropped at the next // await point. To pass on that cancellation to the TS thread, we use drop_guard @@ -1323,7 +1355,15 @@ impl TsServer { if self .sender - .send((req, scope, snapshot, tx, token.clone(), change)) + .send(( + req, + scope, + snapshot, + tx, + token.clone(), + change, + Some(context), + )) .is_err() { return Err(anyhow!("failed to send request to tsc thread")); @@ -1332,6 +1372,7 @@ impl TsServer { value = &mut rx => { let value = value??; droppable_token.disarm(); + let _sp = tracing::info_span!("deserialize TSC response").entered(); Ok(serde_json::from_str(&value)?) } _ = token.cancelled() => { @@ -3685,12 +3726,14 @@ pub struct CompletionInfo { } impl CompletionInfo { + #[tracing::instrument(skip_all)] fn normalize(&mut self, specifier_map: &TscSpecifierMap) { for entry in &mut self.entries { entry.normalize(specifier_map); } } + #[tracing::instrument(skip_all, fields(entries = %self.entries.len()))] pub fn as_completion_response( &self, line_index: Arc, @@ -4401,6 +4444,7 @@ struct State { token: CancellationToken, pending_requests: Option>, mark: Option, + context: Option, } impl State { @@ -4420,6 +4464,7 @@ impl State { token: Default::default(), mark: None, pending_requests: Some(pending_requests), + context: None, } } @@ -4500,6 +4545,7 @@ fn op_load<'s>( state: &mut OpState, #[string] specifier: &str, ) -> Result, LoadError> { + let _span = tracing::info_span!("op_load").entered(); let state = state.borrow_mut::(); let mark = state .performance @@ -4531,6 +4577,7 @@ fn op_release( state: &mut OpState, #[string] specifier: &str, ) -> Result<(), deno_core::url::ParseError> { + let _span = tracing::info_span!("op_release").entered(); let state = state.borrow_mut::(); let mark = state .performance @@ -4549,6 +4596,7 @@ fn op_resolve( #[string] base: String, #[serde] specifiers: Vec<(bool, String)>, ) -> Result)>>, deno_core::url::ParseError> { + let _span = tracing::info_span!("op_resolve").entered(); op_resolve_inner(state, ResolveArgs { base, specifiers }) } @@ -4600,7 +4648,7 @@ async fn op_poll_requests( state.pending_requests.take().unwrap() }; - let Some((request, scope, snapshot, response_tx, token, change)) = + let Some((request, scope, snapshot, response_tx, token, change, context)) = pending_requests.recv().await else { return None.into(); @@ -4619,6 +4667,7 @@ async fn op_poll_requests( .performance .mark_with_args(format!("tsc.host.{}", request.method()), &request); state.mark = Some(mark); + state.context = context; Some(TscRequestArray { request, @@ -4666,6 +4715,7 @@ fn op_respond( #[string] response: String, #[string] error: String, ) { + let _span = tracing::info_span!("op_respond").entered(); let state = state.borrow_mut::(); state.performance.measure(state.mark.take().unwrap()); state.last_scope = None; @@ -4683,6 +4733,47 @@ fn op_respond( } } +struct TracingSpan(#[allow(dead_code)] tracing::span::EnteredSpan); +// struct TracingSpan(#[allow(dead_code)] ()); + +deno_core::external!(TracingSpan, "tracingspan"); + +fn span_with_context( + state: &State, + span: tracing::Span, +) -> tracing::span::EnteredSpan { + if let Some(context) = &state.context { + span.set_parent(context.clone()); + } + span.entered() +} + +#[op2(fast)] +fn op_make_span( + op_state: &mut OpState, + #[string] s: &str, + needs_context: bool, +) -> *const c_void { + let state = op_state.borrow_mut::(); + let sp = tracing::info_span!("js", otel.name = 
format!("js::{s}").as_str()); + let span = if needs_context { + span_with_context(state, sp) + } else { + sp.entered() + }; + deno_core::ExternalPointer::new(TracingSpan(span)).into_raw() +} + +#[op2(fast)] +fn op_exit_span(op_state: &mut OpState, span: *const c_void, root: bool) { + let ptr = deno_core::ExternalPointer::::from_raw(span); + let _span = unsafe { ptr.unsafely_take().0 }; + if root { + let state = op_state.borrow_mut::(); + state.context = None; + } +} + #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct ScriptNames { @@ -4693,6 +4784,7 @@ struct ScriptNames { #[op2] #[serde] fn op_script_names(state: &mut OpState) -> ScriptNames { + let _span = tracing::info_span!("op_script_names").entered(); let state = state.borrow_mut::(); let mark = state.performance.mark("tsc.op.op_script_names"); let mut result = ScriptNames { @@ -4960,6 +5052,8 @@ deno_core::extension!(deno_tsc, op_script_version, op_project_version, op_poll_requests, + op_make_span, + op_exit_span, ], options = { performance: Arc, diff --git a/cli/tsc/00_typescript.js b/cli/tsc/00_typescript.js index b7626fe08269bb..7b805a52f6735d 100644 --- a/cli/tsc/00_typescript.js +++ b/cli/tsc/00_typescript.js @@ -11305,8 +11305,13 @@ __export(deno_exports, { parseNpmPackageReference: () => parseNpmPackageReference, setIsNodeSourceFileCallback: () => setIsNodeSourceFileCallback, setNodeOnlyGlobalNames: () => setNodeOnlyGlobalNames, - tryParseNpmPackageReference: () => tryParseNpmPackageReference + tryParseNpmPackageReference: () => tryParseNpmPackageReference, + setSpanned: () => setSpanned, }); +var spanned = (name, f) => ((...args) => f(...args)); +function setSpanned(s) { + spanned = s; +} var isNodeSourceFile = () => false; var nodeOnlyGlobalNames = /* @__PURE__ */ new Set(); function setIsNodeSourceFileCallback(callback) { @@ -124957,6 +124962,9 @@ function createCreateProgramOptions(rootNames, options, host, oldProgram, config }; } function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _configFileParsingDiagnostics) { + return spanned("tsc::createProgram", () => { + + var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p; const createProgramOptions = isArray(rootNamesOrOptions) ? 
createCreateProgramOptions(rootNamesOrOptions, _options, _host, _oldProgram, _configFileParsingDiagnostics) : rootNamesOrOptions; const { rootNames, options, configFileParsingDiagnostics, projectReferences, typeScriptVersion: typeScriptVersion3 } = createProgramOptions; @@ -125684,12 +125692,13 @@ function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _config ); } function tryReuseStructureFromOldProgram() { + return spanned("tsc::tryReuseStructureFromOldProgram", () => { var _a2; if (!oldProgram) { return 0 /* Not */; } const oldOptions = oldProgram.getCompilerOptions(); - if (changesAffectModuleResolution(oldOptions, options)) { + if (spanned("tsc::changesAffectModuleResolution", () => changesAffectModuleResolution(oldOptions, options))) { return 0 /* Not */; } const oldRootNames = oldProgram.getRootFileNames(); @@ -125715,7 +125724,7 @@ function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _config SeenPackageName2[SeenPackageName2["Modified"] = 1] = "Modified"; })(SeenPackageName || (SeenPackageName = {})); const seenPackageNames = /* @__PURE__ */ new Map(); - for (const oldSourceFile of oldSourceFiles) { + const res = spanned("CheckOldSourceFiles", () => {for (const oldSourceFile of oldSourceFiles) { const sourceFileOptions = getCreateSourceFileOptions(oldSourceFile.fileName, moduleResolutionCache, host, options); let newSourceFile = host.getSourceFileByPath ? host.getSourceFileByPath( oldSourceFile.fileName, @@ -125792,6 +125801,9 @@ function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _config modifiedSourceFiles.push(newSourceFile); } newSourceFiles.push(newSourceFile); + }}); + if (res !== undefined) { + return res; } if (structureIsReused !== 2 /* Completely */) { return structureIsReused; @@ -125865,6 +125877,7 @@ function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _config resolvedLibReferences = oldProgram.resolvedLibReferences; packageMap = oldProgram.getCurrentPackagesMap(); return 2 /* Completely */; + }); } function getEmitHost(writeFileCallback) { return { @@ -128005,6 +128018,7 @@ function createProgram(rootNamesOrOptions, _options, _host, _oldProgram, _config function shouldTransformImportCall(sourceFile) { return shouldTransformImportCallWorker(sourceFile, getCompilerOptionsForFile(sourceFile)); } +}); } function shouldTransformImportCallWorker(sourceFile, options) { const moduleKind = getEmitModuleKind(options); diff --git a/cli/tsc/97_ts_host.js b/cli/tsc/97_ts_host.js index ba82a12b7c00f5..ed55adf6f49ded 100644 --- a/cli/tsc/97_ts_host.js +++ b/cli/tsc/97_ts_host.js @@ -11,6 +11,18 @@ const ops = core.ops; let logDebug = false; let logSource = "JS"; +function spanned(name, f) { + if (!ops.op_make_span) { + return f(); + } + const span = ops.op_make_span(name, false); + try { + return f(); + } finally { + ops.op_exit_span(span); + } +} + // The map from the normalized specifier to the original. // TypeScript normalizes the specifier in its internal processing, // but the original specifier is needed when looking up the source from the runtime. @@ -173,6 +185,8 @@ ts.deno.setIsNodeSourceFileCallback((sourceFile) => { return isNodeSourceFile; }); +ts.deno.setSpanned(spanned); + /** * @param msg {string} * @param code {number} @@ -374,7 +388,7 @@ export function clearScriptNamesCache() { * specific "bindings" to the Deno environment that tsc needs to work. 
* * @type {ts.CompilerHost & ts.LanguageServiceHost} */ -export const host = { +const hosty = { fileExists(specifier) { if (logDebug) { debug(`host.fileExists("${specifier}")`); @@ -714,7 +728,33 @@ export const host = { } return scriptSnapshot; }, + // getModuleResolutionCache() { + // return resolutionCache; + // } }; +// resolutionCache = ts.createResolutionCache(host, host.getCurrentDirectory(), true); +const excluded = new Set([ + "getScriptVersion", + "fileExists", + "getScriptSnapshot", + "getCompilationSettings", + "getCurrentDirectory", + "useCaseSensitiveFileNames", + "getModuleSpecifierCache", + "getGlobalTypingsCacheLocation", + "getSourceFile", +]); +/** @type {typeof hosty} */ +export const host = {}; +for (const [key, value] of Object.entries(hosty)) { + if (typeof value === "function" && !excluded.has(key)) { + host[key] = (...args) => { + return spanned(key, () => value.bind(host)(...args)); + }; + } else { + host[key] = value; + } +} // @ts-ignore Undocumented function. const moduleSpecifierCache = ts.server.createModuleSpecifierCache(host); diff --git a/cli/tsc/98_lsp.js b/cli/tsc/98_lsp.js index 09b4472124c456..6257e7fb1d1ac2 100644 --- a/cli/tsc/98_lsp.js +++ b/cli/tsc/98_lsp.js @@ -384,140 +384,146 @@ function arraysEqual(a, b) { * @param {PendingChange | null} maybeChange */ function serverRequest(id, method, args, scope, maybeChange) { - debug(`serverRequest()`, id, method, args, scope, maybeChange); - if (maybeChange !== null) { - const changedScripts = maybeChange[0]; - const newProjectVersion = maybeChange[1]; - const newConfigsByScope = maybeChange[2]; - if (newConfigsByScope) { - IS_NODE_SOURCE_FILE_CACHE.clear(); - ASSET_SCOPES.clear(); - /** @type { typeof LANGUAGE_SERVICE_ENTRIES.byScope } */ - const newByScope = new Map(); - for (const [scope, config] of newConfigsByScope) { - LAST_REQUEST_SCOPE.set(scope); - const oldEntry = LANGUAGE_SERVICE_ENTRIES.byScope.get(scope); - const ls = oldEntry - ? oldEntry.ls - : ts.createLanguageService(host, documentRegistry); - const compilerOptions = lspTsConfigToCompilerOptions(config); - newByScope.set(scope, { ls, compilerOptions }); - LANGUAGE_SERVICE_ENTRIES.byScope.delete(scope); - } - for (const oldEntry of LANGUAGE_SERVICE_ENTRIES.byScope.values()) { - oldEntry.ls.dispose(); + const span = ops.op_make_span(`serverRequest(${method})`, true); + try { + debug(`serverRequest()`, id, method, args, scope, maybeChange); + if (maybeChange !== null) { + const changedScripts = maybeChange[0]; + const newProjectVersion = maybeChange[1]; + const newConfigsByScope = maybeChange[2]; + if (newConfigsByScope) { + IS_NODE_SOURCE_FILE_CACHE.clear(); + ASSET_SCOPES.clear(); + /** @type { typeof LANGUAGE_SERVICE_ENTRIES.byScope } */ + const newByScope = new Map(); + for (const [scope, config] of newConfigsByScope) { + LAST_REQUEST_SCOPE.set(scope); + const oldEntry = LANGUAGE_SERVICE_ENTRIES.byScope.get(scope); + const ls = oldEntry + ? 
oldEntry.ls + : ts.createLanguageService(host, documentRegistry); + const compilerOptions = lspTsConfigToCompilerOptions(config); + newByScope.set(scope, { ls, compilerOptions }); + LANGUAGE_SERVICE_ENTRIES.byScope.delete(scope); + } + for (const oldEntry of LANGUAGE_SERVICE_ENTRIES.byScope.values()) { + oldEntry.ls.dispose(); + } + LANGUAGE_SERVICE_ENTRIES.byScope = newByScope; } - LANGUAGE_SERVICE_ENTRIES.byScope = newByScope; - } - PROJECT_VERSION_CACHE.set(newProjectVersion); + PROJECT_VERSION_CACHE.set(newProjectVersion); - let opened = false; - let closed = false; - for (const { 0: script, 1: changeKind } of changedScripts) { - if (changeKind === ChangeKind.Opened) { - opened = true; - } else if (changeKind === ChangeKind.Closed) { - closed = true; + let opened = false; + let closed = false; + for (const { 0: script, 1: changeKind } of changedScripts) { + if (changeKind === ChangeKind.Opened) { + opened = true; + } else if (changeKind === ChangeKind.Closed) { + closed = true; + } + SCRIPT_VERSION_CACHE.delete(script); + SCRIPT_SNAPSHOT_CACHE.delete(script); } - SCRIPT_VERSION_CACHE.delete(script); - SCRIPT_SNAPSHOT_CACHE.delete(script); - } - if (newConfigsByScope || opened || closed) { - clearScriptNamesCache(); + if (newConfigsByScope || opened || closed) { + clearScriptNamesCache(); + } } - } - // For requests pertaining to an asset document, we make it so that the - // passed scope is just its own specifier. We map it to an actual scope here - // based on the first scope that the asset was loaded into. - if (scope?.startsWith(ASSETS_URL_PREFIX)) { - scope = ASSET_SCOPES.get(scope) ?? null; - } - LAST_REQUEST_METHOD.set(method); - LAST_REQUEST_SCOPE.set(scope); - const ls = (scope ? LANGUAGE_SERVICE_ENTRIES.byScope.get(scope)?.ls : null) ?? - LANGUAGE_SERVICE_ENTRIES.unscoped.ls; - switch (method) { - case "$getSupportedCodeFixes": { - return respond( - id, - ts.getSupportedCodeFixes(), - ); - } - case "$getAssets": { - return respond(id, getAssets()); + // For requests pertaining to an asset document, we make it so that the + // passed scope is just its own specifier. We map it to an actual scope here + // based on the first scope that the asset was loaded into. + if (scope?.startsWith(ASSETS_URL_PREFIX)) { + scope = ASSET_SCOPES.get(scope) ?? null; } - case "$getDiagnostics": { - const projectVersion = args[1]; - // there's a possibility that we receive a change notification - // but the diagnostic server queues a `$getDiagnostics` request - // with a stale project version. in that case, treat it as cancelled - // (it's about to be invalidated anyway). - const cachedProjectVersion = PROJECT_VERSION_CACHE.get(); - if (cachedProjectVersion && projectVersion !== cachedProjectVersion) { - return respond(id, [{}, null]); + LAST_REQUEST_METHOD.set(method); + LAST_REQUEST_SCOPE.set(scope); + const ls = + (scope ? LANGUAGE_SERVICE_ENTRIES.byScope.get(scope)?.ls : null) ?? + LANGUAGE_SERVICE_ENTRIES.unscoped.ls; + switch (method) { + case "$getSupportedCodeFixes": { + return respond( + id, + ts.getSupportedCodeFixes(), + ); } - try { - /** @type {Record} */ - const diagnosticMap = {}; - for (const specifier of args[0]) { - diagnosticMap[specifier] = fromTypeScriptDiagnostics([ - ...ls.getSemanticDiagnostics(specifier), - ...ls.getSuggestionDiagnostics(specifier), - ...ls.getSyntacticDiagnostics(specifier), - ].filter(filterMapDiagnostic)); - } - let ambient = - ls.getProgram()?.getTypeChecker().getAmbientModules().map((symbol) => - symbol.getName() - ) ?? 
[]; - const previousAmbient = ambientModulesCacheByScope.get(scope); - if ( - ambient && previousAmbient && arraysEqual(ambient, previousAmbient) - ) { - ambient = null; // null => use previous value - } else { - ambientModulesCacheByScope.set(scope, ambient); - } - return respond(id, [diagnosticMap, ambient]); - } catch (e) { - if ( - !isCancellationError(e) - ) { - return respond( - id, - [{}, null], - formatErrorWithArgs(e, [id, method, args, scope, maybeChange]), - ); - } - return respond(id, [{}, null]); + case "$getAssets": { + return respond(id, getAssets()); } - } - default: - if (typeof ls[method] === "function") { - // The `getCompletionEntryDetails()` method returns null if the - // `source` is `null` for whatever reason. It must be `undefined`. - if (method == "getCompletionEntryDetails") { - args[4] ??= undefined; + case "$getDiagnostics": { + const projectVersion = args[1]; + // there's a possibility that we receive a change notification + // but the diagnostic server queues a `$getDiagnostics` request + // with a stale project version. in that case, treat it as cancelled + // (it's about to be invalidated anyway). + const cachedProjectVersion = PROJECT_VERSION_CACHE.get(); + if (cachedProjectVersion && projectVersion !== cachedProjectVersion) { + return respond(id, [{}, null]); } try { - return respond(id, ls[method](...args)); + /** @type {Record} */ + const diagnosticMap = {}; + for (const specifier of args[0]) { + diagnosticMap[specifier] = fromTypeScriptDiagnostics([ + ...ls.getSemanticDiagnostics(specifier), + ...ls.getSuggestionDiagnostics(specifier), + ...ls.getSyntacticDiagnostics(specifier), + ].filter(filterMapDiagnostic)); + } + let ambient = + ls.getProgram()?.getTypeChecker().getAmbientModules().map(( + symbol, + ) => symbol.getName()) ?? []; + const previousAmbient = ambientModulesCacheByScope.get(scope); + if ( + ambient && previousAmbient && arraysEqual(ambient, previousAmbient) + ) { + ambient = null; // null => use previous value + } else { + ambientModulesCacheByScope.set(scope, ambient); + } + return respond(id, [diagnosticMap, ambient]); } catch (e) { - if (!isCancellationError(e)) { + if ( + !isCancellationError(e) + ) { return respond( id, - null, + [{}, null], formatErrorWithArgs(e, [id, method, args, scope, maybeChange]), ); } - return respond(id); + return respond(id, [{}, null]); } } - throw new TypeError( - // @ts-ignore exhausted case statement sets type to never - `Invalid request method for request: "${method}" (${id})`, - ); + default: + if (typeof ls[method] === "function") { + // The `getCompletionEntryDetails()` method returns null if the + // `source` is `null` for whatever reason. It must be `undefined`. + if (method == "getCompletionEntryDetails") { + args[4] ??= undefined; + } + try { + return respond(id, ls[method](...args)); + } catch (e) { + if (!isCancellationError(e)) { + return respond( + id, + null, + formatErrorWithArgs(e, [id, method, args, scope, maybeChange]), + ); + } + return respond(id); + } + } + throw new TypeError( + // @ts-ignore exhausted case statement sets type to never + `Invalid request method for request: "${method}" (${id})`, + ); + } + } finally { + ops.op_exit_span(span, true); } }
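Most of the Rust-side changes above are `#[tracing::instrument(skip_all)]` annotations on LSP request handlers. Below is a minimal, self-contained sketch of what that attribute does — it is not the Deno code itself, and it assumes only the `tracing` and `tracing-subscriber` crates; the function and field names are illustrative:

// Sketch: behavior of #[tracing::instrument(skip_all)].
// `skip_all` keeps the (possibly large or non-Debug) arguments out of the
// span; only explicitly listed `fields` are recorded.
use tracing::instrument;
use tracing_subscriber::fmt::format::FmtSpan;

#[instrument(skip_all, fields(len = text.len()))]
fn handle_request(text: &str) -> usize {
    // Events emitted here are attached to the `handle_request` span.
    tracing::info!("processing request");
    text.len()
}

fn main() {
    // A stderr subscriber that also prints span open/close events, similar
    // in spirit to the `Logging` collector added in cli/lsp/trace.rs.
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
        .init();
    handle_request("hello");
}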
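The new `tracing` workspace setting is deserialized through the untagged `TracingConfigOrEnabled` enum added in cli/lsp/trace.rs, so a client can send either a bare boolean or a full configuration object. A standalone sketch of that serde behavior, assuming only `serde` (with the derive feature) and `serde_json`; the struct below is a trimmed copy of the one in the diff:

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct TracingConfig {
    enable: bool,
    filter: Option<String>,
    collector_endpoint: Option<String>,
}

// Untagged: serde tries the variants in order, so an object parses as
// `Config` and a bare boolean falls through to `Enabled`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum TracingConfigOrEnabled {
    Config(TracingConfig),
    Enabled(bool),
}

fn main() -> Result<(), serde_json::Error> {
    let as_bool: TracingConfigOrEnabled = serde_json::from_str("true")?;
    let as_obj: TracingConfigOrEnabled = serde_json::from_str(
        r#"{ "enable": true, "collectorEndpoint": "http://localhost:4317" }"#,
    )?;
    println!("{as_bool:?}\n{as_obj:?}");
    Ok(())
}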
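In cli/lsp/tsc.rs the request sender captures `tracing::Span::current().context()` and ships it along with the request, and the TSC thread re-parents its spans onto that context (see `span_with_context` and `op_make_span`), so work done on the TS thread nests under the originating LSP request. The following is a minimal sketch of that hand-off over a plain channel — not the actual `Request` tuple or op wiring from the diff — assuming the `tracing`, `tracing-opentelemetry`, and `opentelemetry` crates; without a configured OpenTelemetry layer the spans are simply no-ops:

use opentelemetry::Context;
use tracing::info_span;
use tracing_opentelemetry::OpenTelemetrySpanExt;

fn main() {
    let (tx, rx) = std::sync::mpsc::channel::<(String, Context)>();

    // Sending side: capture the current span's OpenTelemetry context and
    // send it with the request, analogous to the extra tuple field added
    // to the Request type in cli/lsp/tsc.rs.
    let request_span = info_span!("lsp_request").entered();
    tx.send(("getQuickInfo".to_string(), tracing::Span::current().context()))
        .unwrap();
    drop(request_span);

    // Receiving side (normally a separate thread): create a span for the
    // work and re-parent it onto the carried context so it shows up under
    // the originating request in the exported trace.
    let (method, ctx) = rx.recv().unwrap();
    let work_span = info_span!("tsc_request", otel.name = method.as_str());
    work_span.set_parent(ctx);
    let _guard = work_span.entered();
    // ... perform the request here ...
}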