Support pull diagnostics and use them for testing #2166

Merged (1 commit, Feb 19, 2025)
108 changes: 41 additions & 67 deletions lsp/lsp-harness/src/lib.rs
@@ -8,11 +8,12 @@ pub use jsonrpc::Server;
use lsp_types::{
notification::{Notification, PublishDiagnostics},
request::{
Completion, DocumentSymbolRequest, Formatting, GotoDefinition, HoverRequest, References,
Rename, Request as LspRequest,
Completion, DocumentDiagnosticRequest, DocumentSymbolRequest, ExecuteCommand, Formatting,
GotoDefinition, HoverRequest, References, Rename, Request as LspRequest,
},
CompletionParams, DocumentFormattingParams, DocumentSymbolParams, GotoDefinitionParams,
HoverParams, PublishDiagnosticsParams, ReferenceParams, RenameParams, Url,
CompletionParams, DocumentDiagnosticParams, DocumentFormattingParams, DocumentSymbolParams,
ExecuteCommandParams, GotoDefinitionParams, HoverParams, PublishDiagnosticsParams,
ReferenceParams, RenameParams, TextDocumentIdentifier, Url,
};
pub use output::LspDebug;
use serde::Deserialize;
@@ -207,8 +208,17 @@ impl TestHarness {
out: Vec::new(),
}
}

/// Creates a new test harness with background evaluation disabled.
///
/// Background evaluation is annoying for tests because it has some timing-sensitive parts.
pub fn new() -> Self {
Self::new_with_options(None)
let default_options = serde_json::json!({
"eval_config": {
"disable": true,
},
});
Self::new_with_options(Some(default_options))
}

pub fn request<T: LspRequest>(&mut self, params: T::Params)
@@ -232,6 +242,32 @@
}
}

/// Get evaluation diagnostics for a specific file.
pub fn get_eval_diagnostics(&mut self, uri: Url) {
let doc = TextDocumentIdentifier { uri: uri.clone() };
// First, request an evaluation.
self.request::<ExecuteCommand>(ExecuteCommandParams {
command: "eval".to_owned(),
arguments: vec![serde_json::to_value(&doc).unwrap()],
work_done_progress_params: Default::default(),
});
// A diagnostic request returns the most-recent diagnostics, which will
// be the ones populated by the previous execution. (Unless there's a
// background evaluation going on in which case there will be a race.
// Best turn off background evaluation for testing.)
//
// We do it this way instead of relying on the default "push"
// diagnostics because those ones are hard to test: they arrive at
// unpredictable times and in unpredictable amounts.
self.request::<DocumentDiagnosticRequest>(DocumentDiagnosticParams {
text_document: TextDocumentIdentifier { uri },
identifier: None,
previous_result_id: None,
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
});
}

pub fn prepare_files(&mut self, fixture: &TestFixture) {
let mut file_versions = HashMap::new();

@@ -273,66 +309,4 @@ impl TestHarness {
}
}
}

// For debug purposes, drain and print notifications.
pub fn drain_diagnostics(&mut self, files: impl Iterator<Item = Url>) {
let mut diags = self.drain_diagnostics_inner(files);

// Sort and dedup the diagnostics, for stability of the output.
let mut files: Vec<_> = diags.keys().cloned().collect();
files.sort();

for f in files {
let mut diags = diags.remove(&f).unwrap();
diags.sort_by_cached_key(|d| (d.range.start, d.range.end, d.message.clone()));
diags.dedup_by_key(|d| (d.range.start, d.range.end, d.message.clone()));
for d in diags {
(&f, d).debug(&mut self.out).unwrap();
self.out.push(b'\n');
}
}
}

fn drain_diagnostics_inner(
&mut self,
files: impl Iterator<Item = Url>,
) -> HashMap<Url, Vec<lsp_types::Diagnostic>> {
let mut diags: HashMap<Url, Vec<lsp_types::Diagnostic>> = HashMap::new();

// This is pretty fragile, but I don't know of a better way to handle notifications: we
// expect 2 rounds of notifications from each file (one synchronously from typechecking,
// and one from the background eval). So we just wait until we've received both, and we
// concatenate their outputs.
let mut waiting: HashMap<Url, u32> = files.map(|f| (f, 2)).collect();

// Handle a single diagnostic, returning true if we have enough of them.
let mut handle_diag = |diag: PublishDiagnosticsParams| -> bool {
if let Some(remaining) = waiting.get_mut(&diag.uri) {
*remaining -= 1;
if *remaining == 0 {
waiting.remove(&diag.uri);
}
diags
.entry(diag.uri.clone())
.or_default()
.extend(diag.diagnostics);
}

waiting.is_empty()
};

for msg in self.srv.pending_notifications() {
if msg.method == PublishDiagnostics::METHOD {
let diag: PublishDiagnosticsParams =
serde_json::value::from_value(msg.params).unwrap();
if handle_diag(diag) {
return diags;
}
}
}

while !handle_diag(self.wait_for_diagnostics()) {}

diags
}
}
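
For context, a minimal sketch of how a snapshot test might drive this new pull-based flow. The fixture handling, the file name, and the way output is collected are assumptions drawn from the harness code above, not code taken from this PR.

// Sketch only: exercising the new pull-diagnostics path from a test.
use lsp_types::Url;

fn check_eval_diagnostics(fixture: &TestFixture) {
    // `new()` now disables background evaluation, so the pull request below
    // cannot race with push diagnostics from a background evaluator.
    let mut harness = TestHarness::new();
    harness.prepare_files(fixture);

    // Hypothetical file name; a real test would take it from the fixture.
    let uri = Url::from_file_path("/main.ncl").unwrap();

    // Sends the "eval" command, then pulls the resulting diagnostics; the
    // harness is assumed to record the responses in its output buffer for
    // snapshotting.
    harness.get_eval_diagnostics(uri);
}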
29 changes: 28 additions & 1 deletion lsp/lsp-harness/src/output.rs
@@ -234,6 +234,33 @@ impl LspDebug for WorkspaceEdit {

impl LspDebug for Diagnostic {
fn debug(&self, mut w: impl Write) -> std::io::Result<()> {
write!(w, "{}: {}", self.range.debug_str(), self.message)
write!(w, "{}: {:?}", self.range.debug_str(), self.message)
}
}

impl LspDebug for Option<serde_json::Value> {
fn debug(&self, mut w: impl Write) -> std::io::Result<()> {
if let Some(v) = self {
write!(w, "{v:?}")
} else {
Ok(())
}
}
}

impl LspDebug for lsp_types::DocumentDiagnosticReportResult {
fn debug(&self, w: impl Write) -> std::io::Result<()> {
let lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
full_document_diagnostic_report,
..
},
),
) = self
else {
panic!("unexpected report {self:?}");
};
full_document_diagnostic_report.items.debug(w)
}
}
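
As a rough illustration (not part of this PR), this is the shape of value the new impl expects; any other variant, such as an unchanged report, hits the panic! branch above. The construction assumes the usual lsp_types field layout.

// Sketch only: building a full diagnostic report and debugging it.
use lsp_types::{
    DocumentDiagnosticReport, DocumentDiagnosticReportResult, FullDocumentDiagnosticReport,
    RelatedFullDocumentDiagnosticReport,
};

let report = DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
    RelatedFullDocumentDiagnosticReport {
        related_documents: None,
        full_document_diagnostic_report: FullDocumentDiagnosticReport {
            result_id: None,
            items: vec![], // diagnostics would go here
        },
    },
));
let mut buf = Vec::new();
// Delegates to the existing list-of-diagnostics impl of LspDebug (the trait
// defined in this module).
report.debug(&mut buf).unwrap();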
79 changes: 26 additions & 53 deletions lsp/nls/src/background.rs
@@ -10,14 +10,13 @@ use log::warn;
use lsp_types::Url;
use nickel_lang_core::{
cache::{InputFormat, SourcePath},
eval::{cache::CacheImpl, VirtualMachine},
files::FileId,
};
use serde::{Deserialize, Serialize};

use crate::{
cache::CacheExt as _, config, diagnostic::SerializableDiagnostic, error::WarningReporter,
files::uri_to_path, world::World,
cache::CacheExt as _, config, diagnostic::SerializableDiagnostic, files::uri_to_path,
world::World,
};

// Environment variable used to pass the recursion limit value to the child worker
Expand Down Expand Up @@ -63,7 +62,7 @@ pub struct Diagnostics {
}

pub struct BackgroundJobs {
receiver: Receiver<Diagnostics>,
receiver: Option<Receiver<Diagnostics>>,
sender: Sender<Command>,
}

@@ -97,42 +96,8 @@ pub fn worker_main() -> anyhow::Result<()> {
.cache
.id_of(&SourcePath::Path(path.clone(), InputFormat::Nickel))
{
let mut diagnostics = world.parse_and_typecheck(file_id);

// Evaluation diagnostics (but only if there were no parse/type errors).
if diagnostics.is_empty() {
let (reporter, warnings) = WarningReporter::new();
// TODO: avoid cloning the cache.
let mut vm = VirtualMachine::<_, CacheImpl>::new(
world.cache.clone(),
std::io::stderr(),
reporter,
);
// We've already checked that parsing and typechecking are successful, so we
// don't expect further errors.
let rt = vm.prepare_eval(file_id).unwrap();
let recursion_limit = std::env::var(RECURSION_LIMIT_ENV_VAR_NAME)?.parse::<usize>()?;
let errors = vm.eval_permissive(rt, recursion_limit);
let mut files = vm.import_resolver().files().clone();

diagnostics.extend(
errors
.into_iter()
.filter(|e| {
!matches!(
e,
nickel_lang_core::error::EvalError::MissingFieldDef { .. }
)
})
.flat_map(|e| SerializableDiagnostic::from(e, &mut files, file_id)),
);
diagnostics.extend(warnings.try_iter().flat_map(|(warning, mut files)| {
SerializableDiagnostic::from(warning, &mut files, file_id)
}));
}

diagnostics.sort();
diagnostics.dedup();
let recursion_limit = std::env::var(RECURSION_LIMIT_ENV_VAR_NAME)?.parse::<usize>()?;
let diagnostics = world.eval_diagnostics(file_id, recursion_limit);
let diagnostics = Diagnostics { path, diagnostics };

// If this fails, the main process has already exited. No need for a loud error in that case.
@@ -314,20 +279,28 @@ impl BackgroundJobs {
pub fn new(config: config::LspEvalConfig) -> Self {
let (cmd_tx, cmd_rx) = crossbeam::channel::unbounded();
let (diag_tx, diag_rx) = crossbeam::channel::unbounded();
match SupervisorState::new(cmd_rx, diag_tx, config) {
Ok(mut sup) => {
std::thread::spawn(move || {
sup.run();
});

if config.disable {
Self {
sender: cmd_tx,
receiver: None,
}
Err(e) => {
warn!("failed to spawn background jobs: {e}");
} else {
match SupervisorState::new(cmd_rx, diag_tx, config) {
Ok(mut sup) => {
std::thread::spawn(move || {
sup.run();
});
}
Err(e) => {
warn!("failed to spawn background jobs: {e}");
}
}
}

Self {
sender: cmd_tx,
receiver: diag_rx,
Self {
sender: cmd_tx,
receiver: Some(diag_rx),
}
}
}

@@ -364,7 +337,7 @@ impl BackgroundJobs {
let _ = self.sender.send(Command::EvalFile { uri });
}

pub fn receiver(&self) -> &Receiver<Diagnostics> {
&self.receiver
pub fn receiver(&self) -> Option<&Receiver<Diagnostics>> {
self.receiver.as_ref()
}
}
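
A small sketch of how a caller might adapt to the receiver now being optional; the wrapper function and the publish callback are hypothetical, not taken from this PR.

// Sketch only: polling background diagnostics when they exist.
fn poll_background_diagnostics(jobs: &BackgroundJobs, mut publish: impl FnMut(Diagnostics)) {
    // When background evaluation is disabled there is no channel at all,
    // so there is simply nothing to poll.
    if let Some(rx) = jobs.receiver() {
        while let Ok(diags) = rx.try_recv() {
            publish(diags);
        }
    }
}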
40 changes: 5 additions & 35 deletions lsp/nls/src/command.rs
@@ -1,13 +1,9 @@
use lsp_server::{RequestId, Response, ResponseError};
use lsp_types::{ExecuteCommandParams, TextDocumentIdentifier, Url};
use nickel_lang_core::eval::{cache::CacheImpl, VirtualMachine};

use crate::{
cache::CacheExt,
diagnostic::SerializableDiagnostic,
error::{Error, WarningReporter},
server::Server,
};
use crate::{cache::CacheExt, error::Error, server::Server};

const RECURSION_LIMIT: usize = 128;

pub fn handle_command(
params: ExecuteCommandParams,
@@ -16,11 +12,10 @@ pub fn handle_command(
) -> Result<(), ResponseError> {
match params.command.as_str() {
"eval" => {
server.reply(Response::new_ok(req, None::<()>));

let doc: TextDocumentIdentifier =
serde_json::from_value(params.arguments[0].clone()).unwrap();
eval(server, &doc.uri)?;
server.reply(Response::new_ok(req, None::<()>));
Ok(())
}
_ => Err(Error::CommandNotFound(params.command).into()),
@@ -29,32 +24,7 @@

fn eval(server: &mut Server, uri: &Url) -> Result<(), Error> {
if let Some(file_id) = server.world.cache.file_id(uri)? {
let (reporter, warnings) = WarningReporter::new();
// TODO: avoid cloning the cache. Maybe we can have a VM with a &mut Cache?
let mut vm = VirtualMachine::<_, CacheImpl>::new(
server.world.cache.clone(),
std::io::stderr(),
reporter,
);
let rt = vm.prepare_eval(file_id)?;

let result = vm.eval_full(rt);
// Get a possibly-updated files from the vm instead of relying on the one
// in `world`.
let mut files = vm.import_resolver().files().clone();

let mut diags: Vec<_> = warnings
.try_iter()
.flat_map(|(warning, mut files)| {
SerializableDiagnostic::from(warning, &mut files, file_id)
})
.collect();

if let Err(e) = result {
let mut error_diags = SerializableDiagnostic::from(e, &mut files, file_id);
diags.append(&mut error_diags);
}

let diags = server.world.eval_diagnostics(file_id, RECURSION_LIMIT);
server.issue_diagnostics(file_id, diags);
}
Ok(())
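
The shared World::eval_diagnostics helper that both call sites now use is defined outside this diff (presumably in world.rs). The following is a rough reconstruction pieced together from the code removed in background.rs and command.rs above, not the merged implementation; the exact signature and module paths may differ.

// Reconstruction only; imports follow the code removed from background.rs.
use nickel_lang_core::{
    error::EvalError,
    eval::{cache::CacheImpl, VirtualMachine},
    files::FileId,
};

use crate::{diagnostic::SerializableDiagnostic, error::WarningReporter, world::World};

impl World {
    pub fn eval_diagnostics(
        &mut self,
        file_id: FileId,
        recursion_limit: usize,
    ) -> Vec<SerializableDiagnostic> {
        // Parse and typecheck first; only evaluate when that is clean.
        let mut diagnostics = self.parse_and_typecheck(file_id);
        if diagnostics.is_empty() {
            let (reporter, warnings) = WarningReporter::new();
            // TODO (carried over from the removed code): avoid cloning the cache.
            let mut vm = VirtualMachine::<_, CacheImpl>::new(
                self.cache.clone(),
                std::io::stderr(),
                reporter,
            );
            // Parsing and typechecking succeeded, so preparation should not fail.
            let rt = vm.prepare_eval(file_id).unwrap();
            let errors = vm.eval_permissive(rt, recursion_limit);
            // Take a possibly-updated file database from the VM.
            let mut files = vm.import_resolver().files().clone();

            diagnostics.extend(
                errors
                    .into_iter()
                    .filter(|e| !matches!(e, EvalError::MissingFieldDef { .. }))
                    .flat_map(|e| SerializableDiagnostic::from(e, &mut files, file_id)),
            );
            diagnostics.extend(warnings.try_iter().flat_map(|(warning, mut files)| {
                SerializableDiagnostic::from(warning, &mut files, file_id)
            }));
        }
        diagnostics.sort();
        diagnostics.dedup();
        diagnostics
    }
}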
3 changes: 3 additions & 0 deletions lsp/nls/src/config.rs
@@ -29,6 +29,8 @@ impl Default for LspEvalLimits {
#[derive(Debug, Deserialize, Serialize)]
#[serde(default)]
pub struct LspEvalConfig {
/// Disable background evaluation altogether.
pub disable: bool,
pub eval_limits: LspEvalLimits,
/// The duration during which a file that broke the background evaluator will be blacklisted
/// from it
@@ -38,6 +40,7 @@ pub struct LspEvalConfig {
impl Default for LspEvalConfig {
fn default() -> Self {
LspEvalConfig {
disable: false,
eval_limits: Default::default(),
blacklist_duration: Duration::from_secs(30),
}
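
For reference, a client can opt out of background evaluation through its initialization options. This sketch mirrors the JSON the test harness now sends by default; the "eval_config" key comes from the harness change above, while the exact client-side settings shape is an assumption.

// Sketch only: client initialization options disabling background evaluation.
let initialization_options = serde_json::json!({
    "eval_config": {
        "disable": true,
        // eval_limits and blacklist_duration keep their defaults
    }
});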