work towards toml based tests #21

Draft: wants to merge 1 commit into base: main
34 changes: 34 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions server/Cargo.toml
@@ -24,6 +24,8 @@ tokio-stream = "0.1.8"
ropey = "1.4.1"
railroad = "0.1.1"
glob = "0.3.0"
toml-spanned-value = "0.1.0"
globset = "0.4.8"

[dependencies.console-subscriber]
version = "0.1.4"
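
(Aside, not part of the diff: globset joins the dependency list here, and a FIXME further down in main.rs asks whether the toml test files should be matched via a glob set. A minimal sketch of globset's matching API, using a hypothetical "**/*.y.toml" pattern:)

// --- illustrative sketch, not part of this PR ---
use globset::{Glob, GlobSetBuilder};

fn main() -> Result<(), globset::Error> {
    // Hypothetical pattern: any file ending in ".y.toml", in any directory.
    let glob = Glob::new("**/*.y.toml")?;
    let set = GlobSetBuilder::new().add(glob).build()?;
    assert!(set.is_match("tests/fib.y.toml"));
    assert!(!set.is_match("src/main.rs"));
    Ok(())
}
// --- end sketch ---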
104 changes: 80 additions & 24 deletions server/src/main.rs
@@ -3,7 +3,6 @@ mod peek_channel;

use cfgrammar::yacc;
use parse_thread::{ParseThread, ParserMsg};
use serde;
use tower_lsp::jsonrpc;
use tower_lsp::lsp_types as lsp;

@@ -152,6 +151,7 @@ async fn process_parser_messages(
})
.await;
}
// This should probably be named something like ProgressInterrupt
ParserMsg::ProgressCancel(token) => {
let token = lsp::NumberOrString::Number(token);

@@ -186,32 +186,53 @@ struct State {
warned_needs_restart: bool,
}

#[derive(serde::Deserialize, serde::Serialize, Debug)]
struct TomlTest(Vec<toml::Spanned<String>>);

impl State {
fn affected_parsers(&self, path: &std::path::Path, ids: &mut Vec<usize>) {
if let Some(extension) = path.extension() {
-            let id = self.extensions.get(extension).map(ParserInfo::id);
-            // A couple of corner cases here:
-            //
-            // * The kind of case where you have foo.l and bar.y/baz.y using the same lexer.
-            // -- We should probably allow this case where editing a single file updates multiple parsers.
-            // * The kind of case where you have a yacc.y for the extension .y, so both the extension
-            // and the parse_info have the same id.
-            // -- We don't want to run the same parser multiple times: remove duplicates.
-            // In the general case, where you either change a .l, .y, or a file of the parsers extension
-            // this will be a vec of one element.
-            if let Some(id) = id {
-                ids.push(id);
-            }
+            // FIXME should be a globset of toml tests?
+            if extension == "toml" {
+                let stem = path.file_stem();
+                if let Some(stem) = stem {
+                    let stem = std::path::PathBuf::from(stem);
+                    let extension = stem.extension();
+                    if let Some(extension) = extension {
+                        let id = self.extensions.get(extension).map(ParserInfo::id);
+                        if let Some(id) = id {
+                            ids.push(id);
+                        }
+                    }
+                }
+            } else {
+                let id = self.extensions.get(extension).map(ParserInfo::id);
+
+                // A couple of corner cases here:
+                //
+                // * The kind of case where you have foo.l and bar.y/baz.y using the same lexer.
+                // -- We should probably allow this case where editing a single file updates multiple parsers.
+                // * The kind of case where you have a yacc.y for the extension .y, so both the extension
+                // and the parse_info have the same id.
+                // -- We don't want to run the same parser multiple times: remove duplicates.
+                // In the general case, where you either change a .l, .y, or a file of the parsers extension
+                // this will be a vec of one element.
+                if let Some(id) = id {
+                    ids.push(id);
+                }

-            ids.extend(
-                self.extensions
-                    .values()
-                    .filter(|parser_info| path == parser_info.l_path || path == parser_info.y_path)
-                    .map(ParserInfo::id),
-            );
+                ids.extend(
+                    self.extensions
+                        .values()
+                        .filter(|parser_info| {
+                            path == parser_info.l_path || path == parser_info.y_path
+                        })
+                        .map(ParserInfo::id),
+                );

-            ids.sort_unstable();
-            ids.dedup();
+                ids.sort_unstable();
+                ids.dedup();
+            }
}
}
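
(Aside, not part of the diff: the "toml" branch above maps a test file back to its parser by taking the file stem and then that stem's extension. A minimal sketch of that two-step lookup, assuming a hypothetical test file fib.y.toml and a parser registered under the .y extension:)

// --- illustrative sketch, not part of this PR ---
use std::ffi::OsStr;
use std::path::{Path, PathBuf};

fn main() {
    let path = Path::new("tests/fib.y.toml");
    // file_stem() drops the trailing ".toml", leaving "fib.y".
    let stem: Option<PathBuf> = path.file_stem().map(PathBuf::from);
    // The stem's own extension is "y", the key used to look up the ParserInfo id.
    let parser_ext = stem.as_deref().and_then(Path::extension);
    assert_eq!(parser_ext, Some(OsStr::new("y")));
}
// --- end sketch ---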

@@ -223,8 +244,7 @@ impl State {
}

fn parser_for(&self, path: &std::path::Path) -> Option<&ParserInfo> {
-        path.extension()
-            .map_or(None, |ext| self.extensions.get(ext))
+        path.extension().and_then(|ext| self.extensions.get(ext))
}
}
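
(Aside, not part of the diff: for a closure returning an Option, map_or(None, f) and and_then(f) are equivalent; the parser_for change above is the simplification clippy's option_map_or_none lint suggests. A tiny check:)

// --- illustrative sketch, not part of this PR ---
fn main() {
    let ext: Option<&str> = Some("y");
    let lookup = |e: &str| if e == "y" { Some(0usize) } else { None };
    assert_eq!(ext.map_or(None, lookup), ext.and_then(lookup));
}
// --- end sketch ---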

@@ -397,8 +417,42 @@ impl tower_lsp::LanguageServer for Backend {
let mut state = self.state.lock().await;
let state = state.deref_mut();
let mut globs: Vec<lsp::Registration> = Vec::new();

if state.client_monitor {
for WorkspaceCfg { workspace, .. } in state.toml.values() {
for test in &workspace.tests {
if let nimbleparse_toml::TestKind::Toml {
parser_extension: _parser_extension,
toml_test_extension,
} = test.kind.clone().into_inner()
{
self.client
.log_message(
lsp::MessageType::LOG,
format!("registering toml test: {:?}", &test),
)
.await;

let mut reg = serde_json::Map::new();
reg.insert(
"globPattern".to_string(),
serde_json::value::Value::String(format!("*/{}", toml_test_extension)),
);
let mut watchers = serde_json::Map::new();
watchers.insert(
"watchers".to_string(),
serde_json::value::Value::Array(vec![
serde_json::value::Value::Object(reg),
]),
);
globs.push(lsp::Registration {
id: "1".to_string(),
method: "workspace/didChangeWatchedFiles".to_string(),
register_options: Some(serde_json::value::Value::Object(watchers)),
});
}
}
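
(Aside, not part of the diff: the loop above hand-assembles the DidChangeWatchedFiles registration options as JSON. The resulting shape, shown here with a hypothetical toml_test_extension of "calc.toml", looks like this:)

// --- illustrative sketch, not part of this PR ---
use serde_json::json;

fn main() {
    // Equivalent of the Map-building code above, for a hypothetical extension.
    let options = json!({
        "watchers": [
            { "globPattern": "*/calc.toml" }
        ]
    });
    // Registered under the "workspace/didChangeWatchedFiles" method, so the
    // client starts sending change notifications for matching files.
    println!("{options}");
}
// --- end sketch ---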

for parser in workspace.parsers.get_ref() {
let glob = format!("**/*{}", parser.extension.get_ref());
let mut reg = serde_json::Map::new();
@@ -532,6 +586,7 @@ impl tower_lsp::LanguageServer for Backend {
let url = params.text_document.uri.clone();
let path = url.to_file_path();
let nimbleparse_toml = std::ffi::OsStr::new("nimbleparse.toml");

match path {
Ok(path) if Some(nimbleparse_toml) == path.file_name() => {
if !state.warned_needs_restart {
@@ -594,6 +649,7 @@ impl tower_lsp::LanguageServer for Backend {
let mut state = self.state.lock().await;
let url = params.text_document.uri.clone();
let path = url.to_file_path();

match path {
Ok(path) => {
let mut ids = vec![];