Merge pull request #309 from github/expose-logger
Introduce new public `Reporter` trait that is used from CLI types to report file status
hendrikvanantwerpen authored Sep 13, 2023
2 parents a4a726d + 4a271ca commit a20a97b
Showing 13 changed files with 475 additions and 335 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
@@ -1,4 +1,5 @@
[workspace]
resolver = "1"
members = [
# library projects
"lsp-positions",
2 changes: 1 addition & 1 deletion stack-graphs/Cargo.toml
@@ -28,7 +28,7 @@ bincode = { version = "2.0.0-rc.3", optional = true }
bitvec = "1.0"
controlled-option = "0.4"
either = "1.6"
enumset = "1.0"
enumset = "1.1"
fxhash = "0.2"
itertools = "0.10"
libc = "0.2"
8 changes: 8 additions & 0 deletions tree-sitter-stack-graphs/CHANGELOG.md
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## Unreleased

### Library

#### Changed

- A new public `Reporter` trait is used to report status from CLI actions such as indexing and testing. The CLI actions no longer write directly to the console; all output goes through the reporter. `Reporter` replaces the old, inaccessible `Logger` trait, so clients can implement their own reporters when needed. A `ConsoleReporter` is provided for clients that just need console printing (a usage sketch follows this diff).

## v0.7.1 -- 2023-07-27

Support `stack-graphs` version `0.12`.
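For reference, here is a minimal sketch of how a client might configure the new `ConsoleReporter` and hand it to the indexer, modeled on the `IndexArgs::get_reporter` implementation added in this PR. The `use` paths and the public visibility of the struct fields outside the crate are assumptions, not confirmed by this diff.

```rust
// Sketch only: build a ConsoleReporter the same way IndexArgs::get_reporter
// does in this PR. Import paths and field visibility are assumptions.
use tree_sitter_stack_graphs::cli::util::reporter::{ConsoleReporter, Level};

fn reporter_for(verbose: bool, hide_error_details: bool) -> ConsoleReporter {
    ConsoleReporter {
        // Stay quiet about skipped and succeeded files unless verbose output is requested.
        skipped_level: if verbose { Level::Summary } else { Level::None },
        succeeded_level: if verbose { Level::Summary } else { Level::None },
        // Show full diagnostics for failures and cancellations unless error details are hidden.
        failed_level: if hide_error_details { Level::Summary } else { Level::Details },
        canceled_level: if hide_error_details { Level::Summary } else { Level::Details },
    }
}

// Usage (database and loader setup elided, as in the diff below):
// let reporter = reporter_for(true, false);
// let mut indexer = Indexer::new(&mut db, &mut loader, &reporter);
```

Note that `Indexer` only borrows the reporter (`reporter: &'a dyn Reporter` in the diff below), so the reporter must outlive the indexer that uses it.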
6 changes: 3 additions & 3 deletions tree-sitter-stack-graphs/Cargo.toml
@@ -26,8 +26,8 @@ cli = [
"base64",
"clap",
"colored",
"dirs",
"dialoguer",
"dirs",
"env_logger",
"indoc",
"pathdiff",
@@ -52,12 +52,12 @@ lsp = [
anyhow = "1.0"
base64 = { version = "0.21", optional = true }
capture-it = { version = "0.3", optional = true }
clap = { version = "4", optional = true, features=["derive"] }
clap = { version = "4", optional = true, features = ["derive"] }
colored = { version = "2.0", optional = true }
controlled-option = ">=0.4"
crossbeam-channel = { version = "0.5", optional = true }
dialoguer = { version = "0.10", optional = true }
dirs = { version = "5", optional=true }
dirs = { version = "5", optional = true }
env_logger = { version = "0.9", optional = true }
indoc = { version = "1.0", optional = true }
itertools = "0.10"
8 changes: 3 additions & 5 deletions tree-sitter-stack-graphs/src/cli.rs
@@ -69,7 +69,7 @@ pub mod parse;
pub mod query;
pub mod status;
pub mod test;
mod util;
pub mod util;
pub mod visualize;

pub mod path_loading {
@@ -78,6 +78,7 @@ pub mod path_loading {
use clap::Subcommand;

use crate::cli::clean::CleanArgs;
use crate::cli::database::DatabaseArgs;
use crate::cli::index::IndexArgs;
use crate::cli::init::InitArgs;
use crate::cli::load::PathLoaderArgs;
@@ -90,8 +91,6 @@ pub mod path_loading {
use crate::cli::test::TestArgs;
use crate::cli::visualize::VisualizeArgs;

use super::database::DatabaseArgs;

#[derive(Subcommand)]
pub enum Subcommands {
Clean(Clean),
@@ -297,6 +296,7 @@ pub mod provided_languages {
use clap::Subcommand;

use crate::cli::clean::CleanArgs;
use crate::cli::database::DatabaseArgs;
use crate::cli::index::IndexArgs;
use crate::cli::init::InitArgs;
use crate::cli::load::LanguageConfigurationsLoaderArgs;
@@ -310,8 +310,6 @@
use crate::cli::visualize::VisualizeArgs;
use crate::loader::LanguageConfiguration;

use super::database::DatabaseArgs;

#[derive(Subcommand)]
pub enum Subcommands {
Clean(Clean),
104 changes: 74 additions & 30 deletions tree-sitter-stack-graphs/src/cli/index.rs
@@ -21,6 +21,16 @@ use std::time::Duration;
use thiserror::Error;
use tree_sitter_graph::Variables;

use crate::cli::util::duration_from_seconds_str;
use crate::cli::util::iter_files_and_directories;
use crate::cli::util::reporter::ConsoleReporter;
use crate::cli::util::reporter::Level;
use crate::cli::util::reporter::Reporter;
use crate::cli::util::sha1;
use crate::cli::util::wait_for_input;
use crate::cli::util::BuildErrorWithSource;
use crate::cli::util::CLIFileReporter;
use crate::cli::util::ExistingPathBufValueParser;
use crate::loader::FileLanguageConfigurations;
use crate::loader::FileReader;
use crate::loader::Loader;
@@ -29,16 +39,6 @@ use crate::CancelAfterDuration;
use crate::CancellationFlag;
use crate::NoCancellation;

use super::util::duration_from_seconds_str;
use super::util::iter_files_and_directories;
use super::util::sha1;
use super::util::wait_for_input;
use super::util::BuildErrorWithSource;
use super::util::ConsoleLogger;
use super::util::ExistingPathBufValueParser;
use super::util::FileLogger;
use super::util::Logger;

#[derive(Args)]
pub struct IndexArgs {
/// Source file or directory paths to index.
@@ -101,8 +101,8 @@ impl IndexArgs {
wait_for_input()?;
}
let mut db = SQLiteWriter::open(&db_path)?;
let logger = ConsoleLogger::new(self.verbose, !self.hide_error_details);
let mut indexer = Indexer::new(&mut db, &mut loader, &logger);
let reporter = self.get_reporter();
let mut indexer = Indexer::new(&mut db, &mut loader, &reporter);
indexer.force = self.force;
indexer.max_file_time = self.max_file_time;

@@ -114,24 +114,53 @@ impl IndexArgs {
indexer.index_all(source_paths, self.continue_from, &NoCancellation)?;
Ok(())
}

fn get_reporter(&self) -> ConsoleReporter {
return ConsoleReporter {
skipped_level: if self.verbose {
Level::Summary
} else {
Level::None
},
succeeded_level: if self.verbose {
Level::Summary
} else {
Level::None
},
failed_level: if self.hide_error_details {
Level::Summary
} else {
Level::Details
},
canceled_level: if self.hide_error_details {
Level::Summary
} else {
Level::Details
},
};
}
}

pub struct Indexer<'a> {
db: &'a mut SQLiteWriter,
loader: &'a mut Loader,
logger: &'a dyn Logger,
reporter: &'a dyn Reporter,
/// Index files, even if they already exist in the database.
pub force: bool,
/// Maximum time per file.
pub max_file_time: Option<Duration>,
}

impl<'a> Indexer<'a> {
pub fn new(db: &'a mut SQLiteWriter, loader: &'a mut Loader, logger: &'a dyn Logger) -> Self {
pub fn new(
db: &'a mut SQLiteWriter,
loader: &'a mut Loader,
reporter: &'a dyn Reporter,
) -> Self {
Self {
db,
loader,
logger,
reporter,
force: false,
max_file_time: None,
}
@@ -149,14 +178,17 @@ impl<'a> Indexer<'a> {
Q: AsRef<Path>,
{
for (source_root, source_path, strict) in iter_files_and_directories(source_paths) {
let mut file_status = CLIFileReporter::new(self.reporter, &source_path);
cancellation_flag.check("indexing all files")?;
self.index_file(
&source_root,
&source_path,
strict,
&mut continue_from,
cancellation_flag,
&mut file_status,
)?;
file_status.assert_reported();
}
Ok(())
}
@@ -167,13 +199,16 @@ impl<'a> Indexer<'a> {
source_path: &Path,
cancellation_flag: &dyn CancellationFlag,
) -> Result<()> {
let mut file_status = CLIFileReporter::new(self.reporter, source_path);
self.index_file(
&source_root,
&source_path,
true,
&mut None::<&Path>,
cancellation_flag,
&mut file_status,
)?;
file_status.assert_reported();
Ok(())
}

@@ -185,22 +220,25 @@ impl<'a> Indexer<'a> {
missing_is_error: bool,
continue_from: &mut Option<P>,
cancellation_flag: &dyn CancellationFlag,
file_status: &mut CLIFileReporter,
) -> Result<()>
where
P: AsRef<Path>,
{
let mut file_status = self.logger.file(source_path);
match self.index_file_inner(
source_root,
source_path,
missing_is_error,
continue_from,
cancellation_flag,
file_status.as_mut(),
file_status,
) {
ok @ Ok(_) => ok,
ok @ Ok(_) => {
file_status.assert_reported();
ok
}
err @ Err(_) => {
file_status.default_failure("error", Some(&format!("Error analyzing file {}. To continue analysis from this file later, add: --continue-from {}", source_path.display(), source_path.display())));
file_status.failure_if_processing("error", Some(&format!("Error analyzing file {}. To continue analysis from this file later, add: --continue-from {}", source_path.display(), source_path.display())));
err
}
}
@@ -213,7 +251,7 @@ impl<'a> Indexer<'a> {
missing_is_error: bool,
continue_from: &mut Option<P>,
cancellation_flag: &dyn CancellationFlag,
file_status: &mut dyn FileLogger,
file_status: &mut CLIFileReporter<'_>,
) -> Result<()>
where
P: AsRef<Path>,
@@ -245,22 +283,28 @@ impl<'a> Indexer<'a> {
let source = file_reader.get(source_path)?;
let tag = sha1(source);

if !self.force {
match self
.db
.status_for_file(&source_path.to_string_lossy(), Some(&tag))?
{
FileStatus::Missing => {}
FileStatus::Indexed => {
let success_status = match self
.db
.status_for_file(&source_path.to_string_lossy(), Some(&tag))?
{
FileStatus::Missing => "indexed",
FileStatus::Indexed => {
if self.force {
"reindexed"
} else {
file_status.skipped("cached index", None);
return Ok(());
}
FileStatus::Error(error) => {
}
FileStatus::Error(error) => {
if self.force {
"reindexed"
} else {
file_status.skipped(&format!("cached error ({})", error), None);
return Ok(());
}
}
}
};

let file_cancellation_flag = CancelAfterDuration::from_option(self.max_file_time);
let cancellation_flag = cancellation_flag | file_cancellation_flag.as_ref();
@@ -331,7 +375,7 @@ impl<'a> Indexer<'a> {
self.db
.store_result_for_file(&graph, file, &tag, &mut partials, &paths)?;

file_status.success("success", None);
file_status.success(success_status, None);

Ok(())
}
