diff --git a/crates/sui-move-build/src/lib.rs b/crates/sui-move-build/src/lib.rs index 26e0f735636f1..063fa2a07e2f7 100644 --- a/crates/sui-move-build/src/lib.rs +++ b/crates/sui-move-build/src/lib.rs @@ -18,9 +18,10 @@ use move_binary_format::{ use move_bytecode_utils::{layout::SerdeLayoutBuilder, module_cache::GetModule}; use move_compiler::{ compiled_unit::AnnotatedCompiledModule, - diagnostics::{report_diagnostics_to_buffer, report_warnings, Diagnostics, FilesSourceText}, + diagnostics::{report_diagnostics_to_buffer, report_warnings, Diagnostics}, editions::Edition, linters::LINT_WARNING_PREFIX, + shared::files::MappedFiles, }; use move_core_types::{ account_address::AccountAddress, @@ -184,7 +185,7 @@ impl BuildConfig { /// There may be additional information that needs to be displayed after diagnostics are reported /// (optionally report diagnostics themselves if files argument is provided). -pub fn decorate_warnings(warning_diags: Diagnostics, files: Option<&FilesSourceText>) { +pub fn decorate_warnings(warning_diags: Diagnostics, files: Option<&MappedFiles>) { let any_linter_warnings = warning_diags.any_with_prefix(LINT_WARNING_PREFIX); let (filtered_diags_num, unique) = warning_diags.filtered_source_diags_with_prefix(LINT_WARNING_PREFIX); diff --git a/external-crates/move/crates/move-analyzer/src/analysis/typing_analysis.rs b/external-crates/move/crates/move-analyzer/src/analysis/typing_analysis.rs index e586821ab625e..d3f4427535b30 100644 --- a/external-crates/move/crates/move-analyzer/src/analysis/typing_analysis.rs +++ b/external-crates/move/crates/move-analyzer/src/analysis/typing_analysis.rs @@ -8,16 +8,15 @@ use crate::{ expansion_mod_ident_to_map_key, type_def_loc, DefInfo, DefLoc, LocalDef, ModuleDefs, UseDef, UseDefMap, UseLoc, }, - utils::{get_start_position_opt, ignored_function, to_lsp_position}, + utils::{ignored_function, loc_start_to_lsp_position_opt}, }; -use move_command_line_common::files::FileHash; use move_compiler::{ - diagnostics::{self as diag, PositionInfo}, + diagnostics as diag, expansion::ast::{self as E, ModuleIdent}, naming::ast as N, parser::ast as P, - shared::{ide::MacroCallInfo, Identifier, Name}, + shared::{files::MappedFiles, ide::MacroCallInfo, Identifier, Name}, typing::{ ast as T, visitor::{LValueKind, TypingVisitorContext}, @@ -26,10 +25,9 @@ use move_compiler::{ use move_ir_types::location::{sp, Loc}; use move_symbol_pool::Symbol; -use codespan_reporting::files::SimpleFiles; use im::OrdMap; use lsp_types::Position; -use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::collections::{BTreeMap, BTreeSet}; /// Data used during anlysis over typed AST pub struct TypingAnalysisContext<'a> { @@ -37,10 +35,8 @@ pub struct TypingAnalysisContext<'a> { /// string so that we can access it regardless of the ModuleIdent representation /// (e.g., in the parsing AST or in the typing AST) pub mod_outer_defs: &'a BTreeMap, - /// A mapping from file names to file content (used to obtain source file locations) - pub files: &'a SimpleFiles, - /// A mapping from file hashes to file IDs (used to obtain source file locations) - pub file_id_mapping: &'a HashMap, + /// Mapped file information for translating locations into positions + pub files: &'a MappedFiles, pub references: &'a mut BTreeMap>, /// Additional information about definitions pub def_info: &'a mut BTreeMap, @@ -66,13 +62,13 @@ impl TypingAnalysisContext<'_> { /// Returns the `lsp_types::Position` start for a location, but may fail if we didn't see the /// definition already. 
fn lsp_start_position_opt(&self, loc: &Loc) -> Option { - get_start_position_opt(loc, self.files(), self.file_mapping()) + loc_start_to_lsp_position_opt(self.files, loc) } /// Returns the `lsp_types::Position` start for a location, but may fail if we didn't see the /// definition already. This should only be used on things we already indexed. fn lsp_start_position(&self, loc: &Loc) -> lsp_types::Position { - to_lsp_position(self.start_position(loc)) + loc_start_to_lsp_position_opt(self.files, loc).unwrap() } fn reset_for_module_member(&mut self) { @@ -289,8 +285,7 @@ impl TypingAnalysisContext<'_> { let result = add_fun_use_def( fun_def_name, self.mod_outer_defs, - self.files(), - self.file_mapping(), + self.files, mod_ident_str, mod_defs, use_name, @@ -345,8 +340,7 @@ impl TypingAnalysisContext<'_> { let mut refs = std::mem::take(self.references); add_struct_use_def( self.mod_outer_defs, - self.files(), - self.file_mapping(), + self.files, mod_ident_str, mod_defs, &use_name.value(), @@ -607,7 +601,7 @@ impl<'a> TypingVisitorContext for TypingAnalysisContext<'a> { } let loc = function_name.loc(); // first, enter self-definition for function name (unwrap safe - done when inserting def) - let name_start = to_lsp_position(self.start_position(&loc)); + let name_start = self.lsp_start_position(&loc); let fun_info = self .def_info .get(&DefLoc::new(loc.file_hash(), name_start)) @@ -915,15 +909,3 @@ impl<'a> TypingVisitorContext for TypingAnalysisContext<'a> { } } } - -impl diag::PositionInfo for TypingAnalysisContext<'_> { - type FileContents = String; - - fn files(&self) -> &SimpleFiles { - self.files - } - - fn file_mapping(&self) -> &HashMap { - self.file_id_mapping - } -} diff --git a/external-crates/move/crates/move-analyzer/src/completion.rs b/external-crates/move/crates/move-analyzer/src/completion.rs index 67a4e6d71641c..1d4709ec4c1ab 100644 --- a/external-crates/move/crates/move-analyzer/src/completion.rs +++ b/external-crates/move/crates/move-analyzer/src/completion.rs @@ -284,9 +284,7 @@ fn dot(symbols: &Symbols, use_fpath: &Path, position: &Position) -> Vec Vec { items.extend_from_slice(&primitive_types()); @@ -585,7 +583,7 @@ fn completion_items(pos: Position, path: &Path, symbols: &Symbols) -> Vec Vec, Vec, )>, - files: &SimpleFiles, - file_id_mapping: &HashMap, - file_name_mapping: &BTreeMap, + files: &MappedFiles, ) -> BTreeMap> { let mut lsp_diagnostics = BTreeMap::new(); for (s, _, (loc, msg), labels, _) in diagnostics { - let fpath = file_name_mapping.get(&loc.file_hash()).unwrap(); - if let Some(start) = get_loc(&loc.file_hash(), loc.start(), files, file_id_mapping) { - if let Some(end) = get_loc(&loc.file_hash(), loc.end(), files, file_id_mapping) { + let fpath = files.file_path(&loc.file_hash()); + if let Some(start) = loc_start_to_lsp_position_opt(files, loc) { + if let Some(end) = loc_end_to_lsp_position_opt(files, loc) { let range = Range::new(start, end); let related_info_opt = if labels.is_empty() { None @@ -39,15 +34,9 @@ pub fn lsp_diagnostics( labels .iter() .filter_map(|(lloc, lmsg)| { - let lstart = get_loc( - &lloc.file_hash(), - lloc.start(), - files, - file_id_mapping, - )?; - let lend = - get_loc(&lloc.file_hash(), lloc.end(), files, file_id_mapping)?; - let lpath = file_name_mapping.get(&lloc.file_hash()).unwrap(); + let lstart = loc_start_to_lsp_position_opt(files, lloc)?; + let lend = loc_end_to_lsp_position_opt(files, lloc)?; + let lpath = files.file_path(&lloc.file_hash()); let lpos = Location::new( Url::from_file_path(lpath).unwrap(), Range::new(lstart, 
lend), diff --git a/external-crates/move/crates/move-analyzer/src/symbols.rs b/external-crates/move/crates/move-analyzer/src/symbols.rs index dd4a4285c67a9..02ae323e3eb62 100644 --- a/external-crates/move/crates/move-analyzer/src/symbols.rs +++ b/external-crates/move/crates/move-analyzer/src/symbols.rs @@ -59,11 +59,10 @@ use crate::{ compiler_info::CompilerInfo, context::Context, diagnostics::{lsp_diagnostics, lsp_empty_diagnostics}, - utils::get_loc, + utils::loc_start_to_lsp_position_opt, }; use anyhow::{anyhow, Result}; -use codespan_reporting::files::SimpleFiles; use crossbeam::channel::Sender; use derivative::*; use im::ordmap::OrdMap; @@ -98,7 +97,10 @@ use move_compiler::{ linters::LintLevel, naming::ast::{StructFields, Type, TypeName_, Type_}, parser::ast::{self as P}, - shared::{unique_map::UniqueMap, Identifier, Name, NamedAddressMap, NamedAddressMaps}, + shared::{ + files::MappedFiles, unique_map::UniqueMap, Identifier, Name, NamedAddressMap, + NamedAddressMaps, + }, typing::{ ast::{Exp, ExpListItem, ModuleDefinition, SequenceItem, SequenceItem_, UnannotatedExp_}, visitor::TypingVisitorContext, @@ -342,10 +344,8 @@ pub struct ParsingSymbolicator<'a> { /// string so that we can access it regardless of the ModuleIdent representation /// (e.g., in the parsing AST or in the typing AST) mod_outer_defs: &'a mut BTreeMap, - /// A mapping from file names to file content (used to obtain source file locations) - files: &'a SimpleFiles, - /// A mapping from file hashes to file IDs (used to obtain source file locations) - file_id_mapping: &'a HashMap, + /// Mapped file information for translating locations into positions + files: &'a MappedFiles, /// Associates uses for a given definition to allow displaying all references references: &'a mut BTreeMap>, /// Additional information about definitions @@ -375,16 +375,12 @@ pub struct Symbols { references: BTreeMap>, /// A mapping from uses to definitions in a file file_use_defs: BTreeMap, - /// A mapping from file hashes to file names - file_name_mapping: BTreeMap, /// A mapping from filePath to ModuleDefs pub file_mods: BTreeMap>, + /// Mapped file information for translating locations into positions + pub files: MappedFiles, /// Additional information about definitions def_info: BTreeMap, - /// A mapping from file names to file content (used to obtain source file locations) - pub files: SimpleFiles, - /// A mapping from file hashes to file IDs (used to obtain source file locations) - pub file_id_mapping: HashMap, /// IDE Annotation Information from the Compiler pub compiler_info: CompilerInfo, } @@ -1109,9 +1105,7 @@ impl Symbols { for (k, v) in other.references { self.references.entry(k).or_default().extend(v); } - self.file_use_defs.extend(other.file_use_defs); - self.file_name_mapping.extend(other.file_name_mapping); - self.file_mods.extend(other.file_mods); + self.files.extend(other.files); self.def_info.extend(other.def_info); } @@ -1127,7 +1121,7 @@ impl Symbols { } pub fn mod_defs(&self, fhash: &FileHash, mod_ident: ModuleIdent_) -> Option<&ModuleDefs> { - let Some(fpath) = self.file_name_mapping.get(fhash) else { + let Some(fpath) = self.files.file_name_mapping().get(fhash) else { return None; }; let Some(mod_defs) = self.file_mods.get(fpath) else { @@ -1197,24 +1191,15 @@ pub fn get_symbols( None }; - // get source files to be able to correlate positions (in terms of byte offsets) with actual - // file locations (in terms of line/column numbers) + let mut mapped_files: MappedFiles = MappedFiles::empty(); + + // Hash dependencies so 
we can check if something has changed. let source_files = file_sources(&resolution_graph, overlay_fs_root.clone()); - let mut files = SimpleFiles::new(); - let mut file_id_mapping = HashMap::new(); - let mut file_id_to_lines = HashMap::new(); - let mut file_name_mapping = BTreeMap::new(); let mut hasher = Sha256::new(); - for (fhash, (fname, source, is_dep)) in &source_files { - if *is_dep { - hasher.update(fhash.0); - } - let id = files.add(*fname, source.clone()); - file_id_mapping.insert(*fhash, id); - file_name_mapping.insert(*fhash, PathBuf::from(fname.as_str())); - let lines: Vec = source.lines().map(String::from).collect(); - file_id_to_lines.insert(id, lines); - } + source_files + .iter() + .filter(|(_, (_, _, is_dep))| *is_dep) + .for_each(|(fhash, _)| hasher.update(fhash.0)); let deps_hash = format!("{:X}", hasher.finalize()); let compiler_flags = resolution_graph.build_options.compiler_flags().clone(); @@ -1264,6 +1249,7 @@ pub fn get_symbols( .and_then(|pprog_and_comments_res| pprog_and_comments_res.ok()) .map(|libs| { eprintln!("created pre-compiled libs for {:?}", pkg_path); + mapped_files.extend(libs.files.clone()); let deps = Arc::new(libs); pkg_deps.insert( pkg_path.to_path_buf(), @@ -1305,6 +1291,7 @@ pub fn get_symbols( eprintln!("compiled to parsed AST"); let (compiler, parsed_program) = compiler.into_ast(); parsed_ast = Some(parsed_program.clone()); + mapped_files.extend(compiler.compilation_env_ref().mapped_files().clone()); // extract typed AST let compilation_result = compiler.at_parser(parsed_program).run::(); @@ -1344,14 +1331,10 @@ pub fn get_symbols( Ok((files, vec![])) })?; - let mut ide_diagnostics = lsp_empty_diagnostics(&file_name_mapping); + let mut ide_diagnostics = lsp_empty_diagnostics(mapped_files.file_name_mapping()); if let Some((compiler_diagnostics, failure)) = diagnostics { - let lsp_diagnostics = lsp_diagnostics( - &compiler_diagnostics.into_codespan_format(), - &files, - &file_id_mapping, - &file_name_mapping, - ); + let lsp_diagnostics = + lsp_diagnostics(&compiler_diagnostics.into_codespan_format(), &mapped_files); // start with empty diagnostics for all files and replace them with actual diagnostics // only for files that have failures/warnings so that diagnostics for all other files // (that no longer have failures/warnings) are reset @@ -1374,10 +1357,20 @@ pub fn get_symbols( let mut references = BTreeMap::new(); let mut def_info = BTreeMap::new(); + let mut file_id_to_lines = HashMap::new(); + for file_id in mapped_files.file_mapping().values() { + let Ok(file) = mapped_files.files().get(*file_id) else { + eprintln!("file id without source code"); + continue; + }; + let source = file.source(); + let lines: Vec = source.lines().map(String::from).collect(); + file_id_to_lines.insert(*file_id, lines); + } + pre_process_typed_modules( &typed_modules, - &files, - &file_id_mapping, + &mapped_files, &file_id_to_lines, &mut mod_outer_defs, &mut mod_use_defs, @@ -1389,8 +1382,7 @@ pub fn get_symbols( if let Some(libs) = compiled_libs.clone() { pre_process_typed_modules( &libs.typing.modules, - &files, - &file_id_mapping, + &mapped_files, &file_id_to_lines, &mut mod_outer_defs, &mut mod_use_defs, @@ -1407,8 +1399,7 @@ pub fn get_symbols( let mut parsing_symbolicator = ParsingSymbolicator { mod_outer_defs: &mut mod_outer_defs, - files: &files, - file_id_mapping: &file_id_mapping, + files: &mapped_files, references: &mut references, def_info: &mut def_info, use_defs: UseDefMap::new(), @@ -1433,8 +1424,7 @@ pub fn get_symbols( let mut compiler_info = 
compiler_info.unwrap(); let mut typing_symbolicator = typing_analysis::TypingAnalysisContext { mod_outer_defs: &mod_outer_defs, - files: &files, - file_id_mapping: &file_id_mapping, + files: &mapped_files, references: &mut references, def_info: &mut def_info, use_defs: UseDefMap::new(), @@ -1466,18 +1456,16 @@ pub fn get_symbols( let mut file_mods: BTreeMap> = BTreeMap::new(); for d in mod_outer_defs.into_values() { - let path = file_name_mapping.get(&d.fhash.clone()).unwrap(); + let path = mapped_files.file_path(&d.fhash.clone()); file_mods.entry(path.to_path_buf()).or_default().insert(d); } let symbols = Symbols { references, file_use_defs, - file_name_mapping, file_mods, def_info, - files, - file_id_mapping, + files: mapped_files, compiler_info, }; @@ -1488,8 +1476,7 @@ pub fn get_symbols( fn pre_process_typed_modules( typed_modules: &UniqueMap, - files: &SimpleFiles, - file_id_mapping: &HashMap, + files: &MappedFiles, file_id_to_lines: &HashMap>, mod_outer_defs: &mut BTreeMap, mod_use_defs: &mut BTreeMap, @@ -1504,7 +1491,6 @@ fn pre_process_typed_modules( &sp(pos, *module_ident), module_def, files, - file_id_mapping, file_id_to_lines, references, def_info, @@ -1655,11 +1641,9 @@ pub fn empty_symbols() -> Symbols { Symbols { file_use_defs: BTreeMap::new(), references: BTreeMap::new(), - file_name_mapping: BTreeMap::new(), file_mods: BTreeMap::new(), def_info: BTreeMap::new(), - files: SimpleFiles::new(), - file_id_mapping: HashMap::new(), + files: MappedFiles::empty(), compiler_info: CompilerInfo::new(), } } @@ -1681,8 +1665,7 @@ fn get_mod_outer_defs( loc: &Loc, mod_ident: &ModuleIdent, mod_def: &ModuleDefinition, - files: &SimpleFiles, - file_id_mapping: &HashMap, + files: &MappedFiles, file_id_to_lines: &HashMap>, references: &mut BTreeMap>, def_info: &mut BTreeMap, @@ -1702,7 +1685,7 @@ fn get_mod_outer_defs( if let StructFields::Defined(pos_fields, fields) = &def.fields { positional = *pos_fields; for (fpos, fname, (_, t)) in fields { - let start = match get_start_loc(&fpos, files, file_id_mapping) { + let start = match loc_start_to_lsp_position_opt(files, &fpos) { Some(s) => s, None => { debug_assert!(false); @@ -1714,7 +1697,7 @@ fn get_mod_outer_defs( start, }); let doc_string = extract_doc_string( - file_id_mapping, + files.file_mapping(), file_id_to_lines, &start, &fpos.file_hash(), @@ -1728,7 +1711,7 @@ fn get_mod_outer_defs( }; // process the struct itself - let name_start = match get_start_loc(&pos, files, file_id_mapping) { + let name_start = match loc_start_to_lsp_position_opt(files, &pos) { Some(s) => s, None => { debug_assert!(false); @@ -1755,7 +1738,7 @@ fn get_mod_outer_defs( Visibility::Internal }; let doc_string = extract_doc_string( - file_id_mapping, + files.file_mapping(), file_id_to_lines, &name_start, &pos.file_hash(), @@ -1787,7 +1770,7 @@ fn get_mod_outer_defs( } for (pos, name, c) in &mod_def.constants { - let name_start = match get_start_loc(&pos, files, file_id_mapping) { + let name_start = match loc_start_to_lsp_position_opt(files, &pos) { Some(s) => s, None => { debug_assert!(false); @@ -1796,7 +1779,7 @@ fn get_mod_outer_defs( }; constants.insert(*name, ConstDef { name_start }); let doc_string = extract_doc_string( - file_id_mapping, + files.file_mapping(), file_id_to_lines, &name_start, &pos.file_hash(), @@ -1817,7 +1800,7 @@ fn get_mod_outer_defs( if ignored_function(*name) { continue; } - let name_start = match get_start_loc(&pos, files, file_id_mapping) { + let name_start = match loc_start_to_lsp_position_opt(files, &pos) { Some(s) => s, None => { 
debug_assert!(false); @@ -1832,7 +1815,7 @@ fn get_mod_outer_defs( FunType::Regular }; let doc_string = extract_doc_string( - file_id_mapping, + files.file_mapping(), file_id_to_lines, &name_start, &pos.file_hash(), @@ -1879,7 +1862,7 @@ fn get_mod_outer_defs( let mut use_def_map = UseDefMap::new(); let ident = mod_ident.value; - let start = match get_start_loc(loc, files, file_id_mapping) { + let start = match loc_start_to_lsp_position_opt(files, loc) { Some(s) => s, None => { debug_assert!(false); @@ -1901,7 +1884,7 @@ fn get_mod_outer_defs( } }; - let doc_comment = extract_doc_string(file_id_mapping, file_id_to_lines, &start, &fhash); + let doc_comment = extract_doc_string(files.file_mapping(), file_id_to_lines, &start, &fhash); let mod_defs = ModuleDefs { fhash, ident, @@ -1914,7 +1897,7 @@ fn get_mod_outer_defs( // insert use of the module name in the definition itself let mod_name = ident.module; - if let Some(mod_name_start) = get_start_loc(&mod_name.loc(), files, file_id_mapping) { + if let Some(mod_name_start) = loc_start_to_lsp_position_opt(files, &mod_name.loc()) { use_def_map.insert( mod_name_start.line, UseDef::new( @@ -1937,14 +1920,6 @@ fn get_mod_outer_defs( (mod_defs, use_def_map) } -fn get_start_loc( - pos: &Loc, - files: &SimpleFiles, - file_id_mapping: &HashMap, -) -> Option { - get_loc(&pos.file_hash(), pos.start(), files, file_id_mapping) -} - impl<'a> ParsingSymbolicator<'a> { /// Get symbols for the whole program fn prog_symbols( @@ -2267,7 +2242,7 @@ impl<'a> ParsingSymbolicator<'a> { let Some(mod_defs) = self.mod_outer_defs.get_mut(mod_ident_str) else { return; }; - let Some(mod_name_start) = get_start_loc(&mod_name.loc(), self.files, self.file_id_mapping) + let Some(mod_name_start) = loc_start_to_lsp_position_opt(self.files, &mod_name.loc()) else { debug_assert!(false); return; @@ -2315,7 +2290,6 @@ impl<'a> ParsingSymbolicator<'a> { if let Some(mut ud) = add_struct_use_def( self.mod_outer_defs, self.files, - self.file_id_mapping, mod_ident_str.clone(), mod_defs, &name.value, @@ -2327,7 +2301,7 @@ impl<'a> ParsingSymbolicator<'a> { ) { // it's a struct - add it for the alias as well if let Some(alias) = alias_opt { - let Some(alias_start) = get_start_loc(&alias.loc, self.files, self.file_id_mapping) + let Some(alias_start) = loc_start_to_lsp_position_opt(self.files, &alias.loc) else { debug_assert!(false); return; @@ -2346,7 +2320,6 @@ impl<'a> ParsingSymbolicator<'a> { &name.value, self.mod_outer_defs, self.files, - self.file_id_mapping, mod_ident_str.clone(), mod_defs, &name.value, @@ -2358,7 +2331,7 @@ impl<'a> ParsingSymbolicator<'a> { ) { // it's a function - add it for the alias as well if let Some(alias) = alias_opt { - let Some(alias_start) = get_start_loc(&alias.loc, self.files, self.file_id_mapping) + let Some(alias_start) = loc_start_to_lsp_position_opt(self.files, &alias.loc) else { debug_assert!(false); return; @@ -2426,8 +2399,7 @@ impl<'a> ParsingSymbolicator<'a> { else { return; }; - let Some(def_start) = - get_start_loc(&var.loc(), self.files, self.file_id_mapping) + let Some(def_start) = loc_start_to_lsp_position_opt(self.files, &var.loc()) else { return; }; @@ -2471,7 +2443,7 @@ impl<'a> ParsingSymbolicator<'a> { return; }; let sp!(pos, name) = n; - let Some(loc) = get_start_loc(&pos, self.files, self.file_id_mapping) else { + let Some(loc) = loc_start_to_lsp_position_opt(self.files, &pos) else { return; }; self.alias_lengths.insert(loc, name.len()); @@ -2482,8 +2454,7 @@ impl<'a> ParsingSymbolicator<'a> { pub fn add_fun_use_def( fun_def_name: 
&Symbol, // may be different from use_name for methods mod_outer_defs: &BTreeMap, - files: &SimpleFiles, - file_id_mapping: &HashMap, + files: &MappedFiles, mod_ident_str: String, mod_defs: &ModuleDefs, use_name: &Symbol, @@ -2493,7 +2464,7 @@ pub fn add_fun_use_def( use_defs: &mut UseDefMap, alias_lengths: &BTreeMap, ) -> Option { - let Some(name_start) = get_start_loc(use_pos, files, file_id_mapping) else { + let Some(name_start) = loc_start_to_lsp_position_opt(files, use_pos) else { debug_assert!(false); return None; }; @@ -2522,8 +2493,7 @@ pub fn add_fun_use_def( /// Add use of a struct identifier pub fn add_struct_use_def( mod_outer_defs: &BTreeMap, - files: &SimpleFiles, - file_id_mapping: &HashMap, + files: &MappedFiles, mod_ident_str: String, mod_defs: &ModuleDefs, use_name: &Symbol, @@ -2533,7 +2503,7 @@ pub fn add_struct_use_def( use_defs: &mut UseDefMap, alias_lengths: &BTreeMap, ) -> Option { - let Some(name_start) = get_start_loc(use_pos, files, file_id_mapping) else { + let Some(name_start) = loc_start_to_lsp_position_opt(files, use_pos) else { debug_assert!(false); return None; }; @@ -2729,7 +2699,7 @@ pub fn def_ide_location(def_loc: &DefLoc, symbols: &Symbols) -> Location { start: def_loc.start, end: def_loc.start, }; - let path = symbols.file_name_mapping.get(&def_loc.fhash).unwrap(); + let path = symbols.files.file_path(&def_loc.fhash); Location { uri: Url::from_file_path(path).unwrap(), range, @@ -2764,7 +2734,7 @@ pub fn on_go_to_type_def_request(context: &Context, request: &Request, symbols: start: def_loc.start, end: def_loc.start, }; - let path = symbols.file_name_mapping.get(&u.def_loc.fhash).unwrap(); + let path = symbols.files.file_path(&u.def_loc.fhash); let loc = Location { uri: Url::from_file_path(path).unwrap(), range, @@ -2814,7 +2784,7 @@ pub fn on_references_request(context: &Context, request: &Request, symbols: &Sym start: ref_loc.start, end: end_pos, }; - let path = symbols.file_name_mapping.get(&ref_loc.fhash).unwrap(); + let path = symbols.files.file_path(&ref_loc.fhash); locs.push(Location { uri: Url::from_file_path(path).unwrap(), range, @@ -3072,7 +3042,7 @@ fn assert_use_def_with_doc_string( type_def: Option<(u32, u32, &str)>, doc_string: Option<&str>, ) { - let file_name_mapping = &symbols.file_name_mapping; + let file_name_mapping = &symbols.files.file_name_mapping(); let def_info = &symbols.def_info; let Some(uses) = mod_symbols.get(use_line) else { diff --git a/external-crates/move/crates/move-analyzer/src/utils.rs b/external-crates/move/crates/move-analyzer/src/utils.rs index 8f28d4f3fb254..750c06948bb8a 100644 --- a/external-crates/move/crates/move-analyzer/src/utils.rs +++ b/external-crates/move/crates/move-analyzer/src/utils.rs @@ -1,67 +1,70 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use codespan_reporting::files::{Files, SimpleFiles}; use lsp_types::Position; use move_command_line_common::files::FileHash; -use move_compiler::unit_test::filter_test_members::UNIT_TEST_POISON_FUN_NAME; +use move_compiler::{ + shared::files::MappedFiles, unit_test::filter_test_members::UNIT_TEST_POISON_FUN_NAME, +}; use move_ir_types::location::*; use move_symbol_pool::Symbol; -use std::collections::HashMap; + +//************************************************************************************************** +// Location Conversions +//************************************************************************************************** /// Converts a location from the byte index format to the line/character (Position) 
format, where /// line/character are 0-based. -pub fn get_loc( - fhash: &FileHash, - pos: ByteIndex, - files: &SimpleFiles, - file_id_mapping: &HashMap, +pub fn offset_to_lsp_position( + files: &MappedFiles, + file_hash: &FileHash, + offset: ByteIndex, ) -> Option { - let id = match file_id_mapping.get(fhash) { - Some(v) => v, - None => return None, + let loc_posn = files.byte_index_to_position_opt(file_hash, offset)?; + let result = Position { + line: loc_posn.line_offset() as u32, + character: loc_posn.column_offset() as u32, }; - match files.location(*id, pos as usize) { - Ok(v) => Some(Position { - // lsp line is 0-indexed, lsp column is 0-indexed - line: v.line_number as u32 - 1, - character: v.column_number as u32 - 1, - }), - Err(_) => None, - } + Some(result) } -/// Converts a position (line/column) to byte index in the file. -pub fn get_byte_idx( - pos: Position, - fhash: FileHash, - files: &SimpleFiles, - file_id_mapping: &HashMap, -) -> Option { - let Some(file_id) = file_id_mapping.get(&fhash) else { - return None; +pub fn loc_start_to_lsp_position_opt(files: &MappedFiles, loc: &Loc) -> Option { + let start_loc_posn = files.start_position_opt(loc)?; + let result = Position { + line: start_loc_posn.line_offset() as u32, + character: start_loc_posn.column_offset() as u32, }; - let Ok(line_range) = files.line_range(*file_id, pos.line as usize) else { - return None; + Some(result) +} + +pub fn loc_end_to_lsp_position_opt(files: &MappedFiles, loc: &Loc) -> Option { + let end_loc_posn = files.end_position_opt(loc)?; + let result = Position { + line: end_loc_posn.line_offset() as u32, + character: end_loc_posn.column_offset() as u32, }; - Some(line_range.start as u32 + pos.character) + Some(result) } -/// Convert a move_compiler Position into an lsp_types position -pub fn to_lsp_position(pos: move_compiler::diagnostics::Position) -> Position { - Position { - // lsp line is 0-indexed, lsp column is 0-indexed - line: pos.line as u32 - 1, - character: pos.column as u32, - } +pub fn lsp_position_to_loc( + files: &MappedFiles, + file_hash: FileHash, + pos: &Position, +) -> Option { + let line_offset = pos.line; + let char_offset = pos.character; + files.line_char_offset_to_loc_opt(file_hash, line_offset, char_offset) } -pub fn get_start_position_opt( - pos: &Loc, - files: &SimpleFiles, - file_id_mapping: &HashMap, -) -> Option { - get_loc(&pos.file_hash(), pos.start(), files, file_id_mapping) +/// Converts a position (line/column) to byte index in the file. +pub fn lsp_position_to_byte_index( + files: &MappedFiles, + file_hash: FileHash, + pos: &Position, +) -> Option { + files + .line_char_offset_to_loc_opt(file_hash, pos.line, pos.character) + .map(|loc| loc.start()) } /// Some functions defined in a module need to be ignored. 
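For readers following the refactor, here is a minimal sketch (not part of this diff) of how the new `utils` helpers are intended to compose when turning a compiler `Loc` into an LSP `Range`. The `loc_to_lsp_range_opt` wrapper and the `move_analyzer::utils` import path are illustrative assumptions; `MappedFiles` and the two `*_to_lsp_position_opt` helpers are the ones introduced above.

```rust
use lsp_types::Range;
// Assumed import path for the helpers added in move-analyzer/src/utils.rs above.
use move_analyzer::utils::{loc_end_to_lsp_position_opt, loc_start_to_lsp_position_opt};
use move_compiler::shared::files::MappedFiles;
use move_ir_types::location::Loc;

/// Hypothetical wrapper: build an LSP `Range` for `loc`, returning `None` when the
/// location's file hash was never registered in `files`.
fn loc_to_lsp_range_opt(files: &MappedFiles, loc: &Loc) -> Option<Range> {
    let start = loc_start_to_lsp_position_opt(files, loc)?;
    let end = loc_end_to_lsp_position_opt(files, loc)?;
    Some(Range::new(start, end))
}
```

This is the same shape `lsp_diagnostics` in `diagnostics.rs` now uses to build its `Range` values.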
diff --git a/external-crates/move/crates/move-cli/src/base/test.rs b/external-crates/move/crates/move-cli/src/base/test.rs index 7890922976eed..dda5f8faaf775 100644 --- a/external-crates/move/crates/move-cli/src/base/test.rs +++ b/external-crates/move/crates/move-cli/src/base/test.rs @@ -172,7 +172,7 @@ pub fn run_move_unit_tests( let (mut compiler, cfgir) = compiler.into_ast(); let compilation_env = compiler.compilation_env(); let built_test_plan = construct_test_plan(compilation_env, Some(root_package), &cfgir); - let mapped_files = compilation_env.file_mapping().clone(); + let mapped_files = compilation_env.mapped_files().clone(); let compilation_result = compiler.at_cfgir(cfgir).build(); let (units, warnings) = diff --git a/external-crates/move/crates/move-cli/src/sandbox/utils/mod.rs b/external-crates/move/crates/move-cli/src/sandbox/utils/mod.rs index 58a597f911785..2855ab9460665 100644 --- a/external-crates/move/crates/move-cli/src/sandbox/utils/mod.rs +++ b/external-crates/move/crates/move-cli/src/sandbox/utils/mod.rs @@ -13,7 +13,10 @@ use move_binary_format::{ }; use move_bytecode_utils::Modules; use move_command_line_common::files::{FileHash, MOVE_COMPILED_EXTENSION}; -use move_compiler::diagnostics::{self, report_diagnostics, Diagnostic, Diagnostics, FileName}; +use move_compiler::{ + diagnostics::{self, report_diagnostics, Diagnostic, Diagnostics}, + shared::files::FileName, +}; use move_core_types::{ account_address::AccountAddress, effects::{ChangeSet, Op}, @@ -267,7 +270,7 @@ pub(crate) fn explain_publish_error( } } } - report_diagnostics(&files, diags) + report_diagnostics(&files.into(), diags) } status_code => { println!("Publishing failed with unexpected error {:?}", status_code) diff --git a/external-crates/move/crates/move-compiler/src/command_line/compiler.rs b/external-crates/move/crates/move-compiler/src/command_line/compiler.rs index 65b2b581cb53d..ad953a238d1a8 100644 --- a/external-crates/move/crates/move-compiler/src/command_line/compiler.rs +++ b/external-crates/move/crates/move-compiler/src/command_line/compiler.rs @@ -17,6 +17,7 @@ use crate::{ expansion, hlir, interface_generator, naming, parser::{self, comments::*, *}, shared::{ + files::{FilesSourceText, MappedFiles}, CompilationEnv, Flags, IndexedPhysicalPackagePath, IndexedVfsPackagePath, NamedAddressMap, NamedAddressMaps, NumericalAddress, PackageConfig, PackagePaths, SaveFlag, SaveHook, }, @@ -97,8 +98,7 @@ enum PassResult { #[derive(Clone)] pub struct FullyCompiledProgram { - // TODO don't store this... 
- pub files: FilesSourceText, + pub files: MappedFiles, pub parser: parser::ast::Program, pub expansion: expansion::ast::Program, pub naming: naming::ast::Program, @@ -310,7 +310,7 @@ impl Compiler { pub fn run( self, ) -> anyhow::Result<( - FilesSourceText, + MappedFiles, Result<(CommentMap, SteppedCompiler), (Pass, Diagnostics)>, )> { let Self { @@ -384,30 +384,29 @@ impl Compiler { compilation_env.add_custom_known_filters(prefix, filters)?; } - let (mut source_text, pprog, comments) = + let (source_text, pprog, comments) = parse_program(&mut compilation_env, maps, targets, deps)?; - source_text.iter_mut().for_each(|(_, (path, _))| { + for (fhash, (fname, contents)) in &source_text { // TODO better support for bytecode interface file paths - *path = vfs_to_original_path.get(path).copied().unwrap_or(*path) - }); + let path = vfs_to_original_path.get(fname).copied().unwrap_or(*fname); - for (fhash, (fname, contents)) in source_text.iter() { - compilation_env.add_source_file(*fhash, *fname, contents.clone()) + compilation_env.add_source_file(*fhash, path, contents.clone()) } + let mapped_files = compilation_env.mapped_files().clone(); let res: Result<_, (Pass, Diagnostics)> = SteppedCompiler::new_at_parser(compilation_env, pre_compiled_lib, pprog) .run::() .map(|compiler| (comments, compiler)); - Ok((source_text, res)) + Ok((mapped_files, res)) } pub fn generate_migration_patch( mut self, root_module: &Symbol, - ) -> anyhow::Result<(FilesSourceText, Result, Diagnostics>)> { + ) -> anyhow::Result<(MappedFiles, Result, Diagnostics>)> { self.package_configs.get_mut(root_module).unwrap().edition = Edition::E2024_MIGRATION; let (files, res) = self.run::()?; if let Err((pass, mut diags)) = res { @@ -433,12 +432,12 @@ impl Compiler { } } - pub fn check(self) -> anyhow::Result<(FilesSourceText, Result<(), Diagnostics>)> { + pub fn check(self) -> anyhow::Result<(MappedFiles, Result<(), Diagnostics>)> { let (files, res) = self.run::()?; Ok((files, res.map(|_| ()).map_err(|(_pass, diags)| diags))) } - pub fn check_and_report(self) -> anyhow::Result { + pub fn check_and_report(self) -> anyhow::Result { let (files, res) = self.check()?; unwrap_or_report_diagnostics(&files, res); Ok(files) @@ -447,7 +446,7 @@ impl Compiler { pub fn build( self, ) -> anyhow::Result<( - FilesSourceText, + MappedFiles, Result<(Vec, Diagnostics), Diagnostics>, )> { let (files, res) = self.run::()?; @@ -458,7 +457,7 @@ impl Compiler { )) } - pub fn build_and_report(self) -> anyhow::Result<(FilesSourceText, Vec)> { + pub fn build_and_report(self) -> anyhow::Result<(MappedFiles, Vec)> { let (files, units_res) = self.build()?; let (units, warnings) = unwrap_or_report_diagnostics(&files, units_res); report_warnings(&files, warnings); @@ -501,6 +500,9 @@ impl SteppedCompiler

{ pub fn compilation_env(&mut self) -> &mut CompilationEnv { &mut self.compilation_env } + pub fn compilation_env_ref(&self) -> &CompilationEnv { + &self.compilation_env + } } macro_rules! ast_stepped_compilers { @@ -573,14 +575,14 @@ macro_rules! ast_stepped_compilers { Ok(units) } - pub fn check_and_report(self, files: &FilesSourceText) { + pub fn check_and_report(self, files: &MappedFiles) { let errors_result = self.check().map_err(|(_, diags)| diags); unwrap_or_report_diagnostics(&files, errors_result); } pub fn build_and_report( self, - files: &FilesSourceText, + files: &MappedFiles, ) -> Vec { let units_result = self.build().map_err(|(_, diags)| diags); let (units, warnings) = unwrap_or_report_diagnostics(&files, units_result); @@ -628,7 +630,7 @@ pub fn construct_pre_compiled_lib, NamedAddress: Into, flags: Flags, vfs_root: Option, -) -> anyhow::Result> { +) -> anyhow::Result> { let hook = SaveHook::new([ SaveFlag::Parser, SaveFlag::Expansion, @@ -701,14 +703,14 @@ pub fn sanity_check_compiled_units( ) { let ice_errors = compiled_unit::verify_units(compiled_units); if !ice_errors.is_empty() { - report_diagnostics(&files, ice_errors) + report_diagnostics(&files.into(), ice_errors) } } /// Given a file map and a set of compiled programs, saves the compiled programs to disk pub fn output_compiled_units( emit_source_maps: bool, - files: FilesSourceText, + files: MappedFiles, compiled_units: Vec, out_dir: &str, ) -> anyhow::Result<()> { diff --git a/external-crates/move/crates/move-compiler/src/diagnostics/mod.rs b/external-crates/move/crates/move-compiler/src/diagnostics/mod.rs index c520494e1c042..12db49bc7625f 100644 --- a/external-crates/move/crates/move-compiler/src/diagnostics/mod.rs +++ b/external-crates/move/crates/move-compiler/src/diagnostics/mod.rs @@ -11,14 +11,14 @@ use crate::{ WellKnownFilterName, }, shared::{ - ast_debug::AstDebug, known_attributes, FILTER_UNUSED_CONST, FILTER_UNUSED_FUNCTION, - FILTER_UNUSED_MUT_PARAM, FILTER_UNUSED_MUT_REF, FILTER_UNUSED_STRUCT_FIELD, - FILTER_UNUSED_TYPE_PARAMETER, + ast_debug::AstDebug, + files::{ByteSpan, FileByteSpan, FileId, MappedFiles}, + known_attributes, FILTER_UNUSED_CONST, FILTER_UNUSED_FUNCTION, FILTER_UNUSED_MUT_PARAM, + FILTER_UNUSED_MUT_REF, FILTER_UNUSED_STRUCT_FIELD, FILTER_UNUSED_TYPE_PARAMETER, }, }; use codespan_reporting::{ self as csr, - files::SimpleFiles, term::{ emit, termcolor::{Buffer, ColorChoice, StandardStream, WriteColor}, @@ -26,17 +26,15 @@ use codespan_reporting::{ }, }; use csr::files::Files; -use move_command_line_common::{env::read_env_var, files::FileHash}; +use move_command_line_common::env::read_env_var; use move_ir_types::location::*; -use move_symbol_pool::Symbol; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeMap, BTreeSet, HashMap, HashSet}, + collections::{BTreeMap, BTreeSet, HashSet}, io::Write, iter::FromIterator, ops::Range, path::PathBuf, - sync::Arc, }; use self::codes::UnusedItem; @@ -45,11 +43,6 @@ use self::codes::UnusedItem; // Types //************************************************************************************************** -pub type FileId = usize; -pub type FileName = Symbol; - -pub type FilesSourceText = HashMap)>; - #[derive(PartialEq, Eq, Clone, Debug, Hash)] #[must_use] pub struct Diagnostic { @@ -131,192 +124,17 @@ pub struct Migration { changes: BTreeMap>, } -/// A mapping from file ids to file contents along with the mapping of filehash to fileID. 
-#[derive(Debug, Clone)] -pub struct MappedFiles { - files: SimpleFiles>, - file_mapping: HashMap, -} - -/// A file, and the line:column start, and line:column end that corresponds to a `Loc` -#[allow(dead_code)] -pub struct FilePosition { - pub file_id: FileId, - pub start: Position, - pub end: Position, -} - -/// A position holds the byte offset along with the line and column location in a file. -pub struct Position { - pub line: usize, - pub column: usize, - pub byte: usize, -} - -/// A file, and the usize start and usize end that corresponds to a `Loc` -pub struct FileByteSpan { - file_id: FileId, - byte_span: ByteSpan, -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct ByteSpan { - start: usize, - end: usize, -} - -impl MappedFiles { - pub fn new(files: FilesSourceText) -> Self { - let mut simple_files = SimpleFiles::new(); - let mut file_mapping = HashMap::new(); - for (fhash, (fname, source)) in files { - let id = simple_files.add(fname, source); - file_mapping.insert(fhash, id); - } - Self { - files: simple_files, - file_mapping, - } - } - - pub fn empty() -> Self { - Self { - files: SimpleFiles::new(), - file_mapping: HashMap::new(), - } - } - - pub fn file_hash_to_file_id(&self, fhash: &FileHash) -> Option { - self.file_mapping.get(fhash).copied() - } - - pub fn add(&mut self, fhash: FileHash, fname: FileName, source: Arc) { - let id = self.files.add(fname, source); - self.file_mapping.insert(fhash, id); - } -} - -// This is abstracted into a trait for reuse by the Move Analyzer. - -pub trait PositionInfo { - type FileContents: AsRef; - fn files(&self) -> &SimpleFiles; - fn file_mapping(&self) -> &HashMap; - - fn filename(&self, fhash: &FileHash) -> &str { - let file_id = self.file_mapping().get(fhash).unwrap(); - self.files().get(*file_id).unwrap().name() - } - - fn start_position(&self, loc: &Loc) -> Position { - self.position(loc).start - } - - fn end_position(&self, loc: &Loc) -> Position { - self.position(loc).end - } - - fn position(&self, loc: &Loc) -> FilePosition { - self.position_opt(loc).unwrap() - } - - fn byte_span(&self, loc: &Loc) -> FileByteSpan { - self.byte_span_opt(loc).unwrap() - } - - fn start_position_opt(&self, loc: &Loc) -> Option { - self.position_opt(loc).map(|posn| posn.start) - } - - fn end_position_opt(&self, loc: &Loc) -> Option { - self.position_opt(loc).map(|posn| posn.end) - } - - fn position_opt(&self, loc: &Loc) -> Option { - let start_loc = loc.start() as usize; - let end_loc = loc.end() as usize; - let file_id = *self.file_mapping().get(&loc.file_hash())?; - let start_file_loc = self.files().location(file_id, start_loc).ok()?; - let end_file_loc = self.files().location(file_id, end_loc).ok()?; - let posn = FilePosition { - file_id, - start: Position { - line: start_file_loc.line_number, - column: start_file_loc.column_number - 1, - byte: start_loc, - }, - end: Position { - line: end_file_loc.line_number, - column: end_file_loc.column_number - 1, - byte: end_loc, - }, - }; - Some(posn) - } - - fn byte_span_opt(&self, loc: &Loc) -> Option { - let start = loc.start() as usize; - let end = loc.end() as usize; - let file_id = *self.file_mapping().get(&loc.file_hash())?; - let posn = FileByteSpan { - byte_span: ByteSpan { start, end }, - file_id, - }; - Some(posn) - } - - /// Given a line number in the file return the `Loc` for the line. 
- fn line_to_loc_opt(&self, file_hash: &FileHash, line_number: usize) -> Option { - let file_id = self.file_mapping().get(file_hash)?; - let line_range = self.files().line_range(*file_id, line_number).ok()?; - Some(Loc::new( - *file_hash, - line_range.start as u32, - line_range.end as u32, - )) - } - - /// Given a location `Loc` return a new loc only for source with leading and trailing - /// whitespace removed. - fn trimmed_loc_opt(&self, loc: &Loc) -> Option { - let source_str = self.source_of_loc_opt(loc)?; - let trimmed_front = source_str.trim_start(); - let new_start = loc.start() as usize + (source_str.len() - trimmed_front.len()); - let trimmed_back = trimmed_front.trim_end(); - let new_end = (loc.end() as usize).saturating_sub(trimmed_front.len() - trimmed_back.len()); - Some(Loc::new(loc.file_hash(), new_start as u32, new_end as u32)) - } - - /// Given a location `Loc` return the source for the location. This include any leading and - /// trailing whitespace. - fn source_of_loc_opt(&self, loc: &Loc) -> Option<&str> { - let file_id = *self.file_mapping().get(&loc.file_hash())?; - Some(&self.files().source(file_id).ok()?[loc.usize_range()]) - } -} - -impl PositionInfo for MappedFiles { - type FileContents = Arc; - fn files(&self) -> &SimpleFiles { - &self.files - } - - fn file_mapping(&self) -> &HashMap { - &self.file_mapping - } -} - //************************************************************************************************** // Diagnostic Reporting //************************************************************************************************** -pub fn report_diagnostics(files: &FilesSourceText, diags: Diagnostics) -> ! { +pub fn report_diagnostics(files: &MappedFiles, diags: Diagnostics) -> ! { let should_exit = true; report_diagnostics_impl(files, diags, should_exit); std::process::exit(1) } -pub fn report_warnings(files: &FilesSourceText, warnings: Diagnostics) { +pub fn report_warnings(files: &MappedFiles, warnings: Diagnostics) { if warnings.is_empty() { return; } @@ -324,17 +142,17 @@ pub fn report_warnings(files: &FilesSourceText, warnings: Diagnostics) { report_diagnostics_impl(files, warnings, false) } -fn report_diagnostics_impl(files: &FilesSourceText, diags: Diagnostics, should_exit: bool) { +fn report_diagnostics_impl(files: &MappedFiles, diags: Diagnostics, should_exit: bool) { let color_choice = diags.env_color(); let mut writer = StandardStream::stderr(color_choice); - output_diagnostics(&mut writer, files, diags); + render_diagnostics(&mut writer, files, diags); if should_exit { std::process::exit(1); } } pub fn unwrap_or_report_pass_diagnostics( - files: &FilesSourceText, + files: &MappedFiles, res: Result, ) -> T { match res { @@ -346,7 +164,7 @@ pub fn unwrap_or_report_pass_diagnostics( } } -pub fn unwrap_or_report_diagnostics(files: &FilesSourceText, res: Result) -> T { +pub fn unwrap_or_report_diagnostics(files: &MappedFiles, res: Result) -> T { match res { Ok(t) => t, Err(diags) => { @@ -357,7 +175,7 @@ pub fn unwrap_or_report_diagnostics(files: &FilesSourceText, res: Result Vec { let ansi_color = match env_color() { @@ -368,7 +186,7 @@ pub fn report_diagnostics_to_buffer_with_env_color( } pub fn report_diagnostics_to_buffer( - files: &FilesSourceText, + files: &MappedFiles, diags: Diagnostics, ansi_color: bool, ) -> Vec { @@ -377,7 +195,7 @@ pub fn report_diagnostics_to_buffer( } else { Buffer::no_color() }; - output_diagnostics(&mut writer, files, diags); + render_diagnostics(&mut writer, files, diags); writer.into_inner() } @@ -404,15 +222,6 @@ 
fn env_color() -> ColorChoice { } } -fn output_diagnostics( - writer: &mut W, - sources: &FilesSourceText, - diags: Diagnostics, -) { - let mapping = MappedFiles::new(sources.clone()); - render_diagnostics(writer, &mapping, diags); -} - fn render_diagnostics(writer: &mut dyn WriteColor, mapping: &MappedFiles, diags: Diagnostics) { let Diagnostics { diags: Some(mut diags), @@ -457,7 +266,7 @@ fn emit_diagnostics_text( } seen.insert(diag.clone()); let rendered = render_diagnostic_text(mapped_files, diag); - emit(writer, &Config::default(), &mapped_files.files, &rendered).unwrap() + emit(writer, &Config::default(), mapped_files.files(), &rendered).unwrap() } } @@ -519,7 +328,7 @@ fn emit_diagnostics_json( //************************************************************************************************** pub fn generate_migration_diff( - files: &FilesSourceText, + files: &MappedFiles, diags: &Diagnostics, ) -> Option<(Migration, /* Migration errors */ Diagnostics)> { match diags { @@ -547,7 +356,7 @@ pub fn generate_migration_diff( } // Used in test harness for unit testing -pub fn report_migration_to_buffer(files: &FilesSourceText, diags: Diagnostics) -> Vec { +pub fn report_migration_to_buffer(files: &MappedFiles, diags: Diagnostics) -> Vec { let mut writer = Buffer::no_color(); if let Some((mut diff, errors)) = generate_migration_diff(files, &diags) { let rendered_errors = report_diagnostics_to_buffer(files, errors, /* color */ false); @@ -893,13 +702,14 @@ impl Diagnostic { let bloc = mapped_files.position(ploc); JsonDiagnostic { file: mapped_files - .files + .files() .get(bloc.file_id) .unwrap() .name() .to_string(), - line: bloc.start.line, - column: bloc.start.column, + // TODO: This line and column choice is a bit weird. Consider changing it. + line: bloc.start.user_line(), + column: bloc.start.column_offset(), level: format!("{:?}", info.severity()), category: info.category(), code: info.code(), @@ -1156,10 +966,9 @@ impl UnprefixedWarningFilters { impl Migration { pub fn new( - sources: FilesSourceText, + mapped_files: MappedFiles, diags: Vec, ) -> (Migration, /* Migration errors */ Diagnostics) { - let mapped_files = MappedFiles::new(sources); let mut mig = Migration { changes: BTreeMap::new(), mapped_files, @@ -1212,7 +1021,11 @@ impl Migration { } fn get_file_contents(&self, file_id: FileId) -> String { - self.mapped_files.files.source(file_id).unwrap().to_string() + self.mapped_files + .files() + .source(file_id) + .unwrap() + .to_string() } fn render_changes(source: String, changes: &mut [(ByteSpan, MigrationChange)]) -> String { @@ -1280,7 +1093,7 @@ impl Migration { let mut names = self .changes .keys() - .map(|id| (*id, *self.mapped_files.files.get(*id).unwrap().name())) + .map(|id| (*id, *self.mapped_files.files().get(*id).unwrap().name())) .collect::>(); names.sort_by_key(|(_, name)| *name); for (file_id, name) in names { @@ -1312,7 +1125,7 @@ impl Migration { let mut names = self .changes .keys() - .map(|id| (*id, *self.mapped_files.files.get(*id).unwrap().name())) + .map(|id| (*id, *self.mapped_files.files().get(*id).unwrap().name())) .collect::>(); names.sort_by_key(|(_, name)| *name); for (file_id, name) in names { diff --git a/external-crates/move/crates/move-compiler/src/parser/mod.rs b/external-crates/move/crates/move-compiler/src/parser/mod.rs index 7c1eb70f41cfd..5b2e299d813e2 100644 --- a/external-crates/move/crates/move-compiler/src/parser/mod.rs +++ b/external-crates/move/crates/move-compiler/src/parser/mod.rs @@ -12,18 +12,14 @@ mod token_set; pub(crate) mod 
verification_attribute_filter; use crate::{ - diagnostics::FilesSourceText, parser::{self, ast::PackageDefinition, syntax::parse_file_string}, - shared::{CompilationEnv, IndexedVfsPackagePath, NamedAddressMaps}, + shared::{files::MappedFiles, CompilationEnv, IndexedVfsPackagePath, NamedAddressMaps}, }; use anyhow::anyhow; use comments::*; use move_command_line_common::files::FileHash; use move_symbol_pool::Symbol; -use std::{ - collections::{BTreeSet, HashMap}, - sync::Arc, -}; +use std::{collections::BTreeSet, sync::Arc}; use vfs::VfsPath; /// Parses program's targets and dependencies, both of which are read from different virtual file @@ -33,13 +29,13 @@ pub(crate) fn parse_program( named_address_maps: NamedAddressMaps, mut targets: Vec, mut deps: Vec, -) -> anyhow::Result<(FilesSourceText, parser::ast::Program, CommentMap)> { +) -> anyhow::Result<(MappedFiles, parser::ast::Program, CommentMap)> { // sort the filenames so errors about redefinitions, or other inter-file conflicts, are // deterministic targets.sort_by(|p1, p2| p1.path.as_str().cmp(p2.path.as_str())); deps.sort_by(|p1, p2| p1.path.as_str().cmp(p2.path.as_str())); ensure_targets_deps_dont_intersect(compilation_env, &targets, &mut deps)?; - let mut files: FilesSourceText = HashMap::new(); + let mut files: MappedFiles = MappedFiles::empty(); let mut source_definitions = Vec::new(); let mut source_comments = CommentMap::new(); let mut lib_definitions = Vec::new(); @@ -118,7 +114,7 @@ fn ensure_targets_deps_dont_intersect( fn parse_file( path: &VfsPath, compilation_env: &mut CompilationEnv, - files: &mut FilesSourceText, + files: &mut MappedFiles, package: Option, ) -> anyhow::Result<( Vec, @@ -132,7 +128,7 @@ fn parse_file( let source_str = Arc::from(source_buffer); if let Err(ds) = verify_string(file_hash, &source_str) { compilation_env.add_diags(ds); - files.insert(file_hash, (fname, source_str)); + files.add(file_hash, fname, source_str); return Ok((vec![], MatchedFileCommentMap::new(), file_hash)); } let (defs, comments) = match parse_file_string(compilation_env, file_hash, &source_str, package) @@ -143,6 +139,6 @@ fn parse_file( (vec![], MatchedFileCommentMap::new()) } }; - files.insert(file_hash, (fname, source_str)); + files.add(file_hash, fname, source_str); Ok((defs, comments, file_hash)) } diff --git a/external-crates/move/crates/move-compiler/src/shared/files.rs b/external-crates/move/crates/move-compiler/src/shared/files.rs new file mode 100644 index 0000000000000..78e39169ffd80 --- /dev/null +++ b/external-crates/move/crates/move-compiler/src/shared/files.rs @@ -0,0 +1,346 @@ +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +use codespan_reporting::files::{Files, SimpleFiles}; +use move_command_line_common::files::FileHash; +use move_ir_types::location::*; +use move_symbol_pool::Symbol; +use std::{ + collections::{hash_map, BTreeMap, HashMap}, + path::PathBuf, + sync::Arc, +}; + +//************************************************************************************************** +// Types +//************************************************************************************************** + +pub type FileId = usize; +pub type FileName = Symbol; + +pub type FilesSourceText = HashMap)>; + +/// A mapping from file ids to file contents along with the mapping of filehash to fileID. 
+#[derive(Debug, Clone)]
+pub struct MappedFiles {
+    files: SimpleFiles<Symbol, Arc<str>>,
+    file_mapping: HashMap<FileHash, FileId>,
+    file_name_mapping: BTreeMap<FileHash, PathBuf>,
+}
+
+/// A file, and the line:column start, and line:column end that correspond to a `Loc`
+#[allow(dead_code)]
+pub struct FilePosition {
+    pub file_id: FileId,
+    pub start: Position,
+    pub end: Position,
+}
+
+/// A position holds the byte offset along with the line and column location in a file.
+/// Both are zero-indexed.
+pub struct Position {
+    // zero-indexed line offset
+    line_offset: usize,
+    // zero-indexed column offset
+    column_offset: usize,
+    // zero-indexed byte offset
+    byte_offset: usize,
+}
+
+/// A file, and the usize start and usize end that correspond to a `Loc`
+pub struct FileByteSpan {
+    pub file_id: FileId,
+    pub byte_span: ByteSpan,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct ByteSpan {
+    pub start: usize,
+    pub end: usize,
+}
+
+//**************************************************************************************************
+// Traits and Impls
+//**************************************************************************************************
+impl Position {
+    /// User-facing (1-indexed) line
+    pub fn user_line(&self) -> usize {
+        self.line_offset + 1
+    }
+
+    /// User-facing (1-indexed) column
+    pub fn user_column(&self) -> usize {
+        self.column_offset + 1
+    }
+
+    /// Line offset (0-indexed)
+    pub fn line_offset(&self) -> usize {
+        self.line_offset
+    }
+
+    /// Column offset (0-indexed)
+    pub fn column_offset(&self) -> usize {
+        self.column_offset
+    }
+
+    /// Byte offset for position (0-indexed)
+    pub fn byte_offset(&self) -> usize {
+        self.byte_offset
+    }
+}
+
+impl MappedFiles {
+    pub fn new(files: FilesSourceText) -> Self {
+        let mut simple_files = SimpleFiles::new();
+        let mut file_mapping = HashMap::new();
+        let mut file_name_mapping = BTreeMap::new();
+        for (fhash, (fname, source)) in files {
+            let id = simple_files.add(fname, source);
+            file_mapping.insert(fhash, id);
+            file_name_mapping.insert(fhash, PathBuf::from(fname.as_str()));
+        }
+        Self {
+            files: simple_files,
+            file_mapping,
+            file_name_mapping,
+        }
+    }
+
+    pub fn empty() -> Self {
+        Self {
+            files: SimpleFiles::new(),
+            file_mapping: HashMap::new(),
+            file_name_mapping: BTreeMap::new(),
+        }
+    }
+
+    pub fn extend(&mut self, other: Self) {
+        for (file_hash, file_id) in other.file_mapping {
+            let Ok(file) = other.files.get(file_id) else {
+                debug_assert!(false, "Found a file without a file entry");
+                continue;
+            };
+            let Some(path) = other.file_name_mapping.get(&file_hash) else {
+                debug_assert!(false, "Found a file without a path entry");
+                continue;
+            };
+            debug_assert!(
+                !self.file_mapping.contains_key(&file_hash),
+                "Found a repeat file hash"
+            );
+            let fname = format!("{}", path.to_string_lossy());
+            self.add(file_hash, fname.into(), file.source().clone());
+        }
+    }
+
+    pub fn add(&mut self, fhash: FileHash, fname: FileName, source: Arc<str>) {
+        let id = self.files.add(fname, source);
+        self.file_mapping.insert(fhash, id);
+        self.file_name_mapping
+            .insert(fhash, PathBuf::from(fname.as_str()));
+    }
+
+    pub fn get(&self, fhash: &FileHash) -> Option<(Symbol, Arc<str>)> {
+        let file_id = self.file_mapping.get(fhash)?;
+        self.files
+            .get(*file_id)
+            .ok()
+            .map(|file| (*file.name(), file.source().clone()))
+    }
+
+    /// Returns the FileHashes for iteration
+    pub fn keys(&self) -> hash_map::Keys<'_, FileHash, FileId> {
+        self.file_mapping.keys()
+    }
+
+    pub fn files(&self) -> &SimpleFiles<Symbol, Arc<str>> {
+        &self.files
+    }
+
+    pub fn file_mapping(&self) ->
&HashMap { + &self.file_mapping + } + + pub fn file_name_mapping(&self) -> &BTreeMap { + &self.file_name_mapping + } + + pub fn filename(&self, fhash: &FileHash) -> &str { + let file_id = self.file_mapping().get(fhash).unwrap(); + self.files().get(*file_id).unwrap().name() + } + + pub fn file_path(&self, fhash: &FileHash) -> &PathBuf { + self.file_name_mapping.get(fhash).unwrap() + } + + pub fn file_hash_to_file_id(&self, fhash: &FileHash) -> Option { + self.file_mapping().get(fhash).copied() + } + + /// Like `start_position_opt`, but may panic + pub fn start_position(&self, loc: &Loc) -> Position { + self.position(loc).start + } + + /// Like `end_position_opt`, but may panic + pub fn end_position(&self, loc: &Loc) -> Position { + self.position(loc).end + } + + /// Like `position_opt`, but may panic + pub fn position(&self, loc: &Loc) -> FilePosition { + self.position_opt(loc).unwrap() + } + + /// Like `byte_span_opt`, but may panic + pub fn byte_span(&self, loc: &Loc) -> FileByteSpan { + self.byte_span_opt(loc).unwrap() + } + + pub fn start_position_opt(&self, loc: &Loc) -> Option { + self.position_opt(loc).map(|posn| posn.start) + } + + pub fn end_position_opt(&self, loc: &Loc) -> Option { + self.position_opt(loc).map(|posn| posn.end) + } + + pub fn position_opt(&self, loc: &Loc) -> Option { + let start_loc = loc.start() as usize; + let end_loc = loc.end() as usize; + let file_id = *self.file_mapping().get(&loc.file_hash())?; + let start_file_loc = self.files().location(file_id, start_loc).ok()?; + let end_file_loc = self.files().location(file_id, end_loc).ok()?; + let posn = FilePosition { + file_id, + start: Position { + line_offset: start_file_loc.line_number - 1, + column_offset: start_file_loc.column_number - 1, + byte_offset: start_loc, + }, + end: Position { + line_offset: end_file_loc.line_number - 1, + column_offset: end_file_loc.column_number - 1, + byte_offset: end_loc, + }, + }; + Some(posn) + } + + pub fn byte_span_opt(&self, loc: &Loc) -> Option { + let start = loc.start() as usize; + let end = loc.end() as usize; + let file_id = *self.file_mapping().get(&loc.file_hash())?; + let posn = FileByteSpan { + byte_span: ByteSpan { start, end }, + file_id, + }; + Some(posn) + } + + /// Given a line number and character number (both 0-indexed) in the file return the `Loc` for + /// the line. Note that the end byte is exclusive in the resultant `Loc`. + pub fn line_char_offset_to_loc_opt( + &self, + file_hash: FileHash, + line_offset: u32, + char_offset: u32, + ) -> Option { + let file_id = self.file_mapping().get(&file_hash)?; + let line_range = self + .files() + .line_range(*file_id, line_offset as usize) + .ok()?; + let offset = line_range.start as u32 + char_offset; + Some(Loc::new(file_hash, offset, offset + 1)) + } + + /// Given a line number (1-indexed) in the file return the `Loc` for the line. + pub fn line_to_loc_opt(&self, file_hash: &FileHash, line_number: usize) -> Option { + let file_id = self.file_mapping().get(file_hash)?; + let line_range = self.files().line_range(*file_id, line_number).ok()?; + Some(Loc::new( + *file_hash, + line_range.start as u32, + line_range.end as u32, + )) + } + + /// Given a location `Loc` return a new loc only for source with leading and trailing + /// whitespace removed. 
+
+    /// Given a location `Loc`, return a new `Loc` covering only the source with leading and
+    /// trailing whitespace removed.
+    pub fn trimmed_loc_opt(&self, loc: &Loc) -> Option<Loc> {
+        let source_str = self.source_of_loc_opt(loc)?;
+        let trimmed_front = source_str.trim_start();
+        let new_start = loc.start() as usize + (source_str.len() - trimmed_front.len());
+        let trimmed_back = trimmed_front.trim_end();
+        let new_end = (loc.end() as usize).saturating_sub(trimmed_front.len() - trimmed_back.len());
+        Some(Loc::new(loc.file_hash(), new_start as u32, new_end as u32))
+    }
+
+    /// Given a location `Loc`, return the source for the location. This includes any leading and
+    /// trailing whitespace.
+    pub fn source_of_loc_opt(&self, loc: &Loc) -> Option<&str> {
+        let file_id = *self.file_mapping().get(&loc.file_hash())?;
+        Some(&self.files().source(file_id).ok()?[loc.usize_range()])
+    }
+
+    /// Given a file_hash `file` and a byte index `byte_index`, compute its `Position`.
+    pub fn byte_index_to_position_opt(
+        &self,
+        file: &FileHash,
+        byte_index: ByteIndex,
+    ) -> Option<Position> {
+        let file_id = self.file_hash_to_file_id(file)?;
+        let byte_position = self.files().location(file_id, byte_index as usize).ok()?;
+        let result = Position {
+            line_offset: byte_position.line_number - 1,
+            column_offset: byte_position.column_number - 1,
+            byte_offset: byte_index as usize,
+        };
+        Some(result)
+    }
+}
+
+impl From<FilesSourceText> for MappedFiles {
+    fn from(value: FilesSourceText) -> Self {
+        MappedFiles::new(value)
+    }
+}
+
+/// Iterator for MappedFiles
+pub struct MappedFilesIter<'a> {
+    mapped_files: &'a MappedFiles,
+    keys_iter: hash_map::Iter<'a, FileHash, FileId>,
+}
+
+impl<'a> MappedFilesIter<'a> {
+    fn new(mapped_files: &'a MappedFiles) -> Self {
+        MappedFilesIter {
+            mapped_files,
+            keys_iter: mapped_files.file_mapping.iter(),
+        }
+    }
+}
+
+impl<'a> Iterator for MappedFilesIter<'a> {
+    type Item = (&'a FileHash, (&'a Symbol, &'a Arc<str>));
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let (file_hash, file_id) = self.keys_iter.next()?;
+        let Some(file) = self.mapped_files.files.get(*file_id).ok() else {
+            eprintln!("Files went wrong.");
+            return None;
+        };
+        Some((file_hash, (file.name(), file.source())))
+    }
+}
+
+impl<'a> IntoIterator for &'a MappedFiles {
+    type Item = (&'a FileHash, (&'a Symbol, &'a Arc<str>));
+    type IntoIter = MappedFilesIter<'a>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        MappedFilesIter::new(self)
+    }
+}
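For orientation, here is a minimal sketch (not part of the patch) of how the new `MappedFiles` API is driven when turning a compiler `Loc` into user-facing coordinates; the source text, byte offsets, and the `demo` function below are illustrative assumptions only.

use std::sync::Arc;

use move_command_line_common::files::FileHash;
use move_compiler::shared::files::MappedFiles;
use move_ir_types::location::Loc;
use move_symbol_pool::Symbol;

fn demo() {
    // Hypothetical source file; the hash is computed the same way the compiler does it.
    let source: Arc<str> = Arc::from("module 0x1::m {\n    public fun f() {}\n}\n");
    let fhash = FileHash::new(&source);

    let mut files = MappedFiles::empty();
    files.add(fhash, Symbol::from("sources/m.move"), source);

    // A `Loc` covering `public fun f() {}` on the second line of the file.
    let loc = Loc::new(fhash, 20, 37);
    if let Some(pos) = files.start_position_opt(&loc) {
        // 0-indexed offsets for tooling, 1-indexed values for user-facing output.
        assert_eq!(pos.line_offset(), 1);
        assert_eq!(pos.user_line(), 2);
        println!("{}:{}:{}", files.filename(&fhash), pos.user_line(), pos.user_column());
    }
}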
diff --git a/external-crates/move/crates/move-compiler/src/shared/mod.rs b/external-crates/move/crates/move-compiler/src/shared/mod.rs
index b846b0daa1358..038d7382f7d09 100644
--- a/external-crates/move/crates/move-compiler/src/shared/mod.rs
+++ b/external-crates/move/crates/move-compiler/src/shared/mod.rs
@@ -10,7 +10,7 @@ use crate::{
     command_line as cli,
     diagnostics::{
         codes::{Category, Declarations, DiagnosticsID, Severity, WarningFilter},
-        Diagnostic, Diagnostics, DiagnosticsFormat, FileName, MappedFiles, WarningFilters,
+        Diagnostic, Diagnostics, DiagnosticsFormat, WarningFilters,
     },
     editions::{
         check_feature_or_error as edition_check_feature, feature_edition_error_msg, Edition,
@@ -20,7 +20,10 @@ use crate::{
     hlir::ast as H,
     naming::ast as N,
     parser::ast as P,
-    shared::ide::{IDEAnnotation, IDEInfo},
+    shared::{
+        files::{FileName, MappedFiles},
+        ide::{IDEAnnotation, IDEInfo},
+    },
     sui_mode,
     typing::{
         ast as T,
@@ -46,6 +49,7 @@ use std::{
 use vfs::{VfsError, VfsPath};
 
 pub mod ast_debug;
+pub mod files;
 pub mod ide;
 pub mod known_attributes;
 pub mod matching;
@@ -388,7 +392,7 @@ impl CompilationEnv {
         self.mapped_files.add(file_hash, file_name, source_text)
     }
 
-    pub fn file_mapping(&self) -> &MappedFiles {
+    pub fn mapped_files(&self) -> &MappedFiles {
         &self.mapped_files
     }
diff --git a/external-crates/move/crates/move-compiler/src/to_bytecode/translate.rs b/external-crates/move/crates/move-compiler/src/to_bytecode/translate.rs
index d154b295b9a35..7e45d817f5313 100644
--- a/external-crates/move/crates/move-compiler/src/to_bytecode/translate.rs
+++ b/external-crates/move/crates/move-compiler/src/to_bytecode/translate.rs
@@ -7,7 +7,6 @@ use crate::{
     cfgir::{ast as G, translate::move_value_from_value_},
     compiled_unit::*,
     diag,
-    diagnostics::PositionInfo,
     expansion::ast::{
         AbilitySet, Address, Attributes, ModuleIdent, ModuleIdent_, Mutability, TargetKind,
     },
@@ -1062,9 +1061,9 @@ fn exp(context: &mut Context, code: &mut IR::BytecodeBlock, e: H::Exp) {
         } => {
             let line_no = context
                 .env
-                .file_mapping()
+                .mapped_files()
                 .start_position(&line_number_loc)
-                .line;
+                .user_line();
 
             // Clamp line number to u16::MAX -- so if the line number exceeds u16::MAX, we don't
             // record the line number essentially.
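The old `.line` field is replaced by `Position::user_line()`, which is already 1-indexed. A hedged sketch of the lookup-and-clamp logic described by the surrounding comment (the helper name and exact clamp are illustrative, not the literal translate.rs code):

use move_compiler::shared::files::MappedFiles;
use move_ir_types::location::Loc;

fn clamped_line_number(mapped_files: &MappedFiles, line_number_loc: &Loc) -> u16 {
    // 1-indexed line of the location's start, via the new MappedFiles API.
    let line_no = mapped_files.start_position(line_number_loc).user_line();
    // Saturate at u16::MAX rather than wrapping, per the comment above.
    std::cmp::min(line_no, u16::MAX as usize) as u16
}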
diff --git a/external-crates/move/crates/move-compiler/src/unit_test/mod.rs b/external-crates/move/crates/move-compiler/src/unit_test/mod.rs
index 4a3a8d5b816ed..798e07c62a4f5 100644
--- a/external-crates/move/crates/move-compiler/src/unit_test/mod.rs
+++ b/external-crates/move/crates/move-compiler/src/unit_test/mod.rs
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: Apache-2.0
 
 use crate::{
-    compiled_unit::NamedCompiledModule, diagnostics::MappedFiles, shared::NumericalAddress,
+    compiled_unit::NamedCompiledModule, shared::files::MappedFiles, shared::NumericalAddress,
 };
 use move_core_types::{
     account_address::AccountAddress,
diff --git a/external-crates/move/crates/move-ir-types/src/location.rs b/external-crates/move/crates/move-ir-types/src/location.rs
index fb6ee5ba54fb9..2cca10cb6c464 100644
--- a/external-crates/move/crates/move-ir-types/src/location.rs
+++ b/external-crates/move/crates/move-ir-types/src/location.rs
@@ -72,7 +72,7 @@ impl Loc {
     /// Indicates this this location contains the provided location
     pub fn contains(&self, other: &Loc) -> bool {
-        self.file_hash == other.file_hash && self.start <= other.start && other.end <= self.end
+        self.file_hash == other.file_hash && self.start <= other.start && other.end <= self.end
     }
 
     /// Indicates this this location overlaps the provided location
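The `Loc::contains` hunk above appears to be a whitespace-only change. For reference, a small illustrative check of the unchanged semantics, which require the same file hash and use inclusive bounds on both ends (the demo values are assumptions):

use move_command_line_common::files::FileHash;
use move_ir_types::location::Loc;

fn contains_demo() {
    let fh = FileHash::empty();
    let outer = Loc::new(fh, 10, 20);
    let inner = Loc::new(fh, 10, 20); // an identical span counts as contained
    let partial = Loc::new(fh, 15, 25);
    assert!(outer.contains(&inner));
    assert!(!outer.contains(&partial));
}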
diff --git a/external-crates/move/crates/move-model/src/lib.rs b/external-crates/move/crates/move-model/src/lib.rs
index 947dba095d917..b864b996d38db 100644
--- a/external-crates/move/crates/move-model/src/lib.rs
+++ b/external-crates/move/crates/move-model/src/lib.rs
@@ -36,7 +36,7 @@ use move_symbol_pool::Symbol as MoveSymbol;
 use crate::{
     ast::ModuleName,
     builder::model_builder::ModelBuilder,
-    model::{FunId, FunctionData, GlobalEnv, Loc, ModuleData, ModuleId, DatatypeId},
+    model::{DatatypeId, FunId, FunctionData, GlobalEnv, Loc, ModuleData, ModuleId},
     options::ModelBuilderOptions,
 };
@@ -155,7 +155,7 @@ pub fn run_model_builder_with_options_and_compilation_flags<
             .iter()
             .map(|(symbol, addr)| (env.symbol_pool().make(symbol.as_str()), *addr))
             .collect();
-        env.add_source(fhash, Rc::new(aliases), fname.as_str(), fsrc, is_dep);
+        env.add_source(fhash, Rc::new(aliases), fname.as_str(), &fsrc, is_dep);
     }
 
     // If a move file does not contain any definition, it will not appear in `parsed_prog`. Add them explicitly.
@@ -167,7 +167,7 @@ pub fn run_model_builder_with_options_and_compilation_flags<
             *fhash,
             Rc::new(BTreeMap::new()),
             fname.as_str(),
-            fsrc,
+            &fsrc,
             is_dep,
         );
     }
diff --git a/external-crates/move/crates/move-package/src/compilation/build_plan.rs b/external-crates/move/crates/move-package/src/compilation/build_plan.rs
index 01e7aca2e1045..4f1c57e9a4c1e 100644
--- a/external-crates/move/crates/move-package/src/compilation/build_plan.rs
+++ b/external-crates/move/crates/move-package/src/compilation/build_plan.rs
@@ -14,11 +14,9 @@ use crate::{
 use anyhow::Result;
 use move_compiler::{
     compiled_unit::AnnotatedCompiledUnit,
-    diagnostics::{
-        report_diagnostics_to_buffer_with_env_color, report_warnings, FilesSourceText, Migration,
-    },
+    diagnostics::{report_diagnostics_to_buffer_with_env_color, report_warnings, Migration},
     editions::Edition,
-    shared::PackagePaths,
+    shared::{files::MappedFiles, PackagePaths},
     Compiler,
 };
 use move_symbol_pool::Symbol;
@@ -212,7 +210,7 @@ impl BuildPlan {
         compiler_driver: impl FnMut(
             Compiler,
         )
-            -> anyhow::Result<(FilesSourceText, Vec<AnnotatedCompiledUnit>)>,
+            -> anyhow::Result<(MappedFiles, Vec<AnnotatedCompiledUnit>)>,
     ) -> Result {
         let dependencies = self.compute_dependencies();
         self.compile_with_driver_and_deps(dependencies, writer, compiler_driver)
@@ -225,7 +223,7 @@ impl BuildPlan {
         mut compiler_driver: impl FnMut(
             Compiler,
         )
-            -> anyhow::Result<(FilesSourceText, Vec<AnnotatedCompiledUnit>)>,
+            -> anyhow::Result<(MappedFiles, Vec<AnnotatedCompiledUnit>)>,
     ) -> Result {
         let CompilationDependencies {
             root_package,
diff --git a/external-crates/move/crates/move-package/src/compilation/compiled_package.rs b/external-crates/move/crates/move-package/src/compilation/compiled_package.rs
index 7a4720a4c259d..998b18ea0e346 100644
--- a/external-crates/move/crates/move-package/src/compilation/compiled_package.rs
+++ b/external-crates/move/crates/move-package/src/compilation/compiled_package.rs
@@ -23,10 +23,9 @@ use move_command_line_common::files::{
 };
 use move_compiler::{
     compiled_unit::{AnnotatedCompiledUnit, CompiledUnit, NamedCompiledModule},
-    diagnostics::FilesSourceText,
     editions::Flavor,
     linters,
-    shared::{NamedAddressMap, NumericalAddress, PackageConfig, PackagePaths},
+    shared::{files::MappedFiles, NamedAddressMap, NumericalAddress, PackageConfig, PackagePaths},
     sui_mode::{self},
     Compiler,
 };
@@ -532,7 +531,7 @@ impl CompiledPackage {
         resolved_package: Package,
         transitive_dependencies: Vec,
         resolution_graph: &ResolvedGraph,
-        compiler_driver: impl FnMut(Compiler) -> Result<(FilesSourceText, Vec<AnnotatedCompiledUnit>)>,
+        compiler_driver: impl FnMut(Compiler) -> Result<(MappedFiles, Vec<AnnotatedCompiledUnit>)>,
     ) -> Result {
         let BuildResult {
             root_package_name,
@@ -552,7 +551,13 @@ impl CompiledPackage {
         let mut root_compiled_units = vec![];
         let mut deps_compiled_units = vec![];
         for annot_unit in all_compiled_units {
-            let source_path = PathBuf::from(file_map[&annot_unit.loc().file_hash()].0.as_str());
+            let source_path = PathBuf::from(
+                file_map
+                    .get(&annot_unit.loc().file_hash())
+                    .unwrap()
+                    .0
+                    .as_str(),
+            );
             let package_name = annot_unit.named_module.package_name.unwrap();
             let unit = CompiledUnitWithSource {
                 unit: annot_unit.into_compiled_unit(),
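Under the new bound, a package build driver hands back a `MappedFiles` instead of a `FilesSourceText`. A hedged sketch of such a driver follows; it assumes `Compiler::build_and_report` now also yields `MappedFiles`, which is outside the hunks shown here.

use anyhow::Result;
use move_compiler::{compiled_unit::AnnotatedCompiledUnit, shared::files::MappedFiles, Compiler};

// Illustrative driver matching the `compiler_driver` bound above.
fn driver(compiler: Compiler) -> Result<(MappedFiles, Vec<AnnotatedCompiledUnit>)> {
    compiler.build_and_report()
}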
format!("{}-{}", start_line, end_line) } }; buf.push_str( @@ -582,7 +585,7 @@ impl TestResults { "│ {}", format!( "This test uses randomly generated inputs. Rerun with `{}` to recreate this test failure.\n", - format!("test {} --seed {}", + format!("test {} --seed {}", test_name, seed ).bright_red().bold()