From 6e1fcaf0b5417e546574ebce1175bb6f418513eb Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 17 Jul 2024 00:25:32 +0800 Subject: [PATCH 01/19] feat: allow multiple compiler configs --- crates/compilers/src/compile/project.rs | 27 ++-- crates/compilers/src/compilers/mod.rs | 11 ++ crates/compilers/src/compilers/multi.rs | 26 +++- crates/compilers/src/compilers/solc/mod.rs | 65 +++++++- .../compilers/src/compilers/vyper/settings.rs | 21 ++- crates/compilers/src/lib.rs | 31 ++-- crates/compilers/src/project_util/mod.rs | 16 +- crates/compilers/src/resolver/mod.rs | 139 ++++++++++++------ crates/compilers/tests/project.rs | 65 ++++++-- 9 files changed, 299 insertions(+), 102 deletions(-) diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index b76c47a5..eeb59558 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -117,7 +117,7 @@ use semver::Version; use std::{collections::HashMap, path::PathBuf, time::Instant}; /// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources = HashMap>; +pub(crate) type VersionedSources = HashMap>; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { @@ -125,7 +125,7 @@ pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { edges: GraphEdges, project: &'a Project, /// how to compile all the sources - sources: CompilerSources, + sources: CompilerSources, } impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { @@ -146,11 +146,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { sources.retain(|f, _| filter.is_match(f)) } let graph = Graph::resolve_sources(&project.paths, sources)?; - let (sources, edges) = graph.into_sources_by_version( - project.offline, - &project.locked_versions, - &project.compiler, - )?; + let (sources, edges) = graph.into_sources_by_version(project)?; // If there are multiple 
different versions, and we can use multiple jobs we can compile // them in parallel. @@ -217,7 +213,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { #[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput, C: Compiler> { /// Contains all the sources to compile. - sources: CompilerSources, + sources: CompilerSources, /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled cache: ArtifactsCache<'a, T, C>, @@ -357,14 +353,14 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { /// Determines how the `solc <-> sources` pairs are executed. #[derive(Debug, Clone)] -struct CompilerSources { +struct CompilerSources { /// The sources to compile. - sources: VersionedSources, + sources: VersionedSources, /// The number of jobs to use for parallel compilation. jobs: Option, } -impl CompilerSources { +impl CompilerSources { /// Converts all `\\` separators to `/`. /// /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the @@ -394,7 +390,7 @@ impl CompilerSources { ) { cache.remove_dirty_sources(); for versioned_sources in self.sources.values_mut() { - for (version, sources) in versioned_sources { + for (version, sources, _) in versioned_sources { trace!("Filtering {} sources for {}", sources.len(), version); cache.filter(sources, version); trace!( @@ -407,7 +403,7 @@ impl CompilerSources { } /// Compiles all the files with `Solc` - fn compile, T: ArtifactOutput>( + fn compile, T: ArtifactOutput>( self, cache: &mut ArtifactsCache<'_, T, C>, ) -> Result> { @@ -424,7 +420,7 @@ impl CompilerSources { let mut jobs = Vec::new(); for (language, versioned_sources) in self.sources { - for (version, sources) in versioned_sources { + for (version, sources, mut opt_settings) in versioned_sources { if sources.is_empty() { // nothing to compile trace!("skip {} for empty sources set", version); @@ -433,7 +429,6 @@ impl CompilerSources { // depending on the 
composition of the filtered sources, the output selection can be // optimized - let mut opt_settings = project.settings.clone(); let actually_dirty = sparse_output.sparse_sources(&sources, &mut opt_settings, graph); @@ -678,7 +673,7 @@ mod tests { // single solc assert_eq!(len, 1); - let filtered = &sources.values().next().unwrap().values().next().unwrap(); + let filtered = &sources.values().next().unwrap()[0].1; // 3 contracts total assert_eq!(filtered.0.len(), 3); diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs index aa4b7230..b632e645 100644 --- a/crates/compilers/src/compilers/mod.rs +++ b/crates/compilers/src/compilers/mod.rs @@ -61,10 +61,18 @@ impl fmt::Display for CompilerVersion { } } +pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { + fn merge(&mut self, other: &Self); +} + /// Compilation settings including evm_version, output_selection, etc. pub trait CompilerSettings: Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static { + /// We allow configuring settings restrictions which might optionally contain specific + /// requiremets for compiler configuration. e.g. min/max evm_version, optimizer runs + type Restrictions: CompilerSettingsRestrictions; + /// Executes given fn with mutable reference to configured [OutputSelection]. fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy); @@ -97,6 +105,9 @@ pub trait CompilerSettings: fn with_include_paths(self, _include_paths: &BTreeSet) -> Self { self } + + /// Returns whether current settings satisfy given restrictions. + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool; } /// Input of a compiler, including sources and settings used for their compilation. 
diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index 832cdf62..47541f2a 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -1,5 +1,5 @@ use super::{ - solc::{SolcCompiler, SolcVersionedInput, SOLC_EXTENSIONS}, + solc::{SolcCompiler, SolcSettings, SolcVersionedInput, SOLC_EXTENSIONS}, vyper::{ input::VyperVersionedInput, parser::VyperParsedSource, Vyper, VyperLanguage, VYPER_EXTENSIONS, @@ -10,7 +10,9 @@ use super::{ use crate::{ artifacts::vyper::{VyperCompilationError, VyperSettings}, resolver::parse::SolData, - solc::SolcSettings, + settings::VyperRestrictions, + solc::SolcRestrictions, + CompilerSettingsRestrictions, }; use foundry_compilers_artifacts::{ error::SourceLocation, @@ -129,6 +131,19 @@ impl fmt::Display for MultiCompilerError { } } +#[derive(Clone, Copy, Debug, Default)] +pub struct MultiCompilerRestrictions { + pub solc: SolcRestrictions, + pub vyper: VyperRestrictions, +} + +impl CompilerSettingsRestrictions for MultiCompilerRestrictions { + fn merge(&mut self, other: &Self) { + self.solc.merge(&other.solc); + self.vyper.merge(&other.vyper); + } +} + /// Settings for the [MultiCompiler]. Includes settings for both Solc and Vyper compilers. 
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct MultiCompilerSettings { @@ -137,6 +152,8 @@ pub struct MultiCompilerSettings { } impl CompilerSettings for MultiCompilerSettings { + type Restrictions = MultiCompilerRestrictions; + fn can_use_cached(&self, other: &Self) -> bool { self.solc.can_use_cached(&other.solc) && self.vyper.can_use_cached(&other.vyper) } @@ -173,6 +190,11 @@ impl CompilerSettings for MultiCompilerSettings { vyper: self.vyper.with_remappings(remappings), } } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + self.solc.satisfies_restrictions(&restrictions.solc) + && self.vyper.satisfies_restrictions(&restrictions.vyper) + } } impl From for SolcSettings { diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 4a4b36a9..1f38f647 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -2,14 +2,14 @@ use super::{ CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, Language, ParsedSource, }; -use crate::resolver::parse::SolData; +use crate::{resolver::parse::SolData, CompilerSettingsRestrictions}; pub use foundry_compilers_artifacts::SolcLanguage; use foundry_compilers_artifacts::{ error::SourceLocation, output_selection::OutputSelection, remappings::Remapping, sources::{Source, Sources}, - Error, Settings, Severity, SolcInput, + Error, EvmVersion, Settings, Severity, SolcInput, }; use foundry_compilers_core::error::Result; use itertools::Itertools; @@ -190,7 +190,58 @@ impl DerefMut for SolcSettings { } } +#[derive(Debug, Clone, Copy, Default)] +pub struct EvmVersionRestriction { + pub min_evm_version: Option, + pub max_evm_version: Option, +} + +impl EvmVersionRestriction { + /// Returns true if the given version satisfies the restrictions + /// + /// If given None, only returns true if no restrictions are set + pub fn satisfies(&self, 
version: Option) -> bool { + self.min_evm_version.map_or(true, |min| version.map_or(false, |v| v >= min)) + && self.max_evm_version.map_or(true, |max| version.map_or(false, |v| v <= max)) + } + + pub fn merge(&mut self, other: &Self) { + let Self { min_evm_version, max_evm_version } = other; + + if let Some(min_evm_version) = min_evm_version { + if self.min_evm_version.map_or(true, |e| e < *min_evm_version) { + self.min_evm_version.replace(*min_evm_version); + } + } + + if let Some(max_evm_version) = max_evm_version { + if self.max_evm_version.map_or(true, |e| e > *max_evm_version) { + self.max_evm_version.replace(*max_evm_version); + } + } + } +} + +#[derive(Debug, Clone, Copy, Default)] +pub struct SolcRestrictions { + pub evm_version: EvmVersionRestriction, + pub via_ir: Option, +} + +impl CompilerSettingsRestrictions for SolcRestrictions { + fn merge(&mut self, other: &Self) { + self.evm_version.merge(&other.evm_version); + + // Preserve true + if self.via_ir.map_or(true, |via_ir| !via_ir) { + self.via_ir = other.via_ir; + } + } +} + impl CompilerSettings for SolcSettings { + type Restrictions = SolcRestrictions; + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { f(&mut self.settings.output_selection) } @@ -247,6 +298,16 @@ impl CompilerSettings for SolcSettings { self.cli_settings.include_paths.clone_from(include_paths); self } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + let mut satisfies = true; + + satisfies &= restrictions.evm_version.satisfies(self.evm_version); + satisfies &= + restrictions.via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); + + satisfies + } } impl ParsedSource for SolData { diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs index 21ae4526..4e1a8b30 100644 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ b/crates/compilers/src/compilers/vyper/settings.rs @@ -1,10 +1,25 @@ 
use std::{collections::BTreeSet, path::PathBuf}; pub use crate::artifacts::vyper::VyperSettings; -use crate::compilers::CompilerSettings; +use crate::{ + compilers::CompilerSettings, solc::EvmVersionRestriction, CompilerSettingsRestrictions, +}; use foundry_compilers_artifacts::output_selection::OutputSelection; +#[derive(Clone, Copy, Debug, Default)] +pub struct VyperRestrictions { + pub evm_version: EvmVersionRestriction, +} + +impl CompilerSettingsRestrictions for VyperRestrictions { + fn merge(&mut self, other: &Self) { + self.evm_version.merge(&other.evm_version); + } +} + impl CompilerSettings for VyperSettings { + type Restrictions = VyperRestrictions; + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection)) { f(&mut self.output_selection) } @@ -30,4 +45,8 @@ impl CompilerSettings for VyperSettings { self.search_paths = Some(include_paths.clone()); self } + + fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { + restrictions.evm_version.satisfies(self.evm_version) + } } diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index fc2f8c3a..3a25a02c 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -71,12 +71,14 @@ use std::{ #[derivative(Debug)] pub struct Project { pub compiler: C, - /// Compiler versions locked for specific languages. - pub locked_versions: HashMap, /// The layout of the project pub paths: ProjectPathsConfig, /// The compiler settings pub settings: C::Settings, + /// Additional settings for cases when default compiler settings are not enough to cover all + /// possible restrictions. + pub additional_settings: Vec, + pub restrictions: BTreeMap::Restrictions>, /// Whether caching is enabled pub cached: bool, /// Whether to output build information with each solc call. 
@@ -142,6 +144,10 @@ impl Project { pub fn artifacts_handler(&self) -> &T { &self.artifacts } + + pub fn settings_profiles(&self) -> impl Iterator { + std::iter::once(&self.settings).chain(self.additional_settings.iter()) + } } impl Project @@ -446,8 +452,6 @@ impl Project { pub struct ProjectBuilder { /// The layout of the paths: Option>, - /// Compiler versions locked for specific languages. - locked_versions: HashMap, /// How solc invocation should be configured. settings: Option, /// Whether caching is enabled, default is true. @@ -489,7 +493,6 @@ impl ProjectBuilder { compiler_severity_filter: Severity::Error, solc_jobs: None, settings: None, - locked_versions: Default::default(), sparse_output: None, } } @@ -606,18 +609,6 @@ impl ProjectBuilder { self.solc_jobs(1) } - #[must_use] - pub fn locked_version(mut self, lang: impl Into, version: Version) -> Self { - self.locked_versions.insert(lang.into(), version); - self - } - - #[must_use] - pub fn locked_versions(mut self, versions: HashMap) -> Self { - self.locked_versions = versions; - self - } - #[must_use] pub fn sparse_output(mut self, filter: F) -> Self where @@ -641,7 +632,6 @@ impl ProjectBuilder { slash_paths, ignored_file_paths, settings, - locked_versions, sparse_output, .. 
} = self; @@ -658,7 +648,6 @@ impl ProjectBuilder { solc_jobs, build_info, settings, - locked_versions, sparse_output, } } @@ -677,7 +666,6 @@ impl ProjectBuilder { build_info, slash_paths, settings, - locked_versions, sparse_output, } = self; @@ -704,8 +692,9 @@ impl ProjectBuilder { offline, slash_paths, settings: settings.unwrap_or_default(), - locked_versions, sparse_output, + additional_settings: Default::default(), + restrictions: Default::default(), }) } } diff --git a/crates/compilers/src/project_util/mod.rs b/crates/compilers/src/project_util/mod.rs index 36fa73d8..b9c54de2 100644 --- a/crates/compilers/src/project_util/mod.rs +++ b/crates/compilers/src/project_util/mod.rs @@ -55,16 +55,12 @@ impl TempProject { /// Explicitly sets the solc version for the project #[cfg(feature = "svm-solc")] pub fn set_solc(&mut self, solc: &str) -> &mut Self { - use crate::compilers::{multi::MultiCompilerLanguage, solc::SolcLanguage}; - use semver::Version; - - let version = Version::parse(solc).unwrap(); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Solidity), version.clone()); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Yul), version.clone()); + use crate::solc::{Solc, SolcCompiler}; + + self.inner.compiler.solc = SolcCompiler::Specific( + Solc::find_svm_installed_version(&solc.parse().unwrap()).unwrap().unwrap(), + ); + self } } diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 01e3e520..6c5fbdba 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -48,7 +48,7 @@ use crate::{ compilers::{Compiler, CompilerVersion, Language, ParsedSource}, project::VersionedSources, - ProjectPathsConfig, + ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, }; use core::fmt; use foundry_compilers_artifacts::sources::{Source, Sources}; @@ -460,12 +460,15 @@ impl> Graph { /// /// First we determine the compatible 
version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. - pub fn into_sources_by_version>( + pub fn into_sources_by_version( self, - offline: bool, - locked_versions: &HashMap, - compiler: &C, - ) -> Result<(VersionedSources, GraphEdges)> { + project: &Project, + ) -> Result<(VersionedSources, GraphEdges)> + where + T: ArtifactOutput, + S: CompilerSettings, + C: Compiler, + { /// insert the imports of the given node into the sources map /// There can be following graph: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` @@ -496,38 +499,48 @@ impl> Graph { } } - let versioned_nodes_by_lang = - self.get_input_node_versions(offline, locked_versions, compiler)?; + let versioned_nodes = self.get_input_node_versions(project)?; + let versioned_nodes = self.resolve_settings(project, versioned_nodes); let (nodes, edges) = self.split(); let mut all_nodes = nodes.into_iter().enumerate().collect::>(); let mut resulted_sources = HashMap::new(); - // determine the `Sources` set for each solc version - for (language, versioned_nodes) in versioned_nodes_by_lang { - let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); - - for (version, input_node_indices) in versioned_nodes { - let mut sources = Sources::new(); - - // all input nodes will be processed - let mut processed_sources = input_node_indices.iter().copied().collect(); + let profiles = project.settings_profiles().collect::>(); - // we only process input nodes (from sources, tests for example) - for idx in input_node_indices { - // insert the input node in the sources set and remove it from the available set - let (path, source) = all_nodes.get(&idx).cloned().expect("node is preset. 
qed"); - sources.insert(path, source); - insert_imports( - idx, - &mut all_nodes, - &mut sources, - &edges.edges, - &mut processed_sources, - ); + // determine the `Sources` set for each solc version + for (language, versioned_nodes) in versioned_nodes { + let mut versioned_sources = Vec::with_capacity(versioned_nodes.len()); + + for (version, profile_to_nodes) in versioned_nodes { + for (profile_idx, input_node_indixies) in profile_to_nodes { + let mut sources = Sources::new(); + + // all input nodes will be processed + let mut processed_sources = input_node_indixies.iter().copied().collect(); + + // we only process input nodes (from sources, tests for example) + for idx in input_node_indixies { + // insert the input node in the sources set and remove it from the available + // set + let (path, source) = + all_nodes.get(&idx).cloned().expect("node is preset. qed"); + sources.insert(path, source); + insert_imports( + idx, + &mut all_nodes, + &mut sources, + &edges.edges, + &mut processed_sources, + ); + } + versioned_sources.push(( + version.clone(), + sources, + profiles[profile_idx].clone(), + )); } - versioned_sources.insert(version, sources); } resulted_sources.insert(language, versioned_sources); @@ -580,6 +593,25 @@ impl> Graph { } } + fn retain_compatible_profiles( + &self, + idx: usize, + project: &Project, + candidates: &mut Vec<(usize, &C::Settings)>, + ) { + let nodes: HashSet<_> = self.node_ids(idx).collect(); + for node in nodes { + let node = self.node(node); + if let Some(requirement) = project.restrictions.get(&node.path) { + candidates.retain(|(_, settings)| settings.satisfies_restrictions(requirement)); + } + if candidates.is_empty() { + // nothing to filter anymore + return; + } + } + } + fn input_nodes_by_language(&self) -> HashMap> { let mut nodes = HashMap::new(); @@ -602,19 +634,13 @@ impl> Graph { /// If `offline` is set to `true` then only already installed. 
fn get_input_node_versions>( &self, - offline: bool, - locked_versions: &HashMap, - compiler: &C, + project: &Project, ) -> Result>>> { trace!("resolving input node versions"); let mut resulted_nodes = HashMap::new(); for (language, nodes) in self.input_nodes_by_language() { - if let Some(version) = locked_versions.get(&language) { - resulted_nodes.insert(language, HashMap::from([(version.clone(), nodes)])); - continue; - } // this is likely called by an application and will be eventually printed so we don't // exit on first error, instead gather all the errors and return a bundled // error message instead @@ -623,14 +649,15 @@ impl> Graph { let mut erroneous_nodes = HashSet::with_capacity(self.edges.num_input_files); // the sorted list of all versions - let all_versions = if offline { - compiler + let all_versions = if project.offline { + project + .compiler .available_versions(&language) .into_iter() .filter(|v| v.is_installed()) .collect() } else { - compiler.available_versions(&language) + project.compiler.available_versions(&language) }; if all_versions.is_empty() && !nodes.is_empty() { @@ -654,7 +681,9 @@ impl> Graph { if candidates.is_empty() && !erroneous_nodes.contains(&idx) { // check if the version is even valid let node = self.node(idx); - if let Err(version_err) = node.check_available_version(&all_versions, offline) { + if let Err(version_err) = + node.check_available_version(&all_versions, project.offline) + { let f = utils::source_name(&node.path, &self.root).display(); errors.push(format!( "Encountered invalid solc version in {f}: {version_err}" @@ -718,6 +747,34 @@ impl> Graph { Ok(resulted_nodes) } + fn resolve_settings, T: ArtifactOutput>( + &self, + project: &Project, + input_nodes_versions: HashMap>>, + ) -> HashMap>>> { + let mut resulted_sources = HashMap::new(); + for (language, versions) in input_nodes_versions { + let mut versioned_sources = HashMap::new(); + for (version, nodes) in versions { + let mut profile_to_nodes = HashMap::new(); + 
for idx in nodes { + let mut profile_candidates = + project.settings_profiles().enumerate().collect::>(); + self.retain_compatible_profiles(idx, project, &mut profile_candidates); + + profile_to_nodes + .entry(profile_candidates[0].0) + .or_insert_with(Vec::new) + .push(idx); + } + versioned_sources.insert(version, profile_to_nodes); + } + resulted_sources.insert(language, versioned_sources); + } + + resulted_sources + } + /// Tries to find the "best" set of versions to nodes, See [Solc version /// auto-detection](#solc-version-auto-detection) /// diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index 62922569..d7f3827a 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -14,15 +14,16 @@ use foundry_compilers::{ }, flatten::Flattener, info::ContractInfo, + multi::MultiCompilerRestrictions, project_util::*, - solc::SolcSettings, + solc::{EvmVersionRestriction, SolcRestrictions}, take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, TestFileFilter, }; use foundry_compilers_artifacts::{ output_selection::OutputSelection, remappings::Remapping, BytecodeHash, DevDoc, Error, - ErrorDoc, EventDoc, Libraries, MethodDoc, ModelCheckerEngine::CHC, ModelCheckerSettings, - Settings, Severity, SolcInput, UserDoc, UserDocNotice, + ErrorDoc, EventDoc, EvmVersion, Libraries, MethodDoc, ModelCheckerEngine::CHC, + ModelCheckerSettings, Settings, Severity, SolcInput, UserDoc, UserDocNotice, }; use foundry_compilers_core::{ error::SolcError, @@ -3845,12 +3846,14 @@ fn test_deterministic_metadata() { let orig_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); copy_dir_all(&orig_root, tmp_dir.path()).unwrap(); + let compiler = MultiCompiler { + solc: SolcCompiler::Specific( + Solc::find_svm_installed_version(&Version::new(0, 8, 18)).unwrap().unwrap(), + ), + vyper: None, + }; let paths = 
ProjectPathsConfig::builder().root(root).build().unwrap(); - let project = Project::builder() - .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18)) - .paths(paths) - .build(MultiCompiler::default()) - .unwrap(); + let project = Project::builder().paths(paths).build(compiler).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); @@ -3990,7 +3993,6 @@ fn test_can_compile_multi() { } // This is a reproduction of https://github.com/foundry-rs/compilers/issues/47 -#[cfg(feature = "svm-solc")] #[test] fn remapping_trailing_slash_issue47() { use std::sync::Arc; @@ -4021,3 +4023,48 @@ fn remapping_trailing_slash_issue47() { let output = compiler.compile_exact(&input).unwrap(); assert!(!output.has_error()); } + +#[test] +fn test_settings_restrictions() { + let mut project = TempProject::::dapptools().unwrap(); + // default EVM version is Paris, Cancun contract won't compile + project.project_mut().settings.solc.evm_version = Some(EvmVersion::Paris); + + let cancun_path = project + .add_source( + "Cancun.sol", + r#" +contract TransientContract { + function lock()public { + assembly { + tstore(0, 1) + } + } +}"#, + ) + .unwrap(); + + project.add_source("CancunImporter.sol", "import \"./Cancun.sol\";").unwrap(); + project.add_source("Simple.sol", "contract SimpleContract {}").unwrap(); + + // Add config with Cancun enabled + let mut cancun_settings = project.project().settings.clone(); + cancun_settings.solc.evm_version = Some(EvmVersion::Cancun); + project.project_mut().additional_settings.push(cancun_settings); + + let cancun_restriction = MultiCompilerRestrictions { + solc: SolcRestrictions { + evm_version: EvmVersionRestriction { + min_evm_version: Some(EvmVersion::Cancun), + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + // Restrict compiling Cancun contract to Cancun EVM version + project.project_mut().restrictions.insert(cancun_path, cancun_restriction); + + 
project.compile().unwrap().assert_success(); +} From 1b219b766972726497712889f029eedf24c0b122 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 13 Aug 2024 23:09:05 +0800 Subject: [PATCH 02/19] wip --- crates/compilers/src/artifact_output/mod.rs | 110 ++++++++------ crates/compilers/src/cache.rs | 140 ++++++++++++------ .../compilers/src/compile/output/contracts.rs | 1 + crates/compilers/src/compile/output/mod.rs | 5 +- .../compilers/src/compile/output/sources.rs | 1 + crates/compilers/src/compile/project.rs | 65 ++++---- crates/compilers/src/compilers/solc/mod.rs | 2 +- crates/compilers/src/lib.rs | 40 ++--- crates/compilers/src/resolver/mod.rs | 11 +- crates/compilers/tests/project.rs | 21 ++- 10 files changed, 248 insertions(+), 148 deletions(-) diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs index 3c7444a2..43bb624d 100644 --- a/crates/compilers/src/artifact_output/mod.rs +++ b/crates/compilers/src/artifact_output/mod.rs @@ -119,6 +119,7 @@ pub struct ArtifactFile { /// `solc` version that produced this artifact pub version: Version, pub build_id: String, + pub profile: String, } impl ArtifactFile { @@ -642,14 +643,22 @@ pub trait ArtifactOutput { /// Returns the file name for the contract's artifact /// `Greeter.json` - fn output_file_name(name: &str) -> PathBuf { - format!("{name}.json").into() - } - - /// Returns the file name for the contract's artifact and the given version - /// `Greeter.0.8.11.json` - fn output_file_name_versioned(name: &str, version: &Version) -> PathBuf { - format!("{}.{}.{}.{}.json", name, version.major, version.minor, version.patch).into() + fn output_file_name( + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + let mut name = name.to_string(); + if with_version { + name.push_str(&format!(".{}.{}.{}", version.major, version.minor, version.patch)); + } + if with_profile { + name.push_str(&format!(".{}", 
profile)); + } + name.push_str(".json"); + name.into() } /// Returns the appropriate file name for the conflicting file. @@ -724,24 +733,23 @@ pub trait ArtifactOutput { /// Returns the path to the contract's artifact location based on the contract's file and name /// /// This returns `contract.sol/contract.json` by default - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - contract_file - .file_name() - .map(Path::new) - .map(|p| p.join(Self::output_file_name(name))) - .unwrap_or_else(|| Self::output_file_name(name)) - } - - /// Returns the path to the contract's artifact location based on the contract's file, name and - /// version - /// - /// This returns `contract.sol/contract.0.8.11.json` by default - fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf { + fn output_file( + contract_file: &Path, + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { contract_file .file_name() .map(Path::new) - .map(|p| p.join(Self::output_file_name_versioned(name, version))) - .unwrap_or_else(|| Self::output_file_name_versioned(name, version)) + .map(|p| { + p.join(Self::output_file_name(name, version, profile, with_version, with_profile)) + }) + .unwrap_or_else(|| { + Self::output_file_name(name, version, profile, with_version, with_profile) + }) } /// The inverse of `contract_file_name` @@ -752,11 +760,6 @@ pub trait ArtifactOutput { file.file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) } - /// Whether the corresponding artifact of the given contract file and name exists - fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool { - root.join(Self::output_file(contract_file, name)).exists() - } - /// Read the artifact that's stored at the given path /// /// # Errors @@ -807,21 +810,19 @@ pub trait ArtifactOutput { name: &str, artifacts_folder: &Path, version: &Version, - versioned: bool, + profile: &str, + with_version: bool, + with_profile: 
bool, ) -> PathBuf { // if an artifact for the contract already exists (from a previous compile job) // we reuse the path, this will make sure that even if there are conflicting // files (files for witch `T::output_file()` would return the same path) we use // consistent output paths - if let Some(existing_artifact) = ctx.existing_artifact(file, name, version) { + if let Some(existing_artifact) = ctx.existing_artifact(file, name, version, profile) { trace!("use existing artifact file {:?}", existing_artifact,); existing_artifact.to_path_buf() } else { - let path = if versioned { - Self::output_file_versioned(file, name, version) - } else { - Self::output_file(file, name) - }; + let path = Self::output_file(file, name, version, profile, with_version, with_profile); let path = artifacts_folder.join(path); @@ -854,7 +855,9 @@ pub trait ArtifactOutput { let mut taken_paths_lowercase = ctx .existing_artifacts .values() - .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values())) + .flat_map(|artifacts| artifacts.values()) + .flat_map(|artifacts| artifacts.values()) + .flat_map(|artifacts| artifacts.values()) .map(|a| a.path.to_slash_lossy().to_lowercase()) .collect::>(); @@ -865,6 +868,10 @@ pub trait ArtifactOutput { }); for file in files { for (name, versioned_contracts) in &contracts[file] { + let unique_versions = + versioned_contracts.iter().map(|c| &c.version).collect::>(); + let unique_profiles = + versioned_contracts.iter().map(|c| &c.profile).collect::>(); for contract in versioned_contracts { // track `SourceFile`s that can be mapped to contracts let source_file = sources.find_file_and_version(file, &contract.version); @@ -880,7 +887,9 @@ pub trait ArtifactOutput { name, layout.artifacts.as_path(), &contract.version, - versioned_contracts.len() > 1, + &contract.profile, + unique_versions.len() > 1, + unique_profiles.len() > 1, ); taken_paths_lowercase.insert(artifact_path.to_slash_lossy().to_lowercase()); @@ -904,6 +913,7 @@ pub trait 
ArtifactOutput { file: artifact_path, version: contract.version.clone(), build_id: contract.build_id.clone(), + profile: contract.profile.clone(), }; artifacts @@ -921,6 +931,8 @@ pub trait ArtifactOutput { // any contract definition, which are not included in the `CompilerOutput` but we want to // create Artifacts for them regardless for (file, sources) in sources.as_ref().iter() { + let unique_versions = sources.iter().map(|s| &s.version).collect::>(); + let unique_profiles = sources.iter().map(|s| &s.profile).collect::>(); for source in sources { if !non_standalone_sources.contains(&(source.source_file.id, &source.version)) { // scan the ast as a safe measure to ensure this file does not include any @@ -945,7 +957,9 @@ pub trait ArtifactOutput { name, &layout.artifacts, &source.version, - sources.len() > 1, + &source.profile, + unique_versions.len() > 1, + unique_profiles.len() > 1, ); let entries = artifacts @@ -963,6 +977,7 @@ pub trait ArtifactOutput { file: artifact_path, version: source.version.clone(), build_id: source.build_id.clone(), + profile: source.profile.clone(), }); } } @@ -1015,8 +1030,10 @@ pub struct OutputContext<'a> { /// └── inner /// └── a.sol /// ``` - pub existing_artifacts: - BTreeMap<&'a Path, &'a BTreeMap>>, + pub existing_artifacts: BTreeMap< + &'a Path, + &'a BTreeMap>>, + >, } // === impl OutputContext @@ -1042,13 +1059,14 @@ impl<'a> OutputContext<'a> { file: &Path, contract: &str, version: &Version, + profile: &str, ) -> Option<&Path> { - self.existing_artifacts.get(file).and_then(|contracts| { - contracts - .get(contract) - .and_then(|versions| versions.get(version)) - .map(|a| a.path.as_path()) - }) + self.existing_artifacts + .get(file) + .and_then(|contracts| contracts.get(contract)) + .and_then(|versions| versions.get(version)) + .and_then(|profiles| profiles.get(profile)) + .map(|a| a.path.as_path()) } } diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 0d5d1613..575486bf 100644 --- 
a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -42,13 +42,20 @@ pub struct CompilerCache { pub format: String, /// contains all directories used for the project pub paths: ProjectPaths, - pub files: BTreeMap>, + pub files: BTreeMap, pub builds: BTreeSet, + pub profiles: BTreeMap, } impl CompilerCache { pub fn new(format: String, paths: ProjectPaths) -> Self { - Self { format, paths, files: Default::default(), builds: Default::default() } + Self { + format, + paths, + files: Default::default(), + builds: Default::default(), + profiles: Default::default(), + } } } @@ -63,7 +70,7 @@ impl CompilerCache { } /// Removes entry for the given file - pub fn remove(&mut self, file: &Path) -> Option> { + pub fn remove(&mut self, file: &Path) -> Option { self.files.remove(file) } @@ -78,17 +85,17 @@ impl CompilerCache { } /// Returns an iterator over all `CacheEntry` this cache contains - pub fn entries(&self) -> impl Iterator> { + pub fn entries(&self) -> impl Iterator { self.files.values() } /// Returns the corresponding `CacheEntry` for the file if it exists - pub fn entry(&self, file: &Path) -> Option<&CacheEntry> { + pub fn entry(&self, file: &Path) -> Option<&CacheEntry> { self.files.get(file) } /// Returns the corresponding `CacheEntry` for the file if it exists - pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> { + pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> { self.files.get_mut(file) } @@ -373,6 +380,7 @@ impl Default for CompilerCache { builds: Default::default(), files: Default::default(), paths: Default::default(), + profiles: Default::default(), } } } @@ -400,15 +408,13 @@ pub struct CachedArtifact { /// `solc` versions generating version specific artifacts. 
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct CacheEntry { +pub struct CacheEntry { /// the last modification time of this file pub last_modification_date: u64, /// hash to identify whether the content of the file changed pub content_hash: String, /// identifier name see [`foundry_compilers_core::utils::source_name()`] pub source_name: PathBuf, - /// what config was set when compiling this file - pub compiler_settings: S, /// fully resolved imports of the file /// /// all paths start relative from the project's root: `src/importedFile.sol` @@ -422,9 +428,9 @@ pub struct CacheEntry { /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different /// artifacts. /// - /// This map tracks the artifacts by `name -> (Version -> PathBuf)`. + /// This map tracks the artifacts by `name -> (Version -> profile -> PathBuf)`. /// This mimics the default artifacts directory structure - pub artifacts: BTreeMap>, + pub artifacts: BTreeMap>>, /// Whether this file was compiled at least once. /// /// If this is true and `artifacts` are empty, it means that given version of the file does @@ -435,7 +441,7 @@ pub struct CacheEntry { pub seen_by_compiler: bool, } -impl CacheEntry { +impl CacheEntry { /// Returns the last modified timestamp `Duration` pub fn last_modified(&self) -> Duration { Duration::from_millis(self.last_modification_date) @@ -456,7 +462,12 @@ impl CacheEntry { /// # } /// ``` pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> { - self.artifacts.get(contract_name)?.iter().next().map(|(_, p)| p.path.as_path()) + self.artifacts + .get(contract_name)? 
+ .iter() + .next() + .and_then(|(_, a)| a.iter().next()) + .map(|(_, p)| p.path.as_path()) } /// Reads the last modification date from the file's metadata @@ -481,13 +492,16 @@ impl CacheEntry { for (artifact_name, versioned_files) in self.artifacts.iter() { let mut files = Vec::with_capacity(versioned_files.len()); for (version, cached_artifact) in versioned_files { - let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; - files.push(ArtifactFile { - artifact, - file: cached_artifact.path.clone(), - version: version.clone(), - build_id: cached_artifact.build_id.clone(), - }); + for (profile, cached_artifact) in cached_artifact { + let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; + files.push(ArtifactFile { + artifact, + file: cached_artifact.path.clone(), + version: version.clone(), + build_id: cached_artifact.build_id.clone(), + profile: profile.clone(), + }); + } } artifacts.insert(artifact_name.clone(), files); } @@ -501,30 +515,46 @@ impl CacheEntry { { for (name, artifacts) in artifacts.into_iter() { for artifact in artifacts { - self.artifacts.entry(name.clone()).or_default().insert( - artifact.version.clone(), - CachedArtifact { - build_id: artifact.build_id.clone(), - path: artifact.file.clone(), - }, - ); + self.artifacts + .entry(name.clone()) + .or_default() + .entry(artifact.version.clone()) + .or_default() + .insert( + artifact.profile.clone(), + CachedArtifact { + build_id: artifact.build_id.clone(), + path: artifact.file.clone(), + }, + ); } } } /// Returns `true` if the artifacts set contains the given version pub fn contains_version(&self, version: &Version) -> bool { - self.artifacts_versions().any(|(v, _)| v == version) + self.artifacts_versions().any(|(v, _, _)| v == version) } /// Iterator that yields all artifact files and their version - pub fn artifacts_versions(&self) -> impl Iterator { - self.artifacts.values().flatten() + pub fn artifacts_versions(&self) -> impl Iterator { + self.artifacts + 
.values() + .flatten() + .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a))) } /// Returns the artifact file for the contract and version pair - pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&CachedArtifact> { - self.artifacts.get(contract).and_then(|files| files.get(version)) + pub fn find_artifact( + &self, + contract: &str, + version: &Version, + profile: &str, + ) -> Option<&CachedArtifact> { + self.artifacts + .get(contract) + .and_then(|files| files.get(version)) + .and_then(|files| files.get(profile)) } /// Iterator that yields all artifact files and their version @@ -532,17 +562,17 @@ impl CacheEntry { &'a self, version: &'a Version, ) -> impl Iterator + 'a { - self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then_some(file)) + self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file)) } /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(BTreeMap::values) + self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values) } /// Mutable iterator over all artifact files pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(BTreeMap::values_mut) + self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut) } /// Checks if all artifact files exist @@ -633,11 +663,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { .collect(); let entry = CacheEntry { - last_modification_date: CacheEntry::::read_last_modification_date(&file) + last_modification_date: CacheEntry::read_last_modification_date(&file) .unwrap_or_default(), content_hash: source.content_hash(), source_name: strip_prefix(&file, self.project.root()).into(), - compiler_settings: self.project.settings.clone(), imports, version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()), // artifacts remain empty until we 
received the compiler output @@ -750,6 +779,38 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { } } + let existing_profiles = self.project.settings_profiles().collect::>(); + + let mut dirty_profiles = HashSet::new(); + for (profile, settings) in &self.cache.profiles { + if !existing_profiles + .get(profile.as_str()) + .map_or(false, |p| p.can_use_cached(settings)) + { + dirty_profiles.insert(profile.clone()); + } + } + + for profile in &dirty_profiles { + self.cache.profiles.remove(profile); + } + + for (_, entry) in &mut self.cache.files { + entry.artifacts.retain(|_, artifacts| { + artifacts.retain(|_, artifacts| { + artifacts.retain(|profile, _| !dirty_profiles.contains(profile)); + !artifacts.is_empty() + }); + !artifacts.is_empty() + }); + } + + for (profile, settings) in existing_profiles { + if !self.cache.profiles.contains_key(profile) { + self.cache.profiles.insert(profile.to_string(), settings.clone()); + } + } + // Iterate over existing cache entries. 
let files = self.cache.files.keys().cloned().collect::>(); @@ -811,11 +872,6 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { return true; } - if !self.project.settings.can_use_cached(&entry.compiler_settings) { - trace!("solc config not compatible"); - return true; - } - // If any requested extra files are missing for any artifact, mark source as dirty to // generate them for artifacts in self.cached_artifacts.values() { diff --git a/crates/compilers/src/compile/output/contracts.rs b/crates/compilers/src/compile/output/contracts.rs index 99a0e0bd..004c7d03 100644 --- a/crates/compilers/src/compile/output/contracts.rs +++ b/crates/compilers/src/compile/output/contracts.rs @@ -262,6 +262,7 @@ pub struct VersionedContract { pub contract: Contract, pub version: Version, pub build_id: String, + pub profile: String, } /// A mapping of `ArtifactId` and their `CompactContractBytecode` diff --git a/crates/compilers/src/compile/output/mod.rs b/crates/compilers/src/compile/output/mod.rs index 06b5224d..4b87c6c1 100644 --- a/crates/compilers/src/compile/output/mod.rs +++ b/crates/compilers/src/compile/output/mod.rs @@ -564,6 +564,7 @@ impl AggregatedCompilerOutput { &mut self, version: Version, build_info: RawBuildInfo, + profile: &str, output: CompilerOutput, ) { let build_id = build_info.id.clone(); @@ -578,17 +579,19 @@ impl AggregatedCompilerOutput { source_file, version: version.clone(), build_id: build_id.clone(), + profile: profile.to_string(), }); } for (file_name, new_contracts) in contracts { - let contracts = self.contracts.as_mut().entry(file_name).or_default(); + let contracts = self.contracts.0.entry(file_name).or_default(); for (contract_name, contract) in new_contracts { let versioned = contracts.entry(contract_name).or_default(); versioned.push(VersionedContract { contract, version: version.clone(), build_id: build_id.clone(), + profile: profile.to_string(), }); } } diff --git a/crates/compilers/src/compile/output/sources.rs 
b/crates/compilers/src/compile/output/sources.rs index e34fb464..063c09b9 100644 --- a/crates/compilers/src/compile/output/sources.rs +++ b/crates/compilers/src/compile/output/sources.rs @@ -224,4 +224,5 @@ pub struct VersionedSourceFile { pub source_file: SourceFile, pub version: Version, pub build_id: String, + pub profile: String, } diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index eeb59558..3f1e0b24 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -117,7 +117,7 @@ use semver::Version; use std::{collections::HashMap, path::PathBuf, time::Instant}; /// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources = HashMap>; +pub(crate) type VersionedSources<'a, L, S> = HashMap>; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { @@ -125,7 +125,7 @@ pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { edges: GraphEdges, project: &'a Project, /// how to compile all the sources - sources: CompilerSources, + sources: CompilerSources<'a, C::Language, C::Settings>, } impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { @@ -213,7 +213,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { #[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput, C: Compiler> { /// Contains all the sources to compile. - sources: CompilerSources, + sources: CompilerSources<'a, C::Language, C::Settings>, /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled cache: ArtifactsCache<'a, T, C>, @@ -353,14 +353,14 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { /// Determines how the `solc <-> sources` pairs are executed. #[derive(Debug, Clone)] -struct CompilerSources { +struct CompilerSources<'a, L, S> { /// The sources to compile. 
- sources: VersionedSources, + sources: VersionedSources<'a, L, S>, /// The number of jobs to use for parallel compilation. jobs: Option<usize>, } -impl CompilerSources { +impl CompilerSources<'_, L, S> { /// Converts all `\\` separators to `/`. /// /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the @@ -370,16 +370,18 @@ impl CompilerSources { { use path_slash::PathBufExt; - self.sources.values_mut().for_each(|versioned_sources| { - versioned_sources.values_mut().for_each(|sources| { - *sources = std::mem::take(sources) - .into_iter() - .map(|(path, source)| { - (PathBuf::from(path.to_slash_lossy().as_ref()), source) - }) - .collect() - }) - }); + self.sources.values_mut().for_each( + |versioned_sources| { + versioned_sources.iter_mut().for_each(|(_, sources, _)| { + *sources = std::mem::take(sources) + .into_iter() + .map(|(path, source)| { + (PathBuf::from(path.to_slash_lossy().as_ref()), source) + }) + .collect() + }) + }, + ); } } @@ -420,7 +422,8 @@ impl CompilerSources { let mut jobs = Vec::new(); for (language, versioned_sources) in self.sources { - for (version, sources, mut opt_settings) in versioned_sources { + for (version, sources, (profile, opt_settings)) in versioned_sources { + let mut opt_settings = opt_settings.clone(); if sources.is_empty() { // nothing to compile trace!("skip {} for empty sources set", version); @@ -451,7 +454,7 @@ impl CompilerSources { input.strip_prefix(project.paths.root.as_path()); - jobs.push((input, actually_dirty)); + jobs.push((input, profile, actually_dirty)); } } @@ -463,7 +466,7 @@ impl CompilerSources { let mut aggregated = AggregatedCompilerOutput::default(); - for (input, mut output, actually_dirty) in results { + for (input, mut output, profile, actually_dirty) in results { let version = input.version(); // Mark all files as seen by the compiler @@ -480,22 +483,22 @@ impl CompilerSources { ); output.join_all(project.paths.root.as_path()); - aggregated.extend(version.clone(),
build_info, output); + aggregated.extend(version.clone(), build_info, profile, output); } Ok(aggregated) } } -type CompilationResult = Result, Vec)>>; +type CompilationResult<'a, I, E> = Result, &'a str, Vec)>>; /// Compiles the input set sequentially and returns a [Vec] of outputs. -fn compile_sequential( +fn compile_sequential<'a, C: Compiler>( compiler: &C, - jobs: Vec<(C::Input, Vec)>, -) -> CompilationResult { + jobs: Vec<(C::Input, &'a str, Vec)>, +) -> CompilationResult<'a, C::Input, C::CompilationError> { jobs.into_iter() - .map(|(input, actually_dirty)| { + .map(|(input, profile, actually_dirty)| { let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), @@ -505,17 +508,17 @@ fn compile_sequential( let output = compiler.compile(&input)?; report::compiler_success(&input.compiler_name(), input.version(), &start.elapsed()); - Ok((input, output, actually_dirty)) + Ok((input, output, profile, actually_dirty)) }) .collect() } /// compiles the input set using `num_jobs` threads -fn compile_parallel( +fn compile_parallel<'a, C: Compiler>( compiler: &C, - jobs: Vec<(C::Input, Vec)>, + jobs: Vec<(C::Input, &'a str, Vec)>, num_jobs: usize, -) -> CompilationResult { +) -> CompilationResult<'a, C::Input, C::CompilationError> { // need to get the currently installed reporter before installing the pool, otherwise each new // thread in the pool will get initialized with the default value of the `thread_local!`'s // localkey. 
This way we keep access to the reporter in the rayon pool @@ -526,7 +529,7 @@ fn compile_parallel( pool.install(move || { jobs.into_par_iter() - .map(move |(input, actually_dirty)| { + .map(move |(input, profile, actually_dirty)| { // set the reporter on this thread let _guard = report::set_scoped(&scoped_report); @@ -542,7 +545,7 @@ fn compile_parallel( input.version(), &start.elapsed(), ); - (input, output, actually_dirty) + (input, output, profile, actually_dirty) }) }) .collect() diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 1f38f647..ca5f1712 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -422,7 +422,7 @@ mod tests { ); let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap(); let mut aggregated = AggregatedCompilerOutput::::default(); - aggregated.extend(v, build_info, out_converted); + aggregated.extend(v, build_info, "default", out_converted); assert!(!aggregated.is_unchanged()); } } diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index 3a25a02c..cf3692c9 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -77,7 +77,7 @@ pub struct Project, + pub additional_settings: BTreeMap, pub restrictions: BTreeMap::Restrictions>, /// Whether caching is enabled pub cached: bool, @@ -145,8 +145,9 @@ impl Project { &self.artifacts } - pub fn settings_profiles(&self) -> impl Iterator { - std::iter::once(&self.settings).chain(self.additional_settings.iter()) + pub fn settings_profiles(&self) -> impl Iterator { + std::iter::once(("default", &self.settings)) + .chain(self.additional_settings.iter().map(|(p, s)| (p.as_str(), s))) } } @@ -726,30 +727,31 @@ impl ArtifactOutput for Project { self.artifacts_handler().handle_artifacts(contracts, artifacts) } - fn output_file_name(name: &str) -> PathBuf { - T::output_file_name(name) - } - - fn output_file_name_versioned(name: &str, version: 
&Version) -> PathBuf { - T::output_file_name_versioned(name, version) - } - - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - T::output_file(contract_file, name) + fn output_file_name( + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + T::output_file_name(name, version, profile, with_version, with_profile) } - fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf { - T::output_file_versioned(contract_file, name, version) + fn output_file( + contract_file: &Path, + name: &str, + version: &Version, + profile: &str, + with_version: bool, + with_profile: bool, + ) -> PathBuf { + T::output_file(contract_file, name, version, profile, with_version, with_profile) } fn contract_name(file: &Path) -> Option { T::contract_name(file) } - fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool { - T::output_exists(contract_file, name, root) - } - fn read_cached_artifact(path: &Path) -> Result { T::read_cached_artifact(path) } diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 6c5fbdba..0ed94e8e 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -460,10 +460,10 @@ impl> Graph { /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. 
- pub fn into_sources_by_version( + pub fn into_sources_by_version<'a, C, T, S>( self, - project: &Project, - ) -> Result<(VersionedSources, GraphEdges)> + project: &'a Project, + ) -> Result<(VersionedSources<'a, L, S>, GraphEdges)> where T: ArtifactOutput, S: CompilerSettings, @@ -597,13 +597,14 @@ impl> Graph { &self, idx: usize, project: &Project, - candidates: &mut Vec<(usize, &C::Settings)>, + candidates: &mut Vec<(usize, (&str, &C::Settings))>, ) { let nodes: HashSet<_> = self.node_ids(idx).collect(); for node in nodes { let node = self.node(node); if let Some(requirement) = project.restrictions.get(&node.path) { - candidates.retain(|(_, settings)| settings.satisfies_restrictions(requirement)); + candidates + .retain(|(_, (_, settings))| settings.satisfies_restrictions(requirement)); } if candidates.is_empty() { // nothing to filter anymore diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index d7f3827a..f0cd598f 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -4030,10 +4030,14 @@ fn test_settings_restrictions() { // default EVM version is Paris, Cancun contract won't compile project.project_mut().settings.solc.evm_version = Some(EvmVersion::Paris); + let common_path = project.add_source("Common.sol", ""); + let cancun_path = project .add_source( "Cancun.sol", r#" +import "./Common.sol"; + contract TransientContract { function lock()public { assembly { @@ -4045,12 +4049,21 @@ contract TransientContract { .unwrap(); project.add_source("CancunImporter.sol", "import \"./Cancun.sol\";").unwrap(); - project.add_source("Simple.sol", "contract SimpleContract {}").unwrap(); + project + .add_source( + "Simple.sol", + r#" +import "./Common.sol"; + +contract SimpleContract {} +"#, + ) + .unwrap(); // Add config with Cancun enabled let mut cancun_settings = project.project().settings.clone(); cancun_settings.solc.evm_version = Some(EvmVersion::Cancun); - 
project.project_mut().additional_settings.push(cancun_settings); + project.project_mut().additional_settings.insert("cancun".to_string(), cancun_settings); let cancun_restriction = MultiCompilerRestrictions { solc: SolcRestrictions { @@ -4066,5 +4079,7 @@ contract TransientContract { // Restrict compiling Cancun contract to Cancun EVM version project.project_mut().restrictions.insert(cancun_path, cancun_restriction); - project.compile().unwrap().assert_success(); + let output = project.compile().unwrap(); + + panic!("{:?}", output); } From 88fa78a1f5b083d621b42f68aa9222bae2d96e7c Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 02:07:55 +0800 Subject: [PATCH 03/19] wip --- crates/compilers/src/cache.rs | 1 + crates/compilers/src/compilers/mod.rs | 7 ++-- crates/compilers/src/compilers/multi.rs | 2 +- .../compilers/src/compilers/restrictions.rs | 32 +++++++++++++++++ crates/compilers/src/compilers/solc/mod.rs | 19 +++++++--- .../compilers/src/compilers/vyper/settings.rs | 3 +- crates/compilers/src/lib.rs | 36 +++++++++++++++++-- crates/compilers/src/resolver/mod.rs | 32 ++++++++++++----- 8 files changed, 111 insertions(+), 21 deletions(-) create mode 100644 crates/compilers/src/compilers/restrictions.rs diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 575486bf..d51397f1 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -163,6 +163,7 @@ impl CompilerCache { .entries() .flat_map(|e| e.artifacts.values()) .flat_map(|a| a.values()) + .flat_map(|a| a.values()) .any(|a| a.build_id == *build_id) { outdated.push(build_id.to_owned()); diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs index b632e645..88ee10a9 100644 --- a/crates/compilers/src/compilers/mod.rs +++ b/crates/compilers/src/compilers/mod.rs @@ -24,6 +24,9 @@ pub mod solc; pub mod vyper; pub use vyper::*; +mod restrictions; +pub use restrictions::{CompilerSettingsRestrictions, 
RestrictionsWithVersion}; + /// A compiler version is either installed (available locally) or can be downloaded, from the remote /// endpoint #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] @@ -61,10 +64,6 @@ impl fmt::Display for CompilerVersion { } } -pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { - fn merge(&mut self, other: &Self); -} - /// Compilation settings including evm_version, output_selection, etc. pub trait CompilerSettings: Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index 47541f2a..94c148cb 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -1,4 +1,5 @@ use super::{ + restrictions::CompilerSettingsRestrictions, solc::{SolcCompiler, SolcSettings, SolcVersionedInput, SOLC_EXTENSIONS}, vyper::{ input::VyperVersionedInput, parser::VyperParsedSource, Vyper, VyperLanguage, @@ -12,7 +13,6 @@ use crate::{ resolver::parse::SolData, settings::VyperRestrictions, solc::SolcRestrictions, - CompilerSettingsRestrictions, }; use foundry_compilers_artifacts::{ error::SourceLocation, diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs new file mode 100644 index 00000000..cd2917b6 --- /dev/null +++ b/crates/compilers/src/compilers/restrictions.rs @@ -0,0 +1,32 @@ +use std::{ + fmt::Debug, + ops::{Deref, DerefMut}, +}; + +use semver::VersionReq; + +pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { + fn merge(&mut self, other: &Self); +} + +/// Combines [CompilerVersionRestriction] with a restrictions on compiler versions for a given +/// source file. 
+#[derive(Debug, Clone, Default)] +pub struct RestrictionsWithVersion { + pub version: Option, + pub settings: T, +} + +impl Deref for RestrictionsWithVersion { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.settings + } +} + +impl DerefMut for RestrictionsWithVersion { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.settings + } +} diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index ca5f1712..5f502d07 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -1,13 +1,14 @@ use super::{ - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, - Language, ParsedSource, + restrictions::CompilerSettingsRestrictions, CompilationError, Compiler, CompilerInput, + CompilerOutput, CompilerSettings, CompilerVersion, Language, ParsedSource, }; -use crate::{resolver::parse::SolData, CompilerSettingsRestrictions}; +use crate::resolver::parse::SolData; pub use foundry_compilers_artifacts::SolcLanguage; use foundry_compilers_artifacts::{ error::SourceLocation, output_selection::OutputSelection, remappings::Remapping, + serde_helpers::display_from_str_opt, sources::{Source, Sources}, Error, EvmVersion, Settings, Severity, SolcInput, }; @@ -190,9 +191,11 @@ impl DerefMut for SolcSettings { } } -#[derive(Debug, Clone, Copy, Default)] +#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, Eq, PartialEq)] pub struct EvmVersionRestriction { + #[serde(default, with = "display_from_str_opt", skip_serializing_if = "Option::is_none")] pub min_evm_version: Option, + #[serde(default, with = "display_from_str_opt", skip_serializing_if = "Option::is_none")] pub max_evm_version: Option, } @@ -226,6 +229,8 @@ impl EvmVersionRestriction { pub struct SolcRestrictions { pub evm_version: EvmVersionRestriction, pub via_ir: Option, + pub min_optimizer_runs: Option, + pub max_optimizer_runs: Option, } impl 
CompilerSettingsRestrictions for SolcRestrictions { @@ -305,6 +310,12 @@ impl CompilerSettings for SolcSettings { satisfies &= restrictions.evm_version.satisfies(self.evm_version); satisfies &= restrictions.via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); + satisfies &= restrictions + .min_optimizer_runs + .map_or(true, |min| self.optimizer.runs.map_or(false, |runs| runs >= min)); + satisfies &= restrictions + .max_optimizer_runs + .map_or(true, |max| self.optimizer.runs.map_or(false, |runs| runs <= max)); satisfies } diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs index 4e1a8b30..7730086b 100644 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ b/crates/compilers/src/compilers/vyper/settings.rs @@ -2,7 +2,8 @@ use std::{collections::BTreeSet, path::PathBuf}; pub use crate::artifacts::vyper::VyperSettings; use crate::{ - compilers::CompilerSettings, solc::EvmVersionRestriction, CompilerSettingsRestrictions, + compilers::{restrictions::CompilerSettingsRestrictions, CompilerSettings}, + solc::EvmVersionRestriction, }; use foundry_compilers_artifacts::output_selection::OutputSelection; diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index cf3692c9..6529a60e 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -78,7 +78,8 @@ pub struct Project, - pub restrictions: BTreeMap::Restrictions>, + pub restrictions: + BTreeMap::Restrictions>>, /// Whether caching is enabled pub cached: bool, /// Whether to output build information with each solc call. @@ -455,6 +456,9 @@ pub struct ProjectBuilder>, /// How solc invocation should be configured. settings: Option, + additional_settings: BTreeMap, + restrictions: + BTreeMap::Restrictions>>, /// Whether caching is enabled, default is true. cached: bool, /// Whether to output build information with each solc call. 
@@ -495,6 +499,8 @@ impl ProjectBuilder { solc_jobs: None, settings: None, sparse_output: None, + additional_settings: BTreeMap::new(), + restrictions: BTreeMap::new(), } } @@ -619,6 +625,24 @@ impl ProjectBuilder { self } + #[must_use] + pub fn additional_settings(mut self, additional: BTreeMap) -> Self { + self.additional_settings = additional; + self + } + + #[must_use] + pub fn restrictions( + mut self, + restrictions: BTreeMap< + PathBuf, + RestrictionsWithVersion<::Restrictions>, + >, + ) -> Self { + self.restrictions = restrictions; + self + } + /// Set arbitrary `ArtifactOutputHandler` pub fn artifacts(self, artifacts: A) -> ProjectBuilder { let Self { @@ -634,12 +658,16 @@ impl ProjectBuilder { ignored_file_paths, settings, sparse_output, + additional_settings, + restrictions, .. } = self; ProjectBuilder { paths, cached, no_artifacts, + additional_settings, + restrictions, offline, slash_paths, artifacts, @@ -668,6 +696,8 @@ impl ProjectBuilder { slash_paths, settings, sparse_output, + additional_settings, + restrictions, } = self; let mut paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; @@ -694,8 +724,8 @@ impl ProjectBuilder { slash_paths, settings: settings.unwrap_or_default(), sparse_output, - additional_settings: Default::default(), - restrictions: Default::default(), + additional_settings, + restrictions, }) } } diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 0ed94e8e..d6a108fa 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -580,10 +580,20 @@ impl> Graph { } /// Filters incompatible versions from the `candidates`. 
- fn retain_compatible_versions(&self, idx: usize, candidates: &mut Vec<&CompilerVersion>) { + fn retain_compatible_versions( + &self, + idx: usize, + project: &Project, + candidates: &mut Vec<&CompilerVersion>, + ) { let nodes: HashSet<_> = self.node_ids(idx).collect(); for node in nodes { - if let Some(req) = &self.node(node).data.version_req() { + let node = self.node(node); + if let Some(req) = node.data.version_req() { + candidates.retain(|v| req.matches(v.as_ref())); + } + if let Some(req) = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref()) + { candidates.retain(|v| req.matches(v.as_ref())); } if candidates.is_empty() { @@ -604,7 +614,7 @@ impl> Graph { let node = self.node(node); if let Some(requirement) = project.restrictions.get(&node.path) { candidates - .retain(|(_, (_, settings))| settings.satisfies_restrictions(requirement)); + .retain(|(_, (_, settings))| settings.satisfies_restrictions(&*requirement)); } if candidates.is_empty() { // nothing to filter anymore @@ -677,7 +687,7 @@ impl> Graph { let mut candidates = all_versions.iter().collect::>(); // remove all incompatible versions from the candidates list by checking the node // and all its imports - self.retain_compatible_versions(idx, &mut candidates); + self.retain_compatible_versions(idx, project, &mut candidates); if candidates.is_empty() && !erroneous_nodes.contains(&idx) { // check if the version is even valid @@ -759,14 +769,20 @@ impl> Graph { for (version, nodes) in versions { let mut profile_to_nodes = HashMap::new(); for idx in nodes { + println!("resolving {:?}", self.node(idx).path.display()); let mut profile_candidates = project.settings_profiles().enumerate().collect::>(); self.retain_compatible_profiles(idx, project, &mut profile_candidates); - profile_to_nodes - .entry(profile_candidates[0].0) - .or_insert_with(Vec::new) - .push(idx); + if let Some((profile_idx, _)) = profile_candidates.first() { + 
profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx); + } else { + panic!( + "failed to resolve settings for node {}", + self.node(idx).path.display() + ); + } + println!("resolved"); } versioned_sources.insert(version, profile_to_nodes); } From 3abd92fbb223c0ab406243b9c4ef8a209768fd13 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 04:12:12 +0800 Subject: [PATCH 04/19] wip --- crates/compilers/src/artifact_output/mod.rs | 26 ++++++----- crates/compilers/src/cache.rs | 33 +++++++------- crates/compilers/src/compile/project.rs | 4 +- crates/compilers/src/compilers/multi.rs | 6 +-- .../compilers/src/compilers/restrictions.rs | 21 +++++++-- crates/compilers/src/compilers/solc/mod.rs | 16 ++++++- .../compilers/src/compilers/vyper/settings.rs | 2 +- crates/compilers/src/resolver/mod.rs | 29 ++++++++----- crates/compilers/tests/project.rs | 43 +++++++++++++------ 9 files changed, 117 insertions(+), 63 deletions(-) diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs index 43bb624d..697f2c36 100644 --- a/crates/compilers/src/artifact_output/mod.rs +++ b/crates/compilers/src/artifact_output/mod.rs @@ -52,6 +52,7 @@ pub struct ArtifactId { pub version: Version, /// `solc` build id pub build_id: String, + pub profile: String, } impl ArtifactId { @@ -299,6 +300,7 @@ impl Artifacts { source: source.clone(), version: artifact.version.clone(), build_id: artifact.build_id.clone(), + profile: artifact.profile.clone(), } .with_slashed_paths(), &artifact.artifact, @@ -325,6 +327,7 @@ impl Artifacts { source: source.clone(), version: artifact.version, build_id: artifact.build_id.clone(), + profile: artifact.profile.clone(), } .with_slashed_paths(), artifact.artifact, @@ -873,13 +876,11 @@ pub trait ArtifactOutput { let unique_profiles = versioned_contracts.iter().map(|c| &c.profile).collect::>(); for contract in versioned_contracts { + non_standalone_sources.insert(file); + // track 
`SourceFile`s that can be mapped to contracts let source_file = sources.find_file_and_version(file, &contract.version); - if let Some(source) = source_file { - non_standalone_sources.insert((source.id, &contract.version)); - } - let artifact_path = Self::get_artifact_path( &ctx, &taken_paths_lowercase, @@ -934,7 +935,7 @@ pub trait ArtifactOutput { let unique_versions = sources.iter().map(|s| &s.version).collect::>(); let unique_profiles = sources.iter().map(|s| &s.profile).collect::>(); for source in sources { - if !non_standalone_sources.contains(&(source.source_file.id, &source.version)) { + if !non_standalone_sources.contains(file) { // scan the ast as a safe measure to ensure this file does not include any // source units // there's also no need to create a standalone artifact for source files that @@ -962,24 +963,21 @@ pub trait ArtifactOutput { unique_profiles.len() > 1, ); - let entries = artifacts + taken_paths_lowercase + .insert(artifact_path.to_slash_lossy().to_lowercase()); + + artifacts .entry(file.clone()) .or_default() .entry(name.to_string()) - .or_default(); - - if entries.iter().all(|entry| entry.version != source.version) { - taken_paths_lowercase - .insert(artifact_path.to_slash_lossy().to_lowercase()); - - entries.push(ArtifactFile { + .or_default() + .push(ArtifactFile { artifact, file: artifact_path, version: source.version.clone(), build_id: source.build_id.clone(), profile: source.profile.clone(), }); - } } } } diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index d51397f1..c94b9a8e 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -64,11 +64,6 @@ impl CompilerCache { self.files.is_empty() } - /// Returns `true` if the cache contains any artifacts for the given file and version. 
- pub fn contains(&self, file: &Path, version: &Version) -> bool { - self.files.get(file).map_or(true, |entry| !entry.contains_version(version)) - } - /// Removes entry for the given file pub fn remove(&mut self, file: &Path) -> Option { self.files.remove(file) @@ -533,8 +528,10 @@ impl CacheEntry { } /// Returns `true` if the artifacts set contains the given version - pub fn contains_version(&self, version: &Version) -> bool { - self.artifacts_versions().any(|(v, _, _)| v == version) + pub fn contains(&self, version: &Version, profile: &str) -> bool { + self.artifacts.values().any(|artifacts| { + artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some() + }) } /// Iterator that yields all artifact files and their version @@ -688,7 +685,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { /// 2. [SourceCompilationKind::Optimized] - the file is not dirty, but is imported by a dirty /// file and thus will be processed by solc. For such files we don't need full data, so we /// are marking them as clean to optimize output selection later. - fn filter(&mut self, sources: &mut Sources, version: &Version) { + fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) { // sources that should be passed to compiler. let mut compile_complete = HashSet::new(); let mut compile_optimized = HashSet::new(); @@ -697,7 +694,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { self.sources_in_scope.insert(file.clone(), version.clone()); // If we are missing artifact for file, compile it. - if self.is_missing_artifacts(file, version) { + if self.is_missing_artifacts(file, version, profile) { compile_complete.insert(file.clone()); } @@ -731,7 +728,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { /// Returns whether we are missing artifacts for the given file and version. 
#[instrument(level = "trace", skip(self))] - fn is_missing_artifacts(&self, file: &Path, version: &Version) -> bool { + fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool { let Some(entry) = self.cache.entry(file) else { trace!("missing cache entry"); return true; @@ -745,7 +742,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { return false; } - if !entry.contains_version(version) { + if !entry.contains(version, profile) { trace!("missing linked artifacts"); return true; } @@ -788,6 +785,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { .get(profile.as_str()) .map_or(false, |p| p.can_use_cached(settings)) { + trace!("dirty profile: {}", profile); dirty_profiles.insert(profile.clone()); } } @@ -796,7 +794,11 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { self.cache.profiles.remove(profile); } - for (_, entry) in &mut self.cache.files { + self.cache.files.retain(|_, entry| { + // keep entries which already had no artifacts + if entry.artifacts.is_empty() { + return true; + } entry.artifacts.retain(|_, artifacts| { artifacts.retain(|_, artifacts| { artifacts.retain(|profile, _| !dirty_profiles.contains(profile)); @@ -804,7 +806,8 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { }); !artifacts.is_empty() }); - } + !entry.artifacts.is_empty() + }); for (profile, settings) in existing_profiles { if !self.cache.profiles.contains_key(profile) { @@ -1031,10 +1034,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { } /// Filters out those sources that don't need to be compiled - pub fn filter(&mut self, sources: &mut Sources, version: &Version) { + pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) { match self { ArtifactsCache::Ephemeral(..) 
=> {} - ArtifactsCache::Cached(cache) => cache.filter(sources, version), + ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile), } } diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 3f1e0b24..0ddac4fd 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -392,9 +392,9 @@ impl CompilerSources<'_, L, S> { ) { cache.remove_dirty_sources(); for versioned_sources in self.sources.values_mut() { - for (version, sources, _) in versioned_sources { + for (version, sources, (profile, _)) in versioned_sources { trace!("Filtering {} sources for {}", sources.len(), version); - cache.filter(sources, version); + cache.filter(sources, version, profile); trace!( "Detected {} sources to compile {:?}", sources.dirty().count(), diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index 94c148cb..f1b13415 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -138,9 +138,9 @@ pub struct MultiCompilerRestrictions { } impl CompilerSettingsRestrictions for MultiCompilerRestrictions { - fn merge(&mut self, other: &Self) { - self.solc.merge(&other.solc); - self.vyper.merge(&other.vyper); + fn merge(&mut self, other: Self) { + self.solc.merge(other.solc); + self.vyper.merge(other.vyper); } } diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs index cd2917b6..b7f52b25 100644 --- a/crates/compilers/src/compilers/restrictions.rs +++ b/crates/compilers/src/compilers/restrictions.rs @@ -6,7 +6,7 @@ use std::{ use semver::VersionReq; pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { - fn merge(&mut self, other: &Self); + fn merge(&mut self, other: Self); } /// Combines [CompilerVersionRestriction] with a restrictions on compiler versions for a given @@ -14,19 +14,32 @@ pub trait CompilerSettingsRestrictions: 
Debug + Sync + Send + Clone + Default { #[derive(Debug, Clone, Default)] pub struct RestrictionsWithVersion { pub version: Option, - pub settings: T, + pub restrictions: T, +} + +impl RestrictionsWithVersion { + pub fn merge(&mut self, other: Self) { + if let Some(version) = other.version { + if let Some(self_version) = self.version.as_mut() { + self_version.comparators.extend(version.comparators); + } else { + self.version = Some(version.clone()); + } + } + self.restrictions.merge(other.restrictions); + } } impl Deref for RestrictionsWithVersion { type Target = T; fn deref(&self) -> &Self::Target { - &self.settings + &self.restrictions } } impl DerefMut for RestrictionsWithVersion { fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.settings + &mut self.restrictions } } diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 5f502d07..106ae74c 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -234,13 +234,27 @@ pub struct SolcRestrictions { } impl CompilerSettingsRestrictions for SolcRestrictions { - fn merge(&mut self, other: &Self) { + fn merge(&mut self, other: Self) { self.evm_version.merge(&other.evm_version); // Preserve true if self.via_ir.map_or(true, |via_ir| !via_ir) { self.via_ir = other.via_ir; } + + if self + .min_optimizer_runs + .map_or(true, |min| min < other.min_optimizer_runs.unwrap_or(usize::MAX)) + { + self.min_optimizer_runs = other.min_optimizer_runs; + } + + if self + .max_optimizer_runs + .map_or(true, |max| max > other.max_optimizer_runs.unwrap_or(usize::MIN)) + { + self.max_optimizer_runs = other.max_optimizer_runs; + } } } diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs index 7730086b..cc5ddd3e 100644 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ b/crates/compilers/src/compilers/vyper/settings.rs @@ -13,7 +13,7 @@ pub struct VyperRestrictions { } 
impl CompilerSettingsRestrictions for VyperRestrictions { - fn merge(&mut self, other: &Self) { + fn merge(&mut self, other: Self) { self.evm_version.merge(&other.evm_version); } } diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index d6a108fa..501e7c65 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -500,7 +500,7 @@ impl> Graph { } let versioned_nodes = self.get_input_node_versions(project)?; - let versioned_nodes = self.resolve_settings(project, versioned_nodes); + let versioned_nodes = self.resolve_settings(project, versioned_nodes)?; let (nodes, edges) = self.split(); let mut all_nodes = nodes.into_iter().enumerate().collect::>(); @@ -557,9 +557,10 @@ impl> Graph { /// path/to/c.sol () /// ... /// ``` - fn format_imports_list( + fn format_imports_list, T: ArtifactOutput>( &self, idx: usize, + project: &Project, f: &mut W, ) -> std::result::Result<(), std::fmt::Error> { let node = self.node(idx); @@ -567,6 +568,9 @@ impl> Graph { if let Some(req) = node.data.version_req() { write!(f, "{req}")?; } + if let Some(req) = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref()) { + write!(f, "{req}")?; + } write!(f, " imports:")?; for dep in self.node_ids(idx).skip(1) { let dep = self.node(dep); @@ -701,7 +705,7 @@ impl> Graph { )); } else { let mut msg = String::new(); - self.format_imports_list(idx, &mut msg).unwrap(); + self.format_imports_list(idx, project, &mut msg).unwrap(); errors.push(format!("Found incompatible versions:\n{msg}")); } @@ -762,14 +766,14 @@ impl> Graph { &self, project: &Project, input_nodes_versions: HashMap>>, - ) -> HashMap>>> { + ) -> Result>>>> { let mut resulted_sources = HashMap::new(); + let mut errors = Vec::new(); for (language, versions) in input_nodes_versions { let mut versioned_sources = HashMap::new(); for (version, nodes) in versions { let mut profile_to_nodes = HashMap::new(); for idx in nodes { - println!("resolving {:?}", 
self.node(idx).path.display()); let mut profile_candidates = project.settings_profiles().enumerate().collect::>(); self.retain_compatible_profiles(idx, project, &mut profile_candidates); @@ -777,19 +781,22 @@ impl> Graph { if let Some((profile_idx, _)) = profile_candidates.first() { profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx); } else { - panic!( - "failed to resolve settings for node {}", - self.node(idx).path.display() - ); + let mut msg = String::new(); + self.format_imports_list(idx, project, &mut msg).unwrap(); + errors.push(format!("Found incompatible settings restrictions:\n{msg}")); } - println!("resolved"); } versioned_sources.insert(version, profile_to_nodes); } resulted_sources.insert(language, versioned_sources); } - resulted_sources + if errors.is_empty() { + Ok(resulted_sources) + } else { + error!("failed to resolve settings"); + Err(SolcError::msg(errors.join("\n"))) + } } /// Tries to find the "best" set of versions to nodes, See [Solc version diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index f0cd598f..7a1df887 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -16,9 +16,10 @@ use foundry_compilers::{ info::ContractInfo, multi::MultiCompilerRestrictions, project_util::*, - solc::{EvmVersionRestriction, SolcRestrictions}, + solc::{EvmVersionRestriction, SolcRestrictions, SolcSettings}, take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, - ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, TestFileFilter, + ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, RestrictionsWithVersion, + TestFileFilter, }; use foundry_compilers_artifacts::{ output_selection::OutputSelection, remappings::Remapping, BytecodeHash, DevDoc, Error, @@ -4030,7 +4031,7 @@ fn test_settings_restrictions() { // default EVM version is Paris, Cancun contract won't compile project.project_mut().settings.solc.evm_version = 
Some(EvmVersion::Paris); - let common_path = project.add_source("Common.sol", ""); + let common_path = project.add_source("Common.sol", "").unwrap(); let cancun_path = project .add_source( @@ -4048,8 +4049,9 @@ contract TransientContract { ) .unwrap(); - project.add_source("CancunImporter.sol", "import \"./Cancun.sol\";").unwrap(); - project + let cancun_importer_path = + project.add_source("CancunImporter.sol", "import \"./Cancun.sol\";").unwrap(); + let simple_path = project .add_source( "Simple.sol", r#" @@ -4065,21 +4067,38 @@ contract SimpleContract {} cancun_settings.solc.evm_version = Some(EvmVersion::Cancun); project.project_mut().additional_settings.insert("cancun".to_string(), cancun_settings); - let cancun_restriction = MultiCompilerRestrictions { - solc: SolcRestrictions { - evm_version: EvmVersionRestriction { - min_evm_version: Some(EvmVersion::Cancun), + let cancun_restriction = RestrictionsWithVersion { + restrictions: MultiCompilerRestrictions { + solc: SolcRestrictions { + evm_version: EvmVersionRestriction { + min_evm_version: Some(EvmVersion::Cancun), + ..Default::default() + }, ..Default::default() }, ..Default::default() }, - ..Default::default() + version: None, }; // Restrict compiling Cancun contract to Cancun EVM version - project.project_mut().restrictions.insert(cancun_path, cancun_restriction); + project.project_mut().restrictions.insert(cancun_path.clone(), cancun_restriction); let output = project.compile().unwrap(); - panic!("{:?}", output); + output.assert_success(); + + let artifacts = + output.artifact_ids().map(|(id, _)| (id.profile, id.source)).collect::>(); + + assert_eq!( + artifacts, + vec![ + ("cancun".to_string(), cancun_path), + ("cancun".to_string(), cancun_importer_path), + ("cancun".to_string(), common_path.clone()), + ("default".to_string(), common_path), + ("default".to_string(), simple_path) + ] + ); } From 55d9a60f06574090dd258fff861b8be8075957c9 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 
04:17:53 +0800 Subject: [PATCH 05/19] fix --- crates/compilers/src/compile/project.rs | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 0ddac4fd..17b2e5bd 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -370,18 +370,16 @@ impl CompilerSources<'_, L, S> { { use path_slash::PathBufExt; - self.sources.values_mut().for_each( - versioned_sources | { - versioned_sources.iter_mut().for_each(|(_, sources, _)| { - *sources = std::mem::take(sources) - .into_iter() - .map(|(path, source)| { - (PathBuf::from(path.to_slash_lossy().as_ref()), source) - }) - .collect() - }) - }, - ); + self.sources.values_mut().for_each(|versioned_sources| { + versioned_sources.iter_mut().for_each(|(_, sources, _)| { + *sources = std::mem::take(sources) + .into_iter() + .map(|(path, source)| { + (PathBuf::from(path.to_slash_lossy().as_ref()), source) + }) + .collect() + }) + }); } } From 8794b643b472d18ebebcbfa39b72a20824fdc7dd Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 04:22:01 +0800 Subject: [PATCH 06/19] fix --- crates/compilers/tests/project.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index 7a1df887..1da599cb 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -4088,8 +4088,12 @@ contract SimpleContract {} output.assert_success(); - let artifacts = - output.artifact_ids().map(|(id, _)| (id.profile, id.source)).collect::>(); + let artifacts = output + .artifact_ids() + .map(|(id, _)| (id.profile, id.source)) + .collect::>() + .into_iter() + .collect::>(); assert_eq!( artifacts, @@ -4098,7 +4102,7 @@ contract SimpleContract {} ("cancun".to_string(), cancun_importer_path), ("cancun".to_string(), common_path.clone()), ("default".to_string(), 
common_path), - ("default".to_string(), simple_path) + ("default".to_string(), simple_path), ] ); } From 0a36a53e45e823ba2cdb4761fa7b61670cbd6d3a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 04:25:42 +0800 Subject: [PATCH 07/19] fix --- crates/compilers/src/artifact_output/mod.rs | 3 ++- crates/compilers/src/compilers/restrictions.rs | 2 +- crates/compilers/src/resolver/mod.rs | 14 +++++--------- 3 files changed, 8 insertions(+), 11 deletions(-) diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs index 697f2c36..351ef533 100644 --- a/crates/compilers/src/artifact_output/mod.rs +++ b/crates/compilers/src/artifact_output/mod.rs @@ -658,7 +658,7 @@ pub trait ArtifactOutput { name.push_str(&format!(".{}.{}.{}", version.major, version.minor, version.patch)); } if with_profile { - name.push_str(&format!(".{}", profile)); + name.push_str(&format!(".{profile}")); } name.push_str(".json"); name.into() @@ -806,6 +806,7 @@ pub trait ArtifactOutput { /// Generates a path for an artifact based on already taken paths by either cached or compiled /// artifacts. + #[allow(clippy::too_many_arguments)] fn get_artifact_path( ctx: &OutputContext<'_>, already_taken: &HashSet, diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs index b7f52b25..44c4ba8b 100644 --- a/crates/compilers/src/compilers/restrictions.rs +++ b/crates/compilers/src/compilers/restrictions.rs @@ -9,7 +9,7 @@ pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { fn merge(&mut self, other: Self); } -/// Combines [CompilerVersionRestriction] with a restrictions on compiler versions for a given +/// Combines [CompilerSettingsRestrictions] with a restrictions on compiler versions for a given /// source file. 
#[derive(Debug, Clone, Default)] pub struct RestrictionsWithVersion { diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 501e7c65..0e495ed1 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -460,10 +460,10 @@ impl> Graph { /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. - pub fn into_sources_by_version<'a, C, T, S>( + pub fn into_sources_by_version( self, - project: &'a Project, - ) -> Result<(VersionedSources<'a, L, S>, GraphEdges)> + project: &Project, + ) -> Result<(VersionedSources<'_, L, S>, GraphEdges)> where T: ArtifactOutput, S: CompilerSettings, @@ -535,11 +535,7 @@ impl> Graph { &mut processed_sources, ); } - versioned_sources.push(( - version.clone(), - sources, - profiles[profile_idx].clone(), - )); + versioned_sources.push((version.clone(), sources, profiles[profile_idx])); } } @@ -618,7 +614,7 @@ impl> Graph { let node = self.node(node); if let Some(requirement) = project.restrictions.get(&node.path) { candidates - .retain(|(_, (_, settings))| settings.satisfies_restrictions(&*requirement)); + .retain(|(_, (_, settings))| settings.satisfies_restrictions(&**requirement)); } if candidates.is_empty() { // nothing to filter anymore From 855a784d8b44b7a1779dd474bf09fb424584dead Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 04:29:03 +0800 Subject: [PATCH 08/19] clippy --- crates/compilers/src/artifact_output/mod.rs | 7 ++----- crates/compilers/src/cache.rs | 4 +++- crates/compilers/src/resolver/mod.rs | 1 + 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs index 351ef533..7714a467 100644 --- a/crates/compilers/src/artifact_output/mod.rs +++ b/crates/compilers/src/artifact_output/mod.rs @@ -32,7 +32,7 @@ mod hh; pub use 
hh::*; use crate::{ - cache::{CachedArtifact, CompilerCache}, + cache::{CachedArtifacts, CompilerCache}, output::{ contracts::VersionedContracts, sources::{VersionedSourceFile, VersionedSourceFiles}, @@ -1029,10 +1029,7 @@ pub struct OutputContext<'a> { /// └── inner /// └── a.sol /// ``` - pub existing_artifacts: BTreeMap< - &'a Path, - &'a BTreeMap>>, - >, + pub existing_artifacts: BTreeMap<&'a Path, &'a CachedArtifacts>, } // === impl OutputContext diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index c94b9a8e..a493855b 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -397,6 +397,8 @@ pub struct CachedArtifact { pub build_id: String, } +pub type CachedArtifacts = BTreeMap>>; + /// A `CacheEntry` in the cache file represents a solidity file /// /// A solidity file can contain several contracts, for every contract a separate `Artifact` is @@ -426,7 +428,7 @@ pub struct CacheEntry { /// /// This map tracks the artifacts by `name -> (Version -> profile -> PathBuf)`. /// This mimics the default artifacts directory structure - pub artifacts: BTreeMap>>, + pub artifacts: CachedArtifacts, /// Whether this file was compiled at least once. 
/// /// If this is true and `artifacts` are empty, it means that given version of the file does diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 0e495ed1..a3b55204 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -758,6 +758,7 @@ impl> Graph { Ok(resulted_nodes) } + #[allow(clippy::complexity)] fn resolve_settings, T: ArtifactOutput>( &self, project: &Project, From 8a97fe55a9eb550f74cf70f34ca5b7b53ddffa11 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 14 Aug 2024 21:30:20 +0800 Subject: [PATCH 09/19] fix restrictions for optimizer runs --- crates/compilers/src/compilers/solc/mod.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 106ae74c..afd51a27 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -331,6 +331,11 @@ impl CompilerSettings for SolcSettings { .max_optimizer_runs .map_or(true, |max| self.optimizer.runs.map_or(false, |runs| runs <= max)); + // Ensure that we either don't have min optimizer runs set or that the optimizer is enabled + satisfies &= restrictions + .min_optimizer_runs + .map_or(true, |min| min == 0 || self.optimizer.enabled.unwrap_or_default()); + satisfies } } From 831873dc56b861b8aaa4480a6f5232bffb031b11 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 17 Aug 2024 22:32:50 +0800 Subject: [PATCH 10/19] fix error formatting --- crates/compilers/src/resolver/mod.rs | 31 ++++++++++++++++++---------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index a3b55204..0c5ded97 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -558,22 +558,31 @@ impl> Graph { idx: usize, project: &Project, f: &mut W, + ) -> std::result::Result<(), std::fmt::Error> { + 
self.format_node(idx, project, f)?; + write!(f, " imports:")?; + for dep in self.node_ids(idx).skip(1) { + write!(f, "\n ")?; + self.format_node(dep, project, f)?; + } + + Ok(()) + } + + /// Formats a single node along with its version requirements. + fn format_node, T: ArtifactOutput>( + &self, + idx: usize, + project: &Project, + f: &mut W, ) -> std::result::Result<(), std::fmt::Error> { let node = self.node(idx); - write!(f, "{} ", utils::source_name(&node.path, &self.root).display())?; + write!(f, "{}", utils::source_name(&node.path, &self.root).display())?; if let Some(req) = node.data.version_req() { - write!(f, "{req}")?; + write!(f, " {req}")?; } if let Some(req) = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref()) { - write!(f, "{req}")?; - } - write!(f, " imports:")?; - for dep in self.node_ids(idx).skip(1) { - let dep = self.node(dep); - write!(f, "\n {} ", utils::source_name(&dep.path, &self.root).display())?; - if let Some(req) = dep.data.version_req() { - write!(f, "{req}")?; - } + write!(f, " {req}")?; } Ok(()) From 5b42d05032bde83e36d59d52ba6e9d7b7b6b6a99 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 28 Aug 2024 04:19:39 +0400 Subject: [PATCH 11/19] Project::update_output_selection --- crates/compilers/src/lib.rs | 9 +++++++++ crates/compilers/src/project_util/mod.rs | 4 ++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index 6529a60e..16c955cd 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -449,6 +449,15 @@ impl Project { Ok(paths.remove(0)) } + + /// Invokes [CompilerSettings::update_output_selection] on the project's settings and all + /// additional settings profiles. 
+ pub fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { + self.settings.update_output_selection(f); + self.additional_settings.iter_mut().for_each(|(_, s)| { + s.update_output_selection(f); + }); + } } pub struct ProjectBuilder { diff --git a/crates/compilers/src/project_util/mod.rs b/crates/compilers/src/project_util/mod.rs index b9c54de2..4de3109b 100644 --- a/crates/compilers/src/project_util/mod.rs +++ b/crates/compilers/src/project_util/mod.rs @@ -57,9 +57,9 @@ impl TempProject { pub fn set_solc(&mut self, solc: &str) -> &mut Self { use crate::solc::{Solc, SolcCompiler}; - self.inner.compiler.solc = SolcCompiler::Specific( + self.inner.compiler.solc = Some(SolcCompiler::Specific( Solc::find_svm_installed_version(&solc.parse().unwrap()).unwrap().unwrap(), - ); + )); self } From 26a265752fef5d2f06498b43a86fa7fafb5dce42 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 6 Nov 2024 05:32:39 +0400 Subject: [PATCH 12/19] fmt --- crates/compilers/src/resolver/mod.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 0f438fda..5cf4d167 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -770,7 +770,7 @@ impl> Graph { /// If `offline` is set to `true` then only already installed. 
fn get_input_node_versions, T: ArtifactOutput>( &self, - project: &Project + project: &Project, ) -> Result>>> { trace!("resolving input node versions"); @@ -784,7 +784,8 @@ impl> Graph { // the sorted list of all versions let all_versions = if project.offline { - project.compiler + project + .compiler .available_versions(&language) .into_iter() .filter(|v| v.is_installed()) @@ -878,7 +879,9 @@ impl> Graph { for idx in nodes { let mut profile_candidates = project.settings_profiles().enumerate().collect::>(); - if let Err(err) = self.retain_compatible_profiles(idx, project, &mut profile_candidates) { + if let Err(err) = + self.retain_compatible_profiles(idx, project, &mut profile_candidates) + { errors.push(err); } else { let (profile_idx, _) = profile_candidates.first().expect("exists"); @@ -1180,9 +1183,12 @@ src/Dapp.t.sol >=0.6.6 Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas"); let paths = ProjectPathsConfig::dapptools(&root).unwrap(); let graph = Graph::::resolve(&paths).unwrap(); - let Err(SolcError::Message(err)) = - graph.get_input_node_versions(&ProjectBuilder::::default().paths(paths).build(SolcCompiler::AutoDetect).unwrap()) - else { + let Err(SolcError::Message(err)) = graph.get_input_node_versions( + &ProjectBuilder::::default() + .paths(paths) + .build(SolcCompiler::AutoDetect) + .unwrap(), + ) else { panic!("expected error"); }; From e06876fa8cfcc39ff129c379f365b7636f22b977 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 6 Nov 2024 07:03:12 +0400 Subject: [PATCH 13/19] refactor restrictions --- crates/compilers/src/compilers/multi.rs | 5 +- .../compilers/src/compilers/restrictions.rs | 4 +- crates/compilers/src/compilers/solc/mod.rs | 102 +++++++++--------- .../compilers/src/compilers/vyper/settings.rs | 10 +- crates/compilers/tests/project.rs | 7 +- 5 files changed, 61 insertions(+), 67 deletions(-) diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs index 
f1b13415..78ab9381 100644 --- a/crates/compilers/src/compilers/multi.rs +++ b/crates/compilers/src/compilers/multi.rs @@ -138,9 +138,8 @@ pub struct MultiCompilerRestrictions { } impl CompilerSettingsRestrictions for MultiCompilerRestrictions { - fn merge(&mut self, other: Self) { - self.solc.merge(other.solc); - self.vyper.merge(other.vyper); + fn merge(self, other: Self) -> Option { + Some(Self { solc: self.solc.merge(other.solc)?, vyper: self.vyper.merge(other.vyper)? }) } } diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs index 44c4ba8b..95380d15 100644 --- a/crates/compilers/src/compilers/restrictions.rs +++ b/crates/compilers/src/compilers/restrictions.rs @@ -5,8 +5,8 @@ use std::{ use semver::VersionReq; -pub trait CompilerSettingsRestrictions: Debug + Sync + Send + Clone + Default { - fn merge(&mut self, other: Self); +pub trait CompilerSettingsRestrictions: Copy + Debug + Sync + Send + Clone + Default { + fn merge(self, other: Self) -> Option; } /// Combines [CompilerSettingsRestrictions] with a restrictions on compiler versions for a given diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 88432ccd..c0fcfde1 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -8,7 +8,6 @@ use foundry_compilers_artifacts::{ error::SourceLocation, output_selection::OutputSelection, remappings::Remapping, - serde_helpers::display_from_str_opt, sources::{Source, Sources}, Error, EvmVersion, Settings, Severity, SolcInput, }; @@ -191,70 +190,73 @@ impl DerefMut for SolcSettings { } } -#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, Eq, PartialEq)] -pub struct EvmVersionRestriction { - #[serde(default, with = "display_from_str_opt", skip_serializing_if = "Option::is_none")] - pub min_evm_version: Option, - #[serde(default, with = "display_from_str_opt", skip_serializing_if = "Option::is_none")] - 
pub max_evm_version: Option, +#[derive(Debug, Clone, Copy, Eq, Default, PartialEq)] +pub struct Restriction { + pub min: Option, + pub max: Option, } -impl EvmVersionRestriction { +impl Restriction { /// Returns true if the given version satisfies the restrictions /// /// If given None, only returns true if no restrictions are set - pub fn satisfies(&self, version: Option) -> bool { - self.min_evm_version.map_or(true, |min| version.map_or(false, |v| v >= min)) - && self.max_evm_version.map_or(true, |max| version.map_or(false, |v| v <= max)) - } - - pub fn merge(&mut self, other: &Self) { - let Self { min_evm_version, max_evm_version } = other; - - if let Some(min_evm_version) = min_evm_version { - if self.min_evm_version.map_or(true, |e| e < *min_evm_version) { - self.min_evm_version.replace(*min_evm_version); + pub fn satisfies(&self, value: Option) -> bool { + self.min.map_or(true, |min| value.map_or(false, |v| v >= min)) + && self.max.map_or(true, |max| value.map_or(false, |v| v <= max)) + } + + /// Combines two restrictions into a new one + pub fn merge(self, other: Self) -> Option { + let Self { mut min, mut max } = self; + let Self { min: other_min, max: other_max } = other; + + min = min.map_or(other_min, |this_min| { + Some(other_min.map_or(this_min, |other_min| this_min.max(other_min))) + }); + max = max.map_or(other_max, |this_max| { + Some(other_max.map_or(this_max, |other_max| this_max.min(other_max))) + }); + + if let (Some(min), Some(max)) = (min, max) { + if min > max { + return None; } } - if let Some(max_evm_version) = max_evm_version { - if self.max_evm_version.map_or(true, |e| e > *max_evm_version) { - self.max_evm_version.replace(*max_evm_version); - } + Some(Self { min, max }) + } + + pub fn apply(&self, value: Option) -> Option { + match (value, self.min, self.max) { + (None, Some(min), _) => Some(min), + (None, None, Some(max)) => Some(max), + (Some(cur), Some(min), _) if cur < min => Some(min), + (Some(cur), _, Some(max)) if cur > max => 
Some(max), + _ => value, } } } #[derive(Debug, Clone, Copy, Default)] pub struct SolcRestrictions { - pub evm_version: EvmVersionRestriction, + pub evm_version: Restriction, pub via_ir: Option, - pub min_optimizer_runs: Option, - pub max_optimizer_runs: Option, + pub optimizer_runs: Restriction, } impl CompilerSettingsRestrictions for SolcRestrictions { - fn merge(&mut self, other: Self) { - self.evm_version.merge(&other.evm_version); - - // Preserve true - if self.via_ir.map_or(true, |via_ir| !via_ir) { - self.via_ir = other.via_ir; - } - - if self - .min_optimizer_runs - .map_or(true, |min| min < other.min_optimizer_runs.unwrap_or(usize::MAX)) - { - self.min_optimizer_runs = other.min_optimizer_runs; + fn merge(self, other: Self) -> Option { + if let (Some(via_ir), Some(other_via_ir)) = (self.via_ir, other.via_ir) { + if via_ir != other_via_ir { + return None; + } } - if self - .max_optimizer_runs - .map_or(true, |max| max > other.max_optimizer_runs.unwrap_or(usize::MIN)) - { - self.max_optimizer_runs = other.max_optimizer_runs; - } + Some(Self { + evm_version: self.evm_version.merge(other.evm_version)?, + via_ir: self.via_ir.or(other.via_ir), + optimizer_runs: self.optimizer_runs.merge(other.optimizer_runs)?, + }) } } @@ -324,16 +326,12 @@ impl CompilerSettings for SolcSettings { satisfies &= restrictions.evm_version.satisfies(self.evm_version); satisfies &= restrictions.via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); - satisfies &= restrictions - .min_optimizer_runs - .map_or(true, |min| self.optimizer.runs.map_or(false, |runs| runs >= min)); - satisfies &= restrictions - .max_optimizer_runs - .map_or(true, |max| self.optimizer.runs.map_or(false, |runs| runs <= max)); + satisfies &= restrictions.optimizer_runs.satisfies(self.optimizer.runs); // Ensure that we either don't have min optimizer runs set or that the optimizer is enabled satisfies &= restrictions - .min_optimizer_runs + .optimizer_runs + .min .map_or(true, |min| min == 0 || 
self.optimizer.enabled.unwrap_or_default()); satisfies diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs index cc5ddd3e..2a815d62 100644 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ b/crates/compilers/src/compilers/vyper/settings.rs @@ -3,18 +3,18 @@ use std::{collections::BTreeSet, path::PathBuf}; pub use crate::artifacts::vyper::VyperSettings; use crate::{ compilers::{restrictions::CompilerSettingsRestrictions, CompilerSettings}, - solc::EvmVersionRestriction, + solc::Restriction, }; -use foundry_compilers_artifacts::output_selection::OutputSelection; +use foundry_compilers_artifacts::{output_selection::OutputSelection, EvmVersion}; #[derive(Clone, Copy, Debug, Default)] pub struct VyperRestrictions { - pub evm_version: EvmVersionRestriction, + pub evm_version: Restriction, } impl CompilerSettingsRestrictions for VyperRestrictions { - fn merge(&mut self, other: Self) { - self.evm_version.merge(&other.evm_version); + fn merge(self, other: Self) -> Option { + Some(Self { evm_version: self.evm_version.merge(other.evm_version)? 
}) } } diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index aa9b0118..0e0cf9c4 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -16,7 +16,7 @@ use foundry_compilers::{ info::ContractInfo, multi::MultiCompilerRestrictions, project_util::*, - solc::{EvmVersionRestriction, SolcRestrictions, SolcSettings}, + solc::{Restriction, SolcRestrictions, SolcSettings}, take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, RestrictionsWithVersion, TestFileFilter, @@ -4073,10 +4073,7 @@ contract SimpleContract {} let cancun_restriction = RestrictionsWithVersion { restrictions: MultiCompilerRestrictions { solc: SolcRestrictions { - evm_version: EvmVersionRestriction { - min_evm_version: Some(EvmVersion::Cancun), - ..Default::default() - }, + evm_version: Restriction { min: Some(EvmVersion::Cancun), ..Default::default() }, ..Default::default() }, ..Default::default() From bf42af13009ac55f3720c1f8962b2447272f100a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 6 Nov 2024 22:28:45 +0400 Subject: [PATCH 14/19] add bytecode_hash restriction --- crates/compilers/src/compilers/solc/mod.rs | 27 ++++++++++++++++------ 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index c0fcfde1..7d0c1b20 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -9,7 +9,7 @@ use foundry_compilers_artifacts::{ output_selection::OutputSelection, remappings::Remapping, sources::{Source, Sources}, - Error, EvmVersion, Settings, Severity, SolcInput, + BytecodeHash, Error, EvmVersion, Settings, Severity, SolcInput, }; use foundry_compilers_core::error::Result; use itertools::Itertools; @@ -242,6 +242,7 @@ pub struct SolcRestrictions { pub evm_version: Restriction, pub via_ir: 
Option, pub optimizer_runs: Restriction, + pub bytecode_hash: Option, } impl CompilerSettingsRestrictions for SolcRestrictions { @@ -252,10 +253,19 @@ impl CompilerSettingsRestrictions for SolcRestrictions { } } + if let (Some(bytecode_hash), Some(other_bytecode_hash)) = + (self.bytecode_hash, other.bytecode_hash) + { + if bytecode_hash != other_bytecode_hash { + return None; + } + } + Some(Self { evm_version: self.evm_version.merge(other.evm_version)?, via_ir: self.via_ir.or(other.via_ir), optimizer_runs: self.optimizer_runs.merge(other.optimizer_runs)?, + bytecode_hash: self.bytecode_hash.or(other.bytecode_hash), }) } } @@ -323,14 +333,17 @@ impl CompilerSettings for SolcSettings { fn satisfies_restrictions(&self, restrictions: &Self::Restrictions) -> bool { let mut satisfies = true; - satisfies &= restrictions.evm_version.satisfies(self.evm_version); - satisfies &= - restrictions.via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); - satisfies &= restrictions.optimizer_runs.satisfies(self.optimizer.runs); + let SolcRestrictions { evm_version, via_ir, optimizer_runs, bytecode_hash } = restrictions; + + satisfies &= evm_version.satisfies(self.evm_version); + satisfies &= via_ir.map_or(true, |via_ir| via_ir == self.via_ir.unwrap_or_default()); + satisfies &= bytecode_hash.map_or(true, |bytecode_hash| { + self.metadata.as_ref().and_then(|m| m.bytecode_hash) == Some(bytecode_hash) + }); + satisfies &= optimizer_runs.satisfies(self.optimizer.runs); // Ensure that we either don't have min optimizer runs set or that the optimizer is enabled - satisfies &= restrictions - .optimizer_runs + satisfies &= optimizer_runs .min .map_or(true, |min| min == 0 || self.optimizer.enabled.unwrap_or_default()); From c41bb8e2a473a1d0a0e813d47031f3e8b5312196 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 8 Nov 2024 00:21:31 +0400 Subject: [PATCH 15/19] better msg --- crates/compilers/src/resolver/mod.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git 
a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 5cf4d167..95be025b 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -727,6 +727,13 @@ impl> Graph { all_profiles.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req)); } + if all_profiles.is_empty() { + let f = utils::source_name(&failed_node.path, &self.root).display(); + return Err( + format!("Missing profile satisfying settings restrictions for {f}").to_string(), + ); + } + // iterate over all the nodes once again and find the one incompatible for node in &nodes { if let Some(req) = project.restrictions.get(&self.node(*node).path) { From c573ee65081445be3ecaaa03c12196f63b1ddf57 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 8 Nov 2024 00:47:28 +0400 Subject: [PATCH 16/19] better docs --- crates/compilers/src/compilers/restrictions.rs | 2 ++ crates/compilers/src/compilers/solc/mod.rs | 6 ++++-- crates/compilers/src/resolver/mod.rs | 3 ++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/crates/compilers/src/compilers/restrictions.rs b/crates/compilers/src/compilers/restrictions.rs index 95380d15..acfcf29b 100644 --- a/crates/compilers/src/compilers/restrictions.rs +++ b/crates/compilers/src/compilers/restrictions.rs @@ -5,7 +5,9 @@ use std::{ use semver::VersionReq; +/// Abstraction over set of restrictions for given [`crate::Compiler::Settings`]. pub trait CompilerSettingsRestrictions: Copy + Debug + Sync + Send + Clone + Default { + /// Combines this restriction with another one. Returns `None` if restrictions are incompatible. 
fn merge(self, other: Self) -> Option; } diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 7d0c1b20..d317e67a 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -190,14 +190,15 @@ impl DerefMut for SolcSettings { } } +/// Abstraction over min/max restrictions on some value. #[derive(Debug, Clone, Copy, Eq, Default, PartialEq)] pub struct Restriction { pub min: Option, pub max: Option, } -impl Restriction { - /// Returns true if the given version satisfies the restrictions +impl Restriction { + /// Returns true if the given value satisfies the restrictions /// /// If given None, only returns true if no restrictions are set pub fn satisfies(&self, value: Option) -> bool { @@ -237,6 +238,7 @@ impl Restriction { } } +/// Restrictions on settings for the solc compiler. #[derive(Debug, Clone, Copy, Default)] pub struct SolcRestrictions { pub evm_version: Restriction, diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 95be025b..4bbd1aa6 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -695,6 +695,7 @@ impl> Graph { Err(msg) } + /// Filters profiles incompatible with the given node and its imports. 
fn retain_compatible_profiles( &self, idx: usize, @@ -730,7 +731,7 @@ impl> Graph { if all_profiles.is_empty() { let f = utils::source_name(&failed_node.path, &self.root).display(); return Err( - format!("Missing profile satisfying settings restrictions for {f}").to_string(), + format!("Missing profile satisfying settings restrictions for {f}").to_string() ); } From d3c12f846198235360109471757fc75245db4c81 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 8 Nov 2024 00:51:24 +0400 Subject: [PATCH 17/19] better docs --- crates/compilers/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index 16c955cd..ea6fa1b3 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -78,6 +78,10 @@ pub struct Project, + /// Mapping from file path to requirements on settings to compile it. + /// + /// This file will only be included in compiler inputs with profiles which satisfy the + /// restrictions. pub restrictions: BTreeMap::Restrictions>>, /// Whether caching is enabled From ffda1825e69e9e3e9c78e572addcde667508d1c1 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 8 Nov 2024 04:54:20 +0400 Subject: [PATCH 18/19] correctly format reqs --- crates/compilers/src/resolver/mod.rs | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs index 4bbd1aa6..a4085a34 100644 --- a/crates/compilers/src/resolver/mod.rs +++ b/crates/compilers/src/resolver/mod.rs @@ -556,10 +556,11 @@ impl> Graph { /// path/to/c.sol () /// ... 
/// ``` - fn format_imports_list( + fn format_imports_list( &self, idx: usize, incompatible: HashSet, + project: &Project, f: &mut W, ) -> std::result::Result<(), std::fmt::Error> { let format_node = |idx, f: &mut W| { @@ -567,7 +568,7 @@ impl> Graph { let color = if incompatible.contains(&idx) { Color::Red } else { Color::White }; let mut line = utils::source_name(&node.path, &self.root).display().to_string(); - if let Some(req) = node.data.version_req() { + if let Some(req) = self.version_requirement(idx, project) { line.push_str(&format!(" {req}")); } @@ -683,15 +684,20 @@ impl> Graph { if self.check_available_version(*node, &all_versions, project).is_err() { let mut msg = "Found incompatible versions:\n".white().to_string(); - self.format_imports_list(idx, [*node, failed_node_idx].into(), &mut msg) - .unwrap(); + self.format_imports_list( + idx, + [*node, failed_node_idx].into(), + project, + &mut msg, + ) + .unwrap(); return Err(msg); } } } let mut msg = "Found incompatible versions:\n".white().to_string(); - self.format_imports_list(idx, nodes.into_iter().collect(), &mut msg).unwrap(); + self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap(); Err(msg) } @@ -744,15 +750,20 @@ impl> Graph { { let mut msg = "Found incompatible settings restrictions:\n".white().to_string(); - self.format_imports_list(idx, [*node, failed_node_idx].into(), &mut msg) - .unwrap(); + self.format_imports_list( + idx, + [*node, failed_node_idx].into(), + project, + &mut msg, + ) + .unwrap(); return Err(msg); } } } let mut msg = "Found incompatible settings restrictions:\n".white().to_string(); - self.format_imports_list(idx, nodes.into_iter().collect(), &mut msg).unwrap(); + self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap(); Err(msg) } From c75f39a600756aae57f3164b8884fbfd427bab07 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 15 Nov 2024 18:54:03 +0400 Subject: [PATCH 19/19] bump cache format --- 
crates/compilers/src/cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 7c5ee7d0..21a738d7 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -30,7 +30,7 @@ use std::{ /// `ethers-solc` uses a different format version id, but the actual format is consistent with /// hardhat This allows ethers-solc to detect if the cache file was written by hardhat or /// `ethers-solc` -const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-3"; +const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4"; /// The file name of the default cache file pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";