diff --git a/.cargo/config b/.cargo/config.toml similarity index 100% rename from .cargo/config rename to .cargo/config.toml diff --git a/.github/workflows/rust-compile.yml b/.github/workflows/rust-compile.yml index a50dd078f..c139c5511 100644 --- a/.github/workflows/rust-compile.yml +++ b/.github/workflows/rust-compile.yml @@ -34,9 +34,7 @@ jobs: submodules: recursive - uses: actions-rust-lang/setup-rust-toolchain@v1 - run: | - for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | .name'); do - cargo rustdoc -p "$package" --all-features -- -D warnings -W unreachable-pub - done + RUSTDOCFLAGS="-Dwarnings -Wunreachable-pub" cargo doc --no-deps --all --all-features format_and_lint: name: Format and Lint diff --git a/Cargo.toml b/Cargo.toml index 3d50a39f2..d666ba24d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,8 @@ async-compression = { version = "0.4.8", features = [ "bzip2", "zstd", ] } +async-fd-lock = "0.2.0" +fs4 = "0.9.1" async-trait = "0.1.80" axum = { version = "0.7.5", default-features = false, features = [ "tokio", @@ -113,7 +115,7 @@ retry-policies = { version = "0.4.0", default-features = false } rmp-serde = { version = "1.2.0" } rstest = { version = "0.21.0" } rstest_reuse = "0.7.0" -simd-json = { version = "0.13.10" , features = ["serde_impl"]} +simd-json = { version = "0.13.10", features = ["serde_impl"] } serde = { version = "1.0.198" } serde_json = { version = "1.0.116" } serde_repr = "0.1" diff --git a/crates/rattler/src/install/installer/mod.rs b/crates/rattler/src/install/installer/mod.rs index 9bd8dde96..9ab69a8ff 100644 --- a/crates/rattler/src/install/installer/mod.rs +++ b/crates/rattler/src/install/installer/mod.rs @@ -9,6 +9,13 @@ use std::{ sync::Arc, }; +use super::{unlink_package, AppleCodeSignBehavior, InstallDriver, InstallOptions, Transaction}; +use crate::install::link_script::LinkScriptError; +use crate::{ + default_cache_dir, + install::{clobber_registry::ClobberedPath, link_script::PrePostLinkResult}, + package_cache::PackageCache, +}; pub use error::InstallerError; use futures::{stream::FuturesUnordered, FutureExt, StreamExt, TryFutureExt}; #[cfg(feature = "indicatif")] @@ -16,6 +23,8 @@ pub use indicatif::{ DefaultProgressFormatter, IndicatifReporter, IndicatifReporterBuilder, Placement, ProgressFormatter, }; +use rattler_cache::package_cache::CacheLock; +use rattler_cache::package_cache::CacheReporter; use rattler_conda_types::{ prefix_record::{Link, LinkType}, Platform, PrefixRecord, RepoDataRecord, @@ -26,14 +35,6 @@ use reqwest::Client; use simple_spawn_blocking::tokio::run_blocking_task; use tokio::{sync::Semaphore, task::JoinError}; -use super::{unlink_package, AppleCodeSignBehavior, InstallDriver, InstallOptions, Transaction}; -use crate::install::link_script::LinkScriptError; -use crate::{ - default_cache_dir, - install::{clobber_registry::ClobberedPath, link_script::PrePostLinkResult}, - package_cache::{CacheReporter, PackageCache}, -}; - /// An installer that can install packages into a prefix. 
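///
/// A minimal usage sketch (illustrative only; the prefix path is hypothetical
/// and `records` is assumed to be an already-solved set of `RepoDataRecord`s):
///
/// ```no_run
/// # async fn example(records: Vec<rattler_conda_types::RepoDataRecord>) {
/// use rattler::install::Installer;
///
/// let result = Installer::new()
///     .install("/opt/my-env", records)
///     .await
///     .expect("failed to install packages");
/// # }
/// ```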
#[derive(Default)] pub struct Installer { @@ -366,7 +367,7 @@ impl Installer { let cache_index = r.on_populate_cache_start(idx, &record); (r, cache_index) }); - let cache_path = populate_cache( + let cache_lock = populate_cache( &record, downloader, &package_cache, @@ -376,7 +377,7 @@ impl Installer { if let Some((reporter, index)) = populate_cache_report { reporter.on_populate_cache_complete(index); } - Ok((cache_path, record)) + Ok((cache_lock, record)) }) .map_err(JoinError::try_into_panic) .map(|res| match res { @@ -405,14 +406,14 @@ impl Installer { } // Install the package if it was fetched. - if let Some((cached_path, record)) = package_to_install.await? { + if let Some((cache_lock, record)) = package_to_install.await? { let reporter = reporter .as_deref() .map(|r| (r, r.on_link_start(idx, &record))); link_package( &record, prefix.as_ref(), - &cached_path, + cache_lock.path(), base_install_options.clone(), driver, ) @@ -519,7 +520,7 @@ async fn populate_cache( downloader: reqwest_middleware::ClientWithMiddleware, cache: &PackageCache, reporter: Option<(Arc, usize)>, -) -> Result { +) -> Result { struct CacheReporterBridge { reporter: Arc, cache_index: usize, diff --git a/crates/rattler/src/install/link_script.rs b/crates/rattler/src/install/link_script.rs index b04bec345..ce8b05c92 100644 --- a/crates/rattler/src/install/link_script.rs +++ b/crates/rattler/src/install/link_script.rs @@ -2,6 +2,7 @@ use std::{ borrow::Borrow, collections::{HashMap, HashSet}, + fmt::{Display, Formatter}, path::Path, }; @@ -57,11 +58,11 @@ impl LinkScriptType { } } -impl ToString for LinkScriptType { - fn to_string(&self) -> String { +impl Display for LinkScriptType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - LinkScriptType::PreUnlink => "pre-unlink".to_string(), - LinkScriptType::PostLink => "post-link".to_string(), + LinkScriptType::PreUnlink => write!(f, "pre-unlink"), + LinkScriptType::PostLink => write!(f, "post-link"), } } } @@ -103,7 +104,7 @@ pub fn run_link_scripts<'a>( let mut messages = HashMap::::new(); for record in prefix_records { let prec = &record.repodata_record.package_record; - let link_file = target_prefix.join(&link_script_type.get_path(prec, platform)); + let link_file = target_prefix.join(link_script_type.get_path(prec, platform)); if link_file.exists() { env.insert( diff --git a/crates/rattler/src/install/mod.rs b/crates/rattler/src/install/mod.rs index 21be0f2d7..1f5b7e20f 100644 --- a/crates/rattler/src/install/mod.rs +++ b/crates/rattler/src/install/mod.rs @@ -799,7 +799,7 @@ mod test { async move { // Populate the cache let package_info = ArchiveIdentifier::try_from_url(&package_url).unwrap(); - let package_dir = package_cache + let package_cache_lock = package_cache .get_or_fetch_from_url( package_info, package_url.clone(), @@ -811,7 +811,7 @@ mod test { // Install the package to the prefix link_package( - &package_dir, + package_cache_lock.path(), prefix_path, install_driver, InstallOptions { diff --git a/crates/rattler/src/install/test_utils.rs b/crates/rattler/src/install/test_utils.rs index 4a2a17786..2ef6510e6 100644 --- a/crates/rattler/src/install/test_utils.rs +++ b/crates/rattler/src/install/test_utils.rs @@ -45,7 +45,7 @@ pub async fn install_package_to_environment( // Create the conda-meta directory if it doesnt exist yet. 
    let target_prefix = target_prefix.to_path_buf();
-    match tokio::task::spawn_blocking(move || {
+    let result = tokio::task::spawn_blocking(move || {
         let conda_meta_path = target_prefix.join("conda-meta");
         std::fs::create_dir_all(&conda_meta_path)?;
@@ -53,8 +53,8 @@ pub async fn install_package_to_environment(
         let pkg_meta_path = conda_meta_path.join(prefix_record.file_name());
         prefix_record.write_to_path(pkg_meta_path, true)
     })
-    .await
-    {
+    .await;
+    match result {
         Ok(result) => Ok(result?),
         Err(err) => {
             if let Ok(panic) = err.try_into_panic() {
@@ -95,7 +95,7 @@ pub async fn execute_operation(
                     default_retry_policy(),
                     None,
                 )
-                .map_ok(|cache_dir| Some((install_record.clone(), cache_dir)))
+                .map_ok(|cache_lock| Some((install_record.clone(), cache_lock)))
                 .map_err(anyhow::Error::from)
                 .await
                 .unwrap()
@@ -104,10 +104,10 @@
     };

     // If there is a package to install, do that now.
-    if let Some((record, package_dir)) = install_package {
+    if let Some((record, package_cache_lock)) = install_package {
         install_package_to_environment(
             target_prefix,
-            package_dir,
+            package_cache_lock.path().to_path_buf(),
             record.clone(),
             install_driver,
             install_options,
diff --git a/crates/rattler_cache/Cargo.toml b/crates/rattler_cache/Cargo.toml
index 4dcf08916..1650dff57 100644
--- a/crates/rattler_cache/Cargo.toml
+++ b/crates/rattler_cache/Cargo.toml
@@ -11,7 +11,9 @@ readme.workspace = true

 [dependencies]
 anyhow.workspace = true
+dashmap.workspace = true
 dirs.workspace = true
+futures.workspace = true
 fxhash.workspace = true
 itertools.workspace = true
 parking_lot.workspace = true
@@ -20,12 +22,14 @@ rattler_digest = { version = "1.0.1", path = "../rattler_digest", default-features = false }
 rattler_networking = { version = "0.21.2", path = "../rattler_networking", default-features = false }
 rattler_package_streaming = { version = "0.22.4", path = "../rattler_package_streaming", default-features = false, features = ["reqwest"] }
 reqwest.workspace = true
-tokio.workspace = true
+tokio = { workspace = true, features = ["macros"] }
 tracing.workspace = true
 url.workspace = true
 thiserror.workspace = true
 reqwest-middleware.workspace = true
 digest.workspace = true
+fs4 = { workspace = true, features = ["fs-err-tokio"] }
+simple_spawn_blocking = { version = "1.0.0", path = "../simple_spawn_blocking" }

 [dev-dependencies]
 assert_matches.workspace = true
diff --git a/crates/rattler_cache/src/package_cache/cache_key.rs b/crates/rattler_cache/src/package_cache/cache_key.rs
new file mode 100644
index 000000000..712d9d245
--- /dev/null
+++ b/crates/rattler_cache/src/package_cache/cache_key.rs
@@ -0,0 +1,64 @@
+use rattler_conda_types::package::ArchiveIdentifier;
+use rattler_conda_types::PackageRecord;
+use rattler_digest::Sha256Hash;
+use std::fmt::{Display, Formatter};
+
+/// Provides a unique identifier for packages in the cache.
+/// TODO: This may not be unique across multiple subdirs. How should that be handled?
+/// TODO: Wouldn't it be better to cache based on hashes?
+#[derive(Debug, Hash, Clone, Eq, PartialEq)]
+pub struct CacheKey {
+    name: String,
+    version: String,
+    build_string: String,
+    sha256: Option<Sha256Hash>,
+}
+
+impl CacheKey {
+    /// Adds a sha256 hash of the archive.
+    pub fn with_sha256(mut self, sha256: Sha256Hash) -> Self {
+        self.sha256 = Some(sha256);
+        self
+    }
+
+    /// Potentially adds a sha256 hash of the archive.
+    pub fn with_opt_sha256(mut self, sha256: Option<Sha256Hash>) -> Self {
+        self.sha256 = sha256;
+        self
+    }
+}
+
+impl CacheKey {
+    /// Returns the sha256 hash of the package if it is known.
+    pub fn sha256(&self) -> Option<Sha256Hash> {
+        self.sha256
+    }
+}
+
+impl From<ArchiveIdentifier> for CacheKey {
+    fn from(pkg: ArchiveIdentifier) -> Self {
+        CacheKey {
+            name: pkg.name,
+            version: pkg.version,
+            build_string: pkg.build_string,
+            sha256: None,
+        }
+    }
+}
+
+impl From<&PackageRecord> for CacheKey {
+    fn from(record: &PackageRecord) -> Self {
+        Self {
+            name: record.name.as_normalized().to_string(),
+            version: record.version.to_string(),
+            build_string: record.build.clone(),
+            sha256: record.sha256,
+        }
+    }
+}
+
+impl Display for CacheKey {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}-{}-{}", &self.name, &self.version, &self.build_string)
+    }
+}
diff --git a/crates/rattler_cache/src/package_cache/cache_lock.rs b/crates/rattler_cache/src/package_cache/cache_lock.rs
new file mode 100644
index 000000000..cbbbbb69f
--- /dev/null
+++ b/crates/rattler_cache/src/package_cache/cache_lock.rs
@@ -0,0 +1,222 @@
+use std::{
+    fmt::{Debug, Formatter},
+    io::{Read, Seek, Write},
+    path::{Path, PathBuf},
+    sync::Arc,
+    time::Duration,
+};
+
+use fs4::fs_std::FileExt;
+use parking_lot::Mutex;
+
+use crate::package_cache::PackageCacheError;
+
+/// A lock on the cache entry. As long as this lock is held, no other process is
+/// allowed to modify the cache entry. This, however, does not guarantee that the
+/// contents of the cache are not corrupted by external processes, but it does
+/// guarantee that concurrent processes accessing the package cache do not
+/// interfere with each other.
+pub struct CacheLock {
+    pub(super) _lock: CacheRwLock,
+    pub(super) revision: u64,
+    pub(super) path: PathBuf,
+}
+
+impl Debug for CacheLock {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("CacheLock")
+            .field("path", &self.path)
+            .field("revision", &self.revision)
+            .finish()
+    }
+}
+
+impl CacheLock {
+    /// Returns the path to the cache entry on disk.
+    pub fn path(&self) -> &Path {
+        &self.path
+    }
+
+    /// Returns the revision of the cache entry. This revision indicates the
+    /// number of times the cache entry has been updated.
+    pub fn revision(&self) -> u64 {
+        self.revision
+    }
+}
+
+pub struct CacheRwLock {
+    file: Arc<Mutex<std::fs::File>>,
+}
+
+impl Drop for CacheRwLock {
+    fn drop(&mut self) {
+        // Ensure that the lock is released when the lock is dropped.
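+        // This is a best-effort advisory unlock: even if it fails, the OS
+        // releases the lock once the underlying file handle is closed.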
+ let _ = self.file.lock().unlock(); + } +} + +impl CacheRwLock { + pub async fn acquire_read(path: &Path) -> Result { + let lock_file_path = path.to_path_buf(); + + let acquire_lock_fut = simple_spawn_blocking::tokio::run_blocking_task(move || { + let file = std::fs::OpenOptions::new() + .create(true) + .read(true) + .truncate(false) + .write(true) + .open(&lock_file_path) + .map_err(|e| { + PackageCacheError::LockError( + format!( + "failed to open cache lock for reading: '{}'", + lock_file_path.display() + ), + e, + ) + })?; + + file.lock_shared().map_err(move |e| { + PackageCacheError::LockError( + format!( + "failed to acquire read lock on cache lock file: '{}'", + lock_file_path.display() + ), + e, + ) + })?; + + Ok(CacheRwLock { + file: Arc::new(Mutex::new(file)), + }) + }); + + tokio::select!( + lock = acquire_lock_fut => lock, + _ = warn_timeout_future(format!( + "Blocking waiting for file lock on package cache for {}", + path.file_name() + .expect("lock file must have a name") + .to_string_lossy() + )) => unreachable!("warn_timeout_future should never finish") + ) + } +} + +impl CacheRwLock { + pub async fn acquire_write(path: &Path) -> Result { + let lock_file_path = path.to_path_buf(); + let acquire_lock_fut = simple_spawn_blocking::tokio::run_blocking_task(move || { + let file = std::fs::OpenOptions::new() + .create(true) + .truncate(false) + .write(true) + .read(true) + .open(&lock_file_path) + .map_err(|e| { + PackageCacheError::LockError( + format!( + "failed to open cache lock for writing: '{}", + lock_file_path.display() + ), + e, + ) + })?; + + file.lock_exclusive().map_err(move |e| { + PackageCacheError::LockError( + format!( + "failed to acquire write lock on cache lock file: '{}'", + lock_file_path.display() + ), + e, + ) + })?; + + Ok(CacheRwLock { + file: Arc::new(Mutex::new(file)), + }) + }); + + tokio::select!( + lock = acquire_lock_fut => lock, + _ = warn_timeout_future(format!( + "Blocking waiting for file lock on package cache for {}", + path.file_name() + .expect("lock file must have a name") + .to_string_lossy() + )) => unreachable!("warn_timeout_future should never finish") + ) + } +} + +impl CacheRwLock { + pub async fn write_revision(&mut self, revision: u64) -> Result<(), PackageCacheError> { + let file = self.file.clone(); + simple_spawn_blocking::tokio::run_blocking_task(move || { + let mut file = file.lock(); + + // Ensure we write from the start of the file + file.rewind().map_err(|e| { + PackageCacheError::LockError( + "failed to rewind cache lock for reading revision".to_string(), + e, + ) + })?; + + // Write the bytes of the revision + let revision_bytes = revision.to_be_bytes(); + file.write_all(&revision_bytes).map_err(|e| { + PackageCacheError::LockError( + "failed to write revision from cache lock".to_string(), + e, + ) + })?; + + // Ensure all bytes are written to disk + file.flush().map_err(|e| { + PackageCacheError::LockError( + "failed to flush cache lock after writing revision".to_string(), + e, + ) + })?; + + // Update the length of the file + file.set_len(revision_bytes.len() as u64).map_err(|e| { + PackageCacheError::LockError( + "failed to truncate cache lock after writing revision".to_string(), + e, + ) + })?; + + Ok(()) + }) + .await + } +} + +impl CacheRwLock { + pub fn read_revision(&mut self) -> Result { + let mut buf = [0; 8]; + match self.file.lock().read_exact(&mut buf) { + Ok(_) => {} + Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => { + return Ok(0); + } + Err(e) => { + return Err(PackageCacheError::LockError( + 
"failed to read revision from cache lock".to_string(), + e, + )); + } + } + Ok(u64::from_be_bytes(buf)) + } +} + +async fn warn_timeout_future(message: String) { + loop { + tokio::time::sleep(Duration::from_secs(30)).await; + tracing::warn!("{}", &message); + } +} diff --git a/crates/rattler_cache/src/package_cache.rs b/crates/rattler_cache/src/package_cache/mod.rs similarity index 54% rename from crates/rattler_cache/src/package_cache.rs rename to crates/rattler_cache/src/package_cache/mod.rs index 6e5cd78d5..f1b2ad3c9 100644 --- a/crates/rattler_cache/src/package_cache.rs +++ b/crates/rattler_cache/src/package_cache/mod.rs @@ -3,143 +3,88 @@ use std::{ error::Error, - fmt::{Display, Formatter}, + fmt::Debug, future::Future, - path::PathBuf, + path::{Path, PathBuf}, sync::Arc, time::{Duration, SystemTime}, }; -use fxhash::FxHashMap; +pub use cache_key::CacheKey; +pub use cache_lock::CacheLock; +use cache_lock::CacheRwLock; +use dashmap::DashMap; +use futures::TryFutureExt; use itertools::Itertools; use parking_lot::Mutex; -use rattler_conda_types::{package::ArchiveIdentifier, PackageRecord}; -use rattler_digest::Sha256Hash; +use rattler_conda_types::package::ArchiveIdentifier; use rattler_networking::retry_policies::{DoNotRetryPolicy, RetryDecision, RetryPolicy}; use rattler_package_streaming::{DownloadReporter, ExtractError}; +pub use reporter::CacheReporter; use reqwest::StatusCode; -use tokio::sync::broadcast; -use tracing::Instrument; +use simple_spawn_blocking::Cancelled; use url::Url; use crate::validation::validate_package_directory; -/// A trait that can be implemented to report progress of the download and -/// validation process. -pub trait CacheReporter: Send + Sync { - /// Called when validation starts - fn on_validate_start(&self) -> usize; - /// Called when validation completex - fn on_validate_complete(&self, index: usize); - /// Called when a download starts - fn on_download_start(&self) -> usize; - /// Called with regular updates on the download progress - fn on_download_progress(&self, index: usize, progress: u64, total: Option); - /// Called when a download completes - fn on_download_completed(&self, index: usize); -} +mod cache_key; +mod cache_lock; +mod reporter; /// A [`PackageCache`] manages a cache of extracted Conda packages on disk. /// /// The store does not provide an implementation to get the data into the store. -/// Instead this is left up to the user when the package is requested. If the +/// Instead, this is left up to the user when the package is requested. If the /// package is found in the cache it is returned immediately. However, if the /// cache is stale a user defined function is called to populate the cache. This /// separates the corners between caching and fetching of the content. #[derive(Clone)] pub struct PackageCache { - inner: Arc>, -} - -/// Provides a unique identifier for packages in the cache. -/// TODO: This could not be unique over multiple subdir. How to handle? -/// TODO: Wouldn't it be better to cache based on hashes? -#[derive(Debug, Hash, Clone, Eq, PartialEq)] -pub struct CacheKey { - name: String, - version: String, - build_string: String, - sha256: Option, -} - -impl CacheKey { - /// Adds a sha256 hash of the archive. - pub fn with_sha256(mut self, sha256: Sha256Hash) -> Self { - self.sha256 = Some(sha256); - self - } - - /// Potentially adds a sha256 hash of the archive. 
-    pub fn with_opt_sha256(mut self, sha256: Option<Sha256Hash>) -> Self {
-        self.sha256 = sha256;
-        self
-    }
-}
-
-impl CacheKey {
-    /// Return the sha256 hash of the package if it is known.
-    pub fn sha256(&self) -> Option<Sha256Hash> {
-        self.sha256
-    }
-}
-
-impl From<ArchiveIdentifier> for CacheKey {
-    fn from(pkg: ArchiveIdentifier) -> Self {
-        CacheKey {
-            name: pkg.name,
-            version: pkg.version,
-            build_string: pkg.build_string,
-            sha256: None,
-        }
-    }
-}
-
-impl From<&PackageRecord> for CacheKey {
-    fn from(record: &PackageRecord) -> Self {
-        Self {
-            name: record.name.as_normalized().to_string(),
-            version: record.version.to_string(),
-            build_string: record.build.clone(),
-            sha256: record.sha256,
-        }
-    }
-}
-
-impl Display for CacheKey {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}-{}-{}", &self.name, &self.version, &self.build_string)
-    }
+    inner: Arc<PackageCacheInner>,
 }

 #[derive(Default)]
 struct PackageCacheInner {
     path: PathBuf,
-    packages: FxHashMap<CacheKey, Arc<Mutex<Package>>>,
+    packages: DashMap<CacheKey, Arc<tokio::sync::Mutex<Entry>>>,
 }

 #[derive(Default)]
-struct Package {
-    path: Option<PathBuf>,
-    inflight: Option<broadcast::Sender<Result<PathBuf, PackageCacheError>>>,
+struct Entry {
+    last_revision: Option<u64>,
 }

 /// An error that might be returned from one of the caching function of the
 /// [`PackageCache`].
-#[derive(Debug, Clone, thiserror::Error)]
+#[derive(Debug, thiserror::Error)]
 pub enum PackageCacheError {
     /// An error occurred while fetching the package.
     #[error(transparent)]
     FetchError(#[from] Arc<dyn std::error::Error + Send + Sync + 'static>),
+
+    /// A locking error occurred
+    #[error("{0}")]
+    LockError(String, #[source] std::io::Error),
+
+    /// The operation was cancelled
+    #[error("operation was cancelled")]
+    Cancelled,
+}
+
+impl From<Cancelled> for PackageCacheError {
+    fn from(_value: Cancelled) -> Self {
+        Self::Cancelled
+    }
 }

 impl PackageCache {
     /// Constructs a new [`PackageCache`] located at the specified path.
     pub fn new(path: impl Into<PathBuf>) -> Self {
         Self {
-            inner: Arc::new(Mutex::new(PackageCacheInner {
+            inner: Arc::new(PackageCacheInner {
                 path: path.into(),
-                packages: FxHashMap::default(),
-            })),
+                packages: DashMap::default(),
+            }),
         }
     }

@@ -158,69 +103,31 @@ impl PackageCache {
         pkg: impl Into<CacheKey>,
         fetch: F,
         reporter: Option<Arc<dyn CacheReporter>>,
-    ) -> Result<PathBuf, PackageCacheError>
+    ) -> Result<CacheLock, PackageCacheError>
     where
-        F: (FnOnce(PathBuf) -> Fut) + Send + 'static,
+        F: (Fn(PathBuf) -> Fut) + Send + 'static,
         Fut: Future<Output = Result<(), E>> + Send + 'static,
         E: std::error::Error + Send + Sync + 'static,
     {
         let cache_key = pkg.into();
+        let cache_path = self.inner.path.join(cache_key.to_string());
+        let cache_entry = self.inner.packages.entry(cache_key).or_default().clone();

-        // Get the package entry
-        let (package, pkg_cache_dir) = {
-            let mut inner = self.inner.lock();
-            let destination = inner.path.join(cache_key.to_string());
-            let package = inner.packages.entry(cache_key).or_default().clone();
-            (package, destination)
-        };
+        // Acquire the entry. From this point on we can be sure that only one task is
+        // accessing the cache entry.
+        let mut cache_entry = cache_entry.lock().await;

-        let mut rx = {
-            // Only sync code in this block
-            let mut inner = package.lock();
+        // Validate the cache entry or fetch the package if it is not valid.
+        let cache_lock =
+            validate_or_fetch_to_cache(cache_path, fetch, cache_entry.last_revision, reporter)
+                .await?;

-            // If there exists an existing value in our cache, we can return that.
-            if let Some(path) = inner.path.as_ref() {
-                return Ok(path.clone());
-            }
-
-            // Is there an in-flight requests for the package?
-            if let Some(inflight) = inner.inflight.as_ref() {
-                inflight.subscribe()
-            } else {
-                // There is no in-flight requests so we start one!
- let (tx, rx) = broadcast::channel(1); - inner.inflight = Some(tx.clone()); - - let package = package.clone(); - tokio::spawn(async move { - let result = validate_or_fetch_to_cache(pkg_cache_dir.clone(), fetch, reporter) - .instrument( - tracing::debug_span!("validating", path = %pkg_cache_dir.display()), - ) - .await; + // Store the current revision stored in the cache. If any other task tries to + // read the cache and the revision stayed the same, we can assume that the cache + // is still valid. + cache_entry.last_revision = Some(cache_lock.revision); - { - // only sync code in this block - let mut package = package.lock(); - package.inflight = None; - - match result { - Ok(_) => { - package.path.replace(pkg_cache_dir.clone()); - let _ = tx.send(Ok(pkg_cache_dir)); - } - Err(e) => { - let _ = tx.send(Err(e)); - } - } - } - }); - - rx - } - }; - - rx.recv().await.expect("in-flight request has died") + Ok(cache_lock) } /// Returns the directory that contains the specified package. @@ -234,11 +141,37 @@ impl PackageCache { url: Url, client: reqwest_middleware::ClientWithMiddleware, reporter: Option>, - ) -> Result { + ) -> Result { self.get_or_fetch_from_url_with_retry(pkg, url, client, DoNotRetryPolicy, reporter) .await } + /// Returns the directory that contains the specified package. + /// + /// This is a convenience wrapper around `get_or_fetch` which fetches the + /// package from the given path if the package could not be found in the + /// cache. + pub async fn get_or_fetch_from_path( + &self, + path: &Path, + reporter: Option>, + ) -> Result { + let path = path.to_path_buf(); + self.get_or_fetch( + ArchiveIdentifier::try_from_path(&path).unwrap(), + move |destination| { + let path = path.clone(); + async move { + rattler_package_streaming::tokio::fs::extract(&path, &destination) + .await + .map(|_| ()) + } + }, + reporter, + ) + .await + } + /// Returns the directory that contains the specified package. 
/// /// This is a convenience wrapper around `get_or_fetch` which fetches the @@ -249,36 +182,41 @@ impl PackageCache { pkg: impl Into, url: Url, client: reqwest_middleware::ClientWithMiddleware, - retry_policy: impl RetryPolicy + Send + 'static, + retry_policy: impl RetryPolicy + Send + 'static + Clone, reporter: Option>, - ) -> Result { + ) -> Result { let request_start = SystemTime::now(); let cache_key = pkg.into(); let sha256 = cache_key.sha256(); let download_reporter = reporter.clone(); - self.get_or_fetch(cache_key, move |destination| async move { - let mut current_try = 0; - loop { - current_try += 1; - tracing::debug!("downloading {} to {}", &url, destination.display()); - - let result = rattler_package_streaming::reqwest::tokio::extract( - client.clone(), - url.clone(), - &destination, - sha256, - download_reporter.clone().map(|reporter| Arc::new(PassthroughReporter { - reporter, - index: Mutex::new(None), - }) as Arc::) - ) - .await; + self.get_or_fetch(cache_key, move |destination| { + let url = url.clone(); + let client = client.clone(); + let retry_policy = retry_policy.clone(); + let download_reporter = download_reporter.clone(); + async move { + let mut current_try = 0; + loop { + current_try += 1; + tracing::debug!("downloading {} to {}", &url, destination.display()); + + let result = rattler_package_streaming::reqwest::tokio::extract( + client.clone(), + url.clone(), + &destination, + sha256, + download_reporter.clone().map(|reporter| Arc::new(PassthroughReporter { + reporter, + index: Mutex::new(None), + }) as Arc::), + ) + .await; - // Extract any potential error - let Err(err) = result else { return Ok(()); }; + // Extract any potential error + let Err(err) = result else { return Ok(()); }; - // Only retry on certain errors. - if !matches!( + // Only retry on certain errors. + if !matches!( &err, ExtractError::IoError(_) | ExtractError::CouldNotCreateDestination(_) ) && !matches!(&err, ExtractError::ReqwestError(err) if @@ -288,30 +226,31 @@ impl PackageCache { .status() .map_or(false, |status| status.is_server_error() || status == StatusCode::TOO_MANY_REQUESTS || status == StatusCode::REQUEST_TIMEOUT) ) { - return Err(err); - } + return Err(err); + } - // Determine whether to retry based on the retry policy - let execute_after = match retry_policy.should_retry(request_start, current_try) { - RetryDecision::Retry { execute_after } => execute_after, - RetryDecision::DoNotRetry => return Err(err), - }; - let duration = execute_after.duration_since(SystemTime::now()).unwrap_or(Duration::ZERO); - - // Wait for a second to let the remote service restore itself. This increases the - // chance of success. - tracing::warn!( - "failed to download and extract {} to {}: {}. Retry #{}, Sleeping {:?} until the next attempt...", - &url, - destination.display(), - err, - current_try, - duration - ); - tokio::time::sleep(duration).await; + // Determine whether to retry based on the retry policy + let execute_after = match retry_policy.should_retry(request_start, current_try) { + RetryDecision::Retry { execute_after } => execute_after, + RetryDecision::DoNotRetry => return Err(err), + }; + let duration = execute_after.duration_since(SystemTime::now()).unwrap_or(Duration::ZERO); + + // Wait for a second to let the remote service restore itself. This increases the + // chance of success. + tracing::warn!( + "failed to download and extract {} to {}: {}. 
Retry #{}, Sleeping {:?} until the next attempt...",
+                        &url,
+                        destination.display(),
+                        err,
+                        current_try,
+                        duration
+                    );
+                    tokio::time::sleep(duration).await;
+                }
             }
         }, reporter)
-        .await
+        .await
     }
 }

@@ -320,53 +259,137 @@
 async fn validate_or_fetch_to_cache<F, Fut, E>(
     path: PathBuf,
     fetch: F,
+    known_valid_revision: Option<u64>,
     reporter: Option<Arc<dyn CacheReporter>>,
-) -> Result<(), PackageCacheError>
+) -> Result<CacheLock, PackageCacheError>
 where
-    F: FnOnce(PathBuf) -> Fut + Send,
+    F: Fn(PathBuf) -> Fut + Send,
     Fut: Future<Output = Result<(), E>> + 'static,
-    E: std::error::Error + Send + Sync + 'static,
+    E: Error + Send + Sync + 'static,
 {
-    // If the directory already exists validate the contents of the package
-    if path.is_dir() {
-        let path_inner = path.clone();
+    // Acquire a read lock on the cache entry. This ensures that no other process is
+    // currently writing to the cache.
+    let lock_file_path = path.with_extension("lock");
+
+    // Ensure the directory containing the lock-file exists.
+    if let Some(root_dir) = lock_file_path.parent() {
+        tokio::fs::create_dir_all(root_dir)
+            .map_err(|e| {
+                PackageCacheError::LockError(
+                    format!("failed to create cache directory: '{}'", root_dir.display()),
+                    e,
+                )
+            })
+            .await?;
+    }

-    let reporter = reporter.as_deref().map(|r| (r, r.on_validate_start()));
+    // The revision of the cache entry that we already know is valid.
+    let mut validated_revision = known_valid_revision;

-    let validation_result =
-        tokio::task::spawn_blocking(move || validate_package_directory(&path_inner)).await;
+    loop {
+        let mut read_lock = CacheRwLock::acquire_read(&lock_file_path).await?;
+        let cache_revision = read_lock.read_revision()?;

-    if let Some((reporter, index)) = reporter {
-        reporter.on_validate_complete(index);
-    }
+        if path.is_dir() {
+            let path_inner = path.clone();

-    match validation_result {
-        Ok(Ok(_)) => {
-            tracing::debug!("validation succeeded");
-            return Ok(());
-        }
-        Ok(Err(e)) => {
-            tracing::warn!("validation for {path:?} failed: {e}");
-            if let Some(cause) = e.source() {
+            let reporter = reporter.as_deref().map(|r| (r, r.on_validate_start()));
+
+            match validated_revision {
+                Some(revision) if revision == cache_revision => {
+                    // We previously already determined that the revision is valid. We can skip
+                    // actually validating.
+                    if let Some((reporter, index)) = reporter {
+                        reporter.on_validate_complete(index);
+                    }
+                    return Ok(CacheLock {
+                        _lock: read_lock,
+                        revision: cache_revision,
+                        path: path_inner,
+                    });
+                }
+                Some(_) => {
+                    // The cache has been modified since the last validation. We need to
+                    // re-validate.
                 tracing::debug!(
-                    "  Caused by: {}",
-                    std::iter::successors(Some(cause), |e| (*e).source())
-                        .format("\n  Caused by: ")
+                    "cache became stale while acquiring a read-lock from {}. Revalidating.",
+                    lock_file_path.display()
                 );
             }
+                None => {
+                    // We have no information about the cache revision. We need
+                    // to validate.
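+                    // Fall through to the full directory validation below.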
+                }
+            }
+
+            let validation_result =
+                tokio::task::spawn_blocking(move || validate_package_directory(&path_inner)).await;
+
+            if let Some((reporter, index)) = reporter {
+                reporter.on_validate_complete(index);
             }
-            Err(e) => {
-                if let Ok(panic) = e.try_into_panic() {
-                    std::panic::resume_unwind(panic)
+
+            match validation_result {
+                Ok(Ok(_)) => {
+                    tracing::debug!("validation succeeded");
+                    return Ok(CacheLock {
+                        _lock: read_lock,
+                        revision: cache_revision,
+                        path,
+                    });
+                }
+                Ok(Err(e)) => {
+                    tracing::warn!("validation for {path:?} failed: {e}");
+                    if let Some(cause) = e.source() {
+                        tracing::debug!(
+                            "  Caused by: {}",
+                            std::iter::successors(Some(cause), |e| (*e).source())
+                                .format("\n  Caused by: ")
+                        );
+                    }
+                }
+                Err(e) => {
+                    if let Ok(panic) = e.try_into_panic() {
+                        std::panic::resume_unwind(panic)
+                    }
                 }
             }
         }
-    }

-    // Otherwise, defer to populate method to fill our cache.
-    fetch(path)
-        .await
-        .map_err(|e| PackageCacheError::FetchError(Arc::new(e)))
+        // If the cache is stale, we need to fetch the package again. We have to acquire
+        // a write lock on the cache entry. However, we can't do that while we have a
+        // read lock on the cache lock file. So we release the read lock and acquire a
+        // write lock on the cache lock file. In the meantime, another process might
+        // have already fetched the package. To guard against this we read a revision
+        // from the lock-file while we have the read lock, then we acquire the write
+        // lock and check if the revision has changed. If it has, we assume that
+        // another process has already fetched the package and we restart the
+        // validation process.
+
+        drop(read_lock);
+
+        let mut write_lock = CacheRwLock::acquire_write(&lock_file_path).await?;
+
+        let read_revision = write_lock.read_revision()?;
+        if read_revision != cache_revision {
+            tracing::warn!("cache revisions don't match '{}'", lock_file_path.display());
+            // The cache has been modified since we last checked. We need to re-validate.
+            continue;
+        }
+
+        // Write the new revision
+        let new_revision = cache_revision + 1;
+        write_lock.write_revision(new_revision).await?;
+
+        // Otherwise, defer to populate method to fill our cache.
+        fetch(path.clone())
+            .await
+            .map_err(|e| PackageCacheError::FetchError(Arc::new(e)))?;
+
+        tracing::warn!("fetched '{}'", lock_file_path.display());
+
+        validated_revision = Some(new_revision);
+    }
 }

 struct PassthroughReporter {
@@ -440,7 +463,7 @@ mod test {
     #[tokio::test]
     pub async fn test_package_cache() {
         let tar_archive_path = tools::download_and_cache_file_async("https://conda.anaconda.org/robostack/linux-64/ros-noetic-rosbridge-suite-0.11.14-py39h6fdeb60_14.tar.bz2".parse().unwrap(),
-            "4dd9893f1eee45e1579d1a4f5533ef67a84b5e4b7515de7ed0db1dd47adc6bc8").await.unwrap();
+        "4dd9893f1eee45e1579d1a4f5533ef67a84b5e4b7515de7ed0db1dd47adc6bc8").await.unwrap();

         // Read the paths.json file straight from the tar file.
let paths = { @@ -458,13 +481,19 @@ mod test { let cache = PackageCache::new(packages_dir.path()); // Get the package to the cache - let package_dir = cache + let cache_lock = cache .get_or_fetch( ArchiveIdentifier::try_from_path(&tar_archive_path).unwrap(), - move |destination| async move { - rattler_package_streaming::tokio::fs::extract(&tar_archive_path, &destination) + move |destination| { + let tar_archive_path = tar_archive_path.clone(); + async move { + rattler_package_streaming::tokio::fs::extract( + &tar_archive_path, + &destination, + ) .await .map(|_| ()) + } }, None, ) @@ -472,7 +501,7 @@ mod test { .unwrap(); // Validate the contents of the package - let (_, current_paths) = validate_package_directory(&package_dir).unwrap(); + let (_, current_paths) = validate_package_directory(cache_lock.path()).unwrap(); // Make sure that the paths are the same as what we would expect from the // original tar archive. @@ -642,4 +671,46 @@ mod test { test_flaky_package_cache(conda, Middleware::FailAfterBytes(1000)).await; test_flaky_package_cache(conda, Middleware::FailAfterBytes(50)).await; } + + #[tokio::test] + async fn test_multi_process() { + let packages_dir = tempdir().unwrap(); + let cache_a = PackageCache::new(packages_dir.path()); + let cache_b = PackageCache::new(packages_dir.path()); + let cache_c = PackageCache::new(packages_dir.path()); + + let package_path = get_test_data_dir().join("clobber/clobber-python-0.1.0-cpython.conda"); + + // Get the file to the cache + let cache_a_lock = cache_a + .get_or_fetch_from_path(&package_path, None) + .await + .unwrap(); + + assert_eq!(cache_a_lock.revision(), 1); + + // Get the file to the cache + let cache_b_lock = cache_b + .get_or_fetch_from_path(&package_path, None) + .await + .unwrap(); + + assert_eq!(cache_b_lock.revision(), 1); + + // Now delete the index.json from the cache entry, effectively + // corrupting the cache. + std::fs::remove_file(cache_a_lock.path().join("info/index.json")).unwrap(); + + // Drop previous locks to ensure the package is not locked. + drop(cache_a_lock); + drop(cache_b_lock); + + // Get the file to the cache + let cache_c_lock = cache_c + .get_or_fetch_from_path(&package_path, None) + .await + .unwrap(); + + assert_eq!(cache_c_lock.revision(), 2); + } } diff --git a/crates/rattler_cache/src/package_cache/reporter.rs b/crates/rattler_cache/src/package_cache/reporter.rs new file mode 100644 index 000000000..1047343b5 --- /dev/null +++ b/crates/rattler_cache/src/package_cache/reporter.rs @@ -0,0 +1,14 @@ +/// A trait that can be implemented to report progress of the download and +/// validation process. 
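+///
+/// The `usize` returned by the `on_*_start` methods identifies that particular
+/// operation; it is passed back to the matching progress and completion
+/// callbacks so implementations can correlate them.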
+pub trait CacheReporter: Send + Sync {
+    /// Called when validation starts
+    fn on_validate_start(&self) -> usize;
+    /// Called when validation completes
+    fn on_validate_complete(&self, index: usize);
+    /// Called when a download starts
+    fn on_download_start(&self) -> usize;
+    /// Called with regular updates on the download progress
+    fn on_download_progress(&self, index: usize, progress: u64, total: Option<u64>);
+    /// Called when a download completes
+    fn on_download_completed(&self, index: usize);
+}
diff --git a/crates/rattler_conda_types/src/build_spec/parse.rs b/crates/rattler_conda_types/src/build_spec/parse.rs
index 7a8b0cb6f..3f93594b8 100644
--- a/crates/rattler_conda_types/src/build_spec/parse.rs
+++ b/crates/rattler_conda_types/src/build_spec/parse.rs
@@ -64,10 +64,10 @@ impl BuildNumberSpec {
 #[derive(Debug, Clone, Error, Eq, PartialEq)]
 pub enum ParseOrdOperatorError {
     /// Indicates that operator symbols were captured,
-    /// but not interpretable as an OrdOperator
+    /// but not interpretable as an `OrdOperator`
     #[error("invalid operator '{0}'")]
     InvalidOperator(String),
-    /// Indicates no symbol sequence found for OrdOperators,
+    /// Indicates no symbol sequence found for `OrdOperator`s,
     /// callers should expect explicit operators
     #[error("expected version operator")]
     ExpectedOperator,
diff --git a/crates/rattler_conda_types/src/environment_yaml.rs b/crates/rattler_conda_types/src/environment_yaml.rs
index 5f1e34ecd..f54897cf6 100644
--- a/crates/rattler_conda_types/src/environment_yaml.rs
+++ b/crates/rattler_conda_types/src/environment_yaml.rs
@@ -64,7 +64,7 @@ impl MatchSpecOrSubSection {
 impl EnvironmentYaml {
     /// Returns all the matchspecs in the `dependencies` section of the file.
-    pub fn match_specs(&self) -> impl Iterator<Item = &MatchSpec> + DoubleEndedIterator + '_ {
+    pub fn match_specs(&self) -> impl DoubleEndedIterator<Item = &MatchSpec> + '_ {
         self.dependencies
             .iter()
             .filter_map(MatchSpecOrSubSection::as_match_spec)
diff --git a/crates/rattler_conda_types/src/match_spec/parse.rs b/crates/rattler_conda_types/src/match_spec/parse.rs
index 72bd4c35a..aedf86ab6 100644
--- a/crates/rattler_conda_types/src/match_spec/parse.rs
+++ b/crates/rattler_conda_types/src/match_spec/parse.rs
@@ -1276,10 +1276,7 @@ mod tests {
                 channel.unwrap(),
                 Channel::from_str(expected_channel.unwrap(), &channel_config()).unwrap()
             );
-            assert_eq!(
-                subdir,
-                expected_subdir.map(std::string::ToString::to_string)
-            );
+            assert_eq!(subdir, expected_subdir.map(ToString::to_string));
         }
     }
 }
diff --git a/crates/rattler_conda_types/src/package/paths.rs b/crates/rattler_conda_types/src/package/paths.rs
index 5b948618e..e32f279c9 100644
--- a/crates/rattler_conda_types/src/package/paths.rs
+++ b/crates/rattler_conda_types/src/package/paths.rs
@@ -176,7 +176,7 @@ impl PathsJson {
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
 pub struct PrefixPlaceholder {
     /// The type of the file, either binary or text. Depending on the type of file either text
-    /// replacement is performed or CString replacement.
+    /// replacement is performed or `CString` replacement.
     pub file_mode: FileMode,

     /// The placeholder prefix used in the file.
This is the path of the prefix when the package diff --git a/crates/rattler_conda_types/src/platform.rs b/crates/rattler_conda_types/src/platform.rs index 3f3a01a35..e4c561ab9 100644 --- a/crates/rattler_conda_types/src/platform.rs +++ b/crates/rattler_conda_types/src/platform.rs @@ -93,10 +93,10 @@ impl Platform { return Platform::LinuxArmV6l; } - #[cfg(target_arch = "powerpc64le")] + #[cfg(all(target_arch = "powerpc64", target_endian = "little"))] return Platform::LinuxPpc64le; - #[cfg(target_arch = "powerpc64")] + #[cfg(all(target_arch = "powerpc64", target_endian = "big"))] return Platform::LinuxPpc64; #[cfg(target_arch = "s390x")] @@ -115,7 +115,6 @@ impl Platform { target_arch = "riscv64", target_arch = "aarch64", target_arch = "arm", - target_arch = "powerpc64le", target_arch = "powerpc64", target_arch = "s390x" )))] diff --git a/crates/rattler_conda_types/src/repo_data/mod.rs b/crates/rattler_conda_types/src/repo_data/mod.rs index fdbca1c5a..bc3f00a82 100644 --- a/crates/rattler_conda_types/src/repo_data/mod.rs +++ b/crates/rattler_conda_types/src/repo_data/mod.rs @@ -71,7 +71,7 @@ pub struct ChannelInfo { /// The channel's subdirectory pub subdir: String, - /// The base_url for all package urls. Can be an absolute or relative url. + /// The `base_url` for all package urls. Can be an absolute or relative url. #[serde(skip_serializing_if = "Option::is_none")] pub base_url: Option, } diff --git a/crates/rattler_conda_types/src/version/mod.rs b/crates/rattler_conda_types/src/version/mod.rs index 159dfd87d..451ed958c 100644 --- a/crates/rattler_conda_types/src/version/mod.rs +++ b/crates/rattler_conda_types/src/version/mod.rs @@ -231,7 +231,7 @@ impl Version { /// Returns the individual segments of the version. pub fn segments( &self, - ) -> impl Iterator> + DoubleEndedIterator + ExactSizeIterator + '_ { + ) -> impl DoubleEndedIterator> + ExactSizeIterator + '_ { let mut idx = usize::from(self.has_epoch()); let version_segments = if let Some(local_index) = self.local_segment_index() { &self.segments[..local_index] @@ -259,7 +259,7 @@ impl Version { /// ``` pub fn local_segments( &self, - ) -> impl Iterator> + DoubleEndedIterator + ExactSizeIterator + '_ { + ) -> impl DoubleEndedIterator> + ExactSizeIterator + '_ { if let Some(start) = self.local_segment_index() { let mut idx = usize::from(self.has_epoch()); idx += self.segments[..start] @@ -962,7 +962,7 @@ impl<'v> SegmentIter<'v> { } /// Returns an iterator over the components of this segment. - pub fn components(&self) -> impl Iterator + DoubleEndedIterator { + pub fn components(&self) -> impl DoubleEndedIterator { static IMPLICIT_DEFAULT: Component = Component::Numeral(0); let version = self.version; diff --git a/crates/rattler_conda_types/src/version/parse.rs b/crates/rattler_conda_types/src/version/parse.rs index 403d5ab38..4a933db5e 100644 --- a/crates/rattler_conda_types/src/version/parse.rs +++ b/crates/rattler_conda_types/src/version/parse.rs @@ -451,7 +451,6 @@ mod test { use crate::version::SegmentFormatter; use serde::Serialize; use std::collections::BTreeMap; - use std::fmt::{Display, Formatter}; use std::path::Path; use std::str::FromStr; @@ -512,17 +511,6 @@ mod test { insta::assert_debug_snapshot!(index_map); } - struct DisplayAsDebug(T); - - impl Display for DisplayAsDebug - where - for<'i> &'i T: std::fmt::Debug, - { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", &self.0) - } - } - /// Parse a large number of versions and see if parsing succeeded. 
/// TODO: This doesnt really verify that the parsing is correct. Maybe we can parse the version /// with Conda too and verify that the results match? diff --git a/crates/rattler_conda_types/src/version_spec/parse.rs b/crates/rattler_conda_types/src/version_spec/parse.rs index 8e43275e5..62a829bdc 100644 --- a/crates/rattler_conda_types/src/version_spec/parse.rs +++ b/crates/rattler_conda_types/src/version_spec/parse.rs @@ -283,7 +283,7 @@ pub fn looks_like_infinite_starts_with(input: &str) -> bool { let mut input = input.strip_suffix('.').unwrap_or(input); while !input.is_empty() { match input.strip_suffix(".*") { - Some(rest) if rest.is_empty() => { + Some("") => { // If we were able to continuously strip the `.*` pattern, // then we found a match. return true; diff --git a/crates/rattler_lock/src/lib.rs b/crates/rattler_lock/src/lib.rs index f5d1e85bb..1e9169a8d 100644 --- a/crates/rattler_lock/src/lib.rs +++ b/crates/rattler_lock/src/lib.rs @@ -214,9 +214,7 @@ impl LockFile { } /// Returns an iterator over all environments defined in the lock-file. - pub fn environments( - &self, - ) -> impl Iterator + ExactSizeIterator + '_ { + pub fn environments(&self) -> impl ExactSizeIterator + '_ { self.inner .environment_lookup .iter() @@ -251,7 +249,7 @@ impl Environment { } /// Returns all the platforms for which we have a locked-down environment. - pub fn platforms(&self) -> impl Iterator + ExactSizeIterator + '_ { + pub fn platforms(&self) -> impl ExactSizeIterator + '_ { self.data().packages.keys().copied() } @@ -276,7 +274,7 @@ impl Environment { pub fn packages( &self, platform: Platform, - ) -> Option + ExactSizeIterator + DoubleEndedIterator + '_> { + ) -> Option + ExactSizeIterator + '_> { let packages = self.data().packages.get(&platform)?; Some( packages @@ -289,13 +287,12 @@ impl Environment { /// environment pub fn packages_by_platform( &self, - ) -> impl Iterator< + ) -> impl ExactSizeIterator< Item = ( Platform, - impl Iterator + ExactSizeIterator + DoubleEndedIterator + '_, + impl DoubleEndedIterator + ExactSizeIterator + '_, ), - > + ExactSizeIterator - + '_ { + > + '_ { let env_data = self.data(); env_data.packages.iter().map(move |(platform, packages)| { ( @@ -386,9 +383,7 @@ impl Environment { &self, platform: Platform, ) -> Option> { - let Some(packages) = self.data().packages.get(&platform) else { - return None; - }; + let packages = self.data().packages.get(&platform)?; Some( packages diff --git a/crates/rattler_lock/src/utils/serde/pep440_map_or_vec.rs b/crates/rattler_lock/src/utils/serde/pep440_map_or_vec.rs index 0adf3103b..d2ee6060a 100644 --- a/crates/rattler_lock/src/utils/serde/pep440_map_or_vec.rs +++ b/crates/rattler_lock/src/utils/serde/pep440_map_or_vec.rs @@ -35,7 +35,8 @@ impl<'de> DeserializeAs<'de, Vec> for Pep440MapOrVec { } else { Some(VersionOrUrl::VersionSpecifier(spec)) }, - marker: Option::default(), + #[allow(clippy::default_trait_access)] + marker: Default::default(), origin: None, }) }) diff --git a/crates/rattler_networking/src/authentication_storage/backends/keyring.rs b/crates/rattler_networking/src/authentication_storage/backends/keyring.rs index c9a716f38..2c384c6f2 100644 --- a/crates/rattler_networking/src/authentication_storage/backends/keyring.rs +++ b/crates/rattler_networking/src/authentication_storage/backends/keyring.rs @@ -9,7 +9,7 @@ use crate::{authentication_storage::StorageBackend, Authentication}; #[derive(Clone, Debug)] /// A storage backend that stores credentials in the operating system's keyring pub struct 
KeyringAuthenticationStorage { - /// The store_key needs to be unique per program as it is stored + /// The `store_key` needs to be unique per program as it is stored /// in a global dictionary in the operating system pub store_key: String, } diff --git a/crates/rattler_networking/src/oci_middleware.rs b/crates/rattler_networking/src/oci_middleware.rs index ac6fd9e5f..56f91f830 100644 --- a/crates/rattler_networking/src/oci_middleware.rs +++ b/crates/rattler_networking/src/oci_middleware.rs @@ -1,8 +1,13 @@ //! Middleware to handle `oci://` URLs to pull artifacts from an OCI registry -use std::collections::HashMap; - -use http::header::{ACCEPT, AUTHORIZATION}; -use http::Extensions; +use std::{ + collections::HashMap, + fmt::{Display, Formatter}, +}; + +use http::{ + header::{ACCEPT, AUTHORIZATION}, + Extensions, +}; use reqwest::{Request, Response}; use reqwest_middleware::{Middleware, Next}; use serde::Deserialize; @@ -41,15 +46,16 @@ struct OCIToken { token: String, } -impl ToString for OciAction { - fn to_string(&self) -> String { +impl Display for OciAction { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - OciAction::Pull => "pull".to_string(), - OciAction::Push => "push".to_string(), - OciAction::PushPull => "push,pull".to_string(), + OciAction::Pull => write!(f, "pull"), + OciAction::Push => write!(f, "push"), + OciAction::PushPull => write!(f, "push,pull"), } } } + // [oci://ghcr.io/channel-mirrors/conda-forge]/[osx-arm64/xtensor] async fn get_token(url: &OCIUrl, action: OciAction) -> Result { let token_url = url.token_url(action)?; @@ -74,8 +80,8 @@ struct OCIUrl { media_type: String, } -/// OCI registry tags are not allowed to contain `+`, `!`, or `=`, so we need to replace them -/// with something else (reverse of `version_build_tag`) +/// OCI registry tags are not allowed to contain `+`, `!`, or `=`, so we need to +/// replace them with something else (reverse of `version_build_tag`) #[allow(dead_code)] fn reverse_version_build_tag(tag: &str) -> String { tag.replace("__p__", "+") @@ -83,8 +89,8 @@ fn reverse_version_build_tag(tag: &str) -> String { .replace("__eq__", "=") } -/// OCI registry tags are not allowed to contain `+`, `!`, or `=`, so we need to replace them -/// with something else +/// OCI registry tags are not allowed to contain `+`, `!`, or `=`, so we need to +/// replace them with something else fn version_build_tag(tag: &str) -> String { tag.replace('+', "__p__") .replace('!', "__e__") @@ -103,9 +109,7 @@ impl OCIUrl { pub fn token_url(&self, action: OciAction) -> Result { format!( "https://{}/token?scope=repository:{}:{}", - self.host, - self.path, - action.to_string() + self.host, self.path, action ) .parse() } @@ -276,9 +280,10 @@ impl Middleware for OciMiddleware { #[cfg(test)] mod tests { - use crate::OciMiddleware; use sha2::{Digest, Sha256}; + use crate::OciMiddleware; + // test pulling an image from OCI registry #[cfg(any(feature = "rustls-tls", feature = "native-tls"))] #[tokio::test] diff --git a/crates/rattler_networking/src/retry_policies.rs b/crates/rattler_networking/src/retry_policies.rs index bfd84a5bf..f74b3ae5c 100644 --- a/crates/rattler_networking/src/retry_policies.rs +++ b/crates/rattler_networking/src/retry_policies.rs @@ -8,6 +8,7 @@ pub use retry_policies::{policies::*, Jitter, RetryDecision, RetryPolicy}; use std::time::SystemTime; /// A simple [`RetryPolicy`] that just never retries. 
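+///
+/// It now derives `Clone` (and `Copy`) because the package cache clones the
+/// retry policy into its retrying fetch closure.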
+#[derive(Clone, Copy)] pub struct DoNotRetryPolicy; impl RetryPolicy for DoNotRetryPolicy { fn should_retry(&self, _: SystemTime, _: u32) -> RetryDecision { diff --git a/crates/rattler_package_streaming/src/tokio/async_read.rs b/crates/rattler_package_streaming/src/tokio/async_read.rs index 01b023acb..e8af9ab7a 100644 --- a/crates/rattler_package_streaming/src/tokio/async_read.rs +++ b/crates/rattler_package_streaming/src/tokio/async_read.rs @@ -1,12 +1,13 @@ -//! Functions that enable extracting or streaming a Conda package for objects that implement the -//! [`tokio::io::AsyncRead`] trait. +//! Functions that enable extracting or streaming a Conda package for objects +//! that implement the [`tokio::io::AsyncRead`] trait. + +use std::{io::Read, path::Path}; -use crate::{ExtractError, ExtractResult}; -use std::io::Read; -use std::path::Path; use tokio::io::AsyncRead; use tokio_util::io::SyncIoBridge; +use crate::{ExtractError, ExtractResult}; + /// Extracts the contents a `.tar.bz2` package archive. pub async fn extract_tar_bz2( reader: impl AsyncRead + Send + 'static, @@ -44,7 +45,8 @@ pub async fn extract_conda( .await } -/// Extracts the contents of a .conda package archive by fully reading the stream and then decompressing +/// Extracts the contents of a .conda package archive by fully reading the +/// stream and then decompressing pub async fn extract_conda_via_buffering( reader: impl AsyncRead + Send + 'static, destination: &Path, @@ -57,7 +59,8 @@ pub async fn extract_conda_via_buffering( .await } -/// Extracts the contents of a `.conda` package archive using the provided extraction function +/// Extracts the contents of a `.conda` package archive using the provided +/// extraction function async fn extract_conda_internal( reader: impl AsyncRead + Send + 'static, destination: &Path, @@ -68,18 +71,15 @@ async fn extract_conda_internal( // Spawn a block task to perform the extraction let destination = destination.to_owned(); - match tokio::task::spawn_blocking(move || { + tokio::task::spawn_blocking(move || { let reader: Box = Box::new(reader); extract_fn(reader, &destination) }) .await - { - Ok(result) => result, - Err(err) => { - if let Ok(reason) = err.try_into_panic() { - std::panic::resume_unwind(reason); - } - Err(ExtractError::Cancelled) + .unwrap_or_else(|err| { + if let Ok(reason) = err.try_into_panic() { + std::panic::resume_unwind(reason); } - } + Err(ExtractError::Cancelled) + }) } diff --git a/crates/rattler_package_streaming/src/write.rs b/crates/rattler_package_streaming/src/write.rs index 4410133eb..73cb151dc 100644 --- a/crates/rattler_package_streaming/src/write.rs +++ b/crates/rattler_package_streaming/src/write.rs @@ -271,7 +271,7 @@ fn write_zst_archive( /// * `paths` - a list of paths to include in the package /// * `compression_level` - the compression level to use for the inner zstd encoded files /// * `compression_num_threads` - the number of threads to use for zstd compression (defaults to -/// the number of CPU cores if `None`) +/// the number of CPU cores if `None`) /// * `timestamp` - optional a timestamp to use for all archive files (useful for reproducible builds) /// /// # Errors diff --git a/crates/rattler_repodata_gateway/Cargo.toml b/crates/rattler_repodata_gateway/Cargo.toml index bfb00004e..29b027942 100644 --- a/crates/rattler_repodata_gateway/Cargo.toml +++ b/crates/rattler_repodata_gateway/Cargo.toml @@ -12,6 +12,7 @@ readme.workspace = true [dependencies] anyhow = { workspace = true } +async-fd-lock = { workspace = true } async-compression = 
{ workspace = true, features = ["gzip", "tokio", "bzip2", "zstd"] } async-trait = { workspace = true, optional = true } blake2 = { workspace = true } diff --git a/crates/rattler_repodata_gateway/src/fetch/cache/cache_headers.rs b/crates/rattler_repodata_gateway/src/fetch/cache/cache_headers.rs index 9e33f3f17..8a7f96fc5 100644 --- a/crates/rattler_repodata_gateway/src/fetch/cache/cache_headers.rs +++ b/crates/rattler_repodata_gateway/src/fetch/cache/cache_headers.rs @@ -8,11 +8,11 @@ use serde::{Deserialize, Serialize}; /// Extracted HTTP response headers that enable caching the repodata.json files. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CacheHeaders { - /// The ETag HTTP cache header + /// The `ETag` HTTP cache header #[serde(default, skip_serializing_if = "Option::is_none")] pub etag: Option, - /// The Last-Modified HTTP cache header + /// The `Last-Modified` HTTP cache header #[serde(default, skip_serializing_if = "Option::is_none", rename = "mod")] pub last_modified: Option, diff --git a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs index b7f8ce1d8..468a088c2 100644 --- a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs @@ -388,7 +388,7 @@ impl<'a> JLAPResponse<'a> { /// Calculates the bytes offset. We default to zero if we receive a shorter than /// expected vector. -fn get_bytes_offset(lines: &Vec<&str>) -> u64 { +fn get_bytes_offset(lines: &[&str]) -> u64 { if lines.len() >= JLAP_FOOTER_OFFSET { lines[0..lines.len() - JLAP_FOOTER_OFFSET] .iter() diff --git a/crates/rattler_repodata_gateway/src/gateway/direct_url_query.rs b/crates/rattler_repodata_gateway/src/gateway/direct_url_query.rs index 6ecbf3083..05ef9af5c 100644 --- a/crates/rattler_repodata_gateway/src/gateway/direct_url_query.rs +++ b/crates/rattler_repodata_gateway/src/gateway/direct_url_query.rs @@ -1,7 +1,8 @@ use std::{future::IntoFuture, sync::Arc}; use futures::FutureExt; -use rattler_cache::package_cache::{CacheKey, PackageCache, PackageCacheError}; +use rattler_cache::package_cache::CacheKey; +use rattler_cache::package_cache::{PackageCache, PackageCacheError}; use rattler_conda_types::{ package::{ArchiveIdentifier, IndexJson, PackageFile}, ConvertSubdirError, PackageRecord, RepoDataRecord, @@ -63,7 +64,7 @@ impl DirectUrlQuery { // TODO: Optimize this by only parsing the index json from stream. 
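        // Holding the returned `CacheLock` keeps the extracted package
        // read-locked below, so the `index.json` we parse cannot be replaced
        // by another process in the meantime.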
// Get package on system - let package_dir = self + let cache_lock = self .package_cache .get_or_fetch_from_url( cache_key, @@ -75,7 +76,7 @@ impl DirectUrlQuery { .await?; // Extract package record from index json - let index_json = IndexJson::from_package_directory(package_dir)?; + let index_json = IndexJson::from_package_directory(cache_lock.path())?; let package_record = PackageRecord::from_index_json( index_json, None, // Size diff --git a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs index f1e00d0ca..e6f761e95 100644 --- a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs +++ b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs @@ -1,21 +1,23 @@ -use super::{token::TokenClient, ShardedRepodata}; -use crate::reporter::ResponseReporterExt; -use crate::{utils::url_to_cache_filename, GatewayError, Reporter}; +use std::{path::Path, str::FromStr, sync::Arc, time::SystemTime}; + +use async_fd_lock::{LockWrite, RwLockWriteGuard}; use bytes::Bytes; -use futures::{FutureExt, TryFutureExt}; +use futures::TryFutureExt; use http::{HeaderMap, Method, Uri}; use http_cache_semantics::{AfterResponse, BeforeRequest, CachePolicy, RequestLike}; use reqwest::Response; use reqwest_middleware::ClientWithMiddleware; use serde::{Deserialize, Serialize}; use simple_spawn_blocking::tokio::run_blocking_task; -use std::sync::Arc; -use std::{io::Write, path::Path, str::FromStr, time::SystemTime}; -use tempfile::NamedTempFile; -use tokio::fs::File; -use tokio::io::{AsyncReadExt, BufReader}; +use tokio::{ + fs::File, + io::{AsyncRead, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, BufReader, BufWriter}, +}; use url::Url; +use super::{token::TokenClient, ShardedRepodata}; +use crate::{reporter::ResponseReporterExt, utils::url_to_cache_filename, GatewayError, Reporter}; + /// Magic number that identifies the cache file format. const MAGIC_NUMBER: &[u8] = b"SHARD-CACHE-V1"; @@ -31,6 +33,7 @@ pub async fn fetch_index( reporter: Option<&dyn Reporter>, ) -> Result { async fn from_response( + mut cache_file: RwLockWriteGuard, cache_path: &Path, policy: CachePolicy, response: Response, @@ -48,8 +51,8 @@ pub async fn fetch_index( let decoded_bytes = Bytes::from(super::decode_zst_bytes_async(bytes).await?); // Write the cache to disk if the policy allows it. - let cache_fut = if policy.is_storable() { - write_shard_index_cache(cache_path, policy, decoded_bytes.clone()) + let cache_fut = + write_shard_index_cache(cache_file.inner_mut(), policy, decoded_bytes.clone()) .map_ok(Some) .map_err(|e| { GatewayError::IoError( @@ -59,29 +62,7 @@ pub async fn fetch_index( ), e, ) - }) - .left_future() - } else { - // Otherwise delete the file - tokio::fs::remove_file(cache_path) - .map_ok_or_else( - |e| { - if e.kind() == std::io::ErrorKind::NotFound { - Ok(None) - } else { - Err(GatewayError::IoError( - format!( - "failed to remove cached shard index at {}", - cache_path.display() - ), - e, - )) - } - }, - |_| Ok(None), - ) - .right_future() - }; + }); // Parse the bytes let parse_fut = run_blocking_task(move || { @@ -96,17 +77,7 @@ pub async fn fetch_index( }); // Parse and write the file to disk concurrently - let (temp_file, sharded_index) = tokio::try_join!(cache_fut, parse_fut)?; - - // Persist the cache if successfully updated the cache. 
- if let Some(temp_file) = temp_file { - temp_file.persist(cache_path).map_err(|e| { - GatewayError::IoError( - format!("failed to persist shard index to {}", cache_path.display()), - e.into(), - ) - })?; - } + let (_, sharded_index) = tokio::try_join!(cache_fut, parse_fut)?; Ok(sharded_index) } @@ -122,16 +93,44 @@ pub async fn fetch_index( ); let cache_path = cache_dir.join(cache_file_name); + // Make sure the cache directory exists + if let Some(parent) = cache_path.parent() { + tokio::fs::create_dir_all(parent).await.map_err(|err| { + GatewayError::IoError(format!("failed to create '{}'", parent.display()), err) + })?; + } + + // Open and lock the cache file + let cache_file = tokio::fs::OpenOptions::new() + .write(true) + .read(true) + .truncate(false) + .create(true) + .open(&cache_path) + .await + .map_err(|err| { + GatewayError::IoError(format!("failed to open '{}'", cache_path.display()), err) + })?; + + // Acquire a lock on the file. + let cache_lock = cache_file.lock_write().await.map_err(|err| { + GatewayError::IoError( + format!("failed to lock '{}'", cache_path.display()), + err.error, + ) + })?; + let mut cache_reader = BufReader::new(cache_lock); + let canonical_request = SimpleRequest::get(&canonical_shards_url); // Try reading the cached file - if let Ok((cache_header, file)) = read_cached_index(&cache_path).await { + if let Ok(cache_header) = read_cached_index(&mut cache_reader).await { match cache_header .policy .before_request(&canonical_request, SystemTime::now()) { BeforeRequest::Fresh(_) => { - if let Ok(shard_index) = read_shard_index_from_reader(file).await { + if let Ok(shard_index) = read_shard_index_from_reader(&mut cache_reader).await { tracing::debug!("shard index cache hit"); return Ok(shard_index); } @@ -173,10 +172,11 @@ pub async fn fetch_index( ) { AfterResponse::NotModified(_policy, _) => { // The cached file is still valid - match read_shard_index_from_reader(file).await { + match read_shard_index_from_reader(&mut cache_reader).await { Ok(shard_index) => { tracing::debug!("shard index cache was not modified"); - // If reading the file failed for some reason we'll just fetch it again. + // If reading the file failed for some reason we'll just fetch it + // again. return Ok(shard_index); } Err(e) => { @@ -189,11 +189,15 @@ pub async fn fetch_index( } AfterResponse::Modified(policy, _) => { // Close the old file so we can create a new one. - drop(file); - tracing::debug!("shard index cache has become stale"); - return from_response(&cache_path, policy, response, download_reporter) - .await; + return from_response( + cache_reader.into_inner(), + &cache_path, + policy, + response, + download_reporter, + ) + .await; } } } @@ -234,44 +238,49 @@ pub async fn fetch_index( .await?; let policy = CachePolicy::new(&canonical_request, &response); - from_response(&cache_path, policy, response, reporter).await + from_response( + cache_reader.into_inner(), + &cache_path, + policy, + response, + reporter, + ) + .await } /// Writes the shard index cache to disk. 
async fn write_shard_index_cache( - cache_path: &Path, + cache_file: &mut File, policy: CachePolicy, decoded_bytes: Bytes, -) -> std::io::Result<NamedTempFile> { - let cache_path = cache_path.to_path_buf(); - tokio::task::spawn_blocking(move || { - // Write the header - let cache_header = rmp_serde::encode::to_vec(&CacheHeader { policy }) - .expect("failed to encode cache header"); - let cache_dir = cache_path - .parent() - .expect("the cache path must have a parent"); - std::fs::create_dir_all(cache_dir)?; - let mut temp_file = tempfile::Builder::new() - .tempfile_in(cache_dir) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; - temp_file.write_all(MAGIC_NUMBER)?; - temp_file.write_all(&(cache_header.len() as u32).to_le_bytes())?; - temp_file.write_all(&cache_header)?; - temp_file.write_all(decoded_bytes.as_ref())?; - - Ok(temp_file) - }) - .map_err(|e| match e.try_into_panic() { - Ok(payload) => std::panic::resume_unwind(payload), - Err(e) => std::io::Error::new(std::io::ErrorKind::Other, e), - }) - .await? +) -> std::io::Result<()> { + let cache_header = + rmp_serde::encode::to_vec(&CacheHeader { policy }).expect("failed to encode cache header"); + + // Move to the start of the file + cache_file.rewind().await?; + + // Write the cache to disk + let mut writer = BufWriter::new(cache_file); + writer.write_all(MAGIC_NUMBER).await?; + writer + .write_all(&(cache_header.len() as u32).to_le_bytes()) + .await?; + writer.write_all(&cache_header).await?; + writer.write_all(decoded_bytes.as_ref()).await?; + writer.flush().await?; + + // Truncate the file to the correct size + let cache_file = writer.into_inner(); + let len = cache_file.stream_position().await?; + cache_file.set_len(len).await?; + + Ok(()) } /// Read the shard index from a reader and deserialize it. -async fn read_shard_index_from_reader( - mut reader: BufReader<File>, +async fn read_shard_index_from_reader<R: AsyncRead + Unpin>( + reader: &mut BufReader<R>, ) -> Result<ShardedRepodata, GatewayError> { // Read the file to memory let mut bytes = Vec::new(); @@ -296,11 +305,9 @@ struct CacheHeader { } /// Try reading the cache file from disk. -async fn read_cached_index(cache_path: &Path) -> std::io::Result<(CacheHeader, BufReader<File>)> { - // Open the file for reading - let file = File::open(cache_path).await?; - let mut reader = BufReader::new(file); - +async fn read_cached_index<R: AsyncRead + Unpin>( + reader: &mut BufReader<R>, +) -> std::io::Result<CacheHeader> { // Read the magic from the file let mut magic_number = [0; MAGIC_NUMBER.len()]; reader.read_exact(&mut magic_number).await?; @@ -322,10 +329,11 @@ async fn read_cached_index(cache_path: &Path) -> std::io::Result<(CacheHeader, BufReader<File>)> { let cache_header = rmp_serde::from_slice::<CacheHeader>(&header_bytes) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; - Ok((cache_header, reader)) + Ok(cache_header) } -/// A helper struct to make it easier to construct something that implements [`RequestLike`]. +/// A helper struct to make it easier to construct something that implements +/// [`RequestLike`].
struct SimpleRequest { uri: Uri, method: Method, diff --git a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs index 40b9a9680..0824e9318 100644 --- a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs +++ b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs @@ -239,14 +239,17 @@ async fn write_shard_to_cache( })?; match temp_file.persist(&shard_cache_path) { Ok(_) => Ok(()), - Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => { - // The file already exists, we don't need to write it again. - Ok(()) + Err(e) => { + if shard_cache_path.is_file() { + // The file already exists, we can ignore the error. + Ok(()) + } else { + Err(GatewayError::IoError( + format!("failed to persist shard to {}", shard_cache_path.display()), + e.error, + )) + } } - Err(e) => Err(GatewayError::IoError( - format!("failed to persist shard to {}", shard_cache_path.display()), - e.error, - )), } }) .await diff --git a/crates/rattler_shell/src/activation.rs b/crates/rattler_shell/src/activation.rs index 3c24606dd..fef51f207 100644 --- a/crates/rattler_shell/src/activation.rs +++ b/crates/rattler_shell/src/activation.rs @@ -24,9 +24,9 @@ pub enum PathModificationBehavior { /// Replaces the complete path variable with specified paths. #[default] Replace, - /// Appends the new path variables to the path. E.g. '$PATH:/new/path' + /// Appends the new path variables to the path. E.g. "$PATH:/new/path" Append, - /// Prepends the new path variables to the path. E.g. '/new/path:$PATH' + /// Prepends the new path variables to the path. E.g. "/new/path:$PATH" Prepend, } @@ -603,13 +603,10 @@ mod tests { } #[cfg(unix)] - fn get_script<T: Shell>( + fn get_script<T: Shell + Clone>( shell_type: T, path_modification_behavior: PathModificationBehavior, - ) -> String - where - T: Clone, - { + ) -> String { let tdir = create_temp_dir(); let activator = Activator::from_path(tdir.path(), shell_type, Platform::Osx64).unwrap(); diff --git a/crates/rattler_solve/src/libsolv_c/wrapper/solve_problem.rs b/crates/rattler_solve/src/libsolv_c/wrapper/solve_problem.rs index 2668d14ce..ed0b40a92 100644 --- a/crates/rattler_solve/src/libsolv_c/wrapper/solve_problem.rs +++ b/crates/rattler_solve/src/libsolv_c/wrapper/solve_problem.rs @@ -36,8 +36,8 @@ pub enum SolveProblem { source: SolvableId, target: SolvableId, }, - /// A constraint (run_constrained) on source is conflicting with target. - /// SOLVER_RULE_PKG_CONSTRAINS has a dep, but it can resolve to nothing. + /// A constraint (`run_constrained`) on source is conflicting with target. + /// `SOLVER_RULE_PKG_CONSTRAINS` has a dep, but it can resolve to nothing. /// The constraint conflict is actually expressed between the target and /// a constrains node child of the source. PkgConstrains { diff --git a/crates/rattler_solve/src/resolvo/mod.rs b/crates/rattler_solve/src/resolvo/mod.rs index 62e7d8ca3..e5fb193c7 100644 --- a/crates/rattler_solve/src/resolvo/mod.rs +++ b/crates/rattler_solve/src/resolvo/mod.rs @@ -343,8 +343,7 @@ impl<'a> CondaDependencyProvider<'a> { // This function makes the assumption that the records are given in order of the // channels. if let (Some(first_channel), ChannelPriority::Strict) = ( - package_name_found_in_channel - .get(&record.package_record.name.as_normalized().to_string()), + package_name_found_in_channel.get(record.package_record.name.as_normalized()), channel_priority, ) { // Add the record to the excluded list when it is from a different channel.
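Reviewer aside (not part of the patch): the index.rs changes above trade the tempfile-and-rename scheme for a single cache file that is opened read+write, locked via async-fd-lock, and rewritten in place. A minimal sketch of that pattern, assuming the async-fd-lock 0.2 API exercised in the diff; `demo_rewrite_cache` is a hypothetical helper, not code from this changeset:

use std::path::Path;

use async_fd_lock::LockWrite;
use tokio::io::{AsyncSeekExt, AsyncWriteExt};

async fn demo_rewrite_cache(path: &Path, payload: &[u8]) -> std::io::Result<()> {
    // Open (or create) the file without truncating it; truncating before
    // the lock is held could clobber data a concurrent process still needs.
    let file = tokio::fs::OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .truncate(false)
        .open(path)
        .await?;

    // Take an exclusive advisory lock; the guard releases it on drop.
    let mut guard = file.lock_write().await.map_err(|e| e.error)?;

    // Rewrite in place: seek to the start, write the new contents, then
    // truncate any tail left over from a longer previous version.
    let file = guard.inner_mut();
    file.rewind().await?;
    file.write_all(payload).await?;
    file.flush().await?;
    let len = file.stream_position().await?;
    file.set_len(len).await?;
    Ok(())
}

Compared with persisting a temp file, lock-plus-rewrite means concurrent fetchers serialize on the same cache entry instead of racing renames, which is what lets the NotModified/Modified paths above read from and write to the same handle.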
diff --git a/pixi.toml b/pixi.toml index a44b66249..33691d02b 100644 --- a/pixi.toml +++ b/pixi.toml @@ -24,7 +24,7 @@ cxx-compiler = "~=1.6.0" openssl = "~=3.1" make = "~=4.3" pkg-config = "~=0.29.2" -rust = "~=1.75.0" +rust = "~=1.80.0" cmake = "~=3.26.4" [target.linux-64.dependencies] diff --git a/py-rattler/pixi.lock b/py-rattler/pixi.lock index d330d51e7..995f6ff68 100644 --- a/py-rattler/pixi.lock +++ b/py-rattler/pixi.lock @@ -543,6 +543,7 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha1999f0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py38h17151c0_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda @@ -554,7 +555,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h9eb54c0_7.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_17.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-13.2.0-hceb6213_107.conda @@ -586,14 +587,15 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.3.7-py38h18b4745_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.77.2-h70c747d_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.77.2-h2c6d0dc_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.80.1-h0a17960_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.80.1-h2c6d0dc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-70.0.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_17.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240602-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 @@ -631,8 +633,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.3.7-py38h1916ca8_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.77.2-h7e1429e_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.77.2-h38e4360_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.80.1-h6c54e5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.80.1-h38e4360_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-70.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -675,8 +677,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.3.7-py38h5477e86_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.77.2-h4ff7c5d_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.77.2-hf6ec828_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.80.1-h4ff7c5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.80.1-hf6ec828_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-70.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -722,8 +724,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.3.7-py38h5e48be7_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.77.2-hf8d6059_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.77.2-h17fc481_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.80.1-hf8d6059_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.80.1-h17fc481_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-70.0.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -771,6 +773,21 @@ packages: purls: [] size: 23621 timestamp: 1650670423406 +- kind: conda + name: _sysroot_linux-64_curr_repodata_hack + version: '3' + build: h69a702a_16 + build_number: 16 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda + sha256: 6ac30acdbfd3136ee7a1de28af4355165291627e905715611726e674499b0786 + md5: 1c005af0c6ff22814b7c52ee448d4bea + license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 + license_family: GPL + purls: [] + size: 20798 + 
timestamp: 1720621358501 - kind: conda name: astunparse version: 1.6.3 @@ -1467,7 +1484,7 @@ packages: license: BSD-3-Clause license_family: BSD purls: - - pkg:pypi/colorama?source=conda-forge-mapping + - pkg:pypi/colorama?source=hash-mapping size: 25170 timestamp: 1666700778190 - kind: conda @@ -1988,21 +2005,23 @@ packages: timestamp: 1715127275924 - kind: conda name: kernel-headers_linux-64 - version: 2.6.32 - build: he073ed8_17 - build_number: 17 + version: 3.10.0 + build: h4a8ded7_16 + build_number: 16 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_17.conda - sha256: fb39d64b48f3d9d1acc3df208911a41f25b6a00bd54935d5973b4739a9edd5b6 - md5: d731b543793afc0433c4fd593e693fce + url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda + sha256: a55044e0f61058a5f6bab5e1dd7f15a1fa7a08ec41501dbfca5ab0fc50b9c0c1 + md5: ff7f38675b226cfb855aebfc32a13e31 + depends: + - _sysroot_linux-64_curr_repodata_hack 3.* constrains: - - sysroot_linux-64 ==2.12 + - sysroot_linux-64 ==2.17 license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 license_family: GPL purls: [] - size: 710627 - timestamp: 1708000830116 + size: 944344 + timestamp: 1720621422017 - kind: conda name: lcms2 version: '2.16' @@ -3853,8 +3872,8 @@ packages: - kind: pypi name: numpy version: 1.24.4 - url: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc + url: https://files.pythonhosted.org/packages/11/10/943cfb579f1a02909ff96464c69893b1d25be3731b5d3652c2e0cf1281ea/numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl + sha256: 1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61 requires_python: '>=3.8' - kind: pypi name: numpy @@ -3865,14 +3884,14 @@ packages: - kind: pypi name: numpy version: 1.24.4 - url: https://files.pythonhosted.org/packages/a7/ae/f53b7b265fdc701e663fbb322a8e9d4b14d9cb7b2385f45ddfabfc4327e4/numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl - sha256: 04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f + url: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + sha256: dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc requires_python: '>=3.8' - kind: pypi name: numpy version: 1.24.4 - url: https://files.pythonhosted.org/packages/11/10/943cfb579f1a02909ff96464c69893b1d25be3731b5d3652c2e0cf1281ea/numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl - sha256: 1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61 + url: https://files.pythonhosted.org/packages/a7/ae/f53b7b265fdc701e663fbb322a8e9d4b14d9cb7b2385f45ddfabfc4327e4/numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl + sha256: 04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f requires_python: '>=3.8' - kind: conda name: openjpeg @@ -4037,7 +4056,7 @@ packages: license: Apache-2.0 license_family: APACHE purls: - - pkg:pypi/packaging?source=conda-forge-mapping + - pkg:pypi/packaging?source=hash-mapping size: 49832 timestamp: 1710076089469 - kind: conda @@ -5485,147 +5504,141 @@ packages: timestamp: 1712963346969 - kind: conda name: rust - version: 1.77.2 - build: h4ff7c5d_1 - build_number: 1 - subdir: osx-arm64 - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.77.2-h4ff7c5d_1.conda - sha256: 176f4e84380cd01e63fe58270b1b365fd2adc241227b1c388adb8b73a13315f9 - md5: b6092e78fbbb95001bba59edbbe05446 + version: 1.80.1 + build: h0a17960_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/rust-1.80.1-h0a17960_0.conda + sha256: 7058519747d4b81f3cab23a0d6b4326c80879d38b2a0bf11cade52fc59980b8f + md5: dba7ad0d2f707fee5e85c6a19042fdb4 depends: - - rust-std-aarch64-apple-darwin 1.77.2 hf6ec828_1 + - __glibc >=2.17,<3.0.a0 + - gcc_impl_linux-64 + - libgcc-ng >=12 + - libzlib >=1.3.1,<2.0a0 + - rust-std-x86_64-unknown-linux-gnu 1.80.1 h2c6d0dc_0 + - sysroot_linux-64 >=2.17 license: MIT license_family: MIT purls: [] - size: 147205067 - timestamp: 1715155248202 + size: 198885602 + timestamp: 1723153698032 - kind: conda name: rust - version: 1.77.2 - build: h70c747d_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/rust-1.77.2-h70c747d_1.conda - sha256: fc981fbc0a5e76fc5fbd6364bd079e114769e71a420c052881d9ae8f5a513b54 - md5: 3c1c59e0515577dd985ae9eb8e70cca3 + version: 1.80.1 + build: h4ff7c5d_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.80.1-h4ff7c5d_0.conda + sha256: 5b296bb663be4c10bf3d07eaaa69c3c5856bd198152a775404e161f6780236bb + md5: 76d236abc95f2d77f7a3c16f1b565b3e depends: - - gcc_impl_linux-64 - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - - rust-std-x86_64-unknown-linux-gnu 1.77.2 h2c6d0dc_1 + - rust-std-aarch64-apple-darwin 1.80.1 hf6ec828_0 license: MIT license_family: MIT purls: [] - size: 186692944 - timestamp: 1715154179188 + size: 197866703 + timestamp: 1723155024117 - kind: conda name: rust - version: 1.77.2 - build: h7e1429e_1 - build_number: 1 + version: 1.80.1 + build: h6c54e5d_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/rust-1.77.2-h7e1429e_1.conda - sha256: 85a2ab529ff0de61bb7fd850cbbf74f1c304d0ab20ff728bb0290c3e1e7b6b44 - md5: d6439f780f9e1b471bffa06dca6ffc1e + url: https://conda.anaconda.org/conda-forge/osx-64/rust-1.80.1-h6c54e5d_0.conda + sha256: 8e799c550545a41baef23a543ffd87620cf67c0afd3494ea40b6081cbf8aabe7 + md5: ecf36b937ded5c641039161f7f5c7f64 depends: - - rust-std-x86_64-apple-darwin 1.77.2 h38e4360_1 + - rust-std-x86_64-apple-darwin 1.80.1 h38e4360_0 license: MIT license_family: MIT purls: [] - size: 192556912 - timestamp: 1715155429820 + size: 202606989 + timestamp: 1723154998091 - kind: conda name: rust - version: 1.77.2 - build: hf8d6059_1 - build_number: 1 + version: 1.80.1 + build: hf8d6059_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/rust-1.77.2-hf8d6059_1.conda - sha256: 8b7a9f161b2841fd076c4952add8cb502748979e7955ebcc4de76ccad3822498 - md5: 21f5e10279d810f1bcaa650606039a5c + url: https://conda.anaconda.org/conda-forge/win-64/rust-1.80.1-hf8d6059_0.conda + sha256: 3d8f926d5db03762a1e3ff723295ea18674c29960e2e501a16c9413304698654 + md5: 385a661cb1746cb6c62eb55712b412dd depends: - - rust-std-x86_64-pc-windows-msvc 1.77.2 h17fc481_1 + - rust-std-x86_64-pc-windows-msvc 1.80.1 h17fc481_0 license: MIT license_family: MIT purls: [] - size: 186782410 - timestamp: 1715157050370 + size: 194534225 + timestamp: 1723155969495 - kind: conda name: rust-std-aarch64-apple-darwin - version: 1.77.2 - build: hf6ec828_1 - build_number: 1 + version: 1.80.1 + build: hf6ec828_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.77.2-hf6ec828_1.conda - sha256: 
763fbe92431b7c0388b4dcfec7fd42d71495ddfd3ea7493d3e85d54e609be2f2 - md5: 223490e17c8ddc7f31f158a0c78900d1 + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.80.1-hf6ec828_0.conda + sha256: 6cd8c3cf93fb8348c815595eced946316bc81a0bf8c6fc8f6b9f27e270734770 + md5: b3b07764d1fa59acf5c356bbb727db20 depends: - __unix constrains: - - rust >=1.77.2,<1.77.3.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT license_family: MIT purls: [] - size: 30979017 - timestamp: 1715153523506 + size: 30991019 + timestamp: 1723152907303 - kind: conda name: rust-std-x86_64-apple-darwin - version: 1.77.2 - build: h38e4360_1 - build_number: 1 + version: 1.80.1 + build: h38e4360_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.77.2-h38e4360_1.conda - sha256: 9a5aabbf00971e97645628d0c3e290d7f253603eec31c2865b0c9ad6362ebfb6 - md5: 80263a26212c5ea9f6e58b9c203d12ca + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.80.1-h38e4360_0.conda + sha256: 56a30b275235975ea4e37f8d703818079601163aca92195a45468b0e7d6beffb + md5: b1ce3c6d57f2cf9f5a8b2448e3b6f499 depends: - __unix constrains: - - rust >=1.77.2,<1.77.3.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT license_family: MIT purls: [] - size: 31784306 - timestamp: 1715153497698 + size: 31988631 + timestamp: 1723152891461 - kind: conda name: rust-std-x86_64-pc-windows-msvc - version: 1.77.2 - build: h17fc481_1 - build_number: 1 + version: 1.80.1 + build: h17fc481_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.77.2-h17fc481_1.conda - sha256: 0392aa88488de836a85eb79857e393ca1119d917b77a895dbe452b8384d9c4b4 - md5: 82211ed614cfbc5d78437b4b050d7ac3 + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.80.1-h17fc481_0.conda + sha256: a4f118c6211f717846c094e58d3baef32215d1a2414d51c3e08b739dce75c28f + md5: f21862b6487af2fe504ca2b78dfec822 depends: - __win constrains: - - rust >=1.77.2,<1.77.3.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT license_family: MIT purls: [] - size: 25155888 - timestamp: 1715156710925 + size: 25255952 + timestamp: 1723155705619 - kind: conda name: rust-std-x86_64-unknown-linux-gnu - version: 1.77.2 - build: h2c6d0dc_1 - build_number: 1 + version: 1.80.1 + build: h2c6d0dc_0 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.77.2-h2c6d0dc_1.conda - sha256: 6a82d49964c98f1510f4e27c50df33ce1abdd2ade2625b9133ce8e34b3819c75 - md5: 116000ac370d62d9e9062d6e8ce8cd70 + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.80.1-h2c6d0dc_0.conda + sha256: 769cb83291804c9faa0de81534ceb3794cd06efd4d5164872bd5527e511f12a7 + md5: 0a5b8783d18a253b0812a5501df297af depends: - __unix constrains: - - rust >=1.77.2,<1.77.3.0a0 + - rust >=1.80.1,<1.80.2.0a0 license: MIT license_family: MIT purls: [] - size: 33923495 - timestamp: 1715154009471 + size: 33938994 + timestamp: 1723153507938 - kind: conda name: setuptools version: 70.0.0 @@ -5640,7 +5653,7 @@ packages: license: MIT license_family: MIT purls: - - pkg:pypi/setuptools?source=conda-forge-mapping + - pkg:pypi/setuptools?source=hash-mapping size: 483015 timestamp: 1716368141661 - kind: conda @@ -5675,21 +5688,23 @@ packages: timestamp: 1620240338595 - kind: conda name: sysroot_linux-64 - version: '2.12' - build: he073ed8_17 - build_number: 17 + version: '2.17' + build: h4a8ded7_16 + 
build_number: 16 subdir: noarch noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_17.conda - sha256: b4e4d685e41cb36cfb16f0cb15d2c61f8f94f56fab38987a44eff95d8a673fb5 - md5: 595db67e32b276298ff3d94d07d47fbf + url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda + sha256: b892b0b9c6dc8efe8b9b5442597d1ab8d65c0dc7e4e5a80f822cbdf0a639bd77 + md5: 223fe8a3ff6d5e78484a9d58eb34d055 depends: - - kernel-headers_linux-64 2.6.32 he073ed8_17 + - _sysroot_linux-64_curr_repodata_hack 3.* + - kernel-headers_linux-64 3.10.0 h4a8ded7_16 + - tzdata license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 license_family: GPL purls: [] - size: 15127123 - timestamp: 1708000843849 + size: 15513240 + timestamp: 1720621429816 - kind: conda name: tinycss2 version: 1.3.0 @@ -5798,21 +5813,6 @@ packages: requires_dist: - numpy requires_python: '>=3.8' -- kind: conda - name: types-requests - version: 2.32.0.20240602 - build: pyhd8ed1ab_0 - subdir: noarch - noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240602-pyhd8ed1ab_0.conda - sha256: 70b9c9ea851150026f85b918a06f91b5c7aeaa40d9fdd847e416e0c667a531c7 - md5: de4dfa59fdd7513b1d69c2b2d9f1acc8 - depends: - - python >=3.6 - - urllib3 >=2 - license: Apache-2.0 AND MIT - size: 26420 - timestamp: 1717310434494 - kind: conda name: types-requests version: 2.32.0.20240602 @@ -5859,7 +5859,7 @@ packages: license: PSF-2.0 license_family: PSF purls: - - pkg:pypi/typing-extensions?source=conda-forge-mapping + - pkg:pypi/typing-extensions?source=hash-mapping size: 39706 timestamp: 1717287863652 - kind: conda @@ -5889,6 +5889,20 @@ packages: license: LicenseRef-Public-Domain size: 119815 timestamp: 1706886945727 +- kind: conda + name: tzdata + version: 2024a + build: h8827d51_1 + build_number: 1 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e + md5: 8bfdead4e0fff0383ae4c9c50d0531bd + license: LicenseRef-Public-Domain + purls: [] + size: 124164 + timestamp: 1724736371498 - kind: conda name: ucrt version: 10.0.22621.0 diff --git a/py-rattler/pixi.toml b/py-rattler/pixi.toml index 3883d9005..e1faa1f07 100644 --- a/py-rattler/pixi.toml +++ b/py-rattler/pixi.toml @@ -14,7 +14,7 @@ license = "BSD-3-Clause" [feature.build.dependencies] maturin = "~=1.2.2" pip = "~=23.2.1" -rust = "~=1.77" +rust = "~=1.80.0" [feature.build.tasks] build = "PIP_REQUIRE_VIRTUALENV=false maturin develop" diff --git a/rust-toolchain b/rust-toolchain index 7c7053aa2..aaceec04e 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.75.0 +1.80.0
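Reviewer aside (not part of the patch): the shard-index cache file handled by write_shard_index_cache/read_cached_index above has a small fixed layout: the MAGIC_NUMBER bytes, a little-endian u32 header length, the rmp-serde-encoded CacheHeader, then the decoded shard index payload. A standalone sketch of a reader for that layout, under those assumptions; `demo_read_cache` is a hypothetical name, and unlike the real read_cached_index it leaves the header bytes undecoded:

use tokio::io::{AsyncRead, AsyncReadExt, BufReader};

const MAGIC_NUMBER: &[u8] = b"SHARD-CACHE-V1";

async fn demo_read_cache<R: AsyncRead + Unpin>(
    reader: &mut BufReader<R>,
) -> std::io::Result<(Vec<u8>, Vec<u8>)> {
    // Reject anything that does not start with the expected magic bytes,
    // so a stale or foreign file is never misread as a cache entry.
    let mut magic = [0u8; MAGIC_NUMBER.len()];
    reader.read_exact(&mut magic).await?;
    if magic != MAGIC_NUMBER {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "invalid magic number",
        ));
    }

    // Length-prefixed header; the payload is simply the rest of the file.
    let header_len = reader.read_u32_le().await? as usize;
    let mut header = vec![0u8; header_len];
    reader.read_exact(&mut header).await?;
    let mut payload = Vec::new();
    reader.read_to_end(&mut payload).await?;
    Ok((header, payload))
}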