
Commit

add download endpoint for rustdoc archive
syphar committed Oct 8, 2022
1 parent dd66a73 commit 7f2074f
Showing 15 changed files with 476 additions and 18 deletions.
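In short: rustdoc archives are now stored with public access (presumably so they can be served and downloaded through the `static.docs.rs` CloudFront domain added to the config), source archives stay private, and post-build CDN invalidations now cover both the web and the static distributions. A rough sketch of the resulting flow, using the internal helpers as they appear in this diff — the wrapper function itself, its setup, and the omitted `use` statements are illustrative, not part of the commit:

// Illustrative sketch only. `Storage`, `Config`, `CdnBackend`,
// `add_path_into_remote_archive`, the `*_archive_path` helpers and
// `invalidate_crate` are the items touched by this commit; this wrapper
// function is hypothetical and crate-internal imports are omitted.
fn publish_build_artifacts(
    storage: &Storage,
    config: &Config,
    cdn: &CdnBackend,
    name: &str,
    version: &str,
    rustdoc_dir: &std::path::Path,
    source_dir: &std::path::Path,
) -> anyhow::Result<()> {
    // Rustdoc archives are uploaded with the new `public_access = true` flag.
    add_path_into_remote_archive(storage, &rustdoc_archive_path(name, version), rustdoc_dir, true)?;

    // Source archives keep the default private visibility.
    add_path_into_remote_archive(storage, &source_archive_path(name, version), source_dir, false)?;

    // The flag is persisted per path and can be queried later.
    assert!(storage.get_public_access(&rustdoc_archive_path(name, version))?);

    // Invalidate both the web and the static CloudFront distributions.
    invalidate_crate(config, cdn, name)?;
    Ok(())
}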
39 changes: 32 additions & 7 deletions src/build_queue.rs
@@ -550,7 +550,8 @@ mod tests {
fn test_invalidate_cdn_after_build_and_error() {
crate::test::wrapper(|env| {
env.override_config(|config| {
config.cloudfront_distribution_id_web = Some("distribution_id".into());
config.cloudfront_distribution_id_web = Some("distribution_id_web".into());
config.cloudfront_distribution_id_static = Some("distribution_id_static".into());
});

let queue = env.build_queue();
@@ -573,8 +574,16 @@
assert_eq!(
*ir,
[
("distribution_id".into(), "/will_succeed*".into()),
("distribution_id".into(), "/crate/will_succeed*".into()),
("distribution_id_web".into(), "/will_succeed*".into()),
("distribution_id_web".into(), "/crate/will_succeed*".into()),
(
"distribution_id_static".into(),
"/rustdoc/will_succeed*".into()
),
(
"distribution_id_static".into(),
"/sources/will_succeed*".into()
),
]
);
}
@@ -588,10 +597,26 @@
assert_eq!(
*ir,
[
("distribution_id".into(), "/will_succeed*".into()),
("distribution_id".into(), "/crate/will_succeed*".into()),
("distribution_id".into(), "/will_fail*".into()),
("distribution_id".into(), "/crate/will_fail*".into()),
("distribution_id_web".into(), "/will_succeed*".into()),
("distribution_id_web".into(), "/crate/will_succeed*".into()),
(
"distribution_id_static".into(),
"/rustdoc/will_succeed*".into()
),
(
"distribution_id_static".into(),
"/sources/will_succeed*".into()
),
("distribution_id_web".into(), "/will_fail*".into()),
("distribution_id_web".into(), "/crate/will_fail*".into()),
(
"distribution_id_static".into(),
"/rustdoc/will_fail*".into()
),
(
"distribution_id_static".into(),
"/sources/will_fail*".into()
),
]
);
}
38 changes: 37 additions & 1 deletion src/cdn.rs
@@ -123,7 +123,17 @@ pub(crate) fn invalidate_crate(config: &Config, cdn: &CdnBackend, name: &str) ->
distribution_id,
&[&format!("/{}*", name), &format!("/crate/{}*", name)],
)
.context("error creating CDN invalidation")?;
.context("error creating web CDN invalidation")?;
}
if let Some(distribution_id) = config.cloudfront_distribution_id_static.as_ref() {
cdn.create_invalidation(
distribution_id,
&[
&format!("/rustdoc/{}*", name),
&format!("/sources/{}*", name),
],
)
.context("error creating static CDN invalidation")?;
}

Ok(())
@@ -168,6 +178,32 @@ mod tests {
})
}

#[test]
fn invalidate_a_crate() {
crate::test::wrapper(|env| {
env.override_config(|config| {
config.cloudfront_distribution_id_web = Some("distribution_id_web".into());
config.cloudfront_distribution_id_static = Some("distribution_id_static".into());
});
invalidate_crate(&*env.config(), &*env.cdn(), "krate")?;

assert!(matches!(*env.cdn(), CdnBackend::Dummy(_)));
if let CdnBackend::Dummy(ref invalidation_requests) = *env.cdn() {
let ir = invalidation_requests.lock().unwrap();
assert_eq!(
*ir,
[
("distribution_id_web".into(), "/krate*".into()),
("distribution_id_web".into(), "/crate/krate*".into()),
("distribution_id_static".into(), "/rustdoc/krate*".into()),
("distribution_id_static".into(), "/sources/krate*".into()),
]
);
}
Ok(())
});
}

async fn get_mock_config() -> aws_sdk_cloudfront::Config {
let cfg = aws_config::from_env()
.region(Region::new("eu-central-1"))
9 changes: 9 additions & 0 deletions src/config.rs
@@ -26,6 +26,10 @@ pub struct Config {
#[cfg(test)]
pub(crate) s3_bucket_is_temporary: bool,

// CloudFront domain through which we can
// access public S3 files
pub(crate) s3_static_domain: String,

// Github authentication
pub(crate) github_accesstoken: Option<String>,
pub(crate) github_updater_min_rate_limit: u32,
@@ -67,6 +71,8 @@ pub struct Config {
// CloudFront distribution ID for the web server.
// Will be used for invalidation-requests.
pub cloudfront_distribution_id_web: Option<String>,
/// same for the `static.docs.rs` distribution
pub cloudfront_distribution_id_static: Option<String>,

// Build params
pub(crate) build_attempts: u16,
@@ -125,6 +131,8 @@ impl Config {
#[cfg(test)]
s3_bucket_is_temporary: false,

s3_static_domain: env("S3_STATIC_DOMAIN", "https://static.docs.rs".to_string())?,

github_accesstoken: maybe_env("DOCSRS_GITHUB_ACCESSTOKEN")?,
github_updater_min_rate_limit: env("DOCSRS_GITHUB_UPDATER_MIN_RATE_LIMIT", 2500)?,

@@ -148,6 +156,7 @@ impl Config {
cdn_backend: env("DOCSRS_CDN_BACKEND", CdnKind::Dummy)?,

cloudfront_distribution_id_web: maybe_env("CLOUDFRONT_DISTRIBUTION_ID_WEB")?,
cloudfront_distribution_id_static: maybe_env("CLOUDFRONT_DISTRIBUTION_ID_STATIC")?,

local_archive_cache_path: env(
"DOCSRS_ARCHIVE_INDEX_CACHE_PATH",
4 changes: 4 additions & 0 deletions src/db/file.rs
@@ -38,8 +38,12 @@ pub fn add_path_into_remote_archive<P: AsRef<Path>>(
storage: &Storage,
archive_path: &str,
path: P,
public_access: bool,
) -> Result<(Value, CompressionAlgorithm)> {
let (file_list, algorithm) = storage.store_all_in_archive(archive_path, path.as_ref())?;
if public_access {
storage.set_public_access(archive_path, true)?;
}
Ok((
file_list_to_json(file_list.into_iter().collect()),
algorithm,
5 changes: 5 additions & 0 deletions src/db/migrate.rs
@@ -848,6 +848,11 @@ pub fn migrate(version: Option<Version>, conn: &mut Client) -> crate::error::Res
"CREATE INDEX builds_release_id_idx ON builds (rid);",
"DROP INDEX builds_release_id_idx;",
),
sql_migration!(
context, 35, "add public visibility to files table",
"ALTER TABLE files ADD COLUMN public BOOL NOT NULL DEFAULT FALSE;",
"ALTER TABLE files DROP COLUMN public;"
),

];

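The migration above only adds the `public` column (defaulting to FALSE); the helpers added later in this diff (src/storage/database.rs) use it at run time. A minimal illustrative sketch of that behaviour against a database that has applied migration 35 — the connection string, the example path, and the assumption that a `files` row already exists for it are hypothetical, not taken from this commit:

// Sketch only: mirrors the queries in DatabaseBackend::set_public_access /
// get_public_access rather than calling the docs.rs code itself.
use postgres::{Client, NoTls};

fn main() -> anyhow::Result<()> {
    // DATABASE_URL is an assumption of this sketch, not a docs.rs setting.
    let mut client = Client::connect(&std::env::var("DATABASE_URL")?, NoTls)?;
    let path = "rustdoc/example/example-0.1.0.zip"; // hypothetical archive path

    // `public` defaults to FALSE, so existing files stay private;
    // flipping it is a single UPDATE, as in set_public_access.
    let updated = client.execute("UPDATE files SET public = $2 WHERE path = $1", &[&path, &true])?;
    anyhow::ensure!(updated == 1, "the path must already exist in `files`");

    // Reading it back mirrors get_public_access.
    let row = client.query_one("SELECT public FROM files WHERE path = $1", &[&path])?;
    assert!(row.get::<_, bool>(0));
    Ok(())
}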
2 changes: 2 additions & 0 deletions src/docbuilder/rustwide_builder.rs
@@ -410,6 +410,7 @@ impl RustwideBuilder {
&self.storage,
&rustdoc_archive_path(name, version),
local_storage.path(),
true,
)?;
algs.insert(new_alg);
};
@@ -421,6 +422,7 @@
&self.storage,
&source_archive_path(name, version),
build.host_source_dir(),
false,
)?;
algs.insert(new_alg);
files_list
26 changes: 26 additions & 0 deletions src/storage/database.rs
@@ -21,6 +21,32 @@ impl DatabaseBackend {
Ok(conn.query(query, &[&path])?[0].get(0))
}

pub(super) fn get_public_access(&self, path: &str) -> Result<bool> {
match self.pool.get()?.query_opt(
"SELECT public
FROM files
WHERE path = $1",
&[&path],
)? {
Some(row) => Ok(row.get(0)),
None => Err(super::PathNotFoundError.into()),
}
}

pub(super) fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
if self.pool.get()?.execute(
"UPDATE files
SET public = $2
WHERE path = $1",
&[&path, &public],
)? == 1
{
Ok(())
} else {
Err(super::PathNotFoundError.into())
}
}

pub(super) fn get(
&self,
path: &str,
57 changes: 55 additions & 2 deletions src/storage/mod.rs
@@ -140,6 +140,20 @@ impl Storage {
}
}

pub(crate) fn get_public_access(&self, path: &str) -> Result<bool> {
match &self.backend {
StorageBackend::Database(db) => db.get_public_access(path),
StorageBackend::S3(s3) => s3.get_public_access(path),
}
}

pub(crate) fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
match &self.backend {
StorageBackend::Database(db) => db.set_public_access(path, public),
StorageBackend::S3(s3) => s3.set_public_access(path, public),
}
}

fn max_file_size_for(&self, path: &str) -> usize {
if path.ends_with(".html") {
self.config.max_file_size_html
@@ -620,9 +634,38 @@ mod backend_tests {
Ok(())
}

fn test_set_public(storage: &Storage) -> Result<()> {
let path: &str = "foo/bar.txt";

storage.store_blobs(vec![Blob {
path: path.into(),
mime: "text/plain".into(),
date_updated: Utc::now(),
compression: None,
content: b"test content\n".to_vec(),
}])?;

assert!(!storage.get_public_access(path)?);
storage.set_public_access(path, true)?;
assert!(storage.get_public_access(path)?);
storage.set_public_access(path, false)?;
assert!(!storage.get_public_access(path)?);

for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
assert!(storage
.set_public_access(path, true)
.unwrap_err()
.downcast_ref::<PathNotFoundError>()
.is_some());
}

Ok(())
}

fn test_get_object(storage: &Storage) -> Result<()> {
let path: &str = "foo/bar.txt";
let blob = Blob {
path: "foo/bar.txt".into(),
path: path.into(),
mime: "text/plain".into(),
date_updated: Utc::now(),
compression: None,
@@ -631,16 +674,25 @@

storage.store_blobs(vec![blob.clone()])?;

let found = storage.get("foo/bar.txt", std::usize::MAX)?;
let found = storage.get(path, std::usize::MAX)?;
assert_eq!(blob.mime, found.mime);
assert_eq!(blob.content, found.content);

// default visibility is private
assert!(!storage.get_public_access(path)?);

for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
assert!(storage
.get(path, std::usize::MAX)
.unwrap_err()
.downcast_ref::<PathNotFoundError>()
.is_some());

assert!(storage
.get_public_access(path)
.unwrap_err()
.downcast_ref::<PathNotFoundError>()
.is_some());
}

Ok(())
@@ -1028,6 +1080,7 @@ mod backend_tests {
test_delete_prefix_without_matches,
test_delete_percent,
test_exists_without_remote_archive,
test_set_public,
}

tests_with_metrics {
(Diffs for the remaining changed files are not shown here.)