diff --git a/src/docbuilder/mod.rs b/src/docbuilder/mod.rs index 430d3cb64..f0d3fdb03 100644 --- a/src/docbuilder/mod.rs +++ b/src/docbuilder/mod.rs @@ -8,4 +8,6 @@ pub use self::rustwide_builder::{ }; #[cfg(test)] -pub use self::rustwide_builder::RUSTDOC_JSON_COMPRESSION_ALGORITHMS; +pub use self::rustwide_builder::{ + RUSTDOC_JSON_COMPRESSION_ALGORITHMS, read_format_version_from_rustdoc_json, +}; diff --git a/src/docbuilder/rustwide_builder.rs b/src/docbuilder/rustwide_builder.rs index 04683bf9f..2980638e5 100644 --- a/src/docbuilder/rustwide_builder.rs +++ b/src/docbuilder/rustwide_builder.rs @@ -54,7 +54,7 @@ pub const RUSTDOC_JSON_COMPRESSION_ALGORITHMS: &[CompressionAlgorithm] = &[CompressionAlgorithm::Zstd, CompressionAlgorithm::Gzip]; /// read the format version from a rustdoc JSON file. -fn read_format_version_from_rustdoc_json( +pub fn read_format_version_from_rustdoc_json( reader: impl std::io::Read, ) -> Result { let reader = BufReader::new(reader); diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 0019b094d..8f5daea1c 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -419,14 +419,22 @@ impl AsyncStorage { self.get_stream(path).await?.materialize(max_size).await } - /// get a decompressing stream to an object in storage + /// get a raw stream to an object in storage + /// + /// We don't decompress ourselves, S3 only decompresses with a correct + /// `Content-Encoding` header set, which we don't. #[instrument] - pub(crate) async fn get_stream(&self, path: &str) -> Result { - let blob = match &self.backend { + pub(crate) async fn get_raw_stream(&self, path: &str) -> Result { + match &self.backend { StorageBackend::Database(db) => db.get_stream(path, None).await, StorageBackend::S3(s3) => s3.get_stream(path, None).await, - }?; - Ok(blob.decompress().await?) + } + } + + /// get a decompressing stream to an object in storage. 
+ #[instrument] + pub(crate) async fn get_stream(&self, path: &str) -> Result { + Ok(self.get_raw_stream(path).await?.decompress().await?) } /// get, decompress and materialize part of an object from store diff --git a/src/test/mod.rs b/src/test/mod.rs index eed807a8e..f86cbaaed 100644 --- a/src/test/mod.rs +++ b/src/test/mod.rs @@ -154,7 +154,7 @@ pub(crate) trait AxumRouterTestExt { path: &str, cache_policy: cache::CachePolicy, config: &Config, - ) -> Result<()>; + ) -> Result; async fn assert_success(&self, path: &str) -> Result; async fn get(&self, path: &str) -> Result; async fn post(&self, path: &str) -> Result; @@ -269,7 +269,7 @@ impl AxumRouterTestExt for axum::Router { path: &str, cache_policy: cache::CachePolicy, config: &Config, - ) -> Result<()> { + ) -> Result { let response = self.get(path).await?; let status = response.status(); assert!( @@ -278,7 +278,7 @@ impl AxumRouterTestExt for axum::Router { response.redirect_target().unwrap_or_default() ); response.assert_cache_control(cache_policy, config); - Ok(()) + Ok(response) } async fn get(&self, path: &str) -> Result { diff --git a/src/web/extractors/mod.rs b/src/web/extractors/mod.rs index 09edabfb7..583dcba94 100644 --- a/src/web/extractors/mod.rs +++ b/src/web/extractors/mod.rs @@ -3,4 +3,4 @@ mod path; pub(crate) mod rustdoc; pub(crate) use context::DbConnection; -pub(crate) use path::{Path, PathFileExtension}; +pub(crate) use path::{Path, WantedCompression}; diff --git a/src/web/extractors/path.rs b/src/web/extractors/path.rs index 62e2aecb6..44a9a47aa 100644 --- a/src/web/extractors/path.rs +++ b/src/web/extractors/path.rs @@ -1,11 +1,15 @@ //! 
custom axum extractors for path parameters -use crate::web::error::AxumNope; +use crate::{ + storage::{CompressionAlgorithm, compression::compression_from_file_extension}, + web::error::AxumNope, +}; use anyhow::anyhow; use axum::{ RequestPartsExt, extract::{FromRequestParts, OptionalFromRequestParts}, http::request::Parts, }; +use derive_more::Deref; /// custom axum `Path` extractor that uses our own AxumNope::BadRequest /// as error response instead of a plain text "bad request" @@ -97,6 +101,58 @@ where } } +/// get wanted compression from file extension in path. +/// +/// TODO: we could also additionally read the accept-encoding header here. But especially +/// in combination with priorities it's complex to parse correctly. So for now only +/// file extensions in the URL. +/// When using Accept-Encoding, we also have to return "Vary: Accept-Encoding" to ensure +/// the cache behaves correctly. +#[derive(Debug, Clone, Deref, Default, PartialEq)] +pub(crate) struct WantedCompression(pub(crate) CompressionAlgorithm); + +impl FromRequestParts for WantedCompression +where + S: Send + Sync, +{ + type Rejection = AxumNope; + + async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result { + parts + .extract::>() + .await + .expect("can never fail") + .ok_or_else(|| AxumNope::BadRequest(anyhow!("compression extension not found in path"))) + } +} + +impl OptionalFromRequestParts for WantedCompression +where + S: Send + Sync, +{ + type Rejection = AxumNope; + + async fn from_request_parts( + parts: &mut Parts, + _state: &S, + ) -> Result, Self::Rejection> { + if let Some(ext) = parts + .extract::>() + .await + .expect("can't fail") + .map(|ext| ext.0) + { + Ok(Some(WantedCompression( + compression_from_file_extension(&ext).ok_or_else(|| { + AxumNope::BadRequest(anyhow!("unknown compression file extension: {}", ext)) + })?, + ))) + } else { + Ok(None) + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/web/extractors/rustdoc.rs 
b/src/web/extractors/rustdoc.rs index e20ffda1e..4d7d29b99 100644 --- a/src/web/extractors/rustdoc.rs +++ b/src/web/extractors/rustdoc.rs @@ -2,6 +2,7 @@ use crate::{ db::BuildId, + storage::CompressionAlgorithm, web::{ MatchedRelease, MetaData, ReqVersion, error::AxumNope, escaped_uri::EscapedURI, extractors::Path, url_decode, @@ -573,6 +574,30 @@ impl RustdocParams { EscapedURI::from_path(path) } + pub(crate) fn json_download_url( + &self, + wanted_compression: Option, + format_version: Option<&str>, + ) -> EscapedURI { + let mut path = format!("/crate/{}/{}", self.name, self.req_version); + + if let Some(doc_target) = self.doc_target() { + path.push_str(&format!("/{doc_target}")); + } + + if let Some(format_version) = format_version { + path.push_str(&format!("/json/{format_version}")); + } else { + path.push_str("/json"); + } + + if let Some(wanted_compression) = wanted_compression { + path.push_str(&format!(".{}", wanted_compression.file_extension())); + } + + EscapedURI::from_path(path) + } + pub(crate) fn features_url(&self) -> EscapedURI { EscapedURI::from_path(format!( "/crate/{}/{}/features", @@ -863,7 +888,7 @@ mod tests { use super::*; use crate::{ db::types::version::Version, - test::{AxumResponseTestExt, AxumRouterTestExt}, + test::{AxumResponseTestExt, AxumRouterTestExt, V1}, }; use axum::{Router, routing::get}; use test_case::test_case; @@ -1719,4 +1744,66 @@ mod tests { format!("/{KRATE}/0.14.0/{KRATE}/trait.Itertools.html") ) } + + #[test_case(None)] + #[test_case(Some(CompressionAlgorithm::Gzip))] + #[test_case(Some(CompressionAlgorithm::Zstd))] + fn test_plain_json_url(wanted_compression: Option) { + let mut params = RustdocParams::new(KRATE) + .with_page_kind(PageKind::Rustdoc) + .with_req_version(ReqVersion::Exact(V1)); + + assert_eq!( + params.json_download_url(wanted_compression, None), + format!( + "/crate/{KRATE}/{V1}/json{}", + wanted_compression + .map(|c| format!(".{}", c.file_extension())) + .unwrap_or_default() + ) + ); + + params = 
params.with_doc_target("some-target"); + + assert_eq!( + params.json_download_url(wanted_compression, None), + format!( + "/crate/{KRATE}/{V1}/some-target/json{}", + wanted_compression + .map(|c| format!(".{}", c.file_extension())) + .unwrap_or_default() + ) + ); + } + + #[test_case(None)] + #[test_case(Some(CompressionAlgorithm::Gzip))] + #[test_case(Some(CompressionAlgorithm::Zstd))] + fn test_plain_json_url_with_format(wanted_compression: Option) { + let mut params = RustdocParams::new(KRATE) + .with_page_kind(PageKind::Rustdoc) + .with_req_version(ReqVersion::Exact(V1)); + + assert_eq!( + params.json_download_url(wanted_compression, Some("42")), + format!( + "/crate/{KRATE}/{V1}/json/42{}", + wanted_compression + .map(|c| format!(".{}", c.file_extension())) + .unwrap_or_default() + ) + ); + + params = params.with_doc_target("some-target"); + + assert_eq!( + params.json_download_url(wanted_compression, Some("42")), + format!( + "/crate/{KRATE}/{V1}/some-target/json/42{}", + wanted_compression + .map(|c| format!(".{}", c.file_extension())) + .unwrap_or_default() + ) + ); + } } diff --git a/src/web/rustdoc.rs b/src/web/rustdoc.rs index 608a33495..aeb7048d2 100644 --- a/src/web/rustdoc.rs +++ b/src/web/rustdoc.rs @@ -1,11 +1,11 @@ -//! rustdoc handler +//! 
rustdoc handler use crate::{ AsyncStorage, BUILD_VERSION, Config, InstanceMetrics, RUSTDOC_STATIC_STORAGE_PREFIX, registry_api::OwnerKind, storage::{ - CompressionAlgorithm, RustdocJsonFormatVersion, StreamingBlob, - compression::compression_from_file_extension, rustdoc_archive_path, rustdoc_json_path, + CompressionAlgorithm, RustdocJsonFormatVersion, StreamingBlob, rustdoc_archive_path, + rustdoc_json_path, }, utils::{self, Dependency}, web::{ @@ -16,7 +16,7 @@ use crate::{ error::{AxumNope, AxumResult}, escaped_uri::EscapedURI, extractors::{ - DbConnection, Path, PathFileExtension, + DbConnection, Path, WantedCompression, rustdoc::{PageKind, RustdocParams}, }, file::StreamingFile, @@ -879,37 +879,39 @@ pub(crate) async fn badge_handler( #[derive(Clone, Deserialize, Debug)] pub(crate) struct JsonDownloadParams { - pub(crate) name: String, - pub(crate) version: ReqVersion, - pub(crate) target: Option, pub(crate) format_version: Option, } #[instrument(skip_all)] pub(crate) async fn json_download_handler( - Path(params): Path, + mut params: RustdocParams, + Path(json_params): Path, mut conn: DbConnection, - Extension(config): Extension>, Extension(storage): Extension>, - file_extension: Option, -) -> AxumResult { - // TODO: we could also additionally read the accept-encoding header here. But especially - // in combination with priorities it's complex to parse correctly. So for now only - // file extensions in the URL. - // When using Accept-Encoding, we also have to return "Vary: Accept-Encoding" to ensure - // the cache behaves correctly. - let wanted_compression = - if let Some(ext) = file_extension.map(|ext| ext.0) { - Some(compression_from_file_extension(&ext).ok_or_else(|| { - AxumNope::BadRequest(anyhow!("unknown compression file extension")) - })?)
- } else { - None - }; - - let matched_release = match_version(&mut conn, &params.name, &params.version) + wanted_compression: Option, + if_none_match: Option>, +) -> AxumResult { + let matched_release = match_version(&mut conn, params.name(), params.req_version()) .await? - .assume_exact_name()?; + .assume_exact_name()? + .into_canonical_req_version_or_else(|version| { + AxumNope::Redirect( + params.clone().with_req_version(version).json_download_url( + wanted_compression.clone().map(|c| c.0), + json_params.format_version.as_deref(), + ), + CachePolicy::ForeverInCdn, + ) + })?; + + // this validates the doc target too + params = params.apply_matched_release(&matched_release); + + if params.doc_target().is_none() && !params.inner_path().is_empty() { + // an unknown target leads to doc-target being removed, and the target being + // added to the inner path + return Err(AxumNope::TargetNotFound); + } if !matched_release.rustdoc_status() { // without docs we'll never have JSON docs too @@ -918,29 +920,7 @@ pub(crate) async fn json_download_handler( let krate = CrateDetails::from_matched_release(&mut conn, matched_release).await?; - let target = if let Some(wanted_target) = params.target { - if krate - .metadata - .doc_targets - .as_ref() - .expect("we are checking rustdoc_status() above, so we always have metadata") - .iter() - .any(|s| s == &wanted_target) - { - wanted_target - } else { - return Err(AxumNope::TargetNotFound); - } - } else { - krate - .metadata - .default_target - .as_ref() - .expect("we are checking rustdoc_status() above, so we always have metadata") - .to_string() - }; - - let wanted_format_version = if let Some(request_format_version) = params.format_version { + let wanted_format_version = if let Some(request_format_version) = json_params.format_version { // axum doesn't support extension suffixes in the route yet, not as parameter, and not // statically, when combined with a parameter (like `.../{format_version}.gz`).
// This is solved in matchit 0.8.6, but not yet in axum: @@ -950,7 +930,7 @@ pub(crate) async fn json_download_handler( // Because of this we have cases where `format_version` also contains a file extension // suffix like `.zstd`. `wanted_compression` is already extracted above, so we only // need to strip the extension from the `format_version` before trying to parse it. - let stripped_format_version = if let Some(wanted_compression) = wanted_compression { + let stripped_format_version = if let Some(ref wanted_compression) = wanted_compression { request_format_version .strip_suffix(&format!(".{}", wanted_compression.file_extension())) .expect("should exist") @@ -965,47 +945,53 @@ pub(crate) async fn json_download_handler( RustdocJsonFormatVersion::Latest }; - let wanted_compression = wanted_compression.unwrap_or_default(); + let wanted_compression = wanted_compression.map(|c| c.0).unwrap_or_default(); + + let target = params.doc_target().unwrap_or_else(|| { + params + .default_target() + .expect("with applied matched version we always have a default target") + }); let storage_path = rustdoc_json_path( &krate.name, &krate.version, - &target, + target, wanted_format_version, Some(wanted_compression), ); - let redirect = |storage_path: &str| { - super::axum_cached_redirect( - format!("{}/{}", config.s3_static_root_path, storage_path), - CachePolicy::ForeverInCdn, - ) - }; - - if storage.exists(&storage_path).await? { - Ok(redirect(&storage_path)?) - } else { - // we have old files on the bucket where we stored zstd compressed files, - // with content-encoding=zstd & just a `.json` file extension. - // As a fallback, we redirect to that, if zstd was requested (which is also the default). - if wanted_compression == CompressionAlgorithm::Zstd { - let storage_path = rustdoc_json_path( - &krate.name, - &krate.version, - &target, - wanted_format_version, - None, - ); - - if storage.exists(&storage_path).await? 
{ + let mut response = match storage.get_raw_stream(&storage_path).await { + Ok(file) => StreamingFile(file).into_response(if_none_match.as_deref()), + Err(err) if matches!(err.downcast_ref(), Some(crate::storage::PathNotFoundError)) => { + // we have old files on the bucket where we stored zstd compressed files, + // with content-encoding=zstd & just a `.json` file extension. + // As a fallback, we redirect to that, if zstd was requested (which is also the default). + if wanted_compression == CompressionAlgorithm::Zstd { + let storage_path = rustdoc_json_path( + &krate.name, + &krate.version, + target, + wanted_format_version, + None, + ); // we have an old file with a `.json` extension, // redirect to that as fallback - return Ok(redirect(&storage_path)?); + StreamingFile(storage.get_raw_stream(&storage_path).await?) + .into_response(if_none_match.as_deref()) + } else { + return Err(AxumNope::ResourceNotFound); + } } + Err(err) => return Err(err.into()), + }; - Err(AxumNope::ResourceNotFound) - } + // StreamingFile::into_response automatically sets the default cache-policy for + // static assets (ForeverInCdnAndBrowser). + // Here we override it with the standard policy for build output. 
+ response.extensions_mut().insert(CachePolicy::ForeverInCdn); + + Ok(response) } #[instrument(skip_all)] @@ -1071,9 +1057,9 @@ mod test { use crate::{ Config, db::types::version::Version, - docbuilder::RUSTDOC_JSON_COMPRESSION_ALGORITHMS, + docbuilder::{RUSTDOC_JSON_COMPRESSION_ALGORITHMS, read_format_version_from_rustdoc_json}, registry_api::{CrateOwner, OwnerKind}, - storage::compression::file_extension_for, + storage::decompress, test::*, utils::Dependency, web::{cache::CachePolicy, encode_url_path}, @@ -3331,105 +3317,60 @@ mod test { }); } - #[test_case( - "latest/json", - "0.2.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/json.gz", - "0.2.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Gzip - )] - #[test_case( - "0.1/json", - "0.1.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Zstd; - "semver" - )] - #[test_case( - "0.1.0/json", - "0.1.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/json/latest", - "0.2.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/json/latest.gz", - "0.2.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Gzip - )] - #[test_case( - "latest/json/42", - "0.2.0", - "x86_64-unknown-linux-gnu", - RustdocJsonFormatVersion::Version(42), - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/i686-pc-windows-msvc/json", - "0.2.0", - "i686-pc-windows-msvc", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/i686-pc-windows-msvc/json.gz", - "0.2.0", - "i686-pc-windows-msvc", - RustdocJsonFormatVersion::Latest, - CompressionAlgorithm::Gzip - )] - #[test_case( - "latest/i686-pc-windows-msvc/json/42", - "0.2.0", - 
"i686-pc-windows-msvc", - RustdocJsonFormatVersion::Version(42), - CompressionAlgorithm::Zstd - )] - #[test_case( - "latest/i686-pc-windows-msvc/json/42.gz", - "0.2.0", - "i686-pc-windows-msvc", - RustdocJsonFormatVersion::Version(42), - CompressionAlgorithm::Gzip - )] - #[test_case( - "latest/i686-pc-windows-msvc/json/42.zst", - "0.2.0", - "i686-pc-windows-msvc", - RustdocJsonFormatVersion::Version(42), - CompressionAlgorithm::Zstd - )] + #[test_case("/crate/dummy/0.1/json", "/crate/dummy/0.1.0/json")] + #[tokio::test(flavor = "multi_thread")] + async fn json_download_semver_redirect(path: &str, expected_redirect: &str) -> Result<()> { + let env = TestEnvironment::new().await?; + + env.fake_release() + .await + .name("dummy") + .version("0.1.0") + .archive_storage(true) + .default_target("x86_64-unknown-linux-gnu") + .add_target("i686-pc-windows-msvc") + .create() + .await?; + + env.fake_release() + .await + .name("dummy") + .version("0.2.0") + .archive_storage(true) + .default_target("x86_64-unknown-linux-gnu") + .add_target("i686-pc-windows-msvc") + .create() + .await?; + + let web = env.web_app().await; + + web.assert_redirect_cached( + path, + expected_redirect, + CachePolicy::ForeverInCdn, + env.config(), + ) + .await?; + Ok(()) + } + + #[test_case("latest/json", CompressionAlgorithm::Zstd)] + #[test_case("latest/json.gz", CompressionAlgorithm::Gzip)] + #[test_case("0.1.0/json", CompressionAlgorithm::Zstd)] + #[test_case("latest/json/latest", CompressionAlgorithm::Zstd)] + #[test_case("latest/json/latest.gz", CompressionAlgorithm::Gzip)] + #[test_case("latest/json/42", CompressionAlgorithm::Zstd)] + #[test_case("latest/i686-pc-windows-msvc/json", CompressionAlgorithm::Zstd)] + #[test_case("latest/i686-pc-windows-msvc/json.gz", CompressionAlgorithm::Gzip)] + #[test_case("latest/i686-pc-windows-msvc/json/42", CompressionAlgorithm::Zstd)] + #[test_case("latest/i686-pc-windows-msvc/json/42.gz", CompressionAlgorithm::Gzip)] + 
#[test_case("latest/i686-pc-windows-msvc/json/42.zst", CompressionAlgorithm::Zstd)] #[tokio::test(flavor = "multi_thread")] async fn json_download( request_path_suffix: &str, - redirect_version: &str, - redirect_target: &str, - redirect_format_version: RustdocJsonFormatVersion, - redirect_compression: CompressionAlgorithm, + expected_compression: CompressionAlgorithm, ) -> Result<()> { - let env = TestEnvironment::with_config( - TestEnvironment::base_config() - .s3_static_root_path("https://static.docs.rs") - .build()?, - ) - .await?; + let env = TestEnvironment::new().await?; env.fake_release() .await @@ -3453,16 +3394,23 @@ mod test { let web = env.web_app().await; - let compression_ext = file_extension_for(redirect_compression); - - web.assert_redirect_cached_unchecked( - &format!("/crate/dummy/{request_path_suffix}"), - &format!("https://static.docs.rs/rustdoc-json/dummy/{redirect_version}/{redirect_target}/\ - dummy_{redirect_version}_{redirect_target}_{redirect_format_version}.json.{compression_ext}"), - CachePolicy::ForeverInCdn, - env.config(), - ) + let path = format!("/crate/dummy/{request_path_suffix}"); + let resp = web + .assert_success_cached(&path, CachePolicy::ForeverInCdn, env.config()) .await?; + web.assert_conditional_get(&path, &resp).await?; + + { + let compressed_body = web.assert_success(&path).await?.bytes().await?.to_vec(); + let json_body = decompress(&*compressed_body, expected_compression, usize::MAX)?; + assert_eq!( + read_format_version_from_rustdoc_json(&*json_body)?, + // for both "Latest", and "Version(42)", the version number in json is the + // specific number. 
+ "42".parse().unwrap() + ); + } + Ok(()) } @@ -3472,12 +3420,7 @@ mod test { async fn test_json_download_fallback_to_old_files_without_compression_extension( ext: &str, ) -> Result<()> { - let env = TestEnvironment::with_config( - TestEnvironment::base_config() - .s3_static_root_path("https://static.docs.rs") - .build()?, - ) - .await?; + let env = TestEnvironment::new().await?; const NAME: &str = "dummy"; const VERSION: Version = Version::new(0, 1, 0); @@ -3523,16 +3466,11 @@ mod test { let web = env.web_app().await; - web.assert_redirect_cached_unchecked( - &format!("/crate/dummy/latest/json{ext}"), - &format!( - "https://static.docs.rs/rustdoc-json/{NAME}/{VERSION}/{TARGET}/\ - {NAME}_{VERSION}_{TARGET}_{FORMAT_VERSION}.json" // without .zstd - ), - CachePolicy::ForeverInCdn, - env.config(), - ) - .await?; + let path = format!("/crate/dummy/latest/json{ext}"); + let resp = web + .assert_success_cached(&path, CachePolicy::ForeverInCdn, env.config()) + .await?; + web.assert_conditional_get(&path, &resp).await?; Ok(()) } @@ -3542,12 +3480,7 @@ mod test { #[test_case("0.42.0/json"; "unknown version")] #[tokio::test(flavor = "multi_thread")] async fn json_download_not_found(request_path_suffix: &str) -> Result<()> { - let env = TestEnvironment::with_config( - TestEnvironment::base_config() - .s3_static_root_path("https://static.docs.rs") - .build()?, - ) - .await?; + let env = TestEnvironment::new().await?; env.fake_release() .await