1 change: 1 addition & 0 deletions migrations/20251202020754_remove-file-public.down.sql
@@ -0,0 +1 @@
+ALTER TABLE files ADD COLUMN public BOOL NOT NULL DEFAULT FALSE;
1 change: 1 addition & 0 deletions migrations/20251202020754_remove-file-public.up.sql
@@ -0,0 +1 @@
+ALTER TABLE files DROP COLUMN public;
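Note on rollback semantics (an observation, not part of the diff): the up migration drops the `public` column along with its data, so the down migration can only re-add the column with its `FALSE` default; flags stored before the migration ran are not recoverable. Assuming the project uses the standard sqlx-cli workflow, the pair can be exercised like this:

    sqlx migrate run      # applies 20251202020754_remove-file-public.up.sql
    sqlx migrate revert   # re-adds the column via the .down.sql (defaulting to FALSE)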
9 changes: 0 additions & 9 deletions src/config.rs
@@ -45,11 +45,6 @@ pub struct Config {
     #[builder(default)]
     pub(crate) s3_bucket_is_temporary: bool,
 
-    // CloudFront domain which we can access
-    // public S3 files through
-    #[cfg_attr(test, builder(setter(into)))]
-    pub(crate) s3_static_root_path: String,
-
     // Github authentication
     pub(crate) github_accesstoken: Option<String>,
     pub(crate) github_updater_min_rate_limit: u32,
@@ -209,10 +204,6 @@ impl Config {
             .s3_bucket(env("DOCSRS_S3_BUCKET", "rust-docs-rs".to_string())?)
             .s3_region(env("S3_REGION", "us-west-1".to_string())?)
             .s3_endpoint(maybe_env("S3_ENDPOINT")?)
-            .s3_static_root_path(env(
-                "DOCSRS_S3_STATIC_ROOT_PATH",
-                "https://static.docs.rs".to_string(),
-            )?)
             .github_accesstoken(maybe_env("DOCSRS_GITHUB_ACCESSTOKEN")?)
            .github_updater_min_rate_limit(env("DOCSRS_GITHUB_UPDATER_MIN_RATE_LIMIT", 2500u32)?)
             .gitlab_accesstoken(maybe_env("DOCSRS_GITLAB_ACCESSTOKEN")?)
4 changes: 0 additions & 4 deletions src/db/file.rs
@@ -80,14 +80,10 @@ pub async fn add_path_into_remote_archive<P: AsRef<Path> + std::fmt::Debug>(
     storage: &AsyncStorage,
     archive_path: &str,
     path: P,
-    public_access: bool,
 ) -> Result<(Vec<FileEntry>, CompressionAlgorithm)> {
     let (file_list, algorithm) = storage
         .store_all_in_archive(archive_path, path.as_ref())
         .await?;
-    if public_access {
-        storage.set_public_access(archive_path, true).await?;
-    }
     Ok((file_list, algorithm))
 }
 
4 changes: 0 additions & 4 deletions src/docbuilder/rustwide_builder.rs
@@ -680,7 +680,6 @@ impl RustwideBuilder {
             &self.async_storage,
             &source_archive_path(name, version),
             build.host_source_dir(),
-            false,
         ))?;
         algs.insert(new_alg);
         files_list
@@ -776,7 +775,6 @@ impl RustwideBuilder {
             &self.async_storage,
             &rustdoc_archive_path(name, version),
             local_storage.path(),
-            true,
         ))?;
         let documentation_size = file_list.iter().map(|info| info.size).sum::<u64>();
         self.metrics
@@ -1077,7 +1075,6 @@ impl RustwideBuilder {
 
                 self.storage
                     .store_one_uncompressed(&path, compressed_json.clone())?;
-                self.storage.set_public_access(&path, true)?;
             }
         }
 
@@ -1641,7 +1638,6 @@ mod tests {
                 Some(*alg),
             );
             assert!(storage.exists(&path)?);
-            assert!(storage.get_public_access(&path)?);
 
             let ext = compression::file_extension_for(*alg);
 
34 changes: 0 additions & 34 deletions src/storage/database.rs
@@ -33,40 +33,6 @@ impl DatabaseBackend {
         .await?)
     }
 
-    pub(super) async fn get_public_access(&self, path: &str) -> Result<bool> {
-        match sqlx::query_scalar!(
-            "SELECT public
-             FROM files
-             WHERE path = $1",
-            path
-        )
-        .fetch_optional(&self.pool)
-        .await?
-        {
-            Some(public) => Ok(public),
-            None => Err(super::PathNotFoundError.into()),
-        }
-    }
-
-    pub(super) async fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
-        if sqlx::query!(
-            "UPDATE files
-             SET public = $2
-             WHERE path = $1",
-            path,
-            public,
-        )
-        .execute(&self.pool)
-        .await?
-        .rows_affected()
-            == 1
-        {
-            Ok(())
-        } else {
-            Err(super::PathNotFoundError.into())
-        }
-    }
-
     pub(super) async fn get_stream(
         &self,
         path: &str,
66 changes: 0 additions & 66 deletions src/storage/mod.rs
@@ -293,22 +293,6 @@ impl AsyncStorage {
         }
     }
 
-    #[instrument]
-    pub(crate) async fn get_public_access(&self, path: &str) -> Result<bool> {
-        match &self.backend {
-            StorageBackend::Database(db) => db.get_public_access(path).await,
-            StorageBackend::S3(s3) => s3.get_public_access(path).await,
-        }
-    }
-
-    #[instrument]
-    pub(crate) async fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
-        match &self.backend {
-            StorageBackend::Database(db) => db.set_public_access(path, public).await,
-            StorageBackend::S3(s3) => s3.set_public_access(path, public).await,
-        }
-    }
-
     /// Fetch a rustdoc file from our blob storage.
     /// * `name` - the crate name
     /// * `version` - the crate version
@@ -935,15 +919,6 @@ impl Storage {
         self.runtime.block_on(self.inner.exists(path))
     }
 
-    pub(crate) fn get_public_access(&self, path: &str) -> Result<bool> {
-        self.runtime.block_on(self.inner.get_public_access(path))
-    }
-
-    pub(crate) fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
-        self.runtime
-            .block_on(self.inner.set_public_access(path, public))
-    }
-
     pub(crate) fn fetch_source_file(
         &self,
         name: &str,
@@ -1546,35 +1521,6 @@ mod backend_tests {
         Ok(())
     }
 
-    fn test_set_public(storage: &Storage) -> Result<()> {
-        let path: &str = "foo/bar.txt";
-
-        storage.store_blobs(vec![BlobUpload {
-            path: path.into(),
-            mime: mime::TEXT_PLAIN,
-            compression: None,
-            content: b"test content\n".to_vec(),
-        }])?;
-
-        assert!(!storage.get_public_access(path)?);
-        storage.set_public_access(path, true)?;
-        assert!(storage.get_public_access(path)?);
-        storage.set_public_access(path, false)?;
-        assert!(!storage.get_public_access(path)?);
-
-        for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
-            assert!(
-                storage
-                    .set_public_access(path, true)
-                    .unwrap_err()
-                    .downcast_ref::<PathNotFoundError>()
-                    .is_some()
-            );
-        }
-
-        Ok(())
-    }
-
     fn test_get_object(storage: &Storage) -> Result<()> {
         let path: &str = "foo/bar.txt";
         let blob = BlobUpload {
@@ -1593,9 +1539,6 @@ mod backend_tests {
         // it seems like minio does it too :)
         assert_eq!(found.etag, Some(compute_etag(&blob.content)));
 
-        // default visibility is private
-        assert!(!storage.get_public_access(path)?);
-
         for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
             assert!(
                 storage
@@ -1604,14 +1547,6 @@ mod backend_tests {
                     .downcast_ref::<PathNotFoundError>()
                     .is_some()
             );
-
-            assert!(
-                storage
-                    .get_public_access(path)
-                    .unwrap_err()
-                    .downcast_ref::<PathNotFoundError>()
-                    .is_some()
-            );
         }
 
         Ok(())
@@ -2065,7 +2000,6 @@ mod backend_tests {
         test_delete_prefix_without_matches,
         test_delete_percent,
         test_exists_without_remote_archive,
-        test_set_public,
     }
 
     tests_with_metrics {
48 changes: 1 addition & 47 deletions src/storage/s3.rs
@@ -7,7 +7,7 @@ use aws_sdk_s3::{
     Client,
     config::{Region, retry::RetryConfig},
     error::{ProvideErrorMetadata, SdkError},
-    types::{Delete, ObjectIdentifier, Tag, Tagging},
+    types::{Delete, ObjectIdentifier},
 };
 use aws_smithy_types_convert::date_time::DateTimeExt;
 use axum_extra::headers;
@@ -20,9 +20,6 @@ use futures_util::{
 use std::sync::Arc;
 use tracing::{error, instrument, warn};
 
-const PUBLIC_ACCESS_TAG: &str = "static-cloudfront-access";
-const PUBLIC_ACCESS_VALUE: &str = "allow";
-
 // error codes to check for when trying to determine if an error is
 // a "NOT FOUND" error.
 // Definition taken from the S3 rust SDK,
@@ -138,49 +135,6 @@ impl S3Backend {
         }
     }
 
-    pub(super) async fn get_public_access(&self, path: &str) -> Result<bool, Error> {
-        Ok(self
-            .client
-            .get_object_tagging()
-            .bucket(&self.bucket)
-            .key(path)
-            .send()
-            .await
-            .convert_errors()?
-            .tag_set()
-            .iter()
-            .filter(|tag| tag.key() == PUBLIC_ACCESS_TAG)
-            .any(|tag| tag.value() == PUBLIC_ACCESS_VALUE))
-    }
-
-    pub(super) async fn set_public_access(&self, path: &str, public: bool) -> Result<(), Error> {
-        self.client
-            .put_object_tagging()
-            .bucket(&self.bucket)
-            .key(path)
-            .tagging(if public {
-                Tagging::builder()
-                    .tag_set(
-                        Tag::builder()
-                            .key(PUBLIC_ACCESS_TAG)
-                            .value(PUBLIC_ACCESS_VALUE)
-                            .build()
-                            .context("could not build tag")?,
-                    )
-                    .build()
-                    .context("could not build tags")?
-            } else {
-                Tagging::builder()
-                    .set_tag_set(Some(vec![]))
-                    .build()
-                    .context("could not build tags")?
-            })
-            .send()
-            .await
-            .convert_errors()
-            .map(|_| ())
-    }
-
     #[instrument(skip(self))]
     pub(super) async fn get_stream(
         &self,
20 changes: 6 additions & 14 deletions src/test/fakes.rs
@@ -411,22 +411,14 @@ impl<'a> FakeRelease<'a> {
             source_directory.display()
         );
         if archive_storage {
-            let (archive, public) = match kind {
-                FileKind::Rustdoc => {
-                    (rustdoc_archive_path(&package.name, &package.version), true)
-                }
-                FileKind::Sources => {
-                    (source_archive_path(&package.name, &package.version), false)
-                }
+            let archive = match kind {
+                FileKind::Rustdoc => rustdoc_archive_path(&package.name, &package.version),
+                FileKind::Sources => source_archive_path(&package.name, &package.version),
             };
             debug!("store in archive: {:?}", archive);
-            let (files_list, new_alg) = crate::db::add_path_into_remote_archive(
-                storage,
-                &archive,
-                source_directory,
-                public,
-            )
-            .await?;
+            let (files_list, new_alg) =
+                crate::db::add_path_into_remote_archive(storage, &archive, source_directory)
+                    .await?;
             Ok((files_list, new_alg))
         } else {
             let prefix = match kind {
16 changes: 16 additions & 0 deletions src/web/extractors/rustdoc.rs
@@ -574,6 +574,13 @@ impl RustdocParams {
         EscapedURI::from_path(path)
     }
 
+    pub(crate) fn zip_download_url(&self) -> EscapedURI {
+        EscapedURI::from_path(format!(
+            "/crate/{}/{}/download",
+            self.name, self.req_version
+        ))
+    }
+
     pub(crate) fn json_download_url(
         &self,
         wanted_compression: Option<CompressionAlgorithm>,
@@ -1806,4 +1813,13 @@ mod tests {
             )
         );
     }
+
+    #[test]
+    fn test_zip_download_url() {
+        let params = RustdocParams::new(KRATE).with_req_version(ReqVersion::Exact(V1));
+        assert_eq!(
+            params.zip_download_url(),
+            format!("/crate/{KRATE}/{V1}/download")
+        );
+    }
 }