Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ hive_metastore = "0.2.0"
home = "=0.5.11"
http = "1.2"
iceberg = { version = "0.7.0", path = "./crates/iceberg" }
iceberg-catalog-loader = { version = "0.7.0", path = "./crates/catalog/loader" }
iceberg-catalog-glue = { version = "0.7.0", path = "./crates/catalog/glue" }
iceberg-catalog-hms = { version = "0.7.0", path = "./crates/catalog/hms" }
iceberg-catalog-sql = { version = "0.7.0", path = "./crates/catalog/sql" }
Expand Down
4 changes: 3 additions & 1 deletion crates/catalog/glue/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ use iceberg::{Error, ErrorKind};

/// Format AWS SDK error into iceberg error
pub(crate) fn from_aws_sdk_error<T>(error: aws_sdk_glue::error::SdkError<T>) -> Error
where T: Debug {
where
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why this change?

T: Debug,
{
Error::new(
ErrorKind::Unexpected,
"Operation failed for hitting aws sdk error".to_string(),
Expand Down
8 changes: 4 additions & 4 deletions crates/catalog/hms/tests/hms_catalog_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -269,10 +269,10 @@ async fn test_list_tables() -> Result<()> {
catalog.create_table(ns.name(), creation).await?;
let result = catalog.list_tables(ns.name()).await?;

assert_eq!(result, vec![TableIdent::new(
ns.name().clone(),
"my_table".to_string()
)]);
assert_eq!(
result,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are many format-related changes; it's better to avoid those.

vec![TableIdent::new(ns.name().clone(), "my_table".to_string())]
);

Ok(())
}
Expand Down
2 changes: 2 additions & 0 deletions crates/catalog/loader/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ use std::collections::HashMap;
use std::sync::Arc;

use async_trait::async_trait;
use iceberg::memory::MemoryCatalogBuilder;
use iceberg::{Catalog, CatalogBuilder, Error, ErrorKind, Result};
use iceberg_catalog_glue::GlueCatalogBuilder;
use iceberg_catalog_hms::HmsCatalogBuilder;
Expand All @@ -31,6 +32,7 @@ type CatalogBuilderFactory = fn() -> Box<dyn BoxedCatalogBuilder>;

/// A registry of catalog builders.
static CATALOG_REGISTRY: &[(&str, CatalogBuilderFactory)] = &[
("memory", || Box::new(MemoryCatalogBuilder::default())),
("rest", || Box::new(RestCatalogBuilder::default())),
("glue", || Box::new(GlueCatalogBuilder::default())),
("s3tables", || Box::new(S3TablesCatalogBuilder::default())),
Expand Down
4 changes: 3 additions & 1 deletion crates/catalog/s3tables/src/catalog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -646,7 +646,9 @@ impl Catalog for S3TablesCatalog {

/// Format AWS SDK error into iceberg error
pub(crate) fn from_aws_sdk_error<T>(error: aws_sdk_s3tables::error::SdkError<T>) -> Error
where T: std::fmt::Debug {
where
T: std::fmt::Debug,
{
Error::new(
ErrorKind::Unexpected,
format!("Operation failed for hitting aws sdk error: {error:?}"),
Expand Down
83 changes: 46 additions & 37 deletions crates/catalog/sql/src/catalog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -362,10 +362,10 @@ impl Catalog for SqlCatalog {
);

let namespace_rows = self
.fetch_rows(&all_namespaces_stmt, vec![
Some(&self.name),
Some(&self.name),
])
.fetch_rows(
&all_namespaces_stmt,
vec![Some(&self.name), Some(&self.name)],
)
.await?;

let mut namespaces = HashSet::<NamespaceIdent>::with_capacity(namespace_rows.len());
Expand Down Expand Up @@ -1299,11 +1299,10 @@ mod tests {
let namespace_ident_1 = NamespaceIdent::new("a".into());
let namespace_ident_2 = NamespaceIdent::from_strs(vec!["a", "b"]).unwrap();
let namespace_ident_3 = NamespaceIdent::new("b".into());
create_namespaces(&catalog, &vec![
&namespace_ident_1,
&namespace_ident_2,
&namespace_ident_3,
])
create_namespaces(
&catalog,
&vec![&namespace_ident_1, &namespace_ident_2, &namespace_ident_3],
)
.await;

assert_eq!(
Expand Down Expand Up @@ -1336,11 +1335,10 @@ mod tests {
let namespace_ident_1 = NamespaceIdent::new("a".into());
let namespace_ident_2 = NamespaceIdent::from_strs(vec!["a", "b"]).unwrap();
let namespace_ident_3 = NamespaceIdent::new("c".into());
create_namespaces(&catalog, &vec![
&namespace_ident_1,
&namespace_ident_2,
&namespace_ident_3,
])
create_namespaces(
&catalog,
&vec![&namespace_ident_1, &namespace_ident_2, &namespace_ident_3],
)
.await;

assert_eq!(
Expand All @@ -1366,13 +1364,16 @@ mod tests {
let namespace_ident_3 = NamespaceIdent::from_strs(vec!["a", "b"]).unwrap();
let namespace_ident_4 = NamespaceIdent::from_strs(vec!["a", "c"]).unwrap();
let namespace_ident_5 = NamespaceIdent::new("b".into());
create_namespaces(&catalog, &vec![
&namespace_ident_1,
&namespace_ident_2,
&namespace_ident_3,
&namespace_ident_4,
&namespace_ident_5,
])
create_namespaces(
&catalog,
&vec![
&namespace_ident_1,
&namespace_ident_2,
&namespace_ident_3,
&namespace_ident_4,
&namespace_ident_5,
],
)
.await;

assert_eq!(
Expand Down Expand Up @@ -1673,11 +1674,14 @@ mod tests {
let namespace_ident_a = NamespaceIdent::new("a".into());
let namespace_ident_a_b = NamespaceIdent::from_strs(vec!["a", "b"]).unwrap();
let namespace_ident_a_b_c = NamespaceIdent::from_strs(vec!["a", "b", "c"]).unwrap();
create_namespaces(&catalog, &vec![
&namespace_ident_a,
&namespace_ident_a_b,
&namespace_ident_a_b_c,
])
create_namespaces(
&catalog,
&vec![
&namespace_ident_a,
&namespace_ident_a_b,
&namespace_ident_a_b_c,
],
)
.await;

catalog
Expand Down Expand Up @@ -2039,9 +2043,10 @@ mod tests {
.await
.unwrap();

assert_eq!(catalog.list_tables(&namespace_ident).await.unwrap(), vec![
dst_table_ident
],);
assert_eq!(
catalog.list_tables(&namespace_ident).await.unwrap(),
vec![dst_table_ident],
);
}

#[tokio::test]
Expand Down Expand Up @@ -2085,9 +2090,10 @@ mod tests {
.await
.unwrap();

assert_eq!(catalog.list_tables(&namespace_ident).await.unwrap(), vec![
table_ident
],);
assert_eq!(
catalog.list_tables(&namespace_ident).await.unwrap(),
vec![table_ident],
);
}

#[tokio::test]
Expand All @@ -2097,11 +2103,14 @@ mod tests {
let namespace_ident_a = NamespaceIdent::new("a".into());
let namespace_ident_a_b = NamespaceIdent::from_strs(vec!["a", "b"]).unwrap();
let namespace_ident_a_b_c = NamespaceIdent::from_strs(vec!["a", "b", "c"]).unwrap();
create_namespaces(&catalog, &vec![
&namespace_ident_a,
&namespace_ident_a_b,
&namespace_ident_a_b_c,
])
create_namespaces(
&catalog,
&vec![
&namespace_ident_a,
&namespace_ident_a_b,
&namespace_ident_a_b_c,
],
)
.await;

let src_table_ident = TableIdent::new(namespace_ident_a_b_c.clone(), "tbl1".into());
Expand Down
18 changes: 9 additions & 9 deletions crates/iceberg/src/arrow/caching_delete_file_loader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -640,9 +640,10 @@ mod tests {
Arc::new(arrow_schema::Schema::new(fields))
};

let equality_deletes_to_write = RecordBatch::try_new(equality_delete_schema.clone(), vec![
col_y, col_z, col_a, col_s, col_b,
])
let equality_deletes_to_write = RecordBatch::try_new(
equality_delete_schema.clone(),
vec![col_y, col_z, col_a, col_s, col_b],
)
.unwrap();

let path = format!("{}/equality-deletes-1.parquet", &table_location);
Expand Down Expand Up @@ -839,12 +840,11 @@ mod tests {
let file_path_col = Arc::new(StringArray::from_iter_values(&file_path_values));
let pos_col = Arc::new(Int64Array::from_iter_values(vec![0i64, 1, 2, 3]));

let positional_deletes_to_write =
RecordBatch::try_new(positional_delete_schema.clone(), vec![
file_path_col,
pos_col,
])
.unwrap();
let positional_deletes_to_write = RecordBatch::try_new(
positional_delete_schema.clone(),
vec![file_path_col, pos_col],
)
.unwrap();

let props = WriterProperties::builder()
.set_compression(Compression::SNAPPY)
Expand Down
11 changes: 5 additions & 6 deletions crates/iceberg/src/arrow/delete_filter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -281,12 +281,11 @@ pub(crate) mod tests {
let pos_vals = pos_values.get(n - 1).unwrap();
let pos_col = Arc::new(Int64Array::from_iter_values(pos_vals.clone()));

let positional_deletes_to_write =
RecordBatch::try_new(positional_delete_schema.clone(), vec![
file_path_col.clone(),
pos_col.clone(),
])
.unwrap();
let positional_deletes_to_write = RecordBatch::try_new(
positional_delete_schema.clone(),
vec![file_path_col.clone(), pos_col.clone()],
)
.unwrap();

let file = File::create(format!(
"{}/pos-del-{}.parquet",
Expand Down
11 changes: 7 additions & 4 deletions crates/iceberg/src/arrow/partition_value_calculator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -206,10 +206,13 @@ mod tests {
Field::new("name", DataType::Utf8, false),
]));

let batch = RecordBatch::try_new(arrow_schema, vec![
Arc::new(Int32Array::from(vec![10, 20, 30])),
Arc::new(StringArray::from(vec!["a", "b", "c"])),
])
let batch = RecordBatch::try_new(
arrow_schema,
vec![
Arc::new(Int32Array::from(vec![10, 20, 30])),
Arc::new(StringArray::from(vec!["a", "b", "c"])),
],
)
.unwrap();

// Calculate partition values
Expand Down
Loading
Loading