757 changes: 335 additions & 422 deletions Cargo.lock

Large diffs are not rendered by default.

24 changes: 12 additions & 12 deletions Cargo.toml
@@ -42,14 +42,14 @@ rust-version = "1.88"
anyhow = "1.0.72"
apache-avro = { version = "0.21", features = ["zstandard"] }
array-init = "2"
arrow-arith = "57.0"
arrow-array = "57.0"
arrow-buffer = "57.0"
arrow-cast = "57.0"
arrow-ord = "57.0"
arrow-schema = "57.0"
arrow-select = "57.0"
arrow-string = "57.0"
arrow-arith = "57.1"
arrow-array = "57.1"
arrow-buffer = "57.1"
arrow-cast = "57.1"
arrow-ord = "57.1"
arrow-schema = "57.1"
arrow-select = "57.1"
arrow-string = "57.1"
as-any = "0.3.2"
async-trait = "0.1.89"
aws-config = "1.8.7"
@@ -62,9 +62,9 @@ bytes = "1.10"
chrono = "0.4.41"
clap = { version = "4.5.48", features = ["derive", "cargo"] }
ctor = "0.2.8"
datafusion = "51.0"
datafusion-cli = "51.0"
datafusion-sqllogictest = "51.0"
datafusion = { git = "https://github.com/apache/datafusion", rev = "9a9ff8d" }
datafusion-cli = { git = "https://github.com/apache/datafusion", rev = "9a9ff8d" }
datafusion-sqllogictest = { git = "https://github.com/apache/datafusion", rev = "9a9ff8d" }
derive_builder = "0.20"
dirs = "6"
enum-ordinalize = "4.3.0"
@@ -101,7 +101,7 @@ num-bigint = "0.4.6"
once_cell = "1.20"
opendal = "0.55.0"
ordered-float = "4"
parquet = "57.0"
parquet = "57.1"
pilota = "0.11.10"
port_scanner = "0.1.5"
pretty_assertions = "1.4"
4 changes: 2 additions & 2 deletions bindings/python/Cargo.toml
@@ -31,11 +31,11 @@ license = "Apache-2.0"
crate-type = ["cdylib"]

[dependencies]
arrow = { version = "57.0", features = ["pyarrow", "chrono-tz"] }
arrow = { version = "57.1", features = ["pyarrow", "chrono-tz"] }
iceberg = { path = "../../crates/iceberg" }
pyo3 = { version = "0.26", features = ["extension-module", "abi3-py310"] }
iceberg-datafusion = { path = "../../crates/integrations/datafusion" }
-datafusion-ffi = { version = "51.0" }
+datafusion-ffi = { git = "https://github.com/apache/datafusion", rev = "9a9ff8d" }
tokio = { version = "1.46.1", default-features = false }

[profile.release]
2 changes: 2 additions & 0 deletions crates/iceberg/testdata/file_io_gcs/docker-compose.yaml
@@ -18,6 +18,8 @@
services:
gcs-server:
image: fsouza/fake-gcs-server@sha256:36b0116fae5236e8def76ccb07761a9ca323e476f366a5f4bf449cac19deaf2d
+ports:
+- 4443:4443
expose:
- 4443
command: --scheme http
2 changes: 2 additions & 0 deletions crates/iceberg/testdata/file_io_s3/docker-compose.yaml
@@ -18,6 +18,8 @@
services:
minio:
image: minio/minio:RELEASE.2024-02-26T09-33-48Z
+ports:
+- 9002:9000
expose:
- 9000
- 9001
8 changes: 1 addition & 7 deletions crates/iceberg/tests/file_io_gcs_test.rs
@@ -53,13 +53,7 @@ mod tests {
async fn get_file_io_gcs() -> FileIO {
set_up();

-let ip = DOCKER_COMPOSE_ENV
-.read()
-.unwrap()
-.as_ref()
-.unwrap()
-.get_container_ip("gcs-server");
-let addr = SocketAddr::new(ip, FAKE_GCS_PORT);
+let addr = SocketAddr::new("127.0.0.1".parse().unwrap(), FAKE_GCS_PORT);

// A bucket must exist for FileIO
create_bucket(FAKE_GCS_BUCKET, addr.to_string())
22 changes: 7 additions & 15 deletions crates/iceberg/tests/file_io_s3_test.rs
@@ -18,7 +18,7 @@
//! Integration tests for FileIO S3.
#[cfg(all(test, feature = "storage-s3"))]
mod tests {
-use std::net::{IpAddr, SocketAddr};
+use std::net::SocketAddr;
use std::sync::{Arc, RwLock};

use async_trait::async_trait;
@@ -32,7 +32,7 @@ mod tests {
use reqsign::{AwsCredential, AwsCredentialLoad};
use reqwest::Client;

-const MINIO_PORT: u16 = 9000;
+const MINIO_HOST_PORT: u16 = 9002;
static DOCKER_COMPOSE_ENV: RwLock<Option<DockerCompose>> = RwLock::new(None);

#[ctor]
@@ -55,8 +55,7 @@
async fn get_file_io() -> FileIO {
set_up();

-let container_ip = get_container_ip("minio");
-let minio_socket_addr = SocketAddr::new(container_ip, MINIO_PORT);
+let minio_socket_addr = SocketAddr::new("127.0.0.1".parse().unwrap(), MINIO_HOST_PORT);

FileIOBuilder::new("s3")
.with_props(vec![
@@ -69,10 +68,8 @@ mod tests {
.unwrap()
}

-fn get_container_ip(service_name: &str) -> IpAddr {
-let guard = DOCKER_COMPOSE_ENV.read().unwrap();
-let docker_compose = guard.as_ref().unwrap();
-docker_compose.get_container_ip(service_name)
+fn get_minio_socket_addr() -> SocketAddr {
+SocketAddr::new("127.0.0.1".parse().unwrap(), MINIO_HOST_PORT)
}

#[tokio::test]
@@ -200,9 +197,7 @@ mod tests {
let mock_loader = MockCredentialLoader::new_minio();
let custom_loader = CustomAwsCredentialLoader::new(Arc::new(mock_loader));

-// Get container info for endpoint
-let container_ip = get_container_ip("minio");
-let minio_socket_addr = SocketAddr::new(container_ip, MINIO_PORT);
+let minio_socket_addr = get_minio_socket_addr();

// Build FileIO with custom credential loader
let file_io_with_custom_creds = FileIOBuilder::new("s3")
@@ -214,7 +209,6 @@ mod tests {
.build()
.unwrap();

-// Test that the FileIO was built successfully with the custom loader
match file_io_with_custom_creds.exists("s3://bucket1/any").await {
Ok(_) => {}
Err(e) => panic!("Failed to check existence of bucket: {e}"),
@@ -229,9 +223,7 @@ mod tests {
let mock_loader = MockCredentialLoader::new(None);
let custom_loader = CustomAwsCredentialLoader::new(Arc::new(mock_loader));

-// Get container info for endpoint
-let container_ip = get_container_ip("minio");
-let minio_socket_addr = SocketAddr::new(container_ip, MINIO_PORT);
+let minio_socket_addr = get_minio_socket_addr();

// Build FileIO with custom credential loader
let file_io_with_custom_creds = FileIOBuilder::new("s3")
11 changes: 7 additions & 4 deletions crates/integration_tests/src/lib.rs
@@ -23,6 +23,7 @@ use iceberg_test_utils::docker::DockerCompose;
use iceberg_test_utils::{normalize_test_name, set_up};

const REST_CATALOG_PORT: u16 = 8181;
+const MINIO_PORT: u16 = 9000;

pub struct TestFixture {
pub _docker_compose: DockerCompose,
@@ -40,17 +41,19 @@ pub fn set_test_fixture(func: &str) -> TestFixture {
docker_compose.down();
docker_compose.up();

-let rest_catalog_ip = docker_compose.get_container_ip("rest");
-let minio_ip = docker_compose.get_container_ip("minio");
+// Use localhost with mapped ports instead of container IPs
+// Container IPs (172.x.x.x) are not routable from macOS host with Docker Desktop
+let rest_catalog_host = "127.0.0.1";
+let minio_host = "127.0.0.1";

let catalog_config = HashMap::from([
(
REST_CATALOG_PROP_URI.to_string(),
format!("http://{rest_catalog_ip}:{REST_CATALOG_PORT}"),
format!("http://{rest_catalog_host}:{REST_CATALOG_PORT}"),
),
(
S3_ENDPOINT.to_string(),
format!("http://{}:{}", minio_ip, 9000),
format!("http://{}:{}", minio_host, MINIO_PORT),
),
(S3_ACCESS_KEY_ID.to_string(), "admin".to_string()),
(S3_SECRET_ACCESS_KEY.to_string(), "password".to_string()),
1 change: 1 addition & 0 deletions crates/integration_tests/testdata/docker-compose.yaml
@@ -50,6 +50,7 @@ services:
networks:
rest_bridge:
ports:
+- 9000:9000
- 9001:9001
expose:
- 9001
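
A note on the changes above: the new comments in crates/integration_tests/src/lib.rs carry the reasoning for this PR: container IPs (172.x.x.x) are not routable from a macOS host under Docker Desktop, so the fixtures now target localhost together with the host ports published through the ports: mappings added to the compose files. The snippet below is an illustrative sketch only, not code from this PR. It assumes the rest service already publishes port 8181 on the host (that mapping is not visible in the excerpts above); the main() wrapper exists only so the sketch compiles on its own.

// Constant names mirror crates/integration_tests/src/lib.rs; everything else is illustrative.
const REST_CATALOG_PORT: u16 = 8181; // assumed to be published 1:1 by the rest service
const MINIO_PORT: u16 = 9000; // published as 9000:9000 in the integration-test compose file

fn main() {
    // Localhost stands in for the container IPs the old code looked up via docker-compose.
    let rest_catalog_host = "127.0.0.1";
    let minio_host = "127.0.0.1";

    // Same format strings that set_test_fixture uses for the catalog config.
    let catalog_uri = format!("http://{rest_catalog_host}:{REST_CATALOG_PORT}");
    let s3_endpoint = format!("http://{}:{}", minio_host, MINIO_PORT);

    assert_eq!(catalog_uri, "http://127.0.0.1:8181");
    assert_eq!(s3_endpoint, "http://127.0.0.1:9000");
}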