Use Config System for Object Store Integration Tests
tustvold committed Aug 2, 2023
1 parent de5aa48 commit a386305
Showing 7 changed files with 57 additions and 332 deletions.
21 changes: 9 additions & 12 deletions .github/workflows/object_store.yml
@@ -82,12 +82,19 @@ jobs:
# Run integration tests
TEST_INTEGRATION: 1
EC2_METADATA_ENDPOINT: http://localhost:1338
AZURE_USE_EMULATOR: "1"
AZURE_CONTAINER_NAME: test-bucket
AZURE_STORAGE_USE_EMULATOR: "1"
AZURITE_BLOB_STORAGE_URL: "http://localhost:10000"
AZURITE_QUEUE_STORAGE_URL: "http://localhost:10001"
AWS_BUCKET: test-bucket
AWS_DEFAULT_REGION: "us-east-1"
AWS_ACCESS_KEY_ID: test
AWS_SECRET_ACCESS_KEY: test
AWS_ENDPOINT: http://localhost:4566
AWS_ALLOW_HTTP: true
HTTP_URL: "http://localhost:8080"
GOOGLE_BUCKET: test-bucket
GOOGLE_SERVICE_ACCOUNT: "/tmp/gcs.json"
OBJECT_STORE_BUCKET: test-bucket

steps:
- uses: actions/checkout@v3
@@ -105,11 +112,6 @@ jobs:
run: docker run -d -p 8080:80 rclone/rclone serve webdav /data --addr :80

- name: Setup LocalStack (AWS emulation)
env:
AWS_DEFAULT_REGION: "us-east-1"
AWS_ACCESS_KEY_ID: test
AWS_SECRET_ACCESS_KEY: test
AWS_ENDPOINT: http://localhost:4566
run: |
docker run -d -p 4566:4566 localstack/localstack:2.0
docker run -d -p 1338:1338 amazon/amazon-ec2-metadata-mock:v1.9.2 --imdsv2
@@ -128,11 +130,6 @@ jobs:
rustup default stable
- name: Run object_store tests
env:
OBJECT_STORE_AWS_DEFAULT_REGION: "us-east-1"
OBJECT_STORE_AWS_ACCESS_KEY_ID: test
OBJECT_STORE_AWS_SECRET_ACCESS_KEY: test
OBJECT_STORE_AWS_ENDPOINT: http://localhost:4566
run: cargo test --features=aws,azure,gcp,http

# test the object_store crate builds against wasm32 in stable rust
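The job-level env block at the top of this workflow diff is what the tests now read directly. A minimal, illustrative sketch (not part of this commit): with the `aws` feature enabled and those variables exported, an S3 client for the tests can be built entirely from the environment, assuming `from_env()` also picks up `AWS_BUCKET` and `AWS_ALLOW_HTTP` as the tests below rely on.

```rust
// Illustrative only: AmazonS3Builder::from_env() reads the workflow's AWS_*
// variables (credentials, region, endpoint, bucket, allow-http), replacing the
// per-step OBJECT_STORE_AWS_* overrides this commit deletes. build() returns
// an error if required variables are missing from the environment.
use object_store::aws::AmazonS3Builder;

fn main() -> Result<(), object_store::Error> {
    let store = AmazonS3Builder::from_env().build()?;
    println!("configured S3 store: {store}");
    Ok(())
}
```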
1 change: 0 additions & 1 deletion object_store/Cargo.toml
@@ -71,7 +71,6 @@ aws = ["cloud"]
http = ["cloud"]

[dev-dependencies] # In alphabetical order
dotenv = "0.15.0"
tempfile = "3.1.0"
futures-test = "0.3"
rand = "0.8"
171 changes: 16 additions & 155 deletions object_store/src/aws/mod.rs
@@ -1067,153 +1067,9 @@ mod tests {
};
use bytes::Bytes;
use std::collections::HashMap;
use std::env;

const NON_EXISTENT_NAME: &str = "nonexistentname";

// Helper macro to skip tests if TEST_INTEGRATION and the AWS
// environment variables are not set. Returns a configured
// AmazonS3Builder
macro_rules! maybe_skip_integration {
() => {{
dotenv::dotenv().ok();

let required_vars = [
"OBJECT_STORE_AWS_DEFAULT_REGION",
"OBJECT_STORE_BUCKET",
"OBJECT_STORE_AWS_ACCESS_KEY_ID",
"OBJECT_STORE_AWS_SECRET_ACCESS_KEY",
];
let unset_vars: Vec<_> = required_vars
.iter()
.filter_map(|&name| match env::var(name) {
Ok(_) => None,
Err(_) => Some(name),
})
.collect();
let unset_var_names = unset_vars.join(", ");

let force = env::var("TEST_INTEGRATION");

if force.is_ok() && !unset_var_names.is_empty() {
panic!(
"TEST_INTEGRATION is set, \
but variable(s) {} need to be set",
unset_var_names
);
} else if force.is_err() {
eprintln!(
"skipping AWS integration test - set {}TEST_INTEGRATION to run",
if unset_var_names.is_empty() {
String::new()
} else {
format!("{} and ", unset_var_names)
}
);
return;
} else {
let config = AmazonS3Builder::new()
.with_access_key_id(
env::var("OBJECT_STORE_AWS_ACCESS_KEY_ID")
.expect("already checked OBJECT_STORE_AWS_ACCESS_KEY_ID"),
)
.with_secret_access_key(
env::var("OBJECT_STORE_AWS_SECRET_ACCESS_KEY")
.expect("already checked OBJECT_STORE_AWS_SECRET_ACCESS_KEY"),
)
.with_region(
env::var("OBJECT_STORE_AWS_DEFAULT_REGION")
.expect("already checked OBJECT_STORE_AWS_DEFAULT_REGION"),
)
.with_bucket_name(
env::var("OBJECT_STORE_BUCKET")
.expect("already checked OBJECT_STORE_BUCKET"),
)
.with_allow_http(true);

let config = if let Ok(endpoint) = env::var("OBJECT_STORE_AWS_ENDPOINT") {
config.with_endpoint(endpoint)
} else {
config
};

let config = if let Ok(token) = env::var("OBJECT_STORE_AWS_SESSION_TOKEN")
{
config.with_token(token)
} else {
config
};

let config = if let Ok(virtual_hosted_style_request) =
env::var("OBJECT_STORE_VIRTUAL_HOSTED_STYLE_REQUEST")
{
config.with_virtual_hosted_style_request(
virtual_hosted_style_request.trim().parse().unwrap(),
)
} else {
config
};

config
}
}};
}

#[test]
fn s3_test_config_from_env() {
let aws_access_key_id = env::var("AWS_ACCESS_KEY_ID")
.unwrap_or_else(|_| "object_store:fake_access_key_id".into());
let aws_secret_access_key = env::var("AWS_SECRET_ACCESS_KEY")
.unwrap_or_else(|_| "object_store:fake_secret_key".into());

let aws_default_region = env::var("AWS_DEFAULT_REGION")
.unwrap_or_else(|_| "object_store:fake_default_region".into());

let aws_endpoint = env::var("AWS_ENDPOINT")
.unwrap_or_else(|_| "object_store:fake_endpoint".into());
let aws_session_token = env::var("AWS_SESSION_TOKEN")
.unwrap_or_else(|_| "object_store:fake_session_token".into());

let container_creds_relative_uri =
env::var("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
.unwrap_or_else(|_| "/object_store/fake_credentials_uri".into());

// required
env::set_var("AWS_ACCESS_KEY_ID", &aws_access_key_id);
env::set_var("AWS_SECRET_ACCESS_KEY", &aws_secret_access_key);
env::set_var("AWS_DEFAULT_REGION", &aws_default_region);

// optional
env::set_var("AWS_ENDPOINT", &aws_endpoint);
env::set_var("AWS_SESSION_TOKEN", &aws_session_token);
env::set_var(
"AWS_CONTAINER_CREDENTIALS_RELATIVE_URI",
&container_creds_relative_uri,
);
env::set_var("AWS_UNSIGNED_PAYLOAD", "true");
env::set_var("AWS_CHECKSUM_ALGORITHM", "sha256");

let builder = AmazonS3Builder::from_env();
assert_eq!(builder.access_key_id.unwrap(), aws_access_key_id.as_str());
assert_eq!(
builder.secret_access_key.unwrap(),
aws_secret_access_key.as_str()
);
assert_eq!(builder.region.unwrap(), aws_default_region);

assert_eq!(builder.endpoint.unwrap(), aws_endpoint);
assert_eq!(builder.token.unwrap(), aws_session_token);
assert_eq!(
builder.container_credentials_relative_uri.unwrap(),
container_creds_relative_uri
);
assert_eq!(
builder.checksum_algorithm.unwrap().get().unwrap(),
Checksum::SHA256
);
assert!(builder.unsigned_payload.get().unwrap());
}

#[test]
fn s3_test_config_from_map() {
let aws_access_key_id = "object_store:fake_access_key_id".to_string();
@@ -1304,7 +1160,9 @@ mod tests {

#[tokio::test]
async fn s3_test() {
let config = maybe_skip_integration!();
crate::test_util::maybe_skip_integration!();
let config = AmazonS3Builder::from_env();

let is_local = matches!(&config.endpoint, Some(e) if e.starts_with("http://"));
let integration = config.build().unwrap();

@@ -1317,22 +1175,23 @@ mod tests {
stream_get(&integration).await;

// run integration test with unsigned payload enabled
let config = maybe_skip_integration!().with_unsigned_payload(true);
let config = AmazonS3Builder::from_env().with_unsigned_payload(true);
let is_local = matches!(&config.endpoint, Some(e) if e.starts_with("http://"));
let integration = config.build().unwrap();
put_get_delete_list_opts(&integration, is_local).await;

// run integration test with checksum set to sha256
let config = maybe_skip_integration!().with_checksum_algorithm(Checksum::SHA256);
let config =
AmazonS3Builder::from_env().with_checksum_algorithm(Checksum::SHA256);
let is_local = matches!(&config.endpoint, Some(e) if e.starts_with("http://"));
let integration = config.build().unwrap();
put_get_delete_list_opts(&integration, is_local).await;
}

#[tokio::test]
async fn s3_test_get_nonexistent_location() {
let config = maybe_skip_integration!();
let integration = config.build().unwrap();
crate::test_util::maybe_skip_integration!();
let integration = AmazonS3Builder::from_env().build().unwrap();

let location = Path::from_iter([NON_EXISTENT_NAME]);

@@ -1344,7 +1203,8 @@ mod tests {

#[tokio::test]
async fn s3_test_get_nonexistent_bucket() {
let config = maybe_skip_integration!().with_bucket_name(NON_EXISTENT_NAME);
crate::test_util::maybe_skip_integration!();
let config = AmazonS3Builder::from_env().with_bucket_name(NON_EXISTENT_NAME);
let integration = config.build().unwrap();

let location = Path::from_iter([NON_EXISTENT_NAME]);
@@ -1355,8 +1215,8 @@ mod tests {

#[tokio::test]
async fn s3_test_put_nonexistent_bucket() {
let config = maybe_skip_integration!().with_bucket_name(NON_EXISTENT_NAME);

crate::test_util::maybe_skip_integration!();
let config = AmazonS3Builder::from_env().with_bucket_name(NON_EXISTENT_NAME);
let integration = config.build().unwrap();

let location = Path::from_iter([NON_EXISTENT_NAME]);
@@ -1368,8 +1228,8 @@ mod tests {

#[tokio::test]
async fn s3_test_delete_nonexistent_location() {
let config = maybe_skip_integration!();
let integration = config.build().unwrap();
crate::test_util::maybe_skip_integration!();
let integration = AmazonS3Builder::from_env().build().unwrap();

let location = Path::from_iter([NON_EXISTENT_NAME]);

@@ -1378,7 +1238,8 @@ mod tests {

#[tokio::test]
async fn s3_test_delete_nonexistent_bucket() {
let config = maybe_skip_integration!().with_bucket_name(NON_EXISTENT_NAME);
crate::test_util::maybe_skip_integration!();
let config = AmazonS3Builder::from_env().with_bucket_name(NON_EXISTENT_NAME);
let integration = config.build().unwrap();

let location = Path::from_iter([NON_EXISTENT_NAME]);
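The `crate::test_util::maybe_skip_integration!()` calls above refer to a shared macro defined in one of the changed files not shown in this view. A hedged sketch of the gating behaviour those call sites rely on, assuming it only checks `TEST_INTEGRATION` (the actual implementation may differ):

```rust
// Hypothetical sketch of the shared test_util gate; the real definition is not
// among the files shown here. It returns early from the calling test unless
// TEST_INTEGRATION is set, so per-store env-var checks are no longer needed.
macro_rules! maybe_skip_integration {
    () => {
        if std::env::var("TEST_INTEGRATION").is_err() {
            eprintln!("skipping integration test - set TEST_INTEGRATION to run");
            return;
        }
    };
}

fn main() {
    maybe_skip_integration!();
    println!("TEST_INTEGRATION is set - an integration test would run here");
}
```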