diff --git a/.devcontainer/.env b/.devcontainer/.env index 7e7f783..866fcd7 100644 --- a/.devcontainer/.env +++ b/.devcontainer/.env @@ -18,5 +18,10 @@ AZURE_TEST_CONTAINER_NAME=testcontainer AZURE_TEST_READ_ONLY_SAS="se=2100-05-05&sp=r&sv=2022-11-02&sr=c&sig=YMPFnAHKe9y0o3hFegncbwQTXtAyvsJEgPB2Ne1b9CQ%3D" AZURE_TEST_READ_WRITE_SAS="se=2100-05-05&sp=rcw&sv=2022-11-02&sr=c&sig=TPz2jEz0t9L651t6rTCQr%2BOjmJHkM76tnCGdcyttnlA%3D" +# GCS tests +GOOGLE_TEST_BUCKET=testbucket +GOOGLE_SERVICE_ACCOUNT_KEY='{"gcs_base_url": "http://localhost:4443","disable_oauth": true,"client_email": "","private_key_id": "","private_key": ""}' +GOOGLE_SERVICE_ENDPOINT=http://localhost:4443 + # Others RUST_TEST_THREADS=1 diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index a432f90..2b75da6 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -12,6 +12,7 @@ services: - ${USERPROFILE}${HOME}/.gitconfig:/home/rust/.gitconfig:ro - ${USERPROFILE}${HOME}/.aws:/home/rust/.aws:rw - ${USERPROFILE}${HOME}/.azure:/home/rust/.azure:rw + - ${USERPROFILE}${HOME}/.config/gcloud:/home/rust/.config/gcloud:rw - ./entrypoint.sh:/entrypoint.sh env_file: - .env @@ -20,6 +21,7 @@ services: depends_on: - minio - azurite + - fake-gcs-server minio: image: minio/minio @@ -47,3 +49,16 @@ services: interval: 6s timeout: 2s retries: 3 + + fake-gcs-server: + image: tustvold/fake-gcs-server + env_file: + - .env + network_mode: host + command: -scheme http -public-host localhost:4443 + restart: unless-stopped + healthcheck: + test: ["CMD", "nc", "-z", "localhost", "4443"] + interval: 6s + timeout: 2s + retries: 3 diff --git a/.devcontainer/entrypoint.sh b/.devcontainer/entrypoint.sh index 43944ed..5408468 100755 --- a/.devcontainer/entrypoint.sh +++ b/.devcontainer/entrypoint.sh @@ -5,4 +5,7 @@ trap "echo 'Caught termination signal. 
Exiting...'; exit 0" SIGINT SIGTERM # create azurite container az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING +# create fake-gcs bucket +curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b" + sleep infinity diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9a16df7..7445eab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -139,6 +139,20 @@ jobs: # create container az storage container create -n $AZURE_TEST_CONTAINER_NAME --connection-string $AZURE_STORAGE_CONNECTION_STRING + - name: Start fake-gcs-server for Google Cloud Storage emulator tests + run: | + docker run -d \ + --env-file .devcontainer/.env \ + -p 4443:4443 \ + tustvold/fake-gcs-server -scheme http -public-host localhost:4443 + + while ! curl $GOOGLE_SERVICE_ENDPOINT; do + echo "Waiting for $GOOGLE_SERVICE_ENDPOINT..." + sleep 1 + done + + curl -v -X POST --data-binary "{\"name\":\"$GOOGLE_TEST_BUCKET\"}" -H "Content-Type: application/json" "$GOOGLE_SERVICE_ENDPOINT/storage/v1/b" + - name: Run tests run: | # Run tests with coverage tool diff --git a/.vscode/settings.json b/.vscode/settings.json index f6ad919..f90c4ab 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -5,6 +5,6 @@ "rust-analyzer.checkOnSave": true, "editor.inlayHints.enabled": "offUnlessPressed", "files.watcherExclude": { - "**/target/**": true - } + "**/target/**": true + } } diff --git a/Cargo.lock b/Cargo.lock index bfe857f..37976b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2295,6 +2295,7 @@ dependencies = [ "rand 0.8.5", "reqwest", "ring", + "rustls-pemfile 2.2.0", "serde", "serde_json", "snafu", diff --git a/Cargo.toml b/Cargo.toml index c545672..26e1a7c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ aws-credential-types = {version = "1", default-features = false} azure_storage = {version = "0.21", 
default-features = false} futures = "0.3" home = "0.5" -object_store = {version = "0.11", default-features = false, features = ["aws", "azure"]} +object_store = {version = "0.11", default-features = false, features = ["aws", "azure", "gcp"]} once_cell = "1" parquet = {version = "53", default-features = false, features = [ "arrow", diff --git a/README.md b/README.md index 1190d42..3782389 100644 --- a/README.md +++ b/README.md @@ -156,7 +156,7 @@ SELECT uri, encode(key, 'escape') as key, encode(value, 'escape') as value FROM ``` ## Object Store Support -`pg_parquet` supports reading and writing Parquet files from/to `S3` and `Azure Blob Storage` object stores. +`pg_parquet` supports reading and writing Parquet files from/to `S3`, `Azure Blob Storage` and `Google Cloud Storage` object stores. > [!NOTE] > To be able to write into a object store location, you need to grant `parquet_object_store_write` role to your current postgres user. @@ -239,6 +239,28 @@ Supported authorization methods' priority order is shown below: 2. Sas token, 3. Storage key.
+#### Google Cloud Storage + +The simplest way to configure object storage is by creating a json config file like [`/tmp/gcs.json`]: + +```bash +$ cat /tmp/gcs.json +{ + "gcs_base_url": "http://localhost:4443", + "disable_oauth": true, + "client_email": "", + "private_key_id": "", + "private_key": "" +} +``` + +Alternatively, you can use the following environment variables when starting postgres to configure the Google Cloud Storage client: +- `GOOGLE_SERVICE_ACCOUNT_KEY`: json serialized service account key **(only via environment variables)** +- `GOOGLE_SERVICE_ACCOUNT_PATH`: an alternative location for the config file **(only via environment variables)** + +Supported Google Cloud Storage uri formats are shown below: +- gs:// \<bucket\> / \<path\> + ## Copy Options `pg_parquet` supports the following options in the `COPY TO` command: - `format parquet`: you need to specify this option to read or write Parquet files which does not end with `.parquet[.]` extension, diff --git a/src/object_store.rs b/src/object_store.rs index 8d36032..0e8d12a 100644 --- a/src/object_store.rs +++ b/src/object_store.rs @@ -7,19 +7,20 @@ use crate::{ arrow_parquet::uri_utils::uri_as_string, object_store::{ aws::create_s3_object_store, azure::create_azure_object_store, - local_file::create_local_file_object_store, + gcs::create_gcs_object_store, local_file::create_local_file_object_store, }, PG_BACKEND_TOKIO_RUNTIME, }; pub(crate) mod aws; pub(crate) mod azure; +pub(crate) mod gcs; pub(crate) mod local_file; pub(crate) fn create_object_store(uri: &Url, copy_from: bool) -> (Arc, Path) { let (scheme, path) = ObjectStoreScheme::parse(uri).unwrap_or_else(|_| { panic!( - "unrecognized uri {}. pg_parquet supports local paths, s3:// or azure:// schemes.", + "unrecognized uri {}. 
pg_parquet supports local paths, s3://, azure:// or gs:// schemes.", uri ) }); @@ -37,6 +38,11 @@ pub(crate) fn create_object_store(uri: &Url, copy_from: bool) -> (Arc { + let storage_container = Arc::new(create_gcs_object_store(uri)); + + (storage_container, path) + } ObjectStoreScheme::Local => { let storage_container = Arc::new(create_local_file_object_store(uri, copy_from)); @@ -47,7 +53,7 @@ pub(crate) fn create_object_store(uri: &Url, copy_from: bool) -> (Arc { panic!( - "unsupported scheme {} in uri {}. pg_parquet supports local paths, s3:// or azure:// schemes.", + "unsupported scheme {} in uri {}. pg_parquet supports local paths, s3://, azure:// or gs:// schemes.", uri.scheme(), uri ); diff --git a/src/object_store/gcs.rs b/src/object_store/gcs.rs new file mode 100644 index 0000000..00e13b5 --- /dev/null +++ b/src/object_store/gcs.rs @@ -0,0 +1,57 @@ +use object_store::gcp::{GoogleCloudStorage, GoogleCloudStorageBuilder}; +use url::Url; + +// create_gcs_object_store a GoogleCloudStorage object store from given uri. +// It is configured by environment variables. 
Currently, we only support +// following environment variables: +// - GOOGLE_SERVICE_ACCOUNT_KEY +// - GOOGLE_SERVICE_ACCOUNT_PATH +pub(crate) fn create_gcs_object_store(uri: &Url) -> GoogleCloudStorage { + let bucket_name = parse_gcs_bucket(uri).unwrap_or_else(|| { + panic!("unsupported gcs uri: {}", uri); + }); + + let mut gcs_builder = GoogleCloudStorageBuilder::new().with_bucket_name(bucket_name); + + let gcs_config = GoogleStorageConfig::load(); + + // service account key + if let Some(service_account_key) = gcs_config.service_account_key { + gcs_builder = gcs_builder.with_service_account_key(&service_account_key); + } + + // service account path + if let Some(service_account_path) = gcs_config.service_account_path { + gcs_builder = gcs_builder.with_service_account_path(&service_account_path); + } + + gcs_builder.build().unwrap_or_else(|e| panic!("{}", e)) +} + +fn parse_gcs_bucket(uri: &Url) -> Option { + let host = uri.host_str()?; + + // gs://{bucket}/key + if uri.scheme() == "gs" { + return Some(host.to_string()); + } + + None +} + +// GoogleStorageConfig is a struct that holds the configuration that is +// used to configure the Google Storage object store. +struct GoogleStorageConfig { + service_account_key: Option, + service_account_path: Option, +} + +impl GoogleStorageConfig { + // load loads the Google Storage configuration from the environment. 
+ fn load() -> Self { + Self { + service_account_key: std::env::var("GOOGLE_SERVICE_ACCOUNT_KEY").ok(), + service_account_path: std::env::var("GOOGLE_SERVICE_ACCOUNT_PATH").ok(), + } + } +} diff --git a/src/pgrx_tests/object_store.rs b/src/pgrx_tests/object_store.rs index c5c9e83..9519110 100644 --- a/src/pgrx_tests/object_store.rs +++ b/src/pgrx_tests/object_store.rs @@ -472,7 +472,7 @@ mod tests { ); let copy_to_command = format!( - "COPY (SELECT i FROM generate_series(1,10) i) TO '{}' WITH (format parquet);;", + "COPY (SELECT i FROM generate_series(1,10) i) TO '{}' WITH (format parquet);", azure_blob_uri ); Spi::run(copy_to_command.as_str()).unwrap(); @@ -500,7 +500,7 @@ mod tests { ); let copy_to_command = format!( - "COPY (SELECT i FROM generate_series(1,10) i) TO '{}' WITH (format parquet);;", + "COPY (SELECT i FROM generate_series(1,10) i) TO '{}' WITH (format parquet);", azure_blob_uri ); Spi::run(copy_to_command.as_str()).unwrap(); @@ -547,10 +547,47 @@ mod tests { } #[pg_test] - #[should_panic(expected = "unsupported scheme gs in uri gs://testbucket")] + fn test_gcs_from_env() { + let test_bucket_name: String = + std::env::var("GOOGLE_TEST_BUCKET").expect("GOOGLE_TEST_BUCKET not found"); + + let gcs_uri = format!("gs://{}/pg_parquet_test.parquet", test_bucket_name); + + let test_table = TestTable::::new("int4".into()).with_uri(gcs_uri); + + test_table.insert("INSERT INTO test_expected (a) VALUES (1), (2), (null);"); + test_table.assert_expected_and_result_rows(); + } + + #[pg_test] + #[should_panic(expected = "404 Not Found")] + fn test_gcs_write_wrong_bucket() { + let s3_uri = "gs://randombucketwhichdoesnotexist/pg_parquet_test.parquet"; + + let copy_to_command = format!( + "COPY (SELECT i FROM generate_series(1,10) i) TO '{}';", + s3_uri + ); + Spi::run(copy_to_command.as_str()).unwrap(); + } + + #[pg_test] + #[should_panic(expected = "404 Not Found")] + fn test_gcs_read_wrong_bucket() { + let gcs_uri = 
"gs://randombucketwhichdoesnotexist/pg_parquet_test.parquet"; + + let create_table_command = "CREATE TABLE test_table (a int);"; + Spi::run(create_table_command).unwrap(); + + let copy_from_command = format!("COPY test_table FROM '{}';", gcs_uri); + Spi::run(copy_from_command.as_str()).unwrap(); + } + + #[pg_test] + #[should_panic(expected = "unsupported scheme http in uri http://testbucket")] fn test_unsupported_uri() { let test_table = - TestTable::::new("int4".into()).with_uri("gs://testbucket".to_string()); + TestTable::::new("int4".into()).with_uri("http://testbucket".to_string()); test_table.insert("INSERT INTO test_expected (a) VALUES (1), (2), (null);"); test_table.assert_expected_and_result_rows(); }