diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 304ba032b416a..37a426ccb7772 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -1920,7 +1920,8 @@ private FSDataInputStream executeOpen(
         .withCallbacks(createInputStreamCallbacks(auditSpan))
         .withContext(readContext.build())
         .withObjectAttributes(createObjectAttributes(path, fileStatus))
-        .withStreamStatistics(inputStreamStats);
+        .withStreamStatistics(inputStreamStats)
+        .withEncryptionSecrets(getEncryptionSecrets());
     return new FSDataInputStream(getStore().readObject(parameters));
   }
 
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsStream.java
index 6b910c6538070..fcff6855f6dee 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsStream.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsStream.java
@@ -21,9 +21,13 @@
 
 import java.io.EOFException;
 import java.io.IOException;
+import java.util.Optional;
 
+import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecretOperations;
 import software.amazon.s3.analyticsaccelerator.S3SeekableInputStreamFactory;
 import software.amazon.s3.analyticsaccelerator.S3SeekableInputStream;
+import software.amazon.s3.analyticsaccelerator.request.EncryptionSecrets;
 import software.amazon.s3.analyticsaccelerator.request.ObjectMetadata;
 import software.amazon.s3.analyticsaccelerator.util.InputPolicy;
 import software.amazon.s3.analyticsaccelerator.util.OpenStreamInformation;
@@ -205,6 +209,12 @@ private OpenStreamInformation buildOpenStreamInformation(ObjectReadParameters pa
           .etag(parameters.getObjectAttributes().getETag()).build());
     }
 
+    if (parameters.getEncryptionSecrets().getEncryptionMethod() == S3AEncryptionMethods.SSE_C) {
+      EncryptionSecretOperations.getSSECustomerKey(parameters.getEncryptionSecrets())
+          .ifPresent(base64customerKey -> openStreamInformationBuilder.encryptionSecrets(
+              EncryptionSecrets.builder().sseCustomerKey(Optional.of(base64customerKey)).build()));
+    }
+
     return openStreamInformationBuilder.build();
   }
 
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/ObjectReadParameters.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/ObjectReadParameters.java
index e784dadcb651a..6d85094ea51d2 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/ObjectReadParameters.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/ObjectReadParameters.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.s3a.S3AReadOpContext;
 import org.apache.hadoop.fs.s3a.S3ObjectAttributes;
+import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
 import org.apache.hadoop.fs.s3a.statistics.S3AInputStreamStatistics;
 
 import static java.util.Objects.requireNonNull;
@@ -69,6 +70,29 @@ public final class ObjectReadParameters {
    */
   private LocalDirAllocator directoryAllocator;
 
+  /**
+   * Encryption secrets for this stream.
+   */
+  private EncryptionSecrets encryptionSecrets;
+
+  /**
+   * Getter.
+   * @return Encryption secrets.
+   */
+  public EncryptionSecrets getEncryptionSecrets() {
+    return encryptionSecrets;
+  }
+
+  /**
+   * Set encryption secrets.
+   * @param value new value
+   * @return the builder
+   */
+  public ObjectReadParameters withEncryptionSecrets(final EncryptionSecrets value) {
+    encryptionSecrets = value;
+    return this;
+  }
+
   /**
    * @return Read operation context.
    */
@@ -185,6 +209,7 @@ public ObjectReadParameters validate() {
     requireNonNull(directoryAllocator, "directoryAllocator");
     requireNonNull(objectAttributes, "objectAttributes");
     requireNonNull(streamStatistics, "streamStatistics");
+    requireNonNull(encryptionSecrets, "encryptionSecrets");
     return this;
   }
 }
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
index 0f79881466f1e..51b9aca9eb2ea 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
@@ -40,10 +40,10 @@
 
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeStoreAwsHosted;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.enableAnalyticsAccelerator;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.maybeSkipRootTests;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfAnalyticsAcceleratorEnabled;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
@@ -96,8 +96,6 @@ protected Configuration createConfiguration() {
   @Override
   public void setup() throws Exception {
     super.setup();
-    skipIfAnalyticsAcceleratorEnabled(getConfiguration(),
-        "Analytics Accelerator currently does not support SSE-C");
     assumeEnabled();
     // although not a root dir test, this confuses paths enough it shouldn't be run in
     // parallel with other jobs
@@ -327,6 +325,65 @@ public void testChecksumRequiresReadAccess() throws Throwable {
         () -> fsKeyB.getFileChecksum(path));
   }
 
+
+  /**
+   * Tests the creation and reading of a file using a different encryption key
+   * when Analytics Accelerator is enabled.
+   *
+   * @throws Exception if any error occurs during the test execution
+   */
+  @Test
+  public void testCreateFileAndReadWithDifferentEncryptionKeyWithAnalyticsAcceleratorEnabled() throws Exception {
+    enableAnalyticsAccelerator(getConfiguration());
+    testCreateFileAndReadWithDifferentEncryptionKey();
+  }
+
+  /**
+   * Tests the creation and movement of a file using a different SSE-C key
+   * when Analytics Accelerator is enabled.
+   *
+   * @throws Exception if any error occurs during the test execution
+   */
+  @Test
+  public void testCreateFileThenMoveWithDifferentSSECKeyWithAnalyticsAcceleratorEnabled() throws Exception {
+    enableAnalyticsAccelerator(getConfiguration());
+    testCreateFileThenMoveWithDifferentSSECKey();
+  }
+
+  /**
+   * Tests file creation and rename when Analytics Accelerator is enabled.
+   *
+   * @throws Exception if any error occurs during the test execution
+   */
+  @Test
+  public void testCreateFileAndRenameFileWithAnalyticsAcceleratorEnabled() throws Exception {
+    enableAnalyticsAccelerator(getConfiguration());
+    testRenameFile();
+  }
+
+  /**
+   * Tests the creation and listing of encrypted files when Analytics Accelerator is enabled.
+   *
+   * @throws Exception if any error occurs during the test execution
+   */
+  @Test
+  public void testCreateFileAndListStatusEncryptedFileWithAnalyticsAcceleratorEnabled() throws Exception {
+    enableAnalyticsAccelerator(getConfiguration());
+    testListStatusEncryptedFile();
+  }
+
+  /**
+   * Tests the creation and deletion of an encrypted object using a different key
+   * when Analytics Accelerator is enabled.
+   *
+   * @throws Exception if any error occurs during the test execution
+   */
+  @Test
+  public void testCreateFileAndDeleteEncryptedObjectWithDifferentKeyWithAnalyticsAcceleratorEnabled() throws Exception {
+    enableAnalyticsAccelerator(getConfiguration());
+    testDeleteEncryptedObjectWithDifferentKey();
+  }
+
   private S3AFileSystem createNewFileSystemWithSSECKey(String sseCKey) throws IOException {
     Configuration conf = this.createConfiguration();
 
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
index 0203b00caab69..6f19ba15c1c9a 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
@@ -31,7 +31,6 @@
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfAnalyticsAcceleratorEnabled;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
 
 /**
@@ -55,8 +54,6 @@ public class ITestS3AHugeFilesSSECDiskBlocks
   public void setup() throws Exception {
     try {
       super.setup();
-      skipIfAnalyticsAcceleratorEnabled(getConfiguration(),
-          "Analytics Accelerator currently does not support SSE-C");
     } catch (AccessDeniedException | AWSUnsupportedFeatureException e) {
       skip("Bucket does not allow " + S3AEncryptionMethods.SSE_C + " encryption method");
     }
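
For context, a minimal usage sketch (not part of the patch) of the read path this change unblocks: an SSE-C encrypted object opened through the analytics accelerator stream. fs.s3a.encryption.algorithm and fs.s3a.encryption.key are the standard S3A encryption settings; it is an assumption that fs.s3a.input.stream.type=analytics is the switch the enableAnalyticsAccelerator() test helper flips, and the bucket and object names are placeholders.

// Illustrative sketch only, not part of the patch. Assumes fs.s3a.input.stream.type=analytics
// selects the AnalyticsStream; bucket, object path and key handling are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SseCAnalyticsReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Standard S3A SSE-C settings: algorithm plus the base64-encoded AES-256 customer key.
    conf.set("fs.s3a.encryption.algorithm", "SSE-C");
    conf.set("fs.s3a.encryption.key", args[0]);
    // Assumption: this selects the analytics (AAL) input stream implementation.
    conf.set("fs.s3a.input.stream.type", "analytics");

    // With this patch, S3AFileSystem.executeOpen() passes its EncryptionSecrets into
    // ObjectReadParameters, and AnalyticsStream copies the SSE-C customer key into the
    // accelerator's OpenStreamInformation, so the GET issued below carries the key.
    try (FileSystem fs = FileSystem.newInstance(new URI("s3a://example-bucket/"), conf);
         FSDataInputStream in = fs.open(new Path("s3a://example-bucket/data/object.bin"))) {
      byte[] buffer = new byte[4096];
      int bytesRead = in.read(buffer);
      System.out.println("Read " + bytesRead + " bytes from an SSE-C encrypted object");
    }
  }
}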