From bdb8dad14fcd1b29f105886be33eece1e18a1304 Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 12:51:50 -0800
Subject: [PATCH 1/9] Rename IcebergDestination to S3DataLakeDestination

---
 .../{IcebergDestination.kt => S3DataLakeDestination.kt} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergDestination.kt => S3DataLakeDestination.kt} (100%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergDestination.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestination.kt
similarity index 100%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergDestination.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestination.kt

From b09dbccc82096823a7cf7564bc0314446b2aab53 Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 12:55:39 -0800
Subject: [PATCH 2/9] Rename IcebergV2Writer to S3DataLakeWriter

---
 ...{IcebergV2Writer.kt => S3DataLakeWriter.kt} |  2 +-
 .../s3_data_lake/IcebergV2WriterTest.kt        | 18 +++++++++---------
 2 files changed, 10 insertions(+), 10 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2Writer.kt => S3DataLakeWriter.kt} (99%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Writer.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
similarity index 99%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Writer.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
index e4f82a0f5664..cf91220b1d11 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Writer.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
@@ -14,7 +14,7 @@ import javax.inject.Singleton
 import org.apache.iceberg.Schema
 
 @Singleton
-class IcebergV2Writer(
+class S3DataLakeWriter(
     private val icebergTableWriterFactory: IcebergTableWriterFactory,
     private val icebergConfiguration: IcebergV2Configuration,
     private val icebergUtil: IcebergUtil
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
index ba88a4526ed9..f525b9e0aa3c 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
@@ -129,13 +129,13 @@ internal class IcebergV2WriterTest {
                 pipeline.finalSchema.withAirbyteMeta(true).toIcebergSchema(emptyList())
             }
         }
-        val icebergV2Writer =
-            IcebergV2Writer(
+        val s3DataLakeWriter =
+            S3DataLakeWriter(
                 icebergTableWriterFactory = icebergTableWriterFactory,
                 icebergConfiguration = icebergConfiguration,
                 icebergUtil = icebergUtil,
             )
-        val streamLoader = icebergV2Writer.createStreamLoader(stream = stream)
+        val streamLoader = s3DataLakeWriter.createStreamLoader(stream = stream)
         assertNotNull(streamLoader)
     }
 
@@ -194,15 +194,15 @@ internal class IcebergV2WriterTest {
                 pipeline.finalSchema.withAirbyteMeta(true).toIcebergSchema(emptyList())
             }
         }
-        val icebergV2Writer =
-            IcebergV2Writer(
+        val s3DataLakeWriter =
+            S3DataLakeWriter(
                 icebergTableWriterFactory = icebergTableWriterFactory,
                 icebergConfiguration = icebergConfiguration,
                 icebergUtil = icebergUtil,
             )
         val e = assertThrows<IllegalArgumentException> {
-            icebergV2Writer.createStreamLoader(stream = stream)
+            s3DataLakeWriter.createStreamLoader(stream = stream)
         }
         assertTrue(
             e.message?.startsWith("Table schema fields are different than catalog schema") ?: false
         )
@@ -301,15 +301,15 @@ internal class IcebergV2WriterTest {
                 pipeline.finalSchema.withAirbyteMeta(true).toIcebergSchema(listOf(primaryKeys))
             }
         }
-        val icebergV2Writer =
-            IcebergV2Writer(
+        val s3DataLakeWriter =
+            S3DataLakeWriter(
                 icebergTableWriterFactory = icebergTableWriterFactory,
                 icebergConfiguration = icebergConfiguration,
                 icebergUtil = icebergUtil,
             )
         val e = assertThrows<IllegalArgumentException> {
-            icebergV2Writer.createStreamLoader(stream = stream)
+            s3DataLakeWriter.createStreamLoader(stream = stream)
         }
         assertTrue(e.message?.startsWith("Identifier fields are different") ?: false)
     }

From 7d9915a1c9f4eaee6e36cb5e87b2a8e4ee3b9bc3 Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 13:06:31 -0800
Subject: [PATCH 3/9] Rename the checker, configuration, and specification
 classes

---
 ...cebergV2Checker.kt => S3DataLakeChecker.kt} |  8 ++++----
 ...iguration.kt => S3DataLakeConfiguration.kt} | 18 +++++++++---------
 ...ification.kt => S3DataLakeSpecification.kt} |  4 ++--
 .../s3_data_lake/S3DataLakeWriter.kt           |  2 +-
 .../s3_data_lake/TableIdGenerator.kt           |  2 +-
 .../destination/s3_data_lake/io/IcebergUtil.kt | 14 +++++++-------
 .../s3_data_lake/IcebergV2CheckTest.kt         |  2 +-
 .../s3_data_lake/IcebergV2TestUtil.kt          |  6 +++---
 .../s3_data_lake/IcebergV2WriteTest.kt         |  2 +-
 .../s3_data_lake/IcebergV2WriterTest.kt        |  6 +++---
 .../s3_data_lake/io/IcebergUtilTest.kt         |  4 ++--
 11 files changed, 34 insertions(+), 34 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2Checker.kt => S3DataLakeChecker.kt} (88%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2Configuration.kt => S3DataLakeConfiguration.kt} (78%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2Specification.kt => S3DataLakeSpecification.kt} (96%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Checker.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
similarity index 88%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Checker.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
index aed868463982..083583c4f567 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Checker.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
@@ -13,16 +13,16 @@ import org.apache.iceberg.Schema
 import org.apache.iceberg.types.Types
 
 @Singleton
-class IcebergV2Checker(
+class S3DataLakeChecker(
     private val icebergTableCleaner: IcebergTableCleaner,
     private val icebergUtil: IcebergUtil,
     private val tableIdGenerator: TableIdGenerator,
-) : DestinationChecker<IcebergV2Configuration> {
+) : DestinationChecker<S3DataLakeConfiguration> {
 
-    override fun check(config: IcebergV2Configuration) {
+    override fun check(config: S3DataLakeConfiguration) {
         catalogValidation(config)
     }
-    private fun catalogValidation(config: IcebergV2Configuration) {
+    private fun catalogValidation(config: S3DataLakeConfiguration) {
         val catalogProperties = icebergUtil.toCatalogProperties(config)
         val catalog = icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, catalogProperties)
 
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Configuration.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeConfiguration.kt
similarity index 78%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Configuration.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeConfiguration.kt
index 1da1303a93c7..f704391ac47c 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Configuration.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeConfiguration.kt
@@ -20,7 +20,7 @@ const val DEFAULT_STAGING_BRANCH = "airbyte_staging"
 const val TEST_NAMESPACE = "airbyte_test_namespace"
 const val TEST_TABLE = "airbyte_test_table"
 
-data class IcebergV2Configuration(
+data class S3DataLakeConfiguration(
     override val awsAccessKeyConfiguration: AWSAccessKeyConfiguration,
     override val s3BucketConfiguration: S3BucketConfiguration,
     override val icebergCatalogConfiguration: IcebergCatalogConfiguration
@@ -31,12 +31,12 @@
     S3BucketConfigurationProvider
 
 @Singleton
-class IcebergV2ConfigurationFactory :
-    DestinationConfigurationFactory<IcebergV2Specification, IcebergV2Configuration> {
+class S3DataLakeConfigurationFactory :
+    DestinationConfigurationFactory<S3DataLakeSpecification, S3DataLakeConfiguration> {
     override fun makeWithoutExceptionHandling(
-        pojo: IcebergV2Specification
-    ): IcebergV2Configuration {
-        return IcebergV2Configuration(
+        pojo: S3DataLakeSpecification
+    ): S3DataLakeConfiguration {
+        return S3DataLakeConfiguration(
             awsAccessKeyConfiguration = pojo.toAWSAccessKeyConfiguration(),
             s3BucketConfiguration = pojo.toS3BucketConfiguration(),
             icebergCatalogConfiguration = pojo.toIcebergCatalogConfiguration(),
@@ -45,9 +45,9 @@ class IcebergV2ConfigurationFactory :
 }
 
 @Factory
-class IcebergV2ConfigurationProvider(private val config: DestinationConfiguration) {
+class S3DataLakeConfigurationProvider(private val config: DestinationConfiguration) {
     @Singleton
-    fun get(): IcebergV2Configuration {
-        return config as IcebergV2Configuration
+    fun get(): S3DataLakeConfiguration {
+        return config as S3DataLakeConfiguration
     }
 }
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Specification.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecification.kt
similarity index 96%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Specification.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecification.kt
index 701107af4634..f78e35de942c 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2Specification.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecification.kt
@@ -20,7 +20,7 @@ import jakarta.inject.Singleton
 
 @Singleton
 @JsonSchemaTitle("Iceberg V2 Destination Specification")
-class IcebergV2Specification :
+class S3DataLakeSpecification :
     ConfigurationSpecification(),
     AWSAccessKeySpecification,
     S3BucketSpecification,
@@ -61,7 +61,7 @@
 }
 
 @Singleton
-class IcebergV2SpecificationExtension : DestinationSpecificationExtension {
+class S3DataLakeSpecificationExtension : DestinationSpecificationExtension {
     override val supportedSyncModes =
         listOf(
             DestinationSyncMode.OVERWRITE,
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
index cf91220b1d11..44973c87781e 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
@@ -16,7 +16,7 @@ import org.apache.iceberg.Schema
 
 @Singleton
 class S3DataLakeWriter(
     private val icebergTableWriterFactory: IcebergTableWriterFactory,
-    private val icebergConfiguration: IcebergV2Configuration,
+    private val icebergConfiguration: S3DataLakeConfiguration,
     private val icebergUtil: IcebergUtil
 ) : DestinationWriter {
 
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/TableIdGenerator.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/TableIdGenerator.kt
index 30e27df54f30..d83db691315e 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/TableIdGenerator.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/TableIdGenerator.kt
@@ -32,7 +32,7 @@ class GlueTableIdGenerator : TableIdGenerator {
 }
 
 @Factory
-class TableIdGeneratorFactory(private val icebergConfiguration: IcebergV2Configuration) {
+class TableIdGeneratorFactory(private val icebergConfiguration: S3DataLakeConfiguration) {
     @Singleton
     fun create() =
         when (icebergConfiguration.icebergCatalogConfiguration.catalogConfiguration) {
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt
index e8901207844e..97ab342301f1 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt
@@ -20,7 +20,7 @@ import io.airbyte.cdk.load.data.withAirbyteMeta
 import io.airbyte.cdk.load.message.DestinationRecordAirbyteValue
 import io.airbyte.integrations.destination.s3_data_lake.ACCESS_KEY_ID
 import io.airbyte.integrations.destination.s3_data_lake.GlueCredentialsProvider
-import io.airbyte.integrations.destination.s3_data_lake.IcebergV2Configuration
+import io.airbyte.integrations.destination.s3_data_lake.S3DataLakeConfiguration
 import io.airbyte.integrations.destination.s3_data_lake.SECRET_ACCESS_KEY
 import io.airbyte.integrations.destination.s3_data_lake.TableIdGenerator
 import io.github.oshai.kotlinlogging.KotlinLogging
@@ -202,7 +202,7 @@ class IcebergUtil(
      * @param config The destination's configuration
      * @return The Iceberg [Catalog] configuration properties.
      */
-    fun toCatalogProperties(config: IcebergV2Configuration): Map<String, String> {
+    fun toCatalogProperties(config: S3DataLakeConfiguration): Map<String, String> {
         val icebergCatalogConfig = config.icebergCatalogConfiguration
         val catalogConfig = icebergCatalogConfig.catalogConfiguration
         val region = config.s3BucketConfiguration.s3BucketRegion.region
@@ -226,7 +226,7 @@ class IcebergUtil(
     }
 
     private fun buildS3Properties(
-        config: IcebergV2Configuration,
+        config: S3DataLakeConfiguration,
         icebergCatalogConfig: IcebergCatalogConfiguration
     ): Map<String, String> {
         return buildMap {
@@ -242,7 +242,7 @@ class IcebergUtil(
     }
 
     private fun buildNessieProperties(
-        config: IcebergV2Configuration,
+        config: S3DataLakeConfiguration,
         catalogConfig: NessieCatalogConfiguration,
         s3Properties: Map<String, String>
     ): Map<String, String> {
@@ -276,7 +276,7 @@ class IcebergUtil(
     }
 
     private fun buildGlueProperties(
-        config: IcebergV2Configuration,
+        config: S3DataLakeConfiguration,
         catalogConfig: GlueCatalogConfiguration,
         icebergCatalogConfig: IcebergCatalogConfiguration
     ): Map<String, String> {
@@ -302,7 +302,7 @@ class IcebergUtil(
 
     private fun buildRoleBasedClientProperties(
         roleArn: String,
-        config: IcebergV2Configuration
+        config: S3DataLakeConfiguration
     ): Map<String, String> {
         val region = config.s3BucketConfiguration.s3BucketRegion.region
         val (accessKeyId, secretAccessKey, externalId) =
@@ -339,7 +339,7 @@ class IcebergUtil(
         )
     }
 
-    private fun buildKeyBasedClientProperties(config: IcebergV2Configuration): Map<String, String> {
+    private fun buildKeyBasedClientProperties(config: S3DataLakeConfiguration): Map<String, String> {
         val awsAccessKeyId =
             requireNotNull(config.awsAccessKeyConfiguration.accessKeyId) {
                 "AWS Access Key ID is required for key-based authentication"
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt
index 080ad38bdb0d..a8366ede8cd6 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt
@@ -9,7 +9,7 @@ import io.airbyte.cdk.load.check.CheckTestConfig
 import io.airbyte.integrations.destination.s3_data_lake.IcebergV2TestUtil.GLUE_CONFIG_PATH
 
 class IcebergV2CheckTest :
-    CheckIntegrationTest<IcebergV2Specification>(
+    CheckIntegrationTest<S3DataLakeSpecification>(
         successConfigFilenames =
             listOf(
                 CheckTestConfig(GLUE_CONFIG_PATH),
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt
index 588a10d231c3..1511dbc97eeb 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt
@@ -21,7 +21,7 @@ object IcebergV2TestUtil {
     fun parseConfig(path: Path) =
         getConfig(
-            ValidatedJsonUtils.parseOne(IcebergV2Specification::class.java, Files.readString(path))
+            ValidatedJsonUtils.parseOne(S3DataLakeSpecification::class.java, Files.readString(path))
         )
 
     fun getAWSSystemCredentials(): AWSSystemCredentials {
@@ -35,9 +35,9 @@ object IcebergV2TestUtil {
     }
 
     fun getConfig(spec: ConfigurationSpecification) =
-        IcebergV2ConfigurationFactory().makeWithoutExceptionHandling(spec as IcebergV2Specification)
+        S3DataLakeConfigurationFactory().makeWithoutExceptionHandling(spec as S3DataLakeSpecification)
 
-    fun getCatalog(config: IcebergV2Configuration, awsSystemCredentials: AWSSystemCredentials) =
+    fun getCatalog(config: S3DataLakeConfiguration, awsSystemCredentials: AWSSystemCredentials) =
         IcebergUtil(SimpleTableIdGenerator(), awsSystemCredentials).let { icebergUtil ->
             val props = icebergUtil.toCatalogProperties(config)
             icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, props)
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt
index fe17eca2ff5b..2014017759e5 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt
@@ -26,7 +26,7 @@ abstract class IcebergV2WriteTest(
 ) :
     BasicFunctionalityIntegrationTest(
         configContents,
-        IcebergV2Specification::class.java,
+        S3DataLakeSpecification::class.java,
         IcebergV2DataDumper,
         destinationCleaner,
         IcebergExpectedRecordMapper,
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
index f525b9e0aa3c..956355d058ab 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
@@ -112,7 +112,7 @@ internal class IcebergV2WriterTest {
             every { catalogConfiguration } returns
                 NessieCatalogConfiguration("http://localhost:8080/api/v1", "access-token")
         }
-        val icebergConfiguration: IcebergV2Configuration = mockk {
+        val icebergConfiguration: S3DataLakeConfiguration = mockk {
             every { awsAccessKeyConfiguration } returns awsConfiguration
             every { icebergCatalogConfiguration } returns icebergCatalogConfig
             every { s3BucketConfiguration } returns bucketConfiguration
@@ -177,7 +177,7 @@ internal class IcebergV2WriterTest {
             every { catalogConfiguration } returns
                 NessieCatalogConfiguration("http://localhost:8080/api/v1", "access-token")
         }
-        val icebergConfiguration: IcebergV2Configuration = mockk {
+        val icebergConfiguration: S3DataLakeConfiguration = mockk {
             every { awsAccessKeyConfiguration } returns awsConfiguration
             every { icebergCatalogConfiguration } returns icebergCatalogConfig
             every { s3BucketConfiguration } returns bucketConfiguration
@@ -284,7 +284,7 @@ internal class IcebergV2WriterTest {
             every { catalogConfiguration } returns
                 NessieCatalogConfiguration("http://localhost:8080/api/v1", "access-token")
         }
-        val icebergConfiguration: IcebergV2Configuration = mockk {
+        val icebergConfiguration: S3DataLakeConfiguration = mockk {
             every { awsAccessKeyConfiguration } returns awsConfiguration
             every { icebergCatalogConfiguration } returns icebergCatalogConfig
             every { s3BucketConfiguration } returns bucketConfiguration
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt
index 3abe0e76ade7..0a34bf89b945 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt
@@ -27,7 +27,7 @@ import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_EXTRACTED_AT
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_GENERATION_ID
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_META
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_RAW_ID
-import io.airbyte.integrations.destination.s3_data_lake.IcebergV2Configuration
+import io.airbyte.integrations.destination.s3_data_lake.S3DataLakeConfiguration
 import io.airbyte.integrations.destination.s3_data_lake.SimpleTableIdGenerator
 import io.mockk.every
 import io.mockk.mockk
@@ -351,7 +351,7 @@ internal class IcebergUtilTest {
                 NessieCatalogConfiguration(nessieServerUri, nessieAccessToken)
             )
         val configuration =
-            IcebergV2Configuration(
+            S3DataLakeConfiguration(
                 awsAccessKeyConfiguration = awsAccessKeyConfiguration,
                 icebergCatalogConfiguration = icebergCatalogConfiguration,
                 s3BucketConfiguration = s3BucketConfiguration,

From 9a758354471842bf4f49c9896d5d83f57b3fa62b Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 13:07:19 -0800
Subject: [PATCH 4/9] Rename IcebergStreamLoader to S3DataLakeStreamLoader

---
 .../{IcebergStreamLoader.kt => S3DataLakeStreamLoader.kt}       | 2 +-
 .../integrations/destination/s3_data_lake/S3DataLakeWriter.kt   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergStreamLoader.kt => S3DataLakeStreamLoader.kt} (99%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergStreamLoader.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
similarity index 99%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergStreamLoader.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
index 38840d29c1c6..4bcd9b5f00c3 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergStreamLoader.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
@@ -19,7 +19,7 @@ import io.github.oshai.kotlinlogging.KotlinLogging
 import org.apache.iceberg.Table
 
 @SuppressFBWarnings("NP_NONNULL_PARAM_VIOLATION", justification = "Kotlin async continuation")
-class IcebergStreamLoader(
+class S3DataLakeStreamLoader(
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
index 44973c87781e..1857c46d7924 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
@@ -35,7 +35,7 @@ class S3DataLakeWriter(
         existingAndIncomingSchemaShouldBeSame(catalogSchema = schema, tableSchema = table.schema())
 
-        return IcebergStreamLoader(
+        return S3DataLakeStreamLoader(
             stream = stream,
             table = table,
             icebergTableWriterFactory = icebergTableWriterFactory,

From 5d315849e420a1d83596a2526c2758179613f935 Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 13:14:54 -0800
Subject: [PATCH 5/9] Rename the remaining integration test classes
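
The renamed helpers keep their existing call pattern; as a rough sketch of how
they compose after this patch (this mirrors what GlueWriteTest below already
does, it is not new behavior):

    // Sketch only: wiring the renamed test utilities together.
    val config = S3DataLakeTestUtil.parseConfig(S3DataLakeTestUtil.GLUE_CONFIG_PATH)
    val catalog =
        S3DataLakeTestUtil.getCatalog(config, S3DataLakeTestUtil.getAWSSystemCredentials())
    val cleaner = S3DataLakeDestinationCleaner(catalog)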
---
 ...gV2CheckTest.kt => S3DataLakeCheckTest.kt} |  4 +--
 ...2DataDumper.kt => S3DataLakeDataDumper.kt} |  6 ++--
 ...ner.kt => S3DataLakeDestinationCleaner.kt} |  0
 ...r.kt => S3DataLakeExpectedRecordMapper.kt} |  2 +-
 ...ergV2SpecTest.kt => S3DataLakeSpecTest.kt} |  2 +-
 ...ergV2TestUtil.kt => S3DataLakeTestUtil.kt} |  2 +-
 ...gV2WriteTest.kt => S3DataLakeWriteTest.kt} | 36 +++++++++----------
 7 files changed, 26 insertions(+), 26 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2CheckTest.kt => S3DataLakeCheckTest.kt} (81%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2DataDumper.kt => S3DataLakeDataDumper.kt} (95%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergDestinationCleaner.kt => S3DataLakeDestinationCleaner.kt} (100%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergExpectedRecordMapper.kt => S3DataLakeExpectedRecordMapper.kt} (95%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2SpecTest.kt => S3DataLakeSpecTest.kt} (81%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2TestUtil.kt => S3DataLakeTestUtil.kt} (98%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2WriteTest.kt => S3DataLakeWriteTest.kt} (86%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeCheckTest.kt
similarity index 81%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeCheckTest.kt
index a8366ede8cd6..5ae4c1e2986e 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2CheckTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeCheckTest.kt
@@ -6,9 +6,9 @@ package io.airbyte.integrations.destination.s3_data_lake
 
 import io.airbyte.cdk.load.check.CheckIntegrationTest
 import io.airbyte.cdk.load.check.CheckTestConfig
-import io.airbyte.integrations.destination.s3_data_lake.IcebergV2TestUtil.GLUE_CONFIG_PATH
+import io.airbyte.integrations.destination.s3_data_lake.S3DataLakeTestUtil.GLUE_CONFIG_PATH
 
-class IcebergV2CheckTest :
+class S3DataLakeCheckTest :
     CheckIntegrationTest<S3DataLakeSpecification>(
         successConfigFilenames =
             listOf(
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2DataDumper.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDataDumper.kt
similarity index 95%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2DataDumper.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDataDumper.kt
index ffe5df693df6..1a3dc60c9376 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2DataDumper.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDataDumper.kt
@@ -17,7 +17,7 @@ import java.util.UUID
 import org.apache.iceberg.data.IcebergGenerics
 import org.apache.iceberg.data.Record
 
-object IcebergV2DataDumper : DestinationDataDumper {
+object S3DataLakeDataDumper : DestinationDataDumper {
 
     private fun toAirbyteValue(record: Record): ObjectValue {
         return ObjectValue(
@@ -71,9 +71,9 @@ object IcebergV2DataDumper : DestinationDataDumper {
         spec: ConfigurationSpecification,
         stream: DestinationStream
     ): List<OutputRecord> {
-        val config = IcebergV2TestUtil.getConfig(spec)
+        val config = S3DataLakeTestUtil.getConfig(spec)
         val catalog =
-            IcebergV2TestUtil.getCatalog(config, IcebergV2TestUtil.getAWSSystemCredentials())
+            S3DataLakeTestUtil.getCatalog(config, S3DataLakeTestUtil.getAWSSystemCredentials())
         val table =
             catalog.loadTable(
                 TableIdGeneratorFactory(config).create().toTableIdentifier(stream.descriptor)
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergDestinationCleaner.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt
similarity index 100%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergDestinationCleaner.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergExpectedRecordMapper.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeExpectedRecordMapper.kt
similarity index 95%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergExpectedRecordMapper.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeExpectedRecordMapper.kt
index 833d6adac735..b86c45a624a6 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergExpectedRecordMapper.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeExpectedRecordMapper.kt
@@ -17,7 +17,7 @@ import io.airbyte.cdk.load.test.util.OutputRecord
  * Iceberg doesn't have a TimeWithTimezone type. So map expectedRecords' TimeWithTimezone to
  * TimeWithoutTimezone.
  */
-object IcebergExpectedRecordMapper : ExpectedRecordMapper {
+object S3DataLakeExpectedRecordMapper : ExpectedRecordMapper {
     override fun mapRecord(expectedRecord: OutputRecord, schema: AirbyteType): OutputRecord {
         val mappedData = mapTimeTzToTimeNtz(expectedRecord.data)
         return expectedRecord.copy(data = mappedData as ObjectValue)
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2SpecTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecTest.kt
similarity index 81%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2SpecTest.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecTest.kt
index 91b1c641ce1f..171a4ec3b641 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2SpecTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeSpecTest.kt
@@ -6,4 +6,4 @@ package io.airbyte.integrations.destination.s3_data_lake
 
 import io.airbyte.cdk.load.spec.SpecTest
 
-class IcebergV2SpecTest : SpecTest()
+class S3DataLakeSpecTest : SpecTest()
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
similarity index 98%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
index 1511dbc97eeb..f8cb34f8571c 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2TestUtil.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
@@ -13,7 +13,7 @@ import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
 import java.nio.file.Files
 import java.nio.file.Path
 
-object IcebergV2TestUtil {
+object S3DataLakeTestUtil {
     val GLUE_CONFIG_PATH: Path = Path.of("secrets/glue.json")
     val GLUE_ASSUME_ROLE_CONFIG_PATH: Path = Path.of("secrets/glue_assume_role.json")
     private val GLUE_AWS_ASSUME_ROLE_CONFIG_PATH: Path =
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriteTest.kt
similarity index 86%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriteTest.kt
index 2014017759e5..80e8456f7b7c 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriteTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriteTest.kt
@@ -19,7 +19,7 @@ import org.junit.jupiter.api.BeforeAll
 import org.junit.jupiter.api.Disabled
 import org.junit.jupiter.api.Test
 
-abstract class IcebergV2WriteTest(
+abstract class S3DataLakeWriteTest(
     configContents: String,
     destinationCleaner: DestinationCleaner,
     envVars: Map<String, String> = emptyMap(),
@@ -27,9 +27,9 @@
 ) :
     BasicFunctionalityIntegrationTest(
         configContents,
         S3DataLakeSpecification::class.java,
-        IcebergV2DataDumper,
+        S3DataLakeDataDumper,
         destinationCleaner,
-        IcebergExpectedRecordMapper,
+        S3DataLakeExpectedRecordMapper,
         isStreamSchemaRetroactive = true,
         supportsDedup = true,
         stringifySchemalessObjects = true,
@@ -83,34 +83,34 @@
     }
 }
 
-class IcebergGlueWriteTest :
-    IcebergV2WriteTest(
-        Files.readString(IcebergV2TestUtil.GLUE_CONFIG_PATH),
+class GlueWriteTest :
+    S3DataLakeWriteTest(
+        Files.readString(S3DataLakeTestUtil.GLUE_CONFIG_PATH),
         S3DataLakeDestinationCleaner(
-            IcebergV2TestUtil.getCatalog(
-                IcebergV2TestUtil.parseConfig(IcebergV2TestUtil.GLUE_CONFIG_PATH),
-                IcebergV2TestUtil.getAWSSystemCredentials()
+            S3DataLakeTestUtil.getCatalog(
+                S3DataLakeTestUtil.parseConfig(S3DataLakeTestUtil.GLUE_CONFIG_PATH),
+                S3DataLakeTestUtil.getAWSSystemCredentials()
             )
         )
     )
 
-class IcebergGlueAssumeRoleWriteTest :
-    IcebergV2WriteTest(
-        Files.readString(IcebergV2TestUtil.GLUE_ASSUME_ROLE_CONFIG_PATH),
+class GlueAssumeRoleWriteTest :
+    S3DataLakeWriteTest(
+        Files.readString(S3DataLakeTestUtil.GLUE_ASSUME_ROLE_CONFIG_PATH),
         S3DataLakeDestinationCleaner(
-            IcebergV2TestUtil.getCatalog(
-                IcebergV2TestUtil.parseConfig(IcebergV2TestUtil.GLUE_ASSUME_ROLE_CONFIG_PATH),
-                IcebergV2TestUtil.getAWSSystemCredentials()
+            S3DataLakeTestUtil.getCatalog(
+                S3DataLakeTestUtil.parseConfig(S3DataLakeTestUtil.GLUE_ASSUME_ROLE_CONFIG_PATH),
+                S3DataLakeTestUtil.getAWSSystemCredentials()
             )
         ),
-        IcebergV2TestUtil.getAWSSystemCredentialsAsMap()
+        S3DataLakeTestUtil.getAWSSystemCredentialsAsMap()
    )
 
 @Disabled(
     "This is currently disabled until we are able to make it run via airbyte-ci. It works as expected locally"
 )
-class IcebergNessieMinioWriteTest :
-    IcebergV2WriteTest(
+class NessieMinioWriteTest :
+    S3DataLakeWriteTest(
         getConfig(),
         // we're writing to ephemeral testcontainers, so no need to clean up after ourselves
         NoopDestinationCleaner

From edb0cebaa3a4f0aa1e48ed28f2a925b1a7c8a658 Mon Sep 17 00:00:00 2001
From: Francis Genet
Date: Thu, 9 Jan 2025 13:22:15 -0800
Subject: [PATCH 6/9] Rename the io-layer helpers (IcebergUtil, table cleaner,
 table writer factory)
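
These beans are resolved by type through Micronaut, so the rename stays safe
as long as every declaration and injection site moves together. A sketch of
the resulting constructor wiring, taken from the S3DataLakeWriter change
below (not a new API):

    // Sketch only: Micronaut re-resolves the renamed @Singleton beans by type.
    @Singleton
    class S3DataLakeWriter(
        private val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory,
        private val icebergConfiguration: S3DataLakeConfiguration,
        private val s3DataLakeUtil: S3DataLakeUtil
    ) : DestinationWriter { /* body unchanged */ }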
---
 .../s3_data_lake/S3DataLakeChecker.kt          | 16 ++++----
 .../s3_data_lake/S3DataLakeStreamLoader.kt     | 22 +++++-----
 .../s3_data_lake/S3DataLakeWriter.kt           | 20 +++++-----
 ...leCleaner.kt => S3DataLakeTableCleaner.kt}  |  4 +-
 ...ory.kt => S3DataLakeTableWriterFactory.kt}  |  4 +-
 .../io/{IcebergUtil.kt => S3DataLakeUtil.kt}   |  2 +-
 .../S3DataLakeDestinationCleaner.kt            |  6 +--
 .../s3_data_lake/S3DataLakeTestUtil.kt         |  4 +-
 ...2WriterTest.kt => S3DataLakeWriterTest.kt}  | 30 +++++++-------
 ...rTest.kt => S3DataLakeTableCleanerTest.kt}  | 26 ++++++------
 ...kt => S3DataLakeTableWriterFactoryTest.kt}  | 18 ++++-----
 ...ebergUtilTest.kt => S3DataLakeUtilTest.kt}  | 40 +++++++++----------
 12 files changed, 96 insertions(+), 96 deletions(-)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergTableCleaner.kt => S3DataLakeTableCleaner.kt} (93%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergTableWriterFactory.kt => S3DataLakeTableWriterFactory.kt} (97%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergUtil.kt => S3DataLakeUtil.kt} (99%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/{IcebergV2WriterTest.kt => S3DataLakeWriterTest.kt} (93%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergTableCleanerTest.kt => S3DataLakeTableCleanerTest.kt} (85%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergTableWriterFactoryTest.kt => S3DataLakeTableWriterFactoryTest.kt} (94%)
 rename airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/{IcebergUtilTest.kt => S3DataLakeUtilTest.kt} (93%)

diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
index 083583c4f567..3b51afd8fab2 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeChecker.kt
@@ -6,16 +6,16 @@ package io.airbyte.integrations.destination.s3_data_lake
 
 import io.airbyte.cdk.load.check.DestinationChecker
 import io.airbyte.cdk.load.command.DestinationStream
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableCleaner
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableCleaner
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import javax.inject.Singleton
 import org.apache.iceberg.Schema
 import org.apache.iceberg.types.Types
 
 @Singleton
 class S3DataLakeChecker(
-    private val icebergTableCleaner: IcebergTableCleaner,
-    private val icebergUtil: IcebergUtil,
+    private val s3DataLakeTableCleaner: S3DataLakeTableCleaner,
+    private val s3DataLakeUtil: S3DataLakeUtil,
     private val tableIdGenerator: TableIdGenerator,
 ) : DestinationChecker<S3DataLakeConfiguration> {
@@ -23,8 +23,8 @@ class S3DataLakeChecker(
         catalogValidation(config)
     }
     private fun catalogValidation(config: S3DataLakeConfiguration) {
-        val catalogProperties = icebergUtil.toCatalogProperties(config)
-        val catalog = icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, catalogProperties)
+        val catalogProperties = s3DataLakeUtil.toCatalogProperties(config)
+        val catalog = s3DataLakeUtil.createCatalog(DEFAULT_CATALOG_NAME, catalogProperties)
 
         val testTableIdentifier = DestinationStream.Descriptor(TEST_NAMESPACE, TEST_TABLE)
@@ -34,14 +34,14 @@
             Types.NestedField.optional(2, "data", Types.StringType.get()),
         )
         val table =
-            icebergUtil.createTable(
+            s3DataLakeUtil.createTable(
                 testTableIdentifier,
                 catalog,
                 testTableSchema,
                 catalogProperties,
             )
 
-        icebergTableCleaner.clearTable(
+        s3DataLakeTableCleaner.clearTable(
             catalog,
             tableIdGenerator.toTableIdentifier(testTableIdentifier),
             table.io(),
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
index 4bcd9b5f00c3..d44bc073fdaf 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeStreamLoader.kt
@@ -12,9 +12,9 @@ import io.airbyte.cdk.load.message.DestinationRecordAirbyteValue
 import io.airbyte.cdk.load.message.SimpleBatch
 import io.airbyte.cdk.load.state.StreamProcessingFailed
 import io.airbyte.cdk.load.write.StreamLoader
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableCleaner
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableWriterFactory
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableCleaner
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableWriterFactory
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import io.github.oshai.kotlinlogging.KotlinLogging
 import org.apache.iceberg.Table
 
@@ -22,8 +22,8 @@ class S3DataLakeStreamLoader(
     override val stream: DestinationStream,
     private val table: Table,
-    private val icebergTableWriterFactory: IcebergTableWriterFactory,
-    private val icebergUtil: IcebergUtil,
+    private val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory,
+    private val s3DataLakeUtil: S3DataLakeUtil,
     private val pipeline: MapperPipeline,
     private val stagingBranchName: String,
     private val mainBranchName: String
@@ -35,17 +35,17 @@
         totalSizeBytes: Long,
         endOfStream: Boolean
     ): Batch {
-        icebergTableWriterFactory
+        s3DataLakeTableWriterFactory
             .create(
                 table = table,
-                generationId = icebergUtil.constructGenerationIdSuffix(stream),
+                generationId = s3DataLakeUtil.constructGenerationIdSuffix(stream),
                 importType = stream.importType
             )
             .use { writer ->
                 log.info { "Writing records to branch $stagingBranchName" }
                 records.forEach { record ->
                     val icebergRecord =
-                        icebergUtil.toRecord(
+                        s3DataLakeUtil.toRecord(
                             record = record,
                             stream = stream,
                             tableSchema = table.schema(),
@@ -84,10 +84,10 @@ class S3DataLakeStreamLoader(
             }
             val generationIdsToDelete =
                 (0 until stream.minimumGenerationId).map(
-                    icebergUtil::constructGenerationIdSuffix
+                    s3DataLakeUtil::constructGenerationIdSuffix
                 )
-            val icebergTableCleaner = IcebergTableCleaner(icebergUtil = icebergUtil)
-            icebergTableCleaner.deleteGenerationId(
+            val s3DataLakeTableCleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil)
+            s3DataLakeTableCleaner.deleteGenerationId(
                 table,
                 stagingBranchName,
                 generationIdsToDelete
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
index 1857c46d7924..e5d3d832dd27 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriter.kt
@@ -8,25 +8,25 @@ import io.airbyte.cdk.load.command.DestinationStream
 import io.airbyte.cdk.load.data.iceberg.parquet.IcebergParquetPipelineFactory
 import io.airbyte.cdk.load.write.DestinationWriter
 import io.airbyte.cdk.load.write.StreamLoader
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableWriterFactory
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableWriterFactory
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import javax.inject.Singleton
 import org.apache.iceberg.Schema
 
 @Singleton
 class S3DataLakeWriter(
-    private val icebergTableWriterFactory: IcebergTableWriterFactory,
+    private val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory,
     private val icebergConfiguration: S3DataLakeConfiguration,
-    private val icebergUtil: IcebergUtil
+    private val s3DataLakeUtil: S3DataLakeUtil
 ) : DestinationWriter {
 
     override fun createStreamLoader(stream: DestinationStream): StreamLoader {
-        val properties = icebergUtil.toCatalogProperties(config = icebergConfiguration)
-        val catalog = icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, properties)
+        val properties = s3DataLakeUtil.toCatalogProperties(config = icebergConfiguration)
+        val catalog = s3DataLakeUtil.createCatalog(DEFAULT_CATALOG_NAME, properties)
         val pipeline = IcebergParquetPipelineFactory().create(stream)
-        val schema = icebergUtil.toIcebergSchema(stream = stream, pipeline = pipeline)
+        val schema = s3DataLakeUtil.toIcebergSchema(stream = stream, pipeline = pipeline)
         val table =
-            icebergUtil.createTable(
+            s3DataLakeUtil.createTable(
                 streamDescriptor = stream.descriptor,
                 catalog = catalog,
                 schema = schema,
@@ -38,8 +38,8 @@ class S3DataLakeWriter(
         return S3DataLakeStreamLoader(
             stream = stream,
             table = table,
-            icebergTableWriterFactory = icebergTableWriterFactory,
-            icebergUtil = icebergUtil,
+            s3DataLakeTableWriterFactory = s3DataLakeTableWriterFactory,
+            s3DataLakeUtil = s3DataLakeUtil,
             pipeline = pipeline,
             stagingBranchName = DEFAULT_STAGING_BRANCH,
             mainBranchName = icebergConfiguration.icebergCatalogConfiguration.mainBranchName,
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleaner.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleaner.kt
similarity index 93%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleaner.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleaner.kt
index 4507f280f763..2513712f7a04 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleaner.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleaner.kt
@@ -16,7 +16,7 @@
  * catalog implementations do not clear the underlying files written to table storage.
  */
 @Singleton
-class IcebergTableCleaner(private val icebergUtil: IcebergUtil) {
+class S3DataLakeTableCleaner(private val s3DataLakeUtil: S3DataLakeUtil) {
 
     /**
      * Clears the table identified by the provided [TableIdentifier]. This removes all data and
@@ -49,7 +49,7 @@ class IcebergTableCleaner(private val icebergUtil: IcebergUtil) {
         val genIdsToDelete =
             generationIdSuffix
                 .filter {
-                    icebergUtil.assertGenerationIdSuffixIsOfValidFormat(it)
+                    s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(it)
                     true
                 }
                 .toSet()
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactory.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactory.kt
similarity index 97%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactory.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactory.kt
index 8bfe0a333197..1d704b7c3311 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactory.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactory.kt
@@ -29,7 +29,7 @@ import org.apache.iceberg.util.PropertyUtil
  * and whether primary keys are configured on the destination table's schema.
 */
 @Singleton
-class IcebergTableWriterFactory(private val icebergUtil: IcebergUtil) {
+class S3DataLakeTableWriterFactory(private val s3DataLakeUtil: S3DataLakeUtil) {
     /**
     * Creates a new [BaseTaskWriter] based on the configuration of the destination target [Table].
     *
@@ -39,7 +39,7 @@ class IcebergTableWriterFactory(private val icebergUtil: IcebergUtil) {
     * @return The [BaseTaskWriter] that writes records to the target [Table].
     */
    fun create(table: Table, generationId: String, importType: ImportType): BaseTaskWriter<Record> {
-        icebergUtil.assertGenerationIdSuffixIsOfValidFormat(generationId)
+        s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(generationId)
         val format =
             FileFormat.valueOf(
                 table
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt
similarity index 99%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt
index 97ab342301f1..6e1ef5abf1e7 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtil.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt
@@ -68,7 +68,7 @@ data class AWSSystemCredentials(
  * will be removed when we change all of this to use Micronaut
 */
 @Singleton
-class IcebergUtil(
+class S3DataLakeUtil(
     private val tableIdGenerator: TableIdGenerator,
     val awsSystemCredentials: AWSSystemCredentials? = null
 ) {
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt
index 209801c44409..9854ddd24fef 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeDestinationCleaner.kt
@@ -7,8 +7,8 @@ import io.airbyte.cdk.load.test.util.DestinationCleaner
 import io.airbyte.cdk.load.test.util.IntegrationTest.Companion.isNamespaceOld
 import io.airbyte.cdk.load.test.util.IntegrationTest.Companion.randomizedNamespaceRegex
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableCleaner
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableCleaner
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import org.apache.iceberg.catalog.Catalog
 import org.apache.iceberg.catalog.Namespace
 import org.apache.iceberg.catalog.SupportsNamespaces
@@ -22,7 +22,7 @@ class S3DataLakeDestinationCleaner(private val catalog: Catalog) : DestinationCl
     }
 
     // we're passing explicit TableIdentifier to clearTable, so just use SimpleTableIdGenerator
-    val tableCleaner = IcebergTableCleaner(IcebergUtil(SimpleTableIdGenerator()))
+    val tableCleaner = S3DataLakeTableCleaner(S3DataLakeUtil(SimpleTableIdGenerator()))
 
     namespaces.forEach { namespace ->
         catalog.listTables(namespace).forEach { tableId ->
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
index f8cb34f8571c..7f471251ad36 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt
@@ -9,7 +9,7 @@ import io.airbyte.cdk.command.ConfigurationSpecification
 import io.airbyte.cdk.command.ValidatedJsonUtils
 import io.airbyte.cdk.load.util.Jsons
 import io.airbyte.integrations.destination.s3_data_lake.io.AWSSystemCredentials
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import java.nio.file.Files
 import java.nio.file.Path
 
@@ -38,7 +38,7 @@ object S3DataLakeTestUtil {
         S3DataLakeConfigurationFactory().makeWithoutExceptionHandling(spec as S3DataLakeSpecification)
 
     fun getCatalog(config: S3DataLakeConfiguration, awsSystemCredentials: AWSSystemCredentials) =
-        IcebergUtil(SimpleTableIdGenerator(), awsSystemCredentials).let { icebergUtil ->
+        S3DataLakeUtil(SimpleTableIdGenerator(), awsSystemCredentials).let { icebergUtil ->
             val props = icebergUtil.toCatalogProperties(config)
             icebergUtil.createCatalog(DEFAULT_CATALOG_NAME, props)
         }
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriterTest.kt
similarity index 93%
rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriterTest.kt
index 956355d058ab..bdef5c680134 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/IcebergV2WriterTest.kt
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeWriterTest.kt
@@ -23,8 +23,8 @@ import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_EXTRACTED_AT
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_GENERATION_ID
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_META
 import io.airbyte.cdk.load.message.Meta.Companion.COLUMN_NAME_AB_RAW_ID
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergTableWriterFactory
-import io.airbyte.integrations.destination.s3_data_lake.io.IcebergUtil
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeTableWriterFactory
+import io.airbyte.integrations.destination.s3_data_lake.io.S3DataLakeUtil
 import io.mockk.every
 import io.mockk.mockk
 import org.apache.iceberg.Schema
@@ -36,7 +36,7 @@ import org.junit.jupiter.api.Assertions.assertTrue
 import org.junit.jupiter.api.Test
 import org.junit.jupiter.api.assertThrows
 
-internal class IcebergV2WriterTest {
+internal class S3DataLakeWriterTest {
@Test fun testCreateStreamLoader() { @@ -95,7 +95,7 @@ internal class IcebergV2WriterTest { ), Types.NestedField.of(12, false, COLUMN_NAME_AB_GENERATION_ID, Types.LongType.get()), ) - val icebergTableWriterFactory: IcebergTableWriterFactory = mockk() + val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory = mockk() val awsConfiguration: AWSAccessKeyConfiguration = mockk { every { accessKeyId } returns "access-key" every { secretAccessKey } returns "secret-access-key" } @@ -119,7 +119,7 @@ } val catalog: Catalog = mockk() val table: Table = mockk { every { schema() } returns icebergSchema } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { createCatalog(any(), any()) } returns catalog every { createTable(any(), any(), any(), any()) } returns table every { toCatalogProperties(any()) } returns mapOf() @@ -131,9 +131,9 @@ } val s3DataLakeWriter = S3DataLakeWriter( - icebergTableWriterFactory = icebergTableWriterFactory, + s3DataLakeTableWriterFactory = s3DataLakeTableWriterFactory, icebergConfiguration = icebergConfiguration, - icebergUtil = icebergUtil, + s3DataLakeUtil = s3DataLakeUtil, ) val streamLoader = s3DataLakeWriter.createStreamLoader(stream = stream) assertNotNull(streamLoader) @@ -161,7 +161,7 @@ Schema( Types.NestedField.of(2, true, "name", Types.StringType.get()), ) - val icebergTableWriterFactory: IcebergTableWriterFactory = mockk() + val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory = mockk() val awsConfiguration: AWSAccessKeyConfiguration = mockk { every { accessKeyId } returns "access-key" every { secretAccessKey } returns "secret-access-key" @@ -184,7 +184,7 @@ } val catalog: Catalog = mockk() val table: Table = mockk { every { schema() } returns icebergSchema } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { createCatalog(any(), any()) } returns catalog every { createTable(any(), any(), any(), any()) } returns table every { toCatalogProperties(any()) } returns mapOf() @@ -196,9 +196,9 @@ } val s3DataLakeWriter = S3DataLakeWriter( - icebergTableWriterFactory = icebergTableWriterFactory, + s3DataLakeTableWriterFactory = s3DataLakeTableWriterFactory, icebergConfiguration = icebergConfiguration, - icebergUtil = icebergUtil, + s3DataLakeUtil = s3DataLakeUtil, ) val e = assertThrows<IllegalArgumentException> { @@ -268,7 +268,7 @@ internal class IcebergV2WriterTest { Types.NestedField.of(12, false, COLUMN_NAME_AB_GENERATION_ID, Types.LongType.get()), ) val icebergSchema = Schema(columns, emptySet()) - val icebergTableWriterFactory: IcebergTableWriterFactory = mockk() + val s3DataLakeTableWriterFactory: S3DataLakeTableWriterFactory = mockk() val awsConfiguration: AWSAccessKeyConfiguration = mockk { every { accessKeyId } returns "access-key" every { secretAccessKey } returns "secret-access-key" @@ -291,7 +291,7 @@ } val catalog: Catalog = mockk() val table: Table = mockk { every { schema() } returns icebergSchema } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { createCatalog(any(), any()) } returns catalog every { createTable(any(), any(), any(), any()) } returns table every { toCatalogProperties(any()) } returns mapOf() @@ -303,9 +303,9 @@ } val s3DataLakeWriter = S3DataLakeWriter( -
icebergTableWriterFactory = icebergTableWriterFactory, + s3DataLakeTableWriterFactory = s3DataLakeTableWriterFactory, icebergConfiguration = icebergConfiguration, - icebergUtil = icebergUtil, + s3DataLakeUtil = s3DataLakeUtil, ) val e = assertThrows<IllegalArgumentException> { diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleanerTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleanerTest.kt similarity index 85% rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleanerTest.kt rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleanerTest.kt index 6db980c0b3c0..d976ee0bd8cb 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableCleanerTest.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableCleanerTest.kt @@ -23,17 +23,17 @@ import org.apache.iceberg.io.FileIO import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertDoesNotThrow -internal class IcebergTableCleanerTest { +internal class S3DataLakeTableCleanerTest { @Test fun testClearingTableWithPrefix() { val catalog: Catalog = mockk { every { dropTable(any(), true) } returns true } - val icebergUtil: IcebergUtil = mockk() + val s3DataLakeUtil: S3DataLakeUtil = mockk() val tableIdentifier: TableIdentifier = mockk() val fileIo: S3FileIO = mockk { every { deletePrefix(any()) } returns Unit } val tableLocation = "table/location" - val cleaner = IcebergTableCleaner(icebergUtil = icebergUtil) + val cleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil) cleaner.clearTable( catalog = catalog, @@ -49,12 +49,12 @@ @Test fun testClearingTableWithoutPrefix() { val catalog: Catalog = mockk { every { dropTable(any(), true) } returns true } - val icebergUtil: IcebergUtil = mockk() + val s3DataLakeUtil: S3DataLakeUtil = mockk() val tableIdentifier: TableIdentifier = mockk() val fileIo: FileIO = mockk() val tableLocation = "table/location" - val cleaner = IcebergTableCleaner(icebergUtil = icebergUtil) + val cleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil) cleaner.clearTable( catalog = catalog, @@ -69,10 +69,10 @@ @Test fun `deleteGenerationId handles empty scan results gracefully`() { - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val cleaner = IcebergTableCleaner(icebergUtil = icebergUtil) + val cleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil) val generationIdSuffix = "ab-generation-id-0-e" val tasks = CloseableIterable.empty<FileScanTask>() @@ -87,10 +87,10 @@ @Test fun `deleteGenerationId deletes matching file via deleteFile`() { - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val cleaner = IcebergTableCleaner(icebergUtil = icebergUtil) + val cleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil)
val generationIdSuffix = "ab-generation-id-0-e" val filePathToDelete = "path/to/gen-5678/foo-bar-ab-generation-id-0-e.parquet" val fileScanTask = mockk() @@ -114,7 +114,7 @@ internal class IcebergTableCleanerTest { } verify { - icebergUtil.assertGenerationIdSuffixIsOfValidFormat(generationIdSuffix) + s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(generationIdSuffix) table.newDelete().toBranch(eq("staging")) delete.deleteFile(filePathToDelete) delete.commit() @@ -123,10 +123,10 @@ internal class IcebergTableCleanerTest { @Test fun `deleteGenerationId should not delete non matching file via deleteFile`() { - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val cleaner = IcebergTableCleaner(icebergUtil = icebergUtil) + val cleaner = S3DataLakeTableCleaner(s3DataLakeUtil = s3DataLakeUtil) val generationIdSuffix = "ab-generation-id-10-e" val filePathToDelete = "path/to/gen-5678/foo-bar-ab-generation-id-10-e.parquet" val fileScanTask = mockk() @@ -150,7 +150,7 @@ internal class IcebergTableCleanerTest { } verify(exactly = 0) { - icebergUtil.assertGenerationIdSuffixIsOfValidFormat(generationIdSuffix) + s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(generationIdSuffix) table.newDelete().toBranch(any()) delete.deleteFile(any()) delete.commit() diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactoryTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactoryTest.kt similarity index 94% rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactoryTest.kt rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactoryTest.kt index 0917a0274e93..cc17aa846a2b 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergTableWriterFactoryTest.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeTableWriterFactoryTest.kt @@ -24,7 +24,7 @@ import org.apache.iceberg.types.Types import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -internal class IcebergTableWriterFactoryTest { +internal class S3DataLakeTableWriterFactoryTest { private val generationIdSuffix: String = "ab-generation-id-0-e" @@ -67,11 +67,11 @@ internal class IcebergTableWriterFactoryTest { every { schema() } returns tableSchema every { spec() } returns tableSpec } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val factory = IcebergTableWriterFactory(icebergUtil = icebergUtil) + val factory = S3DataLakeTableWriterFactory(s3DataLakeUtil = s3DataLakeUtil) val writer = factory.create( table = table, @@ -125,11 +125,11 @@ internal class IcebergTableWriterFactoryTest { every { schema() } returns tableSchema every { spec() } returns tableSpec } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } 
- val factory = IcebergTableWriterFactory(icebergUtil = icebergUtil) + val factory = S3DataLakeTableWriterFactory(s3DataLakeUtil = s3DataLakeUtil) val writer = factory.create( table = table, @@ -182,11 +182,11 @@ internal class IcebergTableWriterFactoryTest { every { schema() } returns tableSchema every { spec() } returns tableSpec } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val factory = IcebergTableWriterFactory(icebergUtil = icebergUtil) + val factory = S3DataLakeTableWriterFactory(s3DataLakeUtil = s3DataLakeUtil) val writer = factory.create( table = table, @@ -235,11 +235,11 @@ internal class IcebergTableWriterFactoryTest { every { schema() } returns tableSchema every { spec() } returns tableSpec } - val icebergUtil: IcebergUtil = mockk { + val s3DataLakeUtil: S3DataLakeUtil = mockk { every { assertGenerationIdSuffixIsOfValidFormat(any()) } returns Unit } - val factory = IcebergTableWriterFactory(icebergUtil = icebergUtil) + val factory = S3DataLakeTableWriterFactory(s3DataLakeUtil = s3DataLakeUtil) val writer = factory.create( table = table, diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt similarity index 93% rename from airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt rename to airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt index 0a34bf89b945..e2320bcb8a6e 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/IcebergUtilTest.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt @@ -51,14 +51,14 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertDoesNotThrow import org.junit.jupiter.api.assertThrows -internal class IcebergUtilTest { +internal class S3DataLakeUtilTest { - private lateinit var icebergUtil: IcebergUtil + private lateinit var s3DataLakeUtil: S3DataLakeUtil private val tableIdGenerator = SimpleTableIdGenerator() @BeforeEach fun setup() { - icebergUtil = IcebergUtil(tableIdGenerator) + s3DataLakeUtil = S3DataLakeUtil(tableIdGenerator) } @Test @@ -70,7 +70,7 @@ internal class IcebergUtilTest { URI to "http://localhost:19120/api/v1", WAREHOUSE_LOCATION to "s3://test/" ) - val catalog = icebergUtil.createCatalog(catalogName = catalogName, properties = properties) + val catalog = s3DataLakeUtil.createCatalog(catalogName = catalogName, properties = properties) assertNotNull(catalog) assertEquals(catalogName, catalog.name()) assertEquals(NessieCatalog::class.java, catalog.javaClass) @@ -98,7 +98,7 @@ internal class IcebergUtilTest { false } val table = - icebergUtil.createTable( + s3DataLakeUtil.createTable( streamDescriptor = streamDescriptor, catalog = catalog, schema = schema, @@ -134,7 +134,7 @@ internal class IcebergUtilTest { false } val table = - icebergUtil.createTable( + s3DataLakeUtil.createTable( streamDescriptor = streamDescriptor, catalog = catalog, schema = schema, @@ -161,7 +161,7 @@ internal class 
IcebergUtilTest { every { tableExists(tableIdGenerator.toTableIdentifier(streamDescriptor)) } returns true } val table = - icebergUtil.createTable( + s3DataLakeUtil.createTable( streamDescriptor = streamDescriptor, catalog = catalog, schema = schema, @@ -214,7 +214,7 @@ internal class IcebergUtilTest { ) val schema = Schema(columns) val icebergRecord = - icebergUtil.toRecord( + s3DataLakeUtil.toRecord( record = airbyteRecord, pipeline = pipeline, tableSchema = schema, @@ -266,7 +266,7 @@ internal class IcebergUtilTest { ) val schema = Schema(columns, setOf(1)) val icebergRecord = - icebergUtil.toRecord( + s3DataLakeUtil.toRecord( record = airbyteRecord, pipeline = pipeline, tableSchema = schema, @@ -313,7 +313,7 @@ internal class IcebergUtilTest { ) val schema = Schema(columns, setOf(1)) val icebergRecord = - icebergUtil.toRecord( + s3DataLakeUtil.toRecord( record = airbyteRecord, pipeline = pipeline, tableSchema = schema, @@ -356,7 +356,7 @@ internal class IcebergUtilTest { icebergCatalogConfiguration = icebergCatalogConfiguration, s3BucketConfiguration = s3BucketConfiguration, ) - val catalogProperties = icebergUtil.toCatalogProperties(config = configuration) + val catalogProperties = s3DataLakeUtil.toCatalogProperties(config = configuration) assertEquals(ICEBERG_CATALOG_TYPE_NESSIE, catalogProperties[ICEBERG_CATALOG_TYPE]) assertEquals(nessieServerUri, catalogProperties[URI]) assertEquals(warehouseLocation, catalogProperties[WAREHOUSE_LOCATION]) @@ -374,7 +374,7 @@ fun `assertGenerationIdSuffixIsOfValidFormat accepts valid format`() { val validGenerationId = "ab-generation-id-123-e" assertDoesNotThrow { - icebergUtil.assertGenerationIdSuffixIsOfValidFormat(validGenerationId) + s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(validGenerationId) } } @@ -382,8 +382,8 @@ fun `assertGenerationIdSuffixIsOfValidFormat throws exception for invalid prefix`() { val invalidGenerationId = "invalid-generation-id-123" val exception = - assertThrows<IllegalArgumentException> { - icebergUtil.assertGenerationIdSuffixIsOfValidFormat(invalidGenerationId) + assertThrows<IllegalArgumentException> { + s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(invalidGenerationId) } assertEquals( "Invalid format: $invalidGenerationId. Expected format is 'ab-generation-id-<number>-e'", @@ -395,8 +395,8 @@ fun `assertGenerationIdSuffixIsOfValidFormat throws exception for missing number`() { val invalidGenerationId = "ab-generation-id-" val exception = - assertThrows<IllegalArgumentException> { - icebergUtil.assertGenerationIdSuffixIsOfValidFormat(invalidGenerationId) + assertThrows<IllegalArgumentException> { + s3DataLakeUtil.assertGenerationIdSuffixIsOfValidFormat(invalidGenerationId) } assertEquals( "Invalid format: $invalidGenerationId. Expected format is 'ab-generation-id-<number>-e'", @@ -409,7 +409,7 @@ val stream = mockk<DestinationStream>() every { stream.generationId } returns 42 val expectedSuffix = "ab-generation-id-42-e" - val result = icebergUtil.constructGenerationIdSuffix(stream) + val result = s3DataLakeUtil.constructGenerationIdSuffix(stream) assertEquals(expectedSuffix, result) } @@ -419,7 +419,7 @@ every { stream.generationId } returns -1 val exception = assertThrows<IllegalArgumentException> { - icebergUtil.constructGenerationIdSuffix(stream) + s3DataLakeUtil.constructGenerationIdSuffix(stream) } assertEquals( "GenerationId must be non-negative.
Provided: ${stream.generationId}", @@ -447,7 +447,7 @@ internal class IcebergUtilTest { syncId = 1, ) val pipeline = ParquetMapperPipelineFactory().create(stream) - val schema = icebergUtil.toIcebergSchema(stream = stream, pipeline = pipeline) + val schema = s3DataLakeUtil.toIcebergSchema(stream = stream, pipeline = pipeline) assertEquals(primaryKeys.toSet(), schema.identifierFieldNames()) assertEquals(6, schema.columns().size) assertNotNull(schema.findField("id")) @@ -477,7 +477,7 @@ internal class IcebergUtilTest { syncId = 1, ) val pipeline = ParquetMapperPipelineFactory().create(stream) - val schema = icebergUtil.toIcebergSchema(stream = stream, pipeline = pipeline) + val schema = s3DataLakeUtil.toIcebergSchema(stream = stream, pipeline = pipeline) assertEquals(emptySet<String>(), schema.identifierFieldNames()) assertEquals(6, schema.columns().size) assertNotNull(schema.findField("id")) From 0d756976ec20969e67aa61966eaa3c9cd00b2386 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III Date: Thu, 9 Jan 2025 21:26:31 +0000 Subject: [PATCH 7/9] chore: auto-fix lint and format issues --- .../destination/s3_data_lake/io/S3DataLakeUtil.kt | 4 +++- .../destination/s3_data_lake/S3DataLakeTestUtil.kt | 3 ++- .../destination/s3_data_lake/io/S3DataLakeUtilTest.kt | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt index 6e1ef5abf1e7..4e694978c11d 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtil.kt @@ -339,7 +339,9 @@ class S3DataLakeUtil( ) } - private fun buildKeyBasedClientProperties(config: S3DataLakeConfiguration): Map<String, String> { + private fun buildKeyBasedClientProperties( + config: S3DataLakeConfiguration + ): Map<String, String> { val awsAccessKeyId = requireNotNull(config.awsAccessKeyConfiguration.accessKeyId) { "AWS Access Key ID is required for key-based authentication" diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt index 7f471251ad36..d03fc4be4420 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test-integration/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTestUtil.kt @@ -35,7 +35,8 @@ object S3DataLakeTestUtil { } fun getConfig(spec: ConfigurationSpecification) = - S3DataLakeConfigurationFactory().makeWithoutExceptionHandling(spec as S3DataLakeSpecification) + S3DataLakeConfigurationFactory() + .makeWithoutExceptionHandling(spec as S3DataLakeSpecification) fun getCatalog(config: S3DataLakeConfiguration, awsSystemCredentials: AWSSystemCredentials) = S3DataLakeUtil(SimpleTableIdGenerator(), awsSystemCredentials).let { icebergUtil -> diff --git
a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt index e2320bcb8a6e..ce12721a7419 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt +++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/io/S3DataLakeUtilTest.kt @@ -70,7 +70,8 @@ internal class S3DataLakeUtilTest { URI to "http://localhost:19120/api/v1", WAREHOUSE_LOCATION to "s3://test/" ) - val catalog = s3DataLakeUtil.createCatalog(catalogName = catalogName, properties = properties) + val catalog = + s3DataLakeUtil.createCatalog(catalogName = catalogName, properties = properties) assertNotNull(catalog) assertEquals(catalogName, catalog.name()) assertEquals(NessieCatalog::class.java, catalog.javaClass) From f920ff0fa2f0b55b3905b3c73380e19d9d9079cb Mon Sep 17 00:00:00 2001 From: Francis Genet Date: Thu, 9 Jan 2025 13:28:37 -0800 Subject: [PATCH 8/9] Fix / update the release note --- docs/integrations/destinations/s3-data-lake.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/integrations/destinations/s3-data-lake.md b/docs/integrations/destinations/s3-data-lake.md index 29c390f1d9ad..3104c7300265 100644 --- a/docs/integrations/destinations/s3-data-lake.md +++ b/docs/integrations/destinations/s3-data-lake.md @@ -15,10 +15,11 @@ for more information.
<details>
  <summary>Expand to review</summary>

-| Version | Date | Pull Request | Subject | -|:--------|:-----------|:------------------------------------------------------------|:--------------------------------------------| -| 0.2.8 | 2025-01-09 | [\#51012](https://github.com/airbytehq/airbyte/pull/50991) | Rename/Cleanup package name from Iceberg V2 | -| 0.2.7 | 2025-01-09 | [\#50957](https://github.com/airbytehq/airbyte/pull/50991) | Add support for GLUE RBAC (Assume role) | -| 0.2.6 | 2025-01-08 | [\#50991](https://github.com/airbytehq/airbyte/pull/50991) | Initial public release. | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:------------------------------------------------------------|:---------------------------------------------| +| 0.2.8 | 2025-01-09 | [\#51022](https://github.com/airbytehq/airbyte/pull/51022) | Rename all classes and files from Iceberg V2 | +| 0.2.8 | 2025-01-09 | [\#51012](https://github.com/airbytehq/airbyte/pull/51012) | Rename/Cleanup package name from Iceberg V2 | +| 0.2.7 | 2025-01-09 | [\#50957](https://github.com/airbytehq/airbyte/pull/50957) | Add support for GLUE RBAC (Assume role) | +| 0.2.6 | 2025-01-08 | [\#50991](https://github.com/airbytehq/airbyte/pull/50991) | Initial public release. |
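The generation-id suffix contract that the renamed `S3DataLakeUtil` tests above keep exercising is compact enough to sketch in one place. The following is a minimal, self-contained Kotlin illustration; the regex and the `require`-based validation are assumptions inferred from the test expectations, not the connector's actual implementation:

```kotlin
// Sketch of the suffix rules implied by the S3DataLakeUtilTest cases.
// Assumption: validation failures surface as IllegalArgumentException via require().
private val GENERATION_ID_SUFFIX_REGEX = Regex("""ab-generation-id-\d+-e""")

// Builds "ab-generation-id-42-e" for generation 42; negative ids are rejected.
fun constructGenerationIdSuffix(generationId: Long): String {
    require(generationId >= 0) {
        "GenerationId must be non-negative. Provided: $generationId"
    }
    return "ab-generation-id-$generationId-e"
}

// Accepts "ab-generation-id-123-e"; rejects "invalid-generation-id-123" and
// "ab-generation-id-" (missing number), matching the renamed tests above.
fun assertGenerationIdSuffixIsOfValidFormat(suffix: String) {
    require(GENERATION_ID_SUFFIX_REGEX.matches(suffix)) {
        "Invalid format: $suffix. Expected format is 'ab-generation-id-<number>-e'"
    }
}

fun main() {
    println(constructGenerationIdSuffix(42)) // ab-generation-id-42-e
    assertGenerationIdSuffixIsOfValidFormat("ab-generation-id-123-e") // passes silently
}
```

This is also why the table cleaner can filter data files by suffix alone: any string that survives validation names exactly one generation.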
From 9f7d7554a41cf990e1341a608d32ec9ae5272dcf Mon Sep 17 00:00:00 2001 From: Francis Genet Date: Thu, 9 Jan 2025 13:31:35 -0800 Subject: [PATCH 9/9] Bump version to 0.2.9 --- .../connectors/destination-s3-data-lake/metadata.yaml | 2 +- docs/integrations/destinations/s3-data-lake.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml b/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml index c500835d818e..793e9e54b179 100644 --- a/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml +++ b/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml @@ -26,7 +26,7 @@ data: alias: airbyte-connector-testing-secret-store connectorType: destination definitionId: 716ca874-520b-4902-9f80-9fad66754b89 - dockerImageTag: 0.2.8 + dockerImageTag: 0.2.9 dockerRepository: airbyte/destination-s3-data-lake documentationUrl: https://docs.airbyte.com/integrations/destinations/s3-data-lake githubIssueLabel: destination-s3-data-lake diff --git a/docs/integrations/destinations/s3-data-lake.md b/docs/integrations/destinations/s3-data-lake.md index 3104c7300265..15c0008db508 100644 --- a/docs/integrations/destinations/s3-data-lake.md +++ b/docs/integrations/destinations/s3-data-lake.md @@ -17,7 +17,7 @@ for more information. | Version | Date | Pull Request | Subject | |:--------|:-----------|:------------------------------------------------------------|:---------------------------------------------| -| 0.2.8 | 2025-01-09 | [\#51022](https://github.com/airbytehq/airbyte/pull/51022) | Rename all classes and files from Iceberg V2 | +| 0.2.9 | 2025-01-09 | [\#51022](https://github.com/airbytehq/airbyte/pull/51022) | Rename all classes and files from Iceberg V2 | | 0.2.8 | 2025-01-09 | [\#51012](https://github.com/airbytehq/airbyte/pull/51012) | Rename/Cleanup package name from Iceberg V2 | | 0.2.7 | 2025-01-09 | [\#50957](https://github.com/airbytehq/airbyte/pull/50957) | Add support for GLUE RBAC (Assume role) | | 0.2.6 | 2025-01-08 | [\#50991](https://github.com/airbytehq/airbyte/pull/50991) | Initial public release. |
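Patches 8 and 9 repair the version drift by hand: metadata.yaml's dockerImageTag and the newest changelog row must agree, and a duplicate 0.2.8 row had slipped into the table. A hedged sketch of a guard that would catch this drift automatically; the file paths and regexes are illustrative assumptions, not an existing Airbyte check:

```kotlin
import java.io.File

// Compares metadata.yaml's dockerImageTag against the newest changelog row.
// Assumption: the first "| x.y.z |" row in the docs table is the newest version.
fun main() {
    val metadata =
        File("airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml").readText()
    val docs = File("docs/integrations/destinations/s3-data-lake.md").readText()

    // Extracts e.g. "0.2.9" from "dockerImageTag: 0.2.9".
    val imageTag = Regex("""dockerImageTag:\s*(\S+)""").find(metadata)?.groupValues?.get(1)

    // First table row starting with a semver-like cell, e.g. "| 0.2.9 | 2025-01-09 | ...".
    val topChangelogVersion =
        Regex("""^\|\s*(\d+\.\d+\.\d+)\s*\|""", RegexOption.MULTILINE)
            .find(docs)?.groupValues?.get(1)

    check(imageTag != null && imageTag == topChangelogVersion) {
        "metadata.yaml says $imageTag but the changelog's newest row is $topChangelogVersion"
    }
    println("Version $imageTag is consistent across metadata.yaml and the changelog.")
}
```

Run from the repository root, a check along these lines would have flagged the 0.2.8/0.2.9 mismatch that patch 9 fixes.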