From 1db8ffdeb5b7c42790257c0b2cfc5622dc25f12c Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Wed, 11 Oct 2023 00:24:00 +0000 Subject: [PATCH 1/8] Initial implementation --- TODO.md | 0 .../iceberg/IcebergTableProperties.java | 29 ++++++++++++++++++- .../io/trino/plugin/iceberg/IcebergUtil.java | 20 ++++++++++++- .../procedure/RegisterTableProcedure.java | 2 +- 4 files changed, 48 insertions(+), 3 deletions(-) create mode 100644 TODO.md diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java index 9de78f2280e17..87076a3d64a5c 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java @@ -19,6 +19,8 @@ import io.trino.spi.TrinoException; import io.trino.spi.session.PropertyMetadata; import io.trino.spi.type.ArrayType; +import io.trino.spi.type.MapType; +import io.trino.spi.type.TypeManager; import java.util.List; import java.util.Map; @@ -45,13 +47,15 @@ public class IcebergTableProperties public static final String FORMAT_VERSION_PROPERTY = "format_version"; public static final String ORC_BLOOM_FILTER_COLUMNS = "orc_bloom_filter_columns"; public static final String ORC_BLOOM_FILTER_FPP = "orc_bloom_filter_fpp"; + public static final String EXTRA_PROPERTIES = "extra_properties"; private final List> tableProperties; @Inject public IcebergTableProperties( IcebergConfig icebergConfig, - OrcWriterConfig orcWriterConfig) + OrcWriterConfig orcWriterConfig, + TypeManager typeManager) { tableProperties = ImmutableList.>builder() .add(enumProperty( @@ -107,6 +111,24 @@ public IcebergTableProperties( orcWriterConfig.getDefaultBloomFilterFpp(), IcebergTableProperties::validateOrcBloomFilterFpp, false)) + .add(new 
PropertyMetadata<>( + EXTRA_PROPERTIES, + "Extra table properties", + new MapType(VARCHAR, VARCHAR, typeManager.getTypeOperators()), + Map.class, + null, + true, // These properties are not listed in SHOW CREATE TABLE + value -> { + Map extraProperties = (Map) value; + if (extraProperties.containsValue(null)) { + throw new TrinoException(INVALID_TABLE_PROPERTY, format("Extra table property value cannot be null '%s'", extraProperties)); + } + if (extraProperties.containsKey(null)) { + throw new TrinoException(INVALID_TABLE_PROPERTY, format("Extra table property key cannot be null '%s'", extraProperties)); + } + return extraProperties; + }, + value -> value)) .build(); } @@ -169,4 +191,9 @@ private static void validateOrcBloomFilterFpp(double fpp) throw new TrinoException(INVALID_TABLE_PROPERTY, "Bloom filter fpp value must be between 0.0 and 1.0"); } } + + public static Optional> getExtraProperties(Map tableProperties) + { + return Optional.ofNullable((Map) tableProperties.get(EXTRA_PROPERTIES)); + } } diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java index 6a196cab870af..69df66cf63aac 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java @@ -110,6 +110,7 @@ import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_FPP; import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.getExtraProperties; import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterColumns; import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterFpp; import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning; @@ -656,7 
+657,24 @@ public static Transaction newCreateTableTransaction(TrinoCatalog catalog, Connec propertiesBuilder.put(TABLE_COMMENT, tableMetadata.getComment().get()); } - return catalog.newCreateTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, targetPath, propertiesBuilder.buildOrThrow()); + Map baseProperties = propertiesBuilder.buildOrThrow(); + + // Add properties set via "extra_properties" table property. + Map extraProperties = getExtraProperties(tableMetadata.getProperties()) + .orElseGet(ImmutableMap::of); + Set illegalExtraProperties = Sets.intersection(baseProperties.keySet(), extraProperties.keySet()); + if (!illegalExtraProperties.isEmpty()) { + throw new TrinoException( + INVALID_TABLE_PROPERTY, + "Illegal keys in extra_properties: " + illegalExtraProperties); + } + + Map properties = ImmutableMap.builder() + .putAll(baseProperties) + .putAll(extraProperties) + .buildOrThrow(); + + return catalog.newCreateTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, targetPath, properties); } /** diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/procedure/RegisterTableProcedure.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/procedure/RegisterTableProcedure.java index a3b3efb385fb5..659c62f48a92c 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/procedure/RegisterTableProcedure.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/procedure/RegisterTableProcedure.java @@ -238,7 +238,7 @@ private static boolean locationEquivalent(String a, String b) private static String normalizeS3Uri(String tableLocation) { - // Normalize e.g. s3a to s3, so that table can be registed using s3:// location + // Normalize e.g. s3a to s3, so that table can be registered using s3:// location // even if internally it uses s3a:// paths. 
return tableLocation.replaceFirst("^s3[an]://", "s3://"); } From d86b194b6822a86f5df67d742fdf061e489859f5 Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Wed, 11 Oct 2023 01:49:04 +0100 Subject: [PATCH 2/8] Delete TODO.md --- TODO.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 TODO.md diff --git a/TODO.md b/TODO.md deleted file mode 100644 index e69de29bb2d1d..0000000000000 From ca06b0f5d1931607116620a292b3a15146e08c02 Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Sun, 15 Oct 2023 01:57:43 +0000 Subject: [PATCH 3/8] Add docs --- docs/src/main/sphinx/connector/iceberg.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/src/main/sphinx/connector/iceberg.md b/docs/src/main/sphinx/connector/iceberg.md index 51b9ba9429b35..5599777781c54 100644 --- a/docs/src/main/sphinx/connector/iceberg.md +++ b/docs/src/main/sphinx/connector/iceberg.md @@ -685,6 +685,11 @@ connector using a {doc}`WITH ` clause. * - ``orc_bloom_filter_fpp`` - The ORC bloom filters false positive probability. Requires ORC format. Defaults to ``0.05``. + * - ``extra_properties`` + - Additional properties added to an Iceberg table. The properties are not + used by Trino, and are available in the ``$properties`` metadata table. + The properties are not included in the output of ``SHOW CREATE TABLE`` + statements. ``` The table definition below specifies to use Parquet files, partitioning by columns From cf081219fd45184fd62adf33e40dfd831e699a1a Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Sun, 15 Oct 2023 02:17:04 +0000 Subject: [PATCH 4/8] Allow updates as well. 
The following now works ALTER TABLE baz SET PROPERTIES extra_properties = MAP(ARRAY['commit.retry.num-retries', 'b'], ARRAY['2', 'xyz']); --- .../java/io/trino/plugin/iceberg/IcebergMetadata.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index 826c0514be146..af877994513a1 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -221,6 +221,7 @@ import static io.trino.plugin.iceberg.IcebergSessionProperties.isMergeManifestsOnWrite; import static io.trino.plugin.iceberg.IcebergSessionProperties.isProjectionPushdownEnabled; import static io.trino.plugin.iceberg.IcebergSessionProperties.isStatisticsEnabled; +import static io.trino.plugin.iceberg.IcebergTableProperties.EXTRA_PROPERTIES; import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY; import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; @@ -301,7 +302,7 @@ public class IcebergMetadata private static final int CLEANING_UP_PROCEDURES_MAX_SUPPORTED_TABLE_VERSION = 2; private static final String RETENTION_THRESHOLD = "retention_threshold"; private static final String UNKNOWN_SNAPSHOT_TOKEN = "UNKNOWN"; - public static final Set UPDATABLE_TABLE_PROPERTIES = ImmutableSet.of(FILE_FORMAT_PROPERTY, FORMAT_VERSION_PROPERTY, PARTITIONING_PROPERTY, SORTED_BY_PROPERTY); + public static final Set UPDATABLE_TABLE_PROPERTIES = ImmutableSet.of(FILE_FORMAT_PROPERTY, FORMAT_VERSION_PROPERTY, PARTITIONING_PROPERTY, SORTED_BY_PROPERTY, EXTRA_PROPERTIES); public static final String ORC_BLOOM_FILTER_COLUMNS_KEY = "orc.bloom.filter.columns"; public static final String 
ORC_BLOOM_FILTER_FPP_KEY = "orc.bloom.filter.fpp"; @@ -1660,6 +1661,14 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta beginTransaction(icebergTable); UpdateProperties updateProperties = transaction.updateProperties(); + if (properties.containsKey(EXTRA_PROPERTIES)) { + Map extraProperties = (Map) properties.get(EXTRA_PROPERTIES) + .orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty")); + extraProperties.forEach((key, value) -> { + updateProperties.set(key, value); + }); + } + if (properties.containsKey(FILE_FORMAT_PROPERTY)) { IcebergFileFormat fileFormat = (IcebergFileFormat) properties.get(FILE_FORMAT_PROPERTY) .orElseThrow(() -> new IllegalArgumentException("The format property cannot be empty")); From 57ae088540990cf4e2f3a2bb30c848ce59fd41ee Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Sun, 15 Oct 2023 22:45:01 +0000 Subject: [PATCH 5/8] Minor refactor --- .../main/java/io/trino/plugin/iceberg/IcebergMetadata.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index af877994513a1..e5651f039d493 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -1664,9 +1664,7 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta if (properties.containsKey(EXTRA_PROPERTIES)) { Map extraProperties = (Map) properties.get(EXTRA_PROPERTIES) .orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty")); - extraProperties.forEach((key, value) -> { - updateProperties.set(key, value); - }); + extraProperties.forEach(updateProperties::set); } if (properties.containsKey(FILE_FORMAT_PROPERTY)) { From 
39e4efafd9c08d2bce8045c1b5383fe219d24d9f Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Sun, 15 Oct 2023 23:30:21 +0000 Subject: [PATCH 6/8] Add tests (not passing atm) --- .../iceberg/BaseIcebergConnectorTest.java | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java index d34b5595feb81..19c4272ebe8da 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java @@ -48,6 +48,7 @@ import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedResultWithQueryId; import io.trino.testing.MaterializedRow; +import io.trino.testing.QueryFailedException; import io.trino.testing.QueryRunner; import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; @@ -7341,4 +7342,108 @@ private void assertQueryIdStored(String tableName, QueryId queryId) assertThat(getFieldFromLatestSnapshotSummary(tableName, TRINO_QUERY_ID_NAME)) .isEqualTo(queryId.toString()); } + + @Test + public void testExtraProperties() + { + String tableName = format("%s.%s.create_table_with_multiple_extra_properties_%s", getSession().getCatalog().get(), getSession().getSchema().get(), randomNameSuffix()); + assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']))".formatted(tableName)); + + assertQuery( + "SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName), + "SELECT 'one', 'two'"); + assertThat(computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue()) + .isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) + + " c1 integer\n" + + ")\n" + + "WITH (\n" + + " format = 
'ORC'\n" + + ")"); + assertUpdate("DROP TABLE %s".formatted(tableName)); + } + + @Test + public void testExtraPropertiesWithCtas() + { + String tableName = format("%s.%s.create_table_ctas_with_multiple_extra_properties_%s", getSession().getCatalog().get(), getSession().getSchema().get(), randomNameSuffix()); + assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']))".formatted(tableName)); + + assertQuery( + "SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName), + "SELECT 'one', 'two'"); + assertThat(computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue()) + .isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) + + " c1 integer\n" + + ")\n" + + "WITH (\n" + + " format = 'ORC'\n" + + ")"); + + assertUpdate("DROP TABLE %s".formatted(tableName)); + } + + @Test + public void testShowCreateWithExtraProperties() + { + String tableName = format("%s.%s.show_create_table_with_extra_properties_%s", getSession().getCatalog().get(), getSession().getSchema().get(), randomNameSuffix()); + assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']))".formatted(tableName)); + + assertThat(computeActual("SHOW CREATE TABLE " + tableName).getOnlyValue()) + .isEqualTo("CREATE TABLE %s (\n".formatted(tableName) + + " c1 integer\n" + + ")\n" + + "WITH (\n" + + " format = 'ORC'\n" + + ")"); + + assertUpdate("DROP TABLE %s".formatted(tableName)); + } + + @Test + public void testDuplicateExtraProperties() + { + assertQueryFails( + "CREATE TABLE create_table_with_duplicate_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property', 'extra.property'], ARRAY['true', 'false']))", + "Invalid value for catalog 'iceberg' table property 'extra_properties': Cannot convert.*"); + assertQueryFails( + "CREATE TABLE 
create_table_select_as_with_duplicate_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property', 'extra.property'], ARRAY['true', 'false']))", + "Invalid value for catalog 'iceberg' table property 'extra_properties': Cannot convert.*"); + } + + @Test + public void testOverwriteExistingPropertyWithExtraProperties() + { + assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_with_overwrite_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['transactional'], ARRAY['true']))")) + .isInstanceOf(QueryFailedException.class) + .hasMessage("Illegal keys in extra_properties: [transactional]"); + + assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['rawDataSize'], ARRAY['1'])) AS SELECT 1 as c1")) + .isInstanceOf(QueryFailedException.class) + .hasMessage("Illegal keys in extra_properties: [rawDataSize]"); + } + + @Test + public void testNullExtraProperty() + { + assertQueryFails( + "CREATE TABLE create_table_with_duplicate_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['null.property'], ARRAY[null]))", + ".*Extra table property value cannot be null '\\{null.property=null}'.*"); + assertQueryFails( + "CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['null.property'], ARRAY[null])) AS SELECT 1 as c1", + ".*Extra table property value cannot be null '\\{null.property=null}'.*"); + } + + @Test + public void testCollidingMixedCaseProperty() + { + String tableName = "create_table_with_mixed_case_extra_properties" + randomNameSuffix(); + + assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['one', 'ONE'], ARRAY['one', 'ONE']))".formatted(tableName)); + // TODO: (https://github.com/trinodb/trino/issues/17) This should run successfully + assertThatThrownBy(() -> query("SELECT * FROM \"%s$properties\"".formatted(tableName))) + .isInstanceOf(QueryFailedException.class) + 
.hasMessageContaining("Multiple entries with same key: one=one and one=one"); + + assertUpdate("DROP TABLE %s".formatted(tableName)); + } } From e6cb6cc4416125f2e2bd6ada080affe231bf65a2 Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Tue, 31 Oct 2023 08:47:23 +0000 Subject: [PATCH 7/8] Dummy commit --- .../trino/plugin/iceberg/IcebergMetadata.java | 46 ++++----- .../iceberg/IcebergTableProperties.java | 10 ++ .../io/trino/plugin/iceberg/IcebergUtil.java | 16 +--- .../iceberg/BaseIcebergConnectorTest.java | 93 ++++++------------- 4 files changed, 58 insertions(+), 107 deletions(-) diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index e5651f039d493..c7da3591b55ef 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -160,20 +160,7 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneOffset; -import java.util.ArrayDeque; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Comparator; -import java.util.Deque; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.OptionalLong; -import java.util.Set; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; @@ -221,12 +208,7 @@ import static io.trino.plugin.iceberg.IcebergSessionProperties.isMergeManifestsOnWrite; import static io.trino.plugin.iceberg.IcebergSessionProperties.isProjectionPushdownEnabled; import static io.trino.plugin.iceberg.IcebergSessionProperties.isStatisticsEnabled; -import static io.trino.plugin.iceberg.IcebergTableProperties.EXTRA_PROPERTIES; 
-import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning; +import static io.trino.plugin.iceberg.IcebergTableProperties.*; import static io.trino.plugin.iceberg.IcebergUtil.canEnforceColumnConstraintInSpecs; import static io.trino.plugin.iceberg.IcebergUtil.commit; import static io.trino.plugin.iceberg.IcebergUtil.deserializePartitionValue; @@ -258,10 +240,7 @@ import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.EXPIRE_SNAPSHOTS; import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.OPTIMIZE; import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.REMOVE_ORPHAN_FILES; -import static io.trino.spi.StandardErrorCode.COLUMN_ALREADY_EXISTS; -import static io.trino.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY; -import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; -import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.trino.spi.StandardErrorCode.*; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.FRESH; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.STALE; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.UNKNOWN; @@ -1661,12 +1640,6 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta beginTransaction(icebergTable); UpdateProperties updateProperties = transaction.updateProperties(); - if (properties.containsKey(EXTRA_PROPERTIES)) { - Map extraProperties = (Map) properties.get(EXTRA_PROPERTIES) - .orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty")); - 
extraProperties.forEach(updateProperties::set); - } - if (properties.containsKey(FILE_FORMAT_PROPERTY)) { IcebergFileFormat fileFormat = (IcebergFileFormat) properties.get(FILE_FORMAT_PROPERTY) .orElseThrow(() -> new IllegalArgumentException("The format property cannot be empty")); @@ -1708,6 +1681,19 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta } } + if (properties.containsKey(EXTRA_PROPERTIES)) { + Map extraProperties = (Map) properties.get(EXTRA_PROPERTIES) + .orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty")); + + Set illegalExtraProperties = Sets.intersection(ILLEGAL_EXTRA_PROPERTIES, extraProperties.keySet()); + if (!illegalExtraProperties.isEmpty()) { + throw new TrinoException( + INVALID_TABLE_PROPERTY, + "Illegal keys in extra_properties: " + illegalExtraProperties); + } + extraProperties.forEach(updateProperties::set); + } + try { transaction.commitTransaction(); } diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java index 87076a3d64a5c..6375877405388 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTableProperties.java @@ -14,6 +14,7 @@ package io.trino.plugin.iceberg; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.inject.Inject; import io.trino.plugin.hive.orc.OrcWriterConfig; import io.trino.spi.TrinoException; @@ -25,6 +26,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.trino.plugin.iceberg.IcebergConfig.FORMAT_VERSION_SUPPORT_MAX; @@ -48,6 +50,14 @@ public class IcebergTableProperties public static final String 
ORC_BLOOM_FILTER_COLUMNS = "orc_bloom_filter_columns"; public static final String ORC_BLOOM_FILTER_FPP = "orc_bloom_filter_fpp"; public static final String EXTRA_PROPERTIES = "extra_properties"; + public static final Set ILLEGAL_EXTRA_PROPERTIES = ImmutableSet.of( + FILE_FORMAT_PROPERTY, + PARTITIONING_PROPERTY, + SORTED_BY_PROPERTY, + LOCATION_PROPERTY, + FORMAT_VERSION_PROPERTY, + ORC_BLOOM_FILTER_COLUMNS, + ORC_BLOOM_FILTER_FPP); private final List> tableProperties; diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java index 69df66cf63aac..78dd0f42ab28d 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java @@ -103,19 +103,7 @@ import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_INVALID_PARTITION_VALUE; import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_COLUMNS_KEY; import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_FPP_KEY; -import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.LOCATION_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_COLUMNS; -import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_FPP; -import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY; -import static io.trino.plugin.iceberg.IcebergTableProperties.getExtraProperties; -import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterColumns; -import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterFpp; -import static 
io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning; -import static io.trino.plugin.iceberg.IcebergTableProperties.getSortOrder; -import static io.trino.plugin.iceberg.IcebergTableProperties.getTableLocation; +import static io.trino.plugin.iceberg.IcebergTableProperties.*; import static io.trino.plugin.iceberg.PartitionFields.parsePartitionFields; import static io.trino.plugin.iceberg.PartitionFields.toPartitionFields; import static io.trino.plugin.iceberg.SortFieldUtils.parseSortFields; @@ -662,7 +650,7 @@ public static Transaction newCreateTableTransaction(TrinoCatalog catalog, Connec // Add properties set via "extra_properties" table property. Map extraProperties = getExtraProperties(tableMetadata.getProperties()) .orElseGet(ImmutableMap::of); - Set illegalExtraProperties = Sets.intersection(baseProperties.keySet(), extraProperties.keySet()); + Set illegalExtraProperties = Sets.intersection(ILLEGAL_EXTRA_PROPERTIES, extraProperties.keySet()); if (!illegalExtraProperties.isEmpty()) { throw new TrinoException( INVALID_TABLE_PROPERTY, diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java index 19c4272ebe8da..390da8b89fd50 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java @@ -32,25 +32,13 @@ import io.trino.plugin.hive.TestingHivePlugin; import io.trino.plugin.iceberg.fileio.ForwardingFileIo; import io.trino.spi.QueryId; -import io.trino.spi.connector.ColumnHandle; -import io.trino.spi.connector.Constraint; -import io.trino.spi.connector.ConstraintApplicationResult; -import io.trino.spi.connector.SchemaTableName; -import io.trino.spi.connector.TableNotFoundException; +import io.trino.spi.connector.*; import io.trino.spi.predicate.Domain; import 
io.trino.spi.predicate.TupleDomain; import io.trino.sql.planner.plan.FilterNode; import io.trino.sql.planner.plan.OutputNode; import io.trino.sql.planner.plan.ValuesNode; -import io.trino.testing.BaseConnectorTest; -import io.trino.testing.DataProviders; -import io.trino.testing.DistributedQueryRunner; -import io.trino.testing.MaterializedResult; -import io.trino.testing.MaterializedResultWithQueryId; -import io.trino.testing.MaterializedRow; -import io.trino.testing.QueryFailedException; -import io.trino.testing.QueryRunner; -import io.trino.testing.TestingConnectorBehavior; +import io.trino.testing.*; import io.trino.testing.sql.TestTable; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; @@ -62,6 +50,7 @@ import org.apache.iceberg.TableMetadataParser; import org.apache.iceberg.io.FileIO; import org.apache.iceberg.util.JsonUtil; +import org.assertj.core.api.Condition; import org.intellij.lang.annotations.Language; import org.testng.SkipException; import org.testng.annotations.BeforeClass; @@ -78,15 +67,7 @@ import java.time.Instant; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Optional; -import java.util.OptionalInt; -import java.util.Set; +import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.regex.Matcher; @@ -103,14 +84,9 @@ import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.MoreCollectors.onlyElement; import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly; -import static io.trino.SystemSessionProperties.SCALE_WRITERS; -import static io.trino.SystemSessionProperties.TASK_MAX_WRITER_COUNT; -import static io.trino.SystemSessionProperties.TASK_MIN_WRITER_COUNT; -import static 
io.trino.SystemSessionProperties.USE_PREFERRED_WRITE_PARTITIONING; +import static io.trino.SystemSessionProperties.*; import static io.trino.plugin.hive.metastore.file.TestingFileHiveMetastore.createTestingFileHiveMetastore; -import static io.trino.plugin.iceberg.IcebergFileFormat.AVRO; -import static io.trino.plugin.iceberg.IcebergFileFormat.ORC; -import static io.trino.plugin.iceberg.IcebergFileFormat.PARQUET; +import static io.trino.plugin.iceberg.IcebergFileFormat.*; import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG; import static io.trino.plugin.iceberg.IcebergSessionProperties.COLLECT_EXTENDED_STATISTICS_ON_WRITE; import static io.trino.plugin.iceberg.IcebergSessionProperties.EXTENDED_STATISTICS_ENABLED; @@ -145,19 +121,13 @@ import static java.util.Collections.nCopies; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static java.util.concurrent.TimeUnit.MINUTES; -import static java.util.concurrent.TimeUnit.SECONDS; +import static java.util.concurrent.TimeUnit.*; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; +import static org.testng.Assert.*; public abstract class BaseIcebergConnectorTest extends BaseConnectorTest @@ -7352,13 +7322,13 @@ public void testExtraProperties() assertQuery( "SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName), "SELECT 'one', 'two'"); - assertThat(computeActual("SHOW CREATE TABLE 
%s".formatted(tableName)).getOnlyValue()) - .isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) + - " c1 integer\n" + - ")\n" + - "WITH (\n" + - " format = 'ORC'\n" + - ")"); + + // Assert that SHOW CREATE TABLE does not contain extra_properties + assertThat((String) computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue()) + .satisfies(new Condition<>( + queryResult -> queryResult.contains("extra_properties"), "noExtraProperties" + )); + assertUpdate("DROP TABLE %s".formatted(tableName)); } @@ -7371,13 +7341,12 @@ public void testExtraPropertiesWithCtas() assertQuery( "SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName), "SELECT 'one', 'two'"); - assertThat(computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue()) - .isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) + - " c1 integer\n" + - ")\n" + - "WITH (\n" + - " format = 'ORC'\n" + - ")"); + + // Assert that SHOW CREATE TABLE does not contain extra_properties + assertThat((String) computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue()) + .satisfies(new Condition<>( + queryResult -> queryResult.contains("extra_properties"), "noExtraProperties" + )); assertUpdate("DROP TABLE %s".formatted(tableName)); } @@ -7388,13 +7357,11 @@ public void testShowCreateWithExtraProperties() String tableName = format("%s.%s.show_create_table_with_extra_properties_%s", getSession().getCatalog().get(), getSession().getSchema().get(), randomNameSuffix()); assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']))".formatted(tableName)); - assertThat(computeActual("SHOW CREATE TABLE " + tableName).getOnlyValue()) - .isEqualTo("CREATE TABLE %s (\n".formatted(tableName) + - " c1 integer\n" + - ")\n" + - "WITH (\n" + - " format = 'ORC'\n" + - ")"); + // Assert that SHOW CREATE TABLE does not contain extra_properties + 
assertThat((String) computeActual("SHOW CREATE TABLE " + tableName).getOnlyValue()) + .satisfies(new Condition<>( + queryResult -> !queryResult.contains("extra_properties"), "noExtraProperties" + )); assertUpdate("DROP TABLE %s".formatted(tableName)); } @@ -7413,13 +7380,13 @@ public void testDuplicateExtraProperties() @Test public void testOverwriteExistingPropertyWithExtraProperties() { - assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_with_overwrite_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['transactional'], ARRAY['true']))")) + assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_with_overwrite_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['write.format.default'], ARRAY['foobar']))")) .isInstanceOf(QueryFailedException.class) - .hasMessage("Illegal keys in extra_properties: [transactional]"); + .hasMessage("Illegal keys in extra_properties: [write.format.default]"); - assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['rawDataSize'], ARRAY['1'])) AS SELECT 1 as c1")) + assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['format-version'], ARRAY['10'])) AS SELECT 1 as c1")) .isInstanceOf(QueryFailedException.class) - .hasMessage("Illegal keys in extra_properties: [rawDataSize]"); + .hasMessage("Illegal keys in extra_properties: [format-version]"); } @Test From 55e17d3b79bc59152d3fe25004f1e6d9db271b3c Mon Sep 17 00:00:00 2001 From: Priyansh Agrawal Date: Sun, 6 Oct 2024 18:25:01 +0100 Subject: [PATCH 8/8] Fix * imports --- .../trino/plugin/iceberg/IcebergMetadata.java | 29 ++++++++++-- .../io/trino/plugin/iceberg/IcebergUtil.java | 15 ++++++- .../iceberg/BaseIcebergConnectorTest.java | 45 ++++++++++++++++--- 3 files changed, 78 insertions(+), 11 deletions(-) diff --git 
a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index c7da3591b55ef..ef2141c4e9bbc 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -160,7 +160,20 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneOffset; -import java.util.*; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.OptionalLong; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; @@ -208,7 +221,13 @@ import static io.trino.plugin.iceberg.IcebergSessionProperties.isMergeManifestsOnWrite; import static io.trino.plugin.iceberg.IcebergSessionProperties.isProjectionPushdownEnabled; import static io.trino.plugin.iceberg.IcebergSessionProperties.isStatisticsEnabled; -import static io.trino.plugin.iceberg.IcebergTableProperties.*; +import static io.trino.plugin.iceberg.IcebergTableProperties.EXTRA_PROPERTIES; +import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.ILLEGAL_EXTRA_PROPERTIES; +import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning; import static 
io.trino.plugin.iceberg.IcebergUtil.canEnforceColumnConstraintInSpecs; import static io.trino.plugin.iceberg.IcebergUtil.commit; import static io.trino.plugin.iceberg.IcebergUtil.deserializePartitionValue; @@ -240,7 +259,11 @@ import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.EXPIRE_SNAPSHOTS; import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.OPTIMIZE; import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.REMOVE_ORPHAN_FILES; -import static io.trino.spi.StandardErrorCode.*; +import static io.trino.spi.StandardErrorCode.COLUMN_ALREADY_EXISTS; +import static io.trino.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY; +import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; +import static io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; +import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.FRESH; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.STALE; import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.UNKNOWN; diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java index 78dd0f42ab28d..5b614304a451a 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java @@ -103,7 +103,20 @@ import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_INVALID_PARTITION_VALUE; import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_COLUMNS_KEY; import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_FPP_KEY; -import static io.trino.plugin.iceberg.IcebergTableProperties.*; +import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY; +import static 
io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.ILLEGAL_EXTRA_PROPERTIES; +import static io.trino.plugin.iceberg.IcebergTableProperties.LOCATION_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_COLUMNS; +import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_FPP; +import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY; +import static io.trino.plugin.iceberg.IcebergTableProperties.getExtraProperties; +import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterColumns; +import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterFpp; +import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning; +import static io.trino.plugin.iceberg.IcebergTableProperties.getSortOrder; +import static io.trino.plugin.iceberg.IcebergTableProperties.getTableLocation; import static io.trino.plugin.iceberg.PartitionFields.parsePartitionFields; import static io.trino.plugin.iceberg.PartitionFields.toPartitionFields; import static io.trino.plugin.iceberg.SortFieldUtils.parseSortFields; diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java index 390da8b89fd50..79a3d893f7ef5 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java @@ -32,13 +32,25 @@ import io.trino.plugin.hive.TestingHivePlugin; import io.trino.plugin.iceberg.fileio.ForwardingFileIo; import io.trino.spi.QueryId; -import io.trino.spi.connector.*; +import io.trino.spi.connector.ColumnHandle; +import io.trino.spi.connector.Constraint; +import 
io.trino.spi.connector.ConstraintApplicationResult; +import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.predicate.Domain; import io.trino.spi.predicate.TupleDomain; import io.trino.sql.planner.plan.FilterNode; import io.trino.sql.planner.plan.OutputNode; import io.trino.sql.planner.plan.ValuesNode; -import io.trino.testing.*; +import io.trino.testing.BaseConnectorTest; +import io.trino.testing.DataProviders; +import io.trino.testing.DistributedQueryRunner; +import io.trino.testing.MaterializedResult; +import io.trino.testing.MaterializedResultWithQueryId; +import io.trino.testing.MaterializedRow; +import io.trino.testing.QueryFailedException; +import io.trino.testing.QueryRunner; +import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; @@ -67,7 +79,15 @@ import java.time.Instant; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.regex.Matcher; @@ -84,9 +104,14 @@ import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.MoreCollectors.onlyElement; import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly; -import static io.trino.SystemSessionProperties.*; +import static io.trino.SystemSessionProperties.SCALE_WRITERS; +import static io.trino.SystemSessionProperties.TASK_MAX_WRITER_COUNT; +import static io.trino.SystemSessionProperties.TASK_MIN_WRITER_COUNT; +import static 
io.trino.SystemSessionProperties.USE_PREFERRED_WRITE_PARTITIONING; import static io.trino.plugin.hive.metastore.file.TestingFileHiveMetastore.createTestingFileHiveMetastore; -import static io.trino.plugin.iceberg.IcebergFileFormat.*; +import static io.trino.plugin.iceberg.IcebergFileFormat.AVRO; +import static io.trino.plugin.iceberg.IcebergFileFormat.ORC; +import static io.trino.plugin.iceberg.IcebergFileFormat.PARQUET; import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG; import static io.trino.plugin.iceberg.IcebergSessionProperties.COLLECT_EXTENDED_STATISTICS_ON_WRITE; import static io.trino.plugin.iceberg.IcebergSessionProperties.EXTENDED_STATISTICS_ENABLED; @@ -121,13 +146,19 @@ import static java.util.Collections.nCopies; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; -import static java.util.concurrent.TimeUnit.*; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.MINUTES; +import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.testng.Assert.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; public abstract class BaseIcebergConnectorTest extends BaseConnectorTest