Skip to content

Commit 44b0243

Browse files
HIVE-29413: Avoid code duplication by updating getPartCols method for Iceberg tables
1 parent c80c721 commit 44b0243

46 files changed

Lines changed: 101 additions & 93 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -374,7 +374,7 @@ private List<String> getTableData(String table, String database) throws Exceptio
374374
Hive hive = Hive.get(conf);
375375
org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
376376
FetchWork work;
377-
if (!tbl.getPartCols().isEmpty()) {
377+
if (!tbl.getPartCols(true).isEmpty()) {
378378
List<Partition> partitions = hive.getPartitions(tbl);
379379
List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
380380
List<Path> partLocs = new ArrayList<Path>();

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ public static boolean isSchemaEvolutionEnabled(Table table, Configuration conf)
7575
}
7676

7777
public static boolean isFullPartitionSpec(Table table, Map<String, String> partitionSpec) {
78-
for (FieldSchema partitionCol : table.getPartCols()) {
78+
for (FieldSchema partitionCol : table.getPartCols(false)) {
7979
if (partitionSpec.get(partitionCol.getName()) == null) {
8080
return false;
8181
}

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -66,9 +66,7 @@ private List<FieldSchema> getColumnsByPattern() throws HiveException {
6666

6767
private List<FieldSchema> getCols() throws HiveException {
6868
Table table = context.getDb().getTable(desc.getTableName());
69-
List<FieldSchema> allColumns = new ArrayList<>();
70-
allColumns.addAll(table.getCols());
71-
allColumns.addAll(table.getPartCols());
69+
List<FieldSchema> allColumns = new ArrayList<>(table.getAllCols());
7270
return allColumns;
7371
}
7472

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ private Table createViewLikeTable(Table oldTable) throws HiveException {
100100
setUserSpecifiedLocation(table);
101101

102102
table.setFields(oldTable.getCols());
103-
table.setPartCols(oldTable.getPartCols());
103+
table.setPartCols(oldTable.getPartCols(true));
104104

105105
if (desc.getDefaultSerdeProps() != null) {
106106
for (Map.Entry<String, String> e : desc.getDefaultSerdeProps().entrySet()) {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ private void getColumnsNoColumnPath(Table table, Partition partition, List<Field
131131
cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ?
132132
table.getCols() : partition.getCols());
133133
if (!desc.isFormatted()) {
134-
cols.addAll(table.getPartCols());
134+
cols.addAll(table.getPartCols(false));
135135
}
136136

137137
// Fetch partition statistics only for describe extended or formatted.

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -174,9 +174,7 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
174174
List<FieldSchema> partitionColumns = null;
175175
// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
176176
if (table.isPartitioned()) {
177-
partitionColumns = table.hasNonNativePartitionSupport() ?
178-
table.getStorageHandler().getPartitionKeys(table) :
179-
table.getPartCols();
177+
partitionColumns = table.getPartCols(true);
180178
}
181179
if (CollectionUtils.isNotEmpty(partitionColumns) &&
182180
conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/formatter/JsonShowTableStatusFormatter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ private Map<String, Object> makeOneTableStatus(Table table, Hive db, HiveConf co
6969

7070
builder.put("partitioned", table.isPartitioned());
7171
if (table.isPartitioned()) {
72-
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getPartCols(),
72+
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getPartCols(true),
7373
Collections.emptyList()));
7474
}
7575

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/formatter/TextShowTableStatusFormatter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ private void writeStorageInfo(DataOutputStream out, Partition partition, Table t
7373
private void writeColumnsInfo(DataOutputStream out, Table table) throws IOException, UnsupportedEncodingException {
7474
String columns = MetaStoreUtils.getDDLFromFieldSchema("columns", table.getCols());
7575
String partitionColumns = table.isPartitioned() ?
76-
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getPartCols()) : "";
76+
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getPartCols(true)) : "";
7777

7878
out.write(Utilities.newLineCode);
7979
out.write(("columns:" + columns).getBytes(StandardCharsets.UTF_8));

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/PartitionUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,7 @@ public static List<Partition> getPartitionsWithSpecs(Hive db, Table table, GetPa
150150
}
151151

152152
private static String tablePartitionColNames(Table table) {
153-
List<FieldSchema> partCols = table.getPartCols();
153+
List<FieldSchema> partCols = table.getPartCols(true);
154154
return String.join("/", partCols.toString());
155155
}
156156

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
8484
if (AcidUtils.isTransactionalTable(sourceTable) || AcidUtils.isTransactionalTable(destTable)) {
8585
throw new SemanticException(ErrorMsg.EXCHANGE_PARTITION_NOT_ALLOWED_WITH_TRANSACTIONAL_TABLES.getMsg());
8686
}
87-
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(), partitionSpecs);
87+
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(true), partitionSpecs);
8888

8989
// check if source partition exists
9090
GetPartitionsFilterSpec sourcePartitionsFilterSpec = new GetPartitionsFilterSpec();
@@ -106,7 +106,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
106106
throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partitionSpecs.toString()));
107107
}
108108

109-
List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(), partitionSpecs);
109+
List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(true), partitionSpecs);
110110

111111
// check if dest partition exists
112112
GetPartitionsFilterSpec getDestPartitionsFilterSpec = new GetPartitionsFilterSpec();

0 commit comments

Comments (0)