
Commit c223a3f

Authored by HuangZhenQiu, Prabhu Joseph, root, and voonhous
[HUDI-8066] Cherry pick Flink 1.18 into 0.14.2 (#11780)
* [HUDI-6993] Support Flink 1.18 (#9949)
  * Address build failures in older Flink versions
  * Remove unnecessary dependency on flink-connector-hive
  * Fix Flink 1.18 Validate-bundles
  ---------
  Signed-off-by: Prabhu Joseph <[email protected]>
  Co-authored-by: Prabhu Joseph <[email protected]>
  Co-authored-by: root <[email protected]>

* [HUDI-7173] Fix hudi-on-flink read issues involving schema evolution and decimal types (#10247)

* Change back to 0.14.2-SNAPSHOT

* [HUDI-8067] Use exec to run the IT (#11751)
  Co-authored-by: Jonathan Vexler <=>

---------

Signed-off-by: Prabhu Joseph <[email protected]>
Co-authored-by: Prabhu Joseph <[email protected]>
Co-authored-by: Prabhu Joseph <[email protected]>
Co-authored-by: root <[email protected]>
Co-authored-by: voonhous <[email protected]>
Co-authored-by: Jon Vexler <[email protected]>
1 parent 6fdbaa7 commit c223a3f

File tree: 57 files changed, +4,879 −70 lines


.github/workflows/bot.yml

Lines changed: 8 additions & 4 deletions
@@ -119,7 +119,7 @@ jobs:
       include:
         - scalaProfile: "scala-2.12"
           sparkProfile: "spark3.2"
-          flinkProfile: "flink1.17"
+          flinkProfile: "flink1.18"

     steps:
       - uses: actions/checkout@v3
@@ -210,6 +210,7 @@ jobs:
         - flinkProfile: "flink1.15"
         - flinkProfile: "flink1.16"
         - flinkProfile: "flink1.17"
+        - flinkProfile: "flink1.18"
     steps:
       - uses: actions/checkout@v3
       - name: Set up JDK 8
@@ -234,7 +235,7 @@ jobs:
       env:
         SCALA_PROFILE: 'scala-2.12'
         FLINK_PROFILE: ${{ matrix.flinkProfile }}
-      if: ${{ endsWith(env.FLINK_PROFILE, '1.17') }}
+      if: ${{ endsWith(env.FLINK_PROFILE, '1.18') }}
       run: |
         mvn clean install -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
         mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink $MVN_ARGS
@@ -244,7 +245,7 @@ jobs:
     strategy:
       matrix:
         include:
-          - flinkProfile: 'flink1.17'
+          - flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.4'
            sparkRuntime: 'spark3.4.0'

@@ -272,9 +273,12 @@ jobs:
     strategy:
       matrix:
         include:
-          - flinkProfile: 'flink1.17'
+          - flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.4'
            sparkRuntime: 'spark3.4.0'
+          - flinkProfile: 'flink1.18'
+            sparkProfile: 'spark3.3'
+            sparkRuntime: 'spark3.3.2'
           - flinkProfile: 'flink1.17'
             sparkProfile: 'spark3.3'
             sparkRuntime: 'spark3.3.2'

README.md

Lines changed: 4 additions & 3 deletions
@@ -118,14 +118,15 @@ Starting from versions 0.11, Hudi no longer requires `spark-avro` to be specified

 ### Build with different Flink versions

-The default Flink version supported is 1.17. The default Flink 1.17.x version, corresponding to `flink1.17` profile is 1.17.0.
+The default Flink version supported is 1.18. The default Flink 1.18.x version, corresponding to `flink1.18` profile is 1.18.0.
 Flink is Scala-free since 1.15.x, there is no need to specify the Scala version for Flink 1.15.x and above versions.
 Refer to the table below for building with different Flink and Scala versions.

 | Maven build options        | Expected Flink bundle jar name | Notes                             |
 |:---------------------------|:-------------------------------|:----------------------------------|
-| (empty)                    | hudi-flink1.17-bundle          | For Flink 1.17 (default options)  |
-| `-Dflink1.17`              | hudi-flink1.17-bundle          | For Flink 1.17 (same as default)  |
+| (empty)                    | hudi-flink1.18-bundle          | For Flink 1.18 (default options)  |
+| `-Dflink1.18`              | hudi-flink1.18-bundle          | For Flink 1.18 (same as default)  |
+| `-Dflink1.17`              | hudi-flink1.17-bundle          | For Flink 1.17                    |
 | `-Dflink1.16`              | hudi-flink1.16-bundle          | For Flink 1.16                    |
 | `-Dflink1.15`              | hudi-flink1.15-bundle          | For Flink 1.15                    |
 | `-Dflink1.14`              | hudi-flink1.14-bundle          | For Flink 1.14 and Scala 2.12     |
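With this change a plain build targets Flink 1.18, and the older lines stay reachable through their profiles. A minimal sketch of both invocations (profile flags are taken from the table above; `-DskipTests` is only an assumption for a quick bundle build):

    # Default build -> hudi-flink1.18-bundle jar
    mvn clean package -DskipTests
    # Build against Flink 1.17 instead
    mvn clean package -DskipTests -Dflink1.17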

azure-pipelines-20230430.yml

Lines changed: 5 additions & 2 deletions
@@ -14,7 +14,7 @@
 # limitations under the License.

 # NOTE:
-# This config file defines how Azure CI runs tests with Spark 2.4 and Flink 1.17 profiles.
+# This config file defines how Azure CI runs tests with Spark 2.4 and Flink 1.18 profiles.
 # PRs will need to keep in sync with master's version to trigger the CI runs.

 trigger:
@@ -37,6 +37,7 @@ parameters:
       - 'hudi-flink-datasource/hudi-flink1.15.x'
       - 'hudi-flink-datasource/hudi-flink1.16.x'
       - 'hudi-flink-datasource/hudi-flink1.17.x'
+      - 'hudi-flink-datasource/hudi-flink1.18.x'
   - name: job2Modules
     type: object
     default:
@@ -69,6 +70,7 @@ parameters:
       - '!hudi-flink-datasource/hudi-flink1.15.x'
       - '!hudi-flink-datasource/hudi-flink1.16.x'
       - '!hudi-flink-datasource/hudi-flink1.17.x'
+      - '!hudi-flink-datasource/hudi-flink1.18.x'
       - '!hudi-spark-datasource'
       - '!hudi-spark-datasource/hudi-spark'
       - '!hudi-spark-datasource/hudi-spark3.2.x'
@@ -92,9 +94,10 @@ parameters:
       - '!hudi-flink-datasource/hudi-flink1.15.x'
       - '!hudi-flink-datasource/hudi-flink1.16.x'
       - '!hudi-flink-datasource/hudi-flink1.17.x'
+      - '!hudi-flink-datasource/hudi-flink1.18.x'

 variables:
-  BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.17'
+  BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.18'
   PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp -B -V -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn'
   MVN_OPTS_INSTALL: '-Phudi-platform-service -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS) -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5'
   MVN_OPTS_TEST: '-fae -Pwarn-log $(BUILD_PROFILES) $(PLUGIN_OPTS)'

docker/setup_demo.sh

Lines changed: 3 additions & 3 deletions
@@ -24,13 +24,13 @@ if [ "$HUDI_DEMO_ENV" = "--mac-aarch64" ]; then
   COMPOSE_FILE_NAME="docker-compose_hadoop284_hive233_spark244_mac_aarch64.yml"
 fi
 # restart cluster
-HUDI_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} down
+HUDI_WS=${WS_ROOT} docker compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} down
 if [ "$HUDI_DEMO_ENV" != "dev" ]; then
   echo "Pulling docker demo images ..."
-  HUDI_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} pull
+  HUDI_WS=${WS_ROOT} docker compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} pull
 fi
 sleep 5
-HUDI_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} up -d
+HUDI_WS=${WS_ROOT} docker compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} up -d
 sleep 15

 docker exec -it adhoc-1 /bin/bash /var/hoodie/ws/docker/demo/setup_demo_container.sh
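Compose V2 replaces the standalone `docker-compose` binary with a `docker compose` CLI plugin; in both, the `-f` compose-file flag is global and precedes the subcommand. A minimal before/after sketch (reusing the mac-aarch64 compose file named above):

    # Compose V1 (standalone binary)
    docker-compose -f compose/docker-compose_hadoop284_hive233_spark244_mac_aarch64.yml up -d
    # Compose V2 (CLI plugin): same flag order, hyphen dropped
    docker compose -f compose/docker-compose_hadoop284_hive233_spark244_mac_aarch64.yml up -d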

docker/stop_demo.sh

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ if [ "$HUDI_DEMO_ENV" = "--mac-aarch64" ]; then
   COMPOSE_FILE_NAME="docker-compose_hadoop284_hive233_spark244_mac_aarch64.yml"
 fi
 # shut down cluster
-HUDI_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} down
+HUDI_WS=${WS_ROOT} docker compose -f ${SCRIPT_PATH}/compose/${COMPOSE_FILE_NAME} down

 # remove host mount directory
 rm -rf /tmp/hadoop_data

hudi-flink-datasource/hudi-flink/pom.xml

Lines changed: 1 addition & 0 deletions
@@ -181,6 +181,7 @@
     <dependency>
       <groupId>org.apache.flink</groupId>
       <artifactId>${flink.connector.kafka.artifactId}</artifactId>
+      <version>${flink.connector.kafka.version}</version>
       <scope>compile</scope>
     </dependency>
     <dependency>
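As of the Flink 1.17/1.18 timeframe the Kafka connector is released separately from Flink core, so its version no longer tracks the Flink version and has to be pinned explicitly; presumably `flink.connector.kafka.version` is defined per Flink profile in the parent POM alongside the existing artifactId property. A hedged way to check what the `flink1.18` profile actually resolves to (module path as used by this commit's CI config):

    mvn dependency:tree -Dflink1.18 -pl hudi-flink-datasource/hudi-flink -am | grep -i kafka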

hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java

Lines changed: 18 additions & 18 deletions
@@ -18,6 +18,7 @@

 package org.apache.hudi.table.catalog;

+import org.apache.hudi.adapter.HiveCatalogConstants.AlterHiveDatabaseOp;
 import org.apache.hudi.avro.AvroSchemaUtils;
 import org.apache.hudi.client.HoodieFlinkWriteClient;
 import org.apache.hudi.common.fs.FSUtils;
@@ -47,9 +48,6 @@
 import org.apache.flink.annotation.VisibleForTesting;
 import org.apache.flink.configuration.ConfigOption;
 import org.apache.flink.configuration.Configuration;
-import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase;
-import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner;
-import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase;
 import org.apache.flink.table.catalog.AbstractCatalog;
 import org.apache.flink.table.catalog.CatalogBaseTable;
 import org.apache.flink.table.catalog.CatalogDatabase;
@@ -107,17 +105,20 @@
 import java.util.List;
 import java.util.Map;

-import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase.ALTER_DATABASE_OP;
-import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner.DATABASE_OWNER_NAME;
-import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner.DATABASE_OWNER_TYPE;
-import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.apache.flink.util.Preconditions.checkNotNull;
-import static org.apache.flink.util.StringUtils.isNullOrWhitespaceOnly;
+import static org.apache.hudi.adapter.HiveCatalogConstants.ALTER_DATABASE_OP;
+import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_LOCATION_URI;
+import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_OWNER_NAME;
+import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_OWNER_TYPE;
+import static org.apache.hudi.adapter.HiveCatalogConstants.ROLE_OWNER;
+import static org.apache.hudi.adapter.HiveCatalogConstants.USER_OWNER;
 import static org.apache.hudi.configuration.FlinkOptions.PATH;
 import static org.apache.hudi.table.catalog.TableOptionProperties.COMMENT;
 import static org.apache.hudi.table.catalog.TableOptionProperties.PK_CONSTRAINT_NAME;
 import static org.apache.hudi.table.catalog.TableOptionProperties.SPARK_SOURCE_PROVIDER;
+import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
+import static org.apache.flink.util.Preconditions.checkArgument;
+import static org.apache.flink.util.Preconditions.checkNotNull;
+import static org.apache.flink.util.StringUtils.isNullOrWhitespaceOnly;

 /**
  * A catalog implementation for Hoodie based on MetaStore.
@@ -219,7 +220,7 @@ public CatalogDatabase getDatabase(String databaseName)

     Map<String, String> properties = new HashMap<>(hiveDatabase.getParameters());

-    properties.put(SqlCreateHiveDatabase.DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());
+    properties.put(DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());

     return new CatalogDatabaseImpl(properties, hiveDatabase.getDescription());
   }
@@ -248,7 +249,7 @@ public void createDatabase(

     Map<String, String> properties = database.getProperties();

-    String dbLocationUri = properties.remove(SqlCreateHiveDatabase.DATABASE_LOCATION_URI);
+    String dbLocationUri = properties.remove(DATABASE_LOCATION_URI);
     if (dbLocationUri == null && this.catalogPath != null) {
       // infer default location uri
       dbLocationUri = new Path(this.catalogPath, databaseName).toString();
@@ -318,11 +319,10 @@ private static Database alterDatabase(Database hiveDB, CatalogDatabase newDataba
     String opStr = newParams.remove(ALTER_DATABASE_OP);
     if (opStr == null) {
       // by default is to alter db properties
-      opStr = SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name();
+      opStr = AlterHiveDatabaseOp.CHANGE_PROPS.name();
     }
-    String newLocation = newParams.remove(SqlCreateHiveDatabase.DATABASE_LOCATION_URI);
-    SqlAlterHiveDatabase.AlterHiveDatabaseOp op =
-        SqlAlterHiveDatabase.AlterHiveDatabaseOp.valueOf(opStr);
+    String newLocation = newParams.remove(DATABASE_LOCATION_URI);
+    AlterHiveDatabaseOp op = AlterHiveDatabaseOp.valueOf(opStr);
     switch (op) {
       case CHANGE_PROPS:
         hiveDB.setParameters(newParams);
@@ -335,10 +335,10 @@ private static Database alterDatabase(Database hiveDB, CatalogDatabase newDataba
     String ownerType = newParams.remove(DATABASE_OWNER_TYPE);
     hiveDB.setOwnerName(ownerName);
     switch (ownerType) {
-      case SqlAlterHiveDatabaseOwner.ROLE_OWNER:
+      case ROLE_OWNER:
         hiveDB.setOwnerType(PrincipalType.ROLE);
         break;
-      case SqlAlterHiveDatabaseOwner.USER_OWNER:
+      case USER_OWNER:
         hiveDB.setOwnerType(PrincipalType.USER);
         break;
       default:
Lines changed: 51 additions & 0 deletions
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.adapter;
+
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase;
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner;
+import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase;
+
+/**
+ * Constants for Hive Catalog.
+ */
+public class HiveCatalogConstants {
+
+  // -----------------------------------------------------------------------------------
+  //  Constants for ALTER DATABASE
+  // -----------------------------------------------------------------------------------
+  public static final String ALTER_DATABASE_OP = SqlAlterHiveDatabase.ALTER_DATABASE_OP;
+
+  public static final String DATABASE_LOCATION_URI = SqlCreateHiveDatabase.DATABASE_LOCATION_URI;
+
+  public static final String DATABASE_OWNER_NAME = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_NAME;
+
+  public static final String DATABASE_OWNER_TYPE = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_TYPE;
+
+  public static final String ROLE_OWNER = SqlAlterHiveDatabaseOwner.ROLE_OWNER;
+
+  public static final String USER_OWNER = SqlAlterHiveDatabaseOwner.USER_OWNER;
+
+  /** Type of ALTER DATABASE operation. */
+  public enum AlterHiveDatabaseOp {
+    CHANGE_PROPS,
+    CHANGE_LOCATION,
+    CHANGE_OWNER
+  }
+}
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.adapter;
+
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase;
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner;
+import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase;
+
+/**
+ * Constants for Hive Catalog.
+ */
+public class HiveCatalogConstants {
+
+  // -----------------------------------------------------------------------------------
+  //  Constants for ALTER DATABASE
+  // -----------------------------------------------------------------------------------
+  public static final String ALTER_DATABASE_OP = SqlAlterHiveDatabase.ALTER_DATABASE_OP;
+
+  public static final String DATABASE_LOCATION_URI = SqlCreateHiveDatabase.DATABASE_LOCATION_URI;
+
+  public static final String DATABASE_OWNER_NAME = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_NAME;
+
+  public static final String DATABASE_OWNER_TYPE = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_TYPE;
+
+  public static final String ROLE_OWNER = SqlAlterHiveDatabaseOwner.ROLE_OWNER;
+
+  public static final String USER_OWNER = SqlAlterHiveDatabaseOwner.USER_OWNER;
+
+  /** Type of ALTER DATABASE operation. */
+  public enum AlterHiveDatabaseOp {
+    CHANGE_PROPS,
+    CHANGE_LOCATION,
+    CHANGE_OWNER
+  }
+}
+
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hudi.adapter;
+
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase;
+import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner;
+import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase;
+
+/**
+ * Constants for Hive Catalog.
+ */
+public class HiveCatalogConstants {
+
+  // -----------------------------------------------------------------------------------
+  //  Constants for ALTER DATABASE
+  // -----------------------------------------------------------------------------------
+  public static final String ALTER_DATABASE_OP = SqlAlterHiveDatabase.ALTER_DATABASE_OP;
+
+  public static final String DATABASE_LOCATION_URI = SqlCreateHiveDatabase.DATABASE_LOCATION_URI;
+
+  public static final String DATABASE_OWNER_NAME = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_NAME;
+
+  public static final String DATABASE_OWNER_TYPE = SqlAlterHiveDatabaseOwner.DATABASE_OWNER_TYPE;
+
+  public static final String ROLE_OWNER = SqlAlterHiveDatabaseOwner.ROLE_OWNER;
+
+  public static final String USER_OWNER = SqlAlterHiveDatabaseOwner.USER_OWNER;
+
+  /** Type of ALTER DATABASE operation. */
+  public enum AlterHiveDatabaseOp {
+    CHANGE_PROPS,
+    CHANGE_LOCATION,
+    CHANGE_OWNER
+  }
+}
+
