
Revert "[HUDI-6993] Support Flink 1.18 (apache#9949)"
This reverts commit bd136ad.
zhuanshenbsj1 committed Jan 8, 2024
1 parent 2bc5518 commit f3562cd
Showing 52 changed files with 39 additions and 4,644 deletions.
12 changes: 4 additions & 8 deletions .github/workflows/bot.yml
@@ -126,7 +126,7 @@ jobs:
include:
- scalaProfile: "scala-2.12"
sparkProfile: "spark3.2"
flinkProfile: "flink1.18"
flinkProfile: "flink1.17"

steps:
- uses: actions/checkout@v3
@@ -219,7 +219,6 @@ jobs:
- flinkProfile: "flink1.15"
- flinkProfile: "flink1.16"
- flinkProfile: "flink1.17"
- flinkProfile: "flink1.18"
steps:
- uses: actions/checkout@v3
- name: Set up JDK 8
@@ -245,7 +244,7 @@ jobs:
env:
SCALA_PROFILE: 'scala-2.12'
FLINK_PROFILE: ${{ matrix.flinkProfile }}
- if: ${{ endsWith(env.FLINK_PROFILE, '1.18') }}
+ if: ${{ endsWith(env.FLINK_PROFILE, '1.17') }}
run: |
mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink $MVN_ARGS
@@ -255,7 +254,7 @@
strategy:
matrix:
include:
- - flinkProfile: 'flink1.18'
+ - flinkProfile: 'flink1.17'
sparkProfile: 'spark3.4'
sparkRuntime: 'spark3.4.0'

@@ -284,12 +283,9 @@ jobs:
strategy:
matrix:
include:
- - flinkProfile: 'flink1.18'
+ - flinkProfile: 'flink1.17'
sparkProfile: 'spark3.4'
sparkRuntime: 'spark3.4.0'
- - flinkProfile: 'flink1.18'
-   sparkProfile: 'spark3.3'
-   sparkRuntime: 'spark3.3.2'
- flinkProfile: 'flink1.17'
sparkProfile: 'spark3.3'
sparkRuntime: 'spark3.3.2'
7 changes: 3 additions & 4 deletions README.md
@@ -118,15 +118,14 @@ Starting from versions 0.11, Hudi no longer requires `spark-avro` to be specified

### Build with different Flink versions

- The default Flink version supported is 1.18. The default Flink 1.18.x version, corresponding to `flink1.18` profile is 1.18.0.
+ The default Flink version supported is 1.17. The default Flink 1.17.x version, corresponding to `flink1.17` profile is 1.17.0.
Flink is Scala-free since 1.15.x, there is no need to specify the Scala version for Flink 1.15.x and above versions.
Refer to the table below for building with different Flink and Scala versions.

| Maven build options | Expected Flink bundle jar name | Notes |
|:---------------------------|:-------------------------------|:---------------------------------|
- | (empty) | hudi-flink1.18-bundle | For Flink 1.18 (default options) |
- | `-Dflink1.18` | hudi-flink1.18-bundle | For Flink 1.18 (same as default) |
- | `-Dflink1.17` | hudi-flink1.17-bundle | For Flink 1.17 |
+ | (empty) | hudi-flink1.17-bundle | For Flink 1.17 (default options) |
+ | `-Dflink1.17` | hudi-flink1.17-bundle | For Flink 1.17 (same as default) |
| `-Dflink1.16` | hudi-flink1.16-bundle | For Flink 1.16 |
| `-Dflink1.15` | hudi-flink1.15-bundle | For Flink 1.15 |
| `-Dflink1.14` | hudi-flink1.14-bundle | For Flink 1.14 and Scala 2.12 |
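
As a quick illustration of the table above (a sketch, not part of this commit: the `clean package -DskipTests` goal and flag are assumed typical Hudi build options; only the profile switches and bundle names come from the table):

  # default profile after this revert -> packages hudi-flink1.17-bundle
  mvn clean package -DskipTests
  # explicit profile switch -> packages hudi-flink1.16-bundle
  mvn clean package -DskipTests -Dflink1.16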
7 changes: 2 additions & 5 deletions azure-pipelines-20230430.yml
@@ -14,7 +14,7 @@
# limitations under the License.

# NOTE:
- # This config file defines how Azure CI runs tests with Spark 2.4 and Flink 1.18 profiles.
+ # This config file defines how Azure CI runs tests with Spark 2.4 and Flink 1.17 profiles.
# PRs will need to keep in sync with master's version to trigger the CI runs.

trigger:
@@ -37,7 +37,6 @@ parameters:
- 'hudi-flink-datasource/hudi-flink1.15.x'
- 'hudi-flink-datasource/hudi-flink1.16.x'
- 'hudi-flink-datasource/hudi-flink1.17.x'
- - 'hudi-flink-datasource/hudi-flink1.18.x'
- name: job2Modules
type: object
default:
@@ -70,7 +69,6 @@ parameters:
- '!hudi-flink-datasource/hudi-flink1.15.x'
- '!hudi-flink-datasource/hudi-flink1.16.x'
- '!hudi-flink-datasource/hudi-flink1.17.x'
- - '!hudi-flink-datasource/hudi-flink1.18.x'
- '!hudi-spark-datasource'
- '!hudi-spark-datasource/hudi-spark'
- '!hudi-spark-datasource/hudi-spark3.2.x'
@@ -94,10 +92,9 @@ parameters:
- '!hudi-flink-datasource/hudi-flink1.15.x'
- '!hudi-flink-datasource/hudi-flink1.16.x'
- '!hudi-flink-datasource/hudi-flink1.17.x'
- - '!hudi-flink-datasource/hudi-flink1.18.x'

variables:
- BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.18'
+ BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.17'
PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp -B -V -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn'
MVN_OPTS_INSTALL: '-Phudi-platform-service -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS) -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5'
MVN_OPTS_TEST: '-fae -Pwarn-log $(BUILD_PROFILES) $(PLUGIN_OPTS)'
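
For reference, substituting the BUILD_PROFILES and PLUGIN_OPTS values above into MVN_OPTS_INSTALL gives an invocation along these lines (a sketch: the `mvn install` goal and line wrapping are assumptions; only the option values come from this file):

  mvn install -Phudi-platform-service -DskipTests -Dscala-2.12 -Dspark3.2 -Dflink1.17 \
    -Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp -B -V -Pwarn-log \
    -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn \
    -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn \
    -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5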
1 change: 0 additions & 1 deletion hudi-flink-datasource/hudi-flink/pom.xml
@@ -181,7 +181,6 @@
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.connector.kafka.artifactId}</artifactId>
- <version>${flink.connector.kafka.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java
@@ -18,7 +18,6 @@

package org.apache.hudi.table.catalog;

- import org.apache.hudi.adapter.HiveCatalogConstants.AlterHiveDatabaseOp;
import org.apache.hudi.avro.AvroSchemaUtils;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.common.fs.FSUtils;
@@ -46,6 +45,9 @@
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
+ import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase;
+ import org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner;
+ import org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase;
import org.apache.flink.table.catalog.AbstractCatalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogDatabase;
@@ -102,20 +104,17 @@
import java.util.List;
import java.util.Map;

- import static org.apache.hudi.adapter.HiveCatalogConstants.ALTER_DATABASE_OP;
- import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_LOCATION_URI;
- import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_OWNER_NAME;
- import static org.apache.hudi.adapter.HiveCatalogConstants.DATABASE_OWNER_TYPE;
- import static org.apache.hudi.adapter.HiveCatalogConstants.ROLE_OWNER;
- import static org.apache.hudi.adapter.HiveCatalogConstants.USER_OWNER;
- import static org.apache.hudi.configuration.FlinkOptions.PATH;
- import static org.apache.hudi.table.catalog.TableOptionProperties.COMMENT;
- import static org.apache.hudi.table.catalog.TableOptionProperties.PK_CONSTRAINT_NAME;
- import static org.apache.hudi.table.catalog.TableOptionProperties.SPARK_SOURCE_PROVIDER;
+ import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase.ALTER_DATABASE_OP;
+ import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner.DATABASE_OWNER_NAME;
+ import static org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabaseOwner.DATABASE_OWNER_TYPE;
import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.StringUtils.isNullOrWhitespaceOnly;
+ import static org.apache.hudi.configuration.FlinkOptions.PATH;
+ import static org.apache.hudi.table.catalog.TableOptionProperties.COMMENT;
+ import static org.apache.hudi.table.catalog.TableOptionProperties.PK_CONSTRAINT_NAME;
+ import static org.apache.hudi.table.catalog.TableOptionProperties.SPARK_SOURCE_PROVIDER;

/**
* A catalog implementation for Hoodie based on MetaStore.
@@ -217,7 +216,7 @@ public CatalogDatabase getDatabase(String databaseName)

Map<String, String> properties = new HashMap<>(hiveDatabase.getParameters());

- properties.put(DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());
+ properties.put(SqlCreateHiveDatabase.DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());

return new CatalogDatabaseImpl(properties, hiveDatabase.getDescription());
}
@@ -246,7 +245,7 @@ public void createDatabase(

Map<String, String> properties = database.getProperties();

- String dbLocationUri = properties.remove(DATABASE_LOCATION_URI);
+ String dbLocationUri = properties.remove(SqlCreateHiveDatabase.DATABASE_LOCATION_URI);
if (dbLocationUri == null && this.catalogPath != null) {
// infer default location uri
dbLocationUri = new Path(this.catalogPath, databaseName).toString();
@@ -316,10 +315,11 @@ private static Database alterDatabase(Database hiveDB, CatalogDatabase newDatabase)
String opStr = newParams.remove(ALTER_DATABASE_OP);
if (opStr == null) {
// by default is to alter db properties
- opStr = AlterHiveDatabaseOp.CHANGE_PROPS.name();
+ opStr = SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name();
}
- String newLocation = newParams.remove(DATABASE_LOCATION_URI);
- AlterHiveDatabaseOp op = AlterHiveDatabaseOp.valueOf(opStr);
+ String newLocation = newParams.remove(SqlCreateHiveDatabase.DATABASE_LOCATION_URI);
+ SqlAlterHiveDatabase.AlterHiveDatabaseOp op =
+     SqlAlterHiveDatabase.AlterHiveDatabaseOp.valueOf(opStr);
switch (op) {
case CHANGE_PROPS:
hiveDB.setParameters(newParams);
@@ -332,10 +332,10 @@ private static Database alterDatabase(Database hiveDB, CatalogDatabase newDatabase)
String ownerType = newParams.remove(DATABASE_OWNER_TYPE);
hiveDB.setOwnerName(ownerName);
switch (ownerType) {
- case ROLE_OWNER:
+ case SqlAlterHiveDatabaseOwner.ROLE_OWNER:
hiveDB.setOwnerType(PrincipalType.ROLE);
break;
- case USER_OWNER:
+ case SqlAlterHiveDatabaseOwner.USER_OWNER:
hiveDB.setOwnerType(PrincipalType.USER);
break;
default:
