[MINOR] Add bundle validation for new artifacts in Maven central #12486

Closed
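From the diff: this change adds a second bundle-validation job, validate-release-maven-artifacts, that validates the published Hudi bundle jars directly from Maven Central (https://repo1.maven.org/maven2), and extends packaging/bundle-validation/ci_run.sh so it resolves the download location from either a staging repository number or a Maven base URL, rejecting the case where both are supplied.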
67 changes: 66 additions & 1 deletion .github/workflows/release_candidate_validation.yml
@@ -41,7 +41,7 @@ jobs:
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.14'
            sparkProfile: 'spark3.3'
-            sparkRuntime: 'spark3.3.1'
+            sparkRuntime: 'spark3.3.4'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
@@ -77,3 +77,68 @@ jobs:
        if: ${{ env.SPARK_PROFILE >= 'spark3.3' }} # Only Spark 3.3 and above support Java 17
        run: |
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 $STAGING_REPO_NUM

  validate-release-maven-artifacts:
    runs-on: ubuntu-latest
    # if: false
    env:
      HUDI_VERSION: 1.0.0
      MAVEN_BASE_URL: 'https://repo1.maven.org/maven2'
    strategy:
      matrix:
        include:
          - scalaProfile: 'scala-2.13'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.1'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.17'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.1'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.16'
            sparkProfile: 'spark3.4'
            sparkRuntime: 'spark3.4.3'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.15'
            sparkProfile: 'spark3.3'
            sparkRuntime: 'spark3.3.4'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.14'
            sparkProfile: 'spark3.3'
            sparkRuntime: 'spark3.3.4'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: IT - Bundle Validation - OpenJDK 8
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        run: |
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java8 $HUDI_VERSION openjdk8 "" $MAVEN_BASE_URL
      - name: IT - Bundle Validation - OpenJDK 11
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3' }} # Only run validation on Spark 3
        run: |
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11 "" $MAVEN_BASE_URL
      - name: IT - Bundle Validation - OpenJDK 17
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3.3' }} # Only Spark 3.3 and above support Java 17
        run: |
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 "" $MAVEN_BASE_URL
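For reference, a minimal sketch (not part of the diff) of how ci_run.sh is invoked in the two modes; the positional arguments mirror the run steps above, and it is assumed that SPARK_PROFILE, SPARK_RUNTIME, SCALA_PROFILE, and FLINK_PROFILE are exported from the matrix as the workflow does:

    # Release-candidate mode (existing job): download bundles from the Apache staging repo.
    ./packaging/bundle-validation/ci_run.sh hudi_docker_java8 $HUDI_VERSION openjdk8 $STAGING_REPO_NUM

    # Maven Central mode (new job): the 4th argument is left empty so the script
    # falls through to the 5th argument, MAVEN_BASE_URL.
    ./packaging/bundle-validation/ci_run.sh hudi_docker_java8 $HUDI_VERSION openjdk8 "" https://repo1.maven.org/maven2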
25 changes: 22 additions & 3 deletions packaging/bundle-validation/ci_run.sh
@@ -31,9 +31,29 @@ CONTAINER_NAME=$1
HUDI_VERSION=$2
JAVA_RUNTIME_VERSION=$3
STAGING_REPO_NUM=$4
MAVEN_BASE_URL=$5
echo "HUDI_VERSION: $HUDI_VERSION JAVA_RUNTIME_VERSION: $JAVA_RUNTIME_VERSION"
echo "SPARK_RUNTIME: $SPARK_RUNTIME SPARK_PROFILE (optional): $SPARK_PROFILE"
echo "SCALA_PROFILE: $SCALA_PROFILE"
echo "MAVEN_BASE_URL: $MAVEN_BASE_URL"
echo "STAGING_REPO_NUM: $STAGING_REPO_NUM"

# Ensure only one of STAGING_REPO_NUM or MAVEN_BASE_URL is provided
if [[ -n "$STAGING_REPO_NUM" && -n "$MAVEN_BASE_URL" ]]; then
echo "Error: Both STAGING_REPO_NUM and MAVEN_BASE_URL cannot be provided simultaneously."
exit 1
fi

if [[ -n "$STAGING_REPO_NUM" ]]; then
REPO_BASE_URL=https://repository.apache.org/content/repositories/orgapachehudi-$STAGING_REPO_NUM/org/apache/hudi
echo "Downloading bundle jars from staging repo orgapachehudi-$REPO_BASE_URL ..."
elif [[ -n "$MAVEN_BASE_URL" ]]; then
REPO_BASE_URL=$MAVEN_BASE_URL/org/apache/hudi
echo "Downloading bundle jars from maven central - $REPO_BASE_URL ..."
else
echo "Error: Either STAGING_REPO_NUM or MAVEN_BASE_URL must be provided."
exit 1
fi

# choose versions based on build profiles
if [[ ${SPARK_RUNTIME} == 'spark3.3.1' ]]; then
@@ -112,7 +132,7 @@ fi
TMP_JARS_DIR=/tmp/jars/$(date +%s)
mkdir -p $TMP_JARS_DIR

if [[ -z "$STAGING_REPO_NUM" ]]; then
if [ -z "$STAGING_REPO_NUM" ] && [ -z "$MAVEN_BASE_URL" ]; then
  echo 'Adding built bundle jars for validation'
  if [[ "$SCALA_PROFILE" != 'scala-2.13' ]]; then
    # For Scala 2.13, Flink is not supported, so skip the Flink bundle validation
@@ -170,8 +190,7 @@ else
    HUDI_FLINK_BUNDLE_NAME=hudi-flink1.20-bundle
  fi

-  echo "Downloading bundle jars from staging repo orgapachehudi-$STAGING_REPO_NUM ..."
-  REPO_BASE_URL=https://repository.apache.org/content/repositories/orgapachehudi-$STAGING_REPO_NUM/org/apache/hudi
+  echo "Downloading bundle jars from base URL - $REPO_BASE_URL ..."
  wget -q $REPO_BASE_URL/$HUDI_FLINK_BUNDLE_NAME/$HUDI_VERSION/$HUDI_FLINK_BUNDLE_NAME-$HUDI_VERSION.jar -P $TMP_JARS_DIR/
  wget -q $REPO_BASE_URL/$HUDI_HADOOP_MR_BUNDLE_NAME/$HUDI_VERSION/$HUDI_HADOOP_MR_BUNDLE_NAME-$HUDI_VERSION.jar -P $TMP_JARS_DIR/
  wget -q $REPO_BASE_URL/$HUDI_KAFKA_CONNECT_BUNDLE_NAME/$HUDI_VERSION/$HUDI_KAFKA_CONNECT_BUNDLE_NAME-$HUDI_VERSION.jar -P $TMP_JARS_DIR/
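As a rough illustration of the download path in Maven Central mode (assuming, for the sake of the sketch, that HUDI_HADOOP_MR_BUNDLE_NAME is set to hudi-hadoop-mr-bundle earlier in ci_run.sh), the wget pattern above resolves like this:

    MAVEN_BASE_URL=https://repo1.maven.org/maven2       # from the workflow env
    HUDI_VERSION=1.0.0                                  # from the workflow env
    HUDI_HADOOP_MR_BUNDLE_NAME=hudi-hadoop-mr-bundle    # assumed value, set earlier in ci_run.sh
    REPO_BASE_URL=$MAVEN_BASE_URL/org/apache/hudi
    # Expands to:
    # https://repo1.maven.org/maven2/org/apache/hudi/hudi-hadoop-mr-bundle/1.0.0/hudi-hadoop-mr-bundle-1.0.0.jar
    wget -q $REPO_BASE_URL/$HUDI_HADOOP_MR_BUNDLE_NAME/$HUDI_VERSION/$HUDI_HADOOP_MR_BUNDLE_NAME-$HUDI_VERSION.jar -P /tmp/jars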