Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add checkstyle in maven build #2610

Open
wants to merge 21 commits into
base: branch-25.02
Choose a base branch
from
Open
35 changes: 35 additions & 0 deletions .github/workflows/checkstyle.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Copyright (c) 2024, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A workflow that runs the Java style check (Checkstyle) on pull requests.
name: java format check

on:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      # Check out first: setup-java's Maven cache keys off pom.xml,
      # which must exist in the workspace before setup-java runs.
      - uses: actions/checkout@v4

      - name: Set up JDK
        uses: actions/[email protected]
        with:
          java-version: '8'
          distribution: 'adopt'
          # Cache ~/.m2/repository so checkstyle runs don't re-download deps.
          cache: 'maven'

      - name: Run checkstyle
        run: mvn checkstyle:check
3 changes: 2 additions & 1 deletion .github/workflows/license-header-check.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ jobs:
*.txt,
*.xml,
*.fbs,
build/*
build/*,
dev/*
excluded_file_patterns: |
thirdparty/*
41 changes: 41 additions & 0 deletions dev/checkstyle.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
#!/bin/bash
# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Runs Checkstyle only on the Java files modified in the working tree.

set -ex

# NOTE(review): `git diff --name-only` only lists unstaged changes; it misses
# staged or already-committed files. Confirm whether the PR range (e.g.
# `git diff --name-only origin/<base>...HEAD`) is what is actually wanted.
MODIFIED_FILES=$(git diff --name-only)

SRC_DIR="src/main/java/"
TEST_SRC_DIR="src/test/java/"

# Collect the modified Java files, stripping the source-root prefix so the
# paths match what maven-checkstyle-plugin's `includes` patterns expect.
JAVA_FILES=()
for FILE in $MODIFIED_FILES; do
  case "$FILE" in
    "$SRC_DIR"*.java)      JAVA_FILES+=("${FILE#"$SRC_DIR"}") ;;
    "$TEST_SRC_DIR"*.java) JAVA_FILES+=("${FILE#"$TEST_SRC_DIR"}") ;;
  esac
done

# If there are Java files to check, run Checkstyle on them.
if [ ${#JAVA_FILES[@]} -ne 0 ]; then
  # Join the array with commas. The previous `echo ... | tr ' ' ','` also
  # turned spaces INSIDE a filename into commas; joining via IFS does not.
  INCLUDES=$(IFS=,; printf '%s' "${JAVA_FILES[*]}")
  mvn checkstyle:check -Dcheckstyle.includes="$INCLUDES"
else
  echo "No Java files modified, skipping Checkstyle."
fi
365 changes: 365 additions & 0 deletions dev/checkstyle.xml

Large diffs are not rendered by default.

22 changes: 22 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -767,6 +767,28 @@
</execution>
</executions>
</plugin>
<!-- Enforces the project's Java style rules (dev/checkstyle.xml) during the build. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.3.0</version>
<configuration>
<!-- Rule set kept in-repo so CI and local builds apply identical checks. -->
<configLocation>dev/checkstyle.xml</configLocation>
<encoding>UTF-8</encoding>
<!-- Print violations to the console, not just the XML report. -->
<consoleOutput>true</consoleOutput>
<!-- Fail the build on Checkstyle execution errors.
     NOTE(review): style VIOLATIONS are governed by failOnViolation
     (defaults to true), not failsOnError - confirm this is intended. -->
<failsOnError>true</failsOnError>
<!-- No cross-reference (JXR) source links in reports. -->
<linkXRef>false</linkXRef>
<!-- Also check sources under src/test/java. -->
<includeTestSourceDirectory>true</includeTestSourceDirectory>
</configuration>
<executions>
<execution>
<id>validate</id>
<!-- Bind to the early `validate` phase so style failures surface fast. -->
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
Expand Down
125 changes: 64 additions & 61 deletions src/main/java/com/nvidia/spark/rapids/jni/Arms.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,77 +25,80 @@
* This class contains utility methods for automatic resource management.
*/
public class Arms {
/**
* This method close the resource if an exception is thrown while executing the function.
*/
public static <R extends AutoCloseable, T> T closeIfException(R resource, Function<R, T> function) {
/**
* This method close the resource if an exception is thrown while executing the function.
*/
public static <R extends AutoCloseable, T> T closeIfException(R resource,
Function<R, T> function) {
try {
return function.apply(resource);
} catch (Exception e) {
if (resource != null) {
try {
return function.apply(resource);
} catch (Exception e) {
if (resource != null) {
try {
resource.close();
} catch (Exception inner) {
e.addSuppressed(inner);
}
}
throw e;
resource.close();
} catch (Exception inner) {
e.addSuppressed(inner);
}
}
throw e;
}
}

/**
* This method safely closes all the resources.
* <p>
* This method will iterate through all the resources and closes them. If any exception happened during the
* traversal, exception will be captured and rethrown after all resources closed.
* </p>
*/
public static <R extends AutoCloseable> void closeAll(Iterator<R> resources) {
Throwable t = null;
while (resources.hasNext()) {
try {
R resource = resources.next();
if (resource != null) {
resource.close();
}
} catch (Exception e) {
if (t == null) {
t = e;
} else {
t.addSuppressed(e);
}
}
/**
* This method safely closes all the resources.
* <p>
* This method will iterate through all the resources and closes them. If any exception happened during the
* traversal, exception will be captured and rethrown after all resources closed.
* </p>
*/
public static <R extends AutoCloseable> void closeAll(Iterator<R> resources) {
Throwable t = null;
while (resources.hasNext()) {
try {
R resource = resources.next();
if (resource != null) {
resource.close();
}
} catch (Exception e) {
if (t == null) {
t = e;
} else {
t.addSuppressed(e);
}
}
}

if (t != null) throw new RuntimeException(t);
if (t != null) {
throw new RuntimeException(t);
}
}


/**
* This method safely closes all the resources. See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable> void closeAll(R... resources) {
closeAll(Arrays.asList(resources));
}
/**
* This method safely closes all the resources. See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable> void closeAll(R... resources) {
closeAll(Arrays.asList(resources));
}

/**
* This method safely closes the resources. See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable> void closeAll(Collection<R> resources) {
closeAll(resources.iterator());
}
/**
* This method safely closes the resources. See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable> void closeAll(Collection<R> resources) {
closeAll(resources.iterator());
}

/**
* This method safely closes the resources after applying the function.
* <br/>
* See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable, C extends Collection<R>, V> V withResource(
C resource, Function<C, V> function) {
try {
return function.apply(resource);
} finally {
closeAll(resource);
}
/**
* This method safely closes the resources after applying the function.
* <br/>
* See {@link #closeAll(Iterator)} for more details.
*/
public static <R extends AutoCloseable, C extends Collection<R>, V> V withResource(
C resource, Function<C, V> function) {
try {
return function.apply(resource);
} finally {
closeAll(resource);
}
}
}
64 changes: 36 additions & 28 deletions src/main/java/com/nvidia/spark/rapids/jni/BloomFilter.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2023, NVIDIA CORPORATION.
* Copyright (c) 2023-2024, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand All @@ -16,16 +16,13 @@

package com.nvidia.spark.rapids.jni;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.rapids.cudf.BaseDeviceMemoryBuffer;
import ai.rapids.cudf.ColumnVector;
import ai.rapids.cudf.CudfAccessor;
import ai.rapids.cudf.CudfException;
import ai.rapids.cudf.DType;
import ai.rapids.cudf.Scalar;
import ai.rapids.cudf.NativeDepsLoader;
import ai.rapids.cudf.Scalar;

public class BloomFilter {
static {
Expand All @@ -34,71 +31,82 @@ public class BloomFilter {

/**
* Create a bloom filter with the specified number of hashes and bloom filter bits.
* @param numHashes The number of hashes to use when inserting values into the bloom filter or
* when probing.
*
* @param numHashes The number of hashes to use when inserting values into the bloom filter or
* when probing.
* @param bloomFilterBits Size of the bloom filter in bits.
* @return a Scalar object which encapsulates the bloom filter.
*/
public static Scalar create(int numHashes, long bloomFilterBits){
if(numHashes <= 0){
public static Scalar create(int numHashes, long bloomFilterBits) {
if (numHashes <= 0) {
throw new IllegalArgumentException("Bloom filters must have a positive hash count");
}
if(bloomFilterBits <= 0){
if (bloomFilterBits <= 0) {
throw new IllegalArgumentException("Bloom filters must have a positive number of bits");
}
return CudfAccessor.scalarFromHandle(DType.LIST, creategpu(numHashes, bloomFilterBits));
}

/**
* Insert a column of longs into a bloom filter.
*
* @param bloomFilter The bloom filter to which values will be inserted.
* @param cv The column containing the values to add.
* @param cv The column containing the values to add.
*/
public static void put(Scalar bloomFilter, ColumnVector cv){
public static void put(Scalar bloomFilter, ColumnVector cv) {
put(CudfAccessor.getScalarHandle(bloomFilter), cv.getNativeView());
}

/**
* Merge one or more bloom filters into a new bloom filter.
* @param bloomFilters A ColumnVector containing a bloom filter per row.
*
* @param bloomFilters A ColumnVector containing a bloom filter per row.
* @return A new bloom filter containing the merged inputs.
*/
public static Scalar merge(ColumnVector bloomFilters){
public static Scalar merge(ColumnVector bloomFilters) {
return CudfAccessor.scalarFromHandle(DType.LIST, merge(bloomFilters.getNativeView()));
}

/**
* Probe a bloom filter with a column of longs. Returns a column of booleans. For
* Probe a bloom filter with a column of longs. Returns a column of booleans. For
* each row in the output; a value of true indicates that the corresponding input value
* -may- be in the set of values used to build the bloom filter; a value of false indicates
* that the corresponding input value is conclusively not in the set of values used to build
* the bloom filter.
* the bloom filter.
*
* @param bloomFilter The bloom filter to be probed.
* @param cv The column containing the values to check.
* @param cv The column containing the values to check.
* @return A boolean column indicating the results of the probe.
*/
public static ColumnVector probe(Scalar bloomFilter, ColumnVector cv){
public static ColumnVector probe(Scalar bloomFilter, ColumnVector cv) {
return new ColumnVector(probe(CudfAccessor.getScalarHandle(bloomFilter), cv.getNativeView()));
}

/**
* Probe a bloom filter with a column of longs. Returns a column of booleans. For
* Probe a bloom filter with a column of longs. Returns a column of booleans. For
* each row in the output; a value of true indicates that the corresponding input value
* -may- be in the set of values used to build the bloom filter; a value of false indicates
* that the corresponding input value is conclusively not in the set of values used to build
* the bloom filter.
* @param bloomFilter The bloom filter to be probed. This buffer is expected to be the
* fully packed Spark bloom filter, including header.
* @param cv The column containing the values to check.
* the bloom filter.
*
* @param bloomFilter The bloom filter to be probed. This buffer is expected to be the
* fully packed Spark bloom filter, including header.
* @param cv The column containing the values to check.
* @return A boolean column indicating the results of the probe.
*/
public static ColumnVector probe(BaseDeviceMemoryBuffer bloomFilter, ColumnVector cv){
return new ColumnVector(probebuffer(bloomFilter.getAddress(), bloomFilter.getLength(), cv.getNativeView()));
public static ColumnVector probe(BaseDeviceMemoryBuffer bloomFilter, ColumnVector cv) {
return new ColumnVector(
probebuffer(bloomFilter.getAddress(), bloomFilter.getLength(), cv.getNativeView()));
}

private static native long creategpu(int numHashes, long bloomFilterBits) throws CudfException;

private static native int put(long bloomFilter, long cv) throws CudfException;

private static native long merge(long bloomFilters) throws CudfException;
private static native long probe(long bloomFilter, long cv) throws CudfException;
private static native long probebuffer(long bloomFilter, long bloomFilterSize, long cv) throws CudfException;

private static native long probe(long bloomFilter, long cv) throws CudfException;

private static native long probebuffer(long bloomFilter, long bloomFilterSize, long cv)
throws CudfException;
}
Loading
Loading