diff --git a/hudi-aws/src/main/java/org/apache/hudi/aws/sync/AWSGlueCatalogSyncClient.java b/hudi-aws/src/main/java/org/apache/hudi/aws/sync/AWSGlueCatalogSyncClient.java
index 506023b22a7d..1609c846d0e8 100644
--- a/hudi-aws/src/main/java/org/apache/hudi/aws/sync/AWSGlueCatalogSyncClient.java
+++ b/hudi-aws/src/main/java/org/apache/hudi/aws/sync/AWSGlueCatalogSyncClient.java
@@ -71,6 +71,7 @@
import software.amazon.awssdk.services.glue.model.GetPartitionsRequest;
import software.amazon.awssdk.services.glue.model.GetPartitionsResponse;
import software.amazon.awssdk.services.glue.model.GetTableRequest;
+import software.amazon.awssdk.services.glue.model.KeySchemaElement;
import software.amazon.awssdk.services.glue.model.PartitionIndex;
import software.amazon.awssdk.services.glue.model.PartitionIndexDescriptor;
import software.amazon.awssdk.services.glue.model.PartitionInput;
@@ -690,7 +691,7 @@ public void managePartitionIndexes(String tableName) throws ExecutionException,
GetPartitionIndexesRequest indexesRequest = GetPartitionIndexesRequest.builder().databaseName(databaseName).tableName(tableName).build();
GetPartitionIndexesResponse existingIdxsResp = awsGlue.getPartitionIndexes(indexesRequest).get();
for (PartitionIndexDescriptor idsToDelete : existingIdxsResp.partitionIndexDescriptorList()) {
- LOG.warn("Dropping partition index: " + idsToDelete.indexName());
+ LOG.warn("Dropping partition index: {}", idsToDelete.indexName());
DeletePartitionIndexRequest idxToDelete = DeletePartitionIndexRequest.builder()
.databaseName(databaseName).tableName(tableName).indexName(idsToDelete.indexName()).build();
awsGlue.deletePartitionIndex(idxToDelete).get();
@@ -712,8 +713,8 @@ public void managePartitionIndexes(String tableName) throws ExecutionException,
// for each existing index remove if not relevant anymore
boolean indexesChanges = false;
for (PartitionIndexDescriptor existingIdx: existingIdxsResp.partitionIndexDescriptorList()) {
- List<String> idxColumns = existingIdx.keys().stream().map(key -> key.name()).collect(Collectors.toList());
- Boolean toBeRemoved = true;
+ List<String> idxColumns = existingIdx.keys().stream().map(KeySchemaElement::name).collect(Collectors.toList());
+ boolean toBeRemoved = true;
for (List<String> neededIdx : partitionsIndexNeeded) {
if (neededIdx.equals(idxColumns)) {
toBeRemoved = false;
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java
index 3b42edc383a5..535ff086e47f 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java
@@ -25,9 +25,9 @@
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
-import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.InstantComparator;
import org.apache.hudi.common.table.timeline.TimelineUtils;
import org.apache.hudi.common.util.NumericUtils;
@@ -194,7 +194,7 @@ public String showArchivedCommits(
HoodieArchivedTimeline archivedTimeline = HoodieCLI.getTableMetaClient().getArchivedTimeline();
try {
archivedTimeline.loadInstantDetailsInMemory(startTs, endTs);
- HoodieTimeline timelineRange = (HoodieTimeline)archivedTimeline.findInstantsInRange(startTs, endTs);
+ HoodieTimeline timelineRange = archivedTimeline.findInstantsInRange(startTs, endTs);
if (includeExtraMetadata) {
return printCommitsWithMetadata(timelineRange, limit, sortByField, descending, headerOnly, exportTableName, partition);
} else {
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
index f98ea7cecbb8..b37146b1c818 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
@@ -31,10 +31,10 @@
import org.apache.hudi.client.CompactionAdminClient.ValidationOpResult;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
-import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.InstantGenerator;
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils;
import org.apache.hudi.common.util.Option;
@@ -431,7 +431,7 @@ protected static String printCompaction(HoodieCompactionPlan compactionPlan,
}
private static String getTmpSerializerFile() {
- return TMP_DIR + UUID.randomUUID().toString() + ".ser";
+ return TMP_DIR + UUID.randomUUID() + ".ser";
}
private T deSerializeOperationResult(StoragePath inputPath,
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/CLIUtils.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/CLIUtils.java
index 524778f8994b..99310aaa3e7a 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/CLIUtils.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/CLIUtils.java
@@ -21,10 +21,10 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.common.table.HoodieTableMetaClient;
-import org.apache.hudi.common.table.timeline.HoodieTimeline;
+import org.apache.hudi.common.table.timeline.BaseHoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieArchivedTimeline;
-import org.apache.hudi.common.table.timeline.BaseHoodieTimeline;
+import org.apache.hudi.common.table.timeline.HoodieTimeline;
import static org.apache.hudi.cli.utils.CommitUtil.getTimeDaysAgo;
import static org.apache.hudi.common.util.StringUtils.isNullOrEmpty;
@@ -57,7 +57,7 @@ public static HoodieTimeline getTimelineInRange(String startTs, String endTs, bo
if (includeArchivedTimeline) {
HoodieArchivedTimeline archivedTimeline = metaClient.getArchivedTimeline();
archivedTimeline.loadInstantDetailsInMemory(startTs, endTs);
- return ((HoodieTimeline)archivedTimeline.findInstantsInRange(startTs, endTs)).mergeTimeline(activeTimeline);
+ return archivedTimeline.findInstantsInRange(startTs, endTs).mergeTimeline(activeTimeline);
}
return activeTimeline;
}
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
index 5209465d8a93..405d4aebb396 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
@@ -32,7 +32,7 @@
public class InputStreamConsumer extends Thread {
private static final Logger LOG = LoggerFactory.getLogger(InputStreamConsumer.class);
- private InputStream is;
+ private final InputStream is;
public InputStreamConsumer(InputStream is) {
this.is = is;
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java
index 0ce6002c5cb7..c4aa1a327ef8 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java
@@ -40,8 +40,8 @@ public class SparkTempViewProvider implements TempViewProvider {
private static final Logger LOG = LoggerFactory.getLogger(SparkTempViewProvider.class);
- private JavaSparkContext jsc;
- private SQLContext sqlContext;
+ private final JavaSparkContext jsc;
+ private final SQLContext sqlContext;
public SparkTempViewProvider(String appName) {
try {
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/async/AsyncCompactService.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/async/AsyncCompactService.java
index 1d1b9421b596..218391eccbcb 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/async/AsyncCompactService.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/async/AsyncCompactService.java
@@ -47,7 +47,7 @@ public abstract class AsyncCompactService extends HoodieAsyncTableService {
private static final Logger LOG = LoggerFactory.getLogger(AsyncCompactService.class);
private final int maxConcurrentCompaction;
protected transient HoodieEngineContext context;
- private transient BaseCompactor compactor;
+ private final transient BaseCompactor compactor;
public AsyncCompactService(HoodieEngineContext context, BaseHoodieWriteClient client) {
this(context, client, false);
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/util/HoodieWriteCommitCallbackUtil.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/util/HoodieWriteCommitCallbackUtil.java
index fff0b713528b..cd05b78dfcf2 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/util/HoodieWriteCommitCallbackUtil.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/util/HoodieWriteCommitCallbackUtil.java
@@ -28,14 +28,14 @@
*/
public class HoodieWriteCommitCallbackUtil {
- private static ObjectMapper mapper = new ObjectMapper();
+ private static final ObjectMapper MAPPER = new ObjectMapper();
/**
* Convert data to json string format.
*/
public static String convertToJsonString(Object obj) {
try {
- return mapper.writeValueAsString(obj);
+ return MAPPER.writeValueAsString(obj);
} catch (IOException e) {
throw new HoodieCommitCallbackException("Callback service convert data to json failed", e);
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/WriteStatus.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/WriteStatus.java
index eac71cba191c..e9ab4c10d56d 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/WriteStatus.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/WriteStatus.java
@@ -259,14 +259,12 @@ public boolean isTrackingSuccessfulWrites() {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("WriteStatus {");
- sb.append("fileId=").append(fileId);
- sb.append(", writeStat=").append(stat);
- sb.append(", globalError='").append(globalError).append('\'');
- sb.append(", hasErrors='").append(hasErrors()).append('\'');
- sb.append(", errorCount='").append(totalErrorRecords).append('\'');
- sb.append(", errorPct='").append((100.0 * totalErrorRecords) / totalRecords).append('\'');
- sb.append('}');
- return sb.toString();
+ return "WriteStatus {" + "fileId=" + fileId
+ + ", writeStat=" + stat
+ + ", globalError='" + globalError + '\''
+ + ", hasErrors='" + hasErrors() + '\''
+ + ", errorCount='" + totalErrorRecords + '\''
+ + ", errorPct='" + (100.0 * totalErrorRecords) / totalRecords + '\''
+ + '}';
}
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
index 8c0bc8842b91..7193e6234e55 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/transaction/lock/FileSystemBasedLockProvider.java
@@ -67,8 +67,8 @@ public class FileSystemBasedLockProvider implements LockProvider, Serial
private final transient HoodieStorage storage;
private final transient StoragePath lockFile;
protected LockConfiguration lockConfiguration;
- private SimpleDateFormat sdf;
- private LockInfo lockInfo;
+ private final SimpleDateFormat sdf;
+ private final LockInfo lockInfo;
private String currentOwnerLockInfo;
public FileSystemBasedLockProvider(final LockConfiguration lockConfiguration, final StorageConfiguration<?> configuration) {
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java
index 385532917c49..931d50aeb5c0 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java
@@ -524,7 +524,7 @@ public class HoodieIndexConfig extends HoodieConfig {
@Deprecated
public static final String DEFAULT_SIMPLE_INDEX_UPDATE_PARTITION_PATH = SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE.defaultValue();
- private EngineType engineType;
+ private final EngineType engineType;
/**
* Use Spark engine by default.
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
index c7e14b6b4e1b..b6977d6324ca 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
@@ -835,7 +835,7 @@ public class HoodieWriteConfig extends HoodieConfig {
private HoodieStorageConfig storageConfig;
private HoodieTimeGeneratorConfig timeGeneratorConfig;
private HoodieIndexingConfig indexingConfig;
- private EngineType engineType;
+ private final EngineType engineType;
/**
* @deprecated Use {@link #TBL_NAME} and its methods instead
@@ -2801,7 +2801,7 @@ public static class Builder {
private boolean isCleanConfigSet = false;
private boolean isArchivalConfigSet = false;
private boolean isClusteringConfigSet = false;
- private boolean isOptimizeConfigSet = false;
+ private final boolean isOptimizeConfigSet = false;
private boolean isMetricsConfigSet = false;
private boolean isBootstrapConfigSet = false;
private boolean isMemoryConfigSet = false;
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
index 11ffb785f014..2d6808c81dbe 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
@@ -90,11 +90,9 @@ public int hashCode() {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("BloomIndexFileInfo {");
- sb.append(" fileId=").append(fileId);
- sb.append(" minRecordKey=").append(minRecordKey);
- sb.append(" maxRecordKey=").append(maxRecordKey);
- sb.append('}');
- return sb.toString();
+ return "BloomIndexFileInfo {" + " fileId=" + fileId
+ + " minRecordKey=" + minRecordKey
+ + " maxRecordKey=" + maxRecordKey
+ + '}';
}
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/hbase/DefaultHBaseQPSResourceAllocator.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/hbase/DefaultHBaseQPSResourceAllocator.java
index ef17716637c2..a9989b21440f 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/hbase/DefaultHBaseQPSResourceAllocator.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/index/hbase/DefaultHBaseQPSResourceAllocator.java
@@ -24,7 +24,7 @@
import org.slf4j.LoggerFactory;
public class DefaultHBaseQPSResourceAllocator implements HBaseIndexQPSResourceAllocator {
- private HoodieWriteConfig hoodieWriteConfig;
+ private final HoodieWriteConfig hoodieWriteConfig;
private static final Logger LOG = LoggerFactory.getLogger(DefaultHBaseQPSResourceAllocator.class);
public DefaultHBaseQPSResourceAllocator(HoodieWriteConfig hoodieWriteConfig) {
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/metrics/HoodieMetrics.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/metrics/HoodieMetrics.java
index 15a39249d21f..73411a3885ae 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/metrics/HoodieMetrics.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/metrics/HoodieMetrics.java
@@ -91,8 +91,8 @@ public class HoodieMetrics {
private String conflictResolutionFailureCounterName = null;
private String compactionRequestedCounterName = null;
private String compactionCompletedCounterName = null;
- private HoodieWriteConfig config;
- private String tableName;
+ private final HoodieWriteConfig config;
+ private final String tableName;
private Timer rollbackTimer = null;
private Timer cleanTimer = null;
private Timer archiveTimer = null;
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadProfile.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadProfile.java
index 8e6160b09548..fb831fb49b7b 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadProfile.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadProfile.java
@@ -114,12 +114,10 @@ public WriteOperationType getOperationType() {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("WorkloadProfile {");
- sb.append("globalStat=").append(globalStat).append(", ");
- sb.append("InputPartitionStat=").append(inputPartitionPathStatMap).append(", ");
- sb.append("OutputPartitionStat=").append(outputPartitionPathStatMap).append(", ");
- sb.append("operationType=").append(operationType);
- sb.append('}');
- return sb.toString();
+ return "WorkloadProfile {" + "globalStat=" + globalStat + ", "
+ + "InputPartitionStat=" + inputPartitionPathStatMap + ", "
+ + "OutputPartitionStat=" + outputPartitionPathStatMap + ", "
+ + "operationType=" + operationType
+ + '}';
}
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadStat.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadStat.java
index 327a5a3ae798..716dce473aef 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadStat.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/WorkloadStat.java
@@ -33,9 +33,9 @@ public class WorkloadStat implements Serializable {
private long numUpdates = 0L;
- private HashMap<String, Pair<String, Long>> insertLocationToCount;
+ private final HashMap<String, Pair<String, Long>> insertLocationToCount;
- private HashMap<String, Pair<String, Long>> updateLocationToCount;
+ private final HashMap<String, Pair<String, Long>> updateLocationToCount;
public WorkloadStat() {
insertLocationToCount = new HashMap<>();
@@ -86,10 +86,8 @@ public HashMap> getInsertLocationToCount() {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("WorkloadStat {");
- sb.append("numInserts=").append(numInserts).append(", ");
- sb.append("numUpdates=").append(numUpdates);
- sb.append('}');
- return sb.toString();
+ return "WorkloadStat {" + "numInserts=" + numInserts + ", "
+ + "numUpdates=" + numUpdates
+ + '}';
}
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/clean/CleanPlanner.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/clean/CleanPlanner.java
index 893dfe8548a4..07c622da337d 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/clean/CleanPlanner.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/clean/CleanPlanner.java
@@ -89,7 +89,7 @@ public class CleanPlanner implements Serializable {
private final Map fgIdToPendingLogCompactionOperations;
private final HoodieTable hoodieTable;
private final HoodieWriteConfig config;
- private transient HoodieEngineContext context;
+ private final transient HoodieEngineContext context;
private final List savepointedTimestamps;
private Option earliestCommitToRetain = Option.empty();
@@ -237,10 +237,7 @@ private boolean isAnySavepointDeleted(HoodieCleanMetadata cleanMetadata) {
// check for any savepointed removed in latest compared to previous saved list
List removedSavepointedTimestamps = new ArrayList<>(savepointedTimestampsFromLastClean);
removedSavepointedTimestamps.removeAll(savepointedTimestamps);
- if (removedSavepointedTimestamps.isEmpty()) {
- return false;
- }
- return true;
+ return !removedSavepointedTimestamps.isEmpty();
}
/**
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/cluster/strategy/ClusteringPlanStrategy.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/cluster/strategy/ClusteringPlanStrategy.java
index a6894388f6d2..4d1aa91458eb 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/cluster/strategy/ClusteringPlanStrategy.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/cluster/strategy/ClusteringPlanStrategy.java
@@ -151,13 +151,13 @@ protected int getPlanVersion() {
* Transform {@link FileSlice} to {@link HoodieSliceInfo}.
*/
protected static List<HoodieSliceInfo> getFileSliceInfo(List<FileSlice> slices) {
-    return slices.stream().map(slice -> new HoodieSliceInfo().newBuilder()
+    return slices.stream().map(slice -> HoodieSliceInfo.newBuilder()
.setPartitionPath(slice.getPartitionPath())
.setFileId(slice.getFileId())
.setDataFilePath(slice.getBaseFile().map(BaseFile::getPath).orElse(StringUtils.EMPTY_STRING))
.setDeltaFilePaths(slice.getLogFiles().map(f -> f.getPath().toString()).collect(Collectors.toList()))
.setBootstrapFilePath(slice.getBaseFile().map(bf -> bf.getBootstrapBaseFile().map(bbf -> bbf.getPath()).orElse(StringUtils.EMPTY_STRING)).orElse(StringUtils.EMPTY_STRING))
.build()).collect(Collectors.toList());
}
/**
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/BucketInfo.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/BucketInfo.java
index 6547da642546..a59bd81218ed 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/BucketInfo.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/BucketInfo.java
@@ -50,12 +50,10 @@ public String getPartitionPath() {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("BucketInfo {");
- sb.append("bucketType=").append(bucketType).append(", ");
- sb.append("fileIdPrefix=").append(fileIdPrefix).append(", ");
- sb.append("partitionPath=").append(partitionPath);
- sb.append('}');
- return sb.toString();
+ return "BucketInfo {" + "bucketType=" + bucketType + ", "
+ + "fileIdPrefix=" + fileIdPrefix + ", "
+ + "partitionPath=" + partitionPath
+ + '}';
}
@Override
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/InsertBucket.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/InsertBucket.java
index 2cedbe865881..8861621ff96e 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/InsertBucket.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/InsertBucket.java
@@ -32,10 +32,8 @@ public class InsertBucket implements Serializable {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("InsertBucket {");
- sb.append("bucketNumber=").append(bucketNumber).append(", ");
- sb.append("weight=").append(weight);
- sb.append('}');
- return sb.toString();
+ return "InsertBucket {" + "bucketNumber=" + bucketNumber + ", "
+ + "weight=" + weight
+ + '}';
}
}
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/SmallFile.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/SmallFile.java
index e495d28e10bd..ef6bef69dbd0 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/SmallFile.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/commit/SmallFile.java
@@ -32,10 +32,8 @@ public class SmallFile implements Serializable {
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("SmallFile {");
- sb.append("location=").append(location).append(", ");
- sb.append("sizeBytes=").append(sizeBytes);
- sb.append('}');
- return sb.toString();
+ return "SmallFile {" + "location=" + location + ", "
+ + "sizeBytes=" + sizeBytes
+ + '}';
}
}
\ No newline at end of file
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/RunCompactionActionExecutor.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/RunCompactionActionExecutor.java
index 276bec1b9bb1..7ad9df2a58b0 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/RunCompactionActionExecutor.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/RunCompactionActionExecutor.java
@@ -56,7 +56,7 @@ public class RunCompactionActionExecutor extends
private final HoodieCompactor compactor;
private final HoodieCompactionHandler compactionHandler;
- private WriteOperationType operationType;
+ private final WriteOperationType operationType;
private final HoodieMetrics metrics;
diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/ScheduleCompactionActionExecutor.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/ScheduleCompactionActionExecutor.java
index e83800e45daa..302d9069b235 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/ScheduleCompactionActionExecutor.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/compact/ScheduleCompactionActionExecutor.java
@@ -53,7 +53,7 @@
public class ScheduleCompactionActionExecutor extends BaseActionExecutor> {
private static final Logger LOG = LoggerFactory.getLogger(ScheduleCompactionActionExecutor.class);
- private WriteOperationType operationType;
+ private final WriteOperationType operationType;
private final Option
*/
public class MurmurHash extends Hash {
- private static MurmurHash _instance = new MurmurHash();
+ private static final MurmurHash INSTANCE = new MurmurHash();
public static Hash getInstance() {
- return _instance;
+ return INSTANCE;
}
@Override
@@ -56,7 +56,7 @@ public int hash(byte[] data, int offset, int length, int seed) {
k = k << 8;
k = k | (data[i4 + 1] & 0xff);
k = k << 8;
- k = k | (data[i4 + 0] & 0xff);
+ k = k | (data[i4] & 0xff);
k *= m;
k ^= k >>> r;
k *= m;
@@ -77,7 +77,7 @@ public int hash(byte[] data, int offset, int length, int seed) {
h ^= (int) data[length - 2] << 8;
}
if (left >= 1) {
- h ^= (int) data[length - 1];
+ h ^= data[length - 1];
}
h *= m;
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/DisruptorMessageQueue.java b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/DisruptorMessageQueue.java
index ae41c2a3a937..606c188986cd 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/DisruptorMessageQueue.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/DisruptorMessageQueue.java
@@ -48,7 +48,7 @@ public class DisruptorMessageQueue implements HoodieMessageQueue {
private final Disruptor queue;
private final Function transformFunction;
private final RingBuffer ringBuffer;
- private AtomicReference<Throwable> throwable = new AtomicReference<>(null);
+ private final AtomicReference<Throwable> throwable = new AtomicReference<>(null);
private boolean isShutdown = false;
private boolean isStarted = false;
diff --git a/hudi-common/src/main/java/org/apache/hudi/config/metrics/HoodieMetricsPrometheusConfig.java b/hudi-common/src/main/java/org/apache/hudi/config/metrics/HoodieMetricsPrometheusConfig.java
index b0a019f81106..2340dd509169 100644
--- a/hudi-common/src/main/java/org/apache/hudi/config/metrics/HoodieMetricsPrometheusConfig.java
+++ b/hudi-common/src/main/java/org/apache/hudi/config/metrics/HoodieMetricsPrometheusConfig.java
@@ -182,7 +182,7 @@ public static HoodieMetricsPrometheusConfig.Builder newBuilder() {
public static class Builder {
- private HoodieMetricsPrometheusConfig hoodieMetricsPrometheusConfig = new HoodieMetricsPrometheusConfig();
+ private final HoodieMetricsPrometheusConfig hoodieMetricsPrometheusConfig = new HoodieMetricsPrometheusConfig();
public Builder fromProperties(Properties props) {
this.hoodieMetricsPrometheusConfig.getProps().putAll(props);
diff --git a/hudi-common/src/main/java/org/apache/hudi/expression/Predicates.java b/hudi-common/src/main/java/org/apache/hudi/expression/Predicates.java
index 11c4f39507f1..49c04de93a18 100644
--- a/hudi-common/src/main/java/org/apache/hudi/expression/Predicates.java
+++ b/hudi-common/src/main/java/org/apache/hudi/expression/Predicates.java
@@ -166,11 +166,7 @@ public Boolean eval(StructLike data) {
if (right != null && !(Boolean) right) {
return false;
} else {
- if (left != null && right != null) {
- return true;
- } else {
- return false;
- }
+ return left != null && right != null;
}
}
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/index/secondary/HoodieSecondaryIndex.java b/hudi-common/src/main/java/org/apache/hudi/index/secondary/HoodieSecondaryIndex.java
index 8e9a7e01a162..c75da2b74cca 100644
--- a/hudi-common/src/main/java/org/apache/hudi/index/secondary/HoodieSecondaryIndex.java
+++ b/hudi-common/src/main/java/org/apache/hudi/index/secondary/HoodieSecondaryIndex.java
@@ -80,7 +80,6 @@ private void validate() {
}
break;
default:
- return;
}
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/InternalSchemaBuilder.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/InternalSchemaBuilder.java
index 7978c21e54c6..7674caf971bc 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/InternalSchemaBuilder.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/InternalSchemaBuilder.java
@@ -170,7 +170,6 @@ private void visitIdToField(Type type, Map index) {
}
return;
default:
- return;
}
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/Types.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/Types.java
index ed03a7349cb7..ac0768897584 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/Types.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/Types.java
@@ -23,6 +23,7 @@
import java.io.Serializable;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -650,7 +651,7 @@ public Field field(int id) {
@Override
public List<Field> fields() {
- return Arrays.asList(elementField);
+ return Collections.singletonList(elementField);
}
public int elementId() {
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/InternalSchemaChangeApplier.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/InternalSchemaChangeApplier.java
index 36aac462a137..b4b404e03518 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/InternalSchemaChangeApplier.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/InternalSchemaChangeApplier.java
@@ -28,7 +28,7 @@
* Manage schema change for HoodieWriteClient.
*/
public class InternalSchemaChangeApplier {
- private InternalSchema latestSchema;
+ private final InternalSchema latestSchema;
public InternalSchemaChangeApplier(InternalSchema latestSchema) {
this.latestSchema = latestSchema;
@@ -75,7 +75,7 @@ public InternalSchema applyAddChange(
throw new IllegalArgumentException(String.format("only support first/before/after but found: %s", positionType));
}
} else {
- throw new IllegalArgumentException(String.format("positionType should be specified"));
+ throw new IllegalArgumentException("positionType should be specified");
}
return SchemaChangeUtils.applyTableChanges2Schema(latestSchema, add);
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/TableChange.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/TableChange.java
index 35b3c781b4ee..e9932266b657 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/TableChange.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/action/TableChange.java
@@ -42,9 +42,9 @@ public interface TableChange {
*/
enum ColumnChangeID {
ADD, UPDATE, DELETE, PROPERTY_CHANGE, REPLACE;
- private String name;
+ private final String name;
- private ColumnChangeID() {
+ ColumnChangeID() {
this.name = this.name().toLowerCase(Locale.ROOT);
}
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/InternalSchemaUtils.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/InternalSchemaUtils.java
index 94e72ff7180e..c1fbc902f758 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/InternalSchemaUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/InternalSchemaUtils.java
@@ -90,7 +90,7 @@ public static InternalSchema pruneInternalSchemaByID(InternalSchema schema, List
if (f != null) {
newFields.add(f);
} else {
- throw new HoodieSchemaException(String.format("cannot find pruned id %s in currentSchema %s", id, schema.toString()));
+ throw new HoodieSchemaException(String.format("cannot find pruned id %s in currentSchema %s", id, schema));
}
}
}
@@ -111,10 +111,8 @@ private static Type pruneType(Type type, List fieldIds) {
Type newType = pruneType(f.type(), fieldIds);
if (fieldIds.contains(f.fieldId())) {
newTypes.add(f.type());
- } else if (newType != null) {
- newTypes.add(newType);
} else {
- newTypes.add(null);
+ newTypes.add(newType);
}
}
boolean changed = false;
diff --git a/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/SerDeHelper.java b/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/SerDeHelper.java
index 7891fc4582cd..35545f16cdf3 100644
--- a/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/SerDeHelper.java
+++ b/hudi-common/src/main/java/org/apache/hudi/internal/schema/utils/SerDeHelper.java
@@ -33,7 +33,7 @@
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
@@ -341,7 +341,7 @@ public static String inheritSchemas(InternalSchema newSchema, String oldSchemas)
return "";
}
if (oldSchemas == null || oldSchemas.isEmpty()) {
- return toJson(Arrays.asList(newSchema));
+ return toJson(Collections.singletonList(newSchema));
}
String checkedString = "{\"schemas\":[";
if (!oldSchemas.startsWith("{\"schemas\":")) {
diff --git a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
index 27ea15bc2509..2da11bea079d 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
@@ -829,9 +829,9 @@ public static HoodieData convertMetadataToRecordIndexRecords(Hoodi
return deletedRecordKeys.stream().map(recordKey -> HoodieMetadataPayload.createRecordIndexDelete(recordKey)).collect(toList()).iterator();
}
// ignore log file data blocks.
- return new ArrayList().iterator();
+ return Collections.emptyIterator();
} else {
- throw new HoodieIOException("Unsupported file type " + fullFilePath.toString() + " while generating MDT records");
+ throw new HoodieIOException("Unsupported file type " + fullFilePath + " while generating MDT records");
}
});
@@ -933,7 +933,7 @@ public static List getRecordKeysDeletedOrUpdated(HoodieEngineContext eng
return getRecordKeys(fullFilePath.toString(), dataTableMetaClient, finalWriterSchemaOpt, maxBufferSize, instantTime, true, true)
.iterator();
} else {
- throw new HoodieIOException("Found unsupported file type " + fullFilePath.toString() + ", while generating MDT records");
+ throw new HoodieIOException("Found unsupported file type " + fullFilePath + ", while generating MDT records");
}
}).collectAsList();
} catch (Exception e) {
@@ -2662,7 +2662,7 @@ public static HoodieMetadataColumnStats mergeColumnStatsRecords(HoodieMetadataCo
Comparable minValue =
(Comparable) Stream.of(
- (Comparable) unwrapAvroValueWrapper(prevColumnStats.getMinValue()),
+ unwrapAvroValueWrapper(prevColumnStats.getMinValue()),
(Comparable) unwrapAvroValueWrapper(newColumnStats.getMinValue()))
.filter(Objects::nonNull)
.min(Comparator.naturalOrder())
@@ -2670,7 +2670,7 @@ public static HoodieMetadataColumnStats mergeColumnStatsRecords(HoodieMetadataCo
Comparable maxValue =
(Comparable) Stream.of(
- (Comparable) unwrapAvroValueWrapper(prevColumnStats.getMaxValue()),
+ unwrapAvroValueWrapper(prevColumnStats.getMaxValue()),
(Comparable) unwrapAvroValueWrapper(newColumnStats.getMaxValue()))
.filter(Objects::nonNull)
.max(Comparator.naturalOrder())
diff --git a/hudi-common/src/main/java/org/apache/hudi/metrics/MetricsGraphiteReporter.java b/hudi-common/src/main/java/org/apache/hudi/metrics/MetricsGraphiteReporter.java
index e3acab9a90b9..161536e19cc5 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metrics/MetricsGraphiteReporter.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metrics/MetricsGraphiteReporter.java
@@ -39,8 +39,8 @@ public class MetricsGraphiteReporter extends MetricsReporter {
private final MetricRegistry registry;
private final GraphiteReporter graphiteReporter;
private final HoodieMetricsConfig metricsConfig;
- private String serverHost;
- private int serverPort;
+ private final String serverHost;
+ private final int serverPort;
private final int periodSeconds;
public MetricsGraphiteReporter(HoodieMetricsConfig metricsConfig, MetricRegistry registry) {
diff --git a/hudi-common/src/main/java/org/apache/hudi/metrics/custom/CustomizableMetricsReporter.java b/hudi-common/src/main/java/org/apache/hudi/metrics/custom/CustomizableMetricsReporter.java
index 13574b1e1569..6922393f39aa 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metrics/custom/CustomizableMetricsReporter.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metrics/custom/CustomizableMetricsReporter.java
@@ -28,8 +28,8 @@
* Extensible metrics reporter for custom implementation.
*/
public abstract class CustomizableMetricsReporter extends MetricsReporter {
- private Properties props;
- private MetricRegistry registry;
+ private final Properties props;
+ private final MetricRegistry registry;
public CustomizableMetricsReporter(Properties props, MetricRegistry registry) {
this.props = props;
diff --git a/hudi-common/src/main/java/org/apache/hudi/metrics/datadog/DatadogReporter.java b/hudi-common/src/main/java/org/apache/hudi/metrics/datadog/DatadogReporter.java
index 32b0ee809f93..1f3e2a4ca4a8 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metrics/datadog/DatadogReporter.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metrics/datadog/DatadogReporter.java
@@ -173,6 +173,6 @@ String build() {
}
enum MetricType {
- gauge;
+ gauge
}
}
diff --git a/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/QuickstartConfigurations.java b/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/QuickstartConfigurations.java
index 47a5009ade59..ca75d8db1171 100644
--- a/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/QuickstartConfigurations.java
+++ b/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/QuickstartConfigurations.java
@@ -113,12 +113,11 @@ public static String getCreateHoodieTableDDL(
}
public static String getCreateHudiCatalogDDL(final String catalogName, final String catalogPath) {
- StringBuilder builder = new StringBuilder();
- builder.append("create catalog ").append(catalogName).append(" with (\n");
- builder.append(" 'type' = 'hudi',\n"
- + " 'catalog.path' = '").append(catalogPath).append("'");
- builder.append("\n)");
- return builder.toString();
+ return "create catalog " + catalogName + " with (\n"
+ + " 'type' = 'hudi',\n"
+ + " 'catalog.path' = '"
+ + catalogPath + "'"
+ + "\n)";
}
public static String getFileSourceDDL(String tableName) {
@@ -175,8 +174,7 @@ public static String getCollectSinkDDL(String tableName, ResolvedSchema tableSch
}
builder.append("\n");
}
- final String withProps = ""
- + ") with (\n"
+ final String withProps = ") with (\n"
+ " 'connector' = '" + CollectSinkTableFactory.FACTORY_ID + "'\n"
+ ")";
builder.append(withProps);
diff --git a/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/SchemaBuilder.java b/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/SchemaBuilder.java
index 76306f780646..ae814acea1f5 100644
--- a/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/SchemaBuilder.java
+++ b/hudi-examples/hudi-examples-flink/src/main/java/org/apache/hudi/examples/quickstart/utils/SchemaBuilder.java
@@ -34,8 +34,8 @@
* Builder for {@link ResolvedSchema}.
*/
public class SchemaBuilder {
- private List<Column> columns;
- private List<WatermarkSpec> watermarkSpecs;
+ private final List<Column> columns;
+ private final List<WatermarkSpec> watermarkSpecs;
private UniqueConstraint constraint;
public static SchemaBuilder instance() {
diff --git a/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieSparkBootstrapExample.java b/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieSparkBootstrapExample.java
index f1529b6e03a2..31f93601f927 100644
--- a/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieSparkBootstrapExample.java
+++ b/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieSparkBootstrapExample.java
@@ -34,7 +34,7 @@
public class HoodieSparkBootstrapExample {
- private static String tableType = HoodieTableType.MERGE_ON_READ.name();
+ private static final String TABLE_TYPE = HoodieTableType.MERGE_ON_READ.name();
public static void main(String[] args) throws Exception {
if (args.length < 5) {
diff --git a/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieWriteClientExample.java b/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieWriteClientExample.java
index b8df6161ca45..457020036e7c 100644
--- a/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieWriteClientExample.java
+++ b/hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/spark/HoodieWriteClientExample.java
@@ -66,7 +66,7 @@ public class HoodieWriteClientExample {
private static final Logger LOG = LoggerFactory.getLogger(HoodieWriteClientExample.class);
- private static String tableType = HoodieTableType.COPY_ON_WRITE.name();
+ private static final String TABLE_TYPE = HoodieTableType.COPY_ON_WRITE.name();
public static void main(String[] args) throws Exception {
if (args.length < 2) {
@@ -87,7 +87,7 @@ public static void main(String[] args) throws Exception {
FileSystem fs = HadoopFSUtils.getFs(tablePath, jsc.hadoopConfiguration());
if (!fs.exists(path)) {
HoodieTableMetaClient.newTableBuilder()
- .setTableType(tableType)
+ .setTableType(TABLE_TYPE)
.setTableName(tableName)
.setPayloadClass(HoodieAvroPayload.class)
.initTable(HadoopFSUtils.getStorageConfWithCopy(jsc.hadoopConfiguration()), tablePath);
@@ -139,7 +139,7 @@ public static void main(String[] args) throws Exception {
client.deletePartitions(deleteList, newCommitTime);
// compaction
- if (HoodieTableType.valueOf(tableType) == HoodieTableType.MERGE_ON_READ) {
+ if (HoodieTableType.valueOf(TABLE_TYPE) == HoodieTableType.MERGE_ON_READ) {
Option instant = client.scheduleCompaction(Option.empty());
HoodieWriteMetadata> compactionMetadata = client.compact(instant.get());
client.commitCompaction(instant.get(), compactionMetadata.getCommitMetadata().get(), Option.empty());
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/bulk/sort/SortOperator.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/bulk/sort/SortOperator.java
index 5a898760a282..e223c3bf60b6 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/bulk/sort/SortOperator.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/bulk/sort/SortOperator.java
@@ -54,7 +54,7 @@ public class SortOperator extends TableStreamOperator
private GeneratedNormalizedKeyComputer gComputer;
private GeneratedRecordComparator gComparator;
- private Configuration conf;
+ private final Configuration conf;
private transient BinaryExternalSorter sorter;
private transient StreamRecordCollector collector;
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/clustering/update/strategy/FlinkConsistentBucketUpdateStrategy.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/clustering/update/strategy/FlinkConsistentBucketUpdateStrategy.java
index 73c0d655fb72..e900af3c2500 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/clustering/update/strategy/FlinkConsistentBucketUpdateStrategy.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/clustering/update/strategy/FlinkConsistentBucketUpdateStrategy.java
@@ -57,8 +57,8 @@ public class FlinkConsistentBucketUpdateStrategy
private static final Logger LOG = LoggerFactory.getLogger(FlinkConsistentBucketUpdateStrategy.class);
private boolean initialized = false;
- private List indexKeyFields;
- private Map> partitionToIdentifier;
+ private final List indexKeyFields;
+ private final Map> partitionToIdentifier;
private String lastRefreshInstant = HoodieTimeline.INIT_INSTANT_TS;
public FlinkConsistentBucketUpdateStrategy(HoodieFlinkWriteClient writeClient, List indexKeyFields) {
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/transform/ChainedTransformer.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/transform/ChainedTransformer.java
index 2fe2867b7546..bb59f9961ab9 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/transform/ChainedTransformer.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/transform/ChainedTransformer.java
@@ -29,7 +29,7 @@
*/
public class ChainedTransformer implements Transformer {
- private List<Transformer> transformers;
+ private final List<Transformer> transformers;
public ChainedTransformer(List<Transformer> transformers) {
this.transformers = transformers;
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/ExpressionEvaluators.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/ExpressionEvaluators.java
index 0feda05a4c77..9c6ec425ed51 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/ExpressionEvaluators.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/source/ExpressionEvaluators.java
@@ -571,9 +571,9 @@ private static BigDecimal getBigDecimal(@NotNull Object value) {
} else if (value instanceof Double) {
// new BigDecimal() are used instead of BigDecimal.valueOf() due to
// receive exact decimal representation of the double's binary floating-point value
- return new BigDecimal((Double) value);
+ return BigDecimal.valueOf((Double) value);
} else if (value instanceof Float) {
- return new BigDecimal(((Float) value).doubleValue());
+ return BigDecimal.valueOf(((Float) value).doubleValue());
} else if (value instanceof Long) {
return new BigDecimal((Long) value);
} else if (value instanceof Integer) {
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/HoodieTableSource.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/HoodieTableSource.java
index 90f81289bd2b..5c9cde00ba90 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/HoodieTableSource.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/HoodieTableSource.java
@@ -330,14 +330,10 @@ private String getSourceOperatorName(String operatorName) {
List fields = Arrays.stream(this.requiredPos)
.mapToObj(i -> schemaFieldNames[i])
.collect(Collectors.toList());
- StringBuilder sb = new StringBuilder();
- sb.append(operatorName)
- .append("(")
- .append("table=").append(Collections.singletonList(conf.getString(FlinkOptions.TABLE_NAME)))
- .append(", ")
- .append("fields=").append(fields)
- .append(")");
- return sb.toString();
+ return operatorName + "("
+ + "table=" + Collections.singletonList(conf.get(FlinkOptions.TABLE_NAME))
+ + ", " + "fields=" + fields
+ + ")";
}
@Nullable
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FormatUtils.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FormatUtils.java
index 0628673b8e58..80c33b76cf3c 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FormatUtils.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/format/FormatUtils.java
@@ -310,8 +310,7 @@ public static Option getRawValueWithAltKeys(org.apache.flink.configurati
public static boolean getBooleanWithAltKeys(org.apache.flink.configuration.Configuration conf,
ConfigProperty<?> configProperty) {
Option<String> rawValue = getRawValueWithAltKeys(conf, configProperty);
- boolean defaultValue = configProperty.hasDefaultValue()
- ? Boolean.parseBoolean(configProperty.defaultValue().toString()) : false;
+ boolean defaultValue = configProperty.hasDefaultValue() && Boolean.parseBoolean(configProperty.defaultValue().toString());
return rawValue.map(Boolean::parseBoolean).orElse(defaultValue);
}
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/lookup/HoodieLookupFunction.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/lookup/HoodieLookupFunction.java
index a43bf1189fb3..a7f6ec2a2bd9 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/lookup/HoodieLookupFunction.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/lookup/HoodieLookupFunction.java
@@ -138,7 +138,7 @@ private void checkCacheReload() {
return;
}
// Determine whether to reload data by comparing instant
- if (currentCommit != null && latestCommitInstant.get().equals(currentCommit)) {
+ if (latestCommitInstant.get().equals(currentCommit)) {
LOG.info("Ignore loading data because the commit instant " + currentCommit + " has not changed.");
return;
}
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/FlinkWriteClients.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/FlinkWriteClients.java
index e9d0310d4756..1984a7c7baea 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/FlinkWriteClients.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/FlinkWriteClients.java
@@ -208,7 +208,7 @@ public static HoodieWriteConfig getHoodieClientConfig(
).build())
.forTable(conf.getString(FlinkOptions.TABLE_NAME))
.withStorageConfig(HoodieStorageConfig.newBuilder()
- .logFileDataBlockMaxSize(conf.getInteger(FlinkOptions.WRITE_LOG_BLOCK_SIZE) * 1024 * 1024)
+ .logFileDataBlockMaxSize((long) conf.getInteger(FlinkOptions.WRITE_LOG_BLOCK_SIZE) * 1024 * 1024)
.logFileMaxSize(conf.getLong(FlinkOptions.WRITE_LOG_MAX_SIZE) * 1024 * 1024)
.parquetBlockSize(conf.getInteger(FlinkOptions.WRITE_PARQUET_BLOCK_SIZE) * 1024 * 1024)
.parquetPageSize(conf.getInteger(FlinkOptions.WRITE_PARQUET_PAGE_SIZE) * 1024 * 1024)
diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/RowDataToAvroConverters.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/RowDataToAvroConverters.java
index 57463d913ccf..f54abd4a16bb 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/RowDataToAvroConverters.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/RowDataToAvroConverters.java
@@ -50,7 +50,7 @@
@Internal
public class RowDataToAvroConverters {
- private static Conversions.DecimalConversion decimalConversion = new Conversions.DecimalConversion();
+ private static final Conversions.DecimalConversion DECIMAL_CONVERSION = new Conversions.DecimalConversion();
// --------------------------------------------------------------------------------
// Runtime Converters
@@ -219,7 +219,7 @@ public Object convert(Schema schema, Object object) {
@Override
public Object convert(Schema schema, Object object) {
BigDecimal javaDecimal = ((DecimalData) object).toBigDecimal();
- return decimalConversion.toFixed(javaDecimal, schema, schema.getLogicalType());
+ return DECIMAL_CONVERSION.toFixed(javaDecimal, schema, schema.getLogicalType());
}
};
break;
diff --git a/hudi-flink-datasource/hudi-flink1.18.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java b/hudi-flink-datasource/hudi-flink1.18.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
index 95d8fd720d30..f828ae9dffa7 100644
--- a/hudi-flink-datasource/hudi-flink1.18.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
+++ b/hudi-flink-datasource/hudi-flink1.18.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
@@ -29,8 +29,8 @@
public class HeapMapColumnVector extends AbstractHeapVector
implements WritableColumnVector, MapColumnVector {
- private WritableColumnVector keys;
- private WritableColumnVector values;
+ private final WritableColumnVector keys;
+ private final WritableColumnVector values;
public HeapMapColumnVector(int len, WritableColumnVector keys, WritableColumnVector values) {
super(len);
diff --git a/hudi-flink-datasource/hudi-flink1.19.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java b/hudi-flink-datasource/hudi-flink1.19.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
index c01b8392893a..a684f64adfb6 100644
--- a/hudi-flink-datasource/hudi-flink1.19.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
+++ b/hudi-flink-datasource/hudi-flink1.19.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
@@ -29,8 +29,8 @@
public class HeapMapColumnVector extends AbstractHeapVector
implements WritableColumnVector, MapColumnVector {
- private WritableColumnVector keys;
- private WritableColumnVector values;
+ private final WritableColumnVector keys;
+ private final WritableColumnVector values;
public HeapMapColumnVector(int len, WritableColumnVector keys, WritableColumnVector values) {
super(len);
diff --git a/hudi-flink-datasource/hudi-flink1.20.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java b/hudi-flink-datasource/hudi-flink1.20.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
index 95d8fd720d30..f828ae9dffa7 100644
--- a/hudi-flink-datasource/hudi-flink1.20.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
+++ b/hudi-flink-datasource/hudi-flink1.20.x/src/main/java/org/apache/hudi/table/format/cow/vector/HeapMapColumnVector.java
@@ -29,8 +29,8 @@
public class HeapMapColumnVector extends AbstractHeapVector
implements WritableColumnVector, MapColumnVector {
- private WritableColumnVector keys;
- private WritableColumnVector values;
+ private final WritableColumnVector keys;
+ private final WritableColumnVector values;
public HeapMapColumnVector(int len, WritableColumnVector keys, WritableColumnVector values) {
super(len);
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
index a1bae5deb7bf..27cb63d58222 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/config/DFSPropertiesConfiguration.java
@@ -73,7 +73,7 @@ public class DFSPropertiesConfiguration extends PropertiesConfig {
@Nullable
private final Configuration hadoopConfig;
- private StoragePath mainFilePath;
+ private final StoragePath mainFilePath;
// props read from user defined configuration file or input stream
private final HoodieConfig hoodieConfig;
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/AvroOrcUtils.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/AvroOrcUtils.java
index 295e5163ed52..d05a07a74a45 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/AvroOrcUtils.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/AvroOrcUtils.java
@@ -147,13 +147,13 @@ public static void addToVector(TypeDescription type, ColumnVector colVector, Sch
final Utf8 utf8 = (Utf8) value;
bytes = utf8.getBytes();
} else if (value instanceof GenericData.EnumSymbol) {
- bytes = getUTF8Bytes(((GenericData.EnumSymbol) value).toString());
+ bytes = getUTF8Bytes(value.toString());
} else {
throw new IllegalStateException(String.format(
"Unrecognized type for Avro %s field value, which has type %s, value %s",
type.getCategory().getName(),
value.getClass().getName(),
- value.toString()
+ value
));
}
@@ -177,7 +177,7 @@ public static void addToVector(TypeDescription type, ColumnVector colVector, Sch
throw new IllegalStateException(String.format(
"Unrecognized type for Avro DATE field value, which has type %s, value %s",
value.getClass().getName(),
- value.toString()
+ value
));
}
dateColVec.vector[vectorPos] = daysSinceEpoch;
@@ -209,7 +209,7 @@ public static void addToVector(TypeDescription type, ColumnVector colVector, Sch
throw new IllegalStateException(String.format(
"Unrecognized type for Avro TIMESTAMP field value, which has type %s, value %s",
value.getClass().getName(),
- value.toString()
+ value
));
}
@@ -231,7 +231,7 @@ public static void addToVector(TypeDescription type, ColumnVector colVector, Sch
throw new IllegalStateException(String.format(
"Unrecognized type for Avro BINARY field value, which has type %s, value %s",
value.getClass().getName(),
- value.toString()
+ value
));
}
binaryColVec.setRef(vectorPos, binaryBytes, 0, binaryBytes.length);
@@ -338,13 +338,13 @@ public static void addToVector(TypeDescription type, ColumnVector colVector, Sch
throw new IllegalStateException(String.format(
"Failed to add value %s to union with type %s",
value == null ? "null" : value.toString(),
- type.toString()
+ type
));
}
break;
default:
- throw new IllegalArgumentException("Invalid TypeDescription " + type.toString() + ".");
+ throw new IllegalArgumentException("Invalid TypeDescription " + type + ".");
}
}
@@ -598,7 +598,7 @@ public static Object readFromVector(TypeDescription type, ColumnVector colVector
ColumnVector fieldVector = unionVector.fields[tag];
return readFromVector(type.getChildren().get(tag), fieldVector, avroSchema.getTypes().get(tag), vectorPos);
default:
- throw new HoodieIOException("Unrecognized TypeDescription " + type.toString());
+ throw new HoodieIOException("Unrecognized TypeDescription " + type);
}
}
@@ -811,7 +811,7 @@ public static Schema createAvroSchemaWithDefaultValue(TypeDescription orcSchema,
if (nullable) {
fields.add(new Schema.Field(field.name(), nullableSchema, null, NULL_VALUE));
} else {
- fields.add(new Schema.Field(field.name(), fieldSchema, null, (Object) null));
+ fields.add(new Schema.Field(field.name(), fieldSchema, null, null));
}
}
Schema schema = Schema.createRecord(recordName, null, null, false);
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/HadoopConfigUtils.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/HadoopConfigUtils.java
index 9f1347872e2c..f4bac6284c9c 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/HadoopConfigUtils.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/HadoopConfigUtils.java
@@ -84,8 +84,7 @@ public static Option getRawValueWithAltKeys(Configuration conf,
public static boolean getBooleanWithAltKeys(Configuration conf,
ConfigProperty<?> configProperty) {
Option<String> rawValue = getRawValueWithAltKeys(conf, configProperty);
- boolean defaultValue = configProperty.hasDefaultValue()
- ? Boolean.parseBoolean(configProperty.defaultValue().toString()) : false;
+ boolean defaultValue = configProperty.hasDefaultValue() && Boolean.parseBoolean(configProperty.defaultValue().toString());
return rawValue.map(Boolean::parseBoolean).orElse(defaultValue);
}
}
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/BoundedFsDataInputStream.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/BoundedFsDataInputStream.java
index 68a28ab6989c..fb76a4a7d5ae 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/BoundedFsDataInputStream.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/BoundedFsDataInputStream.java
@@ -31,8 +31,8 @@
* Implementation of {@link FSDataInputStream} with bound check based on file size.
*/
public class BoundedFsDataInputStream extends FSDataInputStream {
- private FileSystem fs;
- private Path file;
+ private final FileSystem fs;
+ private final Path file;
private long fileLen = -1L;
public BoundedFsDataInputStream(FileSystem fs, Path file, InputStream in) {
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieRetryWrapperFileSystem.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieRetryWrapperFileSystem.java
index b31d2a8ca08a..c9d8fff3fbbd 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieRetryWrapperFileSystem.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieRetryWrapperFileSystem.java
@@ -36,7 +36,6 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.util.EnumSet;
@@ -46,11 +45,11 @@
*/
public class HoodieRetryWrapperFileSystem extends FileSystem {
- private FileSystem fileSystem;
- private long maxRetryIntervalMs;
- private int maxRetryNumbers;
- private long initialRetryIntervalMs;
- private String retryExceptionsList;
+ private final FileSystem fileSystem;
+ private final long maxRetryIntervalMs;
+ private final int maxRetryNumbers;
+ private final long initialRetryIntervalMs;
+ private final String retryExceptionsList;
public HoodieRetryWrapperFileSystem(FileSystem fs, long maxRetryIntervalMs, int maxRetryNumbers, long initialRetryIntervalMs, String retryExceptions) {
this.fileSystem = fs;
@@ -203,7 +202,7 @@ public boolean delete(Path f) throws IOException {
}
@Override
- public FileStatus[] listStatus(Path f) throws FileNotFoundException, IOException {
+ public FileStatus[] listStatus(Path f) throws IOException {
return new RetryHelper(maxRetryIntervalMs, maxRetryNumbers, initialRetryIntervalMs, retryExceptionsList).tryWith(() -> fileSystem.listStatus(f)).start();
}
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieSerializableFileStatus.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieSerializableFileStatus.java
index 64b827d2613e..8a7b5f0d974f 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieSerializableFileStatus.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieSerializableFileStatus.java
@@ -37,17 +37,17 @@
*/
public class HoodieSerializableFileStatus implements Serializable {
- private Path path;
- private long length;
- private Boolean isDir;
- private short blockReplication;
- private long blockSize;
- private long modificationTime;
- private long accessTime;
- private FsPermission permission;
- private String owner;
- private String group;
- private Path symlink;
+ private final Path path;
+ private final long length;
+ private final Boolean isDir;
+ private final short blockReplication;
+ private final long blockSize;
+ private final long modificationTime;
+ private final long accessTime;
+ private final FsPermission permission;
+ private final String owner;
+ private final String group;
+ private final Path symlink;
HoodieSerializableFileStatus(Path path, long length, boolean isDir, short blockReplication,
long blockSize, long modificationTime, long accessTime,
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieWrapperFileSystem.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieWrapperFileSystem.java
index 5d00a14fadbb..276b215fd18e 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieWrapperFileSystem.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieWrapperFileSystem.java
@@ -93,7 +93,7 @@ public static void setMetricsRegistry(Registry registry, Registry registryMeta)
}
- private ConcurrentMap openStreams = new ConcurrentHashMap<>();
+ private final ConcurrentMap openStreams = new ConcurrentHashMap<>();
private FileSystem fileSystem;
private URI uri;
private ConsistencyGuard consistencyGuard = new NoOpConsistencyGuard();
@@ -1008,7 +1008,7 @@ public long getBytesWritten(Path file) {
}
// When the file is first written, we do not have a track of it
throw new IllegalArgumentException(
- file.toString() + " does not have a open stream. Cannot get the bytes written on the stream");
+ file + " does not have a open stream. Cannot get the bytes written on the stream");
}
public FileSystem getFileSystem() {
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/inline/InMemoryFileSystem.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/inline/InMemoryFileSystem.java
index 7831e76c88fc..572e593f6d89 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/inline/InMemoryFileSystem.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/inline/InMemoryFileSystem.java
@@ -29,7 +29,6 @@
import org.apache.hadoop.util.Progressable;
import java.io.ByteArrayOutputStream;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
@@ -102,7 +101,7 @@ public boolean delete(Path path, boolean b) throws IOException {
}
@Override
- public FileStatus[] listStatus(Path inlinePath) throws FileNotFoundException, IOException {
+ public FileStatus[] listStatus(Path inlinePath) throws IOException {
throw new UnsupportedOperationException("No support for listStatus");
}
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroHFileWriter.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroHFileWriter.java
index 3fc95818bf27..6967f19246da 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroHFileWriter.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroHFileWriter.java
@@ -64,7 +64,7 @@
*/
public class HoodieAvroHFileWriter
implements HoodieAvroFileWriter {
- private static AtomicLong recordIndex = new AtomicLong(1);
+ private static final AtomicLong RECORD_INDEX_COUNT = new AtomicLong(1);
private final Path file;
private final HoodieHFileConfig hfileConfig;
private final boolean isWrapperFileSystem;
@@ -127,7 +127,7 @@ public HoodieAvroHFileWriter(String instantTime, StoragePath file, HoodieHFileCo
public void writeAvroWithMetadata(HoodieKey key, IndexedRecord avroRecord) throws IOException {
if (populateMetaFields) {
prepRecordWithMetadata(key, avroRecord, instantTime,
- taskContextSupplier.getPartitionIdSupplier().get(), recordIndex.getAndIncrement(), file.getName());
+ taskContextSupplier.getPartitionIdSupplier().get(), RECORD_INDEX_COUNT.getAndIncrement(), file.getName());
writeAvro(key.getRecordKey(), avroRecord);
} else {
writeAvro(key.getRecordKey(), avroRecord);
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroOrcWriter.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroOrcWriter.java
index 0516caad9ee5..e3ffc69c1a84 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroOrcWriter.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieAvroOrcWriter.java
@@ -68,7 +68,7 @@ public class HoodieAvroOrcWriter implements HoodieAvroFileWriter, Closeable {
private final String instantTime;
private final TaskContextSupplier taskContextSupplier;
- private HoodieOrcConfig orcConfig;
+ private final HoodieOrcConfig orcConfig;
private String minRecordKey;
private String maxRecordKey;
diff --git a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieBaseParquetWriter.java b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieBaseParquetWriter.java
index 8f17fa0fa1e1..ed20f0bca114 100644
--- a/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieBaseParquetWriter.java
+++ b/hudi-hadoop-common/src/main/java/org/apache/hudi/io/hadoop/HoodieBaseParquetWriter.java
@@ -100,7 +100,7 @@ protected void handleParquetBloomFilters(ParquetWriter.Builder parquetWriterbuil
hadoopConf.forEach(conf -> {
String key = conf.getKey();
if (key.startsWith(BLOOM_FILTER_ENABLED)) {
- String column = key.substring(BLOOM_FILTER_ENABLED.length() + 1, key.length());
+ String column = key.substring(BLOOM_FILTER_ENABLED.length() + 1);
try {
Method method = parquetWriterbuilder.getClass().getMethod("withBloomFilterEnabled", String.class, boolean.class);
method.invoke(parquetWriterbuilder, column, Boolean.valueOf(conf.getValue()).booleanValue());
@@ -109,7 +109,7 @@ protected void handleParquetBloomFilters(ParquetWriter.Builder parquetWriterbuil
}
}
if (key.startsWith(BLOOM_FILTER_EXPECTED_NDV)) {
- String column = key.substring(BLOOM_FILTER_EXPECTED_NDV.length() + 1, key.length());
+ String column = key.substring(BLOOM_FILTER_EXPECTED_NDV.length() + 1);
try {
Method method = parquetWriterbuilder.getClass().getMethod("withBloomFilterNDV", String.class, long.class);
method.invoke(parquetWriterbuilder, column, Long.valueOf(conf.getValue()).longValue());
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieHFileRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieHFileRecordReader.java
index 3414f5fdea88..5b32dd435a8b 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieHFileRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieHFileRecordReader.java
@@ -50,10 +50,10 @@
public class HoodieHFileRecordReader implements RecordReader {
private long count = 0;
- private ArrayWritable valueObj;
+ private final ArrayWritable valueObj;
private HoodieFileReader reader;
private ClosableIterator> recordIterator;
- private Schema schema;
+ private final Schema schema;
public HoodieHFileRecordReader(Configuration conf, InputSplit split, JobConf job) throws IOException {
FileSplit fileSplit = (FileSplit) split;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
index c301130a8401..3b6b3940489d 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
@@ -74,7 +74,7 @@ public class HoodieROTablePathFilter implements Configurable, PathFilter, Serial
* Its quite common, to have all files from a given partition path be passed into accept(), cache the check for hoodie
* metadata for known partition paths and the latest versions of files.
*/
- private Map<String, HashSet<Path>> hoodiePathCache;
+ private final Map<String, HashSet<Path>> hoodiePathCache;
/**
* Paths that are known to be non-hoodie tables.
@@ -214,7 +214,7 @@ public boolean accept(Path path) {
if (!hoodiePathCache.containsKey(folder.toString())) {
hoodiePathCache.put(folder.toString(), new HashSet<>());
}
- LOG.info("Based on hoodie metadata from base path: " + baseDir.toString() + ", caching " + latestFiles.size()
+ LOG.info("Based on hoodie metadata from base path: " + baseDir + ", caching " + latestFiles.size()
+ " files under " + folder);
for (HoodieBaseFile lfile : latestFiles) {
hoodiePathCache.get(folder.toString()).add(new Path(lfile.getPath()));
@@ -229,7 +229,7 @@ public boolean accept(Path path) {
} catch (TableNotFoundException e) {
// Non-hoodie path, accept it.
if (LOG.isDebugEnabled()) {
- LOG.debug(String.format("(1) Caching non-hoodie path under %s with basePath %s \n", folder.toString(), baseDir.toString()));
+ LOG.debug(String.format("(1) Caching non-hoodie path under %s with basePath %s \n", folder, baseDir));
}
nonHoodiePathCache.add(folder.toString());
nonHoodiePathCache.add(baseDir.toString());
@@ -242,7 +242,7 @@ public boolean accept(Path path) {
} else {
// files is at < 3 level depth in FS tree, can't be hoodie dataset
if (LOG.isDebugEnabled()) {
- LOG.debug(String.format("(2) Caching non-hoodie path under %s \n", folder.toString()));
+ LOG.debug(String.format("(2) Caching non-hoodie path under %s \n", folder));
}
nonHoodiePathCache.add(folder.toString());
return true;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/InputPathHandler.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/InputPathHandler.java
index f0c2e6a1fe2f..88e96d29e1be 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/InputPathHandler.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/InputPathHandler.java
@@ -60,7 +60,7 @@ public class InputPathHandler {
private final Map> groupedIncrementalPaths;
private final List snapshotPaths;
private final List nonHoodieInputPaths;
- private boolean isIncrementalUseDatabase;
+ private final boolean isIncrementalUseDatabase;
public InputPathHandler(Configuration conf, Path[] inputPaths, List incrementalTables) throws IOException {
this.conf = conf;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/avro/HoodieAvroParquetReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/avro/HoodieAvroParquetReader.java
index c31041ddc76b..b5a8e0138826 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/avro/HoodieAvroParquetReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/avro/HoodieAvroParquetReader.java
@@ -48,7 +48,7 @@
public class HoodieAvroParquetReader extends RecordReader {
private final ParquetRecordReader parquetRecordReader;
- private Schema baseSchema;
+ private final Schema baseSchema;
public HoodieAvroParquetReader(InputSplit inputSplit, Configuration conf) throws IOException {
// get base schema
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
index 99c9ecd210e4..445531b3f2f5 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineHiveInputFormat.java
@@ -759,11 +759,9 @@ public String[] getLocations() throws IOException {
*/
@Override
public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append(inputSplitShim.toString());
- sb.append("InputFormatClass: " + inputFormatClassName);
- sb.append("\n");
- return sb.toString();
+ return inputSplitShim.toString()
+ + "InputFormatClass: " + inputFormatClassName
+ + "\n";
}
/**
@@ -829,8 +827,7 @@ public boolean equals(Object o) {
if (o instanceof CombinePathInputFormat) {
CombinePathInputFormat mObj = (CombinePathInputFormat) o;
return (opList.equals(mObj.opList)) && (inputFormatClassName.equals(mObj.inputFormatClassName))
- && (deserializerClassName == null ? (mObj.deserializerClassName == null)
- : deserializerClassName.equals(mObj.deserializerClassName));
+ && (Objects.equals(deserializerClassName, mObj.deserializerClassName));
}
return false;
}
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineRealtimeFileSplit.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineRealtimeFileSplit.java
index a30aa178f123..5636339b4d22 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineRealtimeFileSplit.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/hive/HoodieCombineRealtimeFileSplit.java
@@ -51,11 +51,11 @@ public HoodieCombineRealtimeFileSplit() {
public HoodieCombineRealtimeFileSplit(JobConf jobConf, List realtimeFileSplits) {
super(jobConf, realtimeFileSplits.stream().map(p ->
- ((HoodieRealtimeFileSplit) p).getPath()).collect(Collectors.toList()).toArray(new
+ p.getPath()).collect(Collectors.toList()).toArray(new
Path[realtimeFileSplits.size()]),
- ArrayUtils.toPrimitive(realtimeFileSplits.stream().map(p -> ((HoodieRealtimeFileSplit) p).getStart())
+ ArrayUtils.toPrimitive(realtimeFileSplits.stream().map(p -> p.getStart())
.collect(Collectors.toList()).toArray(new Long[realtimeFileSplits.size()])),
- ArrayUtils.toPrimitive(realtimeFileSplits.stream().map(p -> ((HoodieRealtimeFileSplit) p).getLength())
+ ArrayUtils.toPrimitive(realtimeFileSplits.stream().map(p -> p.getLength())
.collect(Collectors.toList()).toArray(new Long[realtimeFileSplits.size()])),
realtimeFileSplits.stream().map(p -> {
try {
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
index a0df73c34e21..3b36974a0c95 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/AbstractRealtimeRecordReader.java
@@ -76,7 +76,7 @@ public abstract class AbstractRealtimeRecordReader {
protected boolean supportPayload;
// handle hive type to avro record
protected HiveAvroSerializer serializer;
- private boolean supportTimestamp;
+ private final boolean supportTimestamp;
public AbstractRealtimeRecordReader(RealtimeSplit split, JobConf job) {
this.split = split;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieCombineRealtimeRecordReader.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieCombineRealtimeRecordReader.java
index a663bbc6ba73..84d8868f5f3b 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieCombineRealtimeRecordReader.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieCombineRealtimeRecordReader.java
@@ -44,9 +44,9 @@
*/
public class HoodieCombineRealtimeRecordReader implements RecordReader {
- private static final transient Logger LOG = LoggerFactory.getLogger(HoodieCombineRealtimeRecordReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HoodieCombineRealtimeRecordReader.class);
// RecordReaders for each split
- private List recordReaders = new LinkedList<>();
+ private final List recordReaders = new LinkedList<>();
// Points to the currently iterating record reader
private RecordReader currentRecordReader;
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieHiveUtils.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieHiveUtils.java
index b4894c35d418..981ab9ce54b3 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieHiveUtils.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieHiveUtils.java
@@ -139,7 +139,7 @@ public static List<String> getIncrementalTableNames(JobContext job) {
Map<String, String> tablesModeMap = job.getConfiguration()
.getValByRegex(HOODIE_CONSUME_MODE_PATTERN_STRING.pattern());
List<String> result = tablesModeMap.entrySet().stream().map(s -> {
- if (s.getValue().trim().toUpperCase().equals(INCREMENTAL_SCAN_MODE)) {
+ if (s.getValue().trim().equalsIgnoreCase(INCREMENTAL_SCAN_MODE)) {
Matcher matcher = HOODIE_CONSUME_MODE_PATTERN_STRING.matcher(s.getKey());
return (!matcher.find() ? null : matcher.group(1));
}
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
index aa24bd36fd92..be7c363f6196 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieInputFormatUtils.java
@@ -27,8 +27,8 @@
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodiePartitionMetadata;
import org.apache.hudi.common.table.HoodieTableMetaClient;
-import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.TimelineUtils.HollowCommitHandling;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.common.table.view.TableFileSystemView;
@@ -218,12 +218,12 @@ public static FileInputFormat getInputFormat(String path, boolean realtime, Conf
* @return
*/
public static HoodieTimeline filterInstantsTimeline(HoodieTimeline timeline) {
- HoodieTimeline commitsAndCompactionTimeline = (HoodieTimeline)timeline.getWriteTimeline();
+ HoodieTimeline commitsAndCompactionTimeline = timeline.getWriteTimeline();
Option<HoodieInstant> pendingCompactionInstant = commitsAndCompactionTimeline
.filterPendingCompactionTimeline().firstInstant();
if (pendingCompactionInstant.isPresent()) {
- HoodieTimeline instantsTimeline = (HoodieTimeline)(commitsAndCompactionTimeline
- .findInstantsBefore(pendingCompactionInstant.get().requestedTime()));
+ HoodieTimeline instantsTimeline = commitsAndCompactionTimeline
+ .findInstantsBefore(pendingCompactionInstant.get().requestedTime());
int numCommitsFilteredByCompaction = commitsAndCompactionTimeline.getCommitsTimeline().countInstants()
- instantsTimeline.getCommitsTimeline().countInstants();
LOG.info("Earliest pending compaction instant is: " + pendingCompactionInstant.get().requestedTime()
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieRealtimeInputFormatUtils.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieRealtimeInputFormatUtils.java
index b8308011fd88..fb92c70fd4a4 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieRealtimeInputFormatUtils.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HoodieRealtimeInputFormatUtils.java
@@ -119,8 +119,7 @@ public static boolean requiredProjectionFieldsExistInConf(Configuration configur
&& readColNames.contains(HoodieRecord.PARTITION_PATH_METADATA_FIELD);
} else {
return readColNames.contains(hoodieVirtualKeyInfo.get().getRecordKeyField())
- && (hoodieVirtualKeyInfo.get().getPartitionPathField().isPresent() ? readColNames.contains(hoodieVirtualKeyInfo.get().getPartitionPathField().get())
- : true);
+ && (!hoodieVirtualKeyInfo.get().getPartitionPathField().isPresent() || readColNames.contains(hoodieVirtualKeyInfo.get().getPartitionPathField().get()));
}
}
diff --git a/hudi-io/src/main/java/org/apache/hudi/common/util/ComparableVersion.java b/hudi-io/src/main/java/org/apache/hudi/common/util/ComparableVersion.java
index d9c7c6e6f200..68fe013a1432 100644
--- a/hudi-io/src/main/java/org/apache/hudi/common/util/ComparableVersion.java
+++ b/hudi-io/src/main/java/org/apache/hudi/common/util/ComparableVersion.java
@@ -154,7 +154,7 @@ private static class StringItem
*/
private static final String RELEASE_VERSION_INDEX = String.valueOf(QUALIFIER_LIST.indexOf(""));
- private String value;
+ private final String value;
public StringItem(String value, boolean followedByDigit) {
if (followedByDigit && value.length() == 1) {
diff --git a/hudi-io/src/main/java/org/apache/hudi/common/util/StringUtils.java b/hudi-io/src/main/java/org/apache/hudi/common/util/StringUtils.java
index 2b9f1af11b43..58b4adefd33c 100644
--- a/hudi-io/src/main/java/org/apache/hudi/common/util/StringUtils.java
+++ b/hudi-io/src/main/java/org/apache/hudi/common/util/StringUtils.java
@@ -299,7 +299,7 @@ public static String replace(String text, String searchString, String replacemen
increase *= (max < 0 ? 16 : (max > 64 ? 64 : max));
StringBuilder buf = new StringBuilder(text.length() + increase);
while (end != INDEX_NOT_FOUND) {
- buf.append(text.substring(start, end)).append(replacement);
+ buf.append(text, start, end).append(replacement);
start = end + replLength;
if (--max == 0) {
break;
diff --git a/hudi-io/src/main/java/org/apache/hudi/io/hfile/HFileBlockType.java b/hudi-io/src/main/java/org/apache/hudi/io/hfile/HFileBlockType.java
index 72a0ecec78bc..284988cc4cdd 100644
--- a/hudi-io/src/main/java/org/apache/hudi/io/hfile/HFileBlockType.java
+++ b/hudi-io/src/main/java/org/apache/hudi/io/hfile/HFileBlockType.java
@@ -108,7 +108,7 @@ public int getId() {
INDEX_V1("IDXBLK)+", BlockCategory.INDEX);
public enum BlockCategory {
- DATA, META, INDEX, BLOOM, ALL_CATEGORIES, UNKNOWN;
+ DATA, META, INDEX, BLOOM, ALL_CATEGORIES, UNKNOWN
}
private final byte[] magic;
diff --git a/hudi-io/src/main/java/org/apache/hudi/storage/StorageSchemes.java b/hudi-io/src/main/java/org/apache/hudi/storage/StorageSchemes.java
index 00e8594a0c83..dc61280e1686 100644
--- a/hudi-io/src/main/java/org/apache/hudi/storage/StorageSchemes.java
+++ b/hudi-io/src/main/java/org/apache/hudi/storage/StorageSchemes.java
@@ -84,11 +84,11 @@ public enum StorageSchemes {
// Hopsworks File System
HOPSFS("hopsfs", false, true);
- private String scheme;
+ private final String scheme;
// null for uncertain if write is transactional, please update this for each FS
- private Boolean isWriteTransactional;
+ private final Boolean isWriteTransactional;
// null for uncertain if dfs support atomic create&delete, please update this for each FS
- private Boolean supportAtomicCreation;
+ private final Boolean supportAtomicCreation;
StorageSchemes(String scheme, Boolean isWriteTransactional, Boolean supportAtomicCreation) {
this.scheme = scheme;
diff --git a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/kafka/KafkaConnectControlAgent.java b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/kafka/KafkaConnectControlAgent.java
index a40a6f73a2c1..4c3582277242 100644
--- a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/kafka/KafkaConnectControlAgent.java
+++ b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/kafka/KafkaConnectControlAgent.java
@@ -125,7 +125,7 @@ private void start() {
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
// Todo fetch the worker id or name instead of a uuid.
- props.put(ConsumerConfig.GROUP_ID_CONFIG, "hudi-control-group" + UUID.randomUUID().toString());
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, "hudi-control-group" + UUID.randomUUID());
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
diff --git a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/utils/KafkaConnectUtils.java b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/utils/KafkaConnectUtils.java
index be7d866df92d..8f904f8d87b6 100644
--- a/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/utils/KafkaConnectUtils.java
+++ b/hudi-kafka-connect/src/main/java/org/apache/hudi/connect/utils/KafkaConnectUtils.java
@@ -55,7 +55,7 @@
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -123,7 +123,7 @@ public static int getLatestNumPartitions(String bootstrapServers, String topicNa
props.put("bootstrap.servers", bootstrapServers);
try {
AdminClient client = AdminClient.create(props);
- DescribeTopicsResult result = client.describeTopics(Arrays.asList(topicName));
+ DescribeTopicsResult result = client.describeTopics(Collections.singletonList(topicName));
Map<String, KafkaFuture<TopicDescription>> values = result.values();
KafkaFuture<TopicDescription> topicDescription = values.get(topicName);
int numPartitions = topicDescription.get().partitions().size();
diff --git a/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/common/table/view/HoodieMetaserverFileSystemView.java b/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/common/table/view/HoodieMetaserverFileSystemView.java
index 2ec9dbb9a912..2cef699cb319 100644
--- a/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/common/table/view/HoodieMetaserverFileSystemView.java
+++ b/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/common/table/view/HoodieMetaserverFileSystemView.java
@@ -29,10 +29,10 @@
* is specifically for hoodie table whose metadata is stored in the hoodie metaserver.
*/
public class HoodieMetaserverFileSystemView extends HoodieTableFileSystemView {
- private String databaseName;
- private String tableName;
+ private final String databaseName;
+ private final String tableName;
- private HoodieMetaserverClient metaserverClient;
+ private final HoodieMetaserverClient metaserverClient;
public HoodieMetaserverFileSystemView(HoodieTableMetaClient metaClient,
HoodieTimeline visibleActiveTimeline, HoodieMetaserverConfig config) {
diff --git a/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/metaserver/client/HoodieMetaserverClientImp.java b/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/metaserver/client/HoodieMetaserverClientImp.java
index 2c8be6c30aab..409c3aeece40 100644
--- a/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/metaserver/client/HoodieMetaserverClientImp.java
+++ b/hudi-platform-service/hudi-metaserver/hudi-metaserver-client/src/main/java/org/apache/hudi/metaserver/client/HoodieMetaserverClientImp.java
@@ -55,9 +55,9 @@ public class HoodieMetaserverClientImp implements HoodieMetaserverClient {
private final long retryDelayMs;
private boolean isConnected;
private boolean isLocal;
- private ThriftHoodieMetaserver.Iface client;
+ private final ThriftHoodieMetaserver.Iface client;
private TTransport transport;
- private DefaultInstantGenerator instantGenerator = new DefaultInstantGenerator();
+ private final DefaultInstantGenerator instantGenerator = new DefaultInstantGenerator();
public HoodieMetaserverClientImp(HoodieMetaserverConfig config) {
this.config = config;
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/DataSourceUtils.java b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/DataSourceUtils.java
index c8666711189a..4cf4a4c2f162 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/DataSourceUtils.java
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/DataSourceUtils.java
@@ -229,7 +229,7 @@ public static HoodieWriteResult doWriteOperation(SparkRDDWriteClient client, Jav
case INSERT_OVERWRITE_TABLE:
return client.insertOverwriteTable(hoodieRecords, instantTime);
default:
- throw new HoodieException("Not a valid operation type for doWriteOperation: " + operation.toString());
+ throw new HoodieException("Not a valid operation type for doWriteOperation: " + operation);
}
}
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/BaseWriterCommitMessage.java b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/BaseWriterCommitMessage.java
index 3ba0474a34cb..d8e0e5372e70 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/BaseWriterCommitMessage.java
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/BaseWriterCommitMessage.java
@@ -29,7 +29,7 @@
*/
public class BaseWriterCommitMessage implements Serializable {
- private List writeStatuses;
+ private final List writeStatuses;
public BaseWriterCommitMessage(List writeStatuses) {
this.writeStatuses = writeStatuses;
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/DataSourceInternalWriterHelper.java b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/DataSourceInternalWriterHelper.java
index d9db9cd51d19..e79203b918a3 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/DataSourceInternalWriterHelper.java
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/internal/DataSourceInternalWriterHelper.java
@@ -55,7 +55,7 @@ public class DataSourceInternalWriterHelper {
private final SparkRDDWriteClient writeClient;
private final HoodieTable hoodieTable;
private final WriteOperationType operationType;
- private Map<String, String> extraMetadata;
+ private final Map<String, String> extraMetadata;
public DataSourceInternalWriterHelper(String instantTime, HoodieWriteConfig writeConfig, StructType structType,
SparkSession sparkSession, StorageConfiguration<?> storageConf, Map<String, String> extraMetadata) {
diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/sql/InsertMode.java b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/sql/InsertMode.java
index c68bd60ba634..7b7c08106bca 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/sql/InsertMode.java
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/sql/InsertMode.java
@@ -40,7 +40,7 @@ public enum InsertMode {
*/
NON_STRICT("non-strict");
- private String value;
+ private final String value;
InsertMode(String value) {
this.value = value;
diff --git a/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java b/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java
index f7eb35090e0c..404e3c736107 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java
+++ b/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java
@@ -65,7 +65,7 @@ public static class DataGenerator {
+ "{\"name\":\"fare\",\"type\": \"double\"}]}";
static Schema avroSchema = new Schema.Parser().parse(TRIP_EXAMPLE_SCHEMA);
- private static Random rand = new Random(46474747);
+ private static final Random RAND = new Random(46474747);
private final Map existingKeys;
private final String[] partitionPaths;
@@ -90,7 +90,7 @@ private static String generateRandomString() {
int stringLength = 3;
StringBuilder buffer = new StringBuilder(stringLength);
for (int i = 0; i < stringLength; i++) {
- int randomLimitedInt = leftLimit + (int) (rand.nextFloat() * (rightLimit - leftLimit + 1));
+ int randomLimitedInt = leftLimit + (int) (RAND.nextFloat() * (rightLimit - leftLimit + 1));
buffer.append((char) randomLimitedInt);
}
return buffer.toString();
@@ -107,11 +107,11 @@ public static GenericRecord generateGenericRecord(String rowKey, String riderNam
rec.put("ts", timestamp);
rec.put("rider", riderName);
rec.put("driver", driverName);
- rec.put("begin_lat", rand.nextDouble());
- rec.put("begin_lon", rand.nextDouble());
- rec.put("end_lat", rand.nextDouble());
- rec.put("end_lon", rand.nextDouble());
- rec.put("fare", rand.nextDouble() * 100);
+ rec.put("begin_lat", RAND.nextDouble());
+ rec.put("begin_lon", RAND.nextDouble());
+ rec.put("end_lat", RAND.nextDouble());
+ rec.put("end_lon", RAND.nextDouble());
+ rec.put("fare", RAND.nextDouble() * 100);
return rec;
}
@@ -146,7 +146,7 @@ public Stream generateInsertsStream(String randomString, Integer n
int currSize = getNumExistingKeys();
return IntStream.range(0, n).boxed().map(i -> {
- String partitionPath = partitionPaths[rand.nextInt(partitionPaths.length)];
+ String partitionPath = partitionPaths[RAND.nextInt(partitionPaths.length)];
HoodieKey key = new HoodieKey(UUID.randomUUID().toString(), partitionPath);
existingKeys.put(currSize + i, key);
numExistingKeys++;
@@ -184,7 +184,7 @@ public List generateUpdates(Integer n) {
String randomString = generateRandomString();
return IntStream.range(0, n).boxed().map(x -> {
try {
- return generateUpdateRecord(existingKeys.get(rand.nextInt(numExistingKeys)), randomString);
+ return generateUpdateRecord(existingKeys.get(RAND.nextInt(numExistingKeys)), randomString);
} catch (IOException e) {
throw new HoodieIOException(e.getMessage(), e);
}
@@ -249,13 +249,12 @@ private static Option convertToString(HoodieRecord record) {
}
private static Option<String> convertToString(String uuid, String partitionPath, Long ts) {
- StringBuffer stringBuffer = new StringBuffer();
- stringBuffer.append("{");
- stringBuffer.append("\"ts\": \"" + (ts == null ? "0.0" : ts) + "\",");
- stringBuffer.append("\"uuid\": \"" + uuid + "\",");
- stringBuffer.append("\"partitionpath\": \"" + partitionPath + "\"");
- stringBuffer.append("}");
- return Option.of(stringBuffer.toString());
+ String stringBuffer = "{"
+ + "\"ts\": \"" + (ts == null ? "0.0" : ts) + "\","
+ + "\"uuid\": \"" + uuid + "\","
+ + "\"partitionpath\": \"" + partitionPath + "\""
+ + "}";
+ return Option.of(stringBuffer);
}
public static List<String> convertToStringList(List<HoodieRecord> records) {
diff --git a/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/HDFSParquetImporterUtils.java b/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/HDFSParquetImporterUtils.java
index 0c9c12298cdc..52b452d8a4be 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/HDFSParquetImporterUtils.java
+++ b/hudi-spark-datasource/hudi-spark/src/main/java/org/apache/hudi/cli/HDFSParquetImporterUtils.java
@@ -69,8 +69,6 @@
import java.util.ArrayList;
import java.util.List;
-import scala.Tuple2;
-
import static org.apache.hudi.common.util.StringUtils.fromUTF8Bytes;
/**
@@ -194,7 +192,7 @@ public JavaRDD> buildHoodieRecordsForImport(Ja
job.getConfiguration())
// To reduce large number of tasks.
.coalesce(16 * this.parallelism).map(entry -> {
- GenericRecord genericRecord = ((Tuple2) entry)._2();
+ GenericRecord genericRecord = ((scala.Tuple2) entry)._2();
Object partitionField = genericRecord.get(this.partitionKey);
if (partitionField == null) {
throw new HoodieIOException("partition key is missing. :" + this.partitionKey);
diff --git a/hudi-spark-datasource/hudi-spark3-common/src/main/java/org/apache/spark/sql/execution/datasources/parquet/Spark3HoodieVectorizedParquetRecordReader.java b/hudi-spark-datasource/hudi-spark3-common/src/main/java/org/apache/spark/sql/execution/datasources/parquet/Spark3HoodieVectorizedParquetRecordReader.java
index f1d10d288558..554f137c0c1b 100644
--- a/hudi-spark-datasource/hudi-spark3-common/src/main/java/org/apache/spark/sql/execution/datasources/parquet/Spark3HoodieVectorizedParquetRecordReader.java
+++ b/hudi-spark-datasource/hudi-spark3-common/src/main/java/org/apache/spark/sql/execution/datasources/parquet/Spark3HoodieVectorizedParquetRecordReader.java
@@ -17,10 +17,11 @@
package org.apache.spark.sql.execution.datasources.parquet;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hudi.client.utils.SparkInternalSchemaConverter;
import org.apache.hudi.common.util.collection.Pair;
+
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.execution.vectorized.OffHeapColumnVector;
@@ -39,7 +40,7 @@
public class Spark3HoodieVectorizedParquetRecordReader extends VectorizedParquetRecordReader {
// save the col type change info.
- private Map> typeChangeInfos;
+ private final Map> typeChangeInfos;
private ColumnarBatch columnarBatch;
@@ -48,7 +49,7 @@ public class Spark3HoodieVectorizedParquetRecordReader extends VectorizedParquet
private ColumnVector[] columnVectors;
// The capacity of vectorized batch.
- private int capacity;
+ private final int capacity;
// If true, this class returns batches instead of rows.
private boolean returnColumnarBatch;
diff --git a/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/ColumnNameXLator.java b/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/ColumnNameXLator.java
index 7d4eda29dc44..e2f60ec97b0c 100644
--- a/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/ColumnNameXLator.java
+++ b/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/ColumnNameXLator.java
@@ -24,11 +24,11 @@
public class ColumnNameXLator {
- private static Map<String, String> xformMap = new HashMap<>();
+ private static final Map<String, String> X_FORM_MAP = new HashMap<>();
public static String translateNestedColumn(String colName) {
Map.Entry<String, String> entry;
- for (Iterator<Map.Entry<String, String>> ic = xformMap.entrySet().iterator(); ic.hasNext(); colName =
+ for (Iterator<Map.Entry<String, String>> ic = X_FORM_MAP.entrySet().iterator(); ic.hasNext(); colName =
colName.replaceAll(entry.getKey(), entry.getValue())) {
entry = ic.next();
}
@@ -45,6 +45,6 @@ public static String translate(String colName, boolean nestedColumn) {
}
static {
- xformMap.put("\\$", "_dollar_");
+ X_FORM_MAP.put("\\$", "_dollar_");
}
}
diff --git a/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/HiveSchemaUtil.java b/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/HiveSchemaUtil.java
index baf905094d40..76297759892e 100644
--- a/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/HiveSchemaUtil.java
+++ b/hudi-sync/hudi-hive-sync/src/main/java/org/apache/hudi/hive/util/HiveSchemaUtil.java
@@ -446,8 +446,8 @@ public static String generateCreateDDL(String tableName, MessageType storageSche
List<String> partitionFields = new ArrayList<>();
for (String partitionKey : config.getSplitStrings(META_SYNC_PARTITION_FIELDS)) {
String partitionKeyWithTicks = tickSurround(partitionKey);
- partitionFields.add(new StringBuilder().append(partitionKeyWithTicks).append(" ")
- .append(getPartitionKeyType(hiveSchema, partitionKeyWithTicks)).toString());
+ partitionFields.add(partitionKeyWithTicks + " "
+ + getPartitionKeyType(hiveSchema, partitionKeyWithTicks));
}
String partitionsStr = String.join(",", partitionFields);
diff --git a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/hive/SchemaDifference.java b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/hive/SchemaDifference.java
index f48208a439fd..7a77cab2df1d 100644
--- a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/hive/SchemaDifference.java
+++ b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/hive/SchemaDifference.java
@@ -83,9 +83,9 @@ public static class Builder {
private final MessageType storageSchema;
private final Map tableSchema;
- private List deleteColumns;
- private Map updateColumnTypes;
- private Map addColumnTypes;
+ private final List deleteColumns;
+ private final Map updateColumnTypes;
+ private final Map addColumnTypes;
public Builder(MessageType storageSchema, Map tableSchema) {
this.storageSchema = storageSchema;
diff --git a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncConfig.java b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncConfig.java
index 475a43a0bab0..bac20f851d8c 100644
--- a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncConfig.java
+++ b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncConfig.java
@@ -201,7 +201,7 @@ public class HoodieSyncConfig extends HoodieConfig {
+ "obtained from Hudi's internal metadata table. Note, " + HoodieMetadataConfig.ENABLE + " must be set to true.");
private Configuration hadoopConf;
- private HoodieMetricsConfig metricsConfig;
+ private final HoodieMetricsConfig metricsConfig;
public HoodieSyncConfig(Properties props) {
this(props, HadoopConfigUtils.createHadoopConf(props));
diff --git a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncException.java b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncException.java
index d7238fbe8bf9..109296d6b33a 100644
--- a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncException.java
+++ b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/HoodieSyncException.java
@@ -37,6 +37,6 @@ public HoodieSyncException(Throwable t) {
}
protected static String format(String message, Object... args) {
- return String.format(String.valueOf(message), (Object[]) args);
+ return String.format(String.valueOf(message), args);
}
}
diff --git a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/metrics/HoodieMetaSyncMetrics.java b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/metrics/HoodieMetaSyncMetrics.java
index cbe729ae0301..d810c396c743 100644
--- a/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/metrics/HoodieMetaSyncMetrics.java
+++ b/hudi-sync/hudi-sync-common/src/main/java/org/apache/hudi/sync/common/metrics/HoodieMetaSyncMetrics.java
@@ -43,7 +43,7 @@ public class HoodieMetaSyncMetrics {
private static final String RECREATE_TABLE_DURATION_MS_METRIC = "recreate_table_duration_ms";
// Metrics are shut down by the shutdown hook added in the Metrics class
private Metrics metrics;
- private HoodieMetricsConfig metricsConfig;
+ private final HoodieMetricsConfig metricsConfig;
private transient HoodieStorage storage;
private final String syncToolName;
diff --git a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/AsyncTimelineServerBasedDetectionStrategy.java b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/AsyncTimelineServerBasedDetectionStrategy.java
index d73d787a5dc0..bc710af3a2d2 100644
--- a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/AsyncTimelineServerBasedDetectionStrategy.java
+++ b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/AsyncTimelineServerBasedDetectionStrategy.java
@@ -43,7 +43,7 @@ public class AsyncTimelineServerBasedDetectionStrategy extends TimelineServerBas
private static final Logger LOG = LoggerFactory.getLogger(AsyncTimelineServerBasedDetectionStrategy.class);
- private AtomicBoolean hasConflict = new AtomicBoolean(false);
+ private final AtomicBoolean hasConflict = new AtomicBoolean(false);
private ScheduledExecutorService asyncDetectorExecutor;
public AsyncTimelineServerBasedDetectionStrategy(String basePath, String markerDir, String markerName, Boolean checkCommitConflict) {
diff --git a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
index bce28e8ae9cd..102203025f35 100644
--- a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
+++ b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerBasedEarlyConflictDetectionRunnable.java
@@ -43,13 +43,13 @@
public class MarkerBasedEarlyConflictDetectionRunnable implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(MarkerBasedEarlyConflictDetectionRunnable.class);
- private MarkerHandler markerHandler;
- private String markerDir;
- private String basePath;
- private HoodieStorage storage;
- private AtomicBoolean hasConflict;
- private long maxAllowableHeartbeatIntervalInMs;
- private Set completedCommits;
+ private final MarkerHandler markerHandler;
+ private final String markerDir;
+ private final String basePath;
+ private final HoodieStorage storage;
+ private final AtomicBoolean hasConflict;
+ private final long maxAllowableHeartbeatIntervalInMs;
+ private final Set completedCommits;
private final boolean checkCommitConflict;
public MarkerBasedEarlyConflictDetectionRunnable(AtomicBoolean hasConflict, MarkerHandler markerHandler,
diff --git a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerDirState.java b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerDirState.java
index 4204a06876f6..cd4ec27f10b6 100644
--- a/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerDirState.java
+++ b/hudi-timeline-service/src/main/java/org/apache/hudi/timeline/service/handlers/marker/MarkerDirState.java
@@ -378,6 +378,6 @@ private void flushMarkersToFile(int markerFileIndex) {
closeQuietly(bufferedWriter);
closeQuietly(outputStream);
}
- LOG.debug(markersFilePath.toString() + " written in " + timer.endTimer() + " ms");
+ LOG.debug(markersFilePath + " written in " + timer.endTimer() + " ms");
}
}
\ No newline at end of file
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
index 05b902087a66..73569dc67284 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
@@ -62,8 +62,6 @@
import java.util.Collections;
import java.util.List;
-import scala.Tuple2;
-
/**
* Loads data from Parquet Sources.
*
@@ -179,7 +177,7 @@ protected JavaRDD> buildHoodieRecordsForImport
job.getConfiguration())
// To reduce large number of tasks.
.coalesce(16 * cfg.parallelism).map(entry -> {
- GenericRecord genericRecord = ((Tuple2) entry)._2();
+ GenericRecord genericRecord = ((scala.Tuple2) entry)._2();
Object partitionField = genericRecord.get(cfg.partitionKey);
if (partitionField == null) {
throw new HoodieIOException("partition key is missing. :" + cfg.partitionKey);
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCleaner.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCleaner.java
index b09c056c6fe5..8ccb745a6204 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCleaner.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCleaner.java
@@ -52,7 +52,7 @@ public class HoodieCleaner {
/**
* Bag of properties with source, hoodie client, key generator etc.
*/
- private TypedProperties props;
+ private final TypedProperties props;
public HoodieCleaner(Config cfg, JavaSparkContext jssc) {
this(cfg, jssc, UtilHelpers.buildProperties(jssc.hadoopConfiguration(), cfg.propsFilePath, cfg.configs));
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
index 4cf59d58e2e1..fbe3d5c702bb 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
@@ -55,7 +55,7 @@ public class HoodieCompactor {
public static final String SCHEDULE_AND_EXECUTE = "scheduleandexecute";
private final Config cfg;
private transient FileSystem fs;
- private TypedProperties props;
+ private final TypedProperties props;
private final JavaSparkContext jsc;
private HoodieTableMetaClient metaClient;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDataTableValidator.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDataTableValidator.java
index 5d7f9dae90a0..f8d0d4c49ab4 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDataTableValidator.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDataTableValidator.java
@@ -110,7 +110,7 @@ public class HoodieDataTableValidator implements Serializable {
// Properties with source, hoodie client, key generator etc.
private TypedProperties props;
- private HoodieTableMetaClient metaClient;
+ private final HoodieTableMetaClient metaClient;
protected transient Option asyncDataTableValidateService;
@@ -351,7 +351,7 @@ public void doDataTableValidation() {
if (!danglingFiles.isEmpty()) {
LOG.error("Data table validation failed due to extra files found for completed commits " + danglingFiles.size());
- danglingFiles.forEach(entry -> LOG.error("Dangling file: " + entry.toString()));
+ danglingFiles.forEach(entry -> LOG.error("Dangling file: " + entry));
finalResult = false;
if (!cfg.ignoreFailed) {
throw new HoodieValidationException("Data table validation failed due to dangling files " + danglingFiles.size());
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDropPartitionsTool.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDropPartitionsTool.java
index f82c8149e364..e81d15ff6798 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDropPartitionsTool.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieDropPartitionsTool.java
@@ -114,7 +114,7 @@ public class HoodieDropPartitionsTool implements Serializable {
// config
private final Config cfg;
// Properties with source, hoodie client, key generator etc.
- private TypedProperties props;
+ private final TypedProperties props;
private final HoodieTableMetaClient metaClient;
@@ -286,7 +286,7 @@ public static void main(String[] args) {
try {
tool.run();
} catch (Throwable throwable) {
- LOG.error("Fail to run deleting table partitions for " + cfg.toString(), throwable);
+ LOG.error("Fail to run deleting table partitions for " + cfg, throwable);
} finally {
jsc.stop();
}
@@ -384,7 +384,7 @@ private void syncHive(HiveSyncConfig hiveSyncConfig) {
+ "). Hive metastore URL :"
+ hiveSyncConfig.getStringOrDefault(HiveSyncConfigHolder.HIVE_URL)
+ ", basePath :" + cfg.basePath);
- LOG.info("Hive Sync Conf => " + hiveSyncConfig.toString());
+ LOG.info("Hive Sync Conf => " + hiveSyncConfig);
FileSystem fs = HadoopFSUtils.getFs(cfg.basePath, jsc.hadoopConfiguration());
HiveConf hiveConf = new HiveConf();
if (!StringUtils.isNullOrEmpty(cfg.hiveHMSUris)) {
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
index 7071e57c9309..90d958d540cd 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
@@ -93,7 +93,7 @@ public class HoodieIndexer {
static final String DROP_INDEX = "dropindex";
private final HoodieIndexer.Config cfg;
- private TypedProperties props;
+ private final TypedProperties props;
private final JavaSparkContext jsc;
private final HoodieTableMetaClient metaClient;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieMetadataTableValidator.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieMetadataTableValidator.java
index 8a8bf31a8ee9..771cba994f49 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieMetadataTableValidator.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieMetadataTableValidator.java
@@ -202,9 +202,9 @@ public class HoodieMetadataTableValidator implements Serializable {
// Spark context
private transient JavaSparkContext jsc;
// config
- private Config cfg;
+ private final Config cfg;
// Properties with source, hoodie client, key generator etc.
- private TypedProperties props;
+ private final TypedProperties props;
private final HoodieTableMetaClient metaClient;
@@ -212,7 +212,7 @@ public class HoodieMetadataTableValidator implements Serializable {
private final String taskLabels;
- private List throwables = new ArrayList<>();
+ private final List throwables = new ArrayList<>();
public HoodieMetadataTableValidator(JavaSparkContext jsc, Config cfg) {
this.jsc = jsc;
@@ -1444,11 +1444,8 @@ private boolean assertBaseFilesEquality(FileSlice fileSlice1, FileSlice fileSlic
return baseFile1.getFileName().equals(baseFile2.getFileName()) && baseFile1.getFileId().equals(baseFile2.getFileId())
&& baseFile1.getFileSize() == baseFile2.getFileSize();
} else {
- if (!fileSlice1.getBaseFile().isPresent() == fileSlice2.getBaseFile().isPresent()) {
- return false;
- }
+ return fileSlice1.getBaseFile().isPresent() == fileSlice2.getBaseFile().isPresent();
}
- return true;
}
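The rewritten else branch should preserve the behavior of the code it replaces: the two file slices compare equal only when neither has a base file, since the both-present case is already handled by the if branch. A minimal sketch of that check, with booleans standing in for getBaseFile().isPresent():

    // Sketch: else-branch equality check on base-file presence.
    public class BaseFilePresenceCheck {
      static boolean presenceMatches(boolean present1, boolean present2) {
        return present1 == present2; // equal only when both present or both absent
      }

      public static void main(String[] args) {
        System.out.println(presenceMatches(false, false)); // true: neither slice has a base file
        System.out.println(presenceMatches(true, false));  // false: only one slice has a base file
        System.out.println(presenceMatches(false, true));  // false
      }
    }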
/**
@@ -1770,7 +1767,7 @@ public List> getSortedColumnStatsList(Stri
.collect(Collectors.toList());
} else {
FileFormatUtils formatUtils = HoodieIOFactory.getIOFactory(metaClient.getStorage()).getFileFormatUtils(HoodieFileFormat.PARQUET);
- return fileNames.stream().flatMap(filename -> {
+ return (List>) fileNames.stream().flatMap(filename -> {
if (filename.endsWith(HoodieFileFormat.PARQUET.getFileExtension())) {
return formatUtils.readColumnStatsFromMetadata(
metaClient.getStorage(),
@@ -1781,14 +1778,12 @@ public List> getSortedColumnStatsList(Stri
StoragePath storagePartitionPath = new StoragePath(metaClient.getBasePath(), partitionPath);
String filePath = new StoragePath(storagePartitionPath, filename).toString();
try {
- return ((List>) getLogFileColumnRangeMetadata(filePath, metaClient, allColumnNameList, Option.of(readerSchema),
+ return getLogFileColumnRangeMetadata(filePath, metaClient, allColumnNameList, Option.of(readerSchema),
metadataConfig.getMaxReaderBufferSize())
.stream()
// We need to convert file path and use only the file name instead of the complete file path
.map(m -> (HoodieColumnRangeMetadata) HoodieColumnRangeMetadata.create(filename, m.getColumnName(), m.getMinValue(), m.getMaxValue(),
- m.getNullCount(), m.getValueCount(), m.getTotalSize(), m.getTotalUncompressedSize()))
- .collect(Collectors.toList()))
- .stream();
+ m.getNullCount(), m.getValueCount(), m.getTotalSize(), m.getTotalUncompressedSize()));
} catch (IOException e) {
throw new HoodieIOException(String.format("Failed to get column stats for file: %s", filePath), e);
}
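The simplification above removes an intermediate collect-then-restream round trip inside flatMap: the lambda can hand its Stream straight back. A small self-contained sketch of the before/after shape, using plain strings instead of column-stats metadata:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class FlatMapSketch {
      public static void main(String[] args) {
        List<String> files = Arrays.asList("a", "b");

        // Before: collect to a list, then stream it again just to satisfy flatMap.
        List<String> verbose = files.stream()
            .flatMap(f -> Stream.of(f + "-1", f + "-2").collect(Collectors.toList()).stream())
            .collect(Collectors.toList());

        // After: return the Stream directly from the flatMap lambda.
        List<String> direct = files.stream()
            .flatMap(f -> Stream.of(f + "-1", f + "-2"))
            .collect(Collectors.toList());

        System.out.println(verbose.equals(direct)); // true
      }
    }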
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieRepairTool.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieRepairTool.java
index 5bc5867be6e7..982434a1abf6 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieRepairTool.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieRepairTool.java
@@ -146,7 +146,7 @@ public class HoodieRepairTool {
// Repair config
private final Config cfg;
// Properties with source, hoodie client, key generator etc.
- private TypedProperties props;
+ private final TypedProperties props;
// Spark context
private final HoodieEngineContext context;
private final HoodieTableMetaClient metaClient;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieTTLJob.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieTTLJob.java
index 7ab5b5747c6f..fb608c9cb668 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieTTLJob.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieTTLJob.java
@@ -47,7 +47,7 @@ public class HoodieTTLJob {
private final Config cfg;
private final TypedProperties props;
private final JavaSparkContext jsc;
- private HoodieTableMetaClient metaClient;
+ private final HoodieTableMetaClient metaClient;
public HoodieTTLJob(JavaSparkContext jsc, Config cfg) {
this(jsc, cfg, UtilHelpers.buildProperties(jsc.hadoopConfiguration(), cfg.propsFilePath, cfg.configs),
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/TableSizeStats.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/TableSizeStats.java
index 0df69a685e0f..90267c8c0b8d 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/TableSizeStats.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/TableSizeStats.java
@@ -111,9 +111,9 @@ public class TableSizeStats implements Serializable {
// Spark context
private transient JavaSparkContext jsc;
// config
- private Config cfg;
+ private final Config cfg;
// Properties with source, hoodie client, key generator etc.
- private TypedProperties props;
+ private final TypedProperties props;
public TableSizeStats(JavaSparkContext jsc, Config cfg) {
this.jsc = jsc;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/callback/kafka/HoodieWriteCommitKafkaCallback.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/callback/kafka/HoodieWriteCommitKafkaCallback.java
index 75cc9df86d3a..78bd0ae0228f 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/callback/kafka/HoodieWriteCommitKafkaCallback.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/callback/kafka/HoodieWriteCommitKafkaCallback.java
@@ -49,9 +49,9 @@ public class HoodieWriteCommitKafkaCallback implements HoodieWriteCommitCallback
private static final Logger LOG = LoggerFactory.getLogger(HoodieWriteCommitKafkaCallback.class);
- private HoodieConfig hoodieConfig;
- private String bootstrapServers;
- private String topic;
+ private final HoodieConfig hoodieConfig;
+ private final String bootstrapServers;
+ private final String topic;
public HoodieWriteCommitKafkaCallback(HoodieWriteConfig config) {
this.hoodieConfig = config;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/checkpointing/KafkaConnectHdfsProvider.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/checkpointing/KafkaConnectHdfsProvider.java
index 8e8af55a3c56..273bbb12f228 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/checkpointing/KafkaConnectHdfsProvider.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/checkpointing/KafkaConnectHdfsProvider.java
@@ -38,7 +38,7 @@
* Documentation: https://docs.confluent.io/current/connect/kafka-connect-hdfs/index.html
*/
public class KafkaConnectHdfsProvider extends InitialCheckPointProvider {
- private static String FILENAME_SEPARATOR = "[\\+\\.]";
+ private static final String FILENAME_SEPARATOR = "[\\+\\.]";
public KafkaConnectHdfsProvider(TypedProperties props) {
super(props);
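For context, the FILENAME_SEPARATOR pattern splits on either '+' or '.'. Assuming the Kafka Connect HDFS sink's usual topic+partition+startOffset+endOffset.extension file naming (an assumption about the layout, per the Confluent documentation referenced above), a quick sketch of what the split yields:

    public class FilenameSplitSketch {
      private static final String FILENAME_SEPARATOR = "[\\+\\.]";

      public static void main(String[] args) {
        // Hypothetical Kafka Connect HDFS file name.
        String fileName = "my_topic+0+0000000000+0000000099.parquet";
        String[] parts = fileName.split(FILENAME_SEPARATOR);
        // parts: [my_topic, 0, 0000000000, 0000000099, parquet]
        System.out.println(String.join(" | ", parts));
      }
    }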
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/schema/JdbcbasedSchemaProvider.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/schema/JdbcbasedSchemaProvider.java
index ef76a797ca50..9cbc57539e53 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/schema/JdbcbasedSchemaProvider.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/schema/JdbcbasedSchemaProvider.java
@@ -41,7 +41,7 @@
*/
public class JdbcbasedSchemaProvider extends SchemaProvider {
private Schema sourceSchema;
- private Map options = new HashMap<>();
+ private final Map options = new HashMap<>();
public JdbcbasedSchemaProvider(TypedProperties props, JavaSparkContext jssc) {
super(props, jssc);
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/debezium/DebeziumSource.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/debezium/DebeziumSource.java
index e4dc95e26ecc..acb6e925681a 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/debezium/DebeziumSource.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/debezium/DebeziumSource.java
@@ -192,7 +192,7 @@ public static Dataset convertDateColumns(Dataset dataset, Schema schem
}
}).map(Field::name).collect(Collectors.toList());
- LOG.info("Date fields: " + dateFields.toString());
+ LOG.info("Date fields: {}", dateFields);
for (String dateCol : dateFields) {
dataset = dataset.withColumn(dateCol, functions.col(dateCol).cast(DataTypes.DateType));
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/DatePartitionPathSelector.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/DatePartitionPathSelector.java
index 7458e3a59e00..432e2994b799 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/DatePartitionPathSelector.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/DatePartitionPathSelector.java
@@ -71,7 +71,7 @@
*/
public class DatePartitionPathSelector extends DFSPathSelector {
- private static volatile Logger LOG = LoggerFactory.getLogger(DatePartitionPathSelector.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DatePartitionPathSelector.class);
private final String dateFormat;
private final int datePartitionDepth;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
index e09e1c7be812..d9b940097dbd 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
@@ -104,12 +104,10 @@ public static Map strToOffsets(String checkpointStr) {
public static String offsetsToStr(OffsetRange[] ranges) {
// merge the ranges by partition to maintain one offset range map to one topic partition.
ranges = mergeRangesByTopicPartition(ranges);
- StringBuilder sb = new StringBuilder();
// at least 1 partition will be present.
- sb.append(ranges[0].topic() + ",");
- sb.append(Arrays.stream(ranges).map(r -> String.format("%s:%d", r.partition(), r.untilOffset()))
- .collect(Collectors.joining(",")));
- return sb.toString();
+ return ranges[0].topic() + ","
+ + Arrays.stream(ranges).map(r -> String.format("%s:%d", r.partition(), r.untilOffset()))
+ .collect(Collectors.joining(","));
}
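As the rewritten offsetsToStr shows, the checkpoint string is the topic name followed by comma-separated partition:untilOffset pairs. A minimal sketch of building and reading that shape with plain JDK code (strToOffsets in this class is the real parser; the topic name and offsets below are illustrative):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class OffsetCheckpointSketch {
      public static void main(String[] args) {
        // Shape produced by offsetsToStr: "<topic>,<partition>:<untilOffset>,..."
        String checkpoint = "impressions,0:1500,1:1420";

        String[] splits = checkpoint.split(",");
        String topic = splits[0];
        Map<Integer, Long> offsets = new LinkedHashMap<>();
        for (int i = 1; i < splits.length; i++) {
          String[] pair = splits[i].split(":");
          offsets.put(Integer.parseInt(pair[0]), Long.parseLong(pair[1]));
        }
        System.out.println(topic + " -> " + offsets); // impressions -> {0=1500, 1=1420}
      }
    }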
/**
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/gcs/MessageValidity.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/gcs/MessageValidity.java
index 27aa90661981..a1c73d95800e 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/gcs/MessageValidity.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/gcs/MessageValidity.java
@@ -50,6 +50,6 @@ public Option getDescription() {
* */
public enum ProcessingDecision {
DO_PROCESS,
- DO_SKIP;
+ DO_SKIP
}
}
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/BootstrapExecutor.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/BootstrapExecutor.java
index cc2615455547..a2aaeeb89e24 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/BootstrapExecutor.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/BootstrapExecutor.java
@@ -54,9 +54,9 @@
import java.io.Serializable;
import java.util.HashMap;
-import static org.apache.hudi.common.table.HoodieTableConfig.TIMELINE_HISTORY_PATH;
import static org.apache.hudi.common.table.HoodieTableConfig.PARTITION_METAFILE_USE_BASE_FORMAT;
import static org.apache.hudi.common.table.HoodieTableConfig.POPULATE_META_FIELDS;
+import static org.apache.hudi.common.table.HoodieTableConfig.TIMELINE_HISTORY_PATH;
import static org.apache.hudi.common.table.HoodieTableConfig.TIMELINE_TIMEZONE;
import static org.apache.hudi.config.HoodieWriteConfig.PRECOMBINE_FIELD_NAME;
import static org.apache.hudi.config.HoodieWriteConfig.WRITE_TABLE_VERSION;
@@ -110,7 +110,7 @@ public class BootstrapExecutor implements Serializable {
*/
private transient FileSystem fs;
- private String bootstrapBasePath;
+ private final String bootstrapBasePath;
/**
* Bootstrap Executor.
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieMultiTableStreamer.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieMultiTableStreamer.java
index 92c4a843c94a..0b5ed0ff26c4 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieMultiTableStreamer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieMultiTableStreamer.java
@@ -70,12 +70,12 @@
*/
public class HoodieMultiTableStreamer {
- private static Logger logger = LoggerFactory.getLogger(HoodieMultiTableStreamer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HoodieMultiTableStreamer.class);
- private List tableExecutionContexts;
+ private final List tableExecutionContexts;
private transient JavaSparkContext jssc;
- private Set successTables;
- private Set failedTables;
+ private final Set successTables;
+ private final Set failedTables;
public HoodieMultiTableStreamer(Config config, JavaSparkContext jssc) throws IOException {
this.tableExecutionContexts = new ArrayList<>();
@@ -119,7 +119,7 @@ private void checkIfTableConfigFileExists(String configFolder, FileSystem fs, St
//commonProps are passed as parameter which contain table to config file mapping
private void populateTableExecutionContextList(TypedProperties properties, String configFolder, FileSystem fs, Config config) throws IOException {
List tablesToBeIngested = getTablesToBeIngested(properties);
- logger.info("tables to be ingested via MultiTableDeltaStreamer : " + tablesToBeIngested);
+ LOG.info("tables to be ingested via MultiTableDeltaStreamer : " + tablesToBeIngested);
TableExecutionContext executionContext;
for (String table : tablesToBeIngested) {
String[] tableWithDatabase = table.split("\\.");
@@ -270,11 +270,11 @@ public static void main(String[] args) throws IOException {
}
if (config.enableHiveSync) {
- logger.warn("--enable-hive-sync will be deprecated in a future release; please use --enable-sync instead for Hive syncing");
+ LOG.warn("--enable-hive-sync will be deprecated in a future release; please use --enable-sync instead for Hive syncing");
}
if (config.targetTableName != null) {
- logger.warn(String.format("--target-table is deprecated and will be removed in a future release due to it's useless;"
+ LOG.warn(String.format("--target-table is deprecated and will be removed in a future release due to it's useless;"
+ " please use %s to configure multiple target tables", HoodieStreamerConfig.TABLES_TO_BE_INGESTED.key()));
}
@@ -458,14 +458,14 @@ public void sync() {
new HoodieStreamer(context.getConfig(), jssc, Option.ofNullable(context.getProperties())).sync();
successTables.add(Helpers.getTableWithDatabase(context));
} catch (Exception e) {
- logger.error("error while running MultiTableDeltaStreamer for table: " + context.getTableName(), e);
+ LOG.error("error while running MultiTableDeltaStreamer for table: " + context.getTableName(), e);
failedTables.add(Helpers.getTableWithDatabase(context));
}
}
- logger.info("Ingestion was successful for topics: " + successTables);
+ LOG.info("Ingestion was successful for topics: " + successTables);
if (!failedTables.isEmpty()) {
- logger.info("Ingestion failed for topics: " + failedTables);
+ LOG.info("Ingestion failed for topics: " + failedTables);
}
}
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieStreamer.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieStreamer.java
index f784a7dba6db..a23687d5c7d7 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieStreamer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/HoodieStreamer.java
@@ -119,7 +119,7 @@ public class HoodieStreamer implements Serializable {
public static final String CHECKPOINT_KEY = HoodieWriteConfig.STREAMER_CHECKPOINT_KEY;
public static final String CHECKPOINT_RESET_KEY = STREAMER_CHECKPOINT_RESET_KEY_V1;
- protected final transient Config cfg;
+ protected transient Config cfg;
/**
* NOTE: These properties are already consolidated w/ CLI provided config-overrides.
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/StreamSync.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/StreamSync.java
index 12c869666e1f..8413b1fac318 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/StreamSync.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/streamer/StreamSync.java
@@ -192,7 +192,7 @@ public class StreamSync implements Serializable, Closeable {
*/
private transient Option transformer;
- private String keyGenClassName;
+ private final String keyGenClassName;
/**
* Filesystem used.
@@ -202,12 +202,12 @@ public class StreamSync implements Serializable, Closeable {
/**
* Spark context Wrapper.
*/
- private final transient HoodieSparkEngineContext hoodieSparkContext;
+ private transient HoodieSparkEngineContext hoodieSparkContext;
/**
* Spark Session.
*/
- private transient SparkSession sparkSession;
+ private final transient SparkSession sparkSession;
/**
* Hive Config.
@@ -1278,7 +1278,7 @@ public Option getClusteringInstantOpt() {
class WriteClientWriteResult {
private Map> partitionToReplacedFileIds = Collections.emptyMap();
- private JavaRDD writeStatusRDD;
+ private final JavaRDD writeStatusRDD;
public WriteClientWriteResult(JavaRDD writeStatusRDD) {
this.writeStatusRDD = writeStatusRDD;