Commit 422b0a8

merge

dengzhhu653 committed Feb 21, 2024
2 parents 30bc4c2 + 5b76949
Showing 1,546 changed files with 70,562 additions and 35,980 deletions.
7 changes: 3 additions & 4 deletions Jenkinsfile
@@ -112,7 +112,7 @@ def sonarAnalysis(args) {
"""+args+" -DskipTests -Dit.skipTests -Dmaven.javadoc.skip"

sh """#!/bin/bash -e
-sw java 11 && . /etc/profile.d/java.sh
+sw java 17 && . /etc/profile.d/java.sh
export MAVEN_OPTS=-Xmx5G
"""+mvnCmd
}
@@ -121,7 +121,7 @@ def sonarAnalysis(args) {
def hdbPodTemplate(closure) {
podTemplate(
containers: [
-containerTemplate(name: 'hdb', image: 'kgyrtkirk/hive-dev-box:executor', ttyEnabled: true, command: 'tini -- cat',
+containerTemplate(name: 'hdb', image: 'wecharyu/hive-dev-box:executor', ttyEnabled: true, command: 'tini -- cat',
alwaysPullImage: true,
resourceRequestCpu: '1800m',
resourceLimitCpu: '8000m',
@@ -287,7 +287,6 @@ set -x
echo 127.0.0.1 dev_$dbType | sudo tee -a /etc/hosts
. /etc/profile.d/confs.sh
sw hive-dev $PWD
-ping -c2 dev_$dbType
export DOCKER_NETWORK=host
export DBNAME=metastore
reinit_metastore $dbType
@@ -308,7 +307,7 @@ time docker rm -f dev_$dbType || true
set -e
dev-support/nightly
'''
buildHive("install -Dtest=noMatches -Pdist -pl packaging -am")
buildHive("install -Dtest=noMatches -Pdist -Piceberg -pl packaging -am")
}
stage('Verify') {
sh '''#!/bin/bash
8 changes: 4 additions & 4 deletions README.md
@@ -24,10 +24,10 @@ subqueries, common table expressions, and more. Hive's SQL can also be
extended with user code via user defined functions (UDFs), user defined
aggregates (UDAFs), and user defined table functions (UDTFs).

-Hive users have a choice of 3 runtimes when executing SQL queries.
-Users can choose between Apache Hadoop MapReduce or Apache Tez
-frameworks as their execution backend. MapReduce is a
-mature framework that is proven at large scales. However, MapReduce
+Hive users can choose between Apache Hadoop MapReduce or Apache Tez
+frameworks as their execution backend. Note that MapReduce framework
+has been deprecated since Hive 2, and Apache Tez is recommended. MapReduce
+is a mature framework that is proven at large scales. However, MapReduce
is a purely batch framework, and queries using it may experience
higher latencies (tens of seconds), even over small datasets. Apache
Tez is designed for interactive query, and has substantially reduced
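The README paragraph above recommends Tez over the deprecated MapReduce
backend. A minimal sketch of selecting the engine programmatically; it
assumes the HiveConf.ConfVars.HIVE_EXECUTION_ENGINE constant (property key
"hive.execution.engine"), which is not shown in this diff:

import org.apache.hadoop.hive.conf.HiveConf;

public class EngineSelectionSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Assumption: HIVE_EXECUTION_ENGINE maps to "hive.execution.engine".
    // Equivalent to "SET hive.execution.engine=tez;" in a Hive session.
    conf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez");
    System.out.println(conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
  }
}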
8 changes: 3 additions & 5 deletions beeline/pom.xml
@@ -183,7 +183,7 @@
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-inline</artifactId>
-<version>4.11.0</version>
+<version>${mockito-inline.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -243,14 +243,12 @@
<goal>shade</goal>
</goals>
<configuration>
-<descriptorRefs>
-<descriptorRef>jar-with-dependencies</descriptorRef>
-</descriptorRefs>
<createDependencyReducedPom>false</createDependencyReducedPom>
+<finalName>jar-with-dependencies</finalName>
<transformers>
<transformer implementation="com.github.edwgiz.mavenShadePlugin.log4j2CacheTransformer.PluginsCacheFileTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-<mainClass>org.openjdk.jmh.Main</mainClass>
+<mainClass>org.apache.hive.beeline.BeeLine</mainClass>
</transformer>
</transformers>
<filters>
2 changes: 1 addition & 1 deletion beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1831,7 +1831,7 @@ String getPromptForCli() {
String prompt;
// read prompt configuration and substitute variables.
HiveConf conf = getCommands().getHiveConf(true);
-prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
+prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = getCommands().substituteVariables(conf, prompt);
return prompt + getFormattedDb() + "> ";
}
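The change above is part of a mechanical ConfVars rename (CLIPROMPT to
CLI_PROMPT) that runs through this commit. A small sketch of why such a
rename is safe for users, assuming the underlying property key
("hive.cli.prompt") was left untouched, which the diff does not itself show:

import org.apache.hadoop.hive.conf.HiveConf;

public class PromptLookupSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Only the Java enum constant was renamed; settings keyed by the
    // property name (e.g. from hive-site.xml) still resolve through it.
    conf.set(HiveConf.ConfVars.CLI_PROMPT.varname, "mycli");
    System.out.println(conf.getVar(HiveConf.ConfVars.CLI_PROMPT)); // mycli
  }
}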
4 changes: 2 additions & 2 deletions beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
@@ -291,7 +291,7 @@ public void updateBeeLineOptsFromConf() {
if (conf == null) {
conf = beeLine.getCommands().getHiveConf(false);
}
-setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS));
+setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS));
}
}

@@ -529,7 +529,7 @@ public boolean getShowDbInPrompt() {
return showDbInPrompt;
} else {
HiveConf conf = beeLine.getCommands().getHiveConf(true);
-return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB);
+return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB);
}
}

2 changes: 1 addition & 1 deletion (file path not captured)
@@ -280,7 +280,7 @@ private void verifyCMD(String CMD, String keywords, OutputStream os, String[] op
public static void init(){
// something changed scratch dir permissions, so test can't execute
HiveConf hiveConf = new HiveConf();
-String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
+String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
File file = new File(scratchDir);
if (file.exists()) {
file.setWritable(true, false);
7 changes: 4 additions & 3 deletions cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -425,7 +425,7 @@ public void handle(Signal signal) {
ret = processCmd(command.toString());
lastRet = ret;
} catch (CommandProcessorException e) {
-boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
+boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS);
if (!ignoreErrors) {
throw e;
}
@@ -773,7 +773,7 @@ public int run(String[] args) throws Exception {
}

// read prompt configuration and substitute variables.
-prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
+prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
@@ -809,6 +809,7 @@ public Map<String, String> getHiveVariable() {
} catch (CommandProcessorException e) {
return e.getResponseCode();
} finally {
+SessionState.endStart(ss);
ss.resetThreadName();
ss.close();
}
@@ -936,7 +937,7 @@ protected void setupConsoleReader() throws IOException {
* @return String to show user for current db value
*/
private static String getFormattedDb(HiveConf conf, CliSessionState ss) {
-if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB)) {
+if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB)) {
return "";
}
//BUG: This will not work in remote mode - HIVE-5153
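The CLI_IGNORE_ERRORS hunk above rethrows a failed command's exception
unless errors are being ignored. A self-contained sketch of that control
flow, with simplified stand-in types rather than Hive's actual
CommandProcessorException API:

import java.util.List;

public class IgnoreErrorsSketch {
  static class CommandException extends Exception {
    CommandException(String msg) { super(msg); }
  }

  static void process(String cmd) throws CommandException {
    if (cmd.startsWith("bad")) {
      throw new CommandException("failed: " + cmd);
    }
  }

  public static void main(String[] args) throws CommandException {
    boolean ignoreErrors = true; // stands in for ConfVars.CLI_IGNORE_ERRORS
    for (String cmd : List.of("ok1", "bad2", "ok3")) {
      try {
        process(cmd);
      } catch (CommandException e) {
        if (!ignoreErrors) {
          throw e; // fail fast unless errors are ignored
        }
        System.err.println("ignored: " + e.getMessage());
      }
    }
  }
}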
24 changes: 19 additions & 5 deletions common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -18,6 +18,8 @@

package org.apache.hadoop.hive.common;

+import static org.apache.hadoop.hive.shims.Utils.RAW_RESERVED_VIRTUAL_PATH;

import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
@@ -61,11 +63,13 @@
import org.apache.hadoop.fs.PathExistsException;
import org.apache.hadoop.fs.PathIsDirectoryException;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
+import com.google.common.base.Preconditions;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.functional.RemoteIterators;
import org.apache.hive.common.util.ShutdownHookManager;
@@ -767,7 +771,7 @@ static boolean copy(FileSystem srcFS, Path src,
// is tried and it fails. We depend upon that behaviour in cases like replication,
// wherein if distcp fails, there is good reason to not plod along with a trivial
// implementation, and fail instead.
-copied = doIOUtilsCopyBytes(srcFS, srcFS.getFileStatus(src), dstFS, dst, deleteSource, overwrite, shouldPreserveXAttrs(conf, srcFS, dstFS), conf, copyStatistics);
+copied = doIOUtilsCopyBytes(srcFS, srcFS.getFileStatus(src), dstFS, dst, deleteSource, overwrite, shouldPreserveXAttrs(conf, srcFS, dstFS, src), conf, copyStatistics);
}
return copied;
}
@@ -895,11 +899,21 @@ private static void checkDependencies(FileSystem srcFS, Path src, FileSystem dst
}
}

-public static boolean shouldPreserveXAttrs(HiveConf conf, FileSystem srcFS, FileSystem dstFS) throws IOException {
-  if (!Utils.checkFileSystemXAttrSupport(srcFS) || !Utils.checkFileSystemXAttrSupport(dstFS)){
-    return false;
+public static boolean shouldPreserveXAttrs(HiveConf conf, FileSystem srcFS, FileSystem dstFS, Path path) throws IOException {
+  Preconditions.checkNotNull(path);
+  if (conf.getBoolVar(ConfVars.DFS_XATTR_ONLY_SUPPORTED_ON_RESERVED_NAMESPACE)) {
+
+    if (!(path.toUri().getPath().startsWith(RAW_RESERVED_VIRTUAL_PATH)
+        && Utils.checkFileSystemXAttrSupport(srcFS, new Path(RAW_RESERVED_VIRTUAL_PATH))
+        && Utils.checkFileSystemXAttrSupport(dstFS, new Path(RAW_RESERVED_VIRTUAL_PATH)))) {
+      return false;
+    }
+  } else {
+    if (!Utils.checkFileSystemXAttrSupport(srcFS) || !Utils.checkFileSystemXAttrSupport(dstFS)) {
+      return false;
+    }
  }
-  for (Map.Entry<String,String> entry : conf.getPropsWithPrefix(Utils.DISTCP_OPTIONS_PREFIX).entrySet()) {
+  for (Map.Entry<String, String> entry : conf.getPropsWithPrefix(Utils.DISTCP_OPTIONS_PREFIX).entrySet()) {
String distCpOption = entry.getKey();
if (distCpOption.startsWith("p")) {
return distCpOption.contains("x");
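A hedged, self-contained sketch of the gate the new shouldPreserveXAttrs
introduces: when DFS_XATTR_ONLY_SUPPORTED_ON_RESERVED_NAMESPACE is set,
xattrs are preserved only for paths under the raw reserved namespace. It
assumes RAW_RESERVED_VIRTUAL_PATH is HDFS's "/.reserved/raw" prefix and
skips the filesystem xattr-support probes:

public class XAttrGateSketch {
  // Assumption: mirrors org.apache.hadoop.hive.shims.Utils.RAW_RESERVED_VIRTUAL_PATH.
  static final String RAW_RESERVED_VIRTUAL_PATH = "/.reserved/raw";

  static boolean shouldPreserveXAttrs(boolean reservedNamespaceOnly, String path) {
    if (reservedNamespaceOnly) {
      // xattrs are copied only for paths under the raw reserved namespace
      return path.startsWith(RAW_RESERVED_VIRTUAL_PATH);
    }
    return true; // otherwise rely on the filesystems' xattr support alone
  }

  public static void main(String[] args) {
    System.out.println(shouldPreserveXAttrs(true, "/.reserved/raw/warehouse/t1")); // true
    System.out.println(shouldPreserveXAttrs(true, "/warehouse/t1")); // false
  }
}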
12 changes: 6 additions & 6 deletions common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -117,11 +117,11 @@ public static String initHiveLog4jCommon(HiveConf conf, ConfVars confVarName)
// property specified file found in local file system
// use the specified file
if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
-String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
+String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID);
if(queryId == null || (queryId = queryId.trim()).isEmpty()) {
queryId = "unknown-" + System.currentTimeMillis();
}
-System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
+System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(), queryId);
}
final boolean async = checkAndSetAsyncLogging(conf);
// required for MDC based routing appender so that child threads can inherit the MDC context
@@ -157,8 +157,8 @@ private static String initHiveLog4jDefault(
if (hive_l4j == null) {
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
}
-System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
-HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(),
+HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
break;
case HIVE_LOG4J_FILE:
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
@@ -216,8 +216,8 @@ public static String maskIfPassword(String key, String value) {
*/
public static void registerLoggingContext(Configuration conf) {
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
-MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVESESSIONID));
-MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SESSION_ID));
+MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
MDC.put(OPERATIONLOG_LEVEL_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
MDC.put(OPERATIONLOG_LOCATION_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION));
l4j.info("Thread context registration is done.");
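The registerLoggingContext hunk above feeds session and query identifiers
into the MDC so that log4j2 routing appenders can pick them up per thread.
A minimal illustration with slf4j; the MDC keys below are illustrative,
not Hive's actual SESSIONID_LOG_KEY/QUERYID_LOG_KEY values:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcRoutingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(MdcRoutingSketch.class);

  public static void main(String[] args) {
    MDC.put("sessionId", "s-123"); // analogous to SESSIONID_LOG_KEY
    MDC.put("queryId", "q-456");   // analogous to QUERYID_LOG_KEY
    try {
      // a log4j2 pattern such as %X{queryId} can now route/stamp this event
      LOG.info("query started");
    } finally {
      MDC.clear(); // always clean the thread-local context
    }
  }
}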
2 changes: 1 addition & 1 deletion (file path not captured)
@@ -38,7 +38,7 @@ public class ServerUtils {

public static void cleanUpScratchDir(HiveConf hiveConf) {
if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) {
-String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
+String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
try {
Path jobScratchDir = new Path(hiveScratchDir);
LOG.info("Cleaning scratchDir : " + hiveScratchDir);
6 changes: 5 additions & 1 deletion (file path not captured)
@@ -142,7 +142,11 @@ private static TSocket getSSLSocketWithHttps(TSocket tSSLSocket, int maxMessageS
throws TTransportException {
SSLSocket sslSocket = (SSLSocket) tSSLSocket.getSocket();
SSLParameters sslParams = sslSocket.getSSLParameters();
-sslParams.setEndpointIdentificationAlgorithm("HTTPS");
+if (sslSocket.getLocalAddress().getHostAddress().equals("127.0.0.1")) {
+  sslParams.setEndpointIdentificationAlgorithm(null);
+} else {
+  sslParams.setEndpointIdentificationAlgorithm("HTTPS");
+}
sslSocket.setSSLParameters(sslParams);
TSocket tSocket = new TSocket(sslSocket);
return configureThriftMaxMessageSize(tSocket, maxMessageSize);
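The hunk above turns off hostname verification only for loopback
connections, where a certificate cannot match the peer name. A standalone
sketch of the standard javax.net.ssl mechanism it uses: setting the
endpoint-identification algorithm to "HTTPS" enables certificate-hostname
checking during the handshake, while null disables it:

import javax.net.ssl.SSLParameters;

public class EndpointIdSketch {
  public static void main(String[] args) {
    SSLParameters params = new SSLParameters();
    boolean loopback = true; // stands in for the 127.0.0.1 check above
    params.setEndpointIdentificationAlgorithm(loopback ? null : "HTTPS");
    System.out.println("endpoint id = " + params.getEndpointIdentificationAlgorithm());
  }
}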
9 changes: 3 additions & 6 deletions common/src/java/org/apache/hadoop/hive/conf/Constants.java
@@ -98,18 +98,15 @@ public class Constants {

public static final String ORC_INPUT_FORMAT = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
public static final String ORC_OUTPUT_FORMAT = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";



public static final Pattern COMPACTION_POOLS_PATTERN = Pattern.compile("hive\\.compactor\\.worker\\.(.*)\\.threads");
public static final String HIVE_COMPACTOR_WORKER_POOL = "hive.compactor.worker.pool";
public static final String HIVE_COMPACTOR_REBALANCE_ORDERBY = "hive.compactor.rebalance.orderby";

public static final String HTTP_HEADER_REQUEST_TRACK = "X-Request-ID";
public static final String TIME_POSTFIX_REQUEST_TRACK = "_TIME";

public static final String ICEBERG = "iceberg";

public static final String ICEBERG_PARTITION_TABLE_SCHEMA = "partition,spec_id,record_count,file_count," +
"position_delete_record_count,position_delete_file_count,equality_delete_record_count," +
"equality_delete_file_count";
"equality_delete_file_count,last_updated_at,total_data_file_size_in_bytes,last_updated_snapshot_id";
public static final String DELIMITED_JSON_SERDE = "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe";
}