Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/apache/hive into HIVE-27746-1
Browse files Browse the repository at this point in the history
  • Loading branch information
dengzhhu653 committed Mar 7, 2024
2 parents 002158d + f4e4114 commit fce8f09
Show file tree
Hide file tree
Showing 1,198 changed files with 55,594 additions and 35,419 deletions.
5 changes: 2 additions & 3 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def sonarAnalysis(args) {
"""+args+" -DskipTests -Dit.skipTests -Dmaven.javadoc.skip"

sh """#!/bin/bash -e
sw java 11 && . /etc/profile.d/java.sh
sw java 17 && . /etc/profile.d/java.sh
export MAVEN_OPTS=-Xmx5G
"""+mvnCmd
}
Expand All @@ -121,7 +121,7 @@ def sonarAnalysis(args) {
def hdbPodTemplate(closure) {
podTemplate(
containers: [
containerTemplate(name: 'hdb', image: 'kgyrtkirk/hive-dev-box:executor', ttyEnabled: true, command: 'tini -- cat',
containerTemplate(name: 'hdb', image: 'wecharyu/hive-dev-box:executor', ttyEnabled: true, command: 'tini -- cat',
alwaysPullImage: true,
resourceRequestCpu: '1800m',
resourceLimitCpu: '8000m',
Expand Down Expand Up @@ -287,7 +287,6 @@ set -x
echo 127.0.0.1 dev_$dbType | sudo tee -a /etc/hosts
. /etc/profile.d/confs.sh
sw hive-dev $PWD
ping -c2 dev_$dbType
export DOCKER_NETWORK=host
export DBNAME=metastore
reinit_metastore $dbType
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@ subqueries, common table expressions, and more. Hive's SQL can also be
extended with user code via user defined functions (UDFs), user defined
aggregates (UDAFs), and user defined table functions (UDTFs).

Hive users have a choice of 3 runtimes when executing SQL queries.
Users can choose between Apache Hadoop MapReduce or Apache Tez
frameworks as their execution backend. MapReduce is a
mature framework that is proven at large scales. However, MapReduce
Hive users can choose between Apache Hadoop MapReduce or Apache Tez
frameworks as their execution backend. Note that MapReduce framework
has been deprecated since Hive 2, and Apache Tez is recommended. MapReduce
is a mature framework that is proven at large scales. However, MapReduce
is a purely batch framework, and queries using it may experience
higher latencies (tens of seconds), even over small datasets. Apache
Tez is designed for interactive query, and has substantially reduced
Expand Down
2 changes: 1 addition & 1 deletion accumulo-handler/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
<version>4.0.0-beta-2-SNAPSHOT</version>
<version>4.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>hive-accumulo-handler</artifactId>
Expand Down
8 changes: 3 additions & 5 deletions beeline/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
<version>4.0.0-beta-2-SNAPSHOT</version>
<version>4.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>hive-beeline</artifactId>
Expand Down Expand Up @@ -243,14 +243,12 @@
<goal>shade</goal>
</goals>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<createDependencyReducedPom>false</createDependencyReducedPom>
<finalName>jar-with-dependencies</finalName>
<transformers>
<transformer implementation="com.github.edwgiz.mavenShadePlugin.log4j2CacheTransformer.PluginsCacheFileTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>org.openjdk.jmh.Main</mainClass>
<mainClass>org.apache.hive.beeline.BeeLine</mainClass>
</transformer>
</transformers>
<filters>
Expand Down
2 changes: 1 addition & 1 deletion beeline/src/java/org/apache/hive/beeline/BeeLine.java
Original file line number Diff line number Diff line change
Expand Up @@ -1831,7 +1831,7 @@ String getPromptForCli() {
String prompt;
// read prompt configuration and substitute variables.
HiveConf conf = getCommands().getHiveConf(true);
prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = getCommands().substituteVariables(conf, prompt);
return prompt + getFormattedDb() + "> ";
}
Expand Down
4 changes: 2 additions & 2 deletions beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ public void updateBeeLineOptsFromConf() {
if (conf == null) {
conf = beeLine.getCommands().getHiveConf(false);
}
setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS));
setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS));
}
}

Expand Down Expand Up @@ -529,7 +529,7 @@ public boolean getShowDbInPrompt() {
return showDbInPrompt;
} else {
HiveConf conf = beeLine.getCommands().getHiveConf(true);
return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB);
return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ private void verifyCMD(String CMD, String keywords, OutputStream os, String[] op
public static void init(){
// something changed scratch dir permissions, so test can't execute
HiveConf hiveConf = new HiveConf();
String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
File file = new File(scratchDir);
if (file.exists()) {
file.setWritable(true, false);
Expand Down
2 changes: 1 addition & 1 deletion classification/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
<version>4.0.0-beta-2-SNAPSHOT</version>
<version>4.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>hive-classification</artifactId>
Expand Down
2 changes: 1 addition & 1 deletion cli/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
<version>4.0.0-beta-2-SNAPSHOT</version>
<version>4.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>hive-cli</artifactId>
Expand Down
7 changes: 4 additions & 3 deletions cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ public void handle(Signal signal) {
ret = processCmd(command.toString());
lastRet = ret;
} catch (CommandProcessorException e) {
boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS);
if (!ignoreErrors) {
throw e;
}
Expand Down Expand Up @@ -773,7 +773,7 @@ public int run(String[] args) throws Exception {
}

// read prompt configuration and substitute variables.
prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
Expand Down Expand Up @@ -809,6 +809,7 @@ public Map<String, String> getHiveVariable() {
} catch (CommandProcessorException e) {
return e.getResponseCode();
} finally {
SessionState.endStart(ss);
ss.resetThreadName();
ss.close();
}
Expand Down Expand Up @@ -936,7 +937,7 @@ protected void setupConsoleReader() throws IOException {
* @return String to show user for current db value
*/
private static String getFormattedDb(HiveConf conf, CliSessionState ss) {
if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB)) {
if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB)) {
return "";
}
//BUG: This will not work in remote mode - HIVE-5153
Expand Down
2 changes: 1 addition & 1 deletion common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
<version>4.0.0-beta-2-SNAPSHOT</version>
<version>4.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>hive-common</artifactId>
Expand Down
12 changes: 6 additions & 6 deletions common/src/java/org/apache/hadoop/hive/common/LogUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -117,11 +117,11 @@ public static String initHiveLog4jCommon(HiveConf conf, ConfVars confVarName)
// property specified file found in local file system
// use the specified file
if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID);
if(queryId == null || (queryId = queryId.trim()).isEmpty()) {
queryId = "unknown-" + System.currentTimeMillis();
}
System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(), queryId);
}
final boolean async = checkAndSetAsyncLogging(conf);
// required for MDC based routing appender so that child threads can inherit the MDC context
Expand Down Expand Up @@ -157,8 +157,8 @@ private static String initHiveLog4jDefault(
if (hive_l4j == null) {
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
}
System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(),
HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
break;
case HIVE_LOG4J_FILE:
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
Expand Down Expand Up @@ -216,8 +216,8 @@ public static String maskIfPassword(String key, String value) {
*/
public static void registerLoggingContext(Configuration conf) {
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVESESSIONID));
MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SESSION_ID));
MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
MDC.put(OPERATIONLOG_LEVEL_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
MDC.put(OPERATIONLOG_LOCATION_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION));
l4j.info("Thread context registration is done.");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ public class ServerUtils {

public static void cleanUpScratchDir(HiveConf hiveConf) {
if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) {
String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
try {
Path jobScratchDir = new Path(hiveScratchDir);
LOG.info("Cleaning scratchDir : " + hiveScratchDir);
Expand Down
Loading

0 comments on commit fce8f09

Please sign in to comment.