2 changes: 1 addition & 1 deletion beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1831,7 +1831,7 @@ String getPromptForCli() {
String prompt;
// read prompt configuration and substitute variables.
HiveConf conf = getCommands().getHiveConf(true);
-    prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
+    prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = getCommands().substituteVariables(conf, prompt);
return prompt + getFormattedDb() + "> ";
}
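For reviewers, a minimal sketch (not part of this PR) of how the renamed constant is consumed. It assumes a default configuration and skips the variable substitution and getFormattedDb() suffix that BeeLine adds on top:

```java
import org.apache.hadoop.hive.conf.HiveConf;

public class PromptSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Same lookup as getPromptForCli(), now via the snake_case enum constant.
    String prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
    System.out.println(prompt + "> ");   // "hive> " with default settings
  }
}
```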
4 changes: 2 additions & 2 deletions beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
@@ -291,7 +291,7 @@ public void updateBeeLineOptsFromConf() {
if (conf == null) {
conf = beeLine.getCommands().getHiveConf(false);
}
-      setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS));
+      setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS));
}
}

@@ -529,7 +529,7 @@ public boolean getShowDbInPrompt() {
return showDbInPrompt;
} else {
HiveConf conf = beeLine.getCommands().getHiveConf(true);
-      return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB);
+      return HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB);
}
}

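A short sketch, again not part of the change, of the two boolean lookups BeeLineOpts now performs with the renamed constants; both are expected to default to false unless overridden in the configuration:

```java
import org.apache.hadoop.hive.conf.HiveConf;

public class BeeLineOptsSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Drives setForce(...) in updateBeeLineOptsFromConf().
    boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS);
    // Drives getShowDbInPrompt() when the option was not set explicitly.
    boolean showDb = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB);
    System.out.println("ignore errors: " + ignoreErrors + ", show db in prompt: " + showDb);
  }
}
```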
@@ -280,7 +280,7 @@ private void verifyCMD(String CMD, String keywords, OutputStream os, String[] op
public static void init(){
// something changed scratch dir permissions, so test can't execute
HiveConf hiveConf = new HiveConf();
-    String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
+    String scratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
File file = new File(scratchDir);
if (file.exists()) {
file.setWritable(true, false);
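The point worth calling out in this test setup is that only the Java enum constant changes; the underlying property key is untouched, so existing hive-site.xml files keep working. A quick sketch, assuming the key is still hive.exec.scratchdir:

```java
import java.io.File;
import org.apache.hadoop.hive.conf.HiveConf;

public class ScratchDirSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // The rename is source-level only: SCRATCH_DIR.varname still resolves to the same key.
    String scratchDir = conf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
    System.out.println("scratch dir key: " + HiveConf.ConfVars.SCRATCH_DIR.varname);
    System.out.println("scratch dir: " + scratchDir);

    // Mirrors the permission fix-up the test init() applies.
    File file = new File(scratchDir);
    if (file.exists()) {
      file.setWritable(true, false);
    }
  }
}
```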
6 changes: 3 additions & 3 deletions cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -425,7 +425,7 @@ public void handle(Signal signal) {
ret = processCmd(command.toString());
lastRet = ret;
} catch (CommandProcessorException e) {
-        boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
+        boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS);
if (!ignoreErrors) {
throw e;
}
@@ -773,7 +773,7 @@ public int run(String[] args) throws Exception {
}

// read prompt configuration and substitute variables.
-    prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT);
+    prompt = conf.getVar(HiveConf.ConfVars.CLI_PROMPT);
prompt = new VariableSubstitution(new HiveVariableSource() {
@Override
public Map<String, String> getHiveVariable() {
@@ -936,7 +936,7 @@ protected void setupConsoleReader() throws IOException {
* @return String to show user for current db value
*/
private static String getFormattedDb(HiveConf conf, CliSessionState ss) {
-    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB)) {
+    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_PRINT_CURRENT_DB)) {
return "";
}
//BUG: This will not work in remote mode - HIVE-5153
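A compact sketch of the control flow around CLI_IGNORE_ERRORS, with a hypothetical run(...) standing in for processCmd and a plain RuntimeException standing in for CommandProcessorException:

```java
import org.apache.hadoop.hive.conf.HiveConf;

public class IgnoreErrorsSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLI_IGNORE_ERRORS);
    for (String command : args) {
      try {
        run(command);                 // hypothetical stand-in for processCmd(...)
      } catch (RuntimeException e) {
        if (!ignoreErrors) {
          throw e;                    // default behaviour: stop at the first failed command
        }
        System.err.println("ignoring failure in: " + command);
      }
    }
  }

  // Hypothetical command runner, present only to make the sketch self-contained.
  private static void run(String command) {
    if (command.isBlank()) {
      throw new RuntimeException("empty command");
    }
  }
}
```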
12 changes: 6 additions & 6 deletions common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -117,11 +117,11 @@ public static String initHiveLog4jCommon(HiveConf conf, ConfVars confVarName)
// property specified file found in local file system
// use the specified file
if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
-      String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
+      String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID);
if(queryId == null || (queryId = queryId.trim()).isEmpty()) {
queryId = "unknown-" + System.currentTimeMillis();
}
-      System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
+      System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(), queryId);
}
final boolean async = checkAndSetAsyncLogging(conf);
// required for MDC based routing appender so that child threads can inherit the MDC context
@@ -157,8 +157,8 @@ private static String initHiveLog4jDefault(
if (hive_l4j == null) {
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
}
-        System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
-            HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+        System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(),
+            HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
break;
case HIVE_LOG4J_FILE:
hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
@@ -216,8 +216,8 @@ public static String maskIfPassword(String key, String value) {
*/
public static void registerLoggingContext(Configuration conf) {
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
-      MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVESESSIONID));
-      MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+      MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SESSION_ID));
+      MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID));
MDC.put(OPERATIONLOG_LEVEL_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
MDC.put(OPERATIONLOG_LOCATION_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION));
l4j.info("Thread context registration is done.");
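A sketch of the query-id plumbing these hunks touch: look up the id via the renamed HIVE_QUERY_ID constant, fall back to a generated value, and expose it as a system property and MDC entry so a routing appender can pick it up per thread. The MDC key literal below is an assumption; LogUtils uses its own QUERYID_LOG_KEY constant.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.slf4j.MDC;

public class QueryIdSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUERY_ID);
    if (queryId == null || (queryId = queryId.trim()).isEmpty()) {
      queryId = "unknown-" + System.currentTimeMillis();   // same fallback as initHiveLog4jCommon
    }
    // ConfVars.toString() returns the property name, so the rename does not change the key.
    System.setProperty(HiveConf.ConfVars.HIVE_QUERY_ID.toString(), queryId);
    MDC.put("queryId", queryId);   // assumed key name, for illustration only
    System.out.println("query id: " + queryId);
  }
}
```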
@@ -38,7 +38,7 @@ public class ServerUtils {

public static void cleanUpScratchDir(HiveConf hiveConf) {
if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) {
-      String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCHDIR.varname);
+      String hiveScratchDir = hiveConf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
try {
Path jobScratchDir = new Path(hiveScratchDir);
LOG.info("Cleaning scratchDir : " + hiveScratchDir);
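Finally, a sketch of the guard around scratch-dir cleanup; the cleanup itself (removing the directory on the configured FileSystem) is omitted, and the output shown assumes the start-up cleanup flag keeps its default of false:

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;

public class CleanupSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) {
      String hiveScratchDir = conf.get(HiveConf.ConfVars.SCRATCH_DIR.varname);
      Path jobScratchDir = new Path(hiveScratchDir);
      System.out.println("would clean scratch dir: " + jobScratchDir);
    } else {
      System.out.println("scratch dir cleanup disabled at start-up");
    }
  }
}
```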