Usage and code examples of the org.apache.hadoop.hive.ql.exec.Utilities.getNameMessage() method


This article collects code examples of the Java method org.apache.hadoop.hive.ql.exec.Utilities.getNameMessage(), showing how Utilities.getNameMessage() is used in practice. The examples are extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Utilities.getNameMessage() method:
Package path: org.apache.hadoop.hive.ql.exec
Class name: Utilities
Method name: getNameMessage

About Utilities.getNameMessage

No description is provided in the upstream Javadoc. Judging from the call sites collected below, the method formats an exception into a single diagnostic string combining the exception's class name and its message.
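As a reference, here is a minimal, self-contained sketch of a plausible equivalent implementation together with a usage demo. The method body is an assumption inferred from the call sites below, not copied from the Hive source:

import java.io.IOException;

public class GetNameMessageDemo {

  // Plausible equivalent of Utilities.getNameMessage: combine the
  // exception's class name and its message into one diagnostic string.
  // (Assumption inferred from the call sites below.)
  static String getNameMessage(Exception e) {
    return e.getClass().getName() + "(" + e.getMessage() + ")";
  }

  public static void main(String[] args) {
    Exception e = new IOException("No space left on device");
    // Mirrors the error strings built in the examples below.
    String mesg = "Job Commit failed with exception '" + getNameMessage(e) + "'";
    System.out.println(mesg);
    // Prints: Job Commit failed with exception 'java.io.IOException(No space left on device)'
  }
}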

Code examples

Code example source: apache/hive

@VisibleForTesting
int close(TezWork work, int rc, DAGClient dagClient) {
 try {
  List<BaseWork> ws = work.getAllWork();
  for (BaseWork w: ws) {
   if (w instanceof MergeJoinWork) {
    w = ((MergeJoinWork) w).getMainWork();
   }
   for (Operator<?> op: w.getAllOperators()) {
    op.jobClose(conf, rc == 0);
   }
  }
 } catch (Exception e) {
  // jobClose needs to execute successfully otherwise fail task
  if (rc == 0) {
   rc = 3;
   String mesg = "Job Commit failed with exception '"
    + Utilities.getNameMessage(e) + "'";
   console.printError(mesg, "\n" + StringUtils.stringifyException(e));
  }
 }
 if (dagClient != null) { // null in tests
  closeDagClientWithoutEx(dagClient);
 }
 return rc;
}
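Note the convention shared by all of the close() variants in this article: a failure in Operator.jobClose() only overrides the return code when the job itself had succeeded (rc == 0), so an already-failed job keeps its original, more specific error code; 3 is the code used to signal a failed job commit.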

Code example source: apache/drill

String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;

Code example source: apache/hive

private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse {
 errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
 if(rootMsg != null) {
  errorMessage += "\n" + rootMsg;
 }
 SQLState = e.getCanonicalErrorMsg() != null ?
  e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
 downstreamError = e;
 console.printError(errorMessage + "\n"
  + org.apache.hadoop.util.StringUtils.stringifyException(e));
 throw createProcessorResponse(ret);
}
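Note how the SQLState is derived here: when the HiveException carries a canonical error message, its SQLState is used directly; otherwise ErrorMsg.findSQLState() attempts to recover one from the raw exception message.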

Code example source: apache/hive

/**
 * Close will move the temp files into the right place for the fetch
 * task. If the job has failed it will clean up the files.
 */
private int close(int rc) {
 try {
  List<BaseWork> ws = work.getAllWork();
  for (BaseWork w: ws) {
   for (Operator<?> op: w.getAllOperators()) {
    op.jobClose(conf, rc == 0);
   }
  }
 } catch (Exception e) {
  // jobClose needs to execute successfully otherwise fail task
  if (rc == 0) {
   rc = 3;
   String mesg = "Job Commit failed with exception '"
     + Utilities.getNameMessage(e) + "'";
   console.printError(mesg, "\n" + StringUtils.stringifyException(e));
   setException(e);
  }
 }
 return rc;
}

Code example source: apache/drill

private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) {
 errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
 if(rootMsg != null) {
  errorMessage += "\n" + rootMsg;
 }
 SQLState = e.getCanonicalErrorMsg() != null ?
  e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
 downstreamError = e;
 console.printError(errorMessage + "\n"
  + org.apache.hadoop.util.StringUtils.stringifyException(e));
 return createProcessorResponse(ret);
}

Code example source: apache/drill

/**
 * Close will move the temp files into the right place for the fetch
 * task. If the job has failed it will clean up the files.
 */
private int close(int rc) {
 try {
  List<BaseWork> ws = work.getAllWork();
  for (BaseWork w: ws) {
   for (Operator<?> op: w.getAllOperators()) {
    op.jobClose(conf, rc == 0);
   }
  }
 } catch (Exception e) {
  // jobClose needs to execute successfully otherwise fail task
  if (rc == 0) {
   rc = 3;
   String mesg = "Job Commit failed with exception '"
     + Utilities.getNameMessage(e) + "'";
   console.printError(mesg, "\n" + StringUtils.stringifyException(e));
   setException(e);
  }
 }
 return rc;
}

Code example source: apache/drill

int close(TezWork work, int rc) {
 try {
  List<BaseWork> ws = work.getAllWork();
  for (BaseWork w: ws) {
   if (w instanceof MergeJoinWork) {
    w = ((MergeJoinWork) w).getMainWork();
   }
   for (Operator<?> op: w.getAllOperators()) {
    op.jobClose(conf, rc == 0);
   }
  }
 } catch (Exception e) {
  // jobClose needs to execute successfully otherwise fail task
  if (rc == 0) {
   rc = 3;
   String mesg = "Job Commit failed with exception '"
    + Utilities.getNameMessage(e) + "'";
   console.printError(mesg, "\n" + StringUtils.stringifyException(e));
  }
 }
 closeDagClientWithoutEx();
 return rc;
}

Code example source: apache/hive

String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;

Code example source: apache/drill

String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;

Code example source: apache/hive

String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;
  returnVal = 3;
  String mesg = "Job Commit failed with exception '" +
    Utilities.getNameMessage(e) + "'";
  console.printError(mesg, "\n" +
    org.apache.hadoop.util.StringUtils.stringifyException(e));

Code example source: apache/drill

e.printStackTrace();
setException(e);
String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;

Code example source: apache/hive

LOG.warn("Interrupted while monitoring the Hive on Spark application, exiting");
} else {
 String msg = " with exception '" + Utilities.getNameMessage(e) + "' Last known state = " +
     (state != null ? state.name() : "UNKNOWN");
 msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;

Code example source: apache/drill

String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;
  returnVal = 3;
  String mesg = "Job Commit failed with exception '" +
    Utilities.getNameMessage(e) + "'";
  console.printError(mesg, "\n" +
    org.apache.hadoop.util.StringUtils.stringifyException(e));

Code example source: apache/hive

e.printStackTrace();
setException(e);
String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;
 success = false;
 returnVal = 3;
 String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'";
 console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));

Code example source: apache/drill

String msg = "Failed to execute spark task, with exception '" + Utilities.getNameMessage(e) + "'";

Code example source: apache/drill

e.printStackTrace();
setException(e);
String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;

Code example source: apache/hive

try {
 hookRunner.runPreDriverHooks(hookContext);
} catch (Exception e) {
 errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
 SQLState = ErrorMsg.findSQLState(e.getMessage());
 downstreamError = e;
}
// ... (a second, separate fragment, around the post-driver hooks)
try {
 hookRunner.runPostDriverHooks(hookContext);
} catch (Exception e) {
 errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
 SQLState = ErrorMsg.findSQLState(e.getMessage());
 downstreamError = e;
}

Code example source: apache/drill

errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
if (hookContext != null) {
 try {

Code example source: apache/hive

errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
if (hookContext != null) {
 try {

Code example source: apache/drill

errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
SQLState = ErrorMsg.findSQLState(e.getMessage());
downstreamError = e;
errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
SQLState = ErrorMsg.findSQLState(e.getMessage());
downstreamError = e;
