本文整理了Java中org.apache.hadoop.mapred.QueueManager.dumpConfiguration
方法的一些代码示例,展示了QueueManager.dumpConfiguration
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。QueueManager.dumpConfiguration
方法的具体详情如下:
包路径:org.apache.hadoop.mapred.QueueManager
类名称:QueueManager
方法名:dumpConfiguration
[英]method to perform depth-first search and write the parameters of every queue in JSON format.
[中]方法执行深度优先搜索,并以JSON格式写入每个队列的参数。
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Writes a JSON dump of the entire queue hierarchy to the given writer.
 *
 * @param out  the writer that receives the JSON dump
 * @param conf the configuration describing the queue hierarchy
 * @throws IOException if writing the dump fails
 */
static void dumpConfiguration(Writer out, Configuration conf) throws IOException {
  // Delegate with a null config-file path: dump from the live configuration.
  dumpConfiguration(out, null, conf);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
 * Writes a JSON dump of the entire queue hierarchy to the given writer.
 *
 * @param out  the writer that receives the JSON dump
 * @param conf the configuration describing the queue hierarchy
 * @throws IOException if writing the dump fails
 */
static void dumpConfiguration(Writer out, Configuration conf) throws IOException {
  // Delegate with a null config-file path: dump from the live configuration.
  dumpConfiguration(out, null, conf);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
 * Writes a JSON dump of the entire queue hierarchy to the given writer.
 *
 * @param out  the writer that receives the JSON dump
 * @param conf the configuration describing the queue hierarchy
 * @throws IOException if writing the dump fails
 */
static void dumpConfiguration(Writer out, Configuration conf) throws IOException {
  // Delegate with a null config-file path: dump from the live configuration.
  dumpConfiguration(out, null, conf);
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Writes a JSON dump of the entire queue hierarchy to the given writer.
 *
 * @param out  the writer that receives the JSON dump
 * @param conf the configuration describing the queue hierarchy
 * @throws IOException if writing the dump fails
 */
static void dumpConfiguration(Writer out, Configuration conf) throws IOException {
  // Delegate with a null config-file path: dump from the live configuration.
  dumpConfiguration(out, null, conf);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/**
 * Writes a JSON dump of the entire queue hierarchy to the given writer.
 *
 * @param out  the writer that receives the JSON dump
 * @param conf the configuration describing the queue hierarchy
 * @throws IOException if writing the dump fails
 */
static void dumpConfiguration(Writer out, Configuration conf) throws IOException {
  // Delegate with a null config-file path: dump from the live configuration.
  dumpConfiguration(out, null, conf);
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
 * Dumps the configuration properties in JSON format: first the default
 * job configuration, then the queue configuration, each followed by a newline.
 * @param writer the {@link Writer} object to which the output is written
 * @throws IOException if writing to the writer fails
 */
private static void dumpConfiguration(Writer writer) throws IOException {
Configuration.dumpConfiguration(new JobConf(), writer);
writer.write("\n");
// get the QueueManager configuration properties
QueueManager.dumpConfiguration(writer);
writer.write("\n");
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-core
QueueManager.dumpConfiguration(writer, f.getAbsolutePath(), conf);
String result = writer.toString();
assertTrue(result
QueueManager.dumpConfiguration(writer, conf);
result = writer.toString();
assertTrue(result.contains("{\"queues\":[{\"name\":\"default\",\"state\":\"running\",\"acl_submit_job\":\"*\",\"acl_administer_jobs\":\"*\",\"properties\":[],\"children\":[]},{\"name\":\"q1\",\"state\":\"running\",\"acl_submit_job\":\" \",\"acl_administer_jobs\":\" \",\"properties\":[],\"children\":[{\"name\":\"q1:q2\",\"state\":\"running\",\"acl_submit_job\":\" \",\"acl_administer_jobs\":\" \",\"properties\":["));
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Start the JobTracker process. This is used only for debugging. As a rule,
 * JobTracker should be run as part of the DFS Namenode process.
 *
 * @param argv either empty (run the tracker service) or the single flag
 *             {@code -dumpConfiguration} (print config as JSON and exit)
 * @throws IOException          if startup fails with an I/O error
 * @throws InterruptedException if the tracker service is interrupted
 */
public static void main(String[] argv)
    throws IOException, InterruptedException {
  StringUtils.startupShutdownMessage(JobTracker.class, argv, LOG);
  try {
    if (argv.length == 0) {
      // No arguments: run the tracker service until it terminates.
      JobTracker tracker = startTracker(new JobConf());
      tracker.offerService();
    } else if (argv.length == 1 && "-dumpConfiguration".equals(argv[0])) {
      // Dump the job configuration, then the queue configuration, to stdout.
      dumpConfiguration(new PrintWriter(System.out));
      System.out.println();
      QueueManager.dumpConfiguration(new PrintWriter(System.out),
          new Configuration());
    } else {
      // Anything else is a usage error.
      System.out.println("usage: JobTracker [-dumpConfiguration]");
      System.exit(-1);
    }
  } catch (Throwable e) {
    LOG.fatal(StringUtils.stringifyException(e));
    System.exit(-1);
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred-test
Configuration conf = new Configuration(false);
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
QueueManager.dumpConfiguration(out, QUEUES_CONFIG_FILE_PATH, conf);
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
dumpGenerator.writeFieldName("queues");
dumpGenerator.writeStartArray();
dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
dumpGenerator.writeEndArray();
dumpGenerator.writeEndObject();
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
dumpGenerator.writeFieldName("queues");
dumpGenerator.writeStartArray();
dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
dumpGenerator.writeEndArray();
dumpGenerator.writeEndObject();
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
dumpGenerator.writeFieldName("queues");
dumpGenerator.writeStartArray();
dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
dumpGenerator.writeEndArray();
dumpGenerator.writeEndObject();
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
dumpGenerator.writeFieldName("queues");
dumpGenerator.writeStartArray();
dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
dumpGenerator.writeEndArray();
dumpGenerator.writeEndObject();
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
dumpGenerator.writeFieldName("queues");
dumpGenerator.writeStartArray();
dumpConfiguration(dumpGenerator,parser.getRoot().getChildren());
dumpGenerator.writeEndArray();
dumpGenerator.writeEndObject();
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
dumpGenerator.writeStartArray();
if (childQueues != null && childQueues.size() > 0) {
dumpConfiguration(dumpGenerator, childQueues);
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
dumpGenerator.writeStartArray();
if (childQueues != null && childQueues.size() > 0) {
dumpConfiguration(dumpGenerator, childQueues);
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
dumpGenerator.writeStartArray();
if (childQueues != null && childQueues.size() > 0) {
dumpConfiguration(dumpGenerator, childQueues);
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
dumpGenerator.writeStartArray();
if (childQueues != null && childQueues.size() > 0) {
dumpConfiguration(dumpGenerator, childQueues);
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
dumpGenerator.writeStartArray();
if (childQueues != null && childQueues.size() > 0) {
dumpConfiguration(dumpGenerator, childQueues);
内容来源于网络,如有侵权,请联系作者删除!