Usage of the org.apache.hadoop.mapred.jobcontrol.Job.<init>() method, with code examples


This article collects a number of Java code examples for the org.apache.hadoop.mapred.jobcontrol.Job.<init>() method, showing how Job.<init>() is used in practice. The examples come from selected open-source projects published on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Job.<init>() method are as follows:
Package: org.apache.hadoop.mapred.jobcontrol
Class name: Job
Method name: <init>

Job.<init> overview

Construct a job. The examples below use both the one-argument form, new Job(jobConf), and the two-argument form, new Job(jobConf, dependingJobs), where dependingJobs holds the jobs this job depends on.
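
As a quick orientation before the quoted examples, here is a minimal sketch of how the two constructor forms are typically combined with a JobControl. The class name JobInitSketch and the empty JobConf objects are placeholders for illustration; this is not code taken from any of the projects cited below.

import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;

public class JobInitSketch {
  public static void main(String[] args) throws IOException {
    // Placeholder configurations; a real JobConf would set mapper/reducer
    // classes, input/output paths, and so on.
    JobConf upstreamConf = new JobConf();
    JobConf downstreamConf = new JobConf();

    // One-argument form: a job with no declared dependencies.
    Job upstream = new Job(upstreamConf);

    // Two-argument form: pass the list of jobs this job depends on.
    ArrayList<Job> deps = new ArrayList<Job>();
    deps.add(upstream);
    Job downstream = new Job(downstreamConf, deps);

    // The jobs are then handed to a JobControl, which runs them in
    // dependency order.
    JobControl control = new JobControl("JobInitSketch");
    control.addJob(upstream);
    control.addJob(downstream);
  }
}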

Code examples

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-core

public static JobControl createValueAggregatorJobs(String args[]
 , Class<? extends ValueAggregatorDescriptor>[] descriptors) throws IOException {
 
 JobControl theControl = new JobControl("ValueAggregatorJobs");
 ArrayList<Job> dependingJobs = new ArrayList<Job>();
 JobConf aJobConf = createValueAggregatorJob(args);
 if(descriptors != null)
  setAggregatorDescriptors(aJobConf, descriptors);
 Job aJob = new Job(aJobConf, dependingJobs);
 theControl.addJob(aJob);
 return theControl;
}
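
The JobControl returned by createValueAggregatorJobs still has to be driven to completion. One common pattern, shown here purely as an illustrative sketch (the class name RunAggregatorJobs and the 500 ms polling interval are arbitrary choices, not part of the quoted example), is to run the JobControl on its own thread and poll allFinished():

import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

public class RunAggregatorJobs {
  public static void main(String[] args) throws Exception {
    // Build the JobControl as in the example above (single-argument variant).
    JobControl control = ValueAggregatorJob.createValueAggregatorJobs(args);

    // JobControl implements Runnable: drive it on a background thread.
    Thread driver = new Thread(control);
    driver.start();
    while (!control.allFinished()) {
      Thread.sleep(500);   // poll until every job reaches a terminal state
    }
    control.stop();        // let the JobControl thread exit

    for (Job failed : control.getFailedJobs()) {
      System.err.println("Failed job: " + failed.getJobName());
    }
  }
}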

The same createValueAggregatorJobs method appears verbatim in the following artifacts as well: com.github.jiayuhan-it/hadoop-mapreduce-client-core, org.apache.hadoop/hadoop-mapred, io.prestosql.hadoop/hadoop-apache, io.hops/hadoop-mapreduce-client-core, com.facebook.hadoop/hadoop-core, and org.jvnet.hudson.hadoop/hadoop-core.

Code example source: org.apache.hadoop/hadoop-mapred-test

public void testGetAssignedJobId() throws Exception {
 JobConf jc = new JobConf();
 Job j = new Job(jc);
 //Just make sure no exception is thrown
 assertNull(j.getAssignedJobID());
 org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class);
 org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0);
 when(mockjob.getJobID()).thenReturn(jid);
 j.setJob(mockjob);
 JobID expected = new JobID("test",0);
 assertEquals(expected, j.getAssignedJobID());
 verify(mockjob).getJobID();
}

Code example source: org.apache.hadoop/hadoop-mapred-test (fragment of a larger test)

inPaths_1.add(indir);
JobConf jobConf_1 = JobControlTestUtils.createCopyJob(inPaths_1, outdir_1);
Job job_1 = new Job(jobConf_1, dependingJobs);
ArrayList<Path> inPaths_2 = new ArrayList<Path>();
inPaths_2.add(indir);
JobConf jobConf_2 = JobControlTestUtils.createCopyJob(inPaths_2, outdir_2);
Job job_2 = new Job(jobConf_2, dependingJobs);
dependingJobs.add(job_1);
dependingJobs.add(job_2);
Job job_3 = new Job(jobConf_3, dependingJobs);
dependingJobs = new ArrayList<Job>();
dependingJobs.add(job_3);
Job job_4 = new Job(jobConf_4, dependingJobs);

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

public Job getCopyJob() throws Exception {
 Configuration defaults = new Configuration();
 FileSystem fs = FileSystem.get(defaults);
 Path rootDataDir =
   new Path(System.getProperty("test.build.data", "."),
    "TestJobControlData");
 Path indir = new Path(rootDataDir, "indir");
 Path outdir_1 = new Path(rootDataDir, "outdir_1");
 JobControlTestUtils.cleanData(fs, indir);
 JobControlTestUtils.generateData(fs, indir);
 JobControlTestUtils.cleanData(fs, outdir_1);
 ArrayList<Job> dependingJobs = null;
 ArrayList<Path> inPaths_1 = new ArrayList<Path>();
 inPaths_1.add(indir);
 JobConf jobConf_1 = JobControlTestUtils.createCopyJob(inPaths_1, outdir_1);
 Job job_1 = new Job(jobConf_1, dependingJobs);
 return job_1;
}

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-jobclient — the same testGetAssignedJobId test as shown above, with an added @Test(timeout = 30000) annotation.

Code example source: org.apache.pig/pig

Job cjob = new Job(new JobConf(conf), new ArrayList<Job>());
jobStoreMap.put(cjob,new Pair<List<POStore>, Path>(storeLocations, tmpLocation));
return cjob;

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-jobclient — the same job dependency-chain fragment as shown above under org.apache.hadoop/hadoop-mapred-test.

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

@Test(timeout = 30000)
public void testAddingDependingJob() throws Exception {
 Job job_1 = getCopyJob();
 ArrayList<Job> dependingJobs = new ArrayList<Job>();
 JobControl jc = new JobControl("Test");
 jc.addJob(job_1);
 Assert.assertEquals(Job.WAITING, job_1.getState());
 Assert.assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(),
  dependingJobs)));
}
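
As the assertions above suggest, a dependency can only be registered while the target job is still in the WAITING state. The sketch below makes the same point outside a test harness; the class name DependencySketch and the empty JobConf are illustrative placeholders.

import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.Job;

public class DependencySketch {
  public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf();                     // placeholder configuration
    Job parent = new Job(conf);
    Job child = new Job(conf, new ArrayList<Job>());

    // Newly constructed jobs start in the WAITING state, so this succeeds.
    boolean added = child.addDependingJob(parent);
    System.out.println("dependency added: " + added); // expected: true

    // Once a JobControl thread has moved `child` past WAITING,
    // further addDependingJob(...) calls return false.
  }
}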
