Usage and code examples of org.apache.hadoop.mapreduce.InputSplit.getLocations()


This article collects code examples of the Java method org.apache.hadoop.mapreduce.InputSplit.getLocations() and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of InputSplit.getLocations() are as follows:
Package path: org.apache.hadoop.mapreduce.InputSplit
Class name: InputSplit
Method name: getLocations

InputSplit.getLocations overview

Get the list of nodes, by name, where the data for the split would be local. The locations do not need to be serialized.
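Before the collected examples, here is a minimal, self-contained sketch of how getLocations() is typically called: it configures a Job, asks TextInputFormat for its splits, and prints the host names that are local to each split. The input path /tmp/input is a placeholder assumption; substitute real input data on your cluster.

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class SplitLocationsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf);
    // Placeholder input path; point this at real data before running.
    FileInputFormat.addInputPath(job, new Path("/tmp/input"));

    TextInputFormat inputFormat = new TextInputFormat();
    List<InputSplit> splits = inputFormat.getSplits(job);

    for (InputSplit split : splits) {
      // getLocations() may throw InterruptedException as well as IOException,
      // which is why the examples below either declare or wrap it.
      String[] hosts = split.getLocations();
      System.out.println(split + " -> " + String.join(", ", hosts));
    }
  }
}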

Code examples

Code example source: origin: apache/flink

@Override
public String[] getHostnames() {
  try {
    return mapreduceInputSplit.getLocations();
  }
  catch (Exception e) {
    return new String[0];
  }
}

Code example source: origin: apache/ignite

res.add(HadoopUtils.wrapSplit(id, nativeSplit, nativeSplit.getLocations()));

Code example source: origin: apache/hive

protected static FileSplit getFileSplit(Job vectorJob) throws IOException, InterruptedException {
 ParquetInputFormat parquetInputFormat = new ParquetInputFormat(GroupReadSupport.class);
 InputSplit split = (InputSplit) parquetInputFormat.getSplits(vectorJob).get(0);
 FileSplit fsplit = new FileSplit(file, 0L, split.getLength(), split.getLocations());
 return fsplit;
}

Code example source: origin: apache/hbase

TableSnapshotRegionSplit snapshotRegionSplit = (TableSnapshotRegionSplit) split;
if (localityEnabled) {
 Assert.assertTrue(split.getLocations() != null && split.getLocations().length != 0);
} else {
 Assert.assertTrue(split.getLocations() != null && split.getLocations().length == 0);
}

Code example source: origin: ch.cern.hadoop/hadoop-mapreduce-client-core

/**
 * getLocations from ith InputSplit.
 */
public String[] getLocation(int i) throws IOException, InterruptedException {
 return splits[i].getLocations();
}

Code example source: origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

/**
 * getLocations from ith InputSplit.
 */
public String[] getLocation(int i) throws IOException, InterruptedException {
 return splits[i].getLocations();
}

Code example source: origin: com.alibaba.blink/flink-hadoop-compatibility

@Override
public String[] getHostnames() {
  try {
    return mapreduceInputSplit.getLocations();
  }
  catch (Exception e) {
    return new String[0];
  }
}

Code example source: origin: io.hops/hadoop-mapreduce-client-core

/**
 * getLocations from ith InputSplit.
 */
public String[] getLocation(int i) throws IOException, InterruptedException {
 return splits[i].getLocations();
}

Code example source: origin: org.apache.hadoop/hadoop-mapreduce-client-core

@Test
public void testSplitLocationInfo() throws Exception {
 Configuration conf = getConfiguration();
 conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
   "test:///a1/a2");
 Job job = Job.getInstance(conf);
 TextInputFormat fileInputFormat = new TextInputFormat();
 List<InputSplit> splits = fileInputFormat.getSplits(job);
 String[] locations = splits.get(0).getLocations();
 Assert.assertEquals(2, locations.length);
 SplitLocationInfo[] locationInfo = splits.get(0).getLocationInfo();
 Assert.assertEquals(2, locationInfo.length);
 SplitLocationInfo localhostInfo = locations[0].equals("localhost") ?
   locationInfo[0] : locationInfo[1];
 SplitLocationInfo otherhostInfo = locations[0].equals("otherhost") ?
   locationInfo[0] : locationInfo[1];
 Assert.assertTrue(localhostInfo.isOnDisk());
 Assert.assertTrue(localhostInfo.isInMemory());
 Assert.assertTrue(otherhostInfo.isOnDisk());
 Assert.assertFalse(otherhostInfo.isInMemory());
}

Code example source: origin: com.twitter.elephantbird/elephant-bird-core

@Override
public String[] getLocations() throws IOException {
 try {
  return realSplit.getLocations();
 } catch (InterruptedException e) {
  throw new IOException(e);
 }
}

Code example source: origin: com.google.cloud.bigdataoss/util-hadoop

public static String toString(InputSplit input) throws IOException, InterruptedException {
 if (input == null) {
  return "null";
 }
 String result = "InputSplit::";
 result += " length:" + input.getLength();
 result += " locations: " + Arrays.toString(input.getLocations());
 result += " toString(): " + input.toString();
 return result;
}

Code example source: origin: io.hops/hadoop-mapreduce-client-core

public SplitMetaInfo(InputSplit split, long startOffset) throws IOException {
 try {
  this.locations = split.getLocations();
  this.inputDataLength = split.getLength();
  this.startOffset = startOffset;
 } catch (InterruptedException ie) {
  throw new IOException(ie);
 }
}

Code example source: origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

public SplitMetaInfo(InputSplit split, long startOffset) throws IOException {
 try {
  this.locations = split.getLocations();
  this.inputDataLength = split.getLength();
  this.startOffset = startOffset;
 } catch (InterruptedException ie) {
  throw new IOException(ie);
 }
}

Code example source: origin: io.prestosql.hadoop/hadoop-apache

public SplitMetaInfo(InputSplit split, long startOffset) throws IOException {
 try {
  this.locations = split.getLocations();
  this.inputDataLength = split.getLength();
  this.startOffset = startOffset;
 } catch (InterruptedException ie) {
  throw new IOException(ie);
 }
}

Code example source: origin: org.apache.hadoop/hadoop-mapred

public SplitMetaInfo(InputSplit split, long startOffset) throws IOException {
 try {
  this.locations = split.getLocations();
  this.inputDataLength = split.getLength();
  this.startOffset = startOffset;
 } catch (InterruptedException ie) {
  throw new IOException(ie);
 }
}

Code example source: origin: org.apache.hadoop/hadoop-mapred

public TaskSplitMetaInfo(InputSplit split, long startOffset) 
throws InterruptedException, IOException {
 this(new TaskSplitIndex("", startOffset), split.getLocations(), 
   split.getLength());
}

Code example source: origin: io.prestosql.hadoop/hadoop-apache

public TaskSplitMetaInfo(InputSplit split, long startOffset) 
throws InterruptedException, IOException {
 this(new TaskSplitIndex("", startOffset), split.getLocations(), 
   split.getLength());
}

Code example source: origin: ch.cern.hadoop/hadoop-mapreduce-client-core

public TaskSplitMetaInfo(InputSplit split, long startOffset) 
throws InterruptedException, IOException {
 this(new TaskSplitIndex("", startOffset), split.getLocations(), 
   split.getLength());
}

Code example source: origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

public TaskSplitMetaInfo(InputSplit split, long startOffset) 
throws InterruptedException, IOException {
 this(new TaskSplitIndex("", startOffset), split.getLocations(), 
   split.getLength());
}

Code example source: origin: io.hops/hadoop-mapreduce-client-core

public TaskSplitMetaInfo(InputSplit split, long startOffset) 
throws InterruptedException, IOException {
 this(new TaskSplitIndex("", startOffset), split.getLocations(), 
   split.getLength());
}
