org.apache.spark.api.java.JavaSparkContext.addFile()方法的使用及代码示例

x33g5p2x  于2022-01-21 转载在 其他  
字(3.3k)|赞(0)|评价(0)|浏览(101)

本文整理了Java中org.apache.spark.api.java.JavaSparkContext.addFile()方法的一些代码示例,展示了JavaSparkContext.addFile()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。JavaSparkContext.addFile()方法的具体详情如下:
包路径:org.apache.spark.api.java.JavaSparkContext
类名称:JavaSparkContext
方法名:addFile

JavaSparkContext.addFile介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Splits the comma-separated {@code addedFiles} string and registers each
 * entry with the Spark context exactly once; {@code localFiles} tracks which
 * paths have already been shipped so duplicates are skipped.
 *
 * @param addedFiles comma-separated list of file paths; null is treated as empty
 */
private void addResources(String addedFiles) {
  for (String file : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    if (localFiles.contains(file)) {
      continue; // already registered with the Spark context
    }
    localFiles.add(file);
    sc.addFile(file);
  }
}

代码示例来源:origin: apache/hive

// Registers {@code path} with the Spark context obtained from the job
// context, making the file available to executors. Returns null because the
// job is executed purely for this side effect.
// NOTE(review): {@code path} is a field declared outside this excerpt —
// presumably set by the enclosing job class; confirm against the caller.
@Override
public Serializable call(JobContext jc) throws Exception {
 jc.sc().addFile(path);
 return null;
}

代码示例来源:origin: apache/drill

/**
 * Registers every path in the comma-separated {@code addedFiles} string with
 * the Spark context, using {@code localFiles} as a seen-set so each file is
 * added at most once.
 *
 * @param addedFiles comma-separated file paths; null is normalized to ""
 */
private void addResources(String addedFiles) {
  for (String candidate : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    boolean alreadyAdded = localFiles.contains(candidate);
    if (!alreadyAdded) {
      localFiles.add(candidate);
      sc.addFile(candidate);
    }
  }
}

代码示例来源:origin: databricks/learning-spark

sc.addFile(distScript);
JavaRDD<String> pipeInputs = contactsContactLists.values().map(new VerifyCallLogs()).flatMap(
 new FlatMapFunction<CallLog[], String>() { public Iterable<String> call(CallLog[] calls) {

代码示例来源:origin: com.github.hyukjinkwon/spark-client

// Side-effect-only job step: ships {@code path} to executors via the Spark
// context and returns null (no result payload for the caller).
// NOTE(review): {@code path} comes from the enclosing class, not shown here.
@Override
public Serializable call(JobContext jc) throws Exception {
 jc.sc().addFile(path);
 return null;
}

代码示例来源:origin: org.spark-project.hive/spark-client

// Adds {@code path} to the Spark context so executors can fetch it; the
// null return signals there is no meaningful job result.
// NOTE(review): {@code path} is a field of the enclosing (unseen) class.
@Override
public Serializable call(JobContext jc) throws Exception {
 jc.sc().addFile(path);
 return null;
}

代码示例来源:origin: com.cloudera.livy/livy-rsc

// Thin convenience wrapper: forwards {@code path} to the Spark context held
// by the {@code jc} field so the file is distributed to executors.
protected void addFile(String path) {
 jc.sc().addFile(path);
}

代码示例来源:origin: com.facebook.presto.hive/hive-apache

/**
 * Parses {@code addedFiles} as a comma-separated list and ships each
 * not-yet-seen entry to the Spark context, recording it in
 * {@code localFiles} to suppress repeat registrations.
 *
 * @param addedFiles comma-separated file paths; may be null or empty
 */
private void addResources(String addedFiles) {
  Iterable<String> entries = CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles));
  for (String entry : entries) {
    if (localFiles.contains(entry)) {
      continue; // skip files that were already shipped
    }
    localFiles.add(entry);
    sc.addFile(entry);
  }
}

代码示例来源:origin: org.apache.pig/pig

.toExternalForm());
}else if( resourceType == ResourceType.FILE){
  sparkContext.addFile(resourcePath.toURI().toURL()
      .toExternalForm());

代码示例来源:origin: org.apache.crunch/crunch-spark

/**
 * Ships every entry of the Hadoop DistributedCache to Spark executors via
 * {@code sparkContext.addFile()}. Entries that are plain files are passed
 * through unchanged; entries that are not files (presumably directories —
 * confirm) are first merged into a single sibling file named
 * "sparkreadable-&lt;name&gt;" and that merged file is shipped instead.
 * The cache file list in {@code conf} is then rewritten with the
 * (possibly substituted) URIs.
 *
 * @throws RuntimeException wrapping any IOException from filesystem access
 */
private void distributeFiles() {
  try {
    URI[] cacheUris = DistributedCache.getCacheFiles(conf);
    if (cacheUris == null) {
      return; // nothing registered in the distributed cache
    }
    URI[] rewritten = new URI[cacheUris.length];
    for (int idx = 0; idx < cacheUris.length; idx++) {
      Path cachePath = new Path(cacheUris[idx]);
      FileSystem fs = cachePath.getFileSystem(conf);
      if (fs.isFile(cachePath)) {
        rewritten[idx] = cacheUris[idx];
      } else {
        // Merge non-file entry into one file Spark can distribute.
        Path merged = new Path(cachePath.getParent(), "sparkreadable-" + cachePath.getName());
        FileUtil.copyMerge(fs, cachePath, fs, merged, false, conf, "");
        rewritten[idx] = merged.toUri();
      }
      sparkContext.addFile(rewritten[idx].toString());
    }
    DistributedCache.setCacheFiles(rewritten, conf);
  } catch (IOException e) {
    throw new RuntimeException("Error retrieving cache files", e);
  }
}

代码示例来源:origin: apache/crunch

/**
 * Makes the Hadoop DistributedCache contents available to Spark executors.
 * For each cached URI: a plain file is added to the Spark context as-is; a
 * non-file path (presumably a directory — confirm) is first collapsed with
 * {@code FileUtil.copyMerge} into a "sparkreadable-" prefixed sibling file,
 * which is added instead. Finally the cache list in {@code conf} is updated
 * to point at the URIs that were actually shipped.
 *
 * @throws RuntimeException wrapping any IOException from the filesystem
 */
private void distributeFiles() {
  try {
    URI[] cached = DistributedCache.getCacheFiles(conf);
    if (cached == null) {
      return; // distributed cache is empty — nothing to ship
    }
    URI[] shipped = new URI[cached.length];
    for (int i = 0; i < cached.length; i++) {
      Path source = new Path(cached[i]);
      FileSystem fs = source.getFileSystem(conf);
      if (fs.isFile(source)) {
        shipped[i] = cached[i];
      } else {
        // Collapse the entry into a single mergeable file for Spark.
        Path mergeTarget = new Path(source.getParent(), "sparkreadable-" + source.getName());
        FileUtil.copyMerge(fs, source, fs, mergeTarget, false, conf, "");
        shipped[i] = mergeTarget.toUri();
      }
      sparkContext.addFile(shipped[i].toString());
    }
    DistributedCache.setCacheFiles(shipped, conf);
  } catch (IOException e) {
    throw new RuntimeException("Error retrieving cache files", e);
  }
}

相关文章

微信公众号

最新文章

更多