本文整理了Java中org.apache.spark.api.java.JavaSparkContext.addFile()
方法的一些代码示例,展示了JavaSparkContext.addFile()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。JavaSparkContext.addFile()
方法的具体详情如下:
包路径:org.apache.spark.api.java.JavaSparkContext
类名称:JavaSparkContext
方法名:addFile
暂无
代码示例来源:origin: apache/hive
/**
 * Registers each file from a comma-separated list with the Spark context,
 * skipping entries that were already registered locally.
 *
 * @param addedFiles comma-separated file paths; {@code null} is treated as empty
 */
private void addResources(String addedFiles) {
  // Normalize null input to "" before splitting so a null argument is a no-op.
  Iterable<String> files = CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles));
  for (String file : files) {
    if (localFiles.contains(file)) {
      continue; // already tracked and shipped; avoid duplicate addFile calls
    }
    localFiles.add(file);
    sc.addFile(file);
  }
}
代码示例来源:origin: apache/hive
@Override
// Job body: registers {@code path} with the Spark context so it is distributed
// to executors. Returns null because the job carries no result value.
// NOTE(review): assumes jc.sc() exposes JavaSparkContext#addFile — confirm
// against the spark-client JobContext interface.
public Serializable call(JobContext jc) throws Exception {
jc.sc().addFile(path);
return null;
}
代码示例来源:origin: apache/drill
/**
 * Adds every not-yet-registered file from a comma-separated list to both the
 * local bookkeeping set and the Spark context.
 *
 * @param addedFiles comma-separated file paths; {@code null} is treated as empty
 */
private void addResources(String addedFiles) {
  for (String file : CSV_SPLITTER.split(Strings.nullToEmpty(addedFiles))) {
    boolean alreadyRegistered = localFiles.contains(file);
    if (!alreadyRegistered) {
      // Record locally first, then ship to the cluster.
      localFiles.add(file);
      sc.addFile(file);
    }
  }
}
代码示例来源:origin: databricks/learning-spark
sc.addFile(distScript);
JavaRDD<String> pipeInputs = contactsContactLists.values().map(new VerifyCallLogs()).flatMap(
new FlatMapFunction<CallLog[], String>() { public Iterable<String> call(CallLog[] calls) {
代码示例来源:origin: com.github.hyukjinkwon/spark-client
@Override
// Job body: registers {@code path} with the Spark context so it is distributed
// to executors. Returns null because the job carries no result value.
// NOTE(review): assumes jc.sc() exposes JavaSparkContext#addFile — confirm
// against the spark-client JobContext interface.
public Serializable call(JobContext jc) throws Exception {
jc.sc().addFile(path);
return null;
}
代码示例来源:origin: org.spark-project.hive/spark-client
@Override
// Job body: registers {@code path} with the Spark context so it is distributed
// to executors. Returns null because the job carries no result value.
// NOTE(review): assumes jc.sc() exposes JavaSparkContext#addFile — confirm
// against the spark-client JobContext interface.
public Serializable call(JobContext jc) throws Exception {
jc.sc().addFile(path);
return null;
}
代码示例来源:origin: com.cloudera.livy/livy-rsc
// Thin convenience wrapper: registers the file at {@code path} with the
// underlying JavaSparkContext so executors can download it.
protected void addFile(String path) {
jc.sc().addFile(path);
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Splits a comma-separated list of file paths and registers each new one with
 * the Spark context, remembering it in {@code localFiles} to avoid re-adding.
 *
 * @param addedFiles comma-separated file paths; {@code null} is treated as empty
 */
private void addResources(String addedFiles) {
  String normalized = Strings.nullToEmpty(addedFiles);
  for (String entry : CSV_SPLITTER.split(normalized)) {
    if (!localFiles.contains(entry)) {
      localFiles.add(entry);
      sc.addFile(entry);
    }
  }
}
代码示例来源:origin: org.apache.pig/pig
.toExternalForm());
}else if( resourceType == ResourceType.FILE){
sparkContext.addFile(resourcePath.toURI().toURL()
.toExternalForm());
代码示例来源:origin: org.apache.crunch/crunch-spark
/**
 * Ships every DistributedCache entry to Spark executors via
 * {@code sparkContext.addFile}. Directory entries are first merged into a
 * single sibling file (prefixed "sparkreadable-") because addFile expects a
 * file, then the cache configuration is rewritten with the resulting URIs.
 *
 * @throws RuntimeException wrapping any IOException from the filesystem
 */
private void distributeFiles() {
  try {
    URI[] cacheUris = DistributedCache.getCacheFiles(conf);
    if (cacheUris == null) {
      return; // nothing registered in the cache
    }
    URI[] distributed = new URI[cacheUris.length];
    for (int idx = 0; idx < cacheUris.length; idx++) {
      Path cachePath = new Path(cacheUris[idx]);
      FileSystem fs = cachePath.getFileSystem(conf);
      if (fs.isFile(cachePath)) {
        // Plain files can be shipped as-is.
        distributed[idx] = cacheUris[idx];
      } else {
        // Merge directory contents into one file next to the original
        // (deleteSource = false, no separator between parts).
        Path merged = new Path(cachePath.getParent(), "sparkreadable-" + cachePath.getName());
        FileUtil.copyMerge(fs, cachePath, fs, merged, false, conf, "");
        distributed[idx] = merged.toUri();
      }
      sparkContext.addFile(distributed[idx].toString());
    }
    DistributedCache.setCacheFiles(distributed, conf);
  } catch (IOException e) {
    throw new RuntimeException("Error retrieving cache files", e);
  }
}
代码示例来源:origin: apache/crunch
// Ships each DistributedCache entry to Spark executors via addFile.
// Since addFile expects a file, directory entries are merged into a single
// "sparkreadable-" sibling file first; the cache config is then rewritten
// with the URIs that were actually shipped.
private void distributeFiles() {
try {
URI[] uris = DistributedCache.getCacheFiles(conf);
if (uris != null) {
URI[] outURIs = new URI[uris.length];
for (int i = 0; i < uris.length; i++) {
Path path = new Path(uris[i]);
FileSystem fs = path.getFileSystem(conf);
if (fs.isFile(path)) {
// Plain file: ship unchanged.
outURIs[i] = uris[i];
} else {
// Directory: merge contents into one file (deleteSource = false,
// empty separator) so Spark's addFile can handle it.
Path mergePath = new Path(path.getParent(), "sparkreadable-" + path.getName());
FileUtil.copyMerge(fs, path, fs, mergePath, false, conf, "");
outURIs[i] = mergePath.toUri();
}
sparkContext.addFile(outURIs[i].toString());
}
// Point the cache at the URIs that were actually distributed.
DistributedCache.setCacheFiles(outURIs, conf);
}
} catch (IOException e) {
throw new RuntimeException("Error retrieving cache files", e);
}
}
内容来源于网络,如有侵权,请联系作者删除!