Usage and code examples of the org.apache.spark.broadcast.Broadcast.getValue() method

x33g5p2x · reposted on 2022-01-16

This article collects Java code examples of the org.apache.spark.broadcast.Broadcast.getValue() method and shows how Broadcast.getValue() is used in practice. The examples were extracted from selected projects hosted on platforms such as GitHub, Stack Overflow and Maven, so they should serve as useful references. The details of Broadcast.getValue() are:
Package: org.apache.spark.broadcast
Class: Broadcast
Method: getValue

About Broadcast.getValue

getValue() returns the value that was broadcast. Inside a task running on an executor it reads the executor-local copy of the broadcast data, so a read-only lookup structure built on the driver can be shared by all tasks without being serialized into every task closure.
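
Before the collected examples, here is a minimal, self-contained sketch of the typical pattern: build a lookup map on the driver, broadcast it once, and read it inside a transformation with getValue(). This is only an illustration; the class name, sample data and local-mode master are assumptions, not taken from any of the projects cited below.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;

public class BroadcastGetValueExample {

  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("broadcast-getvalue-demo").setMaster("local[*]");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Driver-side lookup table that every task needs.
    Map<String, Integer> countryCodes = new HashMap<>();
    countryCodes.put("DE", 49);
    countryCodes.put("US", 1);

    // Ship the table to the executors once instead of with every task closure.
    Broadcast<Map<String, Integer>> codesBroadcast = sc.broadcast(countryCodes);

    JavaRDD<String> countries = sc.parallelize(Arrays.asList("DE", "US", "DE"));

    // Inside the task, getValue() returns the executor-local copy of the broadcast map.
    JavaRDD<Integer> codes = countries.map(country -> codesBroadcast.getValue().get(country));

    System.out.println(codes.collect()); // [49, 1, 49]

    sc.stop();
  }
}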

Code examples

Code example source: SeldonIO/seldon-server

@Override
  public String call(Tuple2<String, ActionData> v1) throws Exception {
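    // broadcastVar is a broadcast lookup map from "<client>_<client_userid>" keys to internal numeric user ids; getValue() reads the executor-local copy.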
    Map<String, Integer> m = broadcastVar.getValue();
    ActionData actionData = v1._2;
    if (actionData.userid == 0) {
      String key = currentClient + "_" + actionData.client_userid;
      if (m.containsKey(key)) {
        actionData.userid = m.get(key);
      } else {
        return "";
      }
    }
    String json = JobUtils.getJsonFromActionData(actionData);
    return json;
  }
});

Code example source: deeplearning4j/dl4j-examples

@Override
  public DataSet call(String s) throws Exception {
    //Here: take a String, and map the characters to a one-hot representation
    Map<Character, Integer> cti = ctiBroadcast.getValue();
    int length = s.length();
    INDArray features = Nd4j.zeros(1, N_CHARS, length - 1);
    INDArray labels = Nd4j.zeros(1, N_CHARS, length - 1);
    char[] chars = s.toCharArray();
    int[] f = new int[3];
    int[] l = new int[3];
    for (int i = 0; i < chars.length - 2; i++) {
      f[1] = cti.get(chars[i]);
      f[2] = i;
      l[1] = cti.get(chars[i + 1]);   //Predict the next character given past and current characters
      l[2] = i;
      features.putScalar(f, 1.0);
      labels.putScalar(l, 1.0);
    }
    return new DataSet(features, labels);
  }
}

Code example source: apache/tinkerpop

@Override
public Set<String> keys() {
  if (this.inExecute)
    return this.broadcast.getValue().keySet();
  else {
    final Set<String> trueKeys = new HashSet<>();
    this.sparkMemory.forEach((key, value) -> {
      if (!value.value().isEmpty())
        trueKeys.add(key);
    });
    return Collections.unmodifiableSet(trueKeys);
  }
}

Code example source: org.qcri.rheem/rheem-spark

@Override
@SuppressWarnings("unchecked")
public <T> Collection<T> getBroadcast(String name) {
  final Broadcast<?> broadcast = this.broadcasts.get(name);
  if (broadcast == null) {
    throw new RheemException("No such broadcast found: " + name);
  }
  return (Collection<T>) broadcast.getValue();
}

Code example source: org.apache.tinkerpop/spark-gremlin

@Override
public Set<String> keys() {
  if (this.inExecute)
    return this.broadcast.getValue().keySet();
  else {
    final Set<String> trueKeys = new HashSet<>();
    this.sparkMemory.forEach((key, value) -> {
      if (!value.value().isEmpty())
        trueKeys.add(key);
    });
    return Collections.unmodifiableSet(trueKeys);
  }
}

Code example source: ai.grakn/grakn-kb

@Override
public Set<String> keys() {
  if (this.inExecute) {
    return this.broadcast.getValue().keySet();
  } else {
    final Set<String> trueKeys = new HashSet<>();
    this.sparkMemory.forEach((key, value) -> {
      if (!value.value().isEmpty()) {
        trueKeys.add(key);
      }
    });
    return Collections.unmodifiableSet(trueKeys);
  }
}

Code example source: com.stratio.deep/deep-core

@Override
public Partition[] getPartitions() {
  initExtractorClient();
  return extractorClient.getPartitions(config.getValue());
}

Code example source: usc-isi-i2/Web-Karma

@Override
  public Iterable<Tuple2<String, String>> call(Tuple2<String, String> writableIterableTuple2) throws Exception {
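    // model and context are broadcast variables; their contents are copied into the per-call Karma settings below via value()/getValue().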
    List<Tuple2<String, String>> results = new LinkedList<>();
    Properties karmaContentSettings = new Properties();
    for(Map.Entry<Object, Object> objectObjectEntry : karmaSettings.entrySet())
      karmaContentSettings.put(objectObjectEntry.getKey(), objectObjectEntry.getValue());
    karmaContentSettings.put("model.content", model.value());
    karmaContentSettings.put("context.content", context.getValue());
    
    if(outputFormat != null && outputFormat.equals("n3")) {
      final N3Impl mapper = new N3Impl(karmaContentSettings);
      String result = mapper.mapResult(writableIterableTuple2._1, writableIterableTuple2._2);
      String[] lines = result.split("(\r\n|\n)");
      for(String line : lines)
      {
        if((line = line.trim()).isEmpty())
        {
          continue;
        }
        int splitBetweenSubjectAndPredicate = line.indexOf(' ');
        String key = (line.substring(0, splitBetweenSubjectAndPredicate));
        String value = line;
        results.add(new Tuple2<>(key, value));
      }
    }
    return results;
  }
});

Code example source: Stratio/deep-spark

@Override
public Partition[] getPartitions() {
  initExtractorClient();
  return extractorClient.getPartitions(config.getValue());
}

Code example source: Stratio/deep-spark

/**
 * It tries to get an Extractor Instance,
 * if there is any problem try to instance an extractorClient
 */
private void initExtractorClient() {
  try {
    if (extractorClient == null) {
      extractorClient = getExtractorInstance(config.getValue());
    }
  } catch (DeepExtractorInitializationException e) {
    extractorClient = getExtractorClient();
  }
}

Code example source: com.stratio.deep/deep-core

/**
 * It tries to get an Extractor Instance,
 * if there is any problem try to instance an extractorClient
 */
private void initExtractorClient() {
  try {
    if (extractorClient == null) {
      extractorClient = getExtractorInstance(config.getValue());
    }
  } catch (DeepExtractorInitializationException e) {
    extractorClient = getExtractorClient();
  }
}

Code example source: scipr-lab/dizk

public static <GroupT extends AbstractGroup<GroupT>,
    FieldT extends AbstractFieldElementExpanded<FieldT>> JavaPairRDD<Long, GroupT>
distributedBatchMSM(
    final int scalarSize,
    final int windowSize,
    final List<List<GroupT>> multiplesOfBase,
    final JavaPairRDD<Long, FieldT> scalars,
    final JavaSparkContext sc) {
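  // Broadcast the precomputed multiples of the base so each executor receives them once; every task then reads them with getValue().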
  final Broadcast<List<List<GroupT>>> baseBroadcast = sc.broadcast(multiplesOfBase);
  return scalars.mapToPair(scalar -> new Tuple2<>(
      scalar._1,
      serialMSM(scalarSize, windowSize, baseBroadcast.getValue(), scalar._2)));
}

Code example source: usc-isi-i2/Web-Karma

@SuppressWarnings("unchecked")
  @Override
  public Tuple2<String, String> call(Tuple2<String, String> t)
      throws Exception {
    String key = t._1();
    String value = t._2();
    
    JSONObject obj = new JSONObject(value);
    Object outobj = JsonLdProcessor.compact(JsonUtils.fromString(value), 
        context.getValue(), 
        new JsonLdOptions(""));
    if(outobj instanceof Map) {
      @SuppressWarnings("rawtypes")
      Map outjsonobj = (Map) outobj;
      outjsonobj.put("@context", contextUrl);
    }
    
    value = JsonUtils.toString(outobj);
    if (obj.has("uri")) {
      key = obj.getString("uri");
    }
    else if (obj.has("@id")) {
      key = obj.getString("@id");
    }
    else {
      key = obj.toString();
    }
    return new Tuple2<>(key, value);
  }
});

Code example source: cerner/bunsen

/**
 * Returns true if the given input CodeableConcept, or sequence of CodeableConcept, has a Coding
 * contained in the ValueSet having the given reference name, or false otherwise. This method
 * is dynamically typed as it may be invoked over either a structure or sequence of structures in
 * SparkSQL.
 */
private static Boolean inValueSet(Object input,
    String referenceName,
    Broadcast<BroadcastableValueSets> valueSets) {
  // A null code never matches.
  if (input == null) {
    return false;
  }
  if (input instanceof Row) {
    return inValueSet((Row) input, referenceName, valueSets.getValue());
  } else {
    IndexedSeq<Row> codeableConceptSeq = (IndexedSeq<Row>) input;
    boolean found = false;
    for (int i = 0; i < codeableConceptSeq.size(); i++) {
      if (inValueSet(codeableConceptSeq.apply(i), referenceName, valueSets.getValue())) {
        found = true;
        break;
      }
    }
    return found;
  }
}

Code example source: com.cerner.bunsen/bunsen-core

/**
 * Returns true if the given input CodeableConcept, or sequence of CodeableConcept, has a Coding
 * contained in the ValueSet having the given reference name, or false otherwise. This method
 * is dynamically typed as it may be invoked over either a structure or sequence of structures in
 * SparkSQL.
 */
private static Boolean inValueSet(Object input,
    String referenceName,
    Broadcast<BroadcastableValueSets> valueSets) {
  // A null code never matches.
  if (input == null) {
    return false;
  }
  if (input instanceof Row) {
    return inValueSet((Row) input, referenceName, valueSets.getValue());
  } else {
    IndexedSeq<Row> codeableConceptSeq = (IndexedSeq<Row>) input;
    boolean found = false;
    for (int i = 0; i < codeableConceptSeq.size(); i++) {
      if (inValueSet(codeableConceptSeq.apply(i), referenceName, valueSets.getValue())) {
        found = true;
        break;
      }
    }
    return found;
  }
}

Code example source: com.cerner.bunsen/bunsen-core

/**
 * Returns a dataset with the values for each element in the map of uri to version.
 *
 * @param uriToVersion a map of value set URI to the version to load
 * @return a dataset of values for the given URIs and versions.
 */
public Dataset<Value> getValues(Map<String,String> uriToVersion) {
  JavaSparkContext context = new JavaSparkContext(this.spark.sparkContext());
  Broadcast<Map<String,String>> broadcastUrisToVersion = context.broadcast(uriToVersion);
  return this.values.filter((FilterFunction<Value>) value -> {
    String latestVersion = broadcastUrisToVersion.getValue().get(value.getValueSetUri());
    return latestVersion != null && latestVersion.equals(value.getValueSetVersion());
  });
}

Code example source: com.cerner.bunsen/bunsen-core

/**
 * Returns a dataset with the mappings for each uri and version.
 *
 * @param uriToVersion a map of concept map URI to the version to load
 * @return a dataset of mappings for the given URIs and versions.
 */
public Dataset<Mapping> getMappings(Map<String,String> uriToVersion) {
  JavaSparkContext context = new JavaSparkContext(this.spark.sparkContext());
  Broadcast<Map<String,String>> broadcastMaps = context.broadcast(uriToVersion);
  return this.mappings.filter((FilterFunction<Mapping>) mapping -> {
    String latestVersion = broadcastMaps.getValue().get(mapping.getConceptMapUri());
    return latestVersion != null && latestVersion.equals(mapping.getConceptMapVersion());
  });
}

Code example source: cerner/bunsen

/**
 * Returns a dataset with the values for each element in the map of uri to version.
 *
 * @param uriToVersion a map of value set URI to the version to load
 * @return a dataset of values for the given URIs and versions.
 */
public Dataset<Value> getValues(Map<String,String> uriToVersion) {
  JavaSparkContext context = new JavaSparkContext(this.spark.sparkContext());
  Broadcast<Map<String,String>> broadcastUrisToVersion = context.broadcast(uriToVersion);
  return this.values.filter((FilterFunction<Value>) value -> {
    String latestVersion = broadcastUrisToVersion.getValue().get(value.getValueSetUri());
    return latestVersion != null && latestVersion.equals(value.getValueSetVersion());
  });
}

Code example source: cerner/bunsen

/**
 * Returns a dataset with the mappings for each uri and version.
 *
 * @param uriToVersion a map of concept map URI to the version to load
 * @return a dataset of mappings for the given URIs and versions.
 */
public Dataset<Mapping> getMappings(Map<String,String> uriToVersion) {
  JavaSparkContext context = new JavaSparkContext(this.spark.sparkContext());
  Broadcast<Map<String,String>> broadcastMaps = context.broadcast(uriToVersion);
  return this.mappings.filter((FilterFunction<Mapping>) mapping -> {
    String latestVersion = broadcastMaps.getValue().get(mapping.getConceptMapUri());
    return latestVersion != null && latestVersion.equals(mapping.getConceptMapVersion());
  });
}

Code example source: Stratio/deep-spark

@Override
public Iterator<T> compute(Partition split, TaskContext context) {
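    // config is a broadcast of the extractor configuration; config.getValue() reads it on the executor that computes this partition.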
  initExtractorClient();
  extractorClient.initIterator(split, config.getValue());
  context.addTaskCompletionListener(new AbstractFunction1<TaskContext, BoxedUnit>() {
    @Override
    public BoxedUnit apply(TaskContext v1) {
      extractorClient.close();
      return null;
    }
  });
  java.util.Iterator<T> iterator = new java.util.Iterator<T>() {
    @Override
    public boolean hasNext() {
      return extractorClient.hasNext();
    }
    @Override
    public T next() {
      return extractorClient.next();
    }
    @Override
    public void remove() {
      throw new DeepIOException(
          "Method not implemented (and won't be implemented anytime soon!!!)");
    }
  };
  return new InterruptibleIterator<>(context, asScalaIterator(iterator));
}
