Usage of the org.apache.hadoop.util.bloom.Key.<init>() method, with code examples


This article collects code examples of the Java method org.apache.hadoop.util.bloom.Key.<init>() and shows how Key.<init>() is used in practice. The examples are taken from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a practical reference. Details of Key.<init>() are as follows:
Package: org.apache.hadoop.util.bloom
Class: Key
Method: <init>

About Key.<init>

Default constructor; use together with readFields().
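
The no-argument constructor creates an empty key that is meant to be filled in by readFields() during deserialization; when adding data to a Bloom filter you would normally use the byte-array constructors instead, as the examples below do. A minimal sketch of the serialize/deserialize round trip, assuming hadoop-common is on the classpath (the class name KeyRoundTrip and the sample bytes are illustrative, not taken from any of the projects below):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.util.bloom.Key;

public class KeyRoundTrip {
  public static void main(String[] args) throws IOException {
    // Build a key from raw bytes and serialize it.
    Key original = new Key("user-42".getBytes(StandardCharsets.UTF_8));
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    original.write(new DataOutputStream(buffer));

    // The default constructor produces an empty key; readFields() populates it.
    Key restored = new Key();
    restored.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

    System.out.println(original.compareTo(restored) == 0);  // expected: true
  }
}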

Code examples

Code example from: org.apache.hadoop/hadoop-common

@Override
 public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  createVector();
  for (int i = 0; i < fpVector.length; i++) {
   List<Key> list = fpVector[i];
   int size = in.readInt();
   for (int j = 0; j < size; j++) {
    Key k = new Key();
    k.readFields(in);
    list.add(k);
   }
  }
  for (int i = 0; i < keyVector.length; i++) {
   List<Key> list = keyVector[i];
   int size = in.readInt();
   for (int j = 0; j < size; j++) {
    Key k = new Key();
    k.readFields(in);
    list.add(k);
   }
  }
  for (int i = 0; i < ratio.length; i++) {
   ratio[i] = in.readDouble();
  }
 }
}

Code example from: klout/brickhouse

public boolean iterate(String key) {
  if (key != null) {
    if (bloomFilter == null) {
      init();
    }
    bloomFilter.add(new Key(key.getBytes()));
  }
  return true;
}

Code example from: klout/brickhouse

public Boolean evaluate(String key, String bloomFilter) throws HiveException {
  Filter bloom = BloomFactory.GetBloomFilter(bloomFilter);
  if (bloom != null) {
    return bloom.membershipTest(new Key(key.getBytes()));
  } else {
    throw new HiveException("Unable to find bloom " + bloomFilter);
  }
}

Code example from: apache/accumulo

@Override
public Key transform(org.apache.accumulo.core.data.Key acuKey) {
 byte keyData[];
 ByteSequence row = acuKey.getRowData();
 keyData = new byte[row.length()];
 System.arraycopy(row.getBackingArray(), 0, keyData, 0, row.length());
 return new Key(keyData, 1.0);
}

Code example from: apache/accumulo

@Override
public Key transform(org.apache.accumulo.core.data.Key acuKey) {
 byte keyData[];
 ByteSequence row = acuKey.getRowData();
 ByteSequence cf = acuKey.getColumnFamilyData();
 keyData = new byte[row.length() + cf.length()];
 System.arraycopy(row.getBackingArray(), row.offset(), keyData, 0, row.length());
 System.arraycopy(cf.getBackingArray(), cf.offset(), keyData, row.length(), cf.length());
 return new Key(keyData, 1.0);
}

Code example from: apache/accumulo

@Override
public org.apache.hadoop.util.bloom.Key transform(org.apache.accumulo.core.data.Key acuKey) {
 byte keyData[];
 ByteSequence row = acuKey.getRowData();
 ByteSequence cf = acuKey.getColumnFamilyData();
 ByteSequence cq = acuKey.getColumnQualifierData();
 keyData = new byte[row.length() + cf.length() + cq.length()];
 System.arraycopy(row.getBackingArray(), row.offset(), keyData, 0, row.length());
 System.arraycopy(cf.getBackingArray(), cf.offset(), keyData, row.length(), cf.length());
 System.arraycopy(cq.getBackingArray(), cq.offset(), keyData, row.length() + cf.length(),
   cq.length());
 return new org.apache.hadoop.util.bloom.Key(keyData, 1.0);
}
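
Note on the three Accumulo transformers above: they pass an explicit weight of 1.0 as the second constructor argument. In the Hadoop Key class, the single-argument constructor Key(byte[]) appears to delegate to Key(value, 1.0), so new Key(keyData) and new Key(keyData, 1.0) should behave identically; the two-argument form simply makes the default weight explicit.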

Code example from: alexholmes/hadoop-book

@Override
 protected void map(Text key, Text value, Context context)
   throws IOException, InterruptedException {
  System.out.println("K[" + key + "]");
  if(filter.membershipTest(new Key(key.toString().getBytes()))) {
   context.write(key, value);
  }
 }
}

Code example from: org.wso2.siddhi/siddhi-extension-event-table

public void removeFromBloomFilters(Object[] obj) {
  for (int i = 0; i < attributeList.size(); i++) {
    if (obj[i] != null) {
      bloomFilters[i].delete(new Key(obj[i].toString().getBytes()));
    }
  }
}

Code example from: uber/hudi

public void add(String key) {
 if (key == null) {
  throw new NullPointerException("Key cannot be null");
 }
 filter.add(new Key(key.getBytes(StandardCharsets.UTF_8)));
}

Code example from: uber/hudi

public boolean mightContain(String key) {
 if (key == null) {
  throw new NullPointerException("Key cannot be null");
 }
 return filter.membershipTest(new Key(key.getBytes(StandardCharsets.UTF_8)));
}

Code example from: org.apache.hivemall/hivemall-core

@Override
public void init() {
  this.filter = BloomFilterUtils.newDynamicBloomFilter();
  this.key = new Key();
}

Code example from: alexholmes/hadoop-book

@Override
public void map(Text key, Text value,
        OutputCollector<NullWritable, BloomFilter> output,
        Reporter reporter) throws IOException {
 System.out.println("K[" + key + "]");
 int age = Integer.valueOf(value.toString());
 if (age > 30) {
  filter.add(new Key(key.toString().getBytes()));
 }
 collector = output;
}

Code example from: usc-isi-i2/Web-Karma

public void addUriToBloomFilter(String id, String uri) {
  KR2RMLBloomFilter bf = null;
  if(!idToBloomFilter.containsKey(id))
  {
    idToBloomFilter.putIfAbsent(id, new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH));
  }
  bf = idToBloomFilter.get(id);
  
  Key k = new Key(uri.getBytes(UTF8_CHARSET));
  bf.add(k);
  return;
}

Code example from: org.apache.crunch/crunch-core

@Override
public void process(K input, Emitter<BloomFilter> emitter) {
 bloomFilter.add(new Key(keyToBytesFn.map(input)));
}

Code example from: org.apache.accumulo/accumulo-core

@Override
public Key transform(org.apache.accumulo.core.data.Key acuKey) {
 byte keyData[];
 ByteSequence row = acuKey.getRowData();
 keyData = new byte[row.length()];
 System.arraycopy(row.getBackingArray(), 0, keyData, 0, row.length());
 return new Key(keyData, 1.0);
}

Code example from: uk.gov.gchq.gaffer/accumulo-store

private void addToBloomFilter(final Object vertex, final BloomFilter filter) throws RetrieverException {
  try {
    filter.add(new org.apache.hadoop.util.bloom.Key(elementConverter.serialiseVertex(vertex)));
  } catch (final AccumuloElementConversionException e) {
    throw new RetrieverException("Failed to add identifier to the bloom key", e);
  }
}

Code example from: uk.gov.gchq.gaffer/accumulo-store

/**
 * Transforms an Accumulo {@link org.apache.accumulo.core.data.Key} into the
 * corresponding key for the Bloom filter. If the key corresponds to neither an
 * {@link uk.gov.gchq.gaffer.data.element.Entity} nor an
 * {@link uk.gov.gchq.gaffer.data.element.Edge}, the method throws an
 * {@link java.io.IOException}; this is caught and {@code null} is returned.
 */
@Override
public org.apache.hadoop.util.bloom.Key transform(final Key key) {
  return new org.apache.hadoop.util.bloom.Key(getVertexFromRangeKey(key.getRowData().getBackingArray()));
}

Code example from: org.apache.crunch/crunch-core

@Override
 public boolean accept(Pair<K, V> input) {
  Key key = new Key(keyToBytesFn.map(input.first()));
  return bloomFilter.membershipTest(key);
 }
}

Code example from: com.github.jiayuhan-it/hadoop-common

private void checkSetOnIAE() {
 Key key = new Key();
 try {
  key.set(null, 0);
 } catch (IllegalArgumentException ex) {
  // expected
 } catch (Exception e) {
  Assert.fail("checkSetOnIAE ex error");
 }
}

Code example from: org.apache.accumulo/accumulo-core

@Override
public Key transform(org.apache.accumulo.core.data.Key acuKey) {
 byte keyData[];
 ByteSequence row = acuKey.getRowData();
 ByteSequence cf = acuKey.getColumnFamilyData();
 keyData = new byte[row.length() + cf.length()];
 System.arraycopy(row.getBackingArray(), row.offset(), keyData, 0, row.length());
 System.arraycopy(cf.getBackingArray(), cf.offset(), keyData, row.length(), cf.length());
 return new Key(keyData, 1.0);
}
