Usage of the org.apache.hadoop.hbase.util.Bytes.get() Method, with Code Examples


This article collects a number of Java code examples of the org.apache.hadoop.hbase.util.Bytes.get() method and shows how Bytes.get() is used in practice. The examples come mainly from platforms such as GitHub, Stack Overflow, and Maven and were extracted from selected open-source projects, so they should make useful references. Details of the Bytes.get() method are as follows:
Package: org.apache.hadoop.hbase.util
Class: Bytes
Method: get

About Bytes.get

Get the data from the Bytes, i.e. the byte array backing the wrapper.
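
Before the sourced examples below, here is a minimal, self-contained sketch of the method in isolation (the class name BytesGetExample is illustrative and not taken from any of the quoted projects): it wraps a String in a Bytes object and then reads the data back out with get(), combined with getOffset() and getLength() because the backing array may be offset or longer than the logical value.

import org.apache.hadoop.hbase.util.Bytes;

// Illustrative example class, not part of the quoted projects.
public class BytesGetExample {
 public static void main(String[] args) {
  // Wrap a byte[] in a Bytes instance; this wrapper type is what
  // TableDescriptor#getValues() and ColumnFamilyDescriptor#getValues()
  // use for their keys and values.
  Bytes wrapped = new Bytes(Bytes.toBytes("hello"));

  // get() returns the backing byte array; pair it with getOffset() and
  // getLength() when decoding, as several of the examples below do.
  byte[] backing = wrapped.get();
  String decoded = Bytes.toString(backing, wrapped.getOffset(), wrapped.getLength());

  System.out.println(decoded); // prints "hello"
 }
}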

Code Examples

Code example origin: apache/hbase

/**
 * Set the new Bytes to the contents of the passed
 * <code>ibw</code>.
 * @param ibw the value to set this Bytes to.
 */
public Bytes(final Bytes ibw) {
 this(ibw.get(), ibw.getOffset(), ibw.getLength());
}

Code example origin: apache/hbase

private <T> T getOrDefault(Bytes key, Function<byte[], T> function, T defaultValue) {
 Bytes value = values.get(key);
 if (value == null) {
  return defaultValue;
 } else {
  return function.apply(value.get());
 }
}

Code example origin: apache/hbase

private <T> T getOrDefault(Bytes key, Function<String, T> function, T defaultValue) {
 Bytes value = values.get(key);
 if (value == null) {
  return defaultValue;
 } else {
  return function.apply(Bytes.toString(value.get(), value.getOffset(), value.getLength()));
 }
}

Code example origin: apache/hbase

@Override
public String get(String key) {
 Bytes ibw = new Bytes(Bytes.toBytes(key));
 if (!m.containsKey(ibw)) {
  return null;
 }
 Bytes value = m.get(ibw);
 if (value == null || value.get() == null) {
  return null;
 }
 return Bytes.toString(value.get());
}

Code example origin: apache/hbase

@Override
public String getValue(String key) {
 Bytes rval = values.get(new Bytes(Bytes.toBytes(key)));
 return rval == null ? null : Bytes.toString(rval.get(), rval.getOffset(), rval.getLength());
}

Code example origin: apache/hbase

@Override
public Iterator<Map.Entry<String,String>> iterator() {
 Map<String, String> ret = new HashMap<>();
 for (Map.Entry<Bytes, Bytes> entry : map.entrySet()) {
  String key = Bytes.toString(entry.getKey().get());
  String val = entry.getValue() == null ? null : Bytes.toString(entry.getValue().get());
  ret.put(key, val);
 }
 return ret.entrySet().iterator();
}

Code example origin: apache/hbase

/**
 * Getter for fetching an unmodifiable map.
 */
public Map<String, String> getConfiguration() {
 return delegatee.getValues().entrySet().stream()
     .collect(Collectors.toMap(
         e -> Bytes.toString(e.getKey().get(), e.getKey().getOffset(), e.getKey().getLength()),
         e -> Bytes.toString(e.getValue().get(), e.getValue().getOffset(), e.getValue().getLength())
     ));
}

Code example origin: apache/hbase

@Override
public byte[] getValue(byte[] key) {
 Bytes value = values.get(new Bytes(key));
 return value == null ? null : value.get();
}

Code example origin: apache/hbase

/**
 * Convert a protocol buffer Mutate to an Append
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying any Cells associated with the mutation
 * @return the converted client Append
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
    throws IOException {
 MutationType type = proto.getMutateType();
 assert type == MutationType.APPEND : type.name();
 Append append = toDelta((Bytes row) -> new Append(row.get(), row.getOffset(), row.getLength()),
     Append::add, proto, cellScanner);
 if (proto.hasTimeRange()) {
  TimeRange timeRange = toTimeRange(proto.getTimeRange());
  append.setTimeRange(timeRange.getMin(), timeRange.getMax());
 }
 return append;
}

Code example origin: apache/hbase

/**
 * Return the list of attached coprocessors, each represented by its class name.
 *
 * @return the list of coprocessor descriptors
 */
@Override
public List<CoprocessorDescriptor> getCoprocessorDescriptors() {
 List<CoprocessorDescriptor> result = new ArrayList<>();
 for (Map.Entry<Bytes, Bytes> e: getValues().entrySet()) {
  String key = Bytes.toString(e.getKey().get()).trim();
  if (CP_HTD_ATTR_KEY_PATTERN.matcher(key).matches()) {
   toCoprocessorDescriptor(Bytes.toString(e.getValue().get()).trim())
    .ifPresent(result::add);
  }
 }
 return result;
}

Code example origin: apache/hbase

/**
 * Convert a protocol buffer Mutate to an Append
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying any Cells associated with the mutation
 * @return the converted client Append
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
    throws IOException {
 MutationType type = proto.getMutateType();
 assert type == MutationType.APPEND : type.name();
 Append append = toDelta((Bytes row) -> new Append(row.get(), row.getOffset(), row.getLength()),
   Append::add, proto, cellScanner);
 if (proto.hasTimeRange()) {
  TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
  append.setTimeRange(timeRange.getMin(), timeRange.getMax());
 }
 return append;
}

Code example origin: apache/hbase

/**
 * Add a coprocessor to the values map.
 * @param specStr the coprocessor specification, all in one String
 * @return Returns <code>this</code>
 */
private ModifyableTableDescriptor setCoprocessorToMap(final String specStr) {
 if (specStr == null) {
  return this;
 }
 // generate a coprocessor key
 int maxCoprocessorNumber = 0;
 Matcher keyMatcher;
 for (Map.Entry<Bytes, Bytes> e : this.values.entrySet()) {
  keyMatcher = CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get()));
  if (!keyMatcher.matches()) {
   continue;
  }
  maxCoprocessorNumber = Math.max(Integer.parseInt(keyMatcher.group(1)), maxCoprocessorNumber);
 }
 maxCoprocessorNumber++;
 String key = "coprocessor$" + Integer.toString(maxCoprocessorNumber);
 return setValue(new Bytes(Bytes.toBytes(key)), new Bytes(Bytes.toBytes(specStr)));
}

Code example origin: apache/hbase

/**
 * Convert a protocol buffer Mutate to an Increment
 *
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying any Cells associated with the mutation
 * @return the converted client Increment
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Increment toIncrement(final MutationProto proto, final CellScanner cellScanner)
    throws IOException {
 MutationType type = proto.getMutateType();
 assert type == MutationType.INCREMENT : type.name();
 Increment increment = toDelta((Bytes row) -> new Increment(row.get(), row.getOffset(), row.getLength()),
     Increment::add, proto, cellScanner);
 if (proto.hasTimeRange()) {
  TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
  increment.setTimeRange(timeRange.getMin(), timeRange.getMax());
 }
 return increment;
}

Code example origin: apache/hbase

/**
 * Remove all {@link Constraint Constraints} that have been added to the table
 * and turn off the constraint processing.
 * <p>
 * All {@link Configuration Configurations} and their associated
 * {@link Constraint} are removed.
 * 
 * @param desc
 *          {@link HTableDescriptor} to remove {@link Constraint Constraints}
 *          from.
 */
public static void remove(HTableDescriptor desc) {
 // disable constraints
 disable(desc);
 // remove all the constraint settings
 List<Bytes> keys = new ArrayList<>();
 // loop through all the key, values looking for constraints
 for (Map.Entry<Bytes, Bytes> e : desc.getValues().entrySet()) {
  String key = Bytes.toString(e.getKey().get());
  String[] className = CONSTRAINT_HTD_ATTR_KEY_PATTERN.split(key);
  if (className.length == 2) {
   keys.add(e.getKey());
  }
 }
 // now remove all the keys we found
 for (Bytes key : keys) {
  desc.remove(key);
 }
}

Code example origin: apache/hbase

/**
 * Convert a protocol buffer Mutate to an Increment
 *
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying any Cells associated with the mutation
 * @return the converted client Increment
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Increment toIncrement(final MutationProto proto, final CellScanner cellScanner)
    throws IOException {
 MutationType type = proto.getMutateType();
 assert type == MutationType.INCREMENT : type.name();
 Increment increment = toDelta((Bytes row) -> new Increment(row.get(), row.getOffset(), row.getLength()),
     Increment::add, proto, cellScanner);
 if (proto.hasTimeRange()) {
  TimeRange timeRange = toTimeRange(proto.getTimeRange());
  increment.setTimeRange(timeRange.getMin(), timeRange.getMax());
 }
 return increment;
}

Code example origin: apache/hbase

/**
 * Constructor
 * @param htd the table descriptor
 */
public TableSchemaModel(HTableDescriptor htd) {
 setName(htd.getTableName().getNameAsString());
 for (Map.Entry<Bytes, Bytes> e:
   htd.getValues().entrySet()) {
  addAttribute(Bytes.toString(e.getKey().get()),
   Bytes.toString(e.getValue().get()));
 }
 for (HColumnDescriptor hcd: htd.getFamilies()) {
  ColumnSchemaModel columnModel = new ColumnSchemaModel();
  columnModel.setName(hcd.getNameAsString());
  for (Map.Entry<Bytes, Bytes> e:
    hcd.getValues().entrySet()) {
   columnModel.addAttribute(Bytes.toString(e.getKey().get()),
     Bytes.toString(e.getValue().get()));
  }
  addColumnFamily(columnModel);
 }
}

Code example origin: apache/hbase

public static TTableDescriptor tableDescriptorFromHBase(TableDescriptor in) {
 TTableDescriptor out = new TTableDescriptor();
 out.setTableName(tableNameFromHBase(in.getTableName()));
 Map<Bytes, Bytes> attributes = in.getValues();
 for (Map.Entry<Bytes, Bytes> attribute : attributes.entrySet()) {
  out.putToAttributes(ByteBuffer.wrap(attribute.getKey().get()),
    ByteBuffer.wrap(attribute.getValue().get()));
 }
 for (ColumnFamilyDescriptor column : in.getColumnFamilies()) {
  out.addToColumns(columnFamilyDescriptorFromHBase(column));
 }
 out.setDurability(durabilityFromHBase(in.getDurability()));
 return out;
}

Code example origin: apache/hbase

/**
 * Converts a ColumnFamilyDescriptor to a pb ColumnFamilySchema
 * @param hcd the ColumnFamilyDescriptor to convert
 * @return the pb ColumnFamilySchema built from <code>hcd</code>
 */
public static ColumnFamilySchema toColumnFamilySchema(ColumnFamilyDescriptor hcd) {
 ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
 builder.setName(UnsafeByteOperations.unsafeWrap(hcd.getName()));
 for (Map.Entry<Bytes, Bytes> e : hcd.getValues().entrySet()) {
  BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
  aBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey().get()));
  aBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue().get()));
  builder.addAttributes(aBuilder.build());
 }
 for (Map.Entry<String, String> e : hcd.getConfiguration().entrySet()) {
  NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
  aBuilder.setName(e.getKey());
  aBuilder.setValue(e.getValue());
  builder.addConfiguration(aBuilder.build());
 }
 return builder.build();
}

Code example origin: apache/hbase

/**
 * Converts a TableDescriptor to a pb TableSchema
 * @param htd the TableDescriptor to convert
 * @return the pb TableSchema built from <code>htd</code>
 */
public static TableSchema toTableSchema(TableDescriptor htd) {
 TableSchema.Builder builder = TableSchema.newBuilder();
 builder.setTableName(toProtoTableName(htd.getTableName()));
 for (Map.Entry<Bytes, Bytes> e : htd.getValues().entrySet()) {
  BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
  aBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey().get()));
  aBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue().get()));
  builder.addAttributes(aBuilder.build());
 }
 for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
  builder.addColumnFamilies(toColumnFamilySchema(hcd));
 }
 return builder.build();
}

Code example origin: apache/hbase

public static TColumnFamilyDescriptor columnFamilyDescriptorFromHBase(
  ColumnFamilyDescriptor in) {
 TColumnFamilyDescriptor out = new TColumnFamilyDescriptor();
 out.setName(in.getName());
 for (Map.Entry<Bytes, Bytes> attribute : in.getValues().entrySet()) {
  out.putToAttributes(ByteBuffer.wrap(attribute.getKey().get()),
    ByteBuffer.wrap(attribute.getValue().get()));
 }
 for (Map.Entry<String, String> conf : in.getConfiguration().entrySet()) {
  out.putToConfiguration(conf.getKey(), conf.getValue());
 }
 out.setBlockSize(in.getBlocksize());
 out.setBloomnFilterType(bloomFilterFromHBase(in.getBloomFilterType()));
 out.setCompressionType(compressionAlgorithmFromHBase(in.getCompressionType()));
 out.setDfsReplication(in.getDFSReplication());
 out.setDataBlockEncoding(dataBlockEncodingFromHBase(in.getDataBlockEncoding()));
 out.setKeepDeletedCells(keepDeletedCellsFromHBase(in.getKeepDeletedCells()));
 out.setMaxVersions(in.getMaxVersions());
 out.setMinVersions(in.getMinVersions());
 out.setScope(in.getScope());
 out.setTimeToLive(in.getTimeToLive());
 out.setBlockCacheEnabled(in.isBlockCacheEnabled());
 out.setCacheBloomsOnWrite(in.isCacheBloomsOnWrite());
 out.setCacheDataOnWrite(in.isCacheDataOnWrite());
 out.setCacheIndexesOnWrite(in.isCacheIndexesOnWrite());
 out.setCompressTags(in.isCompressTags());
 out.setEvictBlocksOnClose(in.isEvictBlocksOnClose());
 out.setInMemory(in.isInMemory());
 return out;
}
