org.apache.hadoop.record.Buffer.get()方法的使用及代码示例

x33g5p2x  于2022-01-17 转载在 其他  
字(5.1k)|赞(0)|评价(0)|浏览(77)

本文整理了Java中org.apache.hadoop.record.Buffer.get()方法的一些代码示例,展示了Buffer.get()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Buffer.get()方法的具体详情如下:
包路径:org.apache.hadoop.record.Buffer
类名称:Buffer
方法名:get

Buffer.get介绍

[英]Get the data from the Buffer.
[中]从缓冲区获取数据。

代码示例

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Get the capacity, which is the maximum count that could be handled
 * without resizing the backing storage.
 *
 * @return The number of bytes
 */
public int getCapacity() {
 final byte[] backing = this.get();
 return backing.length;
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Convert the byte buffer to a string using a specific character encoding.
 *
 * @param charsetName Valid Java Character Set Name
 */
public String toString(String charsetName)
 throws UnsupportedEncodingException {
 final byte[] raw = this.get();
 final int len = this.getCount();
 return new String(raw, 0, len, charsetName);
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

/**
 * Convert the byte buffer to a string using a specific character encoding.
 *
 * @param charsetName Valid Java Character Set Name
 */
public String toString(String charsetName)
 throws UnsupportedEncodingException {
 // Only the first getCount() bytes are valid content; the rest is spare capacity.
 return new String(this.get(), 0, this.getCount(), charsetName);
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Get the capacity, which is the maximum count that could be handled
 * without resizing the backing storage.
 *
 * @return The number of bytes
 */
public int getCapacity() {
 // Capacity is simply the length of the backing array, not the valid count.
 final byte[] storage = get();
 return storage.length;
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Convert the byte buffer to a string using a specific character encoding.
 *
 * @param charsetName Valid Java Character Set Name
 */
public String toString(String charsetName)
 throws UnsupportedEncodingException {
 // Decode only the valid region [0, getCount()) of the backing array.
 final byte[] bytes = get();
 return new String(bytes, 0, getCount(), charsetName);
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Get the capacity, which is the maximum count that could be handled
 * without resizing the backing storage.
 *
 * @return The number of bytes
 */
public int getCapacity() {
 // The backing array's full length, independent of how many bytes are in use.
 final byte[] raw = this.get();
 return raw.length;
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

/**
 * Writes a bytes buffer as a typed bytes sequence.
 *
 * @param buffer the bytes buffer to be written
 * @throws IOException
 */
public void writeBytes(Buffer buffer) throws IOException {
 // Delegate to the raw-array overload, tagging the payload with the BYTES type code.
 final byte[] payload = buffer.get();
 writeBytes(payload, Type.BYTES.code, buffer.getCount());
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

@Override
public int hashCode() {
 // 31-based polynomial hash over the first `count` valid bytes only;
 // spare capacity beyond `count` does not affect the result.
 final byte[] data = this.get();
 int result = 1;
 for (int idx = 0; idx < count; ++idx) {
  result = result * 31 + data[idx];
 }
 return result;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-streaming

/**
 * Writes a bytes buffer as a typed bytes sequence.
 *
 * @param buffer the bytes buffer to be written
 * @throws IOException
 */
public void writeBytes(Buffer buffer) throws IOException {
 // Forward the buffer's valid bytes to the array overload with the BYTES marker.
 writeBytes(buffer.get(), Type.BYTES.code, buffer.getCount());
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Computes a 31-based polynomial hash over the first {@code count}
 * valid bytes of the backing array; bytes beyond {@code count} are
 * ignored. NOTE(review): presumably consistent with equals() comparing
 * only the valid region — confirm against the class's equals().
 */
@Override
public int hashCode() {
 int hash = 1;
 byte[] b = this.get();
 for (int i = 0; i < count; i++)
  hash = (31 * hash) + b[i];  // byte widens to int automatically; explicit cast was redundant
 return hash;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

@Override
 public Object clone() throws CloneNotSupportedException {
  // Shallow-clone via Object.clone(), then copy the valid byte range
  // [0, getCount()) into the duplicate so the two buffers are independent.
  final Buffer duplicate = (Buffer) super.clone();
  duplicate.copy(this.get(), 0, this.getCount());
  return duplicate;
 }
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Append specified bytes to the buffer.
 *
 * @param bytes byte array to be appended
 * @param offset offset into byte array
 * @param length length of data
 */
public void append(byte[] bytes, int offset, int length) {
 // Grow the backing storage first so the copy below cannot overrun it.
 final int newCount = count + length;
 setCapacity(newCount);
 System.arraycopy(bytes, offset, this.get(), count, length);
 count = newCount;
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Clones this buffer: shallow-clones via {@code Object.clone()}, then
 * invokes {@code copy(...)} with this buffer's first {@code getCount()}
 * bytes. NOTE(review): copy() is declared elsewhere; it appears to give
 * the clone an independent backing array — confirm its implementation.
 *
 * @return the cloned Buffer
 * @throws CloneNotSupportedException propagated from Object.clone()
 */
@Override
public Object clone() throws CloneNotSupportedException {
  Buffer result = (Buffer) super.clone();
  result.copy(this.get(), 0, this.getCount());
  return result;
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Append specified bytes to the buffer.
 *
 * @param bytes byte array to be appended
 * @param offset offset into byte array
 * @param length length of data
 */
public void append(byte[] bytes, int offset, int length) {
 // Ensure capacity before copying; the new data lands right after the
 // current valid region, then the valid count is advanced.
 setCapacity(count + length);
 System.arraycopy(bytes, offset, this.get(), count, length);
 count += length;
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

@Override
 public Object clone() throws CloneNotSupportedException {
  // Start from Object.clone()'s shallow copy, then transfer the valid
  // bytes [0, getCount()) into the new instance.
  final Buffer copyOfThis = (Buffer) super.clone();
  copyOfThis.copy(this.get(), 0, this.getCount());
  return copyOfThis;
 }
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Append specified bytes to the buffer.
 *
 * @param bytes byte array to be appended
 * @param offset offset into byte array
 * @param length length of data
 */
public void append(byte[] bytes, int offset, int length) {
 final int required = count + length;
 // Resize the backing storage up front, then splice in the new bytes.
 setCapacity(required);
 System.arraycopy(bytes, offset, this.get(), count, length);
 count = required;
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Clones this buffer: shallow-clones via {@code Object.clone()}, then
 * invokes {@code copy(...)} with this buffer's first {@code getCount()}
 * bytes. NOTE(review): copy() is declared elsewhere; it appears to give
 * the clone an independent backing array — confirm its implementation.
 *
 * @return the cloned Buffer
 * @throws CloneNotSupportedException propagated from Object.clone()
 */
@Override
public Object clone() throws CloneNotSupportedException {
  Buffer result = (Buffer) super.clone();
  result.copy(this.get(), 0, this.getCount());
  return result;
 }
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Append specified bytes to the buffer.
 *
 * @param bytes byte array to be appended
 * @param offset offset into byte array
 * @param length length of data
 */
public void append(byte[] bytes, int offset, int length) {
 // Make room for the incoming bytes, copy them just past the current
 // valid region, and extend the valid count to cover them.
 setCapacity(count + length);
 System.arraycopy(bytes, offset, this.get(), count, length);
 count += length;
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

@Override
public void writeBuffer(Buffer buf, String tag)
 throws IOException {
 // Length-prefixed encoding: a varint byte count followed by the
 // buffer's valid bytes. `tag` is unused here, as in other serializers.
 final int length = buf.getCount();
 Utils.writeVInt(out, length);
 out.write(buf.get(), 0, length);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

@Override
public void writeBuffer(Buffer buf, String tag)
 throws IOException {
 // Emit the valid byte count as a varint, then the bytes themselves.
 final byte[] contents = buf.get();
 final int validBytes = buf.getCount();
 Utils.writeVInt(out, validBytes);
 out.write(contents, 0, validBytes);
}

相关文章