Usage of the java.nio.Buffer.rewind() method, with code examples


This article collects code examples of the java.nio.Buffer.rewind() method in Java and shows how Buffer.rewind() is used in practice. The examples are drawn from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they have real-world reference value and should be helpful to some degree. The details of Buffer.rewind() are as follows:
Package: java.nio
Class: Buffer
Method: rewind

Buffer.rewind overview

Rewinds this buffer.

The position is set to zero, and the mark is cleared. The content of this buffer is not changed.
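
In other words, rewind() only moves the cursor back to the start: unlike flip(), it leaves the limit untouched, and unlike clear(), it does not reset the limit to the capacity. A minimal sketch of the behavior (the class name is illustrative, not from any of the projects below):

import java.nio.ByteBuffer;

public class RewindSketch {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.put((byte) 1).put((byte) 2).put((byte) 3); // position = 3
        buf.flip();                                    // limit = 3, position = 0

        System.out.println(buf.get());                 // 1, position advances to 1

        buf.rewind();                                  // position = 0, mark cleared, limit and data unchanged
        System.out.println(buf.get());                 // 1 again, reading restarts from the beginning
    }
}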

Code examples

Code example source: wildfly/wildfly

/**
 * Rewind the buffer.
 *
 * @see Buffer#rewind()
 * @param <T> the buffer type
 * @param buffer the buffer to rewind
 * @return the buffer instance
 */
public static <T extends Buffer> T rewind(T buffer) {
  buffer.rewind();
  return buffer;
}

Code example source: spring-projects/spring-framework

private Object convertToByteBuffer(@Nullable Object source, TypeDescriptor sourceType) {
  byte[] bytes = (byte[]) (source instanceof byte[] ? source :
      this.conversionService.convert(source, sourceType, BYTE_ARRAY_TYPE));
  if (bytes == null) {
    return ByteBuffer.wrap(new byte[0]);
  }
  ByteBuffer byteBuffer = ByteBuffer.allocate(bytes.length);
  byteBuffer.put(bytes);
  // Extra cast necessary for compiling on JDK 9 plus running on JDK 8, since
  // otherwise the overridden ByteBuffer-returning rewind method would be chosen
  // which isn't available on JDK 8.
  return ((Buffer) byteBuffer).rewind();
}
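
The comment above refers to the covariant overrides Java 9 added to ByteBuffer (rewind(), flip(), limit(int) and so on now return ByteBuffer instead of Buffer). A hedged sketch of the problem and the workaround, assuming the class is compiled against JDK 9+ class files and then run on a Java 8 JVM (class and method names are illustrative):

import java.nio.Buffer;
import java.nio.ByteBuffer;

public class RewindCompatSketch {

    static void rewindPortably(ByteBuffer buffer) {
        // buffer.rewind();
        // ^ compiled on JDK 9+, this resolves to ByteBuffer.rewind()Ljava/nio/ByteBuffer;
        //   which does not exist on Java 8, so running there throws NoSuchMethodError.

        // Casting to the base type pins the call to Buffer.rewind()Ljava/nio/Buffer;,
        // which exists on every Java version, so the same class file runs on 8 and 9+.
        ((Buffer) buffer).rewind();
    }
}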

Code example source: org.apache.hadoop/hadoop-common

synchronized void setInputFromSavedData() {
 compressedDirectBufOff = 0;
 compressedDirectBufLen = userBufLen;
 if (compressedDirectBufLen > directBufferSize) {
  compressedDirectBufLen = directBufferSize;
 }
 // Reinitialize bzip2's input direct buffer.
 compressedDirectBuf.rewind();
 ((ByteBuffer)compressedDirectBuf).put(userBuf, userBufOff, 
                    compressedDirectBufLen);
 
 // Note how much data is being fed to bzip2.
 userBufOff += compressedDirectBufLen;
 userBufLen -= compressedDirectBufLen;
}
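
This bzip2 example and the zlib, lz4, and snappy variants that follow all share the same pattern: rewind the reusable direct buffer so each new chunk of user data overwrites it from position 0, then put() the chunk for the native codec to consume. A stripped-down sketch of that pattern (the surrounding class, field names, and buffer size are hypothetical, not Hadoop's actual implementation):

import java.nio.ByteBuffer;

class DirectBufferFeeder {
    private final int directBufferSize = 64 * 1024;
    private final ByteBuffer compressedDirectBuf = ByteBuffer.allocateDirect(directBufferSize);

    /** Copies the next chunk of user data into the reusable direct buffer. */
    int feed(byte[] userBuf, int userBufOff, int userBufLen) {
        int len = Math.min(userBufLen, directBufferSize);
        compressedDirectBuf.rewind();                     // overwrite from position 0 on every call
        compressedDirectBuf.put(userBuf, userBufOff, len);
        // position is now len; a native codec would consume bytes [0, len)
        return len;                                       // the caller advances userBufOff and shrinks userBufLen
    }
}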

Code example source: org.apache.hadoop/hadoop-common

void setInputFromSavedData() {
 compressedDirectBufOff = 0;
 compressedDirectBufLen = userBufLen;
 if (compressedDirectBufLen > directBufferSize) {
  compressedDirectBufLen = directBufferSize;
 }
 // Reinitialize zlib's input direct buffer
 compressedDirectBuf.rewind();
 ((ByteBuffer)compressedDirectBuf).put(userBuf, userBufOff, 
                    compressedDirectBufLen);
 
 // Note how much data is being fed to zlib
 userBufOff += compressedDirectBufLen;
 userBufLen -= compressedDirectBufLen;
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * If a write would exceed the capacity of the direct buffers, it is set
 * aside to be loaded by this function while the compressed data are
 * consumed.
 */
synchronized void setInputFromSavedData() {
 compressedDirectBufLen = Math.min(userBufLen, directBufferSize);
 // Reinitialize lz4's input direct buffer
 compressedDirectBuf.rewind();
 ((ByteBuffer) compressedDirectBuf).put(userBuf, userBufOff,
   compressedDirectBufLen);
 // Note how much data is being fed to lz4
 userBufOff += compressedDirectBufLen;
 userBufLen -= compressedDirectBufLen;
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * If a write would exceed the capacity of the direct buffers, it is set
 * aside to be loaded by this function while the compressed data are
 * consumed.
 */
void setInputFromSavedData() {
 compressedDirectBufLen = Math.min(userBufLen, directBufferSize);
 // Reinitialize snappy's input direct buffer
 compressedDirectBuf.rewind();
 ((ByteBuffer) compressedDirectBuf).put(userBuf, userBufOff,
   compressedDirectBufLen);
 // Note how much data is being fed to snappy
 userBufOff += compressedDirectBufLen;
 userBufLen -= compressedDirectBufLen;
}

Code example source: SonarSource/sonarqube

public int[] toIntArray() {
  // Pad the size to multiple of 4
  int size = (bytes.length / 4) + (bytes.length % 4 == 0 ? 0 : 1);
  ByteBuffer bb = ByteBuffer.allocate(size * 4);
  bb.put(bytes);
  // see https://github.com/mongodb/mongo-java-driver/commit/21c91bd364d38489e0bbe2e390efdb3746ee3fff
  // The Java 9 ByteBuffer class introduces overloaded methods with covariant return types
  // for several methods used by the driver. Without casting, exceptions like this are
  // thrown when executing on Java 8 and lower:
  //   java.lang.NoSuchMethodError: java.nio.ByteBuffer.limit(I)Ljava/nio/ByteBuffer
  // This is because the generated byte code includes the static return type of the method,
  // which is not found on Java 8 and lower because the overloaded methods with covariant
  // return types don't exist there.
  ((Buffer) bb).rewind();
  IntBuffer ib = bb.asIntBuffer();
  int[] result = new int[size];
  ib.get(result);
  return result;
 }

Code example source: org.springframework/spring-core

private Object convertToByteBuffer(@Nullable Object source, TypeDescriptor sourceType) {
  byte[] bytes = (byte[]) (source instanceof byte[] ? source :
      this.conversionService.convert(source, sourceType, BYTE_ARRAY_TYPE));
  if (bytes == null) {
    return ByteBuffer.wrap(new byte[0]);
  }
  ByteBuffer byteBuffer = ByteBuffer.allocate(bytes.length);
  byteBuffer.put(bytes);
  // Extra cast necessary for compiling on JDK 9 plus running on JDK 8, since
  // otherwise the overridden ByteBuffer-returning rewind method would be chosen
  // which isn't available on JDK 8.
  return ((Buffer) byteBuffer).rewind();
}

Code example source: org.mongodb/mongo-java-driver

private ByteBuffer getBufferForInternalBytes() {
  ByteBuffer buffer = ByteBuffer.wrap(bytes, offset, bytes.length - offset).slice();
  buffer.order(ByteOrder.LITTLE_ENDIAN);
  ((Buffer) buffer).limit(buffer.getInt());
  ((Buffer) buffer).rewind();
  return buffer;
}

Code example source: jMonkeyEngine/jmonkeyengine

/**
 * @see Buffer#rewind()
 */
public void rewind() {
  getBuffer().rewind();
}

Code example source: org.jsoup/jsoup

public String body() {
  prepareByteData();
  // charset gets set from header on execute, and from meta-equiv on parse. parse may not have happened yet
  String body;
  if (charset == null)
    body = Charset.forName(DataUtil.defaultCharset).decode(byteData).toString();
  else
    body = Charset.forName(charset).decode(byteData).toString();
  ((Buffer)byteData).rewind(); // cast to avoid covariant return type change in jdk9
  return body;
}

Code example source: stackoverflow.com

public static ByteBuffer clone(ByteBuffer original) {
    ByteBuffer clone = ByteBuffer.allocate(original.capacity());
    original.rewind();//copy from the beginning
    clone.put(original);
    original.rewind();
    clone.flip();
    return clone;
}
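
A brief usage sketch of the helper above (values are illustrative). Note that clone() rewinds the original buffer as a side effect, so a caller that was part-way through reading it loses its position, and the copy covers the full capacity rather than only the bytes written so far:

ByteBuffer original = ByteBuffer.allocate(8);
original.put(new byte[] {1, 2, 3});          // position = 3, limit = 8

ByteBuffer copy = clone(original);           // copies all 8 bytes of capacity, then flips the copy
System.out.println(copy.get());              // 1 -- the copy starts reading at position 0
System.out.println(original.position());     // 0 -- clone() rewound the original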

Code example source: stackoverflow.com

// Create an empty ByteBuffer with a 10 byte capacity
 ByteBuffer bbuf = ByteBuffer.allocate(10);
 // Get the buffer's capacity
 int capacity = bbuf.capacity(); // 10
 // Use the absolute put(int, byte).
 // This method does not affect the position.
 bbuf.put(0, (byte)0xFF); // position=0
 // Set the position
 bbuf.position(5);
 // Use the relative put(byte)
 bbuf.put((byte)0xFF);
 // Get the new position
 int pos = bbuf.position(); // 6
 // Get remaining byte count
 int rem = bbuf.remaining(); // 4
 // Set the limit
 bbuf.limit(7); // remaining=1
 // This convenience method sets the position to 0
 bbuf.rewind(); // remaining=7

Code example source: deeplearning4j/nd4j

/**
 * Load an ndarray from a blob
 *
 * @param blob the blob to load from
 * @return the loaded ndarray
 */
@Override
public INDArray load(Blob blob) throws SQLException {
  if (blob == null)
    return null;
  try(InputStream is = blob.getBinaryStream()) {
    ByteBuffer direct = ByteBuffer.allocateDirect((int) blob.length());
    ReadableByteChannel readableByteChannel = Channels.newChannel(is);
    readableByteChannel.read(direct);
    Buffer byteBuffer = (Buffer) direct;
    byteBuffer.rewind();
    return BinarySerde.toArray(direct);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void reset() {
 checkStream();
 reset(stream);
 finish = false;
 finished = false;
 uncompressedDirectBuf.rewind();
 uncompressedDirectBufOff = uncompressedDirectBufLen = 0;
 keepUncompressedBuf = false;
 compressedDirectBuf.limit(directBufferSize);
 compressedDirectBuf.position(directBufferSize);
 userBufOff = userBufLen = 0;
}

Code example source: deeplearning4j/nd4j

/**
 * Convert an ndarray to a blob
 *
 * @param toConvert the ndarray to convert
 * @return the converted ndarray
 */
@Override
public Blob convert(INDArray toConvert) throws SQLException {
  ByteBuffer byteBuffer = BinarySerde.toByteBuffer(toConvert);
  Buffer buffer = (Buffer) byteBuffer;
  buffer.rewind();
  byte[] arr = new byte[byteBuffer.capacity()];
  byteBuffer.get(arr);
  Connection c = dataSource.getConnection();
  Blob b = c.createBlob();
  b.setBytes(1, arr);
  return b;
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public synchronized void reset() {
 checkStream();
 end(stream);
 stream = init(blockSize, workFactor);
 finish = false;
 finished = false;
 uncompressedDirectBuf.rewind();
 uncompressedDirectBufOff = uncompressedDirectBufLen = 0;
 keepUncompressedBuf = false;
 compressedDirectBuf.limit(directBufferSize);
 compressedDirectBuf.position(directBufferSize);
 userBufOff = userBufLen = 0;
}

Code example source: org.jsoup/jsoup

private static BomCharset detectCharsetFromBom(final ByteBuffer byteData) {
  final Buffer buffer = byteData; // .mark and rewind used to return Buffer, now ByteBuffer, so cast for backward compat
  buffer.mark();
  byte[] bom = new byte[4];
  if (byteData.remaining() >= bom.length) {
    byteData.get(bom);
    buffer.rewind();
  }
  if (bom[0] == 0x00 && bom[1] == 0x00 && bom[2] == (byte) 0xFE && bom[3] == (byte) 0xFF || // BE
    bom[0] == (byte) 0xFF && bom[1] == (byte) 0xFE && bom[2] == 0x00 && bom[3] == 0x00) { // LE
    return new BomCharset("UTF-32", false); // and I hope it's on your system
  } else if (bom[0] == (byte) 0xFE && bom[1] == (byte) 0xFF || // BE
    bom[0] == (byte) 0xFF && bom[1] == (byte) 0xFE) {
    return new BomCharset("UTF-16", false); // in all Javas
  } else if (bom[0] == (byte) 0xEF && bom[1] == (byte) 0xBB && bom[2] == (byte) 0xBF) {
    return new BomCharset("UTF-8", true); // in all Javas
    // 16 and 32 decoders consume the BOM to determine be/le; utf-8 should be consumed here
  }
  return null;
}
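
The get()/rewind() sequence above is effectively a peek: the first bytes are read to test for a byte-order mark, then rewind() moves the position back to 0 so the charset decoder still sees the complete content. A minimal sketch of that peek-then-rewind idea (the input string is illustrative):

ByteBuffer data = ByteBuffer.wrap("\uFEFFhello".getBytes(java.nio.charset.StandardCharsets.UTF_8));
byte[] bom = new byte[3];
if (data.remaining() >= bom.length) {
    data.get(bom);   // consumes the first three bytes
    data.rewind();   // position back to 0, content untouched
}
boolean hasUtf8Bom = bom[0] == (byte) 0xEF && bom[1] == (byte) 0xBB && bom[2] == (byte) 0xBF; // true here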

Code example source: Rajawali/Rajawali

public static int[] getIntArrayFromBuffer(Buffer buffer) {
  int[] array = new int[0];
  if (buffer != null) {
    if (buffer.hasArray()) {
      array = (int[]) buffer.array();
    } else {
      buffer.rewind();
      array = new int[buffer.capacity()];
      if (buffer instanceof IntBuffer) {
        ((IntBuffer) buffer).get(array);
      } else if (buffer instanceof ShortBuffer) {
        int count = 0;
        while (buffer.hasRemaining()) {
          array[count] = (int) (((ShortBuffer) buffer).get());
          ++count;
        }
      }
    }
  }
  return array;
}

Code example source: Rajawali/Rajawali

/**
 * Change a specific subset of the buffer's data at the given offset to the given length.
 *
 * @param bufferInfo
 * @param newData
 * @param index
 * @param size
 * @param resizeBuffer
 */
public void changeBufferData(BufferInfo bufferInfo, Buffer newData, int index, int size, boolean resizeBuffer) {
  newData.rewind();
  GLES20.glBindBuffer(bufferInfo.target, bufferInfo.bufferHandle);
  if (resizeBuffer) {
    bufferInfo.buffer = newData;
    GLES20.glBufferData(bufferInfo.target, size * bufferInfo.byteSize, newData, bufferInfo.usage);
  } else {
    GLES20.glBufferSubData(bufferInfo.target, index * bufferInfo.byteSize, size * bufferInfo.byteSize, newData);
  }
  GLES20.glBindBuffer(bufferInfo.target, 0);
}
