Usage of org.apache.hadoop.hive.ql.io.orc.Reader.rows() with code examples


This article collects Java code examples for the org.apache.hadoop.hive.ql.io.orc.Reader.rows method and shows how it is used in practice. The examples are extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Reader.rows method:
Package: org.apache.hadoop.hive.ql.io.orc
Class: Reader
Method: rows

About Reader.rows

Create a RecordReader that reads everything with the default options.
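
Before the excerpts below, here is a minimal, self-contained sketch of the typical read loop. The file path is a placeholder; the sketch assumes an existing ORC file readable through the Hive ORC API:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;

public class OrcRowsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder path: point this at an existing ORC file.
    Reader reader = OrcFile.createReader(new Path("/tmp/example.orc"),
        OrcFile.readerOptions(conf));
    // rows() returns a RecordReader over all rows and all columns
    // with the default options.
    RecordReader rows = reader.rows();
    Object row = null;
    while (rows.hasNext()) {
      // Passing the previous row lets the reader reuse the object.
      row = rows.next(row);
      System.out.println(row);
    }
    rows.close();
  }
}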

Code examples

Code example source: prestodb/presto

private static void assertFileContentsOrcHive(
    Type type,
    TempFile tempFile,
    Iterable<?> expectedValues)
    throws Exception
{
  JobConf configuration = new JobConf(new Configuration(false));
  configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
  configuration.setBoolean(READ_ALL_COLUMNS, false);
  Reader reader = OrcFile.createReader(
      new Path(tempFile.getFile().getAbsolutePath()),
      new ReaderOptions(configuration));
  org.apache.hadoop.hive.ql.io.orc.RecordReader recordReader = reader.rows();
  StructObjectInspector rowInspector = (StructObjectInspector) reader.getObjectInspector();
  StructField field = rowInspector.getStructFieldRef("test");
  Iterator<?> iterator = expectedValues.iterator();
  Object rowData = null;
  while (recordReader.hasNext()) {
    rowData = recordReader.next(rowData);
    Object expectedValue = iterator.next();
    Object actualValue = rowInspector.getStructFieldData(rowData, field);
    actualValue = decodeRecordReaderValue(type, actualValue);
    assertColumnValueEquals(type, actualValue, expectedValue);
  }
  assertFalse(iterator.hasNext());
}

Code example source: apache/hive

private ArrayList<SampleRec> dumpBucket(Path orcFile) throws IOException {
  org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(new Configuration());
  Reader reader = OrcFile.createReader(orcFile,
      OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  StructObjectInspector inspector = (StructObjectInspector) reader
      .getObjectInspector();
  System.out.format("Found Bucket File : %s \n", orcFile.getName());
  ArrayList<SampleRec> result = new ArrayList<SampleRec>();
  while (rows.hasNext()) {
    Object row = rows.next(null);
    // Field 5 of the deserialized ACID delta record holds the actual row payload.
    SampleRec rec = (SampleRec) deserializeDeltaFileRow(row, inspector)[5];
    result.add(rec);
  }
  return result;
}

Code example source: apache/hive

// Excerpt: the opening createReader call, truncated in the original, is restored here.
Reader reader = OrcFile.createReader(testFilePath,
    OrcFile.readerOptions(conf).filesystem(localFs));
RecordReader rows = reader.rows();
ObjectInspector orcOi = reader.getObjectInspector();
// The original excerpt is truncated here, mid-statement:
ObjectInspector stoi = TypeInfoUtils

Code example source: apache/hive

// Excerpt: the original begins mid-statement with a check of
// PrimitiveObjectInspector.PrimitiveCategory.STRING; the reader setup is omitted.
RecordReader rows = reader.rows();
Object row = rows.next(null);

Code example source: apache/hive

Reader reader = OrcFile.createReader(testFilePath,
    OrcFile.readerOptions(conf));
// vreader is a second Reader opened earlier in the test (not shown in this excerpt).
RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows();
RecordReaderImpl rr = (RecordReaderImpl) reader.rows();
VectorizedRowBatch batch = reader.getSchema().createRowBatchV2();
OrcStruct row = null;

Code example source: apache/hive

@Test
public void emptyFile() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector
        (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .inspector(inspector)
          .stripeSize(1000)
          .compress(CompressionKind.NONE)
          .bufferSize(100));
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath,
      OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(false, reader.rows().hasNext());
  assertEquals(CompressionKind.NONE, reader.getCompression());
  assertEquals(0, reader.getNumberOfRows());
  assertEquals(0, reader.getCompressionSize());
  assertEquals(false, reader.getMetadataKeys().iterator().hasNext());
  assertEquals(3, reader.getContentLength());
  assertEquals(false, reader.getStripes().iterator().hasNext());
}

Code example source: apache/hive

Reader reader = OrcFile.createReader(testFilePath,
  OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
rand = new Random(12);
OrcStruct row = null;

Code example source: apache/hive

writer.close();
RecordReader reader = OrcFile.createReader(testFilePath,
  OrcFile.readerOptions(conf)).rows();
assertEquals(true, reader.hasNext());
OrcStruct orcrow = (OrcStruct) reader.next(null);

Code example source: apache/hive

RecordReader rows1 = reader.rows(new boolean[]{true, true, false});
RecordReader rows2 = reader.rows(new boolean[]{true, false, true});
r1 = new Random(1);
r2 = new Random(2);
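
The rows(boolean[] include) overload used above performs column projection: the array holds one flag per column in the flattened type tree, where index 0 is the root struct and subsequent indexes follow the schema's fields in order. A minimal sketch, assuming a hypothetical ORC file at a placeholder path with schema struct<a:int,b:string>:

Configuration conf = new Configuration();
Reader reader = OrcFile.createReader(new Path("/tmp/two_cols.orc"),  // placeholder path
    OrcFile.readerOptions(conf));
// Index 0 = root struct, 1 = column "a", 2 = column "b";
// this projection reads only column "a".
RecordReader rows = reader.rows(new boolean[]{true, true, false});
Object row = null;
while (rows.hasNext()) {
  row = rows.next(row);
  // Columns that are not included are simply not materialized.
}
rows.close();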

Code example source: apache/hive

assertEquals(true, stripe.getDataLength() != 0);
assertEquals(0, stripe.getIndexLength());
RecordReader rows = reader.rows();
rand = new Random(24);
OrcStruct row = null;

Code example source: apache/hive

@Test
public void testBitPack64Large() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(Long.class,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  int size = 1080832;
  long[] inp = new long[size];
  Random rand = new Random(1234);
  for (int i = 0; i < size; i++) {
    inp[i] = rand.nextLong();
  }
  List<Long> input = Lists.newArrayList(Longs.asList(inp));
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).compress(CompressionKind.ZLIB));
  for (Long l : input) {
    writer.addRow(l);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    Assert.assertEquals(input.get(idx++).longValue(), ((LongWritable) row).get());
  }
}

Code example source: apache/hive

RecordReader rows = reader.rows();
int idx = 0;
while (rows.hasNext()) {

Code example source: apache/hive

StringObjectInspector st = (StringObjectInspector) readerInspector.
  getStructFieldRef("string1").getFieldObjectInspector();
RecordReader rows = reader.rows();
Object row = rows.next(null);
assertNotNull(row);

Code example source: apache/hive

assertEquals(5000, ((StringColumnStatistics)ss3.getColumnStatistics()[2]).getSum());
RecordReaderImpl recordReader = (RecordReaderImpl) reader.rows();
OrcProto.RowIndex[] index = recordReader.readRowIndex(0, null, null).getRowGroupIndex();
assertEquals(3, index.length);
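
The excerpt above drops down to the internal RecordReaderImpl to inspect row-group indexes. A hedged sketch of that pattern, assuming the same internal Hive API as the excerpt (the arguments are the stripe number, plus null include/sarg column arrays meaning "all columns"):

RecordReaderImpl impl = (RecordReaderImpl) reader.rows();
// Read the row-group index of stripe 0 for all columns.
OrcProto.RowIndex[] index = impl.readRowIndex(0, null, null).getRowGroupIndex();
for (int col = 0; col < index.length; ++col) {
  if (index[col] != null) {
    // Each entry corresponds to one row group within the stripe.
    System.out.println("column " + col + ": " + index[col].getEntryCount() + " row groups");
  }
}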

Code example source: apache/hive

HiveDecimalObjectInspector doi = (HiveDecimalObjectInspector) readerInspector.
  getStructFieldRef("dec").getFieldObjectInspector();
RecordReader rows = reader.rows();
while (rows.hasNext()) {
 Object row = rows.next(null);

Code example source: apache/hive

HiveDecimalObjectInspector doi = (HiveDecimalObjectInspector) readerInspector.
  getStructFieldRef("dec").getFieldObjectInspector();
RecordReader rows = reader.rows();
int idx = 0;
while (rows.hasNext()) {

Code example source: apache/hive

Reader reader = OrcFile.createReader(file,
  OrcFile.readerOptions(conf));
RecordReader rows = reader.rows();
for (int year = minYear; year < maxYear; ++year) {
 for(int ms = 1000; ms < 2000; ++ms) {

Code example source: apache/hive

assertEquals(1, reader.getNumberOfRows());
RecordReader rows = reader.rows();
assertEquals(2, reader.getNumberOfRows());
rows = reader.rows();
assertEquals(true, rows.hasNext());
row = (OrcStruct) rows.next(null);
