本文整理了Java中org.apache.hadoop.hive.ql.io.orc.Reader.getTypes
方法的一些代码示例,展示了Reader.getTypes
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Reader.getTypes
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.io.orc.Reader
类名称:Reader
方法名:getTypes
[英]Get the list of types contained in the file. The root type is the first type in the list.
[中]获取文件中包含的类型列表。根类型是列表中的第一种类型。
代码示例来源:origin: apache/drill
@Override
int getColumns() {
  // The root type (index 0) is the top-level struct; its subtypes are the columns.
  OrcProto.Type rootType = reader.getTypes().get(0);
  return rootType.getSubtypesCount();
}
}
代码示例来源:origin: apache/drill
int getColumns() {
  // For ACID files the user columns live under the "row" struct,
  // not under the file's root type.
  OrcProto.Type rowType = reader.getTypes().get(OrcRecordUpdater.ROW + 1);
  return rowType.getSubtypesCount();
}
}
代码示例来源:origin: apache/hive
@Override
public int getColumns() {
  // Column count = number of subtypes of the root struct type (index 0).
  OrcProto.Type rootType = getReader().getTypes().get(0);
  return rootType.getSubtypesCount();
}
@Override
代码示例来源:origin: apache/hive
@Override
public final int getColumns() {
  // ACID layout: user columns are the subtypes of the "row" struct,
  // which sits one past OrcRecordUpdater.ROW in the flattened type list.
  OrcProto.Type rowType = getReader().getTypes().get(OrcRecordUpdater.ROW + 1);
  return rowType.getSubtypesCount();
}
代码示例来源:origin: apache/hive
/**
 * Creates a record reader over the byte range [offset, offset + length) of an ORC file.
 *
 * @param file   ORC reader for the underlying file
 * @param conf   Hadoop configuration used when building the row reader
 * @param offset first byte of the range to read
 * @param length number of bytes in the range
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                long offset, long length) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  value = new OrcStruct(numColumns);
  this.reader = OrcInputFormat.createReaderFromFile(file, conf, offset, length);
}
代码示例来源:origin: apache/drill
/**
 * Creates a record reader over the byte range [offset, offset + length) of an ORC file.
 *
 * @param file   ORC reader for the underlying file
 * @param conf   Hadoop configuration used when building the row reader
 * @param offset first byte of the range to read
 * @param length number of bytes in the range
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                long offset, long length) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  value = new OrcStruct(numColumns);
  this.reader = OrcInputFormat.createReaderFromFile(file, conf, offset, length);
}
代码示例来源:origin: apache/hive
/**
 * Creates a record reader over the portion of an ORC file covered by the given split.
 *
 * @param file  ORC reader for the underlying file
 * @param conf  Hadoop configuration used when building the row reader
 * @param split the file split defining the byte range to read
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                FileSplit split) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  this.file = file;
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  this.offset = split.getStart();
  this.length = split.getLength();
  this.reader = createReaderFromFile(file, conf, offset, length);
  this.stats = new SerDeStats();
}
代码示例来源:origin: apache/drill
/**
 * Creates a record reader over the portion of an ORC file covered by the given split.
 *
 * @param file  ORC reader for the underlying file
 * @param conf  Hadoop configuration used when building the row reader
 * @param split the file split defining the byte range to read
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                FileSplit split) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  this.file = file;
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  this.offset = split.getStart();
  this.length = split.getLength();
  this.reader = createReaderFromFile(file, conf, offset, length);
  this.stats = new SerDeStats();
}
代码示例来源:origin: apache/drill
keyWrapper.setVersion(reader.getFileVersion());
keyWrapper.setRowIndexStride(reader.getRowIndexStride());
keyWrapper.setTypes(reader.getTypes());
} else {
stripeIdx++;
代码示例来源:origin: apache/hive
List<OrcProto.Type> types = reader.getTypes();
assertEquals(3, types.size());
assertEquals(OrcProto.Type.Kind.STRUCT, types.get(0).getKind());
代码示例来源:origin: apache/hive
types.add(typeBuilder.build());
Mockito.when(reader.getTypes()).thenReturn(types);
Mockito.when(reader.rowsOptions(Mockito.any(Reader.Options.class), Mockito.any(HiveConf.class)))
.thenReturn(recordReader);
代码示例来源:origin: com.facebook.presto.hive/hive-apache
@Override
int getColumns() {
return reader.getTypes().get(0).getSubtypesCount();
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
int getColumns() {
return reader.getTypes().get(OrcRecordUpdater.ROW + 1).getSubtypesCount();
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Creates a record reader over the byte range [offset, offset + length) of an ORC file.
 *
 * @param file   ORC reader for the underlying file
 * @param conf   Hadoop configuration used when building the row reader
 * @param offset first byte of the range to read
 * @param length number of bytes in the range
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                long offset, long length) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  value = new OrcStruct(numColumns);
  this.reader = OrcInputFormat.createReaderFromFile(file, conf, offset, length);
}
代码示例来源:origin: org.apache.spark/spark-hive_2.10
/**
 * Spark record reader over the byte range [offset, offset + length) of an ORC file.
 * Also captures the file's ObjectInspector for decoding rows later.
 *
 * @param file   ORC reader for the underlying file
 * @param conf   Hadoop configuration used when building the row reader
 * @param offset first byte of the range to read
 * @param length number of bytes in the range
 * @throws IOException if the underlying row reader cannot be created
 */
public SparkOrcNewRecordReader(Reader file, Configuration conf,
    long offset, long length) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  value = new OrcStruct(numColumns);
  this.reader = OrcInputFormat.createReaderFromFile(file, conf, offset, length);
  this.objectInspector = file.getObjectInspector();
}
代码示例来源:origin: org.apache.spark/spark-hive_2.11
/**
 * Spark record reader over the byte range [offset, offset + length) of an ORC file.
 * Also captures the file's ObjectInspector for decoding rows later.
 *
 * @param file   ORC reader for the underlying file
 * @param conf   Hadoop configuration used when building the row reader
 * @param offset first byte of the range to read
 * @param length number of bytes in the range
 * @throws IOException if the underlying row reader cannot be created
 */
public SparkOrcNewRecordReader(Reader file, Configuration conf,
    long offset, long length) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  value = new OrcStruct(numColumns);
  this.reader = OrcInputFormat.createReaderFromFile(file, conf, offset, length);
  this.objectInspector = file.getObjectInspector();
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Creates a record reader over the portion of an ORC file covered by the given split.
 *
 * @param file  ORC reader for the underlying file
 * @param conf  Hadoop configuration used when building the row reader
 * @param split the file split defining the byte range to read
 * @throws IOException if the underlying row reader cannot be created
 */
OrcRecordReader(Reader file, Configuration conf,
                FileSplit split) throws IOException {
  List<OrcProto.Type> types = file.getTypes();
  this.file = file;
  // Root type (index 0) is the top-level struct; its subtype count is the
  // number of columns. Guard against a file whose type list is empty.
  numColumns = types.isEmpty() ? 0 : types.get(0).getSubtypesCount();
  this.offset = split.getStart();
  this.length = split.getLength();
  this.reader = createReaderFromFile(file, conf, offset, length);
  this.stats = new SerDeStats();
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Streams every row of an ORC file to stdout as one JSON document per line.
 *
 * @param conf     Hadoop configuration used to resolve the file system
 * @param filename path of the ORC file to dump
 * @throws IOException   if the file cannot be opened or read
 * @throws JSONException if a row cannot be serialized to JSON
 */
static void printJsonData(Configuration conf,
    String filename) throws IOException, JSONException {
  Path path = new Path(filename);
  Reader reader = OrcFile.createReader(path.getFileSystem(conf), path);
  // Charset overload avoids the checked UnsupportedEncodingException path
  // of the charset-name constructor.
  OutputStreamWriter out = new OutputStreamWriter(
      System.out, java.nio.charset.StandardCharsets.UTF_8);
  RecordReader rows = reader.rows(null);
  try {
    Object row = null;
    List<OrcProto.Type> types = reader.getTypes();
    while (rows.hasNext()) {
      row = rows.next(row);
      JSONWriter writer = new JSONWriter(out);
      printObject(writer, row, types, 0);
      out.write("\n");
      out.flush(); // emit each record immediately so output streams as we read
    }
  } finally {
    rows.close(); // FIX: the record reader was previously leaked on all paths
  }
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Builds a row reader over the byte range [offset, offset + length), applying
 * the configured column projection and search argument.
 */
public static RecordReader createReaderFromFile(Reader file,
                                                Configuration conf,
                                                long offset, long length
                                                ) throws IOException {
  boolean original = isOriginal(file);
  List<OrcProto.Type> schema = file.getTypes();
  Reader.Options readOptions = new Reader.Options().range(offset, length);
  readOptions.include(genIncludedColumns(schema, conf, original));
  setSearchArgument(readOptions, schema, conf, original);
  return file.rowsOptions(readOptions);
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Vectorized reader over the portion of an ORC file covered by the given split,
 * with column projection and search argument applied.
 */
VectorizedOrcRecordReader(Reader file, Configuration conf,
                          FileSplit fileSplit) throws IOException {
  // Record the byte range this reader covers within the file.
  this.offset = fileSplit.getStart();
  this.length = fileSplit.getLength();

  List<OrcProto.Type> schema = file.getTypes();
  Reader.Options readOptions = new Reader.Options();
  readOptions.range(offset, length);
  readOptions.include(OrcInputFormat.genIncludedColumns(schema, conf, true));
  OrcInputFormat.setSearchArgument(readOptions, schema, conf, true);
  this.reader = file.rowsOptions(readOptions);

  try {
    rbCtx = new VectorizedRowBatchCtx();
    rbCtx.init(conf, fileSplit);
  } catch (Exception e) {
    // Batch-context setup failures are non-recoverable here; wrap with cause.
    throw new RuntimeException(e);
  }
}
内容来源于网络,如有侵权,请联系作者删除!