本文整理了Java中org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx.init()
方法的一些代码示例,展示了VectorizedRowBatchCtx.init()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。VectorizedRowBatchCtx.init()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx
类名称:VectorizedRowBatchCtx
方法名:init
[英]Initializes the VectorizedRowBatch context based on a scratch column type map and object inspector.
[中]基于scratch列类型映射和对象检查器初始化VectorizedRowBatch上下文。
代码示例来源:origin: apache/drill
/**
 * Initializes this operator for vectorized execution: builds the
 * VectorizedRowBatchCtx from the operator's output object inspector and the
 * scratch column type names recorded in the vectorization output context,
 * allocates the reusable output batch, and prepares the per-inspector
 * VectorAssignRow cache.
 *
 * @param hconf the Hadoop configuration for this task
 * @throws HiveException if superclass initialization or batch-context setup fails
 */
@Override
public void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  vrbCtx = new VectorizedRowBatchCtx();
  vrbCtx.init((StructObjectInspector) this.outputObjInspector, vOutContext.getScratchColumnTypeNames());
  outputBatch = vrbCtx.createVectorizedRowBatch();
  // Diamond operator (Java 7+): the type arguments are inferred from the field's declared type.
  outputVectorAssignRowMap = new HashMap<>();
}
代码示例来源:origin: apache/hive
vrbCtx.init(oi, new String[0]);
} catch (HiveException e) {
throw new IOException(e);
代码示例来源:origin: apache/hive
/**
 * Initializes this operator for vectorized execution: builds the
 * VectorizedRowBatchCtx from the operator's output object inspector plus the
 * scratch column type names and their physical data-type variations from the
 * vectorization output context, allocates the reusable output batch, and
 * prepares the per-inspector VectorAssignRow cache.
 *
 * @param hconf the Hadoop configuration for this task
 * @throws HiveException if superclass initialization or batch-context setup fails
 */
@Override
public void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  vrbCtx = new VectorizedRowBatchCtx();
  vrbCtx.init((StructObjectInspector) this.outputObjInspector,
      vOutContext.getScratchColumnTypeNames(), vOutContext.getScratchDataTypePhysicalVariations());
  outputBatch = vrbCtx.createVectorizedRowBatch();
  // Diamond operator (Java 7+): the type arguments are inferred from the field's declared type.
  outputVectorAssignRowMap = new HashMap<>();
}
代码示例来源:origin: apache/drill
vrbCtx.init((StructObjectInspector) this.outputObjInspector, vOutContext.getScratchColumnTypeNames());
代码示例来源:origin: apache/hive
vrbCtx.init((StructObjectInspector) this.outputObjInspector, vOutContext.getScratchColumnTypeNames());
代码示例来源:origin: apache/hive
if (isVectorized) {
VectorizedRowBatchCtx vectorizedRowBatchCtx = new VectorizedRowBatchCtx();
vectorizedRowBatchCtx.init(structOI, new String[0]);
mapWork.setVectorizedRowBatchCtx(vectorizedRowBatchCtx);
代码示例来源:origin: apache/hive
/**
 * Builds a minimal vector-mode MapWork — a batch context with no scratch
 * columns, derived from the configuration's struct object inspector — and
 * registers it in the given configuration via Utilities.setMapWork().
 *
 * @param conf the job configuration to populate
 * @throws HiveException if the batch context cannot be initialized
 */
protected static void initialVectorizedRowBatchCtx(Configuration conf) throws HiveException {
  VectorizedRowBatchCtx rbCtx = new VectorizedRowBatchCtx();
  // No scratch columns are required here, hence the empty type-name array.
  rbCtx.init(createStructObjectInspector(conf), new String[0]);

  MapWork mapWork = new MapWork();
  mapWork.setVectorMode(true);
  mapWork.setVectorizedRowBatchCtx(rbCtx);
  Utilities.setMapWork(conf, mapWork);
}
代码示例来源:origin: apache/drill
if (isVectorOutput) {
vrbCtx = new VectorizedRowBatchCtx();
vrbCtx.init((StructObjectInspector) outputObjInspector, vOutContext.getScratchColumnTypeNames());
outputBatch = vrbCtx.createVectorizedRowBatch();
vectorAssignRow = new VectorAssignRow();
代码示例来源:origin: apache/hive
batchContext.init(source.rowStructObjectInspector(), emptyScratchTypeNames);
VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
代码示例来源:origin: apache/hive
batchContext.init(source.rowStructObjectInspector(), emptyScratchTypeNames);
VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
代码示例来源:origin: apache/hive
batchContext.init(source.rowStructObjectInspector(), emptyScratchTypeNames);
VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Constructs a vectorized Parquet reader: wraps the real Parquet input format
 * in a ParquetRecordReaderWrapper and initializes a VectorizedRowBatchCtx
 * from the configuration and split.
 *
 * NOTE(review): rbCtx.init(conf, split) presumably reads vectorization
 * metadata (MapWork) from the job configuration — confirm against callers.
 */
public VectorizedParquetRecordReader(
ParquetInputFormat<ArrayWritable> realInput,
FileSplit split,
JobConf conf, Reporter reporter) throws IOException, InterruptedException {
internalReader = new ParquetRecordReaderWrapper(
realInput,
split,
conf,
reporter);
try {
rbCtx = new VectorizedRowBatchCtx();
rbCtx.init(conf, split);
} catch (Exception e) {
// Broad catch: init declares several checked exceptions; all are fatal here.
throw new RuntimeException(e);
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Wraps a row-at-a-time ACID ORC reader and initializes a
 * VectorizedRowBatchCtx so the rows can be re-assembled into vectorized
 * batches.
 *
 * @param inner the ACID row reader being wrapped
 * @param conf  job configuration carrying the vectorization metadata
 * @param split the file split being read
 * @throws IOException if the batch context cannot be initialized
 */
VectorizedOrcAcidRowReader(AcidInputFormat.RowReader<OrcStruct> inner,
    Configuration conf,
    FileSplit split) throws IOException {
  this.innerReader = inner;
  this.key = inner.createKey();
  this.rowBatchCtx = new VectorizedRowBatchCtx();
  this.value = inner.createValue();
  this.objectInspector = inner.getObjectInspector();
  try {
    rowBatchCtx.init(conf, split);
  } catch (ClassNotFoundException | SerDeException | InstantiationException
      | IllegalAccessException | HiveException e) {
    // Multi-catch (Java 7+) collapses five byte-identical handlers into one;
    // the cause is preserved so the original failure remains diagnosable.
    throw new IOException("Failed to initialize context", e);
  }
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Initializes this operator for vectorized execution: builds the
 * VectorizedRowBatchCtx from the scratch column type map and the operator's
 * output object inspector, allocates the reusable output batch, and prepares
 * the per-inspector VectorAssignRowSameBatch cache.
 *
 * @param hconf the Hadoop configuration for this task
 * @return the futures produced by superclass initialization
 * @throws HiveException if superclass initialization or batch-context setup fails
 */
@Override
public Collection<Future<?>> initializeOp(Configuration hconf) throws HiveException {
  Collection<Future<?>> result = super.initializeOp(hconf);
  vrbCtx = new VectorizedRowBatchCtx();
  vrbCtx.init(vOutContext.getScratchColumnTypeMap(), (StructObjectInspector) this.outputObjInspector);
  outputBatch = vrbCtx.createVectorizedRowBatch();
  // Diamond operator (Java 7+): the type arguments are inferred from the field's declared type.
  outputVectorAssignRowMap = new HashMap<>();
  return result;
}
代码示例来源:origin: org.apache.hive/hive-llap-server
vrbCtx.init(oi, new String[0]);
} catch (HiveException e) {
throw new IOException(e);
代码示例来源:origin: com.facebook.presto.hive/hive-apache
vrbCtx.init(vOutContext.getScratchColumnTypeMap(), (StructObjectInspector) this.outputObjInspector);
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Constructs a vectorized ORC reader over the given split: configures the
 * reader options (byte range, included columns, search argument) from the
 * configuration, opens the row iterator, and initializes a
 * VectorizedRowBatchCtx from the configuration and split.
 */
VectorizedOrcRecordReader(Reader file, Configuration conf,
FileSplit fileSplit) throws IOException {
List<OrcProto.Type> types = file.getTypes();
Reader.Options options = new Reader.Options();
this.offset = fileSplit.getStart();
this.length = fileSplit.getLength();
// Restrict reading to this split's byte range only.
options.range(offset, length);
// Column projection and predicate pushdown are both derived from conf.
options.include(OrcInputFormat.genIncludedColumns(types, conf, true));
OrcInputFormat.setSearchArgument(options, types, conf, true);
this.reader = file.rowsOptions(options);
try {
rbCtx = new VectorizedRowBatchCtx();
rbCtx.init(conf, fileSplit);
} catch (Exception e) {
// Broad catch: init declares several checked exceptions; all are fatal here.
throw new RuntimeException(e);
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
batchContext.init(vectorScratchColumnTypeMap, (StructObjectInspector) rowObjectInspector);
batch = batchContext.createVectorizedRowBatch();
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Constructs a vectorized RCFile reader over the given split: opens the
 * underlying RCFile.Reader, positions it at the split's first sync point
 * (optionally using the static sync-position cache), and initializes a
 * VectorizedRowBatchCtx from the configuration and split.
 */
public VectorizedRCFileRecordReader(Configuration conf, FileSplit split)
throws IOException {
Path path = split.getPath();
FileSystem fs = path.getFileSystem(conf);
this.in = new RCFile.Reader(fs, path, conf);
this.end = split.getStart() + split.getLength();
this.conf = conf;
this.split = split;
// Whether to reuse previously discovered sync positions across readers.
useCache = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEUSERCFILESYNCCACHE);
if (split.getStart() > in.getPosition()) {
// -1 means no cached sync position for this split.
long oldSync = useCache ? syncCache.get(split) : -1;
if (oldSync == -1) {
in.sync(split.getStart()); // sync to start
} else {
// Jump straight to the cached sync point, skipping the scan.
in.seek(oldSync);
}
}
// Record the actual start position after syncing; the split may begin mid-record.
this.start = in.getPosition();
more = start < end;
try {
rbCtx = new VectorizedRowBatchCtx();
rbCtx.init(conf, split);
} catch (Exception e) {
// Broad catch: init declares several checked exceptions; all are fatal here.
throw new RuntimeException(e);
}
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
if (isVectorOutput) {
vrbCtx = new VectorizedRowBatchCtx();
vrbCtx.init(vOutContext.getScratchColumnTypeMap(), (StructObjectInspector) outputObjInspector);
outputBatch = vrbCtx.createVectorizedRowBatch();
vectorAssignRowSameBatch = new VectorAssignRowSameBatch();
内容来源于网络,如有侵权,请联系作者删除!