本文整理了Java中org.apache.hadoop.mapreduce.Mapper.cleanup()
方法的一些代码示例,展示了Mapper.cleanup()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Mapper.cleanup()
方法的具体详情如下:
包路径:org.apache.hadoop.mapreduce.Mapper
类名称:Mapper
方法名:cleanup
[英]Called once at the end of the task.
[中]任务结束时调用一次。
代码示例来源:origin: apache/hbase
/**
 * Per-task teardown hook. This mapper needs no extra teardown of its own,
 * so it simply delegates to the default {@code Mapper.cleanup}.
 */
@Override
protected void cleanup(Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation>.Context ctx)
    throws IOException, InterruptedException {
  super.cleanup(ctx);
}
代码示例来源:origin: thinkaurelius/titan
/**
 * Called once at the end of the task: runs the default teardown, then
 * notifies the owning job that this worker's iteration has finished,
 * handing over the collected metrics.
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  super.cleanup(ctx);
  job.workerIterationEnd(metrics);
}
代码示例来源:origin: apache/ignite
/** {@inheritDoc} */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  // Fault-injection hook: lets tests simulate a failure during map cleanup.
  HadoopErrorSimulator.instance().onMapCleanup();
}
代码示例来源:origin: apache/phoenix
/**
 * Closes the JDBC connection (if one was opened) after the default teardown.
 * A failure to close is logged and swallowed, matching the original
 * best-effort intent.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  if (connection != null) {
    try {
      connection.close();
    } catch (SQLException e) {
      // Bug fix: log the full exception (with stack trace) instead of only
      // e.getMessage() — the original call dropped the cause entirely.
      LOG.error("Error while closing connection in the PhoenixIndexMapper class ", e);
    }
  }
}
}
代码示例来源:origin: apache/phoenix
/**
 * Flushes any buffered records, then closes both JDBC connections.
 * A SQLException from the flush or a close is logged and rethrown wrapped
 * in an IOException, as before.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  if (connection == null) {
    return;
  }
  try {
    // Flush whatever is still buffered before the connections go away.
    processBatch(context);
    connection.close();
    if (outputConn != null) {
      outputConn.close();
    }
  } catch (SQLException e) {
    LOG.error("Error while closing connection in the PhoenixIndexMapper class ", e);
    // Bug fix: the original leaked both connections when processBatch()
    // threw, because the close calls were only reached on success.
    // JDBC close() is a no-op on an already-closed connection, so re-closing
    // here is safe when the failure came from a close itself.
    closeQuietly(outputConn);
    closeQuietly(connection);
    throw new IOException(e);
  }
}

/**
 * Closes {@code c} if non-null, logging rather than propagating any failure,
 * so best-effort cleanup in the error path cannot mask the root cause.
 */
private void closeQuietly(AutoCloseable c) {
  if (c == null) {
    return;
  }
  try {
    c.close();
  } catch (Exception e) {
    LOG.error("Error while closing connection in the PhoenixIndexMapper class ", e);
  }
}
代码示例来源:origin: apache/phoenix
super.cleanup(context);
} catch (SQLException e) {
LOG.error(" Error {} while read/write of a record ", e.getMessage());
代码示例来源:origin: com.aliyun.phoenix/ali-phoenix-core
/**
 * Runs the default teardown, then closes this mapper's output stream
 * if one was opened.
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  super.cleanup(ctx);
  if (outputStream == null) {
    return;
  }
  outputStream.close();
}
}
代码示例来源:origin: org.apache.mahout/mahout-utils
/**
 * Flushes the buffered most-distant points (keyed by integer id) to the
 * task output, then runs the default teardown.
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  for (Map.Entry<Integer, WeightedVectorWritable> e : mostDistantPoints.entrySet()) {
    IntWritable key = new IntWritable(e.getKey());
    ctx.write(key, e.getValue());
  }
  super.cleanup(ctx);
}
代码示例来源:origin: aseldawy/spatialhadoop2
/** Runs the default teardown, then closes the triangulation writer. */
@Override
protected void cleanup(Mapper<Rectangle, Iterable<S>, IntWritable, Triangulation>.Context ctx)
    throws IOException, InterruptedException {
  super.cleanup(ctx);
  writer.close(ctx);
}
}
代码示例来源:origin: ukwa/webarchive-discovery
/** Runs the default teardown, then emits this mapper's accumulated final result. */
@Override
protected void cleanup(Mapper<Path, BytesWritable, Text, Text>.Context ctx)
    throws IOException, InterruptedException {
  log.debug("Cleaning up and emitting final result...");
  super.cleanup(ctx);
  this.emit(ctx);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient
/**
 * Records that the map-side cleanup hook ran, then delegates to the default.
 * NOTE(review): mapCleanup is presumably inspected by the surrounding test
 * harness — confirm against the enclosing class.
 */
@Override  // fix: the overriding method was missing @Override
protected void cleanup(Context context)
    throws IOException, InterruptedException {
  mapCleanup = true;
  super.cleanup(context);
}
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient
/**
 * Marks that map cleanup ran before delegating. The raw {@code Context}
 * parameter type forces the rawtypes/unchecked suppression.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
protected void cleanup(org.apache.hadoop.mapreduce.Mapper.Context ctx)
    throws IOException, InterruptedException {
  mapCleanup = true;
  super.cleanup(ctx);
}
代码示例来源:origin: twitter/hraven
/**
 * Closes the HBase connection (if open), then runs the default teardown.
 */
@Override
protected void cleanup(
    Mapper<JobFile, FileStatus, ImmutableBytesWritable, Put>.Context context)
    throws IOException, InterruptedException {
  try {
    if (hbaseConnection != null) {
      hbaseConnection.close();
    }
  } finally {
    // Bug fix: run the superclass cleanup even when close() throws, so the
    // framework teardown is never skipped.
    super.cleanup(context);
  }
}
代码示例来源:origin: locationtech/geowave
/**
 * Runs the default teardown, then flushes any Avro feature batches still
 * buffered in this mapper.
 */
@Override
protected void cleanup(
    final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context ctx)
    throws IOException, InterruptedException {
  super.cleanup(ctx);
  writeRemainingAvroBatches(ctx);
}
代码示例来源:origin: locationtech/geowave
/** Closes the OSM provider, then runs the default teardown. */
@Override
protected void cleanup(final Context context) throws IOException, InterruptedException {
  try {
    osmProvider.close();
  } finally {
    // Bug fix: run the superclass cleanup even when close() throws, so the
    // framework teardown is never skipped.
    super.cleanup(context);
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-distcp
/**
 * Runs the default teardown, then records the achieved copy bandwidth
 * (bytes per second) in the job counters.
 */
@Override
protected void cleanup(Context context)
    throws IOException, InterruptedException {
  super.cleanup(context);
  // Elapsed wall-clock time for this task, in whole seconds.
  long elapsedSecs = (System.currentTimeMillis() - startEpoch) / 1000;
  // Guard against division by zero for sub-second tasks.
  long divisor = (elapsedSecs == 0 ? 1 : elapsedSecs);
  incrementCounter(context, Counter.BANDWIDTH_IN_BYTES, totalBytesCopied / divisor);
}
}
代码示例来源:origin: org.apache.mahout/mahout-core
/**
 * Emits the aggregated per-feature and per-label weight vectors, then runs
 * the default teardown.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  if (weightsPerFeature != null) {
    // NOTE(review): a null weightsPerFeature presumably means no input was
    // mapped — confirm against the map() implementation.
    context.write(new Text(TrainNaiveBayesJob.WEIGHTS_PER_FEATURE),
        new VectorWritable(weightsPerFeature));
    context.write(new Text(TrainNaiveBayesJob.WEIGHTS_PER_LABEL),
        new VectorWritable(weightsPerLabel));
  }
  super.cleanup(context);
}
}
代码示例来源:origin: org.apache.mahout/mahout-core
/**
 * Emits the per-label theta normalizer produced by the trainer, then runs
 * the default teardown.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  Text key = new Text(TrainNaiveBayesJob.LABEL_THETA_NORMALIZER);
  VectorWritable value = new VectorWritable(trainer.retrievePerLabelThetaNormalizer());
  context.write(key, value);
  super.cleanup(context);
}
}
代码示例来源:origin: MKLab-ITI/multimedia-indexing
/**
 * Runs the default teardown, then shuts down the downloader and vectorizer
 * helpers (in that order, as the original did).
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  super.cleanup(ctx);
  downloader.shutDown();
  vectorizer.shutDown();
}
代码示例来源:origin: com.moz.fiji.examples.phonebook/fiji-phonebook
/** {@inheritDoc} */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  // Best-effort release: closeOrLog/releaseOrLog log failures instead of
  // throwing, so every resource gets a release attempt.
  ResourceUtils.closeOrLog(mTableWriter);
  ResourceUtils.releaseOrLog(mTable);
  ResourceUtils.releaseOrLog(mFiji);
  super.cleanup(context);
}
}
内容来源于网络,如有侵权,请联系作者删除!