Usage of the org.apache.hadoop.hive.ql.exec.spark.KryoSerializer.serialize() method, with code examples


This article collects Java code examples of the org.apache.hadoop.hive.ql.exec.spark.KryoSerializer.serialize() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a practical reference. Details of KryoSerializer.serialize() are as follows:
Package path: org.apache.hadoop.hive.ql.exec.spark.KryoSerializer
Class name: KryoSerializer
Method name: serialize

About KryoSerializer.serialize

serialize() uses Kryo to encode a Java object into a byte array. Hive on Spark uses it to ship objects that must cross a process boundary, such as the SparkWork plan and the scratch-directory Path, to the remote Spark driver, where the companion deserialize() method rebuilds them.
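
The following minimal round-trip sketch illustrates the idea. It assumes the companion method KryoSerializer.deserialize(byte[], Class), which exists in the Hive codebase alongside serialize(); treat it as an illustration of usage, not as code from the projects quoted below.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.spark.KryoSerializer;

public class KryoRoundTripExample {
  public static void main(String[] args) {
    // Any Kryo-serializable object works; a Hadoop Path mirrors the
    // scratch-dir usage in the examples below.
    Path original = new Path("/tmp/hive-scratch");
    byte[] bytes = KryoSerializer.serialize(original);             // object -> byte[]
    Path restored = KryoSerializer.deserialize(bytes, Path.class); // byte[] -> object
    System.out.println(restored); // prints /tmp/hive-scratch
  }
}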

Code examples

Code example source: apache/hive

private SparkJobRef submit(final DriverContext driverContext, final SparkWork sparkWork) throws Exception {
  final Context ctx = driverContext.getCtx();
  final HiveConf hiveConf = (HiveConf) ctx.getConf();
  refreshLocalResources(sparkWork, hiveConf);
  final JobConf jobConf = new JobConf(hiveConf);
  // update the credential provider location in the jobConf
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  // Create temporary scratch dir
  final Path emptyScratchDir = ctx.getMRTmpPath();
  FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
  fs.mkdirs(emptyScratchDir);
  // make sure NullScanFileSystem can be loaded - HIVE-18442
  jobConf.set("fs." + NullScanFileSystem.getBaseScheme() + ".impl",
      NullScanFileSystem.class.getCanonicalName());
  // Kryo-serialize everything the remote Spark driver needs: the job
  // configuration, the scratch-directory Path, and the SparkWork plan.
  byte[] jobConfBytes = KryoSerializer.serializeJobConf(jobConf);
  byte[] scratchDirBytes = KryoSerializer.serialize(emptyScratchDir);
  byte[] sparkWorkBytes = KryoSerializer.serialize(sparkWork);
  JobStatusJob job = new JobStatusJob(jobConfBytes, scratchDirBytes, sparkWorkBytes);
  if (driverContext.isShutdown()) {
    throw new HiveException("Operation is cancelled.");
  }
  JobHandle<Serializable> jobHandle = remoteClient.submit(job);
  RemoteSparkJobStatus sparkJobStatus = new RemoteSparkJobStatus(remoteClient, jobHandle, sparkClientTimtout);
  return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
}
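
The three byte arrays handed to JobStatusJob are decoded on the remote Spark driver. Below is a hypothetical sketch of what that consuming side can look like, using KryoSerializer's deserialize() and deserializeJobConf() counterparts; it is not the verbatim Hive implementation, which additionally builds and runs the Spark plan.

private static class JobStatusJob implements Job<Serializable> {
  private final byte[] jobConfBytes;
  private final byte[] scratchDirBytes;
  private final byte[] sparkWorkBytes;

  JobStatusJob(byte[] jobConfBytes, byte[] scratchDirBytes, byte[] sparkWorkBytes) {
    this.jobConfBytes = jobConfBytes;
    this.scratchDirBytes = scratchDirBytes;
    this.sparkWorkBytes = sparkWorkBytes;
  }

  @Override
  public Serializable call(JobContext jc) throws Exception {
    // Each byte array goes back through the matching deserializer.
    JobConf localJobConf = KryoSerializer.deserializeJobConf(jobConfBytes);
    Path localScratchDir = KryoSerializer.deserialize(scratchDirBytes, Path.class);
    SparkWork localSparkWork = KryoSerializer.deserialize(sparkWorkBytes, SparkWork.class);
    // ... the real job then generates and executes the Spark plan ...
    return null;
  }
}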

Code example source: apache/drill

private SparkJobRef submit(final DriverContext driverContext, final SparkWork sparkWork) throws Exception {
  final Context ctx = driverContext.getCtx();
  final HiveConf hiveConf = (HiveConf) ctx.getConf();
  refreshLocalResources(sparkWork, hiveConf);
  final JobConf jobConf = new JobConf(hiveConf);
  // update the credential provider location in the jobConf
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  // Create temporary scratch dir
  final Path emptyScratchDir = ctx.getMRTmpPath();
  FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
  fs.mkdirs(emptyScratchDir);
  byte[] jobConfBytes = KryoSerializer.serializeJobConf(jobConf);
  byte[] scratchDirBytes = KryoSerializer.serialize(emptyScratchDir);
  byte[] sparkWorkBytes = KryoSerializer.serialize(sparkWork);
  JobStatusJob job = new JobStatusJob(jobConfBytes, scratchDirBytes, sparkWorkBytes);
  if (driverContext.isShutdown()) {
    throw new HiveException("Operation is cancelled.");
  }
  JobHandle<Serializable> jobHandle = remoteClient.submit(job);
  RemoteSparkJobStatus sparkJobStatus = new RemoteSparkJobStatus(remoteClient, jobHandle, sparkClientTimtout);
  return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
}

Code example source: apache/hive

byte[] scratchDirBytes = KryoSerializer.serialize(tmpDir);
byte[] sparkWorkBytes = KryoSerializer.serialize(sparkTask.getWork());

Code example source: com.facebook.presto.hive/hive-apache

@Override
public SparkJobRef execute(final DriverContext driverContext, final SparkWork sparkWork) throws Exception {
  final Context ctx = driverContext.getCtx();
  final HiveConf hiveConf = (HiveConf) ctx.getConf();
  refreshLocalResources(sparkWork, hiveConf);
  final JobConf jobConf = new JobConf(hiveConf);
  // Create temporary scratch dir
  final Path emptyScratchDir = ctx.getMRTmpPath();
  FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
  fs.mkdirs(emptyScratchDir);
  byte[] jobConfBytes = KryoSerializer.serializeJobConf(jobConf);
  byte[] scratchDirBytes = KryoSerializer.serialize(emptyScratchDir);
  byte[] sparkWorkBytes = KryoSerializer.serialize(sparkWork);
  JobStatusJob job = new JobStatusJob(jobConfBytes, scratchDirBytes, sparkWorkBytes);
  JobHandle<Serializable> jobHandle = remoteClient.submit(job);
  RemoteSparkJobStatus sparkJobStatus = new RemoteSparkJobStatus(remoteClient, jobHandle, sparkClientTimtout);
  return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
}
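
Note that every example serializes the JobConf with serializeJobConf() rather than the generic serialize(). Hadoop Configuration objects are Writable, and writing them through Hadoop's own mechanism avoids pushing a large, classloader-laden object through Kryo. A minimal sketch of that idea follows; it is an assumption about the approach, not the verbatim Hive source.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.mapred.JobConf;

public final class JobConfBytes {
  // JobConf implements Writable, so Hadoop's native write() produces
  // the byte representation without involving Kryo at all.
  public static byte[] serializeJobConf(JobConf jobConf) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    jobConf.write(new DataOutputStream(out));
    return out.toByteArray();
  }
}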
