Usage of the org.apache.pig.impl.util.ObjectSerializer.deserialize() method, with code examples

This article collects a number of code examples for the Java method org.apache.pig.impl.util.ObjectSerializer.deserialize(), showing how the method is used in practice. The examples are extracted from selected open-source projects hosted on GitHub/Stack Overflow/Maven and should serve as a useful reference. The details of ObjectSerializer.deserialize() are as follows:

Package: org.apache.pig.impl.util
Class: ObjectSerializer
Method: deserialize

About ObjectSerializer.deserialize

ObjectSerializer is Pig's utility for encoding a java.io.Serializable object into a String (via serialize()) so that it can be stored in a Hadoop Configuration or a java.util.Properties object. deserialize() reverses that encoding and returns the original object, which the caller casts to the expected type.
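
As a quick illustration, here is a minimal, self-contained round trip. The class name and the property key "my.example.key" are invented for this sketch:

import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;

import org.apache.pig.impl.util.ObjectSerializer;

public class ObjectSerializerRoundTrip {
  public static void main(String[] args) throws IOException {
    // Any java.io.Serializable value can make the round trip.
    HashMap<String, Integer> original = new HashMap<>();
    original.put("answer", 42);

    // serialize() encodes the object into a String that fits in a
    // Hadoop Configuration or a java.util.Properties instance.
    Properties props = new Properties();
    props.setProperty("my.example.key", ObjectSerializer.serialize(original));

    // deserialize() reverses the encoding; the caller casts the result.
    @SuppressWarnings("unchecked")
    HashMap<String, Integer> copy = (HashMap<String, Integer>)
        ObjectSerializer.deserialize(props.getProperty("my.example.key"));
    System.out.println(copy); // {answer=42}
  }
}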

Code examples

Code example from: apache/hive

@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
  this.writer = writer;
  computedSchema = (HCatSchema) ObjectSerializer.deserialize(
      UDFContext.getUDFContext()
          .getUDFProperties(this.getClass(), new String[]{sign})
          .getProperty(COMPUTED_OUTPUT_SCHEMA));
}
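
All of the examples below follow the same pattern: the value being deserialized was serialized earlier on the Pig front end and stashed in the UDF context (or the job configuration). A minimal sketch of the write side for this example, reusing its identifiers; this is an assumed counterpart, not verbatim HCatalog source:

// Assumed front-end counterpart: serialize the computed schema into the same
// UDFContext properties that prepareToWrite() above reads back on the backend.
Properties udfProps = UDFContext.getUDFContext()
    .getUDFProperties(this.getClass(), new String[]{sign});
udfProps.setProperty(COMPUTED_OUTPUT_SCHEMA, ObjectSerializer.serialize(computedSchema));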

Code example from: apache/hive

// Excerpt (the snippet is truncated in the source listing): restore the Pig
// schema that was cached earlier in the UDF context properties.
Schema schema = (Schema) ObjectSerializer.deserialize(udfProps.getProperty(PIG_SCHEMA));
if (schema != null) {
  pigSchema = schema;
}
Code example from: apache/phoenix

@SuppressWarnings("unchecked")
@Override
public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
  this.reader = reader;
  final String resourceSchemaAsStr = getValueFromUDFContext(this.contextSignature,RESOURCE_SCHEMA_SIGNATURE);
  if (resourceSchemaAsStr == null) {
    throw new IOException("Could not find schema in UDF context");
  }
  schema = (ResourceSchema)ObjectSerializer.deserialize(resourceSchemaAsStr); 
}

Code example from: forcedotcom/phoenix

/**
 * Parse the HBase table name and configure job
 */
@Override
public void setStoreLocation(String location, Job job) throws IOException {
  String prefix = "hbase://";
  if (location.startsWith(prefix)) {
    tableName = location.substring(prefix.length());
  }
  config = new PhoenixPigConfiguration(job.getConfiguration());
  config.configure(server, tableName, batchSize);
  String serializedSchema = getUDFProperties().getProperty(contextSignature + SCHEMA);
  if (serializedSchema != null) {
    schema = (ResourceSchema) ObjectSerializer.deserialize(serializedSchema);
  }
}

Code example from: apache/phoenix

/**
 * Parse the HBase table name and configure job
 */
@Override
public void setStoreLocation(String location, Job job) throws IOException {
  String tableSchema = location.substring(PHOENIX_TABLE_NAME_SCHEME.length());
  final TableSchemaParserFunction parseFunction = new TableSchemaParserFunction();
  Pair<String,String> pair =  parseFunction.apply(tableSchema);
  PhoenixConfigurationUtil.loadHBaseConfiguration(job);
  config = job.getConfiguration();
  config.set(HConstants.ZOOKEEPER_QUORUM, server);
  String tableName = pair.getFirst();
  String columns = pair.getSecond(); 
  if(columns != null && columns.length() > 0) {
    PhoenixConfigurationUtil.setUpsertColumnNames(config, columns.split(","));
  }
  PhoenixConfigurationUtil.setPhysicalTableName(config,tableName);
  PhoenixConfigurationUtil.setOutputTableName(config,tableName);
  PhoenixConfigurationUtil.setBatchSize(config,batchSize);
  String serializedSchema = getUDFProperties().getProperty(contextSignature + SCHEMA);
  if (serializedSchema != null) {
    schema = (ResourceSchema) ObjectSerializer.deserialize(serializedSchema);
  }
 }

Code example from: org.apache.pig/pig

public void deserializeForSpark(String udfConfsStr, String clientSysPropsStr) throws IOException {
    if (udfConfsStr != null && clientSysPropsStr != null) {
        udfConfs = (HashMap<UDFContextKey, Properties>) ObjectSerializer.deserialize(udfConfsStr);
        clientSysProps = (Properties) ObjectSerializer.deserialize(clientSysPropsStr);
    }
}

Code example from: org.apache.pig/pig

/**
 * Populate the udfConfs field.  This function is intended to
 * be called by Map.configure or Reduce.configure on the backend.
 * It assumes that addJobConf has already been called.
 * @throws IOException if underlying deserialization throws it
 */
@SuppressWarnings("unchecked")
public void deserialize() throws IOException {
  udfConfs = (HashMap<UDFContextKey, Properties>)ObjectSerializer.deserialize(jconf.get(UDF_CONTEXT));
  clientSysProps = (Properties)ObjectSerializer.deserialize(
      jconf.get(CLIENT_SYS_PROPS));
}
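
The front-end half implied by the comment above writes the same two configuration keys before job submission; a sketch (not the verbatim Pig source):

// Sketch of the assumed counterpart: store the UDF properties and client
// system properties in the job configuration for the backend to read.
public void serialize(Configuration conf) throws IOException {
    conf.set(UDF_CONTEXT, ObjectSerializer.serialize(udfConfs));
    conf.set(CLIENT_SYS_PROPS, ObjectSerializer.serialize(clientSysProps));
}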

Code example from: org.apache.pig/pig

/**
 * @param conf the job configuration holding the serialized store list
 * @param storeLookupKey the configuration key under which the stores were serialized
 * @return the deserialized list of POStore operators
 * @throws IOException if deserialization fails
 */
private List<POStore> getStores(Configuration conf, String storeLookupKey)
throws IOException {
  return (List<POStore>)ObjectSerializer.deserialize(
      conf.get(storeLookupKey));
}

Code example from: com.twitter/parquet-pig

public static RequiredFieldList deserializeRequiredFieldList(String requiredFieldString) {
  if (requiredFieldString == null) {
    return null;
  }
  try {
    return (RequiredFieldList) ObjectSerializer.deserialize(requiredFieldString);
  } catch (IOException e) {
    throw new RuntimeException("Failed to deserialize pushProjection", e);
  }
}

Code example from: ShifuML/shifu

private void initFieldList() {
  if(requiredFieldList == null) {
    try {
      requiredFieldList = (RequiredFieldList) ObjectSerializer.deserialize(super.props
          .getProperty("parquet.private.pig.required.fields"));
      LOG.debug("required list: {}", requiredFieldList);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}

Code example from: org.apache.pig/pig

@SuppressWarnings("unchecked")
private static String getLoadLocation(int inputIndex, Configuration conf) throws IOException {
  ArrayList<FileSpec> inputs =
      (ArrayList<FileSpec>) ObjectSerializer.deserialize(
          conf.get(PIG_INPUTS));
  return inputs.get(inputIndex).getFileName();
}

Code example from: org.apache.phoenix/phoenix-pig

@SuppressWarnings("unchecked")
@Override
public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
  this.reader = reader;
  final String resourceSchemaAsStr = getValueFromUDFContext(this.contextSignature,RESOURCE_SCHEMA_SIGNATURE);
  if (resourceSchemaAsStr == null) {
    throw new IOException("Could not find schema in UDF context");
  }
  schema = (ResourceSchema)ObjectSerializer.deserialize(resourceSchemaAsStr); 
}

Code example from: org.apache.pig/pig

protected static Schema staticSchemaGen(String s) {
  try {
    if (s.equals("")) {
      LOG.warn("No Schema present in SchemaTuple generated class");
      return new Schema();
    }
    return (Schema) ObjectSerializer.deserialize(s);
  } catch (IOException e) {
    throw new RuntimeException("Unable to deserialize serialized Schema: " + s, e);
  }
}

Code example from: Netflix/iceberg

@SuppressWarnings("unchecked")
public <T extends Serializable> T getFromUDFContext(String key, Class<T> clazz) throws IOException {
 Properties properties = UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{signature});
 return (T) ObjectSerializer.deserialize(properties.getProperty(key));
}
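
A matching setter would serialize the value under the same signature-scoped properties. A hypothetical sketch (the method name setInUDFContext is invented for illustration):

// Hypothetical counterpart: store a Serializable value so that
// getFromUDFContext() above can read it back under the same key.
public void setInUDFContext(String key, Serializable value) throws IOException {
  Properties properties = UDFContext.getUDFContext()
      .getUDFProperties(this.getClass(), new String[]{signature});
  properties.setProperty(key, ObjectSerializer.serialize(value));
}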

Code example from: org.apache.hive.hcatalog/hive-hcatalog-pig-adapter

@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
  this.writer = writer;
  computedSchema = (HCatSchema) ObjectSerializer.deserialize(
      UDFContext.getUDFContext()
          .getUDFProperties(this.getClass(), new String[]{sign})
          .getProperty(COMPUTED_OUTPUT_SCHEMA));
}

Code example from: org.apache.pig/pig

private void initialize() {
  if (this.jobConf == null) {
    try {
      this.jobConf = KryoSerializer.deserializeJobConf(this.confBytes);
      PigContext pc = (PigContext) ObjectSerializer.deserialize(jobConf.get("pig.pigContext"));
      SchemaTupleBackend.initialize(jobConf, pc);
    } catch (IOException e) {
      throw new RuntimeException("Couldn't initialize ForEachConverter", e);
    }
  }
}

Code example from: org.apache.pig/pig

private void initialize(Configuration jobConf) throws IOException {
  MapRedUtil.setupUDFContext(jobConf);
  PigContext pc = (PigContext) ObjectSerializer.deserialize(jobConf.get("pig.pigContext"));
  SchemaTupleBackend.initialize(jobConf, pc);
  PigMapReduce.sJobConfInternal.set(jobConf);
  PigHadoopLogger pigHadoopLogger = PigHadoopLogger.getInstance();
  pigHadoopLogger.setAggregate("true".equalsIgnoreCase(jobConf.get("aggregate.warning")));
  pigHadoopLogger.setReporter((SparkCounters)ObjectSerializer.deserialize(jobConf.get("pig.spark.counters")));
  PhysicalOperator.setPigLogger(pigHadoopLogger);
}

Code example from: org.apache.pig/pig

private TypeInfo getTypeInfo(String location, Job job) throws IOException {
  Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass());
  TypeInfo typeInfo = (TypeInfo) ObjectSerializer.deserialize(p.getProperty(signature + SchemaSignatureSuffix));
  if (typeInfo == null) {
    typeInfo = getTypeInfoFromLocation(location, job);
  }
  if (typeInfo != null) {
    p.setProperty(signature + SchemaSignatureSuffix, ObjectSerializer.serialize(typeInfo));
  }
  return typeInfo;
}
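
Note the null handling here: the property value is passed straight to deserialize() and only afterwards checked, which works because ObjectSerializer.deserialize() returns null when given a null input string, so a missing property simply yields null rather than an exception.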

Code example from: com.twitter.elephantbird/elephant-bird-pig

@Override
public void setLocation(String location, Job job) throws IOException {
 FileInputFormat.setInputPaths(job, location);
 this.jobConf = HadoopCompat.getConfiguration(job);
 String projectedFields = getUDFProperties().getProperty(projectionKey);
 if (projectedFields != null) {
  requiredFieldList =
   (RequiredFieldList) ObjectSerializer.deserialize(projectedFields);
 }
}
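
The projectionKey property read here is presumably written by the loader's pushProjection() implementation. A sketch of that write side (an assumed counterpart, reusing the identifiers above):

// Assumed counterpart inside pushProjection(): serialize the pushed-down
// projection so that setLocation() above can restore it on the backend.
getUDFProperties().setProperty(projectionKey, ObjectSerializer.serialize(requiredFieldList));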

Code example from: org.apache.pig/pig

@SuppressWarnings("unchecked")
private static String getMultiInputsCounerName(PigSplit pigSplit,
    Configuration conf) throws IOException {
  ArrayList<FileSpec> inputs =
    (ArrayList<FileSpec>) ObjectSerializer.deserialize(
        conf.get(PigInputFormat.PIG_INPUTS));
  String fname = inputs.get(pigSplit.getInputIndex()).getFileName();
  return PigStatsUtil.getMultiInputsCounterName(fname, pigSplit.getInputIndex());
}
