org.apache.gobblin.configuration.State.getPropAsList()方法的使用及代码示例

x33g5p2x  于2022-01-30 转载在 其他  
字(12.9k)|赞(0)|评价(0)|浏览(88)

本文整理了Java中org.apache.gobblin.configuration.State.getPropAsList()方法的一些代码示例,展示了State.getPropAsList()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。State.getPropAsList()方法的具体详情如下:
包路径:org.apache.gobblin.configuration.State
类名称:State
方法名:getPropAsList

State.getPropAsList介绍

[英]Get the value of a comma separated property as a List of strings.
[中]以字符串列表的形式获取逗号分隔属性的值。

代码示例

代码示例来源:origin: apache/incubator-gobblin

/** Compiles the regex patterns for topics configured with normal compaction priority. */
private List<Pattern> getNormalPriorityPatterns() {
  List<String> topicSpecs =
      this.state.getPropAsList(MRCompactor.COMPACTION_NORMAL_PRIORITY_TOPICS, StringUtils.EMPTY);
  return DatasetFilterUtils.getPatternsFromStrings(topicSpecs);
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Looks up the configured key list under {@code CONF_PREFIX + "keys"}.
 *
 * @param state state possibly holding the comma-separated key list
 * @return the keys when the property is present, otherwise {@link Optional#empty()}
 */
private Optional<Iterable<String>> getKeys (State state) {
  String propName = CONF_PREFIX + "keys";
  if (state.contains(propName)) {
    Iterable<String> keys = state.getPropAsList(propName);
    return Optional.ofNullable(keys);
  }
  return Optional.empty();
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Reads a comma-separated property and compiles each entry into a {@link Pattern}.
 *
 * @param state   state to read the property from
 * @param propKey property holding the comma-separated pattern strings
 * @param def     default value used when the property is absent
 * @return compiled patterns, one per configured entry
 */
public static List<Pattern> getPatternList(State state, String propKey, String def) {
  return getPatternsFromStrings(state.getPropAsList(propKey, def));
}

代码示例来源:origin: apache/incubator-gobblin

/** Compiles the regex patterns for topics configured with high compaction priority. */
private List<Pattern> getHighPriorityPatterns() {
  List<String> topicSpecs =
      this.state.getPropAsList(MRCompactor.COMPACTION_HIGH_PRIORITY_TOPICS, StringUtils.EMPTY);
  return DatasetFilterUtils.getPatternsFromStrings(topicSpecs);
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Parse custom {@link org.apache.gobblin.metrics.Tag}s from property {@link #METRICS_STATE_CUSTOM_TAGS}
 * in the input {@link org.apache.gobblin.configuration.State}.
 *
 * <p>Entries that {@code Tag.fromString} cannot parse (returns null) are silently dropped.
 *
 * @param state {@link org.apache.gobblin.configuration.State} possibly containing custom tags.
 * @return List of {@link org.apache.gobblin.metrics.Tag} parsed from input.
 */
public static List<Tag<?>> getCustomTagsFromState(State state) {
  List<Tag<?>> parsedTags = Lists.newArrayList();
  for (String keyValue : state.getPropAsList(METRICS_STATE_CUSTOM_TAGS, "")) {
    Tag<?> parsed = Tag.fromString(keyValue);
    if (parsed == null) {
      continue; // unparseable entry — skip rather than fail
    }
    parsedTags.add(parsed);
  }
  return parsedTags;
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Resolves the branch-specific writer partition columns, if configured.
 *
 * @return the configured column list for this branch, or {@code absent} when unset
 */
private static Optional<List<String>> getWriterPartitionColumns(State state, int numBranches, int branchId) {
  String propName = ForkOperatorUtils.getPropertyNameForBranch(WRITER_PARTITION_COLUMNS, numBranches, branchId);
  if (!state.contains(propName)) {
    return Optional.<List<String>> absent();
  }
  return Optional.of(state.getPropAsList(propName));
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * @param state {@link State} containing properties for a job.
 * @param targetFs destination {@link FileSystem} where file is to be copied
 * @return whether to allow for splitting of work units based on the filesystem, state, converter/writer config.
 */
public static boolean allowSplit(State state, FileSystem targetFs) {
  // Don't allow distcp jobs that use decrypt/ungzip converters or tararchive/encrypt writers to split work units
  Collection<String> converterClassNames = Collections.emptyList();
  if (state.contains(ConfigurationKeys.CONVERTER_CLASSES_KEY)) {
    converterClassNames = state.getPropAsList(ConfigurationKeys.CONVERTER_CLASSES_KEY);
  }
  // Splitting requires: the feature flag, a scheme known to support concat, the
  // file-aware writer builder, and only identity converters (if any) configured.
  // Note: allMatch replaces the original double-negated noneMatch(!equals) for clarity;
  // both are vacuously true on an empty converter list.
  return state.getPropAsBoolean(SPLIT_ENABLED, false)
      && KNOWN_SCHEMES_SUPPORTING_CONCAT.contains(targetFs.getUri().getScheme())
      && state.getProp(ConfigurationKeys.WRITER_BUILDER_CLASS, "")
          .equals(FileAwareInputStreamDataWriterBuilder.class.getName())
      && converterClassNames.stream().allMatch(s -> s.equals(IdentityConverter.class.getName()));
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Copy dependency jars from local fs to HDFS.
 *
 * <p>No-op when {@link ConfigurationKeys#JOB_JAR_FILES_KEY} is unset. Otherwise each
 * configured entry is treated as a local-filesystem glob, and every matching jar is
 * copied into a fresh {@code _gobblin_compaction_jars} directory under the tmp output
 * dir. That directory is recorded in the state under {@code COMPACTION_JARS} so it can
 * be picked up later (presumably for the job classpath — confirm against callers).
 *
 * @throws IOException on any local or remote filesystem failure
 */
private void copyDependencyJarsToHdfs() throws IOException {
 // Nothing to copy when no job jars are configured.
 if (!this.state.contains(ConfigurationKeys.JOB_JAR_FILES_KEY)) {
  return;
 }
 LocalFileSystem lfs = FileSystem.getLocal(this.conf);
 Path tmpJarFileDir = new Path(this.tmpOutputDir, "_gobblin_compaction_jars");
 // Record the staging dir before copying so consumers of COMPACTION_JARS see it.
 this.state.setProp(COMPACTION_JARS, tmpJarFileDir.toString());
 // Start from a clean staging dir: remove any jars left over from previous runs.
 this.fs.delete(tmpJarFileDir, true);
 for (String jarFile : this.state.getPropAsList(ConfigurationKeys.JOB_JAR_FILES_KEY)) {
  // Each configured entry may be a glob; expand it on the local filesystem.
  for (FileStatus status : lfs.globStatus(new Path(jarFile))) {
   Path tmpJarFile = new Path(this.fs.makeQualified(tmpJarFileDir), status.getPath().getName());
   this.fs.copyFromLocalFile(status.getPath(), tmpJarFile);
   LOG.info(String.format("%s will be added to classpath", tmpJarFile));
  }
 }
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Resolves the set of source entities from configuration.
 *
 * <p>Prefers the plural {@code SOURCE_ENTITIES} list; falls back to the single-entity
 * properties {@code SOURCE_ENTITY} / {@code EXTRACT_TABLE_NAME_KEY}.
 *
 * @throws IllegalStateException when none of the entity properties is configured
 */
static Set<SourceEntity> getSourceEntitiesHelper(State state) {
  if (state.contains(ConfigurationKeys.SOURCE_ENTITIES)) {
    log.info("Using entity names in " + ConfigurationKeys.SOURCE_ENTITIES);
    HashSet<SourceEntity> entities = new HashSet<>();
    for (String entityName : state.getPropAsList(ConfigurationKeys.SOURCE_ENTITIES)) {
      entities.add(SourceEntity.fromSourceEntityName(entityName));
    }
    return entities;
  }
  boolean hasSingleEntity = state.contains(ConfigurationKeys.SOURCE_ENTITY)
      || state.contains(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY);
  if (hasSingleEntity) {
    Optional<SourceEntity> sourceEntity = SourceEntity.fromState(state);
    // Guaranteed to be present given the contains() checks above
    log.info("Using entity name in " + sourceEntity.get());
    return ImmutableSet.of(sourceEntity.get());
  }
  throw new IllegalStateException(String.format("One of the following properties must be specified: %s, %s.",
    ConfigurationKeys.SOURCE_ENTITIES, ConfigurationKeys.SOURCE_ENTITY));
}

代码示例来源:origin: apache/incubator-gobblin

 /**
  * Copy dependent jars to a temporary job directory on HDFS.
  *
  * <p>Requires {@code tmpJobDir} to have been created beforehand. No-op when
  * {@link ConfigurationKeys#JOB_JAR_FILES_KEY} is unset; otherwise each configured
  * entry is expanded as a local-filesystem glob and every matching jar is copied
  * into the {@code COMPACTION_JAR_SUBDIR} under the tmp job dir. That directory is
  * recorded in the state under {@code MRCompactor.COMPACTION_JARS}.
  *
  * @param state job state holding the jar-file property; also mutated to record the jar dir
  * @throws IOException on any local or remote filesystem failure
  * @throws RuntimeException if the temporary job directory has not been created yet
  */
 private void copyJarDependencies (State state) throws IOException {
  if (this.tmpJobDir == null) {
   throw new RuntimeException("Job directory is not created");
  }

  // Nothing to copy when no job jars are configured.
  if (!state.contains(ConfigurationKeys.JOB_JAR_FILES_KEY)) {
   return;
  }

  // create sub-dir to save jar files
  LocalFileSystem lfs = FileSystem.getLocal(HadoopUtils.getConfFromState(state));
  Path tmpJarFileDir = new Path(this.tmpJobDir, MRCompactor.COMPACTION_JAR_SUBDIR);
  this.fs.mkdirs(tmpJarFileDir);
  // Record the staging dir so downstream setup can locate the copied jars.
  state.setProp (MRCompactor.COMPACTION_JARS, tmpJarFileDir.toString());

  // copy jar files to hdfs; each configured entry may be a glob on the local fs
  for (String jarFile : state.getPropAsList(ConfigurationKeys.JOB_JAR_FILES_KEY)) {
   for (FileStatus status : lfs.globStatus(new Path(jarFile))) {
    Path tmpJarFile = new Path(this.fs.makeQualified(tmpJarFileDir), status.getPath().getName());
    this.fs.copyFromLocalFile(status.getPath(), tmpJarFile);
    log.info(String.format("%s will be added to classpath", tmpJarFile));
   }
  }
 }
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Builds a {@link SerialCompactorListener} wrapping one listener instance per class
 * name configured under {@code COMPACTOR_LISTENERS}; absent when the property is
 * unset or empty.
 *
 * @throws CompactorListenerCreationException if any listener class cannot be
 *         loaded or instantiated reflectively
 */
@Override
public Optional<CompactorListener> createCompactorListener(Properties properties)
  throws CompactorListenerCreationException {
  State state = new State(properties);

  if (Strings.isNullOrEmpty(state.getProp(COMPACTOR_LISTENERS))) {
    return Optional.absent();
  }

  List<CompactorListener> listeners = new ArrayList<>();
  for (String className : state.getPropAsList(COMPACTOR_LISTENERS)) {
    try {
      CompactorListener listener = (CompactorListener) ConstructorUtils
        .invokeConstructor(Class.forName(className), properties);
      listeners.add(listener);
    } catch (ReflectiveOperationException e) {
      throw new CompactorListenerCreationException(String
        .format("Unable to create CompactorListeners from key \"%s\" with value \"%s\"", COMPACTOR_LISTENERS,
          properties.getProperty(COMPACTOR_LISTENERS)), e);
    }
  }
  return Optional.<CompactorListener>of(new SerialCompactorListener(listeners));
}
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Builds the composite job listener from the aliases configured under
 * {@code GOBBLIN_CUSTOM_JOB_LISTENERS} (defaulting to the email notification
 * listener's simple name).
 *
 * @return a {@link CompositeJobListener} containing one instance per configured alias
 * @throws IllegalArgumentException if an alias cannot be resolved or instantiated
 */
protected JobListener initJobListener() {
  CompositeJobListener compositeJobListener = new CompositeJobListener();
  List<String> listeners = new State(props).getPropAsList(GOBBLIN_CUSTOM_JOB_LISTENERS,
      EmailNotificationJobListener.class.getSimpleName());
  // The resolver is loop-invariant; build it once instead of once per alias.
  ClassAliasResolver<JobListener> resolver = new ClassAliasResolver<>(JobListener.class);
  try {
    for (String listenerAlias : listeners) {
      compositeJobListener.addJobListener(resolver.resolveClass(listenerAlias).newInstance());
    }
  } catch (IllegalAccessException | InstantiationException | ClassNotFoundException e) {
    throw new IllegalArgumentException(e);
  }
  return compositeJobListener;
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Instantiates one {@link RecompactionCondition} per factory alias configured under
 * {@code COMPACTION_RECOMPACT_COMBINE_CONDITIONS} (falling back to the default).
 *
 * @param dataset dataset whose job properties supply the factory aliases
 * @return the conditions, in configured order
 * @throws IllegalArgumentException if a factory cannot be resolved or constructed
 */
private ImmutableList<RecompactionCondition> getConditionsFromProperties (Dataset dataset) {
  ClassAliasResolver<RecompactionConditionFactory> resolver =
      new ClassAliasResolver<>(RecompactionConditionFactory.class);
  List<String> factoryAliases = dataset.jobProps().getPropAsList(MRCompactor.COMPACTION_RECOMPACT_COMBINE_CONDITIONS,
    MRCompactor.DEFAULT_COMPACTION_RECOMPACT_CONDITION);
  ImmutableList.Builder<RecompactionCondition> conditions = ImmutableList.builder();
  for (String alias : factoryAliases) {
    try {
      RecompactionConditionFactory factory = GobblinConstructorUtils.invokeFirstConstructor(
        resolver.resolveClass(alias), ImmutableList.of());
      conditions.add(factory.createRecompactionCondition(dataset));
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
      throw new IllegalArgumentException(e);
    }
  }
  return conditions.build();
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Fetches HDFS, optional other-namenode, and JobTracker delegation tokens into
 * {@code cred}, in that order.
 *
 * @param state job state; {@code OTHER_NAMENODES} may hold a comma-separated list of
 *        additional namenode URIs to fetch tokens for
 * @param conf  Hadoop configuration used for the token requests
 * @param cred  credentials object that accumulates the fetched tokens (mutated)
 * @throws IOException if any token cannot be obtained
 */
private static void getFsAndJtTokensImpl(final State state, final Configuration conf, final Credentials cred)
  throws IOException {
 getHdfsToken(conf, cred);
 // Extra namenodes are optional; only fetch their tokens when configured.
 if (state.contains(OTHER_NAMENODES)) {
  getOtherNamenodesToken(state.getPropAsList(OTHER_NAMENODES), conf, cred);
 }
 getJtToken(cred);
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Reads {@code key} from {@code state} as the type described by {@code token},
 * removing the property from the state as a side effect.
 *
 * <p>Dispatches on the requested type: Boolean, Integer, Long, and List&lt;String&gt;
 * use the corresponding typed getter; anything else falls back to the raw string.
 * The unchecked casts are safe because each branch only runs when {@code token}
 * is assignable from the produced type.
 *
 * @param state state to read from; the property is removed after reading
 * @param key   property name to look up
 * @param token type token describing the desired field type
 * @return the value when present, otherwise {@code absent}
 */
@SuppressWarnings({ "serial", "unchecked" })
protected static <T> Optional<T> populateField(State state, String key, TypeToken<T> token) {
 if (state.contains(key)) {
  Optional<T> fieldValue;
  if (new TypeToken<Boolean>() {}.isAssignableFrom(token)) {
   fieldValue = (Optional<T>) Optional.of(state.getPropAsBoolean(key));
  } else if (new TypeToken<Integer>() {}.isAssignableFrom(token)) {
   fieldValue = (Optional<T>) Optional.of(state.getPropAsInt(key));
  } else if (new TypeToken<Long>() {}.isAssignableFrom(token)) {
   fieldValue = (Optional<T>) Optional.of(state.getPropAsLong(key));
  } else if (new TypeToken<List<String>>() {}.isAssignableFrom(token)) {
   fieldValue = (Optional<T>) Optional.of(state.getPropAsList(key));
  } else {
   // Fallback: treat the property as a plain string.
   fieldValue = (Optional<T>) Optional.of(state.getProp(key));
  }
  // Consume the property so later generic config handling does not see it again.
  state.removeProp(key);
  return fieldValue;
 }
 return Optional.<T> absent();
}

代码示例来源:origin: apache/incubator-gobblin

for ( final String path : state.getPropAsList(ConfigurationKeys.PUBLISHER_DIRS) ) {
 if (isPathDedupeEnabled && pathsToRegisterFromSingleState.contains(path)){
  continue;

代码示例来源:origin: apache/incubator-gobblin

/**
 * Create a KafkaWrapper based on the given type of Kafka API and list of Kafka brokers.
 *
 * @param state A {@link State} object that should contain a list of comma separated Kafka brokers
 * in property "kafka.brokers". It may optionally specify whether to use the new Kafka API by setting
 * use.new.kafka.api=true.
 */
public static KafkaWrapper create(State state) {
  Preconditions.checkNotNull(state.getProp(ConfigurationKeys.KAFKA_BROKERS),
    "Need to specify at least one Kafka broker.");
  KafkaWrapper.Builder wrapperBuilder = new KafkaWrapper.Builder();
  if (state.getPropAsBoolean(USE_NEW_KAFKA_API, DEFAULT_USE_NEW_KAFKA_API)) {
    wrapperBuilder = wrapperBuilder.withNewKafkaAPI();
  }
  Config kafkaConfig = ConfigUtils.propertiesToConfig(state.getProperties());
  return wrapperBuilder
    .withBrokers(state.getPropAsList(ConfigurationKeys.KAFKA_BROKERS))
    .withConfig(kafkaConfig)
    .build();
}

代码示例来源:origin: apache/incubator-gobblin

state.contains(USER_DEFINED_HIVE_LOCATIONS) ? state.getPropAsList(USER_DEFINED_HIVE_LOCATIONS)
    : Collections.EMPTY_LIST;
if (!extraHcatLocations.isEmpty()) {

代码示例来源:origin: apache/incubator-gobblin

/**
 * Initialize file system helper at most once for this instance.
 * {@inheritDoc}
 * @see org.apache.gobblin.source.extractor.filebased.FileBasedSource#initFileSystemHelper(org.apache.gobblin.configuration.State)
 */
@Override
public synchronized void initFileSystemHelper(State state) throws FileBasedHelperException {
 // synchronized + null check: only the first call builds the helper.
 if (fsHelper == null) {
  // Build the Google credential from the configured private key and comma-separated
  // API scopes; proxy/filesystem settings are optional pass-throughs from state.
  Credential credential = new GoogleCommon.CredentialBuilder(state.getProp(SOURCE_CONN_PRIVATE_KEY), state.getPropAsList(API_SCOPES))
                      .fileSystemUri(state.getProp(PRIVATE_KEY_FILESYSTEM_URI))
                      .proxyUrl(state.getProp(SOURCE_CONN_USE_PROXY_URL))
                      .port(state.getProp(SOURCE_CONN_USE_PROXY_PORT))
                      .serviceAccountId(state.getProp(SOURCE_CONN_USERNAME))
                      .build();
  Drive driveClient = new Drive.Builder(credential.getTransport(),
                     GoogleCommon.getJsonFactory(),
                     credential)
                 .setApplicationName(Preconditions.checkNotNull(state.getProp(APPLICATION_NAME), "ApplicationName is required"))
                 .build();
  // Register with the closer so the helper is released when this source closes.
  this.fsHelper = closer.register(new GoogleDriveFsHelper(state, driveClient));
 }
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Lazily initializes the Google Drive client on first use; subsequent calls are no-ops.
 *
 * <p>Credentials (private key, API scopes, optional proxy) are read from the
 * {@link State} recovered from the Hadoop {@link Configuration}.
 */
@Override
public synchronized void initialize(URI uri, Configuration conf) throws IOException {
 // synchronized + null check: only the first call builds the Drive client.
 if (this.client == null) {
  super.initialize(uri, conf);
  State state = HadoopUtils.getStateFromConf(conf);
  // Build the Google credential from the configured private key and comma-separated
  // API scopes; proxy/filesystem settings are optional pass-throughs from state.
  Credential credential = new GoogleCommon.CredentialBuilder(state.getProp(SOURCE_CONN_PRIVATE_KEY), state.getPropAsList(API_SCOPES))
                      .fileSystemUri(state.getProp(PRIVATE_KEY_FILESYSTEM_URI))
                      .proxyUrl(state.getProp(SOURCE_CONN_USE_PROXY_URL))
                      .port(state.getProp(SOURCE_CONN_USE_PROXY_PORT))
                      .serviceAccountId(state.getProp(SOURCE_CONN_USERNAME))
                      .build();
  this.client = new Drive.Builder(credential.getTransport(),
                  GoogleCommon.getJsonFactory(),
                  credential)
              .setApplicationName(Preconditions.checkNotNull(state.getProp(APPLICATION_NAME),
                   "ApplicationName is required"))
              .build();
  // Page size for Drive listings, with a library-defined default.
  this.pageSize = state.getPropAsInt(PAGE_SIZE, DEFAULT_PAGE_SIZE);
 }
}

相关文章