org.apache.storm.Config.setMaxSpoutPending()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(10.5k)|赞(0)|评价(0)|浏览(110)

本文整理了Java中org.apache.storm.Config.setMaxSpoutPending()方法的一些代码示例,展示了Config.setMaxSpoutPending()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Config.setMaxSpoutPending()方法的具体详情如下:
包路径:org.apache.storm.Config
类名称:Config
方法名:setMaxSpoutPending

Config.setMaxSpoutPending介绍

暂无

代码示例

代码示例来源:origin: apache/storm

/**
 * Sets the maximum number of un-acked tuples allowed in flight for each
 * spout task of this topology, by delegating to the static overload that
 * writes the value into this {@code Config} map.
 *
 * @param max the maximum number of pending (emitted but not yet acked/failed) tuples
 */
public void setMaxSpoutPending(int max) {
  setMaxSpoutPending(this, max);
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the Redis-backed word-count Trident topology.
 * Expects exactly two arguments: the Redis host and the Redis port.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 2) {
    System.out.println("Usage: WordCountTrident redis-host redis-port");
    System.exit(1);
  }

  final String host = args[0];
  final Integer port = Integer.valueOf(args[1]);

  Config topologyConf = new Config();
  topologyConf.setMaxSpoutPending(5);
  topologyConf.setNumWorkers(3);
  StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(host, port));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the word-count Trident topology against a Redis
 * cluster described by a single comma-separated host:port list argument.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 1) {
    System.out.println("Usage: WordCountTrident 127.0.0.1:6379,127.0.0.1:6380");
    System.exit(1);
  }

  final String hostPortList = args[0];

  Config topologyConf = new Config();
  topologyConf.setMaxSpoutPending(5);
  topologyConf.setNumWorkers(3);
  StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(hostPortList));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the Redis-backed word-count Trident topology.
 * Requires the Redis host and port as the two command-line arguments.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 2) {
    System.out.println("Usage: WordCountTrident redis-host redis-port");
    System.exit(1);
  }

  final String host = args[0];
  final Integer port = Integer.valueOf(args[1]);

  Config topologyConf = new Config();
  topologyConf.setMaxSpoutPending(5);
  topologyConf.setNumWorkers(3);
  StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(host, port));
}
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the word-count Trident topology for a Redis cluster
 * given as one comma-separated "host:port,host:port" argument.
 */
public static void main(String[] args) throws Exception {
  if (args.length != 1) {
    System.out.println("Usage: WordCountTrident 127.0.0.1:6379,127.0.0.1:6380");
    System.exit(1);
  }

  final String hostPortList = args[0];

  Config topologyConf = new Config();
  topologyConf.setMaxSpoutPending(5);
  topologyConf.setNumWorkers(3);
  StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(hostPortList));
}

代码示例来源:origin: apache/storm

/**
 * Entry point for the Trident Hive topology.
 *
 * Usage: TridentHiveTopology metastoreURI dbName tableName [topologyName] [keytab principal]
 */
public static void main(String[] args) throws Exception {
  // Validate the argument count BEFORE indexing into args; the original
  // read args[0..2] first and would throw ArrayIndexOutOfBoundsException
  // instead of printing the usage message.
  if (args.length != 3 && args.length != 4 && args.length != 6) {
    LOG.info("Usage: TridentHiveTopology metastoreURI dbName tableName [topologyName] [keytab principal]");
    return;
  }

  String metaStoreURI = args[0];
  String dbName = args[1];
  String tblName = args[2];
  Config conf = new Config();
  conf.setMaxSpoutPending(5);

  String topoName = "tridentHiveTopology";
  String keytab = null;
  String principal = null;
  if (args.length > 3) {
    topoName = args[3];
  }
  if (args.length == 6) {
    keytab = args[4];
    principal = args[5];
  }

  try {
    // Bug fixes: submit under the resolved topoName (the original passed
    // args[3] unconditionally, crashing when no name was supplied), and
    // forward the parsed keytab/principal instead of hard-coded nulls.
    StormSubmitter.submitTopology(topoName, conf, buildTopology(metaStoreURI, dbName, tblName, keytab, principal));
  } catch (SubmitterHookException e) {
    LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
  } catch (Exception e) {
    LOG.warn("Failed to submit topology ", e);
  }
}

代码示例来源:origin: apache/storm

/**
 * Entry point for the Trident HDFS file topology.
 *
 * Usage: TridentFileTopology [hdfs url] [hdfs yaml config file] <topology name>
 */
public static void main(String[] args) throws Exception {
    // Bug fix: fewer than two arguments previously crashed on args[1]
    // before any usage message could be printed.
    if (args.length < 2 || args.length > 3) {
      System.out.println("Usage: TridentFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
      return;
    }

    Config conf = new Config();
    conf.setMaxSpoutPending(5);

    Yaml yaml = new Yaml();
    Map<String, Object> yamlConf;
    // try-with-resources closes the stream even when yaml.load throws
    // (bug fix: the original leaked the FileInputStream on parse failure).
    try (InputStream in = new FileInputStream(args[1])) {
      yamlConf = (Map<String, Object>) yaml.load(in);
    }
    conf.put("hdfs.config", yamlConf);

    String topoName = "wordCounter";
    if (args.length == 3) {
      topoName = args[2];
    }
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
  }
}

代码示例来源:origin: apache/storm

/**
 * Entry point for the Trident HDFS sequence-file topology.
 *
 * Usage: TridentSequenceTopology <hdfs_config_yaml> [<topology name>]
 */
public static void main(String[] args) throws Exception {
    // Bug fix: fewer than two arguments previously crashed on args[1]
    // before any usage message could be printed.
    if (args.length < 2 || args.length > 3) {
      System.out.println("Usage: TridentSequenceTopology <hdfs_config_yaml> [<topology name>]");
      return;
    }

    Config conf = new Config();
    conf.setMaxSpoutPending(5);

    Yaml yaml = new Yaml();
    Map<String, Object> yamlConf;
    // try-with-resources closes the stream even when yaml.load throws
    // (bug fix: the original leaked the FileInputStream on parse failure).
    try (InputStream in = new FileInputStream(args[1])) {
      yamlConf = (Map<String, Object>) yaml.load(in);
    }
    conf.put("hdfs.config", yamlConf);

    String topoName = "wordCounter";
    if (args.length == 3) {
      topoName = args[2];
    }
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
  }
}

代码示例来源:origin: apache/storm

/**
 * Submits the windowed word-count topology, persisting window state in the
 * HBase table "window-state" (column family "cf", qualifier "tuples").
 * An optional first argument overrides the default topology name.
 */
public static void main(String[] args) throws Exception {
  Config topoConf = new Config();
  topoConf.setMaxSpoutPending(20);
  topoConf.put(Config.TOPOLOGY_TRIDENT_WINDOWING_INMEMORY_CACHE_LIMIT, 100);

  // window-state table should already be created with cf:tuples column
  byte[] columnFamily = "cf".getBytes("UTF-8");
  byte[] qualifier = "tuples".getBytes("UTF-8");
  HBaseWindowsStoreFactory storeFactory =
    new HBaseWindowsStoreFactory(new HashMap<String, Object>(), "window-state", columnFamily, qualifier);

  String name = (args.length > 0) ? args[0] : "wordCounterWithWindowing";
  topoConf.setNumWorkers(3);
  StormSubmitter.submitTopologyWithProgressBar(name, topoConf, buildTopology(storeFactory));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the MongoDB-backed word-count Trident topology.
 * Requires the MongoDB url and collection; an optional third argument
 * overrides the default topology name.
 */
public static void main(String[] args) throws Exception {
  Config topoConf = new Config();
  topoConf.setMaxSpoutPending(5);

  if (args.length < 2 || args.length > 3) {
    System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
    return;
  }
  String name = (args.length == 3) ? args[2] : "wordCounter";

  topoConf.setNumWorkers(3);
  StormSubmitter.submitTopology(name, topoConf, buildTopology(args[0], args[1]));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the HBase-backed word-count Trident topology.
 * Requires the HBase root directory; an optional second argument overrides
 * the default topology name.
 */
public static void main(String[] args) throws Exception {
  Config conf = new Config();
  conf.setMaxSpoutPending(5);
  String topoName = "wordCounter";
  if (args.length == 2) {
    topoName = args[1];
  } else if (args.length > 2 || args.length < 1) {
    // Bug fix: with zero arguments the original fell through the
    // validation chain and crashed on args[0]; print usage instead.
    System.out.println("Usage: WordCountTrident <hbase.rootdir> [topology name]");
    return;
  }
  conf.setNumWorkers(3);
  StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the MongoDB-backed word-count Trident topology.
 * args[0] is the MongoDB url, args[1] the collection; args[2] optionally
 * names the topology.
 */
public static void main(String[] args) throws Exception {
  // Exactly two or three arguments are accepted.
  if (args.length > 3 || args.length < 2) {
    System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
    return;
  }

  String topologyName = "wordCounter";
  if (args.length == 3) {
    topologyName = args[2];
  }

  Config conf = new Config();
  conf.setMaxSpoutPending(5);
  conf.setNumWorkers(3);
  StormSubmitter.submitTopology(topologyName, conf, buildTopology(args[0], args[1]));
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the RocketMQ word-count topology.
 * args[0] is the nameserver address, args[1] the topic; args[2] optionally
 * names the topology.
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);

    String topologyName = "wordCounter";
    if (args.length < 2) {
      System.out.println("Usage: WordCountTopology <nameserver addr> <topic> [topology name]");
    } else {
      // Bug fix: the original tested args.length > 3, which silently
      // ignored the optional topology name when exactly three arguments
      // were supplied.
      if (args.length >= 3) {
        topologyName = args[2];
      }
      StormSubmitter.submitTopology(topologyName, conf, buildTopology(args[0], args[1]));
    }
  }
}

代码示例来源:origin: apache/storm

/**
 * Entry point: submits the RocketMQ word-count Trident topology.
 * args[0] is the nameserver address, args[1] the topic; args[2] optionally
 * names the topology.
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);

    String topologyName = "wordCounter";
    if (args.length < 2) {
      System.out.println("Usage: WordCountTrident <nameserver addr> <topic> [topology name]");
    } else {
      // Bug fix: the original tested args.length > 3, which silently
      // ignored the optional topology name when exactly three arguments
      // were supplied.
      if (args.length >= 3) {
        topologyName = args[2];
      }
      StormSubmitter.submitTopology(topologyName, conf, buildTopology(args[0], args[1]));
    }
  }
}

代码示例来源:origin: apache/storm

/** Builds the devices topology and submits it with a progress bar. */
public static void main(String[] args) throws Exception {
  Config conf = new Config();
  conf.setMaxSpoutPending(20);
  conf.setNumWorkers(3);

  StormTopology devicesTopology = buildDevicesTopology();
  StormSubmitter.submitTopologyWithProgressBar("devices-topology", conf, devicesTopology);
}

代码示例来源:origin: apache/storm

/**
 * Submits the word-count topology, then issues the same DRPC query ten
 * times (one second apart) and prints each result. An optional first
 * argument overrides the default topology name.
 */
public static void main(String[] args) throws Exception {
  Config conf = new Config();
  conf.setMaxSpoutPending(20);
  String name = (args.length > 0) ? args[0] : "wordCounter";
  conf.setNumWorkers(3);
  StormSubmitter.submitTopologyWithProgressBar(name, conf, buildTopology());

  try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
    for (int attempt = 0; attempt < 10; attempt++) {
      System.out.println("DRPC RESULT: " + drpc.execute("words", "cat the dog jumped"));
      Thread.sleep(1000);
    }
  }
}

代码示例来源:origin: apache/storm

/** Builds the vehicles topology and submits it with a progress bar. */
public static void main(String[] args) throws Exception {
  Config conf = new Config();
  conf.setMaxSpoutPending(20);
  conf.setNumWorkers(3);

  StormTopology vehiclesTopology = buildVehiclesTopology();
  StormSubmitter.submitTopologyWithProgressBar("vehicles-topology", conf, vehiclesTopology);
}

代码示例来源:origin: apache/storm

/**
 * Submits the word-count topology, then fires the same DRPC query ten
 * times (one second apart), printing each result. An optional first
 * argument overrides the default topology name.
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(20);
    String name = (args.length > 0) ? args[0] : "wordCounter";
    conf.setNumWorkers(3);
    StormSubmitter.submitTopologyWithProgressBar(name, conf, buildTopology());

    try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
      for (int attempt = 0; attempt < 10; attempt++) {
        System.out.println("DRPC RESULT: " + drpc.execute("words", "CAT THE DOG JUMPED"));
        Thread.sleep(1000);
      }
    }
  }
}

代码示例来源:origin: apache/storm

/**
 * Submits two Kafka producer topologies (one per topic) followed by a
 * single Trident consumer topology reading both. args[0] optionally
 * overrides the broker url; args[1] optionally selects an opaque (default)
 * or transactional spout.
 */
protected void run(String[] args) throws AlreadyAliveException, InvalidTopologyException,
    AuthorizationException, InterruptedException {
    String brokerUrl = KAFKA_LOCAL_BROKER;
    if (args.length > 0) {
      brokerUrl = args[0];
    }
    boolean isOpaque = true;
    if (args.length > 1) {
      isOpaque = Boolean.parseBoolean(args[1]);
    }
    System.out.println("Running with broker url " + brokerUrl + " and isOpaque=" + isOpaque);

    Config tpConf = new Config();
    tpConf.setDebug(true);
    tpConf.setMaxSpoutPending(5);

    // Producers feed the two topics consumed below.
    StormSubmitter.submitTopology(TOPIC_1 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_1));
    StormSubmitter.submitTopology(TOPIC_2 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_2));

    // Consumer: pick the spout flavor, then submit the consuming topology.
    KafkaTridentSpoutConfig<String, String> spoutConfig = newKafkaSpoutConfig(brokerUrl);
    ITridentDataSource spout;
    if (isOpaque) {
      spout = newKafkaTridentSpoutOpaque(spoutConfig);
    } else {
      spout = newKafkaTridentSpoutTransactional(spoutConfig);
    }
    StormSubmitter.submitTopology("topics-consumer", tpConf,
      TridentKafkaConsumerTopology.newTopology(spout));
  }
}

代码示例来源:origin: apache/storm

/**
 * Submits {@code topo} to the given local cluster and blocks until the
 * watched list has accumulated {@code expectedValueSize} values, then
 * waits for the cluster to drain and restores the default
 * deserialization class loader.
 */
public static void runStormTopology(LocalCluster cluster, final List<?> watchedList, final int expectedValueSize,
                  AbstractStreamsProcessor proc, StormTopology topo) throws Exception {
  Config conf = new Config();
  conf.setMaxSpoutPending(20);
  conf.setDebug(true);

  // If the processor compiled classes at runtime, Java deserialization
  // must use the most recently created compiling class loader.
  if (proc.getClassLoaders() != null && proc.getClassLoaders().size() > 0) {
    CompilingClassLoader newestLoader = proc.getClassLoaders().get(proc.getClassLoaders().size() - 1);
    Utils.setClassLoaderForJavaDeSerialize(newestLoader);
  }

  try (LocalCluster.LocalTopology stormTopo = cluster.submitTopology("storm-sql", conf, topo)) {
    waitForCompletion(1000 * 1000, () -> watchedList.size() < expectedValueSize);
  } finally {
    // Wait for every topology to be torn down before resetting the
    // deserialization class loader.
    while (cluster.getClusterInfo().get_topologies_size() > 0) {
      Thread.sleep(10);
    }
    Utils.resetClassLoaderForJavaDeSerialize();
  }
}

相关文章