Usage and code examples of the org.apache.pig.data.TupleFactory.getInstance() method


This article collects Java code examples showing how the org.apache.pig.data.TupleFactory.getInstance() method is used in practice. The examples are taken from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of TupleFactory.getInstance() are as follows:
Package: org.apache.pig.data
Class: TupleFactory
Method: getInstance

About TupleFactory.getInstance

Get a reference to the singleton factory.
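
Before the project-specific examples, here is a minimal, self-contained sketch of the typical pattern (the class and values are illustrative, not taken from the projects cited below): obtain the singleton factory once, then use it to create and populate a Tuple.

import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class GetInstanceExample {
    public static void main(String[] args) throws Exception {
        // getInstance() returns the shared singleton factory.
        TupleFactory factory = TupleFactory.getInstance();
        // Create a tuple with two fields and populate it; set() may throw
        // ExecException, covered here by the throws clause on main.
        Tuple t = factory.newTuple(2);
        t.set(0, "hello");
        t.set(1, 42);
        System.out.println(t); // prints (hello,42)
    }
}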

Code examples

Code example from: elastic/elasticsearch-hadoop

@Override
public Object addToArray(Object array, List<Object> value) {
  return TupleFactory.getInstance().newTupleNoCopy(value);
}
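
As a side note, if memory serves from Pig's TupleFactory javadoc (worth verifying against your Pig version), newTuple(List) copies the list contents into the tuple, whereas newTupleNoCopy(List), used above, lets the tuple take ownership of the provided list without copying. A small illustrative sketch (the class name NoCopyExample is made up for this article):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class NoCopyExample {
    public static void main(String[] args) {
        List<Object> values = new ArrayList<Object>(Arrays.asList((Object) "a", (Object) "b"));
        // Copies the list contents into the tuple's own backing store.
        Tuple copied = TupleFactory.getInstance().newTuple(values);
        // Reuses the caller's list as the tuple's backing store, so avoid
        // modifying the list afterwards.
        Tuple wrapped = TupleFactory.getInstance().newTupleNoCopy(values);
        System.out.println(copied + " " + wrapped);
    }
}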

Code example from: elastic/elasticsearch-hadoop

// Fragment: read the current record from the Hadoop RecordReader and size
// the new tuple to the number of fields in that record.
dataMap = reader.getCurrentValue();
Tuple tuple = TupleFactory.getInstance().newTuple(dataMap.size());

Code example from: elastic/elasticsearch-hadoop

private PigTuple createTuple(Object obj, ResourceSchema schema) {
    PigTuple tuple = new PigTuple(schema);
    tuple.setTuple(TupleFactory.getInstance().newTuple(obj));
    return tuple;
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedBag() {
  String expected = "{\"bag\":[[{\"first\":\"one\",\"second\":\"two\",\"third\":\"three\"}]," +
                 "[{\"first\":\"one\",\"second\":\"two\",\"third\":\"three\"}]," +
                 "[{\"first\":\"one\",\"second\":\"two\",\"third\":\"three\"}]]}";
  Tuple tuple = TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two", "three" }));
  assertThat(pigTypeToJson(createTuple(new DefaultDataBag(Arrays.asList(new Tuple[] { tuple, tuple, tuple })),
      createSchema("bag: {t:(first:chararray, second:chararray, third: chararray)}"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedBag() {
  String expected = "{\"bag\":[[\"one\",\"two\",\"three\"],[\"one\",\"two\",\"three\"],[\"one\",\"two\",\"three\"]]}";
  Tuple tuple = TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two", "three" }));
  assertThat(pigTypeToJson(createTuple(new DefaultDataBag(Arrays.asList(new Tuple[] { tuple, tuple, tuple })),
      createSchema("bag: {t:(first:chararray, second:chararray, third: chararray)}"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testBagWithAnonTuple() {
  String expected = "{\"bag_0\":[[{\"val_0\":\"xxx\",\"val_1\":\"yyy\"}]," +
                  "[{\"val_0\":\"xxx\",\"val_1\":\"yyy\"}]," +
                  "[{\"val_0\":\"xxx\",\"val_1\":\"yyy\"}]]}";
  Tuple tuple = TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "xxx", "yyy" }));
  assertThat(pigTypeToJson(createTuple(new DefaultDataBag(Arrays.asList(new Tuple[] { tuple, tuple, tuple })),
      createSchema("{t:(chararray, chararray)}"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedTuple() {
  String expected = "{\"namedtuple\":[\"one\",\"two\"]}";
  assertThat(pigTypeToJson(createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two" })),
      createSchema("namedtuple: (first:chararray, second:chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedTuple() {
  String expected = "{\"namedtuple\":[{\"first\":\"one\",\"second\":\"two\"}]}";
  assertThat(pigTypeToJson(createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two" })),
      createSchema("namedtuple: (first:chararray, second:chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testAnonTuple() {
  String expected = "{\"anontuple\":[{\"val_0\":\"xxx\",\"val_1\":\"yyy\",\"val_2\":\"zzz\"}]}";
  assertThat(pigTypeToJson(createTuple(
      TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "xxx", "yyy", "zzz" })),
          createSchema("anontuple: (chararray, chararray, chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testBagWithAnonTuple() {
  String expected = "{\"bag\":[[\"xxx\",\"yyy\"],[\"xxx\",\"yyy\"],[\"xxx\",\"yyy\"]]}";
  Tuple tuple = TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "xxx", "yyy" }));
  assertThat((pigTypeToJson(createTuple(new DefaultDataBag(Arrays.asList(new Tuple[] { tuple, tuple, tuple })),
      createSchema("bag: {t:(chararray, chararray)}")))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedTupleWithMixedValues() {
  String expected = "{\"namedtuplewithmixedvalues\":[1,\"two\"]}";
  assertThat(pigTypeToJson(createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new Object[] { 1, "two" })),
      createSchema("namedtuplewithmixedvalues: (first:int, second:chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testAnonymousTuple() {
  String expected = "{\"anontuple\":[\"xxx\",\"yyy\",\"zzz\"]}";
  assertThat(pigTypeToJson(createTuple(
      TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "xxx", "yyy", "zzz" })),
      createSchema("anontuple: (chararray, chararray, chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedTupleWithMixedValues() {
  String expected = "{\"namedtuplewithmixedvalues\":[{\"first\":1,\"second\":\"two\"}]}";
  assertThat(pigTypeToJson(createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new Object[] { 1, "two" })),
          createSchema("namedtuplewithmixedvalues: (first:int, second:chararray)"))), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void testNamedTupleWithExclusion() {
  Settings settings = new TestSettings();
  settings.setProperty(ConfigurationOptions.ES_MAPPING_EXCLUDE, "namedtuple.second");
  String expected = "{\"namedtuple\":[{\"first\":\"one\"}]}";
  assertThat(pigTypeToJson(createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two" })),
      createSchema("namedtuple: (first:chararray, second:chararray)")), settings), is(expected));
}

Code example from: elastic/elasticsearch-hadoop

@Test
public void generateEventPigTuple() throws Exception {
  Map<String, Number> map = new LinkedHashMap<String, Number>();
  map.put("one", 1);
  map.put("two", 2);
  map.put("three", 3);
  PigTuple tuple = createTuple(TupleFactory.getInstance().newTuple(Arrays.asList(new String[] { "one", "two" })),
      createSchema("namedtuple: (first:chararray, second:chararray)"));
  SerializationEventConverter eventConverter = new SerializationEventConverter();
  SerializationFailure iaeFailure = new SerializationFailure(new IllegalArgumentException("garbage"), tuple, new ArrayList<String>());
  String rawEvent = eventConverter.getRawEvent(iaeFailure);
  assertThat(rawEvent, equalTo("[PigTuple]=((namedtuple:(first:chararray,second:chararray)):((one,two)))"));
  String timestamp = eventConverter.getTimestamp(iaeFailure);
  assertTrue(StringUtils.hasText(timestamp));
  assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);
  String exceptionType = eventConverter.renderExceptionType(iaeFailure);
  assertEquals("illegal_argument_exception", exceptionType);
  String exceptionMessage = eventConverter.renderExceptionMessage(iaeFailure);
  assertEquals("garbage", exceptionMessage);
  String eventMessage = eventConverter.renderEventMessage(iaeFailure);
  assertEquals("Could not construct bulk entry from record", eventMessage);
}

Code example from: apache/phoenix

@Before
public void setUp() throws Exception {
  conf = getTestClusterConfig();
  conf.set(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS);
  // Set CURRENT_SCN to confirm that it's ignored
  conf.set(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(System.currentTimeMillis()+QueryConstants.MILLIS_IN_DAY));
  pigServer = new PigServer(ExecType.LOCAL, conf);
  Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
  conn = DriverManager.getConnection(getUrl(), props);
  zkQuorum = LOCALHOST + JDBC_PROTOCOL_SEPARATOR + getZKClientPort(conf);
  tupleFactory = TupleFactory.getInstance();
}

Code example from: org.apache.pig/pig

@Override
public void setConf(Configuration conf) {
  try {
    Class<? extends TupleRawComparator> mComparatorClass = TupleFactory.getInstance().tupleRawComparatorClass();
    mComparator = mComparatorClass.newInstance();
  } catch (InstantiationException e) {
    throw new RuntimeException(e);
  } catch (IllegalAccessException e) {
    throw new RuntimeException(e);
  }
  mComparator.setConf(conf);
}
