org.apache.hadoop.hive.common.type.Date.valueOf()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(13.1k)|赞(0)|评价(0)|浏览(208)

本文整理了Java中org.apache.hadoop.hive.common.type.Date.valueOf()方法的一些代码示例,展示了Date.valueOf()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Date.valueOf()方法的具体详情如下:
包路径:org.apache.hadoop.hive.common.type.Date
类名称:Date
方法名:valueOf

Date.valueOf介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Generates a random Hive {@code Date} with year in [1800, 2299],
 * month in [1, 12] and day in [1, 28].
 *
 * @param r source of randomness; must not be null
 * @return the Date parsed from a random "yyyy-MM-dd" string
 */
public static Date getRandDate(Random r) {
  // Day is capped at 28 so the generated string is a valid calendar date
  // for every month (including February in non-leap years).
  // Autoboxing handles the int -> Integer conversion for the varargs call;
  // explicit Integer.valueOf() wrappers are redundant.
  String dateStr = String.format("%d-%02d-%02d",
      1800 + r.nextInt(500),  // year
      1 + r.nextInt(12),      // month
      1 + r.nextInt(28));     // day
  return Date.valueOf(dateStr);
}

代码示例来源:origin: apache/hive

private Object convertLiteral(Object o) {
  // Workaround for the type mismatch between SARG literals and Hive's own
  // Date/Timestamp types. TODO: Move those types to storage-api.
  if (o instanceof java.sql.Timestamp) {
    return Timestamp.valueOf(o.toString());
  }
  if (o instanceof java.sql.Date) {
    return Date.valueOf(o.toString());
  }
  // Anything else passes through untouched.
  return o;
}
}

代码示例来源:origin: apache/hive

/**
 * Attempts to parse {@code strValue} as a date and, on success, copies the
 * parsed instant (epoch millis) into {@code result}.
 *
 * @return true if the string parsed as a date, false otherwise
 */
public boolean parseDate(String strValue, Date result) {
  Date parsed;
  try {
    parsed = Date.valueOf(strValue);
  } catch (IllegalArgumentException e) {
    // Unparseable input is reported via the return value, not an exception.
    return false;
  }
  result.setTimeInMillis(parsed.toEpochMilli());
  return true;
}
}

代码示例来源:origin: apache/hive

/**
 * Reads a date either as a "yyyy-mm-dd" string or, failing that, as an
 * integer count of days since the epoch.
 */
private Date readDateValue(String dateStr) {
  try {
    org.apache.hadoop.hive.common.type.Date hiveDate =
        org.apache.hadoop.hive.common.type.Date.valueOf(dateStr);
    return new Date(new DateWritableV2(hiveDate).getDays());
  } catch (IllegalArgumentException err) {
    // Not in date form; interpret the string as a day count instead.
    LOG.debug("Reading date value as days since epoch: {}", dateStr);
    return new Date(Long.parseLong(dateStr));
  }
}
}

代码示例来源:origin: apache/hive

@Test
public void testValidCases() throws Exception {
 // Plain yyyy-MM-dd strings across a wide range of years parse exactly.
 checkValidCase("1945-12-31", Date.valueOf("1945-12-31"));
 checkValidCase("1946-01-01", Date.valueOf("1946-01-01"));
 checkValidCase("2001-11-12", Date.valueOf("2001-11-12"));
 checkValidCase("0004-05-06", Date.valueOf("0004-05-06"));
 checkValidCase("1678-09-10", Date.valueOf("1678-09-10"));
 checkValidCase("9999-10-11", Date.valueOf("9999-10-11"));
 // Timestamp strings should parse ok (time-of-day part is dropped)
 checkValidCase("2001-11-12 01:02:03", Date.valueOf("2001-11-12"));
 // Leading spaces are tolerated
 checkValidCase(" 1946-01-01", Date.valueOf("1946-01-01"));
 checkValidCase(" 2001-11-12 01:02:03", Date.valueOf("2001-11-12"));
 // Out-of-range month/day values are not rejected: they roll over into
 // the following month/year (month 13 -> January next year; Nov 31 -> Dec 1).
 checkValidCase("2001-13-12", Date.valueOf("2002-01-12"));
 checkValidCase("2001-11-31", Date.valueOf("2001-12-01"));
}

代码示例来源:origin: apache/hive

/**
 * Builds an HCatRecord exercising the nested/complex forms of the types
 * added in HCatalog 0.13: a decimal-keyed map and a timestamp-keyed map
 * whose value list holds a date plus a copy of the decimal map.
 */
private static HCatRecord getHCat13TypesComplexRecord() {
  Map<HiveDecimal, String> decimalMap = new HashMap<HiveDecimal, String>();
  decimalMap.put(HiveDecimal.create(new BigDecimal("1234.12")), "1234.12");
  decimalMap.put(HiveDecimal.create(new BigDecimal("1234.13")), "1234.13");

  List<Object> nestedList = new ArrayList<Object>();
  nestedList.add(Date.valueOf("2014-01-05"));
  nestedList.add(new HashMap<HiveDecimal, String>(decimalMap));

  Map<Timestamp, List<Object>> timestampMap = new HashMap<Timestamp, List<Object>>();
  timestampMap.put(Timestamp.ofEpochMilli(System.currentTimeMillis()), nestedList);

  List<Object> fields = new ArrayList<Object>();
  fields.add(decimalMap);
  fields.add(timestampMap);
  return new DefaultHCatRecord(fields);
}

代码示例来源:origin: apache/hive

/**
 * Builds an HCatRecord covering the scalar types added in HCatalog 0.13:
 * decimal, char, varchar, date and timestamp.
 */
private static HCatRecord getHCat13TypesRecord() {
  List<Object> fields = new ArrayList<Object>(5);
  fields.add(HiveDecimal.create(new BigDecimal("123.45"))); // precision 5, scale 2
  fields.add(new HiveChar("hive_char", 10));
  fields.add(new HiveVarchar("hive_varchar", 20));
  fields.add(Date.valueOf("2014-01-06"));
  fields.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
  return new DefaultHCatRecord(fields);
}
private static HCatRecord getHCat13TypesComplexRecord() {

代码示例来源:origin: apache/hive

/**
 * Converts a string to a Timestamp, accepting (in order of preference):
 * a bare date (length DATE_LENGTH — presumably 10, "yyyy-mm-dd"; TODO confirm),
 * a plain timestamp, a zoned timestamp, and finally a date string again as a
 * last resort. The fallback order is significant; do not reorder.
 */
public static Timestamp stringToTimestamp(String s) {
  s = s.trim();
  // Handle simpler cases directly avoiding exceptions
  if (s.length() == DATE_LENGTH) {
   // It's a bare date: convert via epoch millis (midnight of that day).
   return Timestamp.ofEpochMilli(Date.valueOf(s).toEpochMilli());
  }
  try {
   return Timestamp.valueOf(s);
  } catch (IllegalArgumentException eT) {
   // Try zoned timestamp: parse the zone, then strip it back to local time.
   try {
    return Timestamp.valueOf(
      TimestampTZUtil.parse(s).getZonedDateTime().toLocalDateTime().toString());
   } catch (IllegalArgumentException | DateTimeParseException eTZ) {
    // Last attempt: treat it as a date string after all; if this also
    // fails, the IllegalArgumentException propagates to the caller.
    return Timestamp.ofEpochMilli(Date.valueOf(s).toEpochMilli());
   }
  }
 }
}

代码示例来源:origin: apache/hive

/**
 * Validates that CURRENT_DATE is called with no arguments and caches the
 * session's query-start date (in the session-local time zone) so every row
 * of the query observes the same value.
 *
 * @param arguments must be empty
 * @return the writable date object inspector
 * @throws UDFArgumentException if any argument is supplied
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
  throws UDFArgumentException {
 if (arguments.length != 0) {
  throw new UDFArgumentLengthException(
    "The function CURRENT_DATE does not take any arguments, but found "
    + arguments.length);
 }
 // Compute once per query; subsequent calls reuse the cached value.
 if (currentDate == null) {
  SessionState ss = SessionState.get();
  ZonedDateTime dateTime = ss.getQueryCurrentTimestamp().atZone(
    ss.getConf().getLocalTimeZone());
  // Use toLocalDate() instead of toString().substring(0, 10): the substring
  // silently produces garbage for years outside 0000-9999, where the ISO
  // string gains a sign or extra digits and the date no longer occupies the
  // first 10 characters.
  Date dateVal = Date.valueOf(dateTime.toLocalDate().toString());
  currentDate = new DateWritableV2(dateVal);
 }
 return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}

代码示例来源:origin: apache/hive

/** GREATEST over dates: picks the latest date; any null argument yields null. */
public void testGreatestDate() throws HiveException {
  GenericUDFGreatest udf = new GenericUDFGreatest();
  ObjectInspector[] inspectors = new ObjectInspector[3];
  for (int idx = 0; idx < inspectors.length; idx++) {
    inspectors[idx] = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  }
  udf.initialize(inspectors);
  Date earlier = Date.valueOf("2015-03-20");
  Date latest = Date.valueOf("2015-03-21");
  Date earliest = Date.valueOf("2014-03-20");
  // All non-null: the maximum wins.
  runAndVerify(new Date[] { earlier, latest, earliest }, latest, udf);
  // A null in any position makes the whole result null.
  runAndVerify(new Date[] { null, latest, earliest }, null, udf);
  runAndVerify(new Date[] { earlier, null, earliest }, null, udf);
  runAndVerify(new Date[] { earlier, latest, null }, null, udf);
  runAndVerify(new Date[] { null, null, null }, null, udf);
}

代码示例来源:origin: apache/hive

/** LEAST over dates: picks the earliest date; any null argument yields null. */
public void testLeastDate() throws HiveException {
  GenericUDFLeast udf = new GenericUDFLeast();
  ObjectInspector[] inspectors = new ObjectInspector[3];
  for (int idx = 0; idx < inspectors.length; idx++) {
    inspectors[idx] = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  }
  udf.initialize(inspectors);
  Date middle = Date.valueOf("2015-03-20");
  Date latest = Date.valueOf("2015-03-21");
  Date earliest = Date.valueOf("2014-03-20");
  // All non-null: the minimum wins.
  runAndVerify(new Date[] { middle, latest, earliest }, earliest, udf);
  // A null in any position makes the whole result null.
  runAndVerify(new Date[] { null, latest, earliest }, null, udf);
  runAndVerify(new Date[] { middle, null, earliest }, null, udf);
  runAndVerify(new Date[] { middle, latest, null }, null, udf);
  runAndVerify(new Date[] { null, null, null }, null, udf);
}

代码示例来源:origin: apache/hive

@Test
public void testNormalizeColSpec() throws Exception {
 // Hive normalizes partition spec for dates to yyyy-mm-dd format. Some versions of Java will
 // accept other formats for Date.valueOf, e.g. yyyy-m-d, and who knows what else in the future;
 // some will not accept other formats, so we cannot test normalization with them - type check
 // will fail before it can ever happen. Thus, test in isolation.
 checkNormalization("date", "2010-01-01", "2010-01-01", Date.valueOf("2010-01-01"));
 checkNormalization("date", "2010-1-01", "2010-01-01", Date.valueOf("2010-01-01"));
 checkNormalization("date", "2010-1-1", "2010-01-01", Date.valueOf("2010-01-01"));
 // String columns are passed through without normalization.
 checkNormalization("string", "2010-1-1", "2010-1-1", "2010-1-1");
 try {
  checkNormalization("date", "foo", "", "foo"); // Bad format.
  fail("should throw");
 } catch (SemanticException ex) {
  // expected: unparseable date spec must be rejected
 }
 try {
  checkNormalization("date", "2010-01-01", "2010-01-01", "2010-01-01"); // Bad value type.
  fail("should throw");
 } catch (SemanticException ex) {
  // expected: a raw String is the wrong value type for a date column
 }
}

代码示例来源:origin: apache/hive

/**
 * Evaluates the quarter() UDF on a date string (or null) and checks the
 * resulting quarter number, where a null input must yield a null output.
 */
private void runAndVerifyDt(String str, Integer expResult, GenericUDF udf) throws HiveException {
  DateWritableV2 dateArg = (str == null) ? null : new DateWritableV2(Date.valueOf(str));
  DeferredObject[] args = { new DeferredJavaObject(dateArg) };
  IntWritable output = (IntWritable) udf.evaluate(args);
  if (expResult == null) {
    assertNull(output);
  } else {
    assertNotNull(output);
    assertEquals("quarter() test ", expResult.intValue(), output.get());
  }
}

代码示例来源:origin: apache/hive

/** to_unix_timestamp over a DATE input, including the null case. */
public void testDate() throws HiveException {
  GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp();
  ObjectInspector[] arguments = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector };
  udf.initialize(arguments);
  // The date's own epoch-second value is the expected unix timestamp.
  Date epochDay = Date.valueOf("1970-01-01");
  runAndVerify(udf,
      new DateWritableV2(epochDay),
      new LongWritable(epochDay.toEpochSecond()));
  // A null input must map to a null output.
  runAndVerify(udf, null, null);
}

代码示例来源:origin: apache/hive

/**
 * Evaluates date_format() on a date string (or null) with the given format
 * text and compares the rendered output against the expectation.
 */
private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  DateWritableV2 dateArg = (str == null) ? null : new DateWritableV2(Date.valueOf(str));
  DeferredObject[] args = {
      new DeferredJavaObject(dateArg), new DeferredJavaObject(fmtText) };
  Text output = (Text) udf.evaluate(args);
  assertEquals("date_format() test ", expResult, output != null ? output.toString() : null);
}

代码示例来源:origin: apache/hive

/**
 * Evaluates months_between() on two date strings (either may be null) and
 * checks the fractional month difference, where any null input yields null.
 */
protected void runTestDt(String dt1, String dt2, Double expDiff, GenericUDFMonthsBetween udf)
    throws HiveException {
  DateWritableV2 first = (dt1 == null) ? null : new DateWritableV2(Date.valueOf(dt1));
  DateWritableV2 second = (dt2 == null) ? null : new DateWritableV2(Date.valueOf(dt2));
  DeferredObject[] args = new DeferredObject[] {
      new DeferredJavaObject(first), new DeferredJavaObject(second) };
  DoubleWritable output = (DoubleWritable) udf.evaluate(args);
  if (expDiff == null) {
    assertNull("months_between() test for NULL DATE failed", output);
  } else {
    assertNotNull("months_between() test for NOT NULL DATE failed", output);
    assertEquals("months_between() test for DATE failed", expDiff, output.get(), 0.00000001D);
  }
}
}

代码示例来源:origin: apache/hive

@Test
public void testIntervalYearMonthPlusDate() throws Exception {
  // interval '2-8' year-to-month + date '2001-06-15' => date '2004-02-15'
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  HiveIntervalYearMonthWritable interval =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  DateWritableV2 date = new DateWritableV2(Date.valueOf("2001-06-15"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
      PrimitiveObjectInspectorFactory.writableDateObjectInspector
  };
  // year-month interval + date must resolve to the DATE type
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
  DeferredObject[] args = {
      new DeferredJavaObject(interval),
      new DeferredJavaObject(date),
  };
  DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}

代码示例来源:origin: apache/hive

@Test
public void testDateMinusIntervalYearMonth() throws Exception {
  // date '2004-02-15' - interval '2-8' year-to-month => date '2001-06-15'
  GenericUDFOPMinus udf = new GenericUDFOPMinus();
  DateWritableV2 date = new DateWritableV2(Date.valueOf("2004-02-15"));
  HiveIntervalYearMonthWritable interval =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  // date - year-month interval must resolve to the DATE type
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
  DeferredObject[] args = {
      new DeferredJavaObject(date),
      new DeferredJavaObject(interval),
  };
  DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
}

代码示例来源:origin: apache/hive

@Test
public void testDatePlusIntervalYearMonth() throws Exception {
  // date '2001-06-15' + interval '2-8' year-to-month => date '2004-02-15'
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  DateWritableV2 date = new DateWritableV2(Date.valueOf("2001-06-15"));
  HiveIntervalYearMonthWritable interval =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  // date + year-month interval must resolve to the DATE type
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
  DeferredObject[] args = {
      new DeferredJavaObject(date),
      new DeferredJavaObject(interval),
  };
  DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}

代码示例来源:origin: apache/hive

@Test
public void testIntervalDayTimePlusDate() throws Exception {
  // interval '1 2:3:4.567' day-to-second + date '2001-01-01'
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  HiveIntervalDayTimeWritable interval =
      new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  DateWritableV2 date = new DateWritableV2(Date.valueOf("2001-01-01"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
      PrimitiveObjectInspectorFactory.writableDateObjectInspector
  };
  // Date + day-time interval = timestamp
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  DeferredObject[] args = {
      new DeferredJavaObject(interval),
      new DeferredJavaObject(date),
  };
  TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}

相关文章