org.apache.hadoop.hive.common.type.Date.<init>()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(6.7k)|赞(0)|评价(0)|浏览(206)

本文整理了Java中org.apache.hadoop.hive.common.type.Date.&lt;init&gt;()方法的一些代码示例,展示了Date.&lt;init&gt;()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Date.&lt;init&gt;()方法的具体详情如下:
包路径:org.apache.hadoop.hive.common.type.Date
类名称:Date
方法名:<init>

Date.<init>介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Return a copy of this object.
 *
 * @return a new {@code Date} wrapping the same underlying {@code LocalDate}
 */
public Object clone() {
 // LocalDate is immutable, so sharing the reference is a safe "copy".
 return new Date(this.localDate);
}

代码示例来源:origin: apache/hive

/**
 * Static factory returning a {@code Date} for the given calendar fields.
 *
 * @param year the year
 * @param month the month of year, 1-12
 * @param dayOfMonth the day of month, 1-31
 * @return the corresponding {@code Date}
 */
public static Date of(int year, int month, int dayOfMonth) {
 LocalDate day = LocalDate.of(year, month, dayOfMonth);
 return new Date(day);
}

代码示例来源:origin: apache/hive

/**
 * Static factory building a {@code Date} from milliseconds since the epoch,
 * interpreted in UTC.
 *
 * @param epochMilli milliseconds since 1970-01-01T00:00:00Z
 * @return the UTC calendar date containing that instant
 */
public static Date ofEpochMilli(long epochMilli) {
 Instant instant = Instant.ofEpochMilli(epochMilli);
 LocalDate day = LocalDateTime.ofInstant(instant, ZoneOffset.UTC).toLocalDate();
 return new Date(day);
}

代码示例来源:origin: apache/hive

/**
 * Parses a {@code Date} from a string, ignoring any time-of-day portion.
 * Leading/trailing whitespace is stripped, and anything after the first
 * space (e.g. a trailing "HH:mm:ss") is discarded before parsing.
 *
 * @param s the date string, e.g. "2020-01-31" or "2020-01-31 12:00:00"
 * @return the parsed {@code Date}
 * @throws IllegalArgumentException if the string cannot be parsed
 */
public static Date valueOf(String s) {
 s = s.trim();
 int idx = s.indexOf(" ");
 if (idx != -1) {
  // Keep only the date part; drop the time-of-day suffix.
  s = s.substring(0, idx);
 }
 LocalDate localDate;
 try {
  localDate = LocalDate.parse(s, PARSE_FORMATTER);
 } catch (DateTimeParseException e) {
  // Chain the cause so callers can see the underlying parse position/detail.
  throw new IllegalArgumentException("Cannot create date, parsing error", e);
 }
 return new Date(localDate);
}

代码示例来源:origin: apache/hive

/**
 * Adds a year-month interval to a date.
 *
 * @param interval the year-month interval to add; may be null
 * @param dt the base date; may be null
 * @return the resulting date, or null if either argument is null
 */
public Date add(HiveIntervalYearMonth interval, Date dt) {
 if (interval == null || dt == null) {
  return null;
 }
 Date result = new Date();
 add(interval, dt, result);
 return result;
}

代码示例来源:origin: apache/hive

/**
 * Subtracts a year-month interval from a date.
 *
 * @param left the base date; may be null
 * @param right the year-month interval to subtract; may be null
 * @return the resulting date, or null if either argument is null
 */
public Date subtract(Date left, HiveIntervalYearMonth right) {
 if (left == null || right == null) {
  return null;
 }
 Date result = new Date();
 subtract(left, right, result);
 return result;
}

代码示例来源:origin: apache/hive

/**
 * Parses a date string via the mutating overload.
 *
 * @param strValue the string to parse
 * @return the parsed {@code Date}, or null if parsing failed
 */
public Date parseDate(String strValue) {
 Date parsed = new Date();
 return parseDate(strValue, parsed) ? parsed : null;
}

代码示例来源:origin: apache/hive

/**
 * Adds a year-month interval to a date (date-first argument order).
 *
 * @param dt the base date; may be null
 * @param interval the year-month interval to add; may be null
 * @return the resulting date, or null if either argument is null
 */
public Date add(Date dt, HiveIntervalYearMonth interval) {
 if (dt == null || interval == null) {
  return null;
 }
 Date result = new Date();
 add(dt, interval, result);
 return result;
}

代码示例来源:origin: apache/hive

/**
 * Creates a converter between the given input and output object inspectors.
 * Pre-creates a reusable settable date object ({@code r}) initialized from a
 * fresh {@code Date}, presumably to avoid per-row allocation — confirm with
 * the convert() implementation.
 *
 * @param inputOI inspector for the incoming primitive values
 * @param outputOI settable inspector used to produce date outputs
 */
public DateConverter(PrimitiveObjectInspector inputOI,
  SettableDateObjectInspector outputOI) {
 this.inputOI = inputOI;
 this.outputOI = outputOI;
 r = outputOI.create(new Date());
}

代码示例来源:origin: apache/hive

/**
 * Static factory building a {@code Date} from a count of days since the epoch.
 *
 * @param epochDay days since 1970-01-01
 * @return the corresponding {@code Date}
 */
public static Date ofEpochDay(int epochDay) {
 LocalDate day = LocalDate.ofEpochDay(epochDay);
 return new Date(day);
}

代码示例来源:origin: apache/hive

/**
 * Parses the UTF-8 date string in row {@code i} of {@code inV} and writes its
 * epoch-day value into {@code outputColVector}; marks the row null on failure.
 */
private void evaluate(LongColumnVector outputColVector, BytesColumnVector inV, int i) {
 String dateString =
   new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
 Date parsed = new Date();
 if (!dateParser.parseDate(dateString, parsed)) {
  // Parse failure: flag the row null (vector slot gets a dummy value of 1).
  outputColVector.vector[i] = 1;
  outputColVector.isNull[i] = true;
  outputColVector.noNulls = false;
  return;
 }
 outputColVector.vector[i] = DateWritableV2.dateToDays(parsed);
}

代码示例来源:origin: apache/hive

/**
 * Set the DateWritableV2 based on the year/month/day of the date in the local timezone.
 * A null input resets the internal date to a fresh {@code Date}.
 *
 * @param d Date value; may be null
 */
public void set(Date d) {
 if (d != null) {
  set(d.toEpochDay());
 } else {
  date = new Date();
 }
}

代码示例来源:origin: apache/hive

/**
 * Perform date + int operation.
 *
 * @param dt the date; may be null
 * @param interval the number of days to add
 * @return the resulting date, or null if {@code dt} is null
 */
public Date add(Date dt, int interval) {
 if (dt == null) {
  return null;
 }
 Date result = new Date();
 long shiftedDays = dt.toEpochDay() + interval;
 result.setTimeInDays(shiftedDays);
 return result;
}

代码示例来源:origin: apache/hive

/**
 * Parses the byte slice at row {@code i} as a date string, shifts it by
 * {@code numDays} (sign controlled by {@code isPositive}), and writes the
 * resulting epoch-day count into {@code outputVector}; nulls the row if the
 * string does not parse.
 */
protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
 BytesColumnVector bcv = (BytesColumnVector) columnVector;
 text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
 org.apache.hadoop.hive.common.type.Date parsed =
   new org.apache.hadoop.hive.common.type.Date();
 if (!dateParser.parseDate(text.toString(), parsed)) {
  // Unparseable input: mark the output row null.
  outputVector.noNulls = false;
  outputVector.isNull[i] = true;
  return;
 }
 long days = DateWritableV2.millisToDays(parsed.toEpochMilli());
 days += isPositive ? numDays : -numDays;
 outputVector.vector[i] = days;
}

代码示例来源:origin: apache/hive

/**
 * Parses the byte slice at {@code index} as a date string, shifts it by
 * {@code numDays} (sign controlled by {@code isPositive}), and writes the
 * resulting epoch-day count into {@code outputVector}. Null or unparseable
 * input nulls the output row.
 */
protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVector outputVector, int index, long numDays) {
 if (inputColumnVector1.isNull[index]) {
  // Propagate input null.
  outputVector.noNulls = false;
  outputVector.isNull[index] = true;
  return;
 }
 text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index],
   inputColumnVector1.length[index]);
 Date parsed = new Date();
 if (!dateParser.parseDate(text.toString(), parsed)) {
  // Unparseable input: mark the output row null.
  outputVector.noNulls = false;
  outputVector.isNull[index] = true;
  return;
 }
 long days = DateWritableV2.millisToDays(parsed.toEpochMilli());
 days += isPositive ? numDays : -numDays;
 outputVector.vector[index] = days;
}

代码示例来源:origin: apache/hive

/**
 * Read DATE.
 * The representation of date in Teradata binary format is:
 * The Date D is a int with 4 bytes using little endian,
 * The representation is (D+19000000).ToString -> YYYYMMDD,
 * eg: Date 07 b2 01 00 -> 111111 in little endian -> 19111111 -> 1911.11.11.
 * the null date will use 0 to pad.
 *
 * @return the date, or null for the zero-padded null date
 * @throws IOException the io exception
 * @throws ParseException the parse exception
 */
public Date readDate() throws IOException, ParseException {
 int raw = readInt();
 if (raw == 0) {
  // Zero is the Teradata encoding for a null date.
  return null;
 }
 String yyyymmdd = String.valueOf(raw + 19000000);
 if (yyyymmdd.length() < DATE_STRING_LENGTH) {
  // Early years produce fewer than 8 digits; restore leading zeros.
  yyyymmdd = StringUtils.leftPad(yyyymmdd, DATE_STRING_LENGTH, '0');
 }
 Date result = new Date();
 result.setYear(Integer.parseInt(yyyymmdd.substring(0, 4)));
 result.setMonth(Integer.parseInt(yyyymmdd.substring(4, 6)));
 result.setDayOfMonth(Integer.parseInt(yyyymmdd.substring(6, 8)));
 return result;
}

代码示例来源:origin: apache/hive

/**
 * Handles the repeating-input case: parses the single date string at row 0
 * and delegates the arithmetic against {@code vector2} to
 * {@code evaluateRepeatedCommon}. A null or unparseable row 0 makes the whole
 * output repeating-null.
 */
protected void evaluateRepeatedString(BytesColumnVector inputColumnVector1,
  long[] vector2, LongColumnVector outputVector,
  boolean selectedInUse, int[] selected, int n) {
 Date parsed = new Date();
 boolean usable = !inputColumnVector1.isNull[0];
 if (usable) {
  text.set(inputColumnVector1.vector[0], inputColumnVector1.start[0],
    inputColumnVector1.length[0]);
  usable = dateParser.parseDate(text.toString(), parsed);
 }
 if (!usable) {
  // Null or unparseable repeated value: entire output column is null.
  outputVector.noNulls = false;
  outputVector.isNull[0] = true;
  outputVector.isRepeating = true;
  return;
 }
 long days = DateWritableV2.millisToDays(parsed.toEpochMilli());
 evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
}

代码示例来源:origin: apache/hive

@Override
public String vectorExpressionParameters() {
 // Renders the scalar operand for EXPLAIN-style output; the branch is chosen
 // by the runtime type of the boxed `object` field.
 String value;
 if (object instanceof Long) {
  // Long scalars are day counts; convert to millis to format as a date.
  Date tempDate = new Date();
  tempDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
  value = tempDate.toString();
 } else if (object instanceof Timestamp) {
  value = org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
    timestampValue.getTime(), timestampValue.getNanos()).toString();
 } else if (object instanceof byte []) {
  // Byte-array scalars are assumed UTF-8 text — NOTE(review): confirm encoding.
  value = new String(this.stringValue, StandardCharsets.UTF_8);
 } else {
  value = "unknown";
 }
 return "val " + value + ", " + getColumnParamString(0, colNum);
}

代码示例来源:origin: apache/hive

return new TimestampWritableV2(new Timestamp());
case DATE:
 return new DateWritableV2(new Date());
case FLOAT:
 return new FloatWritable(0);

代码示例来源:origin: apache/nifi

Date d = record.getAsDate(fieldName, field.getDataType().getFormat());
if(d != null) {
  org.apache.hadoop.hive.common.type.Date hiveDate = new org.apache.hadoop.hive.common.type.Date();
  hiveDate.setTimeInMillis(d.getTime());
  val = hiveDate;

相关文章