Usage of org.apache.parquet.example.data.Group.getFieldRepetitionCount() with code examples


This article collects Java code examples for the org.apache.parquet.example.data.Group.getFieldRepetitionCount() method and shows how it is used in practice. The snippets were extracted from selected open-source projects found on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as a useful reference. Details of Group.getFieldRepetitionCount() are as follows:
Package: org.apache.parquet.example.data
Class: Group
Method: getFieldRepetitionCount

About Group.getFieldRepetitionCount

getFieldRepetitionCount(int fieldIndex) (and its getFieldRepetitionCount(String field) overload) returns the number of values this group holds for the given field, i.e. how many times the field is repeated in the current record: 0 when an optional field is absent, 1 when a required or optional field is present, and any non-negative count for a repeated field.
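
As a quick orientation before the project snippets below, here is a minimal sketch (not taken from any of the listed projects; the class and method names GroupDump / dump are made up for this illustration) of the typical pattern: ask the schema how many fields a group has, then use getFieldRepetitionCount to find out how many values each field actually holds in the current record.

import org.apache.parquet.example.data.Group;
import org.apache.parquet.schema.GroupType;

public class GroupDump {
  /**
   * Recursively print every value of every field of a Group.
   * getFieldRepetitionCount() reports how many values the field holds
   * in this record: 0 for a missing optional field, possibly many for
   * a repeated field.
   */
  public static void dump(Group group, String indent) {
    GroupType type = group.getType();
    for (int field = 0; field < type.getFieldCount(); field++) {
      int count = group.getFieldRepetitionCount(field);
      for (int index = 0; index < count; index++) {
        if (type.getType(field).isPrimitive()) {
          System.out.println(indent + type.getFieldName(field) + " = "
              + group.getValueToString(field, index));
        } else {
          dump(group.getGroup(field, index), indent + "  ");
        }
      }
    }
  }
}

Iterating up to getFieldRepetitionCount, rather than assuming one value per field, is what lets the same loop handle required, optional, and repeated fields alike.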

Code examples

Code example source: apache/incubator-druid

/**
 * Convert a primitive group field to an "ingestion friendly" Java object
 *
 * @return "ingestion ready" Java object, or null
 */
@Nullable
private static Object convertPrimitiveField(Group g, int fieldIndex, boolean binaryAsString)
{
 PrimitiveType pt = (PrimitiveType) g.getType().getFields().get(fieldIndex);
 if (pt.isRepetition(Type.Repetition.REPEATED) && g.getFieldRepetitionCount(fieldIndex) > 1) {
  List<Object> vals = new ArrayList<>();
  for (int i = 0; i < g.getFieldRepetitionCount(fieldIndex); i++) {
   vals.add(convertPrimitiveField(g, fieldIndex, i, binaryAsString));
  }
  return vals;
 }
 return convertPrimitiveField(g, fieldIndex, 0, binaryAsString);
}

Code example source: apache/incubator-druid

/**
 * convert a repeated field into a list of primitives or groups
 */
private static List<Object> convertRepeatedFieldToList(Group g, int fieldIndex, boolean binaryAsString)
{
 Type t = g.getType().getFields().get(fieldIndex);
 assert t.getRepetition().equals(Type.Repetition.REPEATED);
 int repeated = g.getFieldRepetitionCount(fieldIndex);
 List<Object> vals = new ArrayList<>();
 for (int i = 0; i < repeated; i++) {
  if (t.isPrimitive()) {
   vals.add(convertPrimitiveField(g, fieldIndex, i, binaryAsString));
  } else {
   vals.add(g.getGroup(fieldIndex, i));
  }
 }
 return vals;
}

Code example source: apache/incubator-druid

int mapEntries = g.getFieldRepetitionCount(0);
Map<String, Object> converted = new HashMap<>();
for (int i = 0; i < mapEntries; i++) {
 // ... (conversion of each repeated map entry elided in the original excerpt)
}

Code example source: apache/incubator-druid

int repeated = g.getFieldRepetitionCount(0);
boolean isListItemPrimitive = g.getType().getFields().get(0).isPrimitive();
List<Object> vals = new ArrayList<>();

Code example source: apache/ignite

/**
 * Prints the given group (one row of a Parquet file).
 *
 * @param g The given group.
 */
private static void printGroup(Group g) {
  int fieldCnt = g.getType().getFieldCount();
  for (int field = 0; field < fieldCnt; field++) {
    int valCnt = g.getFieldRepetitionCount(field);
    Type fieldType = g.getType().getType(field);
    String fieldName = fieldType.getName();
    for (int idx = 0; idx < valCnt; idx++) {
      if (fieldType.isPrimitive())
        System.out.println(fieldName + " " + g.getValueToString(field, idx));
      else
        printGroup(g.getGroup(field, idx));
    }
  }
  System.out.println();
}
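
For context, a printGroup-style method needs Group instances to work on. A minimal, hypothetical sketch of feeding it rows read via ParquetReader with GroupReadSupport (the helper name printAllRows and the file path are made up for this illustration) could look like this:

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.example.GroupReadSupport;

public static void printAllRows(String file) throws IOException {
  // Read every row of the Parquet file as a Group and print it with the method above.
  try (ParquetReader<Group> reader =
         ParquetReader.builder(new GroupReadSupport(), new Path(file)).build()) {
    Group g;
    while ((g = reader.read()) != null) {
      printGroup(g);
    }
  }
}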

Code example source: apache/ignite

/**
 * Read coefficient matrix from parquet.
 *
 * @param g Coefficient group.
 * @return Vector of coefficients.
 */
private static Vector readSVMCoefficients(SimpleGroup g) {
  Vector coefficients;
  Group coeffGroup = g.getGroup(0, 0).getGroup(3, 0);
  final int amountOfCoefficients = coeffGroup.getFieldRepetitionCount(0);
  coefficients = new DenseVector(amountOfCoefficients);
  for (int j = 0; j < amountOfCoefficients; j++) {
    double coefficient = coeffGroup.getGroup(0, j).getDouble(0, 0);
    coefficients.set(j, coefficient);
  }
  return coefficients;
}

Code example source: apache/ignite

/**
 * Read coefficient matrix from parquet.
 *
 * @param g Coefficient group.
 * @return Vector of coefficients.
 */
private static Vector readLinRegCoefficients(SimpleGroup g) {
  Vector coefficients;
  Group coeffGroup = g.getGroup(1, 0).getGroup(3, 0);
  final int amountOfCoefficients = coeffGroup.getFieldRepetitionCount(0);
  coefficients = new DenseVector(amountOfCoefficients);
  for (int j = 0; j < amountOfCoefficients; j++) {
    double coefficient = coeffGroup.getGroup(0, j).getDouble(0, 0);
    coefficients.set(j, coefficient);
  }
  return coefficients;
}

Code example source: apache/incubator-druid

if (g.getFieldRepetitionCount(fieldIndex) <= 0) {
 return null;
}
// ...
int repeated = g.getFieldRepetitionCount(fieldIndex);
List<Object> vals = new ArrayList<>();
for (int i = 0; i < repeated; i++) {
 // ... (loop body elided in the original excerpt)
}

Code example source: apache/ignite

/**
 * Read coefficient matrix from parquet.
 *
 * @param g Coefficient group.
 * @return Vector of coefficients.
 */
private static Vector readCoefficients(SimpleGroup g) {
  Vector coefficients;
  final int amountOfCoefficients = g.getGroup(3, 0).getGroup(5, 0).getFieldRepetitionCount(0);
  coefficients = new DenseVector(amountOfCoefficients);
  for (int j = 0; j < amountOfCoefficients; j++) {
    double coefficient = g.getGroup(3, 0).getGroup(5, 0).getGroup(0, j).getDouble(0, 0);
    coefficients.set(j, coefficient);
  }
  return coefficients;
}

Code example source: apache/ignite

private static Model loadKMeansModel(String pathToMdl) {
  Vector[] centers = null;
  try (ParquetFileReader r = ParquetFileReader.open(HadoopInputFile.fromPath(new Path(pathToMdl), new Configuration()))) {
    PageReadStore pages;
    final MessageType schema = r.getFooter().getFileMetaData().getSchema();
    final MessageColumnIO colIO = new ColumnIOFactory().getColumnIO(schema);
    while (null != (pages = r.readNextRowGroup())) {
      final int rows = (int)pages.getRowCount();
      final RecordReader recordReader = colIO.getRecordReader(pages, new GroupRecordConverter(schema));
      centers = new DenseVector[rows];
      for (int i = 0; i < rows; i++) {
        final SimpleGroup g = (SimpleGroup)recordReader.read();
        // final int clusterIdx = g.getInteger(0, 0);
        Group clusterCenterCoeff = g.getGroup(1, 0).getGroup(3, 0);
        final int amountOfCoefficients = clusterCenterCoeff.getFieldRepetitionCount(0);
        centers[i] = new DenseVector(amountOfCoefficients);
        for (int j = 0; j < amountOfCoefficients; j++) {
          double coefficient = clusterCenterCoeff.getGroup(0, j).getDouble(0, 0);
          centers[i].set(j, coefficient);
        }
      }
    }
  }
  catch (IOException e) {
    System.out.println("Error reading parquet file.");
    e.printStackTrace();
  }
  return new KMeansModel(centers, new EuclideanDistance());
}

Code example source: iflytek/Guitar

Group group = (Group) record.data;
int arraySize = group.getFieldRepetitionCount("array");
for (int i = 0; i < arraySize; i++) {
  if ("Integer".equals(dataType)) {
    // ... (elided in the original excerpt)
  }
}

// second excerpt from the same source, addressing the repeated field by name:
int listSize = group.getFieldRepetitionCount("list");
for (int i = 0; i < listSize; i++) {
  Group listGroup = group.getGroup("list", i);
  // ... (elided)
}

Code example source: iflytek/Guitar

map = new HashMap<String, Object>();  // "map" is declared earlier in the original source
Group group = (Group) record.data;
int size = group.getFieldRepetitionCount("map");
for (int i = 0; i < size; i++) {
  Group mapGroup = group.getGroup("map", i);
  // ... (handling of each map entry elided in the original excerpt)
}

Code example source: vmware/hillview

int fieldCount = g.getType().getFieldCount();
for (int field = 0; field < fieldCount; field++) {
  int valueCount = g.getFieldRepetitionCount(field);
  IAppendableColumn col = cols.get(field);
  if (valueCount == 0) {
    // ... (handling of missing and present values elided in the original excerpt)
  }
}

Code example source: org.apache.parquet/parquet-column

private void writeGroup(Group group, GroupType type) {
  int fieldCount = type.getFieldCount();
  for (int field = 0; field < fieldCount; ++field) {
    int valueCount = group.getFieldRepetitionCount(field);
    if (valueCount > 0) {
      Type fieldType = type.getType(field);
      String fieldName = fieldType.getName();
      recordConsumer.startField(fieldName, field);
      for (int index = 0; index < valueCount; ++index) {
        if (fieldType.isPrimitive()) {
          group.writeValue(field, index, recordConsumer);
        } else {
          recordConsumer.startGroup();
          writeGroup(group.getGroup(field, index), fieldType.asGroupType());
          recordConsumer.endGroup();
        }
      }
      recordConsumer.endField(fieldName, field);
    }
  }
}
