org.apache.parquet.io.api.Binary.fromConstantByteArray()方法的使用及代码示例

x33g5p2x  于2022-01-16 转载在 其他  
字(6.5k)|赞(0)|评价(0)|浏览(143)

本文整理了Java中org.apache.parquet.io.api.Binary.fromConstantByteArray()方法的一些代码示例,展示了Binary.fromConstantByteArray()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Binary.fromConstantByteArray()方法的具体详情如下:
包路径:org.apache.parquet.io.api.Binary
类名称:Binary
方法名:fromConstantByteArray

Binary.fromConstantByteArray介绍

暂无

代码示例

代码示例来源:origin: org.apache.spark/spark-sql_2.10

  1. @Override
  2. public final Binary readBinary(int len) {
  3. Binary result = Binary.fromConstantByteArray(buffer, offset - Platform.BYTE_ARRAY_OFFSET, len);
  4. offset += len;
  5. return result;
  6. }
  7. }

代码示例来源:origin: org.apache.spark/spark-sql_2.11

  1. @Override
  2. public final Binary readBinary(int len) {
  3. ByteBuffer buffer = getBuffer(len);
  4. if (buffer.hasArray()) {
  5. return Binary.fromConstantByteArray(
  6. buffer.array(), buffer.arrayOffset() + buffer.position(), len);
  7. } else {
  8. byte[] bytes = new byte[len];
  9. buffer.get(bytes);
  10. return Binary.fromConstantByteArray(bytes);
  11. }
  12. }
  13. }

代码示例来源:origin: org.apache.spark/spark-sql

  1. @Override
  2. public final Binary readBinary(int len) {
  3. ByteBuffer buffer = getBuffer(len);
  4. if (buffer.hasArray()) {
  5. return Binary.fromConstantByteArray(
  6. buffer.array(), buffer.arrayOffset() + buffer.position(), len);
  7. } else {
  8. byte[] bytes = new byte[len];
  9. buffer.get(bytes);
  10. return Binary.fromConstantByteArray(bytes);
  11. }
  12. }
  13. }

代码示例来源:origin: apache/hive

  1. protected static void writeListData(ParquetWriter<Group> writer, boolean isDictionaryEncoding,
  2. int elementNum) throws IOException {
  3. SimpleGroupFactory f = new SimpleGroupFactory(schema);
  4. int listMaxSize = 4;
  5. int listElementIndex = 0;
  6. for (int i = 0; i < elementNum; i++) {
  7. boolean isNull = isNull(i);
  8. Group group = f.newGroup();
  9. int listSize = i % listMaxSize + 1;
  10. if (!isNull) {
  11. for (int j = 0; j < listSize; j++) {
  12. group.append("list_int32_field", getIntValue(isDictionaryEncoding, listElementIndex));
  13. group.append("list_int64_field", getLongValue(isDictionaryEncoding, listElementIndex));
  14. group.append("list_double_field", getDoubleValue(isDictionaryEncoding, listElementIndex));
  15. group.append("list_float_field", getFloatValue(isDictionaryEncoding, listElementIndex));
  16. group.append("list_boolean_field", getBooleanValue(listElementIndex));
  17. group.append("list_binary_field", getBinaryValue(isDictionaryEncoding, listElementIndex));
  18. HiveDecimal hd = getDecimal(isDictionaryEncoding, listElementIndex).setScale(2);
  19. HiveDecimalWritable hdw = new HiveDecimalWritable(hd);
  20. group.append("list_decimal_field", Binary.fromConstantByteArray(hdw.getInternalStorage()));
  21. listElementIndex++;
  22. }
  23. }
  24. for (int j = 0; j < listMaxSize; j++) {
  25. group.append("list_binary_field_for_repeat_test", getBinaryValue(isDictionaryEncoding, i));
  26. }
  27. writer.write(group);
  28. }
  29. writer.close();
  30. }

代码示例来源:origin: apache/hive

  1. HiveDecimal hd = getDecimal(isDictionaryEncoding, mapElementIndex).setScale(2);
  2. HiveDecimalWritable hdw = new HiveDecimalWritable(hd);
  3. Binary decimalValForMap = Binary.fromConstantByteArray(hdw.getInternalStorage());
  4. group.addGroup("map_int32").append("key", intValForMap).append("value", intValForMap);
  5. group.addGroup("map_int64").append("key", longValForMap).append("value", longValForMap);

代码示例来源:origin: com.alibaba.blink/flink-table

  1. @Override
  2. public final Binary readBinary(int len) {
  3. Binary result = Binary.fromConstantByteArray(buffer, offset - BYTE_ARRAY_OFFSET, len);
  4. offset += len;
  5. return result;
  6. }

代码示例来源:origin: io.snappydata/snappy-spark-sql

  1. @Override
  2. public final Binary readBinary(int len) {
  3. Binary result = Binary.fromConstantByteArray(buffer, offset - Platform.BYTE_ARRAY_OFFSET, len);
  4. offset += len;
  5. return result;
  6. }
  7. }

代码示例来源:origin: apache/hive

  1. group.append("value", Binary.fromConstantByteArray(w.getInternalStorage()));

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. return Binary.fromConstantByteArray(getBytesUnsafe(), start, length);
  4. }
  5. @Override

代码示例来源:origin: org.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. if (isBackingBytesReused) {
  4. return Binary.fromReusedByteArray(value, offset + start, length);
  5. } else {
  6. return Binary.fromConstantByteArray(value, offset + start, length);
  7. }
  8. }

代码示例来源:origin: org.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. if (isBackingBytesReused) {
  4. return Binary.fromReusedByteArray(value, start, length);
  5. } else {
  6. return Binary.fromConstantByteArray(value, start, length);
  7. }
  8. }

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. public Binary copy() {
  2. if (isBackingBytesReused) {
  3. return Binary.fromConstantByteArray(getBytes());
  4. } else {
  5. return this;
  6. }
  7. }

代码示例来源:origin: org.apache.parquet/parquet-column

  1. public Binary copy() {
  2. if (isBackingBytesReused) {
  3. return Binary.fromConstantByteArray(getBytes());
  4. } else {
  5. return this;
  6. }
  7. }

代码示例来源:origin: org.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. return Binary.fromConstantByteArray(getBytesUnsafe(), start, length);
  4. }
  5. @Override

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. if (isBackingBytesReused) {
  4. return Binary.fromReusedByteArray(value, offset + start, length);
  5. } else {
  6. return Binary.fromConstantByteArray(value, offset + start, length);
  7. }
  8. }

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. @Override
  2. public Binary slice(int start, int length) {
  3. if (isBackingBytesReused) {
  4. return Binary.fromReusedByteArray(value, start, length);
  5. } else {
  6. return Binary.fromConstantByteArray(value, start, length);
  7. }
  8. }

代码示例来源:origin: org.apache.parquet/parquet-column

  1. public DeltaByteArrayReader() {
  2. this.prefixLengthReader = new DeltaBinaryPackingValuesReader();
  3. this.suffixReader = new DeltaLengthByteArrayValuesReader();
  4. this.previous = Binary.fromConstantByteArray(new byte[0]);
  5. }

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. public DeltaByteArrayReader() {
  2. this.prefixLengthReader = new DeltaBinaryPackingValuesReader();
  3. this.suffixReader = new DeltaLengthByteArrayValuesReader();
  4. this.previous = Binary.fromConstantByteArray(new byte[0]);
  5. }

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. @Override
  2. Binary truncateMin(Binary minValue, int length) {
  3. if (minValue.length() <= length) {
  4. return minValue;
  5. }
  6. ByteBuffer buffer = minValue.toByteBuffer();
  7. byte[] array;
  8. if (validator.checkValidity(buffer) == Validity.VALID) {
  9. array = truncateUtf8(buffer, length);
  10. } else {
  11. array = truncate(buffer, length);
  12. }
  13. return array == null ? minValue : Binary.fromConstantByteArray(array);
  14. }

代码示例来源:origin: org.lasersonlab.apache.parquet/parquet-column

  1. @Override
  2. Binary truncateMax(Binary maxValue, int length) {
  3. if (maxValue.length() <= length) {
  4. return maxValue;
  5. }
  6. byte[] array;
  7. ByteBuffer buffer = maxValue.toByteBuffer();
  8. if (validator.checkValidity(buffer) == Validity.VALID) {
  9. array = incrementUtf8(truncateUtf8(buffer, length));
  10. } else {
  11. array = increment(truncate(buffer, length));
  12. }
  13. return array == null ? maxValue : Binary.fromConstantByteArray(array);
  14. }

相关文章