org.apache.hadoop.hbase.client.Append.getFamilyCellMap()方法的使用及代码示例

x33g5p2x  于2022-01-16 转载在 其他  
字(19.7k)|赞(0)|评价(0)|浏览(253)

本文整理了Java中org.apache.hadoop.hbase.client.Append.getFamilyCellMap()方法的一些代码示例,展示了Append.getFamilyCellMap()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Append.getFamilyCellMap()方法的具体详情如下:
包路径:org.apache.hadoop.hbase.client.Append
类名称:Append
方法名:getFamilyCellMap

Append.getFamilyCellMap介绍

暂无

代码示例

代码示例来源:origin: apache/hbase

  1. @Override
  2. public Result preAppend(final ObserverContext<RegionCoprocessorEnvironment> e,
  3. final Append append) throws IOException {
  4. NavigableMap<byte [], List<Cell>> map = append.getFamilyCellMap();
  5. for (Map.Entry<byte [], List<Cell>> entry : map.entrySet()) {
  6. for (Cell cell : entry.getValue()) {
  7. String appendStr = Bytes.toString(cell.getValueArray(), cell.getValueOffset(),
  8. cell.getValueLength());
  9. if (appendStr.equals("b")) {
  10. tr10 = append.getTimeRange();
  11. } else if (appendStr.equals("c") && !append.getTimeRange().isAllTime()) {
  12. tr2 = append.getTimeRange();
  13. }
  14. }
  15. }
  16. return null;
  17. }
  18. }

代码示例来源:origin: apache/hbase

  1. @Override
  2. public Result preAppendAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c,
  3. final Append append) throws IOException {
  4. if (append.getAttribute(CHECK_COVERING_PERM) != null) {
  5. // We had failure with table, cf and q perm checks and now giving a chance for cell
  6. // perm check
  7. TableName table = c.getEnvironment().getRegion().getRegionInfo().getTable();
  8. AuthResult authResult = null;
  9. User user = getActiveUser(c);
  10. if (checkCoveringPermission(user, OpType.APPEND, c.getEnvironment(), append.getRow(),
  11. append.getFamilyCellMap(), append.getTimeRange().getMax(), Action.WRITE)) {
  12. authResult = AuthResult.allow(OpType.APPEND.toString(),
  13. "Covering cell set", user, Action.WRITE, table, append.getFamilyCellMap());
  14. } else {
  15. authResult = AuthResult.deny(OpType.APPEND.toString(),
  16. "Covering cell set", user, Action.WRITE, table, append.getFamilyCellMap());
  17. }
  18. AccessChecker.logResult(authResult);
  19. if (authorizationEnabled && !authResult.isAllowed()) {
  20. throw new AccessDeniedException("Insufficient permissions " +
  21. authResult.toContextString());
  22. }
  23. }
  24. return null;
  25. }

代码示例来源:origin: apache/hbase

  1. @Override
  2. public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
  3. throws IOException {
  4. User user = getActiveUser(c);
  5. checkForReservedTagPresence(user, append);
  6. // Require WRITE permission to the table, CF, and the KV to be appended
  7. RegionCoprocessorEnvironment env = c.getEnvironment();
  8. Map<byte[],? extends Collection<Cell>> families = append.getFamilyCellMap();
  9. AuthResult authResult = permissionGranted(OpType.APPEND, user,
  10. env, families, Action.WRITE);
  11. AccessChecker.logResult(authResult);
  12. if (!authResult.isAllowed()) {
  13. if (cellFeaturesEnabled && !compatibleEarlyTermination) {
  14. append.setAttribute(CHECK_COVERING_PERM, TRUE);
  15. } else if (authorizationEnabled) {
  16. throw new AccessDeniedException("Insufficient permissions " +
  17. authResult.toContextString());
  18. }
  19. }
  20. byte[] bytes = append.getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL);
  21. if (bytes != null) {
  22. if (cellFeaturesEnabled) {
  23. addCellPermissions(bytes, append.getFamilyCellMap());
  24. } else {
  25. throw new DoNotRetryIOException("Cell ACLs cannot be persisted");
  26. }
  27. }
  28. return null;
  29. }

代码示例来源:origin: apache/hbase

  1. out.setDurability(durabilityFromHBase(in.getDurability()));
  2. for (Map.Entry<byte [], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
  3. byte[] family = entry.getKey();
  4. for (Cell cell : entry.getValue()) {

代码示例来源:origin: apache/phoenix

  1. public Append dropSequence(long timestamp) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.DROP_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY)));
  11. return append;
  12. }

代码示例来源:origin: apache/phoenix

  1. private Append newReturn(SequenceValue value) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. byte[] opBuf = new byte[] {(byte)MetaOp.RETURN_SEQUENCE.ordinal()};
  5. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, opBuf);
  6. append.setAttribute(SequenceRegionObserver.CURRENT_VALUE_ATTRIB, PLong.INSTANCE.toBytes(value.nextValue));
  7. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  8. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  9. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, value.timestamp, PLong.INSTANCE.toBytes(value.currentValue)),
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, value.timestamp, PBoolean.INSTANCE.toBytes(value.limitReached))
  11. ));
  12. return append;
  13. }

代码示例来源:origin: apache/phoenix

  1. public Append createSequence(long startWith, long incrementBy, long cacheSize, long timestamp, long minValue, long maxValue, boolean cycle) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.CREATE_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. byte[] startWithBuf = PLong.INSTANCE.toBytes(startWith);
  10. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  11. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY),
  12. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, timestamp, startWithBuf),
  13. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.START_WITH_BYTES, timestamp, startWithBuf),
  14. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.INCREMENT_BY_BYTES, timestamp, PLong.INSTANCE.toBytes(incrementBy)),
  15. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CACHE_SIZE_BYTES, timestamp, PLong.INSTANCE.toBytes(cacheSize)),
  16. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MIN_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(minValue)),
  17. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MAX_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(maxValue)),
  18. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CYCLE_FLAG_BYTES, timestamp, PBoolean.INSTANCE.toBytes(cycle)),
  19. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, timestamp, PDataType.FALSE_BYTES)
  20. ));
  21. return append;
  22. }

代码示例来源:origin: apache/phoenix

  1. Cell keyValue = append.getFamilyCellMap().values().iterator().next().iterator().next();
  2. m.getFamilyCellMap().putAll(append.getFamilyCellMap());
  3. break;
  4. case DROP_SEQUENCE:
  5. case CREATE_SEQUENCE:
  6. m = new Put(row, clientTimestamp);
  7. m.getFamilyCellMap().putAll(append.getFamilyCellMap());
  8. break;

代码示例来源:origin: harbby/presto-connectors

  1. /**
  2. * Copy constructor
  3. * @param a
  4. */
  5. public Append(Append a) {
  6. this.row = a.getRow();
  7. this.ts = a.getTimeStamp();
  8. this.familyMap.putAll(a.getFamilyCellMap());
  9. for (Map.Entry<String, byte[]> entry : a.getAttributesMap().entrySet()) {
  10. this.setAttribute(entry.getKey(), entry.getValue());
  11. }
  12. }

代码示例来源:origin: com.aliyun.phoenix/ali-phoenix-core

  1. public Append dropSequence(long timestamp) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.DROP_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY)));
  11. return append;
  12. }

代码示例来源:origin: org.apache.phoenix/phoenix-core

  1. public Append dropSequence(long timestamp) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.DROP_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY)));
  11. return append;
  12. }

代码示例来源:origin: com.google.cloud.bigtable/bigtable-hbase

  1. /** {@inheritDoc} */
  2. @Override
  3. public void adapt(Append operation, ReadModifyWriteRow readModifyWriteRow) {
  4. for (Map.Entry<byte[], List<Cell>> entry : operation.getFamilyCellMap().entrySet()) {
  5. String familyName = Bytes.toString(entry.getKey());
  6. // Bigtable applies all appends present in a single RPC. HBase applies only the last
  7. // mutation present, if any. We remove all but the last mutation for each qualifier here:
  8. List<Cell> cells = CellDeduplicationHelper.deduplicateFamily(operation, entry.getKey());
  9. for (Cell cell : cells) {
  10. readModifyWriteRow.append(
  11. familyName,
  12. ByteString.copyFrom(
  13. cell.getQualifierArray(),
  14. cell.getQualifierOffset(),
  15. cell.getQualifierLength()),
  16. ByteString.copyFrom(
  17. cell.getValueArray(),
  18. cell.getValueOffset(),
  19. cell.getValueLength())
  20. );
  21. }
  22. }
  23. }
  24. }

代码示例来源:origin: GoogleCloudPlatform/cloud-bigtable-client

  1. /** {@inheritDoc} */
  2. @Override
  3. public void adapt(Append operation, ReadModifyWriteRow readModifyWriteRow) {
  4. for (Map.Entry<byte[], List<Cell>> entry : operation.getFamilyCellMap().entrySet()) {
  5. String familyName = Bytes.toString(entry.getKey());
  6. // Bigtable applies all appends present in a single RPC. HBase applies only the last
  7. // mutation present, if any. We remove all but the last mutation for each qualifier here:
  8. List<Cell> cells = CellDeduplicationHelper.deduplicateFamily(operation, entry.getKey());
  9. for (Cell cell : cells) {
  10. readModifyWriteRow.append(
  11. familyName,
  12. ByteString.copyFrom(
  13. cell.getQualifierArray(),
  14. cell.getQualifierOffset(),
  15. cell.getQualifierLength()),
  16. ByteString.copyFrom(
  17. cell.getValueArray(),
  18. cell.getValueOffset(),
  19. cell.getValueLength())
  20. );
  21. }
  22. }
  23. }
  24. }

代码示例来源:origin: org.apache.phoenix/phoenix-core

  1. private Append newReturn(SequenceValue value) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. byte[] opBuf = new byte[] {(byte)MetaOp.RETURN_SEQUENCE.ordinal()};
  5. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, opBuf);
  6. append.setAttribute(SequenceRegionObserver.CURRENT_VALUE_ATTRIB, PLong.INSTANCE.toBytes(value.nextValue));
  7. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  8. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  9. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, value.timestamp, PLong.INSTANCE.toBytes(value.currentValue)),
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, value.timestamp, PBoolean.INSTANCE.toBytes(value.limitReached))
  11. ));
  12. return append;
  13. }

代码示例来源:origin: com.aliyun.phoenix/ali-phoenix-core

  1. private Append newReturn(SequenceValue value) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. byte[] opBuf = new byte[] {(byte)MetaOp.RETURN_SEQUENCE.ordinal()};
  5. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, opBuf);
  6. append.setAttribute(SequenceRegionObserver.CURRENT_VALUE_ATTRIB, PLong.INSTANCE.toBytes(value.nextValue));
  7. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  8. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  9. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, value.timestamp, PLong.INSTANCE.toBytes(value.currentValue)),
  10. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, value.timestamp, PBoolean.INSTANCE.toBytes(value.limitReached))
  11. ));
  12. return append;
  13. }

代码示例来源:origin: org.apache.hbase/hbase-server

  1. @Override
  2. public Result preAppend(final ObserverContext<RegionCoprocessorEnvironment> e,
  3. final Append append) throws IOException {
  4. NavigableMap<byte [], List<Cell>> map = append.getFamilyCellMap();
  5. for (Map.Entry<byte [], List<Cell>> entry : map.entrySet()) {
  6. for (Cell cell : entry.getValue()) {
  7. String appendStr = Bytes.toString(cell.getValueArray(), cell.getValueOffset(),
  8. cell.getValueLength());
  9. if (appendStr.equals("b")) {
  10. tr10 = append.getTimeRange();
  11. } else if (appendStr.equals("c") && !append.getTimeRange().isAllTime()) {
  12. tr2 = append.getTimeRange();
  13. }
  14. }
  15. }
  16. return null;
  17. }
  18. }

代码示例来源:origin: harbby/presto-connectors

  1. @Override
  2. public Result preAppendAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c,
  3. final Append append) throws IOException {
  4. if (append.getAttribute(CHECK_COVERING_PERM) != null) {
  5. // We had failure with table, cf and q perm checks and now giving a chance for cell
  6. // perm check
  7. TableName table = c.getEnvironment().getRegion().getRegionInfo().getTable();
  8. AuthResult authResult = null;
  9. if (checkCoveringPermission(OpType.APPEND, c.getEnvironment(), append.getRow(),
  10. append.getFamilyCellMap(), HConstants.LATEST_TIMESTAMP, Action.WRITE)) {
  11. authResult = AuthResult.allow(OpType.APPEND.toString(), "Covering cell set",
  12. getActiveUser(), Action.WRITE, table, append.getFamilyCellMap());
  13. } else {
  14. authResult = AuthResult.deny(OpType.APPEND.toString(), "Covering cell set",
  15. getActiveUser(), Action.WRITE, table, append.getFamilyCellMap());
  16. }
  17. logResult(authResult);
  18. if (authorizationEnabled && !authResult.isAllowed()) {
  19. throw new AccessDeniedException("Insufficient permissions " +
  20. authResult.toContextString());
  21. }
  22. }
  23. return null;
  24. }

代码示例来源:origin: harbby/presto-connectors

  1. @Override
  2. public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
  3. throws IOException {
  4. User user = getActiveUser();
  5. checkForReservedTagPresence(user, append);
  6. // Require WRITE permission to the table, CF, and the KV to be appended
  7. RegionCoprocessorEnvironment env = c.getEnvironment();
  8. Map<byte[],? extends Collection<Cell>> families = append.getFamilyCellMap();
  9. AuthResult authResult = permissionGranted(OpType.APPEND, user, env, families, Action.WRITE);
  10. logResult(authResult);
  11. if (!authResult.isAllowed()) {
  12. if (cellFeaturesEnabled && !compatibleEarlyTermination) {
  13. append.setAttribute(CHECK_COVERING_PERM, TRUE);
  14. } else if (authorizationEnabled) {
  15. throw new AccessDeniedException("Insufficient permissions " +
  16. authResult.toContextString());
  17. }
  18. }
  19. byte[] bytes = append.getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL);
  20. if (bytes != null) {
  21. if (cellFeaturesEnabled) {
  22. addCellPermissions(bytes, append.getFamilyCellMap());
  23. } else {
  24. throw new DoNotRetryIOException("Cell ACLs cannot be persisted");
  25. }
  26. }
  27. return null;
  28. }

代码示例来源:origin: org.apache.phoenix/phoenix-core

  1. public Append createSequence(long startWith, long incrementBy, long cacheSize, long timestamp, long minValue, long maxValue, boolean cycle) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.CREATE_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. byte[] startWithBuf = PLong.INSTANCE.toBytes(startWith);
  10. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  11. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY),
  12. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, timestamp, startWithBuf),
  13. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.START_WITH_BYTES, timestamp, startWithBuf),
  14. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.INCREMENT_BY_BYTES, timestamp, PLong.INSTANCE.toBytes(incrementBy)),
  15. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CACHE_SIZE_BYTES, timestamp, PLong.INSTANCE.toBytes(cacheSize)),
  16. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MIN_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(minValue)),
  17. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MAX_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(maxValue)),
  18. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CYCLE_FLAG_BYTES, timestamp, PBoolean.INSTANCE.toBytes(cycle)),
  19. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, timestamp, PDataType.FALSE_BYTES)
  20. ));
  21. return append;
  22. }

代码示例来源:origin: com.aliyun.phoenix/ali-phoenix-core

  1. public Append createSequence(long startWith, long incrementBy, long cacheSize, long timestamp, long minValue, long maxValue, boolean cycle) {
  2. byte[] key = this.key.getKey();
  3. Append append = new Append(key);
  4. append.setAttribute(SequenceRegionObserver.OPERATION_ATTRIB, new byte[] {(byte)MetaOp.CREATE_SEQUENCE.ordinal()});
  5. if (timestamp != HConstants.LATEST_TIMESTAMP) {
  6. append.setAttribute(SequenceRegionObserver.MAX_TIMERANGE_ATTRIB, Bytes.toBytes(timestamp));
  7. }
  8. Map<byte[], List<Cell>> familyMap = append.getFamilyCellMap();
  9. byte[] startWithBuf = PLong.INSTANCE.toBytes(startWith);
  10. familyMap.put(PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, Arrays.<Cell>asList(
  11. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, timestamp, ByteUtil.EMPTY_BYTE_ARRAY),
  12. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, timestamp, startWithBuf),
  13. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.START_WITH_BYTES, timestamp, startWithBuf),
  14. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.INCREMENT_BY_BYTES, timestamp, PLong.INSTANCE.toBytes(incrementBy)),
  15. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CACHE_SIZE_BYTES, timestamp, PLong.INSTANCE.toBytes(cacheSize)),
  16. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MIN_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(minValue)),
  17. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.MAX_VALUE_BYTES, timestamp, PLong.INSTANCE.toBytes(maxValue)),
  18. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.CYCLE_FLAG_BYTES, timestamp, PBoolean.INSTANCE.toBytes(cycle)),
  19. PhoenixKeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_FAMILY_BYTES, PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, timestamp, PDataType.FALSE_BYTES)
  20. ));
  21. return append;
  22. }

相关文章