Usage of the org.apache.helix.ZNRecord.merge() method, with code examples


This article collects code examples of the org.apache.helix.ZNRecord.merge() method and shows how ZNRecord.merge() is used in practice. The examples are drawn from selected open-source projects hosted on GitHub, Stack Overflow, and Maven, and are intended as a practical reference. Details of ZNRecord.merge() are as follows:
Package: org.apache.helix
Class: ZNRecord
Method: merge

About ZNRecord.merge

Batch merge of ZNRecordDelta. The method is overloaded: merge(ZNRecord) folds another record's fields into this one, merge(ZNRecordDelta) applies a single delta according to its merge operation, and merge(List<ZNRecordDelta>) applies a batch of deltas; the examples below show all three forms.
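As a rough illustration of how merge(ZNRecord) combines fields, here is a minimal sketch (not taken from the projects below; the record id and field names are invented, and it assumes that a simple field sharing a key is overwritten by the merged-in record, which is consistent with how the examples below use it):

import org.apache.helix.ZNRecord;

public class ZNRecordMergeSketch {
  public static void main(String[] args) {
    ZNRecord base = new ZNRecord("MyResource");
    base.setSimpleField("REPLICAS", "2");

    ZNRecord overlay = new ZNRecord("MyResource");
    overlay.setSimpleField("REPLICAS", "3");                       // same key: merged-in value wins
    overlay.setSimpleField("STATE_MODEL_DEF_REF", "MasterSlave");  // new key: added to base

    base.merge(overlay);
    System.out.println(base.getSimpleField("REPLICAS"));             // expected: 3
    System.out.println(base.getSimpleField("STATE_MODEL_DEF_REF"));  // expected: MasterSlave
  }
}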

Code examples

Code example from: org.apache.helix/helix-core (the same code also appears in apache/helix)

/**
 * Batch merge of {@link ZNRecordDelta}
 * @see #merge(ZNRecordDelta)
 * @param deltaList
 */
void merge(List<ZNRecordDelta> deltaList) {
 for (ZNRecordDelta delta : deltaList) {
  merge(delta);
 }
}

Code example from: org.apache.helix/helix-core (the same code also appears in apache/helix)

@Override
 public ZNRecord update(ZNRecord current) {
  if (current != null) {
   current.merge(_record);
   return current;
  }
  return _record;
 }
}

Code example from: apache/helix (the same code also appears in org.apache.helix/helix-core)

@Override
 public ZNRecord update(ZNRecord currentData) {
  if (currentData != null && mergeOnUpdate) {
   currentData.merge(record);
   return currentData;
  }
  return record;
 }
};
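This anonymous updater performs a read-modify-write: when the znode already holds a record and mergeOnUpdate is set, the incoming record is merged into the stored one; otherwise the incoming record is returned and written as-is. The same decision, pulled out into a standalone helper (a hypothetical sketch, not a Helix API):

// Hypothetical helper mirroring the DataUpdater above; not part of Helix itself.
static ZNRecord mergeOrReplace(ZNRecord current, ZNRecord incoming, boolean mergeOnUpdate) {
  if (current != null && mergeOnUpdate) {
    current.merge(incoming);   // keep what is stored, fold the incoming fields in
    return current;
  }
  return incoming;             // nothing stored yet (or merging disabled): use the incoming record as-is
}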

Code example from: org.apache.helix/helix-core (the same code also appears in apache/helix)

public void merge(CurrentState curState) {
  _curStateDelta.getRecord().merge(curState.getRecord());
 }
}

Code example from: apache/helix (the same code also appears in org.apache.helix/helix-core)

public void merge(CurrentState anotherDelta) {
 _delta.getRecord().merge(anotherDelta.getRecord());
}

Code example from: org.apache.helix/helix-core (the same code also appears in apache/helix)

/**
 * Merge a list of ZNRecords into a single ZNRecord
 * @param records
 * @return {@link ZNRecord}
 */
public ZNRecord assemble(List<ZNRecord> records) {
 ZNRecord assembledRecord = null;
 if (records != null && records.size() > 0) {
  for (ZNRecord record : records) {
   if (record == null) {
    continue;
   }
   if (assembledRecord == null) {
    assembledRecord = new ZNRecord(record.getId());
   }
   assembledRecord.merge(record);
  }
 }
 return assembledRecord;
}
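assemble() is convenient when several ZNRecords describing the same entity, such as records gathered from different participants, need to be collapsed into one view: the result takes its id from the first non-null record and accumulates the fields of all of them. A possible call site (a sketch; "assembler" is a placeholder for whatever object exposes the assemble() method above, and the field values are invented):

ZNRecord fromHost1 = new ZNRecord("MyResource");
fromHost1.setSimpleField("host1_STATE", "ONLINE");

ZNRecord fromHost2 = new ZNRecord("MyResource");
fromHost2.setSimpleField("host2_STATE", "OFFLINE");

ZNRecord combined = assembler.assemble(java.util.Arrays.asList(fromHost1, fromHost2));
// combined has id "MyResource" and carries both simple fields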

Code example from: apache/helix (excerpt)

if (record.getDeltaList().size() > 0) {
  ZNRecord value = new ZNRecord(record.getId());
  value.merge(record);
  client.create(path, value, mode);
} else {
  // ... (the else branch is truncated in the original excerpt)

Code example from: apache/helix (excerpt)

ZNRecord curRecord = client.readData(path);
if (curRecord != null) {
  curRecord.merge(record);
  client.asyncSetData(path, curRecord, -1, null);
} else {
  // ... (lines elided in the original excerpt)
  if (record.getDeltaList().size() > 0) {
    ZNRecord newRecord = new ZNRecord(record.getId());
    newRecord.merge(record);
    client.create(path, null, mode);
    // ... (truncated in the original excerpt)

Code example from: apache/helix

@Override
public ZNRecord update(ZNRecord currentData) {
 ZNRecord newRec = new ZNRecord(message.getResourceName());
 if (currentData != null) {
  int currentGen = convertToInt(newRec.getSimpleField("currentGen"), 0);
  int currentGenStartSeq = convertToInt(newRec.getSimpleField("currentGenStartSeq"), 0);
  int prevGen = convertToInt(newRec.getSimpleField("prevGen"), 0);
  int prevGenEndSeq = convertToInt(newRec.getSimpleField("prevGenEndSeq"), 0);
  newRec.setSimpleField("currentGen", Integer.toString(currentGen + 1));
  newRec.setSimpleField("currentGenStartSeq", Integer.toString(1));
  if (currentGen > 0) {
   newRec.setSimpleField("prevGen", Integer.toString(currentGen));
   int localEndSeq = 1;
   if (lastRecordProcessed != null) {
    localEndSeq = (int) lastRecordProcessed.txid;
   }
   newRec.setSimpleField("prevGenEndSeq", "" + localEndSeq);
  }
  newRec.merge(currentData);
 } else {
  newRec.setSimpleField("currentGen", Integer.toString(1));
  newRec.setSimpleField("currentGenStartSeq", Integer.toString(1));
 }
 return newRec;
}
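convertToInt is defined elsewhere in the source project and is not shown in this excerpt; a plausible shape for it, matching how it is called above (a sketch, not the project's actual code):

// Hypothetical reconstruction of the convertToInt(value, defaultValue) helper used above.
static int convertToInt(String value, int defaultValue) {
  try {
    return value != null ? Integer.parseInt(value) : defaultValue;
  } catch (NumberFormatException e) {
    return defaultValue;
  }
}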

Code example from: org.apache.helix/helix-core (the same code also appears in apache/helix)

/**
 * Merge in a {@link ZNRecordDelta} corresponding to its merge policy
 * @param delta
 */
void merge(ZNRecordDelta delta) {
 if (delta.getMergeOperation() == MergeOperation.ADD) {
  merge(delta.getRecord());
 } else if (delta.getMergeOperation() == MergeOperation.SUBTRACT) {
  subtract(delta.getRecord());
 } else if (delta.getMergeOperation() == MergeOperation.UPDATE) {
  update(delta.getRecord());
 }
}
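Each ZNRecordDelta carries a payload record plus a MergeOperation that routes the payload to merge() (ADD), subtract() (SUBTRACT), or update() (UPDATE). A minimal sketch of building such a delta, assuming MergeOperation is the enum nested in ZNRecordDelta and that a (record, operation) constructor is available in this Helix version:

// Sketch only; the ZNRecordDelta(ZNRecord, MergeOperation) constructor is assumed here.
ZNRecord removal = new ZNRecord("MyResource");
removal.setSimpleField("obsoleteField", "");   // key to strip from the target record

ZNRecordDelta delta = new ZNRecordDelta(removal, ZNRecordDelta.MergeOperation.SUBTRACT);
// When merge(delta) runs on the target record, SUBTRACT routes to subtract() and the matching field is removed.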

Code example from: apache/helix

StringRepresentation getHostedEntitiesRepresentation(String clusterName, String jobQueueName)
  throws Exception {
 ZkClient zkClient =
   ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
 HelixDataAccessor accessor =
   ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
 PropertyKey.Builder keyBuilder = accessor.keyBuilder();
 TaskDriver taskDriver = new TaskDriver(zkClient, clusterName);
 // Get job queue config
 // TODO: fix this to use workflowConfig.
 ResourceConfig jobQueueConfig = accessor.getProperty(keyBuilder.resourceConfig(jobQueueName));
 // Get job queue context
 WorkflowContext ctx = taskDriver.getWorkflowContext(jobQueueName);
 // Create the result
 ZNRecord hostedEntitiesRecord = new ZNRecord(jobQueueName);
 if (jobQueueConfig != null) {
  hostedEntitiesRecord.merge(jobQueueConfig.getRecord());
 }
 if (ctx != null) {
  hostedEntitiesRecord.merge(ctx.getRecord());
 }
 StringRepresentation representation =
   new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(hostedEntitiesRecord),
     MediaType.APPLICATION_JSON);
 return representation;
}

Code example from: apache/helix (excerpt)

// Excerpt only: merge a job's config record and its runtime context record into one result record.
hostedEntitiesRecord.merge(jobConfig.getRecord());
hostedEntitiesRecord.merge(ctx.getRecord());

Code example from: apache/helix

@Test
 public void testJobStateOnCreation() {
  Workflow.Builder builder = new Workflow.Builder(WORKFLOW_NAME);
  JobConfig.Builder jobConfigBuilder = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
    .setTargetResource(WORKFLOW_NAME).setTargetPartitionStates(Sets.newHashSet("SLAVE","MASTER"))
    .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
  String jobName = "job";
  builder = builder.addJob(jobName, jobConfigBuilder);
  Workflow workflow = builder.build();
  WorkflowConfig workflowConfig = workflow.getWorkflowConfig();
  JobConfig jobConfig = jobConfigBuilder.build();
  workflowConfig.getRecord().merge(jobConfig.getRecord());

  _cache.getJobConfigMap().put(WORKFLOW_NAME + "_" + jobName, jobConfig);
  _cache.getWorkflowConfigMap().put(WORKFLOW_NAME, workflowConfig);

  WorkflowRebalancer workflowRebalancer = new WorkflowRebalancer();
  workflowRebalancer.init(_manager);
  ResourceAssignment resourceAssignment = workflowRebalancer
    .computeBestPossiblePartitionState(_cache, _idealState, _resource, _currStateOutput);

  WorkflowContext workflowContext = _cache.getWorkflowContext(WORKFLOW_NAME);
  Map<String, TaskState> jobStates = workflowContext.getJobStates();
  for (String job : jobStates.keySet()) {
   Assert.assertEquals(jobStates.get(job), TaskState.NOT_STARTED);
  }
 }
}
