This article collects some Java code examples of the org.apache.helix.task.WorkflowContext.getJobStates() method and shows how WorkflowContext.getJobStates() is used in practice. The examples are taken from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of the WorkflowContext.getJobStates() method:
Package path: org.apache.helix.task.WorkflowContext
Class name: WorkflowContext
Method name: getJobStates
Method description: none available
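Before the collected examples, here is a minimal, self-contained sketch of the typical call pattern. It assumes an already-connected HelixManager; the workflow name "MyWorkflow" and the wrapper class are placeholders, not part of Helix:
import java.util.Map;
import org.apache.helix.HelixManager;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.WorkflowContext;

public final class JobStatesExample {
  // Prints the state of every job in the workflow named "MyWorkflow" (placeholder).
  public static void printJobStates(HelixManager manager) {
    TaskDriver driver = new TaskDriver(manager);
    WorkflowContext context = driver.getWorkflowContext("MyWorkflow");
    if (context == null) {
      return; // no context yet: the workflow has not been scheduled
    }
    Map<String, TaskState> jobStates = context.getJobStates();
    for (Map.Entry<String, TaskState> entry : jobStates.entrySet()) {
      System.out.println(entry.getKey() + " -> " + entry.getValue());
    }
  }
}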
Code example source: apache/incubator-pinot
/**
 * Get all tasks for the given task type.
 *
 * @param taskType Task type
 * @return Set of task names
 */
@Nonnull
public synchronized Set<String> getTasks(@Nonnull String taskType) {
  Set<String> helixJobs = _taskDriver.getWorkflowContext(getHelixJobQueueName(taskType)).getJobStates().keySet();
  Set<String> tasks = new HashSet<>(helixJobs.size());
  for (String helixJobName : helixJobs) {
    tasks.add(getPinotTaskName(helixJobName));
  }
  return tasks;
}
Code example source: apache/incubator-pinot
/**
 * Get all task states for the given task type.
 *
 * @param taskType Task type
 * @return Map from task name to task state
 */
@Nonnull
public synchronized Map<String, TaskState> getTaskStates(@Nonnull String taskType) {
  Map<String, TaskState> helixJobStates =
      _taskDriver.getWorkflowContext(getHelixJobQueueName(taskType)).getJobStates();
  Map<String, TaskState> taskStates = new HashMap<>(helixJobStates.size());
  for (Map.Entry<String, TaskState> entry : helixJobStates.entrySet()) {
    taskStates.put(getPinotTaskName(entry.getKey()), entry.getValue());
  }
  return taskStates;
}
Code example source: apache/helix
Map<String, TaskState> allJobStates = workflowContext.getJobStates();
for (Map.Entry<String, TaskState> jobState : allJobStates.entrySet()) {
  if (!jobState.getValue().equals(TaskState.NOT_STARTED)) {
Code example source: org.apache.helix/helix-core
Map<String, TaskState> allJobStates = workflowContext.getJobStates();
for (String job : allJobStates.keySet()) {
  if (!allJobStates.get(job).equals(TaskState.NOT_STARTED)) {
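The two truncated excerpts above share one pattern: a workflow counts as started once any of its jobs has left the NOT_STARTED state. A complete sketch of that check, using the same org.apache.helix.task imports as the first sketch (the helper name isWorkflowStarted is hypothetical, not a Helix API):
// Sketch of the pattern in the excerpts above; the enclosing helper is hypothetical.
private static boolean isWorkflowStarted(WorkflowContext workflowContext) {
  Map<String, TaskState> allJobStates = workflowContext.getJobStates();
  for (Map.Entry<String, TaskState> jobState : allJobStates.entrySet()) {
    if (!jobState.getValue().equals(TaskState.NOT_STARTED)) {
      return true; // at least one job has already started
    }
  }
  return false;
}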
Code example source: apache/helix
Map<String, TaskState> jobStates = workflowContext.getJobStates();
for (String job : workflowConfig.getJobDag().getAllNodes()) {
  JobConfig jobConfig = TaskUtil.getJobConfig(dataAccessor, job);
Code example source: org.apache.helix/helix-core
/**
 * Return all jobs that are COMPLETED and have passed their expiry time.
 * @param dataAccessor
 * @param propertyStore
 * @param workflowConfig
 * @param workflowContext
 * @return set of expired job names
 */
protected static Set<String> getExpiredJobs(HelixDataAccessor dataAccessor,
    HelixPropertyStore<ZNRecord> propertyStore, WorkflowConfig workflowConfig,
    WorkflowContext workflowContext) {
  Set<String> expiredJobs = new HashSet<String>();
  if (workflowContext != null) {
    Map<String, TaskState> jobStates = workflowContext.getJobStates();
    for (String job : workflowConfig.getJobDag().getAllNodes()) {
      JobConfig jobConfig = TaskUtil.getJobConfig(dataAccessor, job);
      JobContext jobContext = TaskUtil.getJobContext(propertyStore, job);
      long expiry = jobConfig.getExpiry();
      if (expiry == WorkflowConfig.DEFAULT_EXPIRY || expiry < 0) {
        // fall back to the workflow-level expiry when the job does not set one
        expiry = workflowConfig.getExpiry();
      }
      if (jobContext != null && jobStates.get(job) == TaskState.COMPLETED) {
        if (System.currentTimeMillis() >= jobContext.getFinishTime() + expiry) {
          expiredJobs.add(job);
        }
      }
    }
  }
  return expiredJobs;
}
Code example source: apache/helix
@Test
public void testAbortTaskForWorkflowFail() throws InterruptedException {
  failTask = true;
  String workflowName = TestHelper.getTestMethodName();
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
  for (int i = 0; i < 5; i++) {
    List<TaskConfig> taskConfigs = Lists.newArrayListWithCapacity(1);
    Map<String, String> taskConfigMap = Maps.newHashMap();
    taskConfigs.add(new TaskConfig("TaskOne", taskConfigMap));
    JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand("DummyCommand")
        .addTaskConfigs(taskConfigs).setJobCommandConfigMap(_jobCommandMap);
    workflowBuilder.addJob("JOB" + i, jobBuilder);
  }
  _driver.start(workflowBuilder.build());
  _driver.pollForWorkflowState(workflowName, TaskState.FAILED);
  int abortedTask = 0;
  for (TaskState jobState : _driver.getWorkflowContext(workflowName).getJobStates().values()) {
    if (jobState == TaskState.ABORTED) {
      abortedTask++;
    }
  }
  Assert.assertEquals(abortedTask, 4);
}
Code example source: apache/helix
Set<String> jobWithFinalStates = new HashSet<>(workflowCtx.getJobStates().keySet());
jobWithFinalStates.removeAll(workflowCfg.getJobDag().getAllNodes());
if (jobWithFinalStates.size() > 0) {
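The excerpt above detects leftovers: job names that still carry a recorded state in the workflow context but are no longer nodes of the workflow's DAG. A self-contained sketch of that check, reusing the org.apache.helix.task types plus java.util.Set and java.util.HashSet (the helper name is hypothetical; getJobStates() and getJobDag().getAllNodes() are real Helix calls):
// Sketch: collect job states that outlived their jobs, e.g. after jobs were
// purged from a queue. The enclosing helper is hypothetical.
private static Set<String> getStaleJobStates(WorkflowConfig workflowCfg,
    WorkflowContext workflowCtx) {
  Set<String> jobWithFinalStates = new HashSet<>(workflowCtx.getJobStates().keySet());
  jobWithFinalStates.removeAll(workflowCfg.getJobDag().getAllNodes());
  return jobWithFinalStates;
}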
Code example source: apache/helix
Assert.assertEquals(context.getJobStates().keySet(), remainJobs);
Assert.assertTrue(remainJobs.containsAll(context.getJobStartTimes().keySet()));
Code example source: apache/helix
@Test
public void testJobStateOnCreation() {
  Workflow.Builder builder = new Workflow.Builder(WORKFLOW_NAME);
  JobConfig.Builder jobConfigBuilder = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WORKFLOW_NAME).setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
      .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
  String jobName = "job";
  builder = builder.addJob(jobName, jobConfigBuilder);
  Workflow workflow = builder.build();
  WorkflowConfig workflowConfig = workflow.getWorkflowConfig();
  JobConfig jobConfig = jobConfigBuilder.build();
  workflowConfig.getRecord().merge(jobConfig.getRecord());
  _cache.getJobConfigMap().put(WORKFLOW_NAME + "_" + jobName, jobConfig);
  _cache.getWorkflowConfigMap().put(WORKFLOW_NAME, workflowConfig);
  WorkflowRebalancer workflowRebalancer = new WorkflowRebalancer();
  workflowRebalancer.init(_manager);
  ResourceAssignment resourceAssignment = workflowRebalancer
      .computeBestPossiblePartitionState(_cache, _idealState, _resource, _currStateOutput);
  WorkflowContext workflowContext = _cache.getWorkflowContext(WORKFLOW_NAME);
  Map<String, TaskState> jobStates = workflowContext.getJobStates();
  for (String job : jobStates.keySet()) {
    Assert.assertEquals(jobStates.get(job), TaskState.NOT_STARTED);
  }
}
Code example source: apache/helix
taskDataCache.updateJobContext(_testJobPrefix + "0", jbCtx0);
wfCtx.getJobStates().remove(_testJobPrefix + "1");
taskDataCache.removeContext(_testJobPrefix + "1");