Usage of the org.apache.helix.task.WorkflowContext class, with code examples


This article collects a number of Java code examples for org.apache.helix.task.WorkflowContext, showing how the class is used in practice. The snippets are taken from selected open-source projects found on GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of the WorkflowContext class:
Package path: org.apache.helix.task.WorkflowContext
Class name: WorkflowContext

About WorkflowContext

Typed interface to the workflow context information stored by TaskRebalancer in the Helix property store.
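
Most of the examples below either read a workflow's state through this class or mutate it from inside the task rebalancer. As a minimal, hedged sketch of the read path (assuming an already-connected HelixManager; the workflow and job names are illustrative):

    import org.apache.helix.HelixManager;
    import org.apache.helix.task.TaskDriver;
    import org.apache.helix.task.TaskState;
    import org.apache.helix.task.TaskUtil;
    import org.apache.helix.task.WorkflowContext;

    public class WorkflowStatusProbe {
      // Minimal sketch: read the current workflow and job state for a workflow
      // named "myWorkflow" with a job named "myJob" (illustrative names).
      public static TaskState printStatus(HelixManager helixManager) {
        WorkflowContext ctx = TaskDriver.getWorkflowContext(helixManager, "myWorkflow");
        if (ctx == null) {
          return null; // the context only exists once the workflow has been scheduled
        }
        TaskState workflowState = ctx.getWorkflowState();
        // Job states are keyed by the namespaced job name (see TaskUtil.getNamespacedJobName).
        TaskState jobState = ctx.getJobState(TaskUtil.getNamespacedJobName("myWorkflow", "myJob"));
        System.out.println("workflow=" + workflowState + ", job=" + jobState);
        return workflowState;
      }
    }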

Code examples

Code example source: apache/incubator-gobblin

    WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
    if (workflowContext != null) {
      TaskState jobState = workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName));
      switch (jobState) {
        case STOPPED:
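
The excerpt above is cut off after the first case label. A fuller sketch of the same check follows; the per-state handling is illustrative rather than Gobblin's actual logic, and imports are as in the sketch near the top of this article:

    static boolean isJobDone(HelixManager helixManager, String workFlowName, String jobName) {
      WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
      if (workflowContext == null) {
        return false; // workflow not scheduled yet
      }
      TaskState jobState =
          workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName));
      if (jobState == null) {
        return false; // job has no recorded state yet
      }
      switch (jobState) {
        case STOPPED:
        case FAILED:
        case COMPLETED:
          return true;  // job is no longer running
        default:
          return false; // NOT_STARTED, IN_PROGRESS, or another transient state
      }
    }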

Code example source: apache/incubator-pinot

    /**
     * Get the task queue state for the given task type.
     *
     * @param taskType Task type
     * @return Task queue state
     */
    public synchronized TaskState getTaskQueueState(@Nonnull String taskType) {
      return _taskDriver.getWorkflowContext(getHelixJobQueueName(taskType)).getWorkflowState();
    }

Code example source: apache/incubator-pinot

    /**
     * Get all tasks for the given task type.
     *
     * @param taskType Task type
     * @return Set of task names
     */
    @Nonnull
    public synchronized Set<String> getTasks(@Nonnull String taskType) {
      Set<String> helixJobs = _taskDriver.getWorkflowContext(getHelixJobQueueName(taskType)).getJobStates().keySet();
      Set<String> tasks = new HashSet<>(helixJobs.size());
      for (String helixJobName : helixJobs) {
        tasks.add(getPinotTaskName(helixJobName));
      }
      return tasks;
    }

Code example source: org.apache.helix/helix-core

    boolean incomplete = false;
    TaskState workflowState = ctx.getWorkflowState();
    if (TaskState.TIMED_OUT.equals(workflowState)) {
    TaskState jobState = ctx.getJobState(job);
    if (jobState == TaskState.FAILED || jobState == TaskState.TIMED_OUT) {
    failedJobs++;
    if (!cfg.isJobQueue() && failedJobs > cfg.getFailureThreshold()) {
    ctx.setWorkflowState(TaskState.FAILED);
    LOG.info("Workflow {} reached the failure threshold, so setting its state to FAILED.", cfg.getWorkflowId());
    for (String jobToFail : cfg.getJobDag().getAllNodes()) {
    if (ctx.getJobState(jobToFail) == TaskState.IN_PROGRESS) {
    ctx.setJobState(jobToFail, TaskState.ABORTED);
    ctx.setWorkflowState(TaskState.COMPLETED);
    return true;

Code example source: apache/helix

    if (!workflowCfg.isJobQueue() && !finalStates.contains(workflowCtx.getWorkflowState())) {
    scheduleRebalanceForTimeout(workflow, workflowCtx.getStartTime(), workflowCfg.getTimeout());
    if (!TaskState.TIMED_OUT.equals(workflowCtx.getWorkflowState()) && isTimeout(workflowCtx.getStartTime(), workflowCfg.getTimeout())) {
    workflowCtx.setWorkflowState(TaskState.TIMED_OUT);
    _taskDataCache.updateWorkflowContext(workflow, workflowCtx);
    if (!finalStates.contains(workflowCtx.getWorkflowState()) && TargetState.STOP.equals(targetState)) {
    LOG.info("Workflow " + workflow + "is marked as stopped.");
    if (isWorkflowStopped(workflowCtx, workflowCfg)) {
    workflowCtx.setWorkflowState(TaskState.STOPPED);
    _taskDataCache.updateWorkflowContext(workflow, workflowCtx);
    if (workflowCtx.getFinishTime() == WorkflowContext.UNFINISHED && isWorkflowFinished(workflowCtx,
        workflowCfg, _taskDataCache.getJobConfigMap(), _taskDataCache)) {
    workflowCtx.setFinishTime(currentTime);
    updateWorkflowMonitor(workflowCtx, workflowCfg);
    _taskDataCache.updateWorkflowContext(workflow, workflowCtx);
    if (workflowCtx.getFinishTime() != WorkflowContext.UNFINISHED) {
    LOG.info("Workflow " + workflow + " is finished.");
    long expiryTime = workflowCfg.getExpiry();
    if (workflowCtx.getFinishTime() + expiryTime <= currentTime) {
    LOG.info("Workflow " + workflow + " passed expiry time, cleaning up the workflow context.");
    cleanupWorkflow(workflow);
    } else {

Code example source: apache/helix

    public static WorkflowContext buildWorkflowContext(String workflowResource,
        TaskState workflowState, Long startTime, TaskState... jobStates) {
      WorkflowContext workflowContext =
          new WorkflowContext(new ZNRecord(TaskUtil.WORKFLOW_CONTEXT_KW));
      workflowContext.setName(workflowResource);
      workflowContext.setStartTime(startTime == null ? System.currentTimeMillis() : startTime);
      int jobId = 0;
      for (TaskState jobstate : jobStates) {
        workflowContext
            .setJobState(TaskUtil.getNamespacedJobName(workflowResource, JOB_KW) + jobId++, jobstate);
      }
      workflowContext.setWorkflowState(workflowState);
      return workflowContext;
    }
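
For illustration, a hedged usage of this test helper (the workflow name below is made up, and the generated job names depend on the JOB_KW constant defined in the helper's class):

    WorkflowContext ctx = buildWorkflowContext("TestWorkflow", TaskState.IN_PROGRESS, null,
        TaskState.COMPLETED, TaskState.IN_PROGRESS);
    // The helper records one job state per vararg and sets the workflow-level state.
    Assert.assertEquals(ctx.getWorkflowState(), TaskState.IN_PROGRESS);
    Assert.assertTrue(ctx.getStartTime() > 0); // startTime == null falls back to "now"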

Code example source: org.apache.helix/helix-core

    if (!workflowCfg.isJobQueue() && !finalStates.contains(workflowCtx.getWorkflowState())) {
    scheduleRebalanceForTimeout(workflow, workflowCtx.getStartTime(), workflowCfg.getTimeout());
    if (!TaskState.TIMED_OUT.equals(workflowCtx.getWorkflowState())
        && isTimeout(workflowCtx.getStartTime(), workflowCfg.getTimeout())) {
    workflowCtx.setWorkflowState(TaskState.TIMED_OUT);
    clusterData.updateWorkflowContext(workflow, workflowCtx, _manager.getHelixDataAccessor());
    if (!finalStates.contains(workflowCtx.getWorkflowState())
        && TargetState.STOP.equals(targetState)) {
    LOG.info("Workflow " + workflow + "is marked as stopped.");
    if (isWorkflowStopped(workflowCtx, workflowCfg)) {
    workflowCtx.setWorkflowState(TaskState.STOPPED);
    clusterData.updateWorkflowContext(workflow, workflowCtx, _manager.getHelixDataAccessor());
    if (workflowCtx.getFinishTime() == WorkflowContext.UNFINISHED && isWorkflowFinished(workflowCtx,
        workflowCfg, clusterData.getJobConfigMap(), clusterData)) {
    workflowCtx.setFinishTime(currentTime);
    updateWorkflowMonitor(workflowCtx, workflowCfg);
    clusterData.updateWorkflowContext(workflow, workflowCtx, _manager.getHelixDataAccessor());
    if (workflowCtx.getFinishTime() != WorkflowContext.UNFINISHED) {
    LOG.info("Workflow " + workflow + " is finished.");
    long expiryTime = workflowCfg.getExpiry();
    if (workflowCtx.getFinishTime() + expiryTime <= currentTime) {
    LOG.info("Workflow " + workflow + " passed expiry time, cleaning up the workflow context.");
    cleanupWorkflow(workflow, workflowCfg);

Code example source: org.apache.helix/helix-core

    long timeToSchedule = Long.MAX_VALUE;
    for (String job : workflowCfg.getJobDag().getAllNodes()) {
    TaskState jobState = workflowCtx.getJobState(job);
    if (jobState != null && !jobState.equals(TaskState.NOT_STARTED)) {
    if (LOG.isDebugEnabled()) {
    long calculatedStartTime = workflowCtx.getJobStartTime(job);
    if (calculatedStartTime < 0) {
    workflowCtx.setJobStartTime(job, calculatedStartTime);
    } else {
    scheduleSingleJob(job, jobConfig);
    workflowCtx.setJobState(job, TaskState.NOT_STARTED);
    scheduledJobs++;

Code example source: apache/helix

    /**
     * Remove all jobs that are in final states (ABORTED, FAILED, COMPLETED) from the job queue. The
     * job config, job context will be removed from Zookeeper.
     *
     * @param queue The name of job queue
     */
    public void cleanupQueue(String queue) {
      WorkflowConfig workflowConfig = TaskUtil.getWorkflowConfig(_accessor, queue);
      if (workflowConfig == null) {
        throw new IllegalArgumentException("Queue " + queue + " does not yet exist!");
      }
      boolean isTerminable = workflowConfig.isTerminable();
      if (isTerminable) {
        throw new IllegalArgumentException(queue + " is not a queue!");
      }
      WorkflowContext wCtx = TaskUtil.getWorkflowContext(_propertyStore, queue);
      if (wCtx == null || wCtx.getWorkflowState() == null) {
        throw new IllegalStateException("Queue " + queue + " does not have a valid work state!");
      }
      Set<String> jobs = new HashSet<String>();
      for (String jobNode : workflowConfig.getJobDag().getAllNodes()) {
        TaskState curState = wCtx.getJobState(jobNode);
        if (curState != null && (curState == TaskState.ABORTED || curState == TaskState.COMPLETED
            || curState == TaskState.FAILED)) {
          jobs.add(jobNode);
        }
      }
      TaskUtil.removeJobsFromWorkflow(_accessor, _propertyStore, queue, jobs, true);
    }
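
The field names (_accessor, _propertyStore) suggest this method lives in TaskDriver, so from client code the call would look roughly like the sketch below (assuming a connected HelixManager and an existing job queue named "myQueue"):

    // Hedged usage sketch: purge jobs in terminal states from a job queue.
    TaskDriver driver = new TaskDriver(helixManager);
    driver.cleanupQueue("myQueue"); // throws if "myQueue" does not exist or is not a job queue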

Code example source: apache/helix

    Assert.assertNull(context.getJobState(notStartedJobName));
    Assert.assertTrue(context.getFinishTime() - context.getStartTime() >= timeout);

Code example source: apache/helix

    Thread.sleep(timeToSleep);
    ctx = getWorkflowContext(workflowName);
    } while ((ctx == null || ctx.getLastScheduledSingleWorkflow() == null));
    workflowName = ctx.getLastScheduledSingleWorkflow();
    Thread.sleep(timeToSleep);
    ctx = getWorkflowContext(workflowName);
    } while ((ctx == null || ctx.getJobState(jobName) == null || !allowedStates
        .contains(ctx.getJobState(jobName))) && System.currentTimeMillis() < st + timeout);
    if (ctx == null || !allowedStates.contains(ctx.getJobState(jobName))) {
      throw new HelixException(
          String.format("Workflow \"%s\" context is null or job \"%s\" is not in states: %s",
    return ctx.getJobState(jobName);

Code example source: apache/helix

    @Override
    public ZNRecord update(ZNRecord currentData) {
      if (currentData != null) {
        WorkflowContext workflowContext = new WorkflowContext(currentData);
        workflowContext.removeJobStates(jobs);
        workflowContext.removeJobStartTime(jobs);
        currentData = workflowContext.getRecord();
      }
      return currentData;
    }
    };

Code example source: org.apache.helix/helix-core

    protected void updateWorkflowMonitor(WorkflowContext context, WorkflowConfig config) {
      if (_clusterStatusMonitor != null) {
        _clusterStatusMonitor.updateWorkflowCounters(config, context.getWorkflowState(),
            context.getFinishTime() - context.getStartTime());
      }
    }

Code example source: org.apache.helix/helix-core

    workflowCtx.setJobState(jobResource, TaskState.STOPPED);
    workflowCtx.setWorkflowState(TaskState.STOPPED);
    workflowCtx.setJobState(jobResource, TaskState.IN_PROGRESS);
    workflowCtx.setWorkflowState(TaskState.IN_PROGRESS);
    workflowCtx.setJobState(jobResource, TaskState.FAILED);
    if (workflowConfig.isTerminable()) {
      workflowCtx.setWorkflowState(TaskState.FAILED);
      workflowCtx.setFinishTime(finishTime);
    workflowCtx.setJobState(jobResource, TaskState.COMPLETED);
    jobCtx.setFinishTime(currentTime);
    if (isWorkflowComplete(workflowCtx, workflowConfig)) {
      workflowCtx.setWorkflowState(TaskState.COMPLETED);
      workflowCtx.setFinishTime(currentTime);

Code example source: com.linkedin.gobblin/gobblin-cluster

    private void markJobComplete(String jobName, JobContext jobContext,
        WorkflowConfig workflowConfig, WorkflowContext workflowContext) {
      long currentTime = System.currentTimeMillis();
      workflowContext.setJobState(jobName, TaskState.COMPLETED);
      jobContext.setFinishTime(currentTime);
      if (isWorkflowFinished(workflowContext, workflowConfig)) {
        workflowContext.setFinishTime(currentTime);
      }
    }

Code example source: apache/helix

    /**
     * Update context of the Workflow
     */
    public void updateWorkflowContext(String resourceName, WorkflowContext workflowContext) {
      updateContext(resourceName, workflowContext.getRecord());
    }

Code example source: org.apache.helix/helix-core

    /**
     * Helper function to change target state for a given workflow
     */
    private void setWorkflowTargetState(String workflow, TargetState state) {
      setSingleWorkflowTargetState(workflow, state);
      // For recurring schedules, last scheduled incomplete workflow must also be handled
      WorkflowContext wCtx = TaskUtil.getWorkflowContext(_propertyStore, workflow);
      if (wCtx != null) {
        String lastScheduledWorkflow = wCtx.getLastScheduledSingleWorkflow();
        if (lastScheduledWorkflow != null) {
          setSingleWorkflowTargetState(lastScheduledWorkflow, state);
        }
      }
    }

Code example source: apache/helix

    @Test public void testTaskRetryWithoutDelay() throws Exception {
      String jobResource = TestHelper.getTestMethodName();
      JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG);
      jobBuilder.setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG)
          .setMaxAttemptsPerTask(2).setCommand(MockTask.TASK_COMMAND)
          .setFailureThreshold(Integer.MAX_VALUE)
          .setJobCommandConfigMap(ImmutableMap.of(MockTask.FAILURE_COUNT_BEFORE_SUCCESS, "1"));
      Workflow flow =
          WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
      _driver.start(flow);
      // Wait until the job completes.
      _driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);
      long startTime = _driver.getWorkflowContext(jobResource).getStartTime();
      long finishedTime = _driver.getWorkflowContext(jobResource).getFinishTime();
      // It should finish in less than 2 seconds.
      Assert.assertTrue(finishedTime - startTime <= 2000L);
    }

Code example source: apache/helix

    public WorkflowContext getOrInitializeWorkflowContext(
        String workflowName, TaskDataCache cache) {
      WorkflowContext workflowCtx = cache.getWorkflowContext(workflowName);
      if (workflowCtx == null) {
        workflowCtx = new WorkflowContext(new ZNRecord(TaskUtil.WORKFLOW_CONTEXT_KW));
        workflowCtx.setStartTime(System.currentTimeMillis());
        workflowCtx.setName(workflowName);
        LOG.debug("Workflow context is created for " + workflowName);
      }
      return workflowCtx;
    }

Code example source: apache/helix

    /**
     * This test method tests whether PropertyKey.Builder successfully creates a path for
     * WorkflowContext instances.
     * TODO: KeyBuilder must handle the case for future versions of Task Framework with a different
     * path structure
     */
    @Test
    public void testGetWorkflowContext() {
      // Manually create a WorkflowContext instance
      ZNRecord znRecord = new ZNRecord(WORKFLOW_NAME);
      WorkflowContext workflowContext = new WorkflowContext(znRecord);
      _manager.getHelixPropertyStore().set(
          Joiner.on("/").join(TaskConstants.REBALANCER_CONTEXT_ROOT, WORKFLOW_NAME, CONTEXT_NODE),
          workflowContext.getRecord(), AccessOption.PERSISTENT);
      // Test retrieving this WorkflowContext using PropertyKey.Builder.getPath()
      String path = KEY_BUILDER.workflowContext(WORKFLOW_NAME).getPath();
      WorkflowContext workflowCtx =
          new WorkflowContext(_baseAccessor.get(path, null, AccessOption.PERSISTENT));
      Assert.assertEquals(workflowContext, workflowCtx);
    }
