Usage and code examples of the org.apache.kylin.job.execution.Output.getExtra() method

This article collects Java code examples for the org.apache.kylin.job.execution.Output.getExtra() method and shows how Output.getExtra() is used in practice. The examples are extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a practical reference. Details of the Output.getExtra() method are as follows:
Package path: org.apache.kylin.job.execution.Output
Class name: Output
Method name: getExtra

About Output.getExtra

No official description is provided. Judging from the examples below, getExtra() returns the extra-info map of a job output, i.e. a Map<String, String> of step metadata such as MR/Spark job IDs, source record counts, and bytes written.
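
Before the excerpts, here is a minimal usage sketch. It is hypothetical and not taken from the Kylin code base: the class OutputExtraExample and the key "someKey" are invented for illustration; only the Output type and the Map<String, String> returned by getExtra() follow the examples below.

import java.util.Map;

import org.apache.kylin.job.execution.Output;

// Minimal sketch (hypothetical, not from the Kylin code base): read one value
// from a job output's extra-info map and fall back to a default when the key is absent.
public class OutputExtraExample {

  // Hypothetical key; real callers use constants such as ExecutableConstants.MR_JOB_ID.
  private static final String SOME_KEY = "someKey";

  public static String getExtraOrDefault(Output output, String defaultValue) {
    final Map<String, String> extra = output.getExtra(); // key/value metadata of the step output
    final String value = extra.get(SOME_KEY);
    return value != null ? value : defaultValue;
  }
}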

Code examples

Code example source: apache/kylin

public static String getBuildInstance(Output output) {
  final String str = output.getExtra().get(BUILD_INSTANCE);
  if (str != null) {
    return str;
  }
  return "unknown";
}

Code example source: apache/kylin

public static long getExtraInfoAsLong(Output output, String key, long defaultValue) {
  final String str = output.getExtra().get(key);
  if (str != null) {
    return Long.parseLong(str);
  } else {
    return defaultValue;
  }
}

Code example source: apache/kylin

protected final Map<String, String> getExtraInfo() {
  return getOutput().getExtra();
}

Code example source: apache/kylin

private String findExtraInfo(String key, String dft, boolean backward) {
  ArrayList<AbstractExecutable> tasks = new ArrayList<AbstractExecutable>(getTasks());

  if (backward) {
    Collections.reverse(tasks);
  }

  for (AbstractExecutable child : tasks) {
    Output output = getManager().getOutput(child.getId());
    String value = output.getExtra().get(key);
    if (value != null)
      return value;
  }
  return dft;
}

Code example source: apache/kylin

@Override
protected void onExecuteStart(ExecutableContext executableContext) {
  final Output output = getOutput();
  if (output.getExtra().containsKey(START_TIME)) {
    final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID);
    if (sparkJobID == null) {
      getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
      return;
    }
    try {
      String status = getAppState(sparkJobID);
      if (status == null || status.equals("FAILED") || status.equals("KILLED")) {
        //remove previous mr job info
        super.onExecuteStart(executableContext);
      } else {
        getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
      }
    } catch (IOException e) {
      logger.warn("error get hadoop status");
      super.onExecuteStart(executableContext);
    }
  } else {
    super.onExecuteStart(executableContext);
  }
}

Code example source: apache/kylin

@Override
protected void onExecuteStart(ExecutableContext executableContext) {
  final Output output = getOutput();
  if (output.getExtra().containsKey(START_TIME)) {
    final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID);
    if (mrJobId == null) {
      getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
      // ... (snippet truncated in the original source)

Code example source: apache/kylin

protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  ExecutableManager mgr = getManager();
  Map<String, String> extra = mgr.getOutput(getId()).getExtra();
  String sparkJobId = extra.get(ExecutableConstants.SPARK_JOB_ID);
  if (!StringUtils.isEmpty(sparkJobId)) {
    // ... (lines omitted in the original snippet)
    extra = mgr.getOutput(getId()).getExtra();
    if (extra != null && extra.get(ExecutableConstants.SPARK_JOB_ID) != null) {
      killAppRetry(extra.get(ExecutableConstants.SPARK_JOB_ID));
      // ... (lines omitted in the original snippet)
    }
    extra = mgr.getOutput(getId()).getExtra();
    extra.put(ExecutableConstants.SPARK_JOB_ID, "");
    getManager().addJobInfo(getId(), extra);
    // ... (snippet truncated in the original source)

Code example source: apache/kylin

// ... (enclosing condition omitted in the original snippet)
  final String mrJobId = errorOutput.getExtra().get(ExecutableConstants.MR_JOB_ID);
  dataMap.put("mr_job_id", StringUtil.noBlank(mrJobId, "Not initialized"));
} else {
  // ... (snippet truncated in the original source)

Code example source: apache/kylin

Configuration conf = new Configuration(HadoopUtil.getCurrentConfiguration());
String[] jobArgs = overwriteJobConf(conf, context.getConfig(), getMapReduceParams().trim().split("\\s+"));
final Map<String, String> extra = mgr.getOutput(getId()).getExtra();
if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
  job = new Cluster(conf).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
  // ... (snippet truncated in the original source)

Code example source: apache/kylin

private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
  Preconditions.checkNotNull(stepOutput);
  JobInstance.JobStep result = new JobInstance.JobStep();
  result.setId(task.getId());
  result.setName(task.getName());
  result.setSequenceID(i);
  result.setStatus(parseToJobStepStatus(stepOutput.getState()));
  for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
    if (entry.getKey() != null && entry.getValue() != null) {
      result.putInfo(entry.getKey(), entry.getValue());
    }
  }
  result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
  result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
  if (task instanceof ShellExecutable) {
    result.setExecCmd(((ShellExecutable) task).getCmd());
  }
  if (task instanceof MapReduceExecutable) {
    result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
    result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
  }
  if (task instanceof HadoopShellExecutable) {
    result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
  }
  return result;
}

Code example source: KylinOLAP/Kylin

protected long getExtraInfoAsLong(String key, long defaultValue) {
  final String str = executableManager.getOutput(getId()).getExtra().get(key);
  if (str != null) {
    return Long.parseLong(str);
  } else {
    return defaultValue;
  }
}

Code example source: KylinOLAP/Kylin

String sourceRecordsCount = baseCuboidOutput.getExtra().get(ExecutableConstants.SOURCE_RECORDS_COUNT);
Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsCount), "Can't get cube source record count.");
long sourceCount = Long.parseLong(sourceRecordsCount);
String sourceRecordsSize = baseCuboidOutput.getExtra().get(ExecutableConstants.SOURCE_RECORDS_SIZE);
Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsSize), "Can't get cube source record size.");
long sourceSize = Long.parseLong(sourceRecordsSize);
boolean segmentReady = true;
if (!StringUtils.isBlank(getConvertToHfileStepId())) {
  String cubeSizeString = executableManager.getOutput(getConvertToHfileStepId()).getExtra().get(ExecutableConstants.HDFS_BYTES_WRITTEN);
  Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size.");
  size = Long.parseLong(cubeSizeString) / 1024;
  // ... (snippet truncated in the original source)

Code example source: KylinOLAP/Kylin

@Override
protected void onExecuteStart(ExecutableContext executableContext) {
  final Output output = executableManager.getOutput(getId());
  if (output.getExtra().containsKey(START_TIME)) {
    final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID);
    if (mrJobId == null) {
      executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
      return;
    }
    try {
      Job job = new Cluster(new Configuration()).getJob(JobID.forName(mrJobId));
      if (job.getJobState() == JobStatus.State.FAILED) {
        //remove previous mr job info
        super.onExecuteStart(executableContext);
      } else {
        executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
      }
    } catch (IOException e) {
      logger.warn("error get hadoop status");
      super.onExecuteStart(executableContext);
    } catch (InterruptedException e) {
      logger.warn("error get hadoop status");
      super.onExecuteStart(executableContext);
    }
  } else {
    super.onExecuteStart(executableContext);
  }
}

Code example source: KylinOLAP/Kylin

try {
  Job job;
  final Map<String, String> extra = executableManager.getOutput(getId()).getExtra();
  if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
    job = new Cluster(new Configuration()).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
    // ... (snippet truncated in the original source)

Code example source: KylinOLAP/Kylin

// ... (enclosing condition omitted in the original snippet)
  return new ExecuteResult(ExecuteResult.State.FAILED, "there is no segment with id:" + getSegmentId());
String cubeSizeString = executableManager.getOutput(getConvertToHfileStepId()).getExtra().get(ExecutableConstants.HDFS_BYTES_WRITTEN);
Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size.");
long cubeSize = Long.parseLong(cubeSizeString) / 1024;
// ... (snippet truncated in the original source)

Code example source: org.apache.kylin/kylin-job

public static String getExtraInfo(Output output, String key) {
  return output.getExtra().get(key);
}

Code example source: org.apache.kylin/kylin-job

public static long getExtraInfoAsLong(Output output, String key, long defaultValue) {
  final String str = output.getExtra().get(key);
  if (str != null) {
    return Long.parseLong(str);
  } else {
    return defaultValue;
  }
}

Code example source: org.apache.kylin/kylin-core-job

public static long getExtraInfoAsLong(Output output, String key, long defaultValue) {
  final String str = output.getExtra().get(key);
  if (str != null) {
    return Long.parseLong(str);
  } else {
    return defaultValue;
  }
}

Code example source: org.apache.kylin/kylin-core-job

private String findExtraInfo(String key, String dft, boolean backward) {
  ArrayList<AbstractExecutable> tasks = new ArrayList<AbstractExecutable>(getTasks());

  if (backward) {
    Collections.reverse(tasks);
  }

  for (AbstractExecutable child : tasks) {
    Output output = getManager().getOutput(child.getId());
    String value = output.getExtra().get(key);
    if (value != null)
      return value;
  }
  return dft;
}
