Usage of com.typesafe.config.Config.atPath(), with code examples


This article collects code examples of the Java method com.typesafe.config.Config.atPath() and shows how it is used in practice. The examples are drawn from selected projects on GitHub, Stack Overflow, Maven, and similar platforms, so they should serve as useful references. Details of Config.atPath() are as follows:
Package: com.typesafe.config
Class: Config
Method: atPath

About Config.atPath

Places the config inside another Config at the given path.

Note that path expressions have a syntax and sometimes require quoting (see ConfigUtil#joinPath and ConfigUtil#splitPath).
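
All the snippets below rely on the same behavior: atPath() wraps a Config under a path expression, returning a new Config whose keys are prefixed with that path. Here is a minimal, self-contained sketch of that behavior; the keys and paths are illustrative assumptions, not taken from any of the projects below.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigUtil;

public class AtPathDemo {
 public static void main(String[] args) {
  // An inner config with two illustrative keys.
  Config inner = ConfigFactory.parseString("host = \"localhost\"\nport = 8080");

  // Mount the whole config under the path "database.connection".
  Config wrapped = inner.atPath("database.connection");
  System.out.println(wrapped.getString("database.connection.host")); // localhost
  System.out.println(wrapped.getInt("database.connection.port"));    // 8080

  // Path expressions have their own syntax: a path element containing a dot must be quoted.
  // ConfigUtil.joinPath handles the quoting.
  String path = ConfigUtil.joinPath("servers", "prod.us-east"); // servers."prod.us-east"
  Config quoted = inner.atPath(path);
  System.out.println(quoted.getString(path + ".host"));         // localhost
 }
}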

Code examples

Code example from: apache/drill

@Override
public Config atPath(String path) {
 return c.atPath(path);
}

Code example from: apache/incubator-gobblin

@Override
 public List<Config> getResolvedJobConfigs(Config userConfig, DatasetDescriptor inputDescriptor,
   DatasetDescriptor outputDescriptor)
   throws SpecNotFoundException, JobTemplate.TemplateException {
  Config inputDescriptorConfig = inputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX);
  Config outputDescriptorConfig = outputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX);
  userConfig = userConfig.withFallback(inputDescriptorConfig).withFallback(outputDescriptorConfig);

  List<Config> resolvedJobConfigs = new ArrayList<>();
  for (JobTemplate jobTemplate: getJobTemplates()) {
   Config resolvedJobConfig = jobTemplate.getResolvedConfig(userConfig).resolve().withValue(
     ConfigurationKeys.JOB_TEMPLATE_PATH, ConfigValueFactory.fromAnyRef(jobTemplate.getUri().toString()));
   resolvedJobConfigs.add(resolvedJobConfig);
  }
  return resolvedJobConfigs;
 }
}

Code example from: apache/incubator-gobblin

/**
 * Build the merged config for each {@link FlowEdge}, which is a combination of (in the precedence described below):
 * <ul>
 *   <p> the user provided flow config </p>
 *   <p> edge specific properties/overrides </p>
 *   <p> source node config </p>
 *   <p> destination node config </p>
 * </ul>
 * Each {@link JobTemplate}'s config will eventually be resolved against this merged config.
 * @param flowEdge An instance of {@link FlowEdge}.
 * @return the merged config derived as described above.
 */
private Config getMergedConfig(FlowEdge flowEdge)
  throws ExecutionException, InterruptedException {
 Config srcNodeConfig = this.flowGraph.getNode(flowEdge.getSrc()).getRawConfig().atPath(SOURCE_PREFIX);
 Config destNodeConfig = this.flowGraph.getNode(flowEdge.getDest()).getRawConfig().atPath(DESTINATION_PREFIX);
 Config mergedConfig = flowConfig.withFallback(flowEdge.getConfig()).withFallback(srcNodeConfig).withFallback(destNodeConfig);
 return mergedConfig;
}

Code example from: apache/incubator-gobblin

public FormatConfig(Config config) {
 this.format = ConfigUtils.getString(config, DatasetDescriptorConfigKeys.FORMAT_KEY, DatasetDescriptorConfigKeys.DATASET_DESCRIPTOR_CONFIG_ANY);
 this.codecType = ConfigUtils.getString(config, DatasetDescriptorConfigKeys.CODEC_KEY, DatasetDescriptorConfigKeys.DATASET_DESCRIPTOR_CONFIG_ANY);
 this.encryptionConfig = new EncryptionConfig(ConfigUtils.getConfig(config, DatasetDescriptorConfigKeys.ENCYPTION_PREFIX, ConfigFactory
   .empty()));
 this.rawConfig = config.withFallback(this.encryptionConfig.getRawConfig().atPath(DatasetDescriptorConfigKeys.ENCYPTION_PREFIX)).
   withFallback(DEFAULT_FALLBACK);
}

Code example from: apache/incubator-gobblin

/**
 * Checks if the {@link FlowTemplate} is resolvable using the provided {@link Config} object. A {@link FlowTemplate}
 * is resolvable only if each of the {@link JobTemplate}s in the flow is resolvable
 * @param userConfig User supplied Config
 * @return true if the {@link FlowTemplate} is resolvable
 */
@Override
public boolean isResolvable(Config userConfig, DatasetDescriptor inputDescriptor, DatasetDescriptor outputDescriptor)
  throws SpecNotFoundException, JobTemplate.TemplateException {
 Config inputDescriptorConfig = inputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX);
 Config outputDescriptorConfig = outputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX);
 userConfig = userConfig.withFallback(inputDescriptorConfig).withFallback(outputDescriptorConfig);
 ConfigResolveOptions resolveOptions = ConfigResolveOptions.defaults().setAllowUnresolved(true);
 for (JobTemplate template: this.jobTemplates) {
   Config templateConfig = template.getResolvedConfig(userConfig).resolve(resolveOptions);
   if (!templateConfig.isResolved()) {
   return false;
  }
 }
 return true;
}

Code example from: apache/incubator-gobblin

/**
 * A method to add tracking event configurations to a JobSpec.
 * This enables {@link org.apache.gobblin.metrics.GobblinTrackingEvent}s
 * to be emitted from each Gobblin job orchestrated by Gobblin-as-a-Service, which will then be used for tracking the
 * execution status of the job.
 * @param jobSpec representing a fully resolved {@link JobSpec}.
 */
private static void addTrackingEventConfig(JobSpec jobSpec, Config sysConfig) {
 Config reportingConfig = ConfigUtils.getConfig(sysConfig, ConfigurationKeys.METRICS_REPORTING_CONFIGURATIONS_PREFIX, ConfigFactory.empty());
 if (!reportingConfig.isEmpty()) {
  Config jobConfig = jobSpec.getConfig().withFallback(reportingConfig.atPath(ConfigurationKeys.METRICS_REPORTING_CONFIGURATIONS_PREFIX));
  boolean isSchemaRegistryEnabled = ConfigUtils.getBoolean(sysConfig, ConfigurationKeys.METRICS_REPORTING_KAFKA_USE_SCHEMA_REGISTRY, false);
  if (isSchemaRegistryEnabled) {
   String schemaRegistryUrl = ConfigUtils.getString(sysConfig, KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, "");
   if (!Strings.isNullOrEmpty(schemaRegistryUrl)) {
    jobConfig = jobConfig.withValue(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, ConfigValueFactory.fromAnyRef(schemaRegistryUrl));
   }
   String schemaOverrideNamespace = ConfigUtils
     .getString(sysConfig, KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_OVERRIDE_NAMESPACE, "");
   if (!Strings.isNullOrEmpty(schemaOverrideNamespace)) {
    jobConfig = jobConfig.withValue(KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_OVERRIDE_NAMESPACE,
      ConfigValueFactory.fromAnyRef(schemaOverrideNamespace));
   }
  }
  jobSpec.setConfig(jobConfig);
 }
}

Code example from: dremio/dremio-oss

@Override
public Config atPath(String path) {
 return config.atPath(path);
}

Code example from: org.apache.drill/drill-common

@Override
public Config atPath(String path) {
 return c.atPath(path);
}

Code example from: apache/streams

public static void addConfig(Config newConfig, String path) {
 config = newConfig.atPath(path).withFallback(config);
}
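
The apache/streams snippet above shows the pattern that most examples on this page follow: atPath() namespaces a config under a prefix, and withFallback() layers it over (or under) another config. Below is a minimal sketch of that combination, using illustrative keys that are assumptions rather than anything from the original project.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class AtPathFallbackDemo {
 public static void main(String[] args) {
  // Existing application config (illustrative keys).
  Config base = ConfigFactory.parseString("app.name = demo\napp.timeout = 30");

  // New settings that should live under the "app.kafka" prefix.
  Config incoming = ConfigFactory.parseString("brokers = \"localhost:9092\"");

  // Namespace the incoming config, then fall back to the base config for everything else.
  Config merged = incoming.atPath("app.kafka").withFallback(base);

  System.out.println(merged.getString("app.kafka.brokers")); // localhost:9092
  System.out.println(merged.getInt("app.timeout"));          // 30
 }
}

Because the namespaced config is the first layer, its values take precedence over anything already defined under app.kafka in the base config.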

Code example from: edu.jhuapl.dorset.components/hibernate-service

/**
 * Create the hibernate service which initializes the session factory
 *
 * @param conf  application configuration
 * @throws UnsupportedOperationException if a mapping file is invalid
 */
public HibernateService(Config conf) {
  Configuration hibernateConf = new Configuration();
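    // getConfig(HIBERNATE_KEY) isolates the hibernate subtree, and atPath(HIBERNATE_KEY) re-attaches the
    // prefix, so the iterated keys come back fully qualified (e.g. "hibernate.connection.url") and can be
    // looked up directly on the original conf.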
  Config hc = conf.getConfig(HIBERNATE_KEY).atPath(HIBERNATE_KEY);
  for (Map.Entry<String, ConfigValue> entry : hc.entrySet()) {
    String key = entry.getKey();
    if (key.startsWith(MAPPING_KEY)) {
      logger.info("Loading hibernate map from " + conf.getString(key));
      try {
        hibernateConf.addResource(conf.getString(key));
      } catch (MappingException e) {
        String msg = "Something wrong with mapping: " + conf.getString(key);
        throw new UnsupportedOperationException(msg, e);
      }
    } else {
      logger.info("Setting hibernate property: " + key + "=" + conf.getString(key));
      hibernateConf.setProperty(key, conf.getString(key));
    }
  }
  sessionFactory = hibernateConf.buildSessionFactory();
}

Code example from: org.apache.gobblin/gobblin-service

@Override
 public List<Config> getResolvedJobConfigs(Config userConfig, DatasetDescriptor inputDescriptor,
   DatasetDescriptor outputDescriptor)
   throws SpecNotFoundException, JobTemplate.TemplateException {
  Config inputDescriptorConfig = inputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX);
  Config outputDescriptorConfig = outputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX);
  userConfig = userConfig.withFallback(inputDescriptorConfig).withFallback(outputDescriptorConfig);

  List<Config> resolvedJobConfigs = new ArrayList<>();
  for (JobTemplate jobTemplate: getJobTemplates()) {
   Config resolvedJobConfig = jobTemplate.getResolvedConfig(userConfig).resolve().withValue(
     ConfigurationKeys.JOB_TEMPLATE_PATH, ConfigValueFactory.fromAnyRef(jobTemplate.getUri().toString()));
   resolvedJobConfigs.add(resolvedJobConfig);
  }
  return resolvedJobConfigs;
 }
}

Code example from: org.apache.gobblin/gobblin-service

/**
 * Build the merged config for each {@link FlowEdge}, which is a combination of (in the precedence described below):
 * <ul>
 *   <p> the user provided flow config </p>
 *   <p> edge specific properties/overrides </p>
 *   <p> spec executor config/overrides </p>
 *   <p> source node config </p>
 *   <p> destination node config </p>
 * </ul>
 * Each {@link JobTemplate}'s config will eventually be resolved against this merged config.
 * @param flowEdge An instance of {@link FlowEdge}.
 * @param specExecutor A {@link SpecExecutor}.
 * @return the merged config derived as described above.
 */
private Config getMergedConfig(FlowEdge flowEdge, SpecExecutor specExecutor)
  throws ExecutionException, InterruptedException {
 Config srcNodeConfig = this.flowGraph.getNode(flowEdge.getSrc()).getRawConfig().atPath(SOURCE_PREFIX);
 Config destNodeConfig = this.flowGraph.getNode(flowEdge.getDest()).getRawConfig().atPath(DESTINATION_PREFIX);
 Config mergedConfig = flowConfig.withFallback(specExecutor.getConfig().get()).withFallback(flowEdge.getConfig())
   .withFallback(srcNodeConfig).withFallback(destNodeConfig);
 return mergedConfig;
}

Code example from: org.apache.gobblin/gobblin-service

public FormatConfig(Config config) {
 this.format = ConfigUtils.getString(config, DatasetDescriptorConfigKeys.FORMAT_KEY, DatasetDescriptorConfigKeys.DATASET_DESCRIPTOR_CONFIG_ANY);
 this.codecType = ConfigUtils.getString(config, DatasetDescriptorConfigKeys.CODEC_KEY, DatasetDescriptorConfigKeys.DATASET_DESCRIPTOR_CONFIG_ANY);
 this.encryptionConfig = new EncryptionConfig(ConfigUtils.getConfig(config, DatasetDescriptorConfigKeys.ENCYPTION_PREFIX, ConfigFactory
   .empty()));
 this.rawConfig = config.withFallback(this.encryptionConfig.getRawConfig().atPath(DatasetDescriptorConfigKeys.ENCYPTION_PREFIX)).
   withFallback(DEFAULT_FALLBACK);
}

Code example from: org.apache.gobblin/gobblin-service

/**
 * Checks if the {@link FlowTemplate} is resolvable using the provided {@link Config} object. A {@link FlowTemplate}
 * is resolvable only if each of the {@link JobTemplate}s in the flow is resolvable
 * @param userConfig User supplied Config
 * @return true if the {@link FlowTemplate} is resolvable
 */
@Override
public boolean isResolvable(Config userConfig, DatasetDescriptor inputDescriptor, DatasetDescriptor outputDescriptor)
  throws SpecNotFoundException, JobTemplate.TemplateException {
 Config inputDescriptorConfig = inputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_INPUT_DATASET_DESCRIPTOR_PREFIX);
 Config outputDescriptorConfig = outputDescriptor.getRawConfig().atPath(DatasetDescriptorConfigKeys.FLOW_EDGE_OUTPUT_DATASET_DESCRIPTOR_PREFIX);
 userConfig = userConfig.withFallback(inputDescriptorConfig).withFallback(outputDescriptorConfig);
 ConfigResolveOptions resolveOptions = ConfigResolveOptions.defaults().setAllowUnresolved(true);
 for (JobTemplate template: this.jobTemplates) {
   Config templateConfig = template.getResolvedConfig(userConfig).resolve(resolveOptions);
   if (!templateConfig.isResolved()) {
   return false;
  }
 }
 return true;
}
