org.apache.spark.Accumulator.add()方法的使用及代码示例

x33g5p2x  于2022-01-16 转载在 其他  
字(4.6k)|赞(0)|评价(0)|浏览(131)

本文整理了Java中org.apache.spark.Accumulator.add()方法的一些代码示例,展示了Accumulator.add()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Accumulator.add()方法的具体详情如下:
包路径:org.apache.spark.Accumulator
类名称:Accumulator
方法名:add

Accumulator.add介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Adds {@code incr} to the wrapped Spark accumulator.
 * NOTE(review): the accumulator's element type is declared elsewhere in the
 * enclosing class — presumably Long; confirm against the field declaration.
 */
public void increment(long incr) {
 accumulator.add(incr);
}

代码示例来源:origin: databricks/learning-spark

// Snippet (enclosing driver method not shown): counts how many lines of the
// RDD contain the call sign "KK6JKQ" by bumping a Spark accumulator on the
// executors, then reads the aggregated total back on the driver via value().
rdd.foreach(new VoidFunction<String>(){ public void call(String line) {
   if (line.contains("KK6JKQ")) {
    count.add(1);
   }
  }});
System.out.println("Lines with 'KK6JKQ': " + count.value());

代码示例来源:origin: databricks/learning-spark

// Snippet (the call this anonymous Function is passed to is not shown):
// validates each call sign against an amateur-radio pattern, tallies valid
// and invalid counts in two Spark accumulators, and returns the match result
// so the enclosing transformation can filter on it.
// NOTE(review): Pattern.compile runs once per input element here; hoisting
// the compiled Pattern to a static final field would avoid recompilation.
new Function<String, Boolean>(){ public Boolean call(String callSign) {
  Pattern p = Pattern.compile("\\A\\d?\\p{Alpha}{1,2}\\d{1,4}\\p{Alpha}{1,3}\\Z");
  Matcher m = p.matcher(callSign);
  boolean b = m.matches();
  if (b) {
   validSignCount.add(1);
  } else {
   invalidSignCount.add(1);
  }
  return b;
 }
});

代码示例来源:origin: databricks/learning-spark

// Snippet (enclosing call not shown): splits each line into space-separated
// tokens while counting blank lines in the blankLines accumulator as a side
// effect; the tokenized call signs are then saved to text files.
new FlatMapFunction<String, String>() { public Iterable<String> call(String line) {
   if (line.equals("")) {
    blankLines.add(1);
   }
   return Arrays.asList(line.split(" "));
  }});
callSigns.saveAsTextFile(outputDir + "/callsigns");

代码示例来源:origin: org.apache.spark/spark-core

// Test snippet (from Spark's own suite): folds the RDD's elements into a
// double- and a float-typed accumulator and asserts both totals equal 25.
rdd.foreach(x -> doubleAccum.add((double) x));
assertEquals((Double) 25.0, doubleAccum.value());
rdd.foreach(x -> floatAccum.add((float) x));
assertEquals((Float) 25.0f, floatAccum.value());

代码示例来源:origin: org.apache.spark/spark-core_2.10

// Test snippet (spark-core_2.10 build of the same suite): folds the RDD's
// elements into double and float accumulators and asserts both totals are 25.
rdd.foreach(x -> doubleAccum.add((double) x));
assertEquals((Double) 25.0, doubleAccum.value());
rdd.foreach(x -> floatAccum.add((float) x));
assertEquals((Float) 25.0f, floatAccum.value());

代码示例来源:origin: org.apache.spark/spark-core_2.11

// Test snippet (spark-core_2.11 build of the same suite): folds the RDD's
// elements into double and float accumulators and asserts both totals are 25.
rdd.foreach(x -> doubleAccum.add((double) x));
assertEquals((Double) 25.0, doubleAccum.value());
rdd.foreach(x -> floatAccum.add((float) x));
assertEquals((Float) 25.0f, floatAccum.value());

代码示例来源:origin: apache/tinkerpop

/**
 * Stores {@code value} under {@code key} in the Spark-backed vertex-program
 * memory. Adding is only legal while the VertexProgram is executing; any
 * other time the dedicated Memory exception is raised.
 */
@Override
public void add(final String key, final Object value) {
  checkKeyValue(key, value);
  // Guard clause: reject the write up front when not inside execute().
  if (!this.inExecute)
    throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
  this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}

代码示例来源:origin: co.cask.cdap/hydrator-spark-core2

/** Adds 1.0 to the output-record counter accumulator. */
@Override
public void incrementOutputRecordCount() {
 outputRecordCounter.add(1.0);
}

代码示例来源:origin: co.cask.cdap/hydrator-spark-core2

/** Adds 1.0 to the error-record counter accumulator. */
@Override
public void incrementErrorRecordCount() {
 errorRecordCounter.add(1.0);
}

代码示例来源:origin: co.cask.cdap/hydrator-spark-core2

/** Adds 1.0 to the input-record counter accumulator. */
@Override
public void incrementInputRecordCount() {
 inputRecordCounter.add(1.0);
}

代码示例来源:origin: org.apache.pig/pig

/**
 * Adds {@code incr} to the wrapped Spark accumulator.
 * NOTE(review): T is the accumulator's element type, declared on the
 * enclosing class — not visible in this snippet.
 */
public void increment(T incr) {
  accumulator.add(incr);
}

代码示例来源:origin: org.apache.crunch/crunch-spark

/**
 * Adds {@code inc} to this counter: the delta is published to the shared
 * Spark accumulator (keyed by counter group and name) and folded into the
 * locally tracked value.
 */
@Override
public void increment(long inc) {
 // The two statements are independent; publish the delta first, then
 // update the local mirror.
 accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, inc)));
 this.value += inc;
}

代码示例来源:origin: org.apache.crunch/crunch-spark

/**
 * Sets this counter to an absolute value. Spark accumulators are add-only,
 * so the set is expressed as the signed difference from the last known
 * value and published as a delta.
 */
@Override
 public void setValue(long newValue) {
  long delta = newValue - value;
  this.value = newValue;
  accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, delta)));
 }
}

代码示例来源:origin: apache/crunch

/**
 * Overwrites the counter with {@code newValue}. Because the underlying
 * Spark accumulator only supports addition, the difference between the new
 * and the previously recorded value is what gets accumulated.
 */
@Override
 public void setValue(long newValue) {
  long delta = newValue - value;
  this.value = newValue;
  accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, delta)));
 }
}

代码示例来源:origin: apache/crunch

/**
 * Bumps this counter by {@code inc}: the delta goes to the shared Spark
 * accumulator (nested map keyed by group, then counter name) and is also
 * mirrored into the local running value.
 */
@Override
public void increment(long inc) {
 // Order of these two independent statements is immaterial.
 accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, inc)));
 this.value += inc;
}

代码示例来源:origin: deepspark/deepspark

/**
 * Persists one record via the database writer, bumps the shared total
 * accumulator, and logs a progress line after every 1000 saved samples.
 */
@Override
public void call(Record arg0) throws Exception {
  dbWriter.putSample(arg0);

  // Local running count, kept separate from the Spark accumulator.
  count++;
  if (count % 1000 == 0) {
    System.out.println(String.format("%d images saved...", count));
  }

  totalCount.add(1);
}

代码示例来源:origin: org.apache.tinkerpop/spark-gremlin

/**
 * Records {@code value} under {@code key} in the Spark-memory map backing
 * the vertex program. Writes outside the execute phase are rejected with
 * the corresponding Memory exception.
 */
@Override
public void add(final String key, final Object value) {
  checkKeyValue(key, value);
  // Fail fast when not currently executing a VertexProgram.
  if (!this.inExecute)
    throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
  this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}

代码示例来源:origin: ai.grakn/grakn-kb

/**
 * Adds {@code value} under {@code key} to the Spark-backed memory. Only
 * permitted while the vertex program is executing; otherwise the dedicated
 * Memory exception is thrown.
 */
@Override
public void add(final String key, final Object value) {
  checkKeyValue(key, value);
  // Guard clause replaces the original if/else: same branches, same order.
  if (!this.inExecute) {
    throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
  }
  this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}

代码示例来源:origin: org.qcri.rheem/rheem-spark

/**
 * Hooks the given RDD into this instance. When instrumentation is requested
 * and the RDD is not cached, a pass-through counting filter is interposed so
 * the number of data quanta flowing through is tracked in a Spark
 * accumulator; otherwise the RDD is stored unchanged.
 */
public void accept(JavaRDD<?> rdd, SparkExecutor sparkExecutor) throws RheemException {
  // Early return (De Morgan of the original condition) when no
  // instrumentation is needed.
  if (!this.isMarkedForInstrumentation() || this.isRddCached()) {
    this.rdd = rdd;
    return;
  }
  final Accumulator<Integer> counter = sparkExecutor.sc.accumulator(0);
  this.rdd = rdd.filter(dataQuantum -> {
    counter.add(1);
    return true;
  });
  this.accumulator = counter;
}

相关文章