This article collects code examples of the Java class org.apache.spark.util.Utils and shows how the Utils class is used in practice. The snippets are drawn from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they carry real-world reference value and should be helpful as a starting point. Details of the Utils class:
Package path: org.apache.spark.util.Utils
Class name: Utils
Class description: none available
Code example origin: org.apache.spark/spark-core_2.11
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
spillFilesCreated.clear();
taskContext = mock(TaskContext.class);
Code example origin: org.apache.spark/spark-core_2.11
@After
public void tearDown() {
try {
assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory());
} finally {
Utils.deleteRecursively(tempDir);
tempDir = null;
}
}
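The setUp/tearDown pair above uses Utils.createTempDir to allocate a scratch directory and Utils.deleteRecursively to clean it up afterwards. Below is a minimal, self-contained sketch of the same pattern; the class name, directory prefix, and assertion are illustrative and not taken from the original test suite.

import java.io.File;
import org.apache.spark.util.Utils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class TempDirUsageSuite {
  private File tempDir;

  @Before
  public void setUp() {
    // Create a scratch directory under java.io.tmpdir with a recognizable prefix.
    tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "utils-demo");
  }

  @After
  public void tearDown() {
    // Remove the directory and everything inside it, then drop the reference.
    Utils.deleteRecursively(tempDir);
    tempDir = null;
  }

  @Test
  public void tempDirExists() {
    Assert.assertTrue(tempDir.isDirectory());
  }
}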
Code example origin: apache/hive
return new ObjectPair<Long, Integer>(-1L, -1);
int executorMemoryInMB = Utils.memoryStringToMb(
sparkConf.get("spark.executor.memory", "512m"));
double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
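Utils.memoryStringToMb converts a JVM-style memory string such as "512m" or "2g" into a number of megabytes, which the excerpt above then combines with the storage memory fraction. A small standalone sketch, with the surrounding Hive logic and SparkConf lookup omitted and the values purely illustrative:

import org.apache.spark.util.Utils;

public class MemoryStringDemo {
  public static void main(String[] args) {
    // "512m" -> 512, "2g" -> 2048; the unit suffix is case-insensitive.
    int executorMemoryInMB = Utils.memoryStringToMb("512m");
    // Mirrors the calculation above for spark.storage.memoryFraction = 0.6.
    double memoryFraction = 1.0 - 0.6;
    System.out.println(executorMemoryInMB + " MB, usable fraction " + memoryFraction);
  }
}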
Code example origin: org.apache.spark/spark-mllib
PrefixSpanModel<Integer> model = prefixSpan.run(sequences);
File tempDir = Utils.createTempDir(
System.getProperty("java.io.tmpdir"), "JavaPrefixSpanSuite");
String outputPath = tempDir.getPath();
Utils.deleteRecursively(tempDir);
Code example origin: org.apache.spark/spark-core_2.10
/**
* Release N bytes of execution memory for a MemoryConsumer.
*/
public void releaseExecutionMemory(long size, MemoryConsumer consumer) {
logger.debug("Task {} release {} from {}", taskAttemptId, Utils.bytesToString(size), consumer);
memoryManager.releaseExecutionMemory(size, taskAttemptId, consumer.getMode());
}
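Utils.bytesToString formats a raw byte count into a human-readable size string for the debug log above. A standalone sketch with illustrative values:

import org.apache.spark.util.Utils;

public class BytesToStringDemo {
  public static void main(String[] args) {
    // Formats byte counts with a unit suffix, e.g. roughly "1024.0 KB" and "5.0 MB".
    System.out.println(Utils.bytesToString(1024L * 1024));
    System.out.println(Utils.bytesToString(5L * 1024 * 1024));
  }
}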
Code example origin: org.apache.spark/spark-core_2.11
String oomStackTrace = Utils.exceptionString(oom);
assertThat("expected OutOfMemoryError in " +
"org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
Code example origin: org.apache.spark/spark-core_2.10
boolean copyThrewException = true;
try {
lengths[i] = Utils.copyStream(in, out, false, transferToEnabled);
copyThrewException = false;
} finally {
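Utils.copyStream copies an InputStream to an OutputStream and returns the number of bytes copied; the third argument controls whether both streams are closed afterwards and the fourth whether an NIO transferTo fast path may be used when both ends are file streams. A hedged sketch with in-memory streams, leaving out the shuffle-writer context of the snippet above:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.spark.util.Utils;

public class CopyStreamDemo {
  public static void main(String[] args) {
    byte[] data = "hello spark".getBytes(StandardCharsets.UTF_8);
    ByteArrayInputStream in = new ByteArrayInputStream(data);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // closeStreams = true closes both ends; transferToEnabled only matters
    // for FileInputStream/FileOutputStream pairs, so false is fine here.
    long copied = Utils.copyStream(in, out, true, false);
    System.out.println(copied + " bytes copied");
  }
}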
Code example origin: org.apache.spark/spark-core_2.10
final FileChannel spillInputChannel = spillInputChannels[i];
final long writeStartTime = System.nanoTime();
Utils.copyFileStreamNIO(
spillInputChannel,
mergedFileOutputChannel,
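Utils.copyFileStreamNIO transfers bytes between two file channels; in the Spark 2.x signature the two arguments truncated above are the start position and the number of bytes to copy. A sketch under that assumption, with illustrative file names:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import org.apache.spark.util.Utils;

public class CopyFileStreamNIODemo {
  public static void main(String[] args) throws IOException {
    File src = new File("input.bin");   // assumed to exist
    File dst = new File("output.bin");
    try (FileChannel in = new FileInputStream(src).getChannel();
         FileChannel out = new FileOutputStream(dst).getChannel()) {
      // Copy the whole source file, starting at offset 0, via NIO transfer.
      Utils.copyFileStreamNIO(in, out, 0L, src.length());
    }
  }
}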
Code example origin: org.apache.spark/spark-mllib_2.10
PrefixSpanModel<Integer> model = prefixSpan.run(sequences);
File tempDir = Utils.createTempDir(
System.getProperty("java.io.tmpdir"), "JavaPrefixSpanSuite");
String outputPath = tempDir.getPath();
Utils.deleteRecursively(tempDir);
Code example origin: org.apache.spark/spark-core_2.11
/**
* Release N bytes of execution memory for a MemoryConsumer.
*/
public void releaseExecutionMemory(long size, MemoryConsumer consumer) {
logger.debug("Task {} release {} from {}", taskAttemptId, Utils.bytesToString(size), consumer);
memoryManager.releaseExecutionMemory(size, taskAttemptId, consumer.getMode());
}
Code example origin: org.apache.spark/spark-core_2.10
String oomStackTrace = Utils.exceptionString(oom);
assertThat("expected OutOfMemoryError in " +
"org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
Code example origin: org.apache.spark/spark-core
boolean copyThrewException = true;
try {
lengths[i] = Utils.copyStream(in, out, false, transferToEnabled);
copyThrewException = false;
} finally {
Code example origin: org.apache.spark/spark-core_2.11
final FileChannel spillInputChannel = spillInputChannels[i];
final long writeStartTime = System.nanoTime();
Utils.copyFileStreamNIO(
spillInputChannel,
mergedFileOutputChannel,
Code example origin: org.apache.spark/spark-core_2.10
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
spillFilesCreated.clear();
taskContext = mock(TaskContext.class);
Code example origin: org.apache.spark/spark-mllib_2.10
.run(rdd);
File tempDir = Utils.createTempDir(
System.getProperty("java.io.tmpdir"), "JavaFPGrowthSuite");
String outputPath = tempDir.getPath();
Utils.deleteRecursively(tempDir);
Code example origin: org.apache.spark/spark-core_2.11
@Test
public void sequenceFile() {
File tempDir = Files.createTempDir();
tempDir.deleteOnExit();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<>(1, "a"),
new Tuple2<>(2, "aa"),
new Tuple2<>(3, "aaa")
);
JavaPairRDD<Integer, String> rdd = sc.parallelizePairs(pairs);
rdd.mapToPair(pair -> new Tuple2<>(new IntWritable(pair._1()), new Text(pair._2())))
.saveAsHadoopFile(outputDir, IntWritable.class, Text.class, SequenceFileOutputFormat.class);
// Try reading the output back as an object file
JavaPairRDD<Integer, String> readRDD = sc.sequenceFile(outputDir, IntWritable.class, Text.class)
.mapToPair(pair -> new Tuple2<>(pair._1().get(), pair._2().toString()));
Assert.assertEquals(pairs, readRDD.collect());
Utils.deleteRecursively(tempDir);
}
Code example origin: org.apache.spark/spark-core
/**
* Release N bytes of execution memory for a MemoryConsumer.
*/
public void releaseExecutionMemory(long size, MemoryConsumer consumer) {
logger.debug("Task {} release {} from {}", taskAttemptId, Utils.bytesToString(size), consumer);
memoryManager.releaseExecutionMemory(size, taskAttemptId, consumer.getMode());
}
Code example origin: apache/drill
return new ObjectPair<Long, Integer>(-1L, -1);
int executorMemoryInMB = Utils.memoryStringToMb(
sparkConf.get("spark.executor.memory", "512m"));
double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
Code example origin: org.apache.spark/spark-core
String oomStackTrace = Utils.exceptionString(oom);
assertThat("expected OutOfMemoryError in " +
"org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
Code example origin: org.apache.spark/spark-core_2.11
boolean copyThrewException = true;
try {
lengths[i] = Utils.copyStream(in, out, false, transferToEnabled);
copyThrewException = false;
} finally {