This article collects code examples of the Java method org.apache.spark.util.Utils.createTempDir() and shows how Utils.createTempDir() is used in practice. The examples come from platforms such as GitHub, Stack Overflow, and Maven, and were extracted from selected open-source projects, so they should serve as useful references. Details of the Utils.createTempDir() method:
Package: org.apache.spark.util
Class: Utils
Method: createTempDir
Method description: none available.
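Every excerpt on this page calls createTempDir with two arguments, a root directory (usually System.getProperty("java.io.tmpdir")) and a name prefix, and gets back a java.io.File pointing at a freshly created directory. Before the excerpts, here is a minimal, self-contained sketch of that pattern. It is not taken from any of the projects below: the class name is illustrative, and it assumes that Utils.deleteRecursively, a companion helper on the same Utils class, is used for explicit cleanup.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import org.apache.spark.util.Utils;

// Minimal sketch (illustrative class name) of the createTempDir pattern used in the test suites below.
public class CreateTempDirSketch {
  public static void main(String[] args) throws Exception {
    // First argument: the root directory to create under; second argument: the name prefix.
    File tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "demo");
    try {
      // The returned File is an ordinary directory; write a scratch file into it.
      File scratch = new File(tempDir, "scratch.txt");
      Files.write(scratch.toPath(), "hello".getBytes(StandardCharsets.UTF_8));
      System.out.println("wrote " + scratch.getAbsolutePath());
    } finally {
      // Spark typically also registers the directory for deletion at JVM shutdown,
      // but tests usually delete it explicitly as well.
      Utils.deleteRecursively(tempDir);
    }
  }
}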
Code example source: org.apache.spark/spark-core_2.11
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
  spillFilesCreated.clear();
  taskContext = mock(TaskContext.class);
Code example source: org.apache.spark/spark-core_2.10
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
  spillFilesCreated.clear();
  taskContext = mock(TaskContext.class);
Code example source: org.apache.spark/spark-core
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
  spillFilesCreated.clear();
  taskContext = mock(TaskContext.class);
Code example source: org.apache.spark/spark-core_2.10
public void setUp() throws IOException {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir("test", "test");
  mergedOutputFile = File.createTempFile("mergedoutput", "", tempDir);
  partitionSizesInMergedFile = null;
Code example source: org.apache.spark/spark-core_2.11
taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
spillFilesCreated.clear();
MockitoAnnotations.initMocks(this);
Code example source: org.apache.spark/spark-core_2.10
taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
spillFilesCreated.clear();
MockitoAnnotations.initMocks(this);
Code example source: org.apache.spark/spark-core_2.11
public void setUp() throws IOException {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(null, "test");
  mergedOutputFile = File.createTempFile("mergedoutput", "", tempDir);
  partitionSizesInMergedFile = null;
Code example source: org.apache.spark/spark-core
public void setUp() throws IOException {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(null, "test");
  mergedOutputFile = File.createTempFile("mergedoutput", "", tempDir);
  partitionSizesInMergedFile = null;
Code example source: org.apache.spark/spark-core
taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
spillFilesCreated.clear();
MockitoAnnotations.initMocks(this);
Code example source: org.apache.spark/spark-sql_2.10
@Before
public void setUp() {
  input = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "input").toString();
  File f = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "output");
  f.delete();
  output = f.toString();
}
Code example source: org.apache.spark/spark-sql_2.11
@Before
public void setUp() {
  input = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "input").toString();
  File f = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "output");
  f.delete();
  output = f.toString();
}
Code example source: org.apache.spark/spark-sql
@Before
public void setUp() {
  input = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "input").toString();
  File f = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "output");
  f.delete();
  output = f.toString();
}
Code example source: org.apache.spark/spark-sql
@Before
public void setUp() {
  spark = new TestSparkSession();
  input = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "input").toString();
}
Code example source: org.apache.spark/spark-sql_2.11
@Before
public void setUp() {
  spark = new TestSparkSession();
  input = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "input").toString();
}
Code example source: org.apache.spark/spark-sql_2.10
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
    .master("local[*]")
    .appName("testing")
    .getOrCreate();
  path =
    Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists()) {
    path.delete();
  }
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
Code example source: org.apache.spark/spark-sql_2.11
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
    .master("local[*]")
    .appName("testing")
    .getOrCreate();
  path =
    Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists()) {
    path.delete();
  }
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
Code example source: org.apache.spark/spark-sql
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
    .master("local[*]")
    .appName("testing")
    .getOrCreate();
  path =
    Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists()) {
    path.delete();
  }
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
Code example source: org.apache.spark/spark-mllib
@Override
public void setUp() throws IOException {
  super.setUp();
  tempDir = Utils.createTempDir(
    System.getProperty("java.io.tmpdir"), "JavaDefaultReadWriteSuite");
}
Code example source: org.apache.spark/spark-mllib_2.10
@Override
public void setUp() throws IOException {
  super.setUp();
  tempDir = Utils.createTempDir(
    System.getProperty("java.io.tmpdir"), "JavaDefaultReadWriteSuite");
}
Code example source: org.apache.spark/spark-streaming_2.10
@SuppressWarnings("unchecked")
@Test
public void testTextFileStream() throws IOException {
  File testDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark");
  List<List<String>> expected = fileTestPrepare(testDir);
  JavaDStream<String> input = ssc.textFileStream(testDir.toString());
  JavaTestUtils.attachTestOutputStream(input);
  List<List<String>> result = JavaTestUtils.runStreams(ssc, 1, 1);
  assertOrderInvariantEquals(expected, result);
}
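The excerpts above only show the setUp side of these suites; the corresponding tearDown methods normally delete the directory again. The following is a hedged lifecycle sketch of that counterpart, not taken from Spark's sources: the class and field names are illustrative, and Utils.deleteRecursively is assumed as the cleanup helper, as in the sketch at the top of this page.

import java.io.File;

import org.apache.spark.util.Utils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertTrue;

// Illustrative setUp/tearDown lifecycle; class and field names are not from Spark's sources.
public class TempDirLifecycleSketch {
  private File tempDir;

  @Before
  public void setUp() {
    tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "sketch-test");
  }

  @Test
  public void tempDirIsUsable() {
    assertTrue(tempDir.isDirectory());
  }

  @After
  public void tearDown() {
    // Mirror of the setUp examples above: delete the temporary directory explicitly.
    Utils.deleteRecursively(tempDir);
  }
}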