[INFO] 2020-11-26 09:14:12,488 dispatcher-event-loop-2 org.apache.spark.executor.Executor - {} - Executor is trying to kill task 13306.0 in stage 31.0 (TID 216456), reason: Stage cancelled
[INFO] 2020-11-26 09:14:12,489 dispatcher-event-loop-0 org.apache.spark.executor.Executor - {} - Executor is trying to kill task 14406.0 in stage 31.0 (TID 217229), reason: Stage cancelled
[INFO] 2020-11-26 09:14:12,490 dispatcher-event-loop-1 org.apache.spark.executor.Executor - {} - Executor is trying to kill task 15329.0 in stage 31.0 (TID 217973), reason: Stage cancelled
[INFO] 2020-11-26 09:14:13,003 Executor task launch worker for task 217973 org.apache.spark.executor.Executor - {} - Executor killed task 15329.0 in stage 31.0 (TID 217973), reason: Stage cancelled
[ERROR] 2020-11-26 09:15:45,077 Executor task launch worker for task 217229 org.apache.spark.memory.TaskMemoryManager - {} - error while calling spill() on org.apache.spark.shuffle.sort.ShuffleExternalSorter@9fa4f9f
java.io.IOException: No space left on device
at java.io.FileOutputStream.writeBytes(Native Method) ~[?:1.8.0_252]
at java.io.FileOutputStream.write(FileOutputStream.java:326) ~[?:1.8.0_252]
at org.apache.spark.storage.TimeTrackingOutputStream.write(TimeTrackingOutputStream.java:58) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82) ~[?:1.8.0_252]
at java.io.BufferedOutputStream.write(BufferedOutputStream.java:126) ~[?:1.8.0_252]
at org.xerial.snappy.SnappyOutputStream.dumpOutput(SnappyOutputStream.java:360) ~[snappy-java-1.1.4.jar:?]
at org.xerial.snappy.SnappyOutputStream.compressInput(SnappyOutputStream.java:374) ~[snappy-java-1.1.4.jar:?]
at org.xerial.snappy.SnappyOutputStream.write(SnappyOutputStream.java:130) ~[snappy-java-1.1.4.jar:?]
at org.apache.spark.io.SnappyOutputStreamWrapper.write(CompressionCodec.scala:207) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:252) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.writeSortedFile(ShuffleExternalSorter.java:211) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.spill(ShuffleExternalSorter.java:264) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.memory.TaskMemoryManager.acquireExecutionMemory(TaskMemoryManager.java:206) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.memory.TaskMemoryManager.allocatePage(TaskMemoryManager.java:285) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.memory.MemoryConsumer.allocatePage(MemoryConsumer.java:117) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.acquireNewPageIfNecessary(ShuffleExternalSorter.java:373) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.insertRecord(ShuffleExternalSorter.java:397) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.insertRecordIntoSorter(UnsafeShuffleWriter.java:267) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.write(UnsafeShuffleWriter.java:188) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.scheduler.Task.run(Task.scala:121) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[?:1.8.0_252]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[?:1.8.0_252]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_252]
[INFO] 2020-11-26 09:15:45,306 Executor task launch worker for task 216456 org.apache.spark.executor.Executor - {} - Executor interrupted and killed task 13306.0 in stage 31.0 (TID 216456), reason: Stage cancelled
[ERROR] 2020-11-26 09:15:45,500 Executor task launch worker for task 217229 org.apache.spark.executor.Executor - {} - Exception in task 14406.0 in stage 31.0 (TID 217229)
org.apache.spark.memory.SparkOutOfMemoryError: error while calling spill() on org.apache.spark.shuffle.sort.ShuffleExternalSorter@9fa4f9f : No space left on device
at org.apache.spark.memory.TaskMemoryManager.acquireExecutionMemory(TaskMemoryManager.java:219) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.memory.TaskMemoryManager.allocatePage(TaskMemoryManager.java:285) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.memory.MemoryConsumer.allocatePage(MemoryConsumer.java:117) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.acquireNewPageIfNecessary(ShuffleExternalSorter.java:373) ~[spark-core_2.11-2.4.0.jar:2.4.0]
at org.apache.spark.shuffle.sort.ShuffleExternalSorter.insertRecord(ShuffleExternalSorter.java:397) ~[spark-core_2.11-2.4.0.jar:2.4.0]
	at org.apache.spark.shuffle.sort.UnsafeShuffleWriter.insertRecordIntoSorter(UnsafeShuffleWriter.java:267) ~[spark-core_2.11-2.4.0.jar:2.4.0]
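
What the trace shows: TaskMemoryManager.acquireExecutionMemory asked the ShuffleExternalSorter to spill its in-memory shuffle buffer to disk, and the spill's DiskBlockObjectWriter failed with java.io.IOException: No space left on device; Spark then surfaces that as a SparkOutOfMemoryError. So the executor ran out of local scratch disk for shuffle spill files, not heap. Below is a minimal configuration sketch along those lines, assuming a deployment where spark.local.dir is honored (on YARN the NodeManager's local dirs are used instead); the path and partition count are hypothetical, illustrative values only, not a verified fix for this job:

```scala
import org.apache.spark.sql.SparkSession

// Sketch only: values are illustrative, not a verified fix for this job.
val spark = SparkSession.builder()
  .appName("shuffle-spill-sketch")
  // Put shuffle spill files on a volume with more free space.
  // NOTE: on YARN this setting is ignored; yarn.nodemanager.local-dirs
  // is used there. The path below is hypothetical.
  .config("spark.local.dir", "/mnt/bigdisk/spark-tmp")
  // More shuffle partitions -> smaller per-task spill files (hypothetical value).
  .config("spark.sql.shuffle.partitions", "2000")
  .getOrCreate()
```

The config route only helps if a larger volume actually exists on the worker nodes; otherwise the remedy is provisioning more local disk or cleaning up stale shuffle data there.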