package RDD_Programing

import org.apache.spark.sql.SparkSession

object RDD_Programing {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("rdd programing")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("OFF")

    val input_path = "input/readme/txt"
    val output_path = ""

    val rdd1 = sc.textFile(input_path)                    // read the file as lines
    val rdd2 = rdd1.flatMap(_.split(" "))                  // split each line into words (split(" "), not split(""))
    val rdd3 = rdd2.map(x => (x, x.length))                // pair each word with its length
    val rdd4 = rdd3.filter { case (_, len) => len >= 6 }   // keep only words of at least 6 characters
    rdd4.collect().foreach(println)

    spark.stop()
  }
}
Error:
Exception in thread "main" java.lang.IllegalAccessError: class org.apache.spark.storage.StorageUtils$ (in unnamed module @0x4c2bb6e0) cannot access class sun.nio.ch.DirectBuffer (in module java.base) because module java.base does not export sun.nio.ch to unnamed module @0x4c2bb6e0
at org.apache.spark.storage.StorageUtils$.<init>(StorageUtils.scala:213)
at org.apache.spark.storage.StorageUtils$.<clinit>(StorageUtils.scala)
at org.apache.spark.storage.BlockManagerMasterEndpoint.<init>(BlockManagerMasterEndpoint.scala:114)
at org.apache.spark.SparkEnv$.$anonfun$create$9(SparkEnv.scala:353)
at org.apache.spark.SparkEnv$.registerOrLookupEndpoint$1(SparkEnv.scala:290)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:339)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:279)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:464)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2704)
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:953)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:947)
at RDD_Programing.RDD_Programing$.main(RDD_Programing.scala:8)
at RDD_Programing.RDD_Programing.main(RDD_Programing.scala)
Exception in thread "main" java.lang.IllegalAccessError [solved]
1 answer
Check your Java version. It will not work with Java 11/17 as-is; it works with JDK 1.8. The stack trace shows why: Spark's StorageUtils touches the internal class sun.nio.ch.DirectBuffer, and from Java 9 onward the module system no longer exports sun.nio.ch to unnamed modules, so the access fails with an IllegalAccessError. Switching the project SDK to JDK 1.8 avoids the restriction.
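
If staying on a newer JDK is preferable, the error message itself points to the other common fix: export the internal sun.nio.ch package to unnamed modules when the driver JVM starts. Below is a minimal sketch of how that could look in an sbt build, assuming the program is launched with sbt run; the Scala and Spark versions shown are illustrative assumptions, not taken from the question.

// build.sbt (sketch; versions are assumptions, adjust to the actual project)
name := "RDD_Programing"
scalaVersion := "2.12.18"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.2.1"

// Fork the run so the JVM options are actually applied, then export
// sun.nio.ch to the unnamed module that Spark's StorageUtils lives in.
fork := true
javaOptions += "--add-exports=java.base/sun.nio.ch=ALL-UNNAMED"

When running the main class directly from an IDE instead of sbt, the same --add-exports flag goes into the run configuration's VM options.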