Spark + Ignite 2.8.1 抛出 java.lang.NoClassDefFoundError: org/h2/index/BaseIndex

isr3a4wc  于 2021-05-27  发布在  Spark
关注(0)|答案(0)|浏览(306)

下面是spark ignite dataframe的一个简单示例,我得到了以下错误。
这个用例很简单,我想将读取的文件转换为df并将其写入ignite,稍后从ignite读取相同的数据
build.sbt

  1. name := "spark-word-count"
  2. version := "0.1"
  3. scalaVersion := "2.11.12"
  4. libraryDependencies ++= Seq(
  5. "org.apache.ignite" % "ignite-spark" % "2.8.1"
  6. )

WordCount.scala

  1. val linesRDD = sparkSession.sparkContext.textFile(filesPath)
  2. val filesProcessed = sparkSession.sparkContext.wholeTextFiles(filesPath).count()
  3. val filteredLinesRDD = if (cacheType.isDefined) {
  4. getFilteredRDD(linesRDD).persist(cacheType.get)
  5. } else {
  6. getFilteredRDD(linesRDD)
  7. }
  8. val processedLines = filteredLinesRDD.count()
  9. sparkSession.createDataFrame(getPhraseSizesRDD(filteredLinesRDD))
  10. .toDF("Phrase", "Size")
  11. .write.format(FORMAT_IGNITE)
  12. .option(OPTION_CONFIG_FILE, igniteConfigFile)
  13. .option(OPTION_TABLE, "PhraseSize")
  14. .option(OPTION_CREATE_TABLE_PRIMARY_KEY_FIELDS, "Phrase")
  15. .option(OPTION_CREATE_TABLE_PARAMETERS, "backups=1")
  16. .option(OPTION_STREAMER_ALLOW_OVERWRITE, "true")
  17. .mode(SaveMode.Append)
  18. .save()

错误堆栈跟踪:

  1. Exception in thread "main" java.lang.NoClassDefFoundError: org/h2/index/BaseIndex
  2. at java.lang.ClassLoader.defineClass1(Native Method)
  3. at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
  4. at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
  5. at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
  6. at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
  7. at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
  8. at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
  9. at java.security.AccessController.doPrivileged(Native Method)
  10. at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
  11. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  12. at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
  13. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  14. at java.lang.Class.forName0(Native Method)
  15. at java.lang.Class.forName(Class.java:264)
  16. at org.apache.ignite.internal.util.IgniteUtils.inClassPath(IgniteUtils.java:1727)
  17. at org.apache.ignite.internal.IgniteComponentType.inClassPath(IgniteComponentType.java:160)
  18. at org.apache.ignite.internal.IgnitionEx$IgniteNamedInstance.initializeDefaultSpi(IgnitionEx.java:2480)
  19. at org.apache.ignite.internal.IgnitionEx$IgniteNamedInstance.initializeConfiguration(IgnitionEx.java:2328)
  20. at org.apache.ignite.internal.IgnitionEx$IgniteNamedInstance.start(IgnitionEx.java:1697)
  21. at org.apache.ignite.internal.IgnitionEx.start0(IgnitionEx.java:1117)
  22. at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:637)
  23. at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:578)
  24. at org.apache.ignite.Ignition.getOrStart(Ignition.java:412)
  25. at org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:150)
  26. at org.apache.ignite.spark.IgniteContext.<init>(IgniteContext.scala:63)
  27. at org.apache.ignite.spark.IgniteContext$.apply(IgniteContext.scala:192)
  28. at org.apache.ignite.spark.impl.IgniteRelationProvider.igniteContext(IgniteRelationProvider.scala:248)
  29. at org.apache.ignite.spark.impl.IgniteRelationProvider.createRelation(IgniteRelationProvider.scala:102)
  30. at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
  31. at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
  32. at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
  33. at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
  34. at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  35. at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  36. at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  37. at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  38. at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  39. at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  40. at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:83)
  41. at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:81)
  42. at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  43. at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  44. at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:80)
  45. at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:127)
  46. at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:75)
  47. at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
  48. at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:285)
  49. at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)

移除 ignite-indexing 依赖解决了上述错误,但最终会出现不同的错误:

  1. libraryDependencies ++= Seq(
  2. ("org.apache.ignite" % "ignite-spark" % "2.8.1")
  3. .exclude("org.apache.ignite", "ignite-indexing")
  4. .exclude("com.h2database", "h2"),
  5. "com.h2database" % "h2" % "1.4.197"
  6. )

错误:

  1. 20/06/01 23:29:41 ERROR WordCountMain: Error executing WordCountJobMain: WordCountJob is failure
  2. class org.apache.ignite.IgniteException: Failed to execute query because indexing is disabled (consider adding module ignite-indexing to classpath or moving it from 'optional' to 'libs' folder).
  3. at org.apache.ignite.internal.processors.query.GridQueryProcessor.checkxEnabled(GridQueryProcessor.java:2217)
  4. at org.apache.ignite.internal.processors.query.GridQueryProcessor.querySqlFields(GridQueryProcessor.java:2388)
  5. at org.apache.ignite.internal.processors.query.GridQueryProcessor.querySqlFields(GridQueryProcessor.java:2323)
  6. at org.apache.ignite.internal.processors.query.GridQueryProcessor.querySqlFields(GridQueryProcessor.java:2296)
  7. at org.apache.ignite.spark.impl.QueryHelper$.createTable(QueryHelper.scala:64)
  8. at org.apache.ignite.spark.impl.IgniteRelationProvider.createRelation(IgniteRelationProvider.scala:160)
  9. at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
  10. at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
  11. at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
  12. at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
  13. at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  14. at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  15. at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  16. at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  17. at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  18. at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  19. at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:83)
  20. at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:81)
  21. at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  22. at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  23. at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:80)
  24. at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:127)
  25. at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:75)
  26. at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
  27. at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:285)
  28. at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)
  29. at main.scala.WordCountJob$.doRun(WordCountJob.scala:43)

暂无答案!

目前还没有任何答案,快来回答吧!

相关问题