Does a pyspark SparkSession need a SparkContext to work?

Asked by 6bc51xsx on 2022-11-01 · Spark

I built the jupyter/all-spark-notebook Docker image, installed geomesa_pyspark, and tried to run the following example from the official guide.

import geomesa_pyspark
import pyspark
from pyspark.sql import SparkSession

conf = geomesa_pyspark.configure(
        jars=['/usr/local/spark/jars/geomesa-accumulo-spark-runtime_2.11-2.0.0.jar'],
        packages=['geomesa_pyspark','pytz'],
        spark_home='/usr/local/spark/').\
        setAppName('MyTestApp')

# sc = pyspark.SparkContext()  # uncommenting this line triggers the error below

spark = ( SparkSession
    .builder
    .config(conf=conf)
    .enableHiveSupport()
    .getOrCreate()
)
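With the SparkContext line left commented out, the builder creates (or reuses) the context itself; the traceback below even shows getOrCreate() calling SparkContext.getOrCreate() internally (session.py). A minimal check, assuming the session above came up successfully:

# The session already carries a SparkContext, so no explicit
# pyspark.SparkContext() call is needed.
print(spark.sparkContext)           # the context created by the builder
print(spark.sparkContext.version)   # e.g. '2.4.5'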

If the SparkContext creation statement is uncommented, however, the code fails with the following error.

---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<ipython-input-1-22f9613a0be5> in <module>
     31     .builder
     32     .master('spark://spark-master:7077')
---> 33     .config(conf=conf)
     34     .enableHiveSupport()
     35     .getOrCreate()

/usr/local/spark/python/pyspark/sql/session.py in getOrCreate(self)
    171                     for key, value in self._options.items():
    172                         sparkConf.set(key, value)
--> 173                     sc = SparkContext.getOrCreate(sparkConf)
    174                     # This SparkContext may be an existing one.
    175                     for key, value in self._options.items():

/usr/local/spark/python/pyspark/context.py in getOrCreate(cls, conf)
    365         with SparkContext._lock:
    366             if SparkContext._active_spark_context is None:
--> 367                 SparkContext(conf=conf or SparkConf())
    368             return SparkContext._active_spark_context
    369 

/usr/local/spark/python/pyspark/context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
    134         try:
    135             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
--> 136                           conf, jsc, profiler_cls)
    137         except:
    138             # If an error occurs, clean up in order to allow future SparkContext creation:

/usr/local/spark/python/pyspark/context.py in _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, jsc, profiler_cls)
    196 
    197         # Create the Java SparkContext through Py4J
--> 198         self._jsc = jsc or self._initialize_context(self._conf._jconf)
    199         # Reset the SparkConf to the one actually used by the SparkContext in JVM.
    200         self._conf = SparkConf(_jconf=self._jsc.sc().conf())

/usr/local/spark/python/pyspark/context.py in _initialize_context(self, jconf)
    304         Initialize SparkContext in function to allow subclass specific initialization
    305         """
--> 306         return self._jvm.JavaSparkContext(jconf)
    307 
    308     @classmethod

/usr/local/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1523         answer = self._gateway_client.send_command(command)
   1524         return_value = get_return_value(
-> 1525             answer, self._gateway_client, None, self._fqn)
   1526 
   1527         for temp_arg in temp_args:

/usr/local/spark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
--> 328                     format(target_id, ".", name), value)
    329             else:
    330                 raise Py4JError(

    Py4JJavaError: An error occurred while calling None.org.apache.spark.api.java.JavaSparkContext.
    : java.lang.AbstractMethodError: io.netty.util.concurrent.MultithreadEventExecutorGroup.newChild(Ljava/util/concurrent/ThreadFactory;[Ljava/lang/Object;)Lio/netty/util/concurrent/EventExecutor;
        at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:64)
        at io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:59)
        at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:78)
        at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:73)
        at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:60)
        at org.apache.spark.network.util.NettyUtils.createEventLoop(NettyUtils.java:50)
        at org.apache.spark.network.client.TransportClientFactory.<init>(TransportClientFactory.java:102)
        at org.apache.spark.network.TransportContext.createClientFactory(TransportContext.java:99)
        at org.apache.spark.rpc.netty.NettyRpcEnv.<init>(NettyRpcEnv.scala:71)
        at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:461)
        at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:57)
        at org.apache.spark.SparkEnv$.create(SparkEnv.scala:249)
        at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:175)
        at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:257)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:424)
        at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at py4j.Gateway.invoke(Gateway.java:238)
        at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)
        at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)
        at py4j.GatewayConnection.run(GatewayConnection.java:238)
        at java.lang.Thread.run(Thread.java:748)

I am using the following versions:

  • Spark 2.4.5
  • Hadoop 2.7
  • java-1.8.0-openjdk-amd64

Why does it need a SparkContext at all? Shouldn't one already be included in the SparkSession?

Answer from svmlkihl:

An AbstractMethodError indicates a classpath problem (see, for example, this post). Since the error occurs inside netty, you should check the classpath for netty jars of conflicting versions.
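A minimal sketch of such a check, assuming the jar layout from the question (the paths come from the configure() call above; adjust them for your installation). It lists the netty jars Spark itself ships (Spark 2.4.x bundles netty-all 4.1.x) and counts the io.netty classes bundled inside the GeoMesa runtime fat jar; netty classes from two different versions on the same classpath would explain the AbstractMethodError in MultithreadEventExecutorGroup.newChild:

import glob
import zipfile

SPARK_JARS = '/usr/local/spark/jars'
GEOMESA_JAR = SPARK_JARS + '/geomesa-accumulo-spark-runtime_2.11-2.0.0.jar'

# Netty jars shipped with Spark itself.
for jar in sorted(glob.glob(SPARK_JARS + '/netty*.jar')):
    print('Spark jar:', jar)

# Netty classes bundled inside the GeoMesa runtime jar; if these come
# from a different netty release than Spark's, the JVM may load a mix
# of incompatible classes.
with zipfile.ZipFile(GEOMESA_JAR) as zf:
    netty = [n for n in zf.namelist() if n.startswith('io/netty/')]
    print(len(netty), 'io.netty classes inside the GeoMesa runtime jar')

If two netty versions do show up, the usual fix is to remove or align the conflicting jar so that only one netty version is loaded.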
