PeachLemonade / log / Mar 18th, 2024
2024-03-18 09:46:38,889 WARN util.Utils: spark.executor.instances less than spark.dynamicAllocation.minExecutors is invalid, ignoring its setting, please update your configs.
2024-03-18 09:46:43,266 WARN util.Utils: spark.executor.instances less than spark.dynamicAllocation.minExecutors is invalid, ignoring its setting, please update your configs.
2024-03-18 09:46:43,267 WARN cluster.YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
---------------------------------------------------------------------------
Py4JError                                 Traceback (most recent call last)
Cell In[11], line 1
----> 1 spark = SparkSession.builder.master("yarn").appName("L5T2").getOrCreate()

File /usr/local/lib/python3.8/dist-packages/pyspark/sql/session.py:497, in SparkSession.Builder.getOrCreate(self)
    495     sparkConf.set(key, value)
    496 # This SparkContext may be an existing one.
--> 497 sc = SparkContext.getOrCreate(sparkConf)
    498 # Do not update `SparkConf` for existing `SparkContext`, as it's shared
    499 # by all sessions.
    500 session = SparkSession(sc, options=self._options)

File /usr/local/lib/python3.8/dist-packages/pyspark/context.py:515, in SparkContext.getOrCreate(cls, conf)
    513 with SparkContext._lock:
    514     if SparkContext._active_spark_context is None:
--> 515         SparkContext(conf=conf or SparkConf())
    516     assert SparkContext._active_spark_context is not None
    517     return SparkContext._active_spark_context

File /usr/local/lib/python3.8/dist-packages/pyspark/context.py:203, in SparkContext.__init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls, udf_profiler_cls, memory_profiler_cls)
    201 SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
    202 try:
--> 203     self._do_init(
    204         master,
    205         appName,
    206         sparkHome,
    207         pyFiles,
    208         environment,
    209         batchSize,
    210         serializer,
    211         conf,
    212         jsc,
    213         profiler_cls,
    214         udf_profiler_cls,
    215         memory_profiler_cls,
    216     )
    217 except BaseException:
    218     # If an error occurs, clean up in order to allow future SparkContext creation:
    219     self.stop()

File /usr/local/lib/python3.8/dist-packages/pyspark/context.py:316, in SparkContext._do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, jsc, profiler_cls, udf_profiler_cls, memory_profiler_cls)
    311 # If encryption is enabled, we need to setup a server in the jvm to read broadcast
    312 # data via a socket.
    313 # scala's mangled names w/ $ in them require special treatment.
    314 self._encryption_enabled = self._jvm.PythonUtils.isEncryptionEnabled(self._jsc)
    315 os.environ["SPARK_AUTH_SOCKET_TIMEOUT"] = str(
--> 316     self._jvm.PythonUtils.getPythonAuthSocketTimeout(self._jsc)
    317 )
    318 os.environ["SPARK_BUFFER_SIZE"] = str(self._jvm.PythonUtils.getSparkBufferSize(self._jsc))
    320 self.pythonExec = os.environ.get("PYSPARK_PYTHON", "python3")

File /usr/local/lib/python3.8/dist-packages/py4j/java_gateway.py:1549, in JavaClass.__getattr__(self, name)
   1546         return get_return_value(
   1547             answer, self._gateway_client, self._fqn, name)
   1548 else:
-> 1549     raise Py4JError(
   1550         "{0}.{1} does not exist in the JVM".format(self._fqn, name))

Py4JError: org.apache.spark.api.python.PythonUtils.getPythonAuthSocketTimeout does not exist in the JVM
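
The repeated util.Utils warning at the top of the log says that spark.executor.instances was set lower than spark.dynamicAllocation.minExecutors, so Spark ignores the explicit executor count. A minimal sketch of a consistent builder configuration follows; the property names are standard Spark settings, but the numeric values are illustrative assumptions, not values taken from this log.

from pyspark.sql import SparkSession

# Minimal sketch: keep spark.executor.instances at or above
# spark.dynamicAllocation.minExecutors so neither setting is ignored.
# The values 2/2/8 are placeholders, not the configuration behind this log.
spark = (
    SparkSession.builder
    .master("yarn")
    .appName("L5T2")
    .config("spark.dynamicAllocation.enabled", "true")
    .config("spark.dynamicAllocation.minExecutors", "2")
    .config("spark.dynamicAllocation.maxExecutors", "8")
    .config("spark.executor.instances", "2")
    .getOrCreate()
)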
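
The final Py4JError (PythonUtils.getPythonAuthSocketTimeout does not exist in the JVM) is commonly caused by a version mismatch: the pip-installed pyspark package calls a method that the Spark JARs loaded by the JVM gateway are too old to provide. Below is a minimal sketch for comparing the two versions; it assumes SPARK_HOME points at the cluster-side Spark distribution, which is not shown in the log.

import os
import subprocess
import pyspark

# Minimal sketch: compare the Python-side pyspark version with the Spark
# distribution the JVM gateway loads (assumed to be under SPARK_HOME).
print("pyspark package version:", pyspark.__version__)

spark_home = os.environ.get("SPARK_HOME")
if spark_home:
    result = subprocess.run(
        [os.path.join(spark_home, "bin", "spark-submit"), "--version"],
        capture_output=True, text=True,
    )
    # spark-submit normally prints its version banner to stderr.
    print(result.stderr or result.stdout)
else:
    print("SPARK_HOME is not set; check the Spark version on the cluster directly.")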