
I created a Docker container from this BigDL image. However, when I tried to start the NNContext:

sc = init_nncontext("...")
spark = SparkContext(sc)

I got this error: Exception: Java gateway process exited before sending its port number

/tmp/ipykernel_188/2945161649.py in <module>
----> 1 sc = init_nncontext("anomaly detection")
      2 spark= SparkContext(sc)

/opt/work/bigdl-2.1.0/python/bigdl-spark_3.1.2-2.1.0-python-api.zip/bigdl/dllib/nncontext.py in init_nncontext(conf, cluster_mode, spark_log_level, redirect_spark_log, **kwargs)
    534         spark_args.update(conf.getAll())
    535     if cluster_mode == "spark-submit":
--> 536         sc = init_internal_nncontext(conf, spark_log_level, redirect_spark_log)
    537     elif cluster_mode == "local":
    538         if conf:

/opt/work/bigdl-2.1.0/python/bigdl-spark_3.1.2-2.1.0-python-api.zip/bigdl/dllib/nncontext.py in init_internal_nncontext(conf, spark_log_level, redirect_spark_log)
    642 
    643     if isinstance(conf, six.string_types):
--> 644         sc = getOrCreateSparkContext(conf=None, appName=conf)
    645     else:
    646         sc = getOrCreateSparkContext(conf=conf)

/opt/work/bigdl-2.1.0/python/bigdl-spark_3.1.2-2.1.0-python-api.zip/bigdl/dllib/nncontext.py in getOrCreateSparkContext(conf, appName)
    684             if appName:
    685                 spark_conf.setAppName(appName)
--> 686             return SparkContext.getOrCreate(spark_conf)
    687         else:
    688             return SparkContext.getOrCreate()

/opt/work/spark-3.1.2/python/lib/pyspark.zip/pyspark/context.py in getOrCreate(cls, conf)
    382         with SparkContext._lock:
    383             if SparkContext._active_spark_context is None:
--> 384                 SparkContext(conf=conf or SparkConf())
    385             return SparkContext._active_spark_context
    386 

/opt/work/spark-3.1.2/python/lib/pyspark.zip/pyspark/context.py in __init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
    142                 " is not allowed as it is a security risk.")
    143 
--> 144         SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
    145         try:
    146             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,

/opt/work/spark-3.1.2/python/lib/pyspark.zip/pyspark/context.py in _ensure_initialized(cls, instance, gateway, conf)
    329         with SparkContext._lock:
    330             if not SparkContext._gateway:
--> 331                 SparkContext._gateway = gateway or launch_gateway(conf)
    332                 SparkContext._jvm = SparkContext._gateway.jvm
    333 

/opt/work/spark-3.1.2/python/lib/pyspark.zip/pyspark/java_gateway.py in launch_gateway(conf, popen_kwargs)
    106 
    107             if not os.path.isfile(conn_info_file):
--> 108                 raise Exception("Java gateway process exited before sending its port number")
    109 
    110             with open(conn_info_file, "rb") as info:

Exception: Java gateway process exited before sending its port number
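For reference, this is the minimal version of what I am trying to run inside the container. It is only a sketch based on my reading of the traceback: init_nncontext seems to call SparkContext.getOrCreate() internally and return a SparkContext, so the extra SparkContext(sc) line in my snippet above is probably redundant (the failure happens before that line anyway):

from bigdl.dllib.nncontext import init_nncontext

# init_nncontext goes through getOrCreateSparkContext(), so the returned
# object should already be a usable SparkContext
sc = init_nncontext("anomaly detection")

# simple smoke test once the Java gateway actually comes up
rdd = sc.parallelize(range(10))
print(rdd.count())

Even this minimal version fails with the same "Java gateway process exited before sending its port number" exception.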