1

I am writing a simple program to read a JSON file and write it into a local MongoDB instance; I have MongoDB installed.

In Spark I wrote this program:

import com.mongodb.spark.MongoSpark
import org.apache.spark.sql.{DataFrame, SparkSession}

    // Entry point: reads a multiline JSON file into a DataFrame and saves it
    // to the MongoDB collection configured in the session's output URI.
    //
    // NOTE(review): the NoSuchMethodError (scala.Predef$.refArrayOps) reported
    // with this code is a Scala binary-version mismatch between the Spark
    // distribution and the mongo-spark-connector artifact (e.g. a _2.11
    // connector on a Scala 2.12 Spark). Align the connector's Scala suffix
    // with the Spark build — it is not caused by this code's logic.
    object Main {

      // Explicit main instead of `extends App`: the App trait's delayedInit
      // machinery (visible in the stack trace) has initialization-order
      // pitfalls and is discouraged for non-trivial entry points.
      def main(args: Array[String]): Unit = {

        // Local SparkSession with the connector's input/output URIs pointing
        // at database "product", collection "styles" on the default port.
        val sparkSession = SparkSession.builder().appName("SparkMongo")
          .master("local[*]")
          .config("spark.mongodb.input.uri", "mongodb://localhost:27017/product.styles")
          .config("spark.mongodb.output.uri", "mongodb://localhost:27017/product.styles")
          .getOrCreate()

        // `val`, not `var`: the DataFrame reference is never reassigned.
        // "multiline" lets the reader accept a single JSON document spanning
        // multiple lines rather than one JSON object per line.
        val data: DataFrame = sparkSession.read.option("multiline", "true").json("/home/sandeep/style.json")

        // Writes `data` to the collection named by spark.mongodb.output.uri.
        MongoSpark.save(data)

        // Release Spark resources deterministically instead of relying on JVM exit.
        sparkSession.stop()
      }
    }

But I get the error below when I run it using spark-submit:

Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.refArrayOps([Ljava/lang/Object;)[Ljava/lang/Object;
        at com.mongodb.spark.config.MongoCompanionConfig.getOptionsFromConf(MongoCompanionConfig.scala:233)
        at com.mongodb.spark.config.MongoCompanionConfig.getOptionsFromConf$(MongoCompanionConfig.scala:232)
        at com.mongodb.spark.config.WriteConfig$.getOptionsFromConf(WriteConfig.scala:37)
        at com.mongodb.spark.config.MongoCompanionConfig.apply(MongoCompanionConfig.scala:113)
        at com.mongodb.spark.config.MongoCompanionConfig.apply$(MongoCompanionConfig.scala:112)
        at com.mongodb.spark.config.WriteConfig$.apply(WriteConfig.scala:37)
        at com.mongodb.spark.config.MongoCompanionConfig.apply(MongoCompanionConfig.scala:100)
        at com.mongodb.spark.config.MongoCompanionConfig.apply$(MongoCompanionConfig.scala:100)
        at com.mongodb.spark.config.WriteConfig$.apply(WriteConfig.scala:37)
        at com.mongodb.spark.MongoSpark$.save(MongoSpark.scala:138)
        at Main$.delayedEndpoint$Main$1(Main.scala:15)
        at Main$delayedInit$body.apply(Main.scala:5)
        at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
        at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
        at scala.App$$anonfun$main$1.apply(App.scala:76)
        at scala.App$$anonfun$main$1.apply(App.scala:76)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
        at scala.App$class.main(App.scala:76)
        at Main$.main(Main.scala:5)
        at Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

I am not sure where I am going wrong or what I am doing wrong.

Sandeep540
  • 897
  • 3
  • 13
  • 38

1 Answer

0

Don’t you need to specify a port for your MongoDB server?

SanBan
  • 635
  • 5
  • 12