0

I get the following error when I run my Spark Scala program:

User class threw exception: java.lang.NoSuchMethodError: com.fasterxml.jackson.databind.JsonMappingException.&lt;init&gt;(Ljava/io/Closeable;Ljava/lang/String;)V

The particular line that throws the error is `val configString = UtilsKPI.downloadFromS3(configPath)`.

This is the code of UtilsKPI.

object UtilsKPI {

  // NOTE(review): mutable singleton state; assumes the setters below are
  // invoked once on the driver before any S3 call is made — TODO confirm.
  var bucketNameCode: String = ""
  var bucketNameData: String = ""
  var credentials: BasicAWSCredentials = null
  var client: AmazonS3Client = null

  /** Parses a flat JSON object string into a String -> String map. */
  def parseJSON(line: String): Map[String, String] = {
    JacksMapper.readValue[Map[String, String]](line)
  }

  /** Sets the bucket that holds code/config objects. */
  def setBucketNameCode(bnc: String): Unit = {
    bucketNameCode = bnc
  }

  /** Sets the bucket that holds data objects. */
  def setBucketNameData(bnd: String): Unit = {
    bucketNameData = bnd
  }

  /** Initializes the S3 client using the SDK's default credential chain. */
  def setS3(): Unit = {
    client = new AmazonS3Client()
  }

  /** Serializes the given map to a JSON string. */
  def convertMapToJSON(map: Map[String, Any]): String = {
    JacksMapper.writeValueAsString[scala.collection.immutable.Map[String, Any]](map)
  }

  /**
   * Uploads `config` as a UTF-8 JSON object to `filePath` in the code
   * bucket, deleting any pre-existing object under the same key first.
   */
  def uploadConfigToS3(config: String, filePath: String): Unit = {
    val CONTENT_TYPE = "application/json"
    val fileContentBytes = config.getBytes(StandardCharsets.UTF_8)
    val fileInputStream = new ByteArrayInputStream(fileContentBytes)
    val metadata = new ObjectMetadata()
    metadata.setContentType(CONTENT_TYPE)
    // Setting the length up front lets the SDK stream without buffering.
    metadata.setContentLength(fileContentBytes.length)
    val putObjectRequest = new PutObjectRequest(bucketNameCode, filePath, fileInputStream, metadata)
    client.deleteObject(new DeleteObjectRequest(bucketNameCode, filePath))
    client.putObject(putObjectRequest)
  }

  /** Downloads an object from the code bucket and returns its content as a string. */
  def downloadFromS3(downloadFile: String): String = {
    readObjectAsString(bucketNameCode, downloadFile)
  }

  /** Downloads an object from the data bucket and returns its content as a string. */
  def getByKeyFromS3(key: String): String = {
    readObjectAsString(bucketNameData, key)
  }

  // Reads an S3 object fully into a string. Fix: the original versions of
  // downloadFromS3/getByKeyFromS3 never closed the Source or the S3Object,
  // leaking the SDK's pooled HTTP connection on every call; repeated calls
  // would eventually exhaust the connection pool.
  private def readObjectAsString(bucket: String, key: String): String = {
    val s3Object = client.getObject(new GetObjectRequest(bucket, key))
    try {
      val myData = Source.fromInputStream(s3Object.getObjectContent())
      try {
        myData.getLines().mkString("\n")
      } finally {
        myData.close()
      }
    } finally {
      s3Object.close() // releases the underlying HTTP connection
    }
  }

}

This is my pom.xml file:

<properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>

        <java.version>1.8</java.version>
        <scala.version>2.11</scala.version>
        <scala.subversion>2.11.8</scala.subversion>
        <spark.version>2.2.0</spark.version>
        <!-- Must match the Jackson version Spark 2.2.0 provides at runtime
             (2.6.x). Compiling against 2.9.x while the cluster supplies 2.6.x
             causes java.lang.NoSuchMethodError on
             JsonMappingException.<init>(Closeable, String) — that constructor
             was only added in Jackson 2.7. Alternatively, keep 2.9.x and
             shade/relocate com.fasterxml.jackson in the assembled jar. -->
        <jackson.version>2.6.5</jackson.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>${scala.subversion}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.module</groupId>
            <artifactId>jackson-module-scala_${scala.version}</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.github.nscala-time</groupId>
            <artifactId>nscala-time_${scala.version}</artifactId>
            <version>2.18.0</version>
        </dependency>
        <dependency>
            <groupId>com.github.scopt</groupId>
            <artifactId>scopt_${scala.version}</artifactId>
            <version>3.3.0</version>
        </dependency>
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-s3</artifactId>
            <version>1.11.297</version>
        </dependency>
        <dependency>
            <groupId>com.typesafe.scala-logging</groupId>
            <artifactId>scala-logging_${scala.version}</artifactId>
            <version>3.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-8_${scala.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>
    </dependencies>
Markus
  • 3,562
  • 12
  • 48
  • 85
  • @Jarrod Roberson: I don't think that my question is a duplicate. I understand that the problem refers to different versions of the library. But I updated all libraries in my POM file. I cannot see the real reason why my code fails. – Markus Mar 19 '18 at 18:09
  • *the root cause is that you have compiled a class against a different version of the class that is missing a method, than the one you are using when running it.* –  Mar 19 '18 at 18:27

0 Answers