2

I am getting this error:

[INFO] --- maven-jar-plugin:2.4:jar (default-jar) @ cdt-hive ---
[INFO] Building jar: /Users/alexamil/WebstormProjects/cisco/cdt-hive/target/cdt-hive-1.0-SNAPSHOT.jar
[INFO] 
[INFO] --- exec-maven-plugin:1.6.0:java (default-cli) @ cdt-hive ---
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/Users/alexamil/.m2/repository/org/slf4j/slf4j-log4j12/1.7.5/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/Users/alexamil/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Trying to connect!
[WARNING] 
java.lang.IncompatibleClassChangeError: Implementing class
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.eclipse.jetty.server.ServerConnector.<init>(ServerConnector.java:97)
        at spark.embeddedserver.jetty.SocketConnectorFactory.createSocketConnector(SocketConnectorFactory.java:45)
        at spark.embeddedserver.jetty.EmbeddedJettyServer.ignite(EmbeddedJettyServer.java:96)
        at spark.Service.lambda$init$0(Service.java:349)
        at java.lang.Thread.run(Thread.java:745)

I can confirm that the error only occurs when I include the Spark route registration line indicated below by the ∆ symbol.

Here is the code I am running:

package cdt;
import org.apache.log4j.*;
import java.sql.*;
import static spark.Spark.*;

public class App {

    /** Log4j logger for this class. */
    final static Logger logger = Logger.getLogger(App.class);

    /**
     * Registers a Spark HTTP route, then connects to Hive over JDBC and prints
     * the first table name returned by {@code show tables}.
     *
     * @param args command-line arguments (unused)
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException if connecting to Hive or executing the query fails
     */
    public static void main(String[] args) throws ClassNotFoundException, SQLException {

        BasicConfigurator.configure(); // configure log4j with a default console appender
        Logger.getRootLogger().setLevel(Level.WARN);

        // ∆ register Spark route — this starts an embedded Jetty server on a
        // background thread, which is what triggers the classloading failure
        // when an incompatible Jetty version is on the classpath.
        get("/hello", (req, res) -> "Hello World");

        Class.forName("org.apache.hive.jdbc.HiveDriver");

        System.out.println("Trying to connect!");
        // SECURITY NOTE(review): credentials are hard-coded in source; move them
        // to environment variables or external configuration.
        // try-with-resources guarantees the connection, statement and result set
        // are closed even on failure (the original code leaked all three).
        try (Connection con = DriverManager.getConnection(
                     "jdbc:hive2://hdprd1-edge-lb01:8888", "hdpair", "Dat@_Infusi0n_1");
             Statement stmt = con.createStatement()) {
            System.out.println("Connected!");
            String sql = "show tables";
            System.out.println("Trying to execute query!");
            try (ResultSet res = stmt.executeQuery(sql)) {
                System.out.println("Query executed!");
                if (res.next()) {
                    System.out.println(res.getString(1));
                } else {
                    System.out.println("Could not connect!");
                }
            }
        }
    }
}

and here is my pom.xml file:

<project>

    <modelVersion>4.0.0</modelVersion>
    <groupId>cdt</groupId>
    <artifactId>cdt-hive</artifactId>
    <packaging>jar</packaging>
    <version>1.0-SNAPSHOT</version>
    <name>cdt-hive</name>
    <url>http://maven.apache.org</url>

    <properties>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <build>

        <plugins>
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>exec-maven-plugin</artifactId>
                <version>1.6.0</version>
                <configuration>
                    <mainClass>cdt.App</mainClass>
                    <arguments>
                        <argument>foo</argument>
                        <argument>bar</argument>
                        <argument>baz</argument>
                    </arguments>
                </configuration>
            </plugin>
        </plugins>
    </build>



    <dependencies>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.5</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.8.1</version>
            <scope>provided</scope>
        </dependency>

        <!-- hive-jdbc transitively pulls in old Jetty / servlet-api artifacts
             that conflict with the Jetty 9 bundled with spark-core. Loading
             org.eclipse.jetty.server.ServerConnector against the stale jars is
             what produces the IncompatibleClassChangeError, so exclude them and
             let Spark's own Jetty win. Verify with `mvn dependency:tree`. -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.1.1</version>
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.orbit</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>servlet-api</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.8.2</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>com.sparkjava</groupId>
            <artifactId>spark-core</artifactId>
            <version>2.5</version>
        </dependency>
    </dependencies>

</project>

Does anyone know what this error means, how the Spark route registration causes it, and how I can avoid it?

mck
  • 40,932
  • 13
  • 35
  • 50
Alexander Mills
  • 90,741
  • 139
  • 482
  • 817

1 Answer

0

This error means that a class could not be loaded at runtime because some class on your runtime classpath is incompatible with another. The stack trace fails while loading `org.eclipse.jetty.server.ServerConnector`, so the conflict is most likely around Jetty/servlet classes: you probably have multiple versions of the Jetty libraries on the classpath (e.g. an old one pulled in transitively) and the wrong one is being picked up.

Use

mvn dependency:tree

to print out the dependency hierarchy and check it carefully for duplicate artifacts with different versions. Then check your server's runtime classpath as well.

Konstantin Pribluda
  • 12,329
  • 1
  • 30
  • 35