1

I added all the jars which are required for this project,but i am unable to resolve this exception.can anyone give e suggestion regarding this. Can u also please tell tell how to give hive database access permissions. Thanks in advance.

java.lang.ClassNotFoundException: org.apache.hadoop.hive.jdbc.HiveDriver
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:190)
    at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:20)
    at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)
Exception in thread "main" java.sql.SQLException: No suitable driver found for jdbc:hive://localhost/EZYTRUK
    at java.sql.DriverManager.getConnection(DriverManager.java:596)
    at java.sql.DriverManager.getConnection(DriverManager.java:215)
    at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:39)
    at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>BigData</groupId>
  <artifactId>BigData</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <properties>
  <slf4j.version>1.6.1</slf4j.version>
  <hadoop-version>2.6.0</hadoop-version>
  <mysql-connector-version>5.1.40</mysql-connector-version>
  <sqoop-core-version>1.99.3</sqoop-core-version>
  <zookeeper-version>3.4.9</zookeeper-version>
  <hive-jdbc-version>1.2.1</hive-jdbc-version>
  <commons-io-version>2.2</commons-io-version>
  <commons-logging.version>1.2</commons-logging.version>
  </properties>
  <dependencies>
  <dependency>
    <groupId>commons-io</groupId>
    <artifactId>commons-io</artifactId>
    <version>${commons-io-version}</version>
</dependency>
 <dependency>
        <groupId>commons-logging</groupId>
        <artifactId>commons-logging</artifactId>
        <version>${commons-logging.version}</version>
   </dependency>        
   <dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>${mysql-connector-version}</version>
   </dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop-version}</version>
</dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>${hadoop-version}</version>
</dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>${hadoop-version}</version>
</dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>${hadoop-version}</version>
</dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-yarn-common</artifactId>
    <version>${hadoop-version}</version>
</dependency>
 <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>1.2.1</version>
</dependency>
 <dependency> 
    <groupId>org.apache.sqoop</groupId>
    <artifactId>sqoop-core</artifactId>
    <version>${sqoop-core-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.sqoop</groupId>
    <artifactId>sqoop-client</artifactId>
    <version>${sqoop-core-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.sqoop</groupId>
    <artifactId>sqoop-common</artifactId>
    <version>${sqoop-core-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.sqoop.connector</groupId>
    <artifactId>sqoop-connector-generic-jdbc</artifactId>
    <version>${sqoop-core-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.sqoop</groupId>
    <artifactId>sqoop</artifactId>
    <version>1.4.1-incubating</version>
</dependency>
<dependency>
    <groupId>org.apache.zookeeper</groupId>
    <artifactId>zookeeper</artifactId>
    <version>${zookeeper-version}</version>
</dependency>

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-exec</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-metastore</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-common</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-service</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-shims</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-serde</artifactId>
    <version>${hive-jdbc-version}</version>
</dependency>

</dependencies>
  <packaging>war</packaging>
  <build>
    <sourceDirectory>src</sourceDirectory>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.3</version>
        <configuration>
          <source>1.7</source>
          <target>1.7</target>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-war-plugin</artifactId>
        <version>2.6</version>
        <configuration>
          <warSourceDirectory>WebContent</warSourceDirectory>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>

Program:

 package org.hive.com;

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    import com.mysql.jdbc.Statement;

    public class CreateHiveExternalTable {

        public static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

        public static void createHiveExternalTable() throws FileNotFoundException, IOException, SQLException {
            try {
                Class.forName(driverName);
            } catch (ClassNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

            Configuration config = new Configuration();
            config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/core-site.xml"));
            config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/hdfs-site.xml"));



        Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb","hive","");
            Statement stmt = (Statement) connect.createStatement();
            //String tableName = properties.getProperty("hive_table_name");
            stmt.executeQuery("CREATE EXTERNAL TABLE IF NOT EXISTS"
             +"SHIPPER(S_ID INT,S_NAME VARCHAR(100),S_ADDR VARCHAR(100),S_CITY VARCHAR(100)"
             +"ROW FORMAT DELIMITED FIELDS TERMINATED BY ','"
             +"LOCATION 'hdfs://localhost://hive'");

            System.out.println("Table created.");
            connect.close();
        }

         public static void main(String[] args) throws FileNotFoundException, IOException, SQLException{
             CreateHiveExternalTable hiveTable = new CreateHiveExternalTable();
             hiveTable.createHiveExternalTable();
         }     

        }    
Yasodhara
  • 111
  • 2
  • 13
  • Any example of the code you're using? – Yeikel Dec 30 '16 at 06:41
  • public class CreateHiveExternalTable { public static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver"; public static void createHiveExternalTable() throws FileNotFoundException, IOException, SQLException { try { Class.forName(driverName); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } – Yasodhara Dec 30 '16 at 07:01
  • u can see above i added code – Yasodhara Dec 30 '16 at 07:04

2 Answers2

1

From this post Connect from Java to Hive using JDBC

Try

private static String driverName = "org.apache.hive.jdbc.HiveDriver"

instead of

private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

I hope you have added Class.forName(driverName) statement in your code

Also :

    Connection connect = DriverManager.getConnection("jdbc:hive2://localhost:HIVEPORT/hivedb","hive","");

instead of

Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb","hive","");

I am not sure what port you're running Hive , but remember to change this line

localhost:HIVEPORT
Community
  • 1
  • 1
Yeikel
  • 854
  • 10
  • 18
  • can u please tell me how to find hive port number because till now i used hive directly through terminal but not using java... – Yasodhara Dec 30 '16 at 07:38
1

hive.server2.thrift.port is the property you can check for the port.

on hive shell give command "set hive.server2.thrift.port" this will give you the port number of hive

by default the hive port is set to 10000 but you can chech using the above command on hive shell..

Aditya Agarwal
  • 693
  • 1
  • 10
  • 17