I'm new to Gradle and built a JAR with it. When I try executing that JAR using spark-submit, I get a ClassNotFoundException. I'm able to run the same application from IntelliJ IDEA, but it fails when executed as a standalone JAR.
Below are the contents of build.gradle:
/*
 * This file was generated by the Gradle 'init' task.
 */
apply plugin: 'scala'
apply plugin: 'idea'
apply plugin: 'org.sonarqube'
// Apply SonarQube details for Gradle
apply from: "${rootProject.rootDir}/gradle/sonar.gradle"
repositories {
    mavenLocal()
    mavenCentral()
    maven {
        url = uri('https://repo1.maven.org/maven2')
    }
}
dependencies {
    implementation 'com.google.code.gson:gson:2.8.2'
    implementation 'io.netty:netty-all:4.1.42.Final'
    implementation 'com.github.jengelman.gradle.plugins:shadow:4.0.1'
    testImplementation "org.scalatest:scalatest_2.11:$scalaTestVersion"
    runtime "com.apple.jvm.commons:commons-metrics:0.13.1"
}
buildscript {
    dependencies {
        classpath 'org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:2.6'
    }
}
sourceSets {
    main {
        scala {
            srcDirs = ['src/main/scala']
        }
    }
    test {
        scala.srcDirs = ['src/test/scala']
    }
}
sonarqube {
    properties {
        property "sonar.projectName", "App-name"
        property "sonar.projectKey", "App-name"
        property "sonar.sourceEncoding", "UTF-8"
        property "sonar.tests", "src/test"
        property "sonar.sources", "src/main"
        property "sonar.scala.coverage.reportPaths", "$buildDir/reports/scoverage/scoverage.xml"
        property "sonar.coverage.jacoco.xmlReportPaths", "$buildDir/jacoco/jacoco.xml"
        property "sonar.test.exclusions", "src/test/**"
    }
}
configurations {
    jars
}
// Force the jackson-module-scala version to handle the error below:
// com.fasterxml.jackson.databind.JsonMappingException: Scala module 2.8.8 requires Jackson Databind version >= 2.8.0 and < 2.9.0
configurations.all {
    resolutionStrategy {
        force "com.fasterxml.jackson.module:jackson-module-scala_2.11:2.9.5"
    }
}
jar {
    manifest {
        attributes(
            'Class-Path': configurations.compile.files.collect { "$it.name" }.join(' '),
            'Main-Class': 'com.github.MainClass'
        )
    }
    from {
        files(sourceSets.main.output.classesDirs)
        configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
        configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
    } {
        exclude "META-INF/*.SF"
        exclude "META-INF/*.DSA"
        exclude "META-INF/*.RSA"
    }
    zip64 true
    archiveClassifier = 'jar-with-dependencies'
}
/*
task fatJar(type: Jar) {
    //manifest.from jar.manifest
    manifest {
        attributes 'Main-Class': 'com.github.MainClass'
    }
    zip64 true
    archiveClassifier = 'jar-with-dependencies'
    from files(sourceSets.main.output.classesDirs)
    from {
        configurations.runtime.asFileTree.files.collect { zipTree(it) }
        configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
        configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
    } {
        exclude "META-INF/*.SF"
        exclude "META-INF/*.DSA"
        exclude "META-INF/*.RSA"
    }
    with jar
}
artifacts {
    archives fatJar
}
*/
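Note that the Shadow plugin only appears above as a plain implementation dependency; it is never actually applied as a plugin. For reference, my understanding is that the usual plugin-based setup (a sketch reusing the shadow 4.0.1 coordinates from above, not something I currently have working) would look roughly like:

buildscript {
    repositories {
        gradlePluginPortal()
    }
    dependencies {
        classpath 'com.github.jengelman.gradle.plugins:shadow:4.0.1'
    }
}
apply plugin: 'com.github.johnrengelman.shadow'

// The plugin adds a shadowJar task that bundles the runtime
// dependencies into a single JAR alongside the compiled classes.
shadowJar {
    zip64 true
    archiveClassifier = 'jar-with-dependencies'
    manifest {
        attributes 'Main-Class': 'com.github.MainClass'
    }
}

which would then be built with ./gradlew shadowJar.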
I tried building the JAR with both ./gradlew clean build and ./gradlew clean fatJar, and then executed the command below:
spark-submit --master local --deploy-mode client --class com.github.MainClass /local_path/.out/libs/App-name-1.0-SNAPSHOT-jar-with-dependencies.jar
Error:
java.lang.ClassNotFoundException: com.github.MainClass
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:238)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:810)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
2020-12-02 13:02:49 INFO ShutdownHookManager:54 - Shutdown hook called
2020-12-02 13:02:49 INFO ShutdownHookManager:54 - Deleting directory /private/var/folders/sq/npjk1mkn7lgfm57mf9g_3rrh0000gn/T/spark-8ab912b5-23ef-4182-9c70-947f2cd2831a
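One way to check whether com.github.MainClass actually ended up inside the assembled JAR (rather than being missing from the archive entirely) is to list its contents:

jar tf /local_path/.out/libs/App-name-1.0-SNAPSHOT-jar-with-dependencies.jar | grep MainClass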