
I am using Drools with Spark to execute a set of rules. I have written a method that loads the .drl file and instantiates an "InternalKnowledgeBase" object. The code below works in local mode, but when I run it on a cluster (EMR) I get the exception below. I am attaching the code and the exception stack trace.

import org.drools.core.impl.InternalKnowledgeBase
import org.kie.api.io.ResourceType
import org.kie.internal.KnowledgeBaseFactory
import org.kie.internal.builder.KnowledgeBuilderFactory
import org.kie.internal.io.ResourceFactory

def loadDrl(drlFilePath: String): Option[InternalKnowledgeBase] = {
  try {
    // Load the .drl file from the classpath
    val resource = ResourceFactory.newClassPathResource(drlFilePath)
    val kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder() // Exception occurs here
    kbuilder.add(resource, ResourceType.DRL)

    if (kbuilder.hasErrors()) {
      throw new RuntimeException(kbuilder.getErrors().toString())
    }

    // Build the knowledge base and add the compiled packages
    val kbase = KnowledgeBaseFactory.newKnowledgeBase()
    kbase.addPackages(kbuilder.getKnowledgePackages())
    Some(kbase)
  } catch {
    case e: Exception =>
      e.printStackTrace()
      None
  }
}
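
For context, here is a minimal sketch of how the returned knowledge base would be consumed (the rule file path and the commented fact insertion are placeholders, not my actual code):

// Sketch only: "rules/sample.drl" is a placeholder path
loadDrl("rules/sample.drl").foreach { kbase =>
  val session = kbase.newKieSession() // InternalKnowledgeBase extends KieBase
  try {
    // session.insert(fact)  // insert domain facts here
    session.fireAllRules()
  } finally {
    session.dispose()
  }
}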

Please find the exception stack trace below:

java.lang.NullPointerException
    at org.kie.internal.builder.KnowledgeBuilderFactory.newKnowledgeBuilder(KnowledgeBuilderFactory.java:48)
    at $line27.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.loadDrl(<pastie>:44)
    at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:39)
    at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
    at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)
    at $line33.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)
    at $line33.$read$$iw$$iw$$iw$$iw.<init>(<console>:50)
    at $line33.$read$$iw$$iw$$iw.<init>(<console>:52)
    at $line33.$read$$iw$$iw.<init>(<console>:54)
    at $line33.$read$$iw.<init>(<console>:56)
    at $line33.$read.<init>(<console>:58)
    at $line33.$read$.<init>(<console>:62)
    at $line33.$read$.<clinit>(<console>)
    at $line33.$eval$.$print$lzycompute(<console>:7)
    at $line33.$eval$.$print(<console>:6)
    at $line33.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)
    at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)
    at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
    at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
    at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
    at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:819)
    at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:691)
    at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:404)
    at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:425)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:285)
    at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
    at org.apache.spark.repl.Main$.doMain(Main.scala:78)
    at org.apache.spark.repl.Main$.main(Main.scala:58)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Appreciate your help! Thanks.


1 Answer

I had a similar issue a while ago. Mine was related to building a fat jar with the Maven Shade Plugin; I resolved it after finding one of these questions (I don't remember which one exactly). Drools discovers its services through META-INF/kie.conf files, and when several jars are merged into one shaded jar only one of those files survives unless they are explicitly appended, which is what leads to the NullPointerException in newKnowledgeBuilder.

I had to add this transformer to my pom.xml:

<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
    <resource>META-INF/kie.conf</resource>
</transformer>
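
For reference, here is a sketch of where that transformer sits inside the shade plugin configuration (the plugin version is illustrative):

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-shade-plugin</artifactId>
  <version>3.2.4</version> <!-- illustrative version -->
  <executions>
    <execution>
      <phase>package</phase>
      <goals>
        <goal>shade</goal>
      </goals>
      <configuration>
        <transformers>
          <!-- Merge every META-INF/kie.conf instead of keeping only one -->
          <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
            <resource>META-INF/kie.conf</resource>
          </transformer>
        </transformers>
      </configuration>
    </execution>
  </executions>
</plugin>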