0

This is my `pom.xml` file:

<?xml version="1.0" encoding="UTF-8"?>
<!-- fixed: the xsi namespace URI was "http://www.w3.org/2001/XMLSchemainstance";
     the correct, well-known URI is "http://www.w3.org/2001/XMLSchema-instance".
     With the broken URI, xsi:schemaLocation is not recognized by validating tools. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.4.1</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.activeedge</groupId>
    <artifactId>aetbigdatasoluions</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>AETProcessor</name>
    <description>Project for Data processing</description>

    <properties>
        <java.version>11</java.version>
        <!-- single property so all Spark artifacts stay on the same version;
             previously spark-core was 3.0.1 while spark-sql/streaming were 3.0.0,
             a mismatch that can cause subtle runtime incompatibilities -->
        <spark.version>3.0.1</spark.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-actuator</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-mail</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <!-- Spring's default logback clashes with Spark's bundled logging;
                 exclude it so Spark's slf4j binding wins -->
            <exclusions>
                <exclusion>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-logging</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>ch.qos.logback</groupId>
                    <artifactId>logback-classic</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <!-- NOTE(review): devtools' RestartClassLoader is a well-known cause of
             Spark executors failing to resolve application/lambda classes, which
             manifests exactly as "printSchema() works but show()/count()/jdbc()
             produce nothing". When running Spark jobs, either remove this
             dependency or set spring.devtools.restart.enabled=false. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.postgresql</groupId>
            <artifactId>postgresql</artifactId>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-validation</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.12</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.12</artifactId>
            <version>${spark.version}</version>
            <!-- NOTE(review): "provided" means this jar is absent at runtime when
                 launched via spring-boot; fine while streaming is unused, but drop
                 the scope before actually using spark-streaming in-process -->
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.crealytics</groupId>
            <artifactId>spark-excel_2.12</artifactId>
            <version>0.13.1</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <excludes>
                        <!-- lombok is compile-time only; keep it out of the fat jar -->
                        <exclude>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

This is my class file:

package com.aet.service;

import java.util.Arrays;
import java.util.Properties;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.SparkSession.Builder;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class CBASparkprocessor {
  
  @Value("${cbapath.path}")
  private String cbaPath  ;
  
 public void readcbadata() {
   
   System.out.println("calling spark method ............");
   SparkSession spark  = SparkSession.builder().appName("CBA and 
    Postillion").
       master("local")
      // .config("spark.sql.warehouse.dir", "file:///C:/temp")
       .getOrCreate()  ;
   System.out.println("called spark method ............");
   Dataset<Row> df  = spark.read().format("com.crealytics.spark.excel")
      //  .option("sheetName","Export Worksheet")
        .option("header", "true") // Required
        .option("useHeader","true")
        .option("inferSchema","true")
        .option("dateFormat", "yy-mmm-d")
        .option("treatEmptyValuesAsNulls", "true")
        .option("addColorColumns", "false")  
     // .option("ignoreLeadingWhiteSpace", "true")
      //  .option("ignoreTrailingWhiteSpace", "true")
              .option("maxRowsInMey", 20) 
        .load(cbaPath+"/atm report 17-dec-2020.xlsx") ;
    System.out.println("created df ............"); 
    df.printSchema();
   System.out.println(df.columns()[0]) ;
   System.out.println(df.col("TILL ACCT_NAME")) ;
   df.show(5);
   System.out.println(df.tail(3)) ;
   System.out.println(df.count());
   Properties prop  =  new Properties() ;
   prop.setProperty("driver", "org.postgresql.Driver") ;
   prop.setProperty("user","postgres") ;
   prop.setProperty("password","oracle") ;
   //jdbc
   df.write().mode(SaveMode.Overwrite)
   .jdbc("jdbc:postgresql://localhost:5432/postgres", "cbadata",prop);
   System.out.println("success");
  spark.close();
    return ;
 }
}

This is the full log:

  2021-01-04 03:56:25.109  INFO 5392 --- [  restartedMain] o.s.b.w.embedded.tomcat.TomcatWebServer  : Tomcat started on port(s): 2021 (http) with context path ''
    2021-01-04 03:56:25.123  INFO 5392 --- [  restartedMain] com.aet.AetProcessorApplication          : Started AetProcessorApplication in 4.763 seconds (JVM running for 6.004)
    spark in spring .....
    calling spark method ............
    WARNING: An illegal reflective access operation has occurred
    WARNING: Illegal reflective access by org.apache.spark.unsafe.Platform (file:/C:/Users/DELL/.m2/repository/org/apache/spark/spark-unsafe_2.12/3.0.1/spark-unsafe_2.12-3.0.1.jar) to constructor java.nio.DirectByteBuffer(long,int)
    WARNING: Please consider reporting this to the maintainers of org.apache.spark.unsafe.Platform
    WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
    WARNING: All illegal access operations will be denied in a future release
    2021-01-04 03:56:25.748  INFO 5392 --- [  restartedMain] org.apache.spark.SparkContext            : Running Spark version 3.0.1
    2021-01-04 03:56:26.027  WARN 5392 --- [  restartedMain] org.apache.hadoop.util.NativeCodeLoader  : Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
    2021-01-04 03:56:26.134  INFO 5392 --- [  restartedMain] org.apache.spark.resource.ResourceUtils  : ==============================================================
    2021-01-04 03:56:26.136  INFO 5392 --- [  restartedMain] org.apache.spark.resource.ResourceUtils  : Resources for spark.driver:
    
    2021-01-04 03:56:26.136  INFO 5392 --- [  restartedMain] org.apache.spark.resource.ResourceUtils  : ==============================================================
    2021-01-04 03:56:26.136  INFO 5392 --- [  restartedMain] org.apache.spark.SparkContext            : Submitted application: CBA and Postillion
    2021-01-04 03:56:26.218  INFO 5392 --- [  restartedMain] org.apache.spark.SecurityManager         : Changing view acls to: DELL
    2021-01-04 03:56:26.219  INFO 5392 --- [  restartedMain] org.apache.spark.SecurityManager         : Changing modify acls to: DELL
    2021-01-04 03:56:26.219  INFO 5392 --- [  restartedMain] org.apache.spark.SecurityManager         : Changing view acls groups to: 
    2021-01-04 03:56:26.219  INFO 5392 --- [  restartedMain] org.apache.spark.SecurityManager         : Changing modify acls groups to: 
    2021-01-04 03:56:26.220  INFO 5392 --- [  restartedMain] org.apache.spark.SecurityManager         : SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(DELL); groups with view permissions: Set(); users  with modify permissions: Set(DELL); groups with modify permissions: Set()
    2021-01-04 03:56:26.325  INFO 5392 --- [on(1)-127.0.0.1] o.a.c.c.C.[Tomcat].[localhost].[/]       : Initializing Spring DispatcherServlet 'dispatcherServlet'
    2021-01-04 03:56:26.326  INFO 5392 --- [on(1)-127.0.0.1] o.s.web.servlet.DispatcherServlet        : Initializing Servlet 'dispatcherServlet'
    2021-01-04 03:56:26.327  INFO 5392 --- [on(1)-127.0.0.1] o.s.web.servlet.DispatcherServlet        : Completed initialization in 1 ms
    2021-01-04 03:56:26.962  INFO 5392 --- [  restartedMain] org.apache.spark.util.Utils              : Successfully started service 'sparkDriver' on port 59292.
    2021-01-04 03:56:26.990  INFO 5392 --- [  restartedMain] org.apache.spark.SparkEnv                : Registering MapOutputTracker
    2021-01-04 03:56:27.025  INFO 5392 --- [  restartedMain] org.apache.spark.SparkEnv                : Registering BlockManagerMaster
    2021-01-04 03:56:27.048  INFO 5392 --- [  restartedMain] o.a.s.s.BlockManagerMasterEndpoint       : Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
    2021-01-04 03:56:27.048  INFO 5392 --- [  restartedMain] o.a.s.s.BlockManagerMasterEndpoint       : BlockManagerMasterEndpoint up
    2021-01-04 03:56:27.052  INFO 5392 --- [  restartedMain] org.apache.spark.SparkEnv                : Registering BlockManagerMasterHeartbeat
    2021-01-04 03:56:27.067  INFO 5392 --- [  restartedMain] o.apache.spark.storage.DiskBlockManager  : Created local directory at C:\Users\DELL\AppData\Local\Temp\blockmgr-045af8d1-c0b5-4529-bcbf-4df2bba990c9
    2021-01-04 03:56:27.097  INFO 5392 --- [  restartedMain] o.a.spark.storage.memory.MemoryStore     : MemoryStore started with capacity 3.4 GiB
    2021-01-04 03:56:27.114  INFO 5392 --- [  restartedMain] org.apache.spark.SparkEnv                : Registering OutputCommitCoordinator
    2021-01-04 03:56:27.210  INFO 5392 --- [  restartedMain] org.sparkproject.jetty.util.log          : Logging initialized @8091ms to org.sparkproject.jetty.util.log.Slf4jLog
    2021-01-04 03:56:27.271  INFO 5392 --- [  restartedMain] org.sparkproject.jetty.server.Server     : jetty-9.4.z-SNAPSHOT; built: 2019-04-29T20:42:08.989Z; git: e1bc35120a6617ee3df052294e433f3a25ce7097; jvm 11.0.9+7-LTS
    2021-01-04 03:56:27.291  INFO 5392 --- [  restartedMain] org.sparkproject.jetty.server.Server     : Started @8171ms
    2021-01-04 03:56:27.323  INFO 5392 --- [  restartedMain] o.s.jetty.server.AbstractConnector       : Started ServerConnector@47322531{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
    2021-01-04 03:56:27.324  INFO 5392 --- [  restartedMain] org.apache.spark.util.Utils              : Successfully started service 'SparkUI' on port 4040.
    2021-01-04 03:56:27.344  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@6e590c26{/jobs,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.346  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@78ce03b0{/jobs/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.346  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@9ec6bc5{/jobs/job,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.347  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@2d8c3532{/jobs/job/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.347  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@7f909636{/stages,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.348  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@522e0079{/stages/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.348  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@3fefd0a{/stages/stage,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.349  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@75e224f9{/stages/stage/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.350  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@146082e3{/stages/pool,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.350  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@2f74968c{/stages/pool/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.351  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@7d814329{/storage,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.351  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@104ea5ce{/storage/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.352  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@3f0007d7{/storage/rdd,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.352  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@6db187ce{/storage/rdd/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.353  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@6684dd15{/environment,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.354  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@2ee931e4{/environment/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.354  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@1d09a68f{/executors,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.355  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@ffbf151{/executors/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.355  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@2f617739{/executors/threadDump,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.357  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@43db9cfa{/executors/threadDump/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.364  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@7c6c7bcf{/static,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.365  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@55eadd2a{/,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.366  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@67de2d25{/api,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.366  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@5b2023f3{/jobs/job/kill,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.367  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@2b56d9a2{/stages/stage/kill,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.368  INFO 5392 --- [  restartedMain] org.apache.spark.ui.SparkUI              : Bound SparkUI to 0.0.0.0, and started at http://DESKTOP-TVLS5UO:4040
    2021-01-04 03:56:27.530  INFO 5392 --- [  restartedMain] org.apache.spark.executor.Executor       : Starting executor ID driver on host DESKTOP-TVLS5UO
    2021-01-04 03:56:27.557  INFO 5392 --- [  restartedMain] org.apache.spark.util.Utils              : Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 59309.
    2021-01-04 03:56:27.557  INFO 5392 --- [  restartedMain] o.a.s.n.netty.NettyBlockTransferService  : Server created on DESKTOP-TVLS5UO:59309
    2021-01-04 03:56:27.559  INFO 5392 --- [  restartedMain] org.apache.spark.storage.BlockManager    : Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
    2021-01-04 03:56:27.567  INFO 5392 --- [  restartedMain] o.a.spark.storage.BlockManagerMaster     : Registering BlockManager BlockManagerId(driver, DESKTOP-TVLS5UO, 59309, None)
    2021-01-04 03:56:27.571  INFO 5392 --- [ckManagerMaster] o.a.s.s.BlockManagerMasterEndpoint       : Registering block manager DESKTOP-TVLS5UO:59309 with 3.4 GiB RAM, BlockManagerId(driver, DESKTOP-TVLS5UO, 59309, None)
    2021-01-04 03:56:27.575  INFO 5392 --- [  restartedMain] o.a.spark.storage.BlockManagerMaster     : Registered BlockManager BlockManagerId(driver, DESKTOP-TVLS5UO, 59309, None)
    2021-01-04 03:56:27.576  INFO 5392 --- [  restartedMain] org.apache.spark.storage.BlockManager    : Initialized BlockManager: BlockManagerId(driver, DESKTOP-TVLS5UO, 59309, None)
    2021-01-04 03:56:27.590  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@4620dc6d{/metrics/json,null,AVAILABLE,@Spark}
    called spark method ............
    2021-01-04 03:56:27.881  WARN 5392 --- [  restartedMain] o.apache.spark.sql.internal.SharedState  : URL.setURLStreamHandlerFactory failed to set FsUrlStreamHandlerFactory
    2021-01-04 03:56:27.882  INFO 5392 --- [  restartedMain] o.apache.spark.sql.internal.SharedState  : Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/C:/Users/DELL/eclipse-workspace/AETProcessor/spark-warehouse').
    2021-01-04 03:56:27.882  INFO 5392 --- [  restartedMain] o.apache.spark.sql.internal.SharedState  : Warehouse path is 'file:/C:/Users/DELL/eclipse-workspace/AETProcessor/spark-warehouse'.
    2021-01-04 03:56:27.897  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@748956c{/SQL,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.897  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@3830a54{/SQL/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.898  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@759bddde{/SQL/execution,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.899  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@4d4d1316{/SQL/execution/json,null,AVAILABLE,@Spark}
    2021-01-04 03:56:27.900  INFO 5392 --- [  restartedMain] o.s.jetty.server.handler.ContextHandler  : Started o.s.j.s.ServletContextHandler@5d1115f9{/static/sql,null,AVAILABLE,@Spark}
    2021-01-04 03:56:37.788  WARN 5392 --- [tor-heartbeater] o.a.spark.executor.ProcfsMetricsGetter   : Exception when trying to compute pagesize, as a result reporting of ProcessTree metrics is stopped
    2021-01-04 03:56:41.699  INFO 5392 --- [  restartedMain] org.apache.spark.SparkContext            : Starting job: aggregate at InferSchema.scala:33
    2021-01-04 03:56:41.713  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Got job 0 (aggregate at InferSchema.scala:33) with 1 output partitions
    2021-01-04 03:56:41.714  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Final stage: ResultStage 0 (aggregate at InferSchema.scala:33)
    2021-01-04 03:56:41.714  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Parents of final stage: List()
    2021-01-04 03:56:41.715  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Missing parents: List()
    2021-01-04 03:56:41.720  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Submitting ResultStage 0 (ParallelCollectionRDD[0] at parallelize at ExcelRelation.scala:98), which has no missing parents
    2021-01-04 03:56:41.790  INFO 5392 --- [uler-event-loop] o.a.spark.storage.memory.MemoryStore     : Block broadcast_0 stored as values in memory (estimated size 2.6 KiB, free 3.4 GiB)
    2021-01-04 03:56:41.850  INFO 5392 --- [uler-event-loop] o.a.spark.storage.memory.MemoryStore     : Block broadcast_0_piece0 stored as bytes in memory (estimated size 1497.0 B, free 3.4 GiB)
    2021-01-04 03:56:41.853  INFO 5392 --- [ckManagerMaster] o.apache.spark.storage.BlockManagerInfo  : Added broadcast_0_piece0 in memory on DESKTOP-TVLS5UO:59309 (size: 1497.0 B, free: 3.4 GiB)
    2021-01-04 03:56:41.855  INFO 5392 --- [uler-event-loop] org.apache.spark.SparkContext            : Created broadcast 0 from broadcast at DAGScheduler.scala:1223
    2021-01-04 03:56:41.873  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Submitting 1 missing tasks from ResultStage 0 (ParallelCollectionRDD[0] at parallelize at ExcelRelation.scala:98) (first 15 tasks are for partitions Vector(0))
    2021-01-04 03:56:41.874  INFO 5392 --- [uler-event-loop] o.a.spark.scheduler.TaskSchedulerImpl    : Adding task set 0.0 with 1 tasks
    2021-01-04 03:56:41.966  INFO 5392 --- [er-event-loop-0] o.apache.spark.scheduler.TaskSetManager  : Starting task 0.0 in stage 0.0 (TID 0, DESKTOP-TVLS5UO, executor driver, partition 0, PROCESS_LOCAL, 8733 bytes)
    2021-01-04 03:56:41.977  INFO 5392 --- [rker for task 0] org.apache.spark.executor.Executor       : Running task 0.0 in stage 0.0 (TID 0)
    2021-01-04 03:56:42.088  INFO 5392 --- [rker for task 0] org.apache.spark.executor.Executor       : Finished task 0.0 in stage 0.0 (TID 0). 1016 bytes result sent to driver
    2021-01-04 03:56:42.095  INFO 5392 --- [result-getter-0] o.apache.spark.scheduler.TaskSetManager  : Finished task 0.0 in stage 0.0 (TID 0) in 173 ms on DESKTOP-TVLS5UO (executor driver) (1/1)
    2021-01-04 03:56:42.097  INFO 5392 --- [result-getter-0] o.a.spark.scheduler.TaskSchedulerImpl    : Removed TaskSet 0.0, whose tasks have all completed, from pool 
    2021-01-04 03:56:42.102  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : ResultStage 0 (aggregate at InferSchema.scala:33) finished in 0.367 s
    2021-01-04 03:56:42.109  INFO 5392 --- [uler-event-loop] org.apache.spark.scheduler.DAGScheduler  : Job 0 is finished. Cancelling potential speculative or zombie tasks for this job
    2021-01-04 03:56:42.109  INFO 5392 --- [uler-event-loop] o.a.spark.scheduler.TaskSchedulerImpl    : Killing all running tasks in stage 0: Stage finished
    2021-01-04 03:56:42.112  INFO 5392 --- [  restartedMain] org.apache.spark.scheduler.DAGScheduler  : Job 0 finished: aggregate at InferSchema.scala:33, took 0.412763 s
    2021-01-04 03:56:42.711  INFO 5392 --- [ckManagerMaster] o.apache.spark.storage.BlockManagerInfo  : Removed broadcast_0_piece0 on DESKTOP-TVLS5UO:59309 in memory (size: 1497.0 B, free: 3.4 GiB)
    created df ............
    root
     |-- HTD_TRAN_DATE: string (nullable = true)
     |-- tran_Amt: double (nullable = true)
     |-- TILL ACCT_NAME: string (nullable = true)
     |-- TRAN_ID: string (nullable = true)
     |-- REF_NUM: string (nullable = true)
     |-- TILL ACCT NUM: string (nullable = true)
     |-- tranRmk: string (nullable = true)
     |-- HTD_VALUE_DATE: string (nullable = true)
     |-- HTD_PSTD_USER_ID: string (nullable = true)
     |-- GAM_SOL_ID: string (nullable = true)
     |-- STAN: string (nullable = true)
     |-- retrieval_number: string (nullable = true)
    
    HTD_TRAN_DATE
    TILL ACCT_NAME

As you can see, the Spark DataFrame is created and I can print out the schema, but nothing is printed when I call the show method. I have also tried df.count() — nothing is printed — and even the jdbc method to save the data to the database does not work. Has anybody experienced this? Kindly help.

João Dias
  • 16,277
  • 6
  • 33
  • 45

0 Answers0