import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{avg, col, max, min}
// Obtain (or reuse) the active Spark session.
val spark = SparkSession.builder().getOrCreate()

// Read the CSV with a header row; all columns arrive as strings.
val data = spark.read.option("header", "true").format("csv").load("cpid_data.csv")

// Per-id aggregates over the numeric "value" column.
// BUG FIX: `.show()` returns Unit, so it must NOT be the last call in this
// assignment — otherwise `equi` is Unit and `equi.write` fails with
// "value write is not a member of Unit". Keep the DataFrame in `equi`
// and display it as a separate statement.
val equi = data
  .withColumn("value", col("value").cast("double"))
  .groupBy("id")
  .agg(avg("value"), max("value"), min("value"))

// Display for inspection (side effect only; result is discarded).
equi.show()

// `equi` is now a DataFrame, so writing to CSV works.
// Note: `csv(...)` writes a DIRECTORY of part files named "cpido.csv",
// not a single plain file.
equi.write.option("header", "true").csv("cpido.csv")
With the code above, when I try to write the output to a CSV file like this,
equi.write.option("header",true).csv("cpido.csv")
it throws a compile error: `value write is not a member of Unit`.
Could anyone help me with this? How do I save the aggregated output to a CSV file?