I am trying to convert a string column to a date, but I always get back null or default values instead of the expected date.

%scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, to_date}

Seq(("20110813"), ("20090724")).toDF("Date").select(
  col("Date"),
  to_date(col("Date"), "yyyy-mm-dd").as("to_date")
).show()
+--------+-------+
| Date|to_date|
+--------+-------+
|20110813| null|
|20090724| null|
+--------+-------+
If I change the format to "yyyymmdd" the nulls go away, but the month always comes back as the default (01):

Seq(("20110813"), ("20090724")).toDF("Date").select(
  col("Date"),
  to_date(col("Date"), "yyyymmdd").as("to_date")
).show()

+--------+----------+
|    Date|   to_date|
+--------+----------+
|20110813|2011-01-13|
|20090724|2009-01-24|
+--------+----------+
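For reference, here is a self-contained sketch of what I am running. The local SparkSession setup and the wrapper object are just assumptions so the snippet compiles outside the Databricks notebook; the data and the two format strings are exactly the ones above.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, to_date}

// Hypothetical wrapper object, only so this runs as a standalone program.
object ToDateRepro {
  def main(args: Array[String]): Unit = {
    // Local session only so the example runs outside Databricks (assumption).
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("to_date repro")
      .getOrCreate()
    import spark.implicits._

    // Same two sample strings as in the notebook cell above.
    val df = Seq("20110813", "20090724").toDF("Date")

    // Attempt 1: "yyyy-mm-dd" -- every row comes back null, as shown above.
    df.select(col("Date"), to_date(col("Date"), "yyyy-mm-dd").as("to_date")).show()

    // Attempt 2: "yyyymmdd" -- rows parse, but the month is always 01, as shown above.
    df.select(col("Date"), to_date(col("Date"), "yyyymmdd").as("to_date")).show()

    spark.stop()
  }
}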