Use a HiveContext to save a DataFrame into Hive.

For Spark 1.6:
// Spark 1.6: create a local SparkContext, parse a sample JSON payload into
// a DataFrame via HiveContext, and display it.
val conf = new SparkConf().setAppName("jsontest").setMaster("local[*]")
val sc = new SparkContext(conf)

// Sample payload: a JSON array containing one record.
val json = """[
{
"tag_name": "A",
"item_name": "B",
"timestamp": "2018-07-09T16:59:17.8232306Z",
"value": 10
}
]"""
val jsonRdd = sc.parallelize(Seq(json))

// FIX: the property key must be exactly "hive.metastore.uris".
// The original key had a trailing ": " ("hive.metastore.uris: "), so the
// metastore URI was set under the wrong key and silently ignored.
// Set it BEFORE constructing the HiveContext so it is picked up.
System.setProperty("hive.metastore.uris", "thrift://ip:9083")

val hiveContext = new HiveContext(sc)
val df = hiveContext.read.json(jsonRdd)
df.show(false)
+---------+--------+----------------------------+-----+
|item_name|tag_name|timestamp |value|
+---------+--------+----------------------------+-----+
|B |A |2018-07-09T16:59:17.8232306Z|10 |
+---------+--------+----------------------------+-----+
//save into table
// NOTE(review): insertInto appends rows BY POSITION into an existing Hive
// table — `tableName` must already exist in the metastore with a column
// order matching the DataFrame (item_name, tag_name, timestamp, value here).
// If the table should be created automatically, use saveAsTable instead.
// TODO confirm the target table exists before running this.
df.write.mode(SaveMode.Append).insertInto(tableName)
For Spark 2.0 and later, use SparkSession with Hive support enabled instead of HiveContext:
val spark = SparkSession .builder() .appName( "SparkSessionZipsExample" ) .config( "spark.sql.warehouse.dir" , warehouseLocation) .enableHiveSupport() .getOrCreate()