I adapted the answer from this question here. You can also store the join types in a JSON file and read them at runtime. You can check this answer for JSON object handling: JsonParsing
Update 1: I updated the answer to follow the approach from the Spark documentation's JoinType
import org.apache.spark._
import org.apache.spark.sql._
import org.apache.spark.sql.expressions._
import org.apache.spark.sql.functions._
/**
 * Demo: runs every supported join type over two small Datasets when the
 * requested join type is valid, and rejects unknown join types with an
 * [[IllegalArgumentException]] mirroring Spark's own JoinType error message.
 */
object SparkSandbox extends App {

  // NOTE(review): this local `Row` shadows org.apache.spark.sql.Row — fine in a
  // sandbox, but rename it if Spark's Row is ever needed in this scope.
  case class Row(id: Int, value: String)

  private[this] implicit val spark: SparkSession =
    SparkSession.builder().master("local[*]").getOrCreate()

  import spark.implicits._

  // Quiet Spark's default INFO logging for readable demo output.
  spark.sparkContext.setLogLevel("ERROR")

  val r1 = Seq(Row(1, "A1"), Row(2, "A2"), Row(3, "A3"), Row(4, "A4")).toDS()
  val r2 = Seq(Row(3, "A3"), Row(4, "A4"), Row(4, "A4_1"), Row(5, "A5"), Row(6, "A6")).toDS()

  val validUserJoinType = "inner"
  // NOTE(review): name kept for source compatibility; "inValied" is a typo for "invalid".
  val inValiedUserJoinType = "nothing"

  // Join-type strings this sandbox accepts and demonstrates.
  val joinTypes = Seq("inner", "outer", "full", "full_outer", "left", "left_outer",
    "right", "right_outer", "left_semi", "left_anti")

  // BUG FIX: the original wrote `case x => if (joinTypes.contains(x)) {...}`
  // followed by `case _ => throw ...`. `case x` is irrefutable, so it matched
  // EVERY input and the error branch was unreachable — an invalid join type
  // was silently ignored instead of rejected. A pattern guard restores the
  // intended validation.
  inValiedUserJoinType match {
    case x if joinTypes.contains(x) =>
      println("do some logic")
      joinTypes.foreach { joinType =>
        println(s"${joinType.toUpperCase()} JOIN")
        r1.join(right = r2, usingColumns = Seq("id"), joinType = joinType).orderBy("id").show()
      }
    case other =>
      // Alias list mirrors the names accepted by Spark's JoinType.apply.
      val supported = Seq(
        "inner",
        "outer", "full", "fullouter", "full_outer",
        "leftouter", "left", "left_outer",
        "rightouter", "right", "right_outer",
        "leftsemi", "left_semi",
        "leftanti", "left_anti",
        "cross")
      throw new IllegalArgumentException(
        s"Unsupported join type '$other'. " +
          "Supported join types include: " + supported.mkString("'", "', '", "'") + ".")
  }
}