I have this sbt file:
// Work fully offline: sbt fails fast instead of trying the network.
offline := true
name := "hello"
version := "1.0"
scalaVersion := "2.11.7-local"
scalaHome := Some(file("/home/ubuntu/software/scala-2.11.7"))
// Use the local Spark distribution instead of resolving from the web:
// every jar under its lib/ directory (including the spark-assembly jar)
// goes on the compile classpath as an unmanaged dependency.
unmanagedJars in Compile ++= (file("/home/ubuntu/software/spark-1.5.0-bin-hadoop2.6/lib") ** "*.jar").classpath
// The managed dependency below is what forces sbt onto the network; with the
// unmanaged jars above it is redundant, so it is disabled. Re-enable it only
// if the artifact is already present in the local ivy cache.
// libraryDependencies += "org.apache.spark" %% "spark-core" % "1.5.0" % "provided"
How can I tell sbt to use this local Spark installation for the
dependency instead of downloading it from the web?
/home/ubuntu/software/spark-1.5.0-bin-hadoop2.6
Currently sbt tries to connect to the internet to resolve the Spark dependencies, and my VM has no internet access due to security restrictions.
I eventually want to run this simple code:
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.api.java._
import org.apache.spark.api.java.function._
import org.apache.spark.graphx._
import org.apache.spark.graphx.lib._
import org.apache.spark.graphx.PartitionStrategy._
import org.apache.spark.sql.SQLContext
/** Driver for assignment 2, part B question 1: connects to a standalone
  * Spark cluster and loads an edge list into a GraphX graph.
  */
object PartBQ1 {

  // Cluster master plus resource and event-log settings for the job.
  val conf = new SparkConf().setMaster("spark://10.0.1.31:7077")
    .setAppName("CS-838-Assignment2-Question2")
    .set("spark.driver.memory", "1g")
    .set("spark.eventLog.enabled", "true")
    .set("spark.eventLog.dir", "/home/ubuntu/storage/logs")
    .set("spark.executor.memory", "21g")
    .set("spark.executor.cores", "4")
    .set("spark.cores.max", "4")
    .set("spark.task.cpus", "1")

  // NOTE: SparkContext's constructor parameter is named `config`, so the
  // original `new SparkContext(conf=conf)` did not compile; pass positionally.
  val sc = new SparkContext(conf)
  val sql_ctx = new SQLContext(sc)

  // data2.txt must be readable at the same path on every executor
  // (shared filesystem or HDFS) -- TODO confirm deployment layout.
  val graph = GraphLoader.edgeListFile(sc, "data2.txt")

  // Entry point so the object can be launched with spark-submit; the vals
  // above are initialized (and the graph loaded) on first access.
  def main(args: Array[String]): Unit = {
    println(s"Loaded graph with ${graph.vertices.count()} vertices")
    sc.stop()
  }
}