Merge pull request amplab#71 from aarondav/scdefaults
Spark shell exits if it cannot create SparkContext

Mainly, this occurs if you provide a malformed MASTER URL (one that does not match any of our regexes). Previously, we would default to Mesos, fail, and then start the shell anyway, except that any Spark command would then fail. Simply exiting seems clearer.
mateiz committed Oct 19, 2013
2 parents fc26e5b + 7473726 commit 8d528af
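
To make the change concrete, here is a minimal, self-contained sketch of the pattern this commit adopts: dispatch on the master URL with regex extractors and fail fast on anything unrecognized. It is illustrative only; MasterUrlDemo, describe, and the exact regex patterns are assumptions for demonstration, not the real SparkContext source.

// Illustrative sketch only: MasterUrlDemo, describe, and these exact regexes
// are assumptions for demonstration, not the real SparkContext source.
object MasterUrlDemo {
  // "local" or "local[N]" style URLs
  val LOCAL_REGEX = """local(\[[0-9*]+\])?""".r
  // Captures everything after the mesos:// prefix, e.g. "host:5050"
  val MESOS_REGEX = """mesos://(.+)""".r

  def describe(master: String): String = master match {
    case LOCAL_REGEX(_)   => "local scheduler"
    case MESOS_REGEX(url) => "Mesos scheduler backend at " + url
    case _ =>
      // After this commit, an unrecognized URL fails fast here instead of
      // being treated as a Mesos URL and failing later.
      throw new IllegalArgumentException("Could not parse Master URL: '" + master + "'")
  }

  def main(args: Array[String]): Unit = {
    println(describe("local[4]"))          // prints: local scheduler
    println(describe("mesos://host:5050")) // prints: Mesos scheduler backend at host:5050
    // describe("spark//typo:7077") would now throw rather than default to Mesos.
  }
}

The second diff below pairs this with a try/catch in the REPL, so a bad URL exits the shell immediately instead of leaving a shell in which every Spark command fails.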
Showing 2 changed files with 14 additions and 8 deletions.
13 changes: 6 additions & 7 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler

-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "") // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
9 changes: 8 additions & 1 deletion repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }

