Skip to content

Commit 23256be

Browse files
seyfe authored and srowen committed
[SPARK-14576][WEB UI] Spark console should display Web UI url
## What changes were proposed in this pull request?

This is a proposal to print the Spark Driver UI link when spark-shell is launched.

## How was this patch tested?

Launched spark-shell in local mode and cluster mode. Spark-shell console output included the following line: "Spark context Web UI available at <Spark web url>"

Author: Ergin Seyfe <[email protected]>

Closes apache#12341 from seyfe/spark_console_display_webui_link.
1 parent 7c6937a commit 23256be

File tree

4 files changed

+12
-6
lines changed

4 files changed

+12
-6
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

+2
Original file line numberDiff line numberDiff line change
@@ -280,6 +280,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
280280

281281
private[spark] def ui: Option[SparkUI] = _ui
282282

283+
def uiWebUrl: Option[String] = _ui.map(_.webUrl)
284+
283285
/**
284286
* A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse.
285287
*

core/src/main/scala/org/apache/spark/ui/WebUI.scala

+8-6
Original file line numberDiff line numberDiff line change
@@ -133,26 +133,28 @@ private[spark] abstract class WebUI(
133133

134134
/** Bind to the HTTP server behind this web interface. */
135135
def bind() {
136-
assert(!serverInfo.isDefined, "Attempted to bind %s more than once!".format(className))
136+
assert(!serverInfo.isDefined, s"Attempted to bind $className more than once!")
137137
try {
138-
var host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
138+
val host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
139139
serverInfo = Some(startJettyServer(host, port, sslOptions, handlers, conf, name))
140-
logInfo("Bound %s to %s, and started at http://%s:%d".format(className, host,
141-
publicHostName, boundPort))
140+
logInfo(s"Bound $className to $host, and started at $webUrl")
142141
} catch {
143142
case e: Exception =>
144-
logError("Failed to bind %s".format(className), e)
143+
logError(s"Failed to bind $className", e)
145144
System.exit(1)
146145
}
147146
}
148147

148+
/** Return the url of web interface. Only valid after bind(). */
149+
def webUrl: String = s"http://$publicHostName:$boundPort"
150+
149151
/** Return the actual port to which this server is bound. Only valid after bind(). */
150152
def boundPort: Int = serverInfo.map(_.boundPort).getOrElse(-1)
151153

152154
/** Stop the server behind this web interface. Only valid after bind(). */
153155
def stop() {
154156
assert(serverInfo.isDefined,
155-
"Attempted to stop %s before binding to a server!".format(className))
157+
s"Attempted to stop $className before binding to a server!")
156158
serverInfo.get.stop()
157159
}
158160
}

repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala

+1
Original file line numberDiff line numberDiff line change
@@ -125,6 +125,7 @@ private[repl] trait SparkILoopInit {
125125
command("""
126126
@transient val sc = {
127127
val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
128+
_sc.uiWebUrl.foreach(webUrl => println(s"Spark context Web UI available at ${webUrl}"))
128129
println("Spark context available as 'sc' " +
129130
s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
130131
_sc

repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala

+1
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
3838
processLine("""
3939
@transient val sc = {
4040
val _sc = org.apache.spark.repl.Main.createSparkContext()
41+
_sc.uiWebUrl.foreach(webUrl => println(s"Spark context Web UI available at ${webUrl}"))
4142
println("Spark context available as 'sc' " +
4243
s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
4344
_sc

0 commit comments

Comments (0)