From 06fcb2769a18aae72abaa67437c546317c10024c Mon Sep 17 00:00:00 2001
From: WilliamZhu
Date: Mon, 17 Dec 2018 17:14:14 +0800
Subject: [PATCH] sometimes the executor fails to get SparkEnv

---
 .../main/java/org/apache/spark/ps/cluster/PSServiceSink.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/streamingpro-spark-2.3.0-adaptor/src/main/java/org/apache/spark/ps/cluster/PSServiceSink.scala b/streamingpro-spark-2.3.0-adaptor/src/main/java/org/apache/spark/ps/cluster/PSServiceSink.scala
index 74e078ab8..8b84e03f9 100644
--- a/streamingpro-spark-2.3.0-adaptor/src/main/java/org/apache/spark/ps/cluster/PSServiceSink.scala
+++ b/streamingpro-spark-2.3.0-adaptor/src/main/java/org/apache/spark/ps/cluster/PSServiceSink.scala
@@ -83,8 +83,6 @@ class PSServiceSink(val property: Properties, val registry: MetricRegistry,
     psDriverUrl = "spark://ps-driver-endpoint@" + psDriverHost + ":" + psDriverPort
   }
 
-  parseArgs
-
   def createRpcEnv = {
     val isDriver = env.executorId == SparkContext.DRIVER_IDENTIFIER
     val bindAddress = hostname
@@ -108,6 +106,7 @@ class PSServiceSink(val property: Properties, val registry: MetricRegistry,
       Thread.sleep(3000)
       logInfo(s"start PSExecutor;env:${env}")
       if (env.executorId != SparkContext.DRIVER_IDENTIFIER) {
+        parseArgs
        val rpcEnv = createRpcEnv
        val pSExecutorBackend = new PSExecutorBackend(env, rpcEnv, psDriverUrl, psExecutorId, hostname, cores)
        PSExecutorBackend.executorBackend = Some(pSExecutorBackend)
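
Note on the change (a reviewer sketch, not part of the patch): the commit message says that getting SparkEnv on the executor sometimes fails. The sink is constructed while the executor is still bootstrapping, so running parseArgs at construction time can observe an environment that is not yet published. Moving the call into the background thread, after the Thread.sleep(3000), defers it until startup has settled. Below is a minimal Scala sketch of the same deferred-access idea, assuming only the public SparkEnv.get accessor (which returns null before the executor's env is set); the DeferredEnvAccess object, the awaitSparkEnv helper, and its parameters are hypothetical illustrations, not code from this repository:

    import org.apache.spark.SparkEnv

    object DeferredEnvAccess {
      // Hypothetical helper: poll until the executor's SparkEnv has been
      // published, up to a bounded number of attempts. SparkEnv.get returns
      // null until the executor finishes setting up its environment.
      def awaitSparkEnv(maxAttempts: Int = 10, intervalMs: Long = 3000L): SparkEnv = {
        var attempts = 0
        var env = SparkEnv.get
        while (env == null && attempts < maxAttempts) {
          Thread.sleep(intervalMs) // same idea as the patch's fixed 3s delay
          env = SparkEnv.get
          attempts += 1
        }
        require(env != null, s"SparkEnv still unavailable after $attempts attempts")
        env
      }
    }

A bounded retry like this is sturdier than a single fixed sleep, since executor startup time varies with cluster load; the patch's fixed delay keeps the change minimal instead.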