Commit 939ae8c

Merged hops code fixes

1 parent 075447b · commit 939ae8c

20 files changed: +344 additions, -41 deletions

common/network-yarn/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@
 
     <!-- Provided dependencies -->
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
      <artifactId>hadoop-client</artifactId>
     </dependency>
     <dependency>

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@
       <artifactId>xbean-asm6-shaded</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-client</artifactId>
     </dependency>
     <dependency>

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 2 additions & 0 deletions
@@ -541,6 +541,8 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.keytab, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.keytab"),
 
       // Other options
+      OptionAssigner(args.executorGPUs, STANDALONE | YARN, ALL_DEPLOY_MODES,
+        confKey = "spark.executor.gpus"),
       OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.executor.cores"),
       OptionAssigner(args.executorMemory, STANDALONE | MESOS | YARN | KUBERNETES, ALL_DEPLOY_MODES,
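For context, a minimal usage sketch (not part of the commit): with the OptionAssigner above in place, a value supplied for spark.executor.gpus is forwarded to executors in standalone and YARN deploy modes. Only the property name comes from the diff; the application code around it is illustrative.

    import org.apache.spark.SparkConf
    import org.apache.spark.sql.SparkSession

    // Illustrative app; "spark.executor.gpus" is the key wired up by this commit.
    val conf = new SparkConf()
      .setAppName("gpu-example")         // placeholder app name
      .set("spark.executor.cores", "4")
      .set("spark.executor.gpus", "1")   // new key routed by the OptionAssigner above

    val spark = SparkSession.builder.config(conf).getOrCreate()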

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 27 additions & 0 deletions
@@ -46,6 +46,7 @@
   var deployMode: String = null
   var executorMemory: String = null
   var executorCores: String = null
+  var executorGPUs: String = null
   var totalExecutorCores: String = null
   var propertiesFile: String = null
   var driverMemory: String = null
@@ -70,6 +71,8 @@
   var isPython: Boolean = false
   var pyFiles: String = null
   var isR: Boolean = false
+  var isTensorFlow: String = null
+  var tensorflowNumPs: String = null
   var action: SparkSubmitAction = null
   val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
   var proxyUser: String = null
@@ -181,6 +184,10 @@
     totalExecutorCores = Option(totalExecutorCores)
       .orElse(sparkProperties.get("spark.cores.max"))
       .orNull
+    executorGPUs = Option(executorGPUs)
+      .orElse(sparkProperties.get("spark.executor.gpus"))
+      .orElse(env.get("SPARK_EXECUTOR_GPUS"))
+      .orNull
     name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
     jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
     files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
@@ -198,6 +205,14 @@
       .orNull
     numExecutors = Option(numExecutors)
       .getOrElse(sparkProperties.get("spark.executor.instances").orNull)
+    isTensorFlow = Option(isTensorFlow)
+      .orElse(sparkProperties.get("spark.tensorflow.application"))
+      .orElse(env.get("SPARK_TENSORFLOW_APPLICATION"))
+      .getOrElse("false")
+    tensorflowNumPs = Option(tensorflowNumPs)
+      .orElse(sparkProperties.get("spark.tensorflow.num.ps"))
+      .orElse(env.get("SPARK_TENSORFLOW_NUM_PS"))
+      .getOrElse("0")
     queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
     keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
     principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
@@ -326,6 +341,7 @@
     |  deployMode             $deployMode
     |  executorMemory         $executorMemory
     |  executorCores          $executorCores
+    |  executorGPUs           $executorGPUs
     |  totalExecutorCores     $totalExecutorCores
     |  propertiesFile         $propertiesFile
     |  driverMemory           $driverMemory
@@ -335,6 +351,8 @@
     |  driverExtraJavaOptions $driverExtraJavaOptions
     |  supervise              $supervise
     |  queue                  $queue
+    |  isTensorFlow           $isTensorFlow
+    |  tensorflowNumPs        $tensorflowNumPs
     |  numExecutors           $numExecutors
     |  files                  $files
     |  pyFiles                $pyFiles
@@ -379,6 +397,9 @@
       case TOTAL_EXECUTOR_CORES =>
         totalExecutorCores = value
 
+      case EXECUTOR_GPUS =>
+        executorGPUs = value
+
       case EXECUTOR_CORES =>
         executorCores = value
 
@@ -423,6 +444,12 @@
       case QUEUE =>
        queue = value
 
+      case IS_TENSORFLOW =>
+        isTensorFlow = value
+
+      case NUM_TENSORFLOW_PS =>
+        tensorflowNumPs = value
+
       case FILES =>
         files = Utils.resolveURIs(value)
 
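Each new setting follows the same resolution order as the existing ones: explicit command-line value, then Spark property, then environment variable, then default. A minimal standalone sketch of that chain (illustrative, not the Spark source):

    // CLI value -> spark property -> environment variable -> default.
    def resolve(cli: Option[String], propKey: String, envKey: String, default: String,
                props: Map[String, String], env: Map[String, String]): String =
      cli.orElse(props.get(propKey)).orElse(env.get(envKey)).getOrElse(default)

    val props = Map("spark.tensorflow.num.ps" -> "2")
    resolve(None, "spark.tensorflow.num.ps", "SPARK_TENSORFLOW_NUM_PS", "0", props, sys.env)
    // => "2": the property wins because no CLI value was given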

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 4 additions & 0 deletions
@@ -235,6 +235,7 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {
     var executorId: String = null
     var hostname: String = null
     var cores: Int = 0
+    var gpus: Int = 0
     var appId: String = null
     var workerUrl: Option[String] = None
     val userClassPath = new mutable.ListBuffer[URL]()
@@ -254,6 +255,9 @@
       case ("--cores") :: value :: tail =>
         cores = value.toInt
         argv = tail
+      case ("--gpus") :: value :: tail =>
+        gpus = value.toInt
+        argv = tail
       case ("--app-id") :: value :: tail =>
         appId = value
         argv = tail
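The parser peels "--flag value" pairs off a List[String] with pattern matching, and the new --gpus case reuses that shape. A self-contained sketch of the pattern (illustrative, not the Spark source):

    // Peel "--flag value" pairs off a list, as the --gpus case above does.
    var gpus = 0
    var argv: List[String] = List("--gpus", "2")
    while (argv.nonEmpty) {
      argv match {
        case "--gpus" :: value :: tail =>
          gpus = value.toInt
          argv = tail
        case unknown :: _ =>
          sys.error(s"Unknown argument: $unknown")
      }
    }
    assert(gpus == 2)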

external/kafka-0-10-assembly/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-client</artifactId>
       <scope>provided</scope>
     </dependency>

external/kinesis-asl-assembly/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -90,7 +90,7 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-client</artifactId>
       <scope>provided</scope>
     </dependency>

hadoop-cloud/pom.xml

Lines changed: 7 additions & 7 deletions
@@ -59,7 +59,7 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-client</artifactId>
       <version>${hadoop.version}</version>
       <scope>provided</scope>
@@ -69,13 +69,13 @@
       intra-jackson-module version problems.
     -->
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-aws</artifactId>
       <version>${hadoop.version}</version>
       <scope>${hadoop.deps.scope}</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.hadoop</groupId>
+          <groupId>io.hops</groupId>
           <artifactId>hadoop-common</artifactId>
         </exclusion>
         <exclusion>
@@ -105,13 +105,13 @@
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-openstack</artifactId>
       <version>${hadoop.version}</version>
       <scope>${hadoop.deps.scope}</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.hadoop</groupId>
+          <groupId>io.hops</groupId>
           <artifactId>hadoop-common</artifactId>
         </exclusion>
         <exclusion>
@@ -179,13 +179,13 @@
       Hadoop WASB client only arrived in Hadoop 2.7
     -->
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-azure</artifactId>
       <version>${hadoop.version}</version>
       <scope>${hadoop.deps.scope}</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.hadoop</groupId>
+          <groupId>io.hops</groupId>
           <artifactId>hadoop-common</artifactId>
         </exclusion>
         <exclusion>

launcher/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@
 
     <!-- Not needed by the test code, but referenced by SparkSubmit which is used by the tests. -->
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>io.hops</groupId>
       <artifactId>hadoop-client</artifactId>
       <scope>test</scope>
     </dependency>

launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java

Lines changed: 7 additions & 0 deletions
@@ -63,6 +63,13 @@ public class SparkLauncher extends AbstractLauncher<SparkLauncher> {
   public static final String EXECUTOR_EXTRA_LIBRARY_PATH = "spark.executor.extraLibraryPath";
   /** Configuration key for the number of executor CPU cores. */
   public static final String EXECUTOR_CORES = "spark.executor.cores";
+
+  /** Configuration key for the number of executor GPUs. */
+  public static final String EXECUTOR_GPUS = "spark.executor.gpus";
+
+  static final String IS_TENSORFLOW = "spark.tensorflow.application";
+
+  static final String NUM_TENSORFLOW_PS = "spark.tensorflow.num.ps";
 
   static final String PYSPARK_DRIVER_PYTHON = "spark.pyspark.driver.python";
 
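A hypothetical launch sketch using the new keys. Only the constant and property names come from the diff; the jar path, class name, and values are placeholders. Note that IS_TENSORFLOW and NUM_TENSORFLOW_PS are package-private, so external callers would pass the string keys directly:

    import org.apache.spark.launcher.SparkLauncher

    // Placeholder jar/class; the conf keys are the ones introduced above.
    val handle = new SparkLauncher()
      .setAppResource("/path/to/app.jar")                 // hypothetical path
      .setMainClass("com.example.Main")                   // hypothetical class
      .setConf(SparkLauncher.EXECUTOR_GPUS, "1")
      .setConf("spark.tensorflow.application", "true")    // IS_TENSORFLOW (package-private)
      .setConf("spark.tensorflow.num.ps", "2")            // NUM_TENSORFLOW_PS (package-private)
      .startApplication()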
