
[livy] Allow spark builder file paths to optionally reference local files

This lets us submit the livy-assembly jar from the local filesystem while
blocking batch users from accessing local files.
Erick Tryzelaar, 10 years ago
commit dd0a503
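
To make the mechanism concrete, here is a minimal, self-contained sketch of the path-resolution rule this commit introduces. The trait and case-class names mirror the diff below; the `fsRoot` value and the example paths are made up for illustration.

object PathResolutionSketch {
  // Sealed ADT distinguishing trusted, server-supplied paths from user input.
  sealed trait Path
  case class AbsolutePath(path: String) extends Path // used verbatim; may be a local file
  case class RelativePath(path: String) extends Path // confined to fsRoot unless already on HDFS

  // Stand-in for livyConf.filesystemRoot(); hypothetical value.
  val fsRoot = "hdfs://namenode:8020/user/livy"

  // Same resolution logic as the builder's private fromPath in the diff below.
  def fromPath(path: Path): String = path match {
    case AbsolutePath(p) => p
    case RelativePath(p) if p.startsWith("hdfs://") => p
    case RelativePath(p) => fsRoot + "/" + p
  }

  def main(args: Array[String]): Unit = {
    // Server-controlled artifact: allowed to point at the local filesystem.
    println(fromPath(AbsolutePath("/usr/lib/livy/livy-assembly.jar")))
    // Batch-user input: rewritten under fsRoot, so it cannot name a local file.
    println(fromPath(RelativePath("jobs/wordcount.jar")))
    // A fully-qualified HDFS URI from a user passes through unchanged.
    println(fromPath(RelativePath("hdfs://namenode:8020/tmp/extra.jar")))
  }
}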

+ 47 - 30
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/spark/SparkSubmitProcessBuilder.scala

@@ -18,6 +18,7 @@
 
 package com.cloudera.hue.livy.spark
 
+import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.{RelativePath, AbsolutePath, Path}
 import com.cloudera.hue.livy.{LivyConf, Logging}
 
 import scala.collection.JavaConversions._
@@ -27,20 +28,28 @@ object SparkSubmitProcessBuilder {
   def apply(livyConf: LivyConf): SparkSubmitProcessBuilder = {
     new SparkSubmitProcessBuilder(livyConf)
   }
+
+  /**
+   * Represents a path that is either allowed to reference a local file, or must exist in our
+   * cache directory or on hdfs.
+   */
+  sealed trait Path
+  case class AbsolutePath(path: String) extends Path
+  case class RelativePath(path: String) extends Path
 }
 
 class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
 
   private[this] val fsRoot = livyConf.filesystemRoot()
 
-  private[this] var _executable = "spark-submit"
+  private[this] var _executable: Path = AbsolutePath("spark-submit")
   private[this] var _master: Option[String] = None
   private[this] var _deployMode: Option[String] = None
   private[this] var _className: Option[String] = None
   private[this] var _name: Option[String] = None
-  private[this] var _jars: ArrayBuffer[String] = ArrayBuffer()
-  private[this] var _pyFiles: ArrayBuffer[String] = ArrayBuffer()
-  private[this] var _files: ArrayBuffer[String] = ArrayBuffer()
+  private[this] var _jars: ArrayBuffer[Path] = ArrayBuffer()
+  private[this] var _pyFiles: ArrayBuffer[Path] = ArrayBuffer()
+  private[this] var _files: ArrayBuffer[Path] = ArrayBuffer()
   private[this] var _conf: ArrayBuffer[(String, String)] = ArrayBuffer()
   private[this] var _driverMemory: Option[String] = None
   private[this] var _driverJavaOptions: Option[String] = None
@@ -52,14 +61,14 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
   private[this] var _executorCores: Option[String] = None
   private[this] var _queue: Option[String] = None
   private[this] var _numExecutors: Option[String] = None
-  private[this] var _archives: ArrayBuffer[String] = ArrayBuffer()
+  private[this] var _archives: ArrayBuffer[Path] = ArrayBuffer()
 
   private[this] var _env: ArrayBuffer[(String, String)] = ArrayBuffer()
   private[this] var _redirectOutput: Option[ProcessBuilder.Redirect] = None
   private[this] var _redirectError: Option[ProcessBuilder.Redirect] = None
   private[this] var _redirectErrorStream: Option[Boolean] = None
 
-  def executable(executable: String): SparkSubmitProcessBuilder = {
+  def executable(executable: Path): SparkSubmitProcessBuilder = {
     _executable = executable
     this
   }
@@ -84,33 +93,33 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     this
   }
 
-  def jar(jar: String): SparkSubmitProcessBuilder = {
-    this._jars += buildPath(jar)
+  def jar(jar: Path): SparkSubmitProcessBuilder = {
+    this._jars += jar
     this
   }
 
-  def jars(jars: Traversable[String]): SparkSubmitProcessBuilder = {
-    jars.foreach(jar)
+  def jars(jars: Traversable[Path]): SparkSubmitProcessBuilder = {
+    this._jars ++= jars
     this
   }
 
-  def pyFile(pyFile: String): SparkSubmitProcessBuilder = {
-    this._pyFiles += buildPath(pyFile)
+  def pyFile(pyFile: Path): SparkSubmitProcessBuilder = {
+    this._pyFiles += pyFile
     this
   }
 
-  def pyFiles(pyFiles: Traversable[String]): SparkSubmitProcessBuilder = {
-    pyFiles.foreach(pyFile)
+  def pyFiles(pyFiles: Traversable[Path]): SparkSubmitProcessBuilder = {
+    this._pyFiles ++= pyFiles
     this
   }
 
-  def file(file: String): SparkSubmitProcessBuilder = {
-    this._files += buildPath(file)
+  def file(file: Path): SparkSubmitProcessBuilder = {
+    this._files += file
     this
   }
 
-  def files(files: Traversable[String]): SparkSubmitProcessBuilder = {
-    files.foreach(file)
+  def files(files: Traversable[Path]): SparkSubmitProcessBuilder = {
+    this._files ++= files
     this
   }
 
@@ -177,12 +186,12 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     this
   }
 
-  def archive(archive: String): SparkSubmitProcessBuilder = {
-    _archives += buildPath(archive)
+  def archive(archive: Path): SparkSubmitProcessBuilder = {
+    _archives += archive
     this
   }
 
-  def archives(archives: Traversable[String]): SparkSubmitProcessBuilder = {
+  def archives(archives: Traversable[Path]): SparkSubmitProcessBuilder = {
     archives.foreach(archive)
     this
   }
@@ -207,8 +216,8 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     this
   }
 
-  def start(file: String, args: Traversable[String]): Process = {
-    var args_ = ArrayBuffer(_executable)
+  def start(file: Path, args: Traversable[String]): Process = {
+    var args_ = ArrayBuffer(fromPath(_executable))
 
     def addOpt(option: String, value: Option[String]): Unit = {
       value.foreach { v =>
@@ -227,9 +236,9 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     addOpt("--master", _master)
     addOpt("--deploy-mode", _deployMode)
     addOpt("--name", _name)
-    addList("--jars", _jars)
-    addList("--py-files", _pyFiles)
-    addList("--files", _files)
+    addList("--jars", _jars.map(fromPath))
+    addList("--py-files", _pyFiles.map(fromPath))
+    addList("--files", _files.map(fromPath))
     addOpt("--class", _className)
     addList("--conf", _conf.map { case (key, value) => f"$key=$value" })
     addOpt("--driver-memory", _driverMemory)
@@ -241,12 +250,12 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     addOpt("--driver-cores", _driverCores)
     addOpt("--executor-cores", _executorCores)
     addOpt("--queue", _queue)
-    addList("--archives", _archives)
+    addList("--archives", _archives.map(fromPath))
 
-    args_ += buildPath(file)
+    args_ += fromPath(file)
     args_ ++= args
 
-    info(s"Running ${args.mkString(" ")}")
+    info(s"Running ${args_.mkString(" ")}")
 
     val pb = new ProcessBuilder(args_)
     val env = pb.environment()
@@ -262,5 +271,13 @@ class SparkSubmitProcessBuilder(livyConf: LivyConf) extends Logging {
     pb.start()
   }
 
-  private def buildPath(path: String) = fsRoot + "/" + path
+  private def fromPath(path: Path) = path match {
+    case AbsolutePath(p) => p
+    case RelativePath(p) =>
+      if (p.startsWith("hdfs://")) {
+        p
+      } else {
+        fsRoot + "/" + p
+      }
+  }
 }
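
For context, a hedged sketch of how callers use the builder after this change; the class and method names come from the diffs in this commit, while the jar paths and the `submit` helper are hypothetical.

import com.cloudera.hue.livy.LivyConf
import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.{AbsolutePath, RelativePath}

// Hypothetical caller mixing user-supplied and server-supplied paths.
def submit(livyConf: LivyConf): Process = {
  val builder = SparkSubmitProcessBuilder(livyConf)
  builder.master("yarn-cluster")
  builder.jar(RelativePath("user-lib.jar")) // resolved under livyConf.filesystemRoot()
  // Only server-side code constructs AbsolutePath (cf. ProcessSession below):
  builder.start(AbsolutePath("/usr/lib/livy/livy-assembly.jar"), List("scala"))
}

As the BatchProcess and BatchYarn diffs below show, everything taken from a CreateBatchRequest is wrapped in RelativePath, so batch users can never name a local file; only the session classes, which submit the server's own livy jar, use AbsolutePath.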

+ 6 - 5
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchProcess.scala

@@ -20,6 +20,7 @@ package com.cloudera.hue.livy.server.batch
 
 import java.lang.ProcessBuilder.Redirect
 
+import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.RelativePath
 import com.cloudera.hue.livy.{LivyConf, LineBufferedProcess}
 import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
 
@@ -29,7 +30,7 @@ object BatchProcess {
   def apply(livyConf: LivyConf, id: Int, createBatchRequest: CreateBatchRequest): Batch = {
     val builder = sparkBuilder(livyConf, createBatchRequest)
 
-    val process = builder.start(createBatchRequest.file, createBatchRequest.args)
+    val process = builder.start(RelativePath(createBatchRequest.file), createBatchRequest.args)
     new BatchProcess(id, new LineBufferedProcess(process))
   }
 
@@ -37,14 +38,14 @@ object BatchProcess {
     val builder = SparkSubmitProcessBuilder(livyConf)
 
     createBatchRequest.className.foreach(builder.className)
-    createBatchRequest.jars.foreach(builder.jar)
-    createBatchRequest.pyFiles.foreach(builder.pyFile)
-    createBatchRequest.files.foreach(builder.file)
+    createBatchRequest.jars.map(RelativePath).foreach(builder.jar)
+    createBatchRequest.pyFiles.map(RelativePath).foreach(builder.pyFile)
+    createBatchRequest.files.map(RelativePath).foreach(builder.file)
     createBatchRequest.driverMemory.foreach(builder.driverMemory)
     createBatchRequest.driverCores.foreach(builder.driverCores)
     createBatchRequest.executorMemory.foreach(builder.executorMemory)
     createBatchRequest.executorCores.foreach(builder.executorCores)
-    createBatchRequest.archives.foreach(builder.archive)
+    createBatchRequest.archives.map(RelativePath).foreach(builder.archive)
 
     builder.redirectOutput(Redirect.PIPE)
 

+ 6 - 5
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchYarn.scala

@@ -20,6 +20,7 @@ package com.cloudera.hue.livy.server.batch
 
 import java.lang.ProcessBuilder.Redirect
 
+import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.RelativePath
 import com.cloudera.hue.livy.{LineBufferedProcess, LivyConf}
 import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
 import com.cloudera.hue.livy.yarn._
@@ -35,7 +36,7 @@ object BatchYarn {
   def apply(livyConf: LivyConf, client: Client, id: Int, createBatchRequest: CreateBatchRequest): Batch = {
     val builder = sparkBuilder(livyConf, createBatchRequest)
 
-    val process = new LineBufferedProcess(builder.start(createBatchRequest.file, createBatchRequest.args))
+    val process = new LineBufferedProcess(builder.start(RelativePath(createBatchRequest.file), createBatchRequest.args))
     val job = Future {
       client.getJobFromProcess(process)
     }
@@ -48,14 +49,14 @@ object BatchYarn {
     builder.master("yarn-cluster")
 
     createBatchRequest.className.foreach(builder.className)
-    createBatchRequest.jars.foreach(builder.jar)
-    createBatchRequest.pyFiles.foreach(builder.pyFile)
-    createBatchRequest.files.foreach(builder.file)
+    createBatchRequest.jars.map(RelativePath).foreach(builder.jar)
+    createBatchRequest.pyFiles.map(RelativePath).foreach(builder.pyFile)
+    createBatchRequest.files.map(RelativePath).foreach(builder.file)
     createBatchRequest.driverMemory.foreach(builder.driverMemory)
     createBatchRequest.driverCores.foreach(builder.driverCores)
     createBatchRequest.executorMemory.foreach(builder.executorMemory)
     createBatchRequest.executorCores.foreach(builder.executorCores)
-    createBatchRequest.archives.foreach(builder.archive)
+    createBatchRequest.archives.map(RelativePath).foreach(builder.archive)
 
     builder.redirectOutput(Redirect.PIPE)
     builder.redirectErrorStream(true)

+ 2 - 1
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/sessions/ProcessSession.scala

@@ -23,6 +23,7 @@ import java.net.URL
 
 import com.cloudera.hue.livy.sessions.Kind
 import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
+import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.AbsolutePath
 import com.cloudera.hue.livy.{LivyConf, Logging, Utils}
 
 import scala.annotation.tailrec
@@ -62,7 +63,7 @@ object ProcessSession extends Logging {
     builder.redirectOutput(Redirect.PIPE)
     builder.redirectError(Redirect.INHERIT)
 
-    builder.start(livyJar(livyConf), List(kind.toString))
+    builder.start(AbsolutePath(livyJar(livyConf)), List(kind.toString))
   }
 
   private def livyJar(conf: LivyConf): String = {

+ 2 - 1
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/sessions/YarnSession.scala

@@ -22,6 +22,7 @@ import java.lang.ProcessBuilder.Redirect
 import java.util.concurrent.TimeUnit
 
 import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
+import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.AbsolutePath
 import com.cloudera.hue.livy.{LineBufferedProcess, Utils, LivyConf}
 import com.cloudera.hue.livy.sessions.{Kind, Error}
 import com.cloudera.hue.livy.yarn.{Client, Job}
@@ -49,7 +50,7 @@ object YarnSession {
     builder.redirectOutput(Redirect.PIPE)
     builder.redirectErrorStream(redirect = true)
 
-    val process = builder.start(livyJar(livyConf), List(kind.toString))
+    val process = builder.start(AbsolutePath(livyJar(livyConf)), List(kind.toString))
 
     val job = Future {
       val proc = new LineBufferedProcess(process)