HUE-3025 and HUE-3055 [livy] Support Spark 1.4, and fix pyFiles support

This implements Spark 1.4 support and fixes pyFiles handling by
uploading the files and adding them to the PYTHONPATH of the
interactive sessions.
Erick Tryzelaar 10 years ago
parent commit 5c2ba1f
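
In outline, the patch composes the interpreter's PYTHONPATH from three sources: the inherited environment, the PySpark archives that ship with Spark, and any user-supplied pyFiles. A minimal standalone sketch of that composition, with illustrative paths standing in for the values the patch discovers at runtime:

    import java.io.File

    // Sketch only: combine the inherited PYTHONPATH with the PySpark
    // archives and user pyFiles, as the patched PythonInterpreter does.
    // The two Seq values are illustrative, not taken from the patch.
    object PythonPathSketch extends App {
      val pySparkArchives = Seq("/opt/spark/python/lib/pyspark.zip",
                                "/opt/spark/python/lib/py4j-0.8.2.1-src.zip")
      val pyFiles = Seq("/tmp/deps.zip")

      val pythonPath = sys.env.getOrElse("PYTHONPATH", "")
        .split(File.pathSeparator)
        .filter(_.nonEmpty) ++ pySparkArchives ++ pyFiles

      println(pythonPath.mkString(File.pathSeparator))
    }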

+ 10 - 0
apps/spark/java/livy-repl/pom.xml

@@ -27,6 +27,12 @@
             <artifactId>jackson-core</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>com.fasterxml.jackson.module</groupId>
+            <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
@@ -69,6 +75,10 @@
                     <groupId>org.apache.httpcomponents</groupId>
                     <artifactId>httpcore</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
 

+ 35 - 18
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/python/PythonInterpreter.scala

@@ -41,13 +41,16 @@ object PythonInterpreter extends Logging {
     val gatewayServer = new GatewayServer(null, 0)
     gatewayServer.start()
 
-    val pySparkArchives = findPySparkArchives()
-
     val builder = new ProcessBuilder(Seq(pythonExec, createFakeShell().toString))
 
     val env = builder.environment()
 
-    env.put("PYTHONPATH", (sys.env.get("PYTHONPATH") ++ pySparkArchives).mkString(File.pathSeparator))
+    val pythonPath = sys.env.getOrElse("PYTHONPATH", "")
+      .split(File.pathSeparator)
+      .++(findPySparkArchives())
+      .++(findPyFiles())
+
+    env.put("PYTHONPATH", pythonPath.mkString(File.pathSeparator))
     env.put("PYTHONUNBUFFERED", "YES")
     env.put("PYTHONUNBUFFERED", "YES")
     env.put("PYSPARK_GATEWAY_PORT", "" + gatewayServer.getListeningPort)
     env.put("PYSPARK_GATEWAY_PORT", "" + gatewayServer.getListeningPort)
     env.put("SPARK_HOME", sys.env.getOrElse("SPARK_HOME", "."))
     env.put("SPARK_HOME", sys.env.getOrElse("SPARK_HOME", "."))
@@ -63,21 +66,35 @@ object PythonInterpreter extends Logging {
     sys.env.get("PYSPARK_ARCHIVES_PATH")
     sys.env.get("PYSPARK_ARCHIVES_PATH")
       .map(_.split(",").toSeq)
       .map(_.split(",").toSeq)
       .getOrElse {
       .getOrElse {
-      sys.env.get("SPARK_HOME") .map { case sparkHome =>
-        val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
-        val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
-        require(pyArchivesFile.exists(),
-          "pyspark.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-
-        val py4jFile = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
-          .iterator()
-          .next()
-          .toFile
-
-        require(py4jFile.exists(),
-          "py4j-*-src.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-        Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
-      }.getOrElse(Seq())
+        sys.env.get("SPARK_HOME").map { sparkHome =>
+          val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
+          val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
+          require(pyArchivesFile.exists(),
+            "pyspark.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
+
+          val py4jFile = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
+            .iterator()
+            .next()
+            .toFile
+
+          require(py4jFile.exists(),
+            "py4j-*-src.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
+          Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
+        }.getOrElse(Seq())
+      }
+  }
+
+  private def findPyFiles(): Seq[String] = {
+    val pyFiles = sys.props.getOrElse("spark.submit.pyFiles", "").split(",")
+
+    if (sys.env.getOrElse("SPARK_YARN_MODE", "") == "true") {
+      // In YARN mode, these files have been localized into the current working directory.
+      pyFiles.map { file =>
+        val name = new File(file).getName
+        new File(name).getAbsolutePath
+      }
+    } else {
+      pyFiles
     }
   }
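
The new findPyFiles helper reads spark.submit.pyFiles back out of the system properties. Under YARN (SPARK_YARN_MODE=true) the shipped files have already been localized into the container's working directory, so each entry is reduced to its base name and resolved against the current directory. A small sketch of that mapping, with made-up property values:

    import java.io.File

    // Sketch of the YARN-mode branch of findPyFiles: strip each pyFiles
    // entry to its base name and resolve it against the working directory,
    // which is where YARN localizes shipped files.
    object PyFilesLocalizationSketch extends App {
      val pyFiles = "hdfs:///user/alice/deps.zip,/tmp/util.py".split(",")

      val localized = pyFiles.map { file =>
        val name = new File(file).getName    // e.g. "deps.zip"
        new File(name).getAbsolutePath       // resolved against the CWD
      }

      localized.foreach(println)
    }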

+ 42 - 4
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionFactory.scala

@@ -18,7 +18,9 @@
 
 
 package com.cloudera.hue.livy.spark.interactive
 
+import java.io.File
 import java.lang.ProcessBuilder.Redirect
+import java.nio.file.{Paths, Files}
 
 import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
 import com.cloudera.hue.livy.sessions.{PySpark, SessionFactory, SessionKindSerializer}
@@ -31,9 +33,10 @@ object InteractiveSessionFactory {
   private val LivyReplDriverClassPath = "livy.repl.driverClassPath"
   private val LivyReplJar = "livy.repl.jar"
   private val LivyServerUrl = "livy.server.serverUrl"
-  private val SparkDriverExtraJavaOptions = "spark.driver.extraDriverOptions"
+  private val SparkDriverExtraJavaOptions = "spark.driver.extraJavaOptions"
   private val SparkLivyCallbackUrl = "spark.livy.callbackUrl"
   private val SparkLivyPort = "spark.livy.port"
+  private val SparkSubmitPyFiles = "spark.submit.pyFiles"
   private val SparkYarnIsPython = "spark.yarn.isPython"
 }
 
@@ -71,12 +74,25 @@ abstract class InteractiveSessionFactory(processFactory: SparkProcessBuilderFact
     request.files.map(RelativePath).foreach(builder.file)
     request.jars.map(RelativePath).foreach(builder.jar)
     request.proxyUser.foreach(builder.proxyUser)
-    request.pyFiles.map(RelativePath).foreach(builder.pyFile)
     request.queue.foreach(builder.queue)
     request.name.foreach(builder.name)
 
     request.kind match {
-      case PySpark() => builder.conf(SparkYarnIsPython, "true", admin = true)
+      case PySpark() =>
+        builder.conf(SparkYarnIsPython, "true", admin = true)
+
+        // FIXME: Spark-1.4 seems to require us to manually upload the PySpark support files.
+        // We should only do this for Spark 1.4.x
+        val pySparkFiles = findPySparkArchives()
+        builder.files(pySparkFiles.map(AbsolutePath))
+
+        // We can't actually use `builder.pyFiles`, because livy-repl is a Jar, and
+        // spark-submit will reject it because it isn't a Python file. Instead we'll pass it
+        // through a special property that the livy-repl will use to expose these libraries in
+        // the Python shell.
+        builder.files(request.pyFiles.map(RelativePath))
+
+        builder.conf(SparkSubmitPyFiles, (pySparkFiles ++ request.pyFiles).mkString(","), admin = true)
       case _ =>
     }
 
@@ -85,7 +101,7 @@ abstract class InteractiveSessionFactory(processFactory: SparkProcessBuilderFact
         case Some(javaOptions) => f"$javaOptions $replJavaOpts"
         case None => replJavaOpts
       }
-      builder.conf(SparkDriverExtraJavaOptions, javaOpts)
+      builder.conf(SparkDriverExtraJavaOptions, javaOpts, admin = true)
     }
 
     processFactory.livyConf.getOption(LivyReplDriverClassPath)
@@ -108,4 +124,26 @@ abstract class InteractiveSessionFactory(processFactory: SparkProcessBuilderFact
     livyConf.getOption(LivyReplJar)
       .getOrElse(Utils.jarOfClass(getClass).head)
   }
+
+  private def findPySparkArchives(): Seq[String] = {
+    sys.env.get("PYSPARK_ARCHIVES_PATH")
+      .map(_.split(",").toSeq)
+      .getOrElse {
+        sys.env.get("SPARK_HOME").map { sparkHome =>
+          val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
+          val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
+          require(pyArchivesFile.exists(),
+            "pyspark.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
+
+          val py4jFile = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
+            .iterator()
+            .next()
+            .toFile
+
+          require(py4jFile.exists(),
+            "py4j-*-src.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
+          Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
+        }.getOrElse(Seq())
+      }
+  }
 }
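
Tying the two files together: the session factory ships the PySpark archives and the user's pyFiles as ordinary files, then records the full list in spark.submit.pyFiles, which PythonInterpreter.findPyFiles reads on the driver side. A hedged sketch of that producer/consumer round trip, with invented file names standing in for the session request:

    // Producer side (InteractiveSessionFactory) writes the property;
    // consumer side (livy-repl's findPyFiles) splits it back out.
    object PyFilesRoundTripSketch extends App {
      val pySparkFiles = Seq("/opt/spark/python/lib/pyspark.zip")  // illustrative
      val requestPyFiles = Seq("deps.zip")                         // illustrative

      // Producer: everything goes into one comma-separated admin property.
      sys.props("spark.submit.pyFiles") =
        (pySparkFiles ++ requestPyFiles).mkString(",")

      // Consumer: split it back out, as findPyFiles does.
      val pyFiles = sys.props.getOrElse("spark.submit.pyFiles", "").split(",")
      pyFiles.foreach(println)
    }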

+ 10 - 2
apps/spark/java/pom.xml

@@ -52,10 +52,11 @@
         <dispatch.version>0.11.2</dispatch.version>
         <httpclient.version>4.5</httpclient.version>
         <httpcore.version>4.4.1</httpcore.version>
-        <jackson.version>2.2.3</jackson.version>
+        <jackson.version>2.4.4</jackson.version>
+        <jackson-module-scala.version>2.4.4</jackson-module-scala.version>
         <javax.servlet-api.version>3.1.0</javax.servlet-api.version>
         <jetty.version>9.2.10.v20150310</jetty.version>
-        <json4s.version>3.2.5</json4s.version>
+        <json4s.version>3.2.11</json4s.version>
         <logback.version>1.1.2</logback.version>
         <metrics.version>3.1.0</metrics.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -104,6 +105,13 @@
                 <version>${jackson.version}</version>
             </dependency>
 
+            <dependency>
+                <groupId>com.fasterxml.jackson.module</groupId>
+                <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
+                <version>${jackson-module-scala.version}</version>
+                <scope>provided</scope>
+            </dependency>
+
             <dependency>
                 <groupId>commons-codec</groupId>
                 <artifactId>commons-codec</artifactId>
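
The Jackson bump to 2.4.4 and the new jackson-module-scala dependency appear to align livy with the Jackson 2.4.x line that Spark 1.4 uses, which is also why the module is provided scope. For reference, the module's usual registration pattern, shown as a generic sketch rather than code from this patch:

    import com.fasterxml.jackson.databind.ObjectMapper
    import com.fasterxml.jackson.module.scala.DefaultScalaModule

    // Generic jackson-module-scala usage: registering DefaultScalaModule
    // lets the mapper handle Scala case classes, collections and Options.
    case class Session(id: Int, kind: String)

    object JacksonScalaSketch extends App {
      val mapper = new ObjectMapper()
      mapper.registerModule(DefaultScalaModule)

      val json = mapper.writeValueAsString(Session(1, "pyspark"))
      println(json)  // {"id":1,"kind":"pyspark"}

      val back = mapper.readValue(json, classOf[Session])
      println(back)  // Session(1,pyspark)
    }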