[livy] Automatically import the spark context as "sc"

Erick Tryzelaar 10 years ago
parent
commit
8587786ba4
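
With this change the Scala interpreter builds a SparkContext at startup and binds it into the REPL under the name "sc", so a session can use it without any setup. A hypothetical exchange in the Livy Scala shell after this commit (the exact echo text depends on the Spark version):

scala> sc.parallelize(1 to 10).count()
res0: Long = 10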

+ 15 - 0
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/scala/interpreter/Interpreter.scala

@@ -2,6 +2,7 @@ package com.cloudera.hue.livy.repl.scala.interpreter
 
 import java.io._
 
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.repl.SparkIMain
 
 import scala.concurrent.ExecutionContext
@@ -29,6 +30,7 @@ class Interpreter {
   private var _state: Interpreter.State = Interpreter.NotStarted()
   private val outputStream = new ByteArrayOutputStream()
   private var sparkIMain: SparkIMain = _
+  private var sparkContext: SparkContext = _
   private var executeCount = 0
 
   def state = _state
@@ -44,11 +46,24 @@ class Interpreter {
     val settings = new Settings()
     settings.usejavacp.value = true
 
+    val sparkConf = new SparkConf(true)
+      .setAppName("Livy Spark shell")
+
+    sparkContext = new SparkContext(sparkConf)
+
     sparkIMain = createSparkIMain(classLoader, settings)
+    sparkIMain.initializeSynchronous()
+    sparkIMain.beQuietDuring {
+      sparkIMain.bind("sc", "org.apache.spark.SparkContext", sparkContext, List("""@transient"""))
+    }
 
     _state = Interpreter.Idle()
   }
 
+  private def getMaster(): String = {
+    sys.props.get("spark.master").getOrElse("local[*]")
+  }
+
   private def createSparkIMain(classLoader: ClassLoader, settings: Settings) = {
     val out = new JPrintWriter(outputStream, true)
     val cls = classLoader.loadClass(classOf[SparkIMain].getName)
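
The bind call above is what makes the pre-built SparkContext visible inside interpreted code. A minimal standalone sketch of the same bind-then-use pattern, using the plain Scala REPL embedding API (scala.tools.nsc.interpreter.IMain) rather than Spark's SparkIMain; the object and value names are illustrative and scala-compiler is assumed to be on the classpath:

import java.io.ByteArrayOutputStream

import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IMain, JPrintWriter}

object BindSketch extends App {
  val output = new ByteArrayOutputStream()
  val settings = new Settings()
  settings.usejavacp.value = true

  // Same shape as the interpreter above: synchronous init, then a quiet bind.
  val imain = new IMain(settings, new JPrintWriter(output, true))
  imain.initializeSynchronous()
  imain.beQuietDuring {
    imain.bind("answer", "String", "forty-two", List("@transient"))
  }

  // The bound name is now an ordinary value in interpreted code; the REPL
  // echo ("res0: String = FORTY-TWO") goes to the writer supplied above.
  imain.interpret("answer.toUpperCase")
  print(output.toString)
}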

+ 12 - 0
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonSessionSpec.scala

@@ -147,5 +147,17 @@ class PythonSessionSpec extends FunSpec with ShouldMatchers with BeforeAndAfter
 
       result should equal (expectedResult)
     }
+
+    it("should access the spark context") {
+      val result = Await.result(session.execute("""sc"""), Duration.Inf)
+      val resultMap = result.extract[Map[String, JValue]]
+
+      // Manually extract the values since the line numbers in the exception could change.
+      resultMap("status").extract[String] should equal ("ok")
+      resultMap("execution_count").extract[Int] should equal (0)
+
+      val data = resultMap("data").extract[Map[String, JValue]]
+      data("text/plain").extract[String] should include ("<pyspark.context.SparkContext object at")
+    }
   }
 }
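
Both new tests assert against the session's JSON result. A small sketch of the shape being checked for the Python case, using json4s with DefaultFormats; the concrete values (notably the object address) are illustrative and the json4s-jackson backend is an assumption:

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object ResultShapeSketch extends App {
  implicit val formats: Formats = DefaultFormats

  // Illustrative JSON for a successful `sc` execution in the Python session;
  // the field names mirror the assertions in the spec above.
  val result: JValue = parse("""
    {
      "status": "ok",
      "execution_count": 0,
      "data": { "text/plain": "<pyspark.context.SparkContext object at 0x10abcdef0>" }
    }
  """)

  val resultMap = result.extract[Map[String, JValue]]
  assert(resultMap("status").extract[String] == "ok")
  assert(resultMap("execution_count").extract[Int] == 0)

  val data = resultMap("data").extract[Map[String, JValue]]
  assert(data("text/plain").extract[String].contains("<pyspark.context.SparkContext object at"))
}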

+ 12 - 0
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkSessionSpec.scala

@@ -120,5 +120,17 @@ class SparkSessionSpec extends FunSpec with ShouldMatchers with BeforeAndAfter {
       resultMap("evalue").extract[String] should include ("java.lang.Exception")
       resultMap.get("traceback") should equal (None)
     }
+
+    it("should access the spark context") {
+      val result = Await.result(session.execute("""sc"""), Duration.Inf)
+      val resultMap = result.extract[Map[String, JValue]]
+
+      // Manually extract the values since the line numbers in the exception could change.
+      resultMap("status").extract[String] should equal ("ok")
+      resultMap("execution_count").extract[Int] should equal (0)
+
+      val data = resultMap("data").extract[Map[String, JValue]]
+      data("text/plain").extract[String] should include ("res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext")
+    }
   }
  }
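
A note on the expected text in this test: the Scala interpreter echoes an evaluated expression as resN: Type = value, and SparkContext does not override toString, so the value part is the class name followed by an identity hash that changes from run to run; the spec therefore matches only the stable prefix. An illustrative full echo (the hash is made up):

res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext@4b5a3d1e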

+ 6 - 1
apps/spark/java/livy-server/pom.xml

@@ -109,6 +109,12 @@
             <scope>provided</scope>
         </dependency>
 
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-yarn-client</artifactId>
@@ -118,7 +124,6 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-yarn-api</artifactId>
-            <version>${hadoop.version}</version>
             <scope>provided</scope>
         </dependency>
 
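Dropping the explicit <version> for hadoop-yarn-api, and adding hadoop-client without one, relies on the parent pom resolving both through ${hadoop.version}; the hunk below adds the matching hadoop-client entry there, presumably inside the parent's <dependencyManagement> section.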

+ 6 - 0
apps/spark/java/pom.xml

@@ -168,6 +168,12 @@
                 <version>${hadoop.version}</version>
             </dependency>
 
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-client</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
+
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-yarn-client</artifactId>