
[spark] Initial web server working

Erick Tryzelaar · 11 years ago · commit ed2c280

+ 0 - 73
apps/spark/java/sparker-repl/src/main/java/com/cloudera/hue/sparker/repl/SparkerInterpreter.java

@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.sparker.repl;
-
-import org.apache.spark.repl.Main;
-import org.apache.spark.repl.SparkILoop;
-
-import java.io.*;
-import java.util.UUID;
-
-public class SparkerInterpreter implements AutoCloseable {
-
-    private final UUID uuid;
-    private final PipedWriter stdin;
-    private final PipedReader stdout;
-    private final SparkILoop interp;
-    private final Thread thread;
-
-    public SparkerInterpreter(UUID uuid) throws IOException {
-        this.uuid = uuid;
-        this.stdin = new PipedWriter();
-        this.stdout = new PipedReader();
-        this.interp = new SparkILoop(
-                new BufferedReader(new PipedReader(stdin)),
-                new PrintWriter(System.out)); //new PipedWriter(stdout)));
-
-
-        Main.interp_$eq(interp);
-
-        thread = new Thread(new Runnable() {
-            @Override
-            public void run() {
-                interp.process(new String[]{"-usejavacp"});
-            }
-        });
-    }
-
-    public UUID getUUID() {
-        return uuid;
-    }
-
-    public void execute(String command) throws IOException {
-        stdin.write(command);
-        stdin.write("\n");
-    }
-
-    public void start() throws IOException {
-        thread.start();
-    }
-
-    @Override
-    public void close() throws Exception {
-        stdin.close();
-        stdout.close();
-        thread.join();
-    }
-}

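The class removed above drove the Spark REPL through a pipe pair: callers wrote statements into a `PipedWriter` while a dedicated thread consumed them on the other end. A minimal stand-alone sketch of that handoff pattern (the object name and the consumer logic are illustrative, not from the commit):

```scala
import java.io.{BufferedReader, PipedReader, PipedWriter}

object PipedHandoffSketch {
  def main(args: Array[String]): Unit = {
    val stdin  = new PipedWriter()
    val reader = new BufferedReader(new PipedReader(stdin))

    // Consumer thread; a real REPL would evaluate each line instead of printing it.
    val consumer = new Thread {
      override def run(): Unit =
        Iterator.continually(reader.readLine()).takeWhile(_ != null).foreach(println)
    }
    consumer.start()

    stdin.write("1 + 1\n") // mirrors SparkerInterpreter.execute
    stdin.close()          // EOF: readLine() returns null and the consumer exits
    consumer.join()
  }
}
```

The queue-based replacement later in this commit avoids the main weakness of this design: with raw pipes, results can only be recovered by scraping the interpreter's output stream (note the `"scala> "` prefix-stripping removed in SparkerILoop.scala below).
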
+ 6 - 18
apps/spark/java/sparker-repl/src/main/scala/Scalatra.scala

@@ -1,32 +1,20 @@
 import javax.servlet.ServletContext
 
-import akka.actor.{ActorSystem, Props}
-import com.cloudera.hue.sparker.repl.{HelloWorldApp, SparkActor}
+import _root_.akka.actor.ActorSystem
+import com.cloudera.hue.sparker.repl.{HelloWorldApp, SparkerInterpreter}
 import org.scalatra.LifeCycle
 
-trait SparkerILoopInit {
-  def configureSparkerILoop() {
-    /*
-    org.apache.spark.repl.Main.interp = new SparkerILoop(Console.in, new StringWriter)
-
-    val args = Array("-usejavacp")
-    org.apache.spark.repl.Main.interp.process(args)
-    */
-  }
-}
-
-class ScalatraBootstrap extends LifeCycle with SparkerILoopInit {
+class ScalatraBootstrap extends LifeCycle {
 
   val system = ActorSystem()
+  val sparkerInterpreter = new SparkerInterpreter
 
   override def init(context: ServletContext): Unit = {
-    val myActor = system.actorOf(Props[SparkActor])
-
-    configureSparkerILoop()
-    context.mount(new HelloWorldApp(system, myActor), "/*")
+    context.mount(new HelloWorldApp(sparkerInterpreter), "/*")
   }
 
   override def destroy(context: ServletContext): Unit = {
+    sparkerInterpreter.close()
     system.shutdown()
   }
 }

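One thing worth noting in `destroy`: `sparkerInterpreter.close()` joins the REPL thread and can throw, which would skip `system.shutdown()`. A hypothetical hardening, not part of this commit:

```scala
import javax.servlet.ServletContext
import _root_.akka.actor.ActorSystem
import org.scalatra.LifeCycle

// Sketch only: guard shutdown with try/finally so the ActorSystem is
// stopped even if closing the interpreter fails.
class GuardedBootstrapSketch extends LifeCycle {
  val system = ActorSystem()

  override def destroy(context: ServletContext): Unit = {
    try {
      // sparkerInterpreter.close() would go here
    } finally {
      system.shutdown()
    }
  }
}
```
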
+ 15 - 15
apps/spark/java/sparker-repl/src/main/scala/com/cloudera/hue/sparker/repl/HelloWorldApp.scala

@@ -1,44 +1,43 @@
 package com.cloudera.hue.sparker.repl
 
 import java.io._
-import java.util.concurrent.{ArrayBlockingQueue, SynchronousQueue, TimeUnit}
+import java.util.concurrent.SynchronousQueue
 
-import akka.actor.{Actor, ActorRef, ActorSystem}
-import akka.pattern.ask
+import akka.actor.{Actor, ActorSystem}
 import akka.util.Timeout
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
-import org.scalatra.{Accepted, FutureSupport, ScalatraFilter}
+import org.scalatra.{ScalatraServlet, AsyncResult, FutureSupport, ScalatraFilter}
 
-import scala.concurrent.duration.Duration
-import scala.concurrent.{Await, ExecutionContext}
+import scala.concurrent.{ExecutionContextExecutor, ExecutionContext}
 
-class HelloWorldApp(system: ActorSystem, sparkActor: ActorRef) extends ScalatraFilter with FutureSupport {
+class HelloWorldApp(interpreter: SparkerInterpreter) extends ScalatraServlet with FutureSupport {
 
-  protected implicit def executor: ExecutionContext = system.dispatcher
-
-  implicit val defaultTimeout = Timeout(10)
+  implicit def executor: ExecutionContextExecutor = ExecutionContext.global
+  implicit def defaultTimeout: Timeout = Timeout(10)
 
   get("/") {
     <h1>Hello {params("name")}</h1>
   }
 
   get("/async") {
-    val future = ask(sparkActor, "1 + 1")
-
-    implicit val timeout = akka.util.Timeout(60, TimeUnit.SECONDS)
-    Await.result(future, Duration.Inf)
+    new AsyncResult { val is =
+      interpreter.execute("1 + 1")
+    }
   }
 
+  /*
   get("/fire-forget") {
     sparkActor ! "wee"
     Accepted()
   }
+  */
 }
 
+/*
 class SparkActor extends Actor {
 
-  protected def queue = new SynchronousQueue[Map[String, String]]
+  val queue = new SynchronousQueue[Map[String, String]]
 
   val inWriter = new PipedWriter()
   val inReader = new PipedReader(inWriter)
@@ -71,3 +70,4 @@ class SparkActor extends Actor {
     }
   }
 }
+*/

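The `/async` route above returns an `AsyncResult` whose `is` member is the interpreter's `Future[String]`; `FutureSupport` suspends the request and renders the value once the future completes. A self-contained sketch of the same pattern with a stand-in future (the servlet name and route are illustrative):

```scala
import org.scalatra.{AsyncResult, FutureSupport, ScalatraServlet}

import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future}

class AsyncEchoSketch extends ScalatraServlet with FutureSupport {
  implicit def executor: ExecutionContextExecutor = ExecutionContext.global

  get("/echo") {
    new AsyncResult {
      // Stand-in for interpreter.execute("1 + 1"); any Future[_] works here.
      val is: Future[String] = Future { "pong" }
    }
  }
}
```
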
+ 8 - 2
apps/spark/java/sparker-repl/src/main/scala/com/cloudera/hue/sparker/repl/Main.scala

@@ -1,20 +1,26 @@
 package com.cloudera.hue.sparker.repl
 
 import org.eclipse.jetty.server.Server
-import org.eclipse.jetty.servlet.DefaultServlet
+import org.eclipse.jetty.servlet.{ServletHolder, DefaultServlet}
 import org.eclipse.jetty.webapp.WebAppContext
-import org.scalatra.servlet.ScalatraListener
+import org.scalatra.servlet.{AsyncSupport, ScalatraListener}
+
+import scala.concurrent.ExecutionContext
 
 object Main {
   def main(args: Array[String]): Unit = {
     val port = 8087
     val server = new Server(port)
     val context = new WebAppContext()
+
     context.setContextPath("/")
     context.setResourceBase("src/main/com/cloudera/hue/sparker/repl")
     context.addEventListener(new ScalatraListener)
+
     context.addServlet(classOf[DefaultServlet], "/")
 
+    context.setAttribute(AsyncSupport.ExecutionContextKey, ExecutionContext.global)
+
     server.setHandler(context)
 
     server.start()

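Setting `AsyncSupport.ExecutionContextKey` on the context appears to be how the app hands an `ExecutionContext` to Scalatra's async machinery for the `FutureSupport` servlet mounted by the `ScalatraListener`. Once the server is up, the async route can be smoke-tested; a hypothetical client sketch, assuming the server is running on the port configured above:

```scala
import scala.io.Source

object SmokeTestSketch {
  def main(args: Array[String]): Unit = {
    // Expect a JSON object like {"type":"stdout","stdout":"..."} once the
    // interpreter has evaluated 1 + 1; the first request may block while
    // the Spark REPL finishes initializing.
    val body = Source.fromURL("http://localhost:8087/async").mkString
    println(body)
  }
}
```
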
+ 70 - 55
apps/spark/java/sparker-repl/src/main/scala/com/cloudera/hue/sparker/repl/SparkerILoop.scala

@@ -1,17 +1,55 @@
 package com.cloudera.hue.sparker.repl
 
-import java.io.{BufferedReader, StringWriter}
-import java.util.concurrent.BlockingQueue
+import java.io.{BufferedReader, PipedReader, PipedWriter, StringWriter}
+import java.util.concurrent.{BlockingQueue, SynchronousQueue}
 
 import org.apache.spark.repl.SparkILoop
 import org.json4s.DefaultFormats
+import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
+import scala.concurrent._
 import scala.tools.nsc.SparkHelper
 import scala.tools.nsc.interpreter.{Formatting, _}
 import scala.tools.nsc.util.ClassPath
 
-class SparkerILoop(queue: BlockingQueue[Map[String, String]], in0: BufferedReader, outString: StringWriter) extends SparkILoop(in0, new JPrintWriter(outString)) {
+class SparkerInterpreter {
+  private implicit def executor: ExecutionContext = ExecutionContext.global
+
+  private val inQueue = new SynchronousQueue[Request]
+
+  private val inWriter = new PipedWriter()
+
+  // Launch the real interpreter thread.
+  private val thread = new Thread {
+    override def run(): Unit = {
+      org.apache.spark.repl.Main.interp = new SparkerILoop(
+        inQueue,
+        new BufferedReader(new PipedReader(inWriter)),
+        new StringWriter)
+      val args = Array("-usejavacp")
+      org.apache.spark.repl.Main.interp.process(args)
+    }
+  }
+  thread.start()
+
+  def execute(statement: String): Future[String] = {
+    val promise = Promise[Map[String, String]]()
+    inQueue.put(ExecuteRequest(statement, promise))
+    promise.future.map {
+      case(response) => {
+        compact(render(response))
+      }
+    }
+  }
+
+  def close(): Unit = {
+    inQueue.put(ShutdownRequest())
+    thread.join()
+  }
+}
+
+class SparkerILoop(inQueue: BlockingQueue[Request], in0: BufferedReader, outString: StringWriter) extends SparkILoop(in0, new JPrintWriter(outString)) {
 
   class SparkerILoopInterpreter extends SparkILoopInterpreter {
     outer =>
@@ -58,82 +96,59 @@ class SparkerILoop(queue: BlockingQueue[Map[String, String]], in0: BufferedReade
     true
   }
 
+  override def prompt = ""
+
   override def loop(): Unit = {
     def readOneLine() = {
-      out.flush()
-      in readLine prompt
+      inQueue.take()
     }
     // return false if repl should exit
-    def processLine(line: String): Boolean = {
+    def processLine(request: Request): Boolean = {
       if (isAsync) {
         if (!awaitInitialized()) return false
         runThunks()
       }
 
-      if (line eq null) {
-        return false                // assume null means EOF
-      }
+      request match {
+        case ExecuteRequest(statement, promise) => {
+          command(statement) match {
+            case Result(false, _) => false
+            case Result(true, finalLine) => {
+              finalLine match {
+                case Some(line) => addReplay(line)
+                case _ =>
+              }
 
-      val request = parseOpt(line) match {
-        case Some(request) => request;
-        case None => {
-          queue.put(Map("type" -> "error", "msg" -> "invalid json"))
-          //println(compact(render(Map("type" -> "error", "msg" -> "invalid json"))))
-          return true
-        }
-      }
+              var output: String = outString.getBuffer.toString
+              output = output.substring(0, output.length - 1)
+              outString.getBuffer.setLength(0)
+
+              promise.success(Map("type" -> "stdout", "stdout" -> output))
 
-      implicit val formats = DefaultFormats
-      val type_ = (request \ "type").extract[Option[String]]
-
-      type_ match {
-        case Some("stdin") => {
-          (request \ "statement").extract[Option[String]] match {
-            case Some(statement) => {
-              command(statement) match {
-                case Result(false, _) => false
-                case Result(true, finalLine) => {
-                  finalLine match {
-                    case Some(line) => addReplay(line)
-                    case _ =>
-                  }
-
-                  var output: String = outString.getBuffer.toString
-                  output = output.substring("scala> ".length + 1, output.length - 1)
-                  outString.getBuffer.setLength(0)
-                  queue.put(Map("type" -> "stdout", "stdout" -> output))
-                  //println(compact(render(Map("type" -> "stdout", "stdout" -> output))))
-
-                  true
-                }
-              }
-            }
-            case _ => {
-              queue.put(Map("type" -> "error", "msg" -> "missing statement"))
-              //println(compact(render(Map("type" -> "error", "msg" -> "missing statement"))))
               true
               true
             }
           }
         }
-          queue.put(Map("type" -> "error", "msg" -> "unknown type"))
-          //println(compact(render(Map("type" -> "error", "msg" -> "unknown type"))))
-          true
-        }
+        case ShutdownRequest() => false
       }
       }
     }
     def innerLoop() {
       outString.getBuffer.setLength(0)
       val shouldContinue = try {
       val shouldContinue = try {
         processLine(readOneLine())
-      if (shouldContinue)
+      } catch {
+        case t: Throwable => crashRecovery(t)
+      }
+
+      if (shouldContinue) {
         innerLoop()
         innerLoop()
-        queue.put(Map("state" -> "quit"))
-        //println(compact(render(Map("state" -> "quit"))))
       }
       }
     }
     innerLoop()
   }
 }
+
+sealed trait Request
+case class ExecuteRequest(statement: String, promise: Promise[Map[String, String]]) extends Request
+case class ShutdownRequest() extends Request
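
The core of the new design is the `SynchronousQueue` handoff: the web thread enqueues an `ExecuteRequest` carrying a `Promise`, the REPL thread takes it, runs the statement, and completes the promise, so exactly one statement is in flight at a time. A stripped-down, runnable sketch of the same pattern (all names illustrative):

```scala
import java.util.concurrent.SynchronousQueue

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Promise}

object HandoffSketch {
  sealed trait Req
  case class Exec(stmt: String, p: Promise[String]) extends Req
  case object Shutdown extends Req

  def main(args: Array[String]): Unit = {
    val queue = new SynchronousQueue[Req]

    // Single worker standing in for the SparkerILoop thread.
    val worker = new Thread {
      override def run(): Unit = {
        var live = true
        while (live) {
          queue.take() match {
            case Exec(stmt, p) => p.success(s"evaluated: $stmt") // a real REPL would run stmt
            case Shutdown      => live = false
          }
        }
      }
    }
    worker.start()

    val p = Promise[String]()
    queue.put(Exec("1 + 1", p))                   // blocks until the worker takes it
    println(Await.result(p.future, Duration.Inf)) // prints "evaluated: 1 + 1"

    queue.put(Shutdown)
    worker.join()
  }
}
```

A `SynchronousQueue` has no capacity, so `put` blocks until the REPL thread is ready to take the request; statements are serialized without any extra locking.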