
[livy] Expose interactive session logs through /sessions/id/log route

Erick Tryzelaar, 10 years ago
parent
commit
705a7fbf4f
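
As a rough usage sketch of the route this commit adds, the snippet below fetches a window of an interactive session's log over HTTP. The host, port (Livy's usual 8998 is assumed), and session id 0 are illustrative and not taken from the commit.

    // Hypothetical client call; host, port and session id are assumptions.
    import scala.io.Source

    object FetchSessionLog {
      def main(args: Array[String]): Unit = {
        // Ask for 10 log lines of interactive session 0, starting at line 0.
        val url = "http://localhost:8998/sessions/0/log?from=0&size=10"
        // Expected response fields, per the servlet change below: id, from, total, log.
        println(Source.fromURL(url).mkString)
      }
    }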

+ 11 - 15
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchSessionServlet.scala

@@ -69,11 +69,7 @@ class BatchSessionServlet(batchManager: BatchManager)
 
     batchManager.getBatch(id) match {
       case None => NotFound("batch not found")
-      case Some(batch) =>
-        val from = params.get("from").map(_.toInt)
-        val size = params.get("size").map(_.toInt)
-
-        Serializers.serializeBatch(batch, from, size)
+      case Some(batch) => Serializers.serializeBatch(batch)
     }
   }
 
@@ -95,8 +91,12 @@ class BatchSessionServlet(batchManager: BatchManager)
       case Some(batch) =>
         val from = params.get("from").map(_.toInt)
         val size = params.get("size").map(_.toInt)
+        val (from_, total, logLines) = Serializers.getLogs(batch, from, size)
 
-        ("id", batch.id) ~ ("log", Serializers.getLogs(batch, from, size))
+        ("id", batch.id) ~
+          ("from", from_) ~
+          ("total", total) ~
+          ("log", logLines)
     }
   }
 
@@ -129,16 +129,13 @@ private object Serializers {
 
   def Formats: List[CustomSerializer[_]] = List(BatchSerializer)
 
-  def serializeBatch(batch: BatchSession,
-                     from: Option[Int],
-                     size: Option[Int]): JValue = {
-
+  def serializeBatch(batch: BatchSession): JValue = {
     ("id", batch.id) ~
       ("state", batch.state.toString) ~
-      ("log", getLogs(batch, from, size))
+      ("log", getLogs(batch, None, Some(10))._3)
   }
 
-  def getLogs(batch: BatchSession, fromOpt: Option[Int], sizeOpt: Option[Int]): JValue = {
+  def getLogs(batch: BatchSession, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
     val lines = batch.logLines()
 
     val size = sizeOpt.getOrElse(100)
@@ -148,7 +145,7 @@ private object Serializers {
     }
     val until = from + size
 
-    lines.view(from, until)
+    (from, lines.length, lines.view(from, until))
   }
 
   case object BatchSerializer extends CustomSerializer[BatchSession](
@@ -156,8 +153,7 @@ private object Serializers {
     // We don't support deserialization.
     PartialFunction.empty
   }, {
-    case batch: BatchSession =>
-      serializeBatch(batch, None, None)
+    case batch: BatchSession => serializeBatch(batch)
   }
     )
   )
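
For reference, a standalone sketch of the from/size windowing that getLogs implements above. It mirrors the shape of that method but is not the exact code (the default start here is simply clamped to the last size lines), and the sample data is made up.

    object LogWindowSketch {
      // Returns (start index, total line count, selected window), mirroring the
      // (from, lines.length, lines.view(from, until)) tuple returned above.
      def window(lines: IndexedSeq[String],
                 fromOpt: Option[Int],
                 sizeOpt: Option[Int]): (Int, Int, Seq[String]) = {
        val size = sizeOpt.getOrElse(100)
        val from = fromOpt.filter(_ >= 0).getOrElse(math.max(0, lines.length - size))
        (from, lines.length, lines.slice(from, from + size))
      }

      def main(args: Array[String]): Unit = {
        val log = IndexedSeq("line0", "line1", "line2", "line3")
        println(window(log, None, Some(2)))    // (2,4,Vector(line2, line3))
        println(window(log, Some(1), Some(2))) // (1,4,Vector(line1, line2))
      }
    }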

+ 2 - 0
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSession.scala

@@ -42,6 +42,8 @@ trait InteractiveSession extends Session {
 
   def lastActivity: Long
 
+  def logLines(): IndexedSeq[String]
+
   def url: Option[URL]
 
   def url_=(url: URL)

+ 2 - 0
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionProcess.scala

@@ -118,6 +118,8 @@ private class InteractiveSessionProcess(id: Int,
   stdoutThread.setDaemon(true)
   stdoutThread.start()
 
+  override def logLines() = process.inputLines
+
   override def stop(): Future[Unit] = {
     super.stop() andThen { case r =>
       // Make sure the process is reaped.

+ 33 - 1
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServlet.scala

@@ -27,6 +27,7 @@ import com.cloudera.hue.livy.server.interactive.InteractiveSession.SessionFailed
 import com.cloudera.hue.livy.sessions._
 import com.fasterxml.jackson.core.JsonParseException
 import org.json4s.JsonAST.JString
+import org.json4s.JsonDSL._
 import org.json4s._
 import org.scalatra._
 import org.scalatra.json.JacksonJsonSupport
@@ -134,6 +135,23 @@ class InteractiveSessionServlet(sessionManager: SessionManager)
     new AsyncResult() { val is = for { _ <- future } yield NoContent() }
   }
 
+  get("/:sessionId/log") {
+    val sessionId = params("sessionId").toInt
+
+    sessionManager.get(sessionId) match {
+      case None => NotFound("Session not found")
+      case Some(session: InteractiveSession) =>
+        val from = params.get("from").map(_.toInt)
+        val size = params.get("size").map(_.toInt)
+        val (from_, total, logLines) = Serializers.getLogs(session, from, size)
+
+        ("id", session.id) ~
+          ("from", from_) ~
+          ("total", total) ~
+          ("log", logLines)
+    }
+  }
+
   get("/:sessionId/statements") {
     val sessionId = params("sessionId").toInt
 
@@ -215,7 +233,21 @@ private object Serializers {
     ("id", session.id) ~
       ("state", serializeSessionState(session.state)) ~
       ("kind", serializeSessionKind(session.kind)) ~
-      ("proxyUser", session.proxyUser)
+      ("proxyUser", session.proxyUser) ~
+      ("log", getLogs(session, None, Some(10))._3)
+  }
+  
+  def getLogs(session: InteractiveSession, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
+    val lines = session.logLines()
+
+    val size = sizeOpt.getOrElse(100)
+    var from = fromOpt.getOrElse(-1)
+    if (from < 0) {
+      from = math.min(0, lines.length - size)
+    }
+    val until = from + size
+
+    (from, lines.length, lines.view(from, until))
   }
 
   def serializeStatement(statement: Statement, from: Option[Int], size: Option[Int]): JValue = {
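
The new log routes assemble their response with the json4s DSL shown above; as an illustration of the resulting JSON shape (the sample values are made up, not taken from the commit):

    import org.json4s.JsonDSL._
    import org.json4s.jackson.JsonMethods.{compact, render}

    object LogResponseShape {
      def main(args: Array[String]): Unit = {
        val logLines = Seq("stdout: starting", "stdout: ready")
        val json =
          ("id", 0) ~
            ("from", 0) ~
            ("total", logLines.length) ~
            ("log", logLines)
        // Prints: {"id":0,"from":0,"total":2,"log":["stdout: starting","stdout: ready"]}
        println(compact(render(json)))
      }
    }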

+ 17 - 13
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionYarn.scala

@@ -22,7 +22,7 @@ import java.lang.ProcessBuilder.Redirect
 import java.util.concurrent.TimeUnit
 
 import com.cloudera.hue.livy.sessions.Error
-import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder
+import com.cloudera.hue.livy.spark.{SparkProcess, SparkSubmitProcessBuilder}
 import com.cloudera.hue.livy.spark.SparkSubmitProcessBuilder.{RelativePath, AbsolutePath}
 import com.cloudera.hue.livy.yarn.{Client, Job}
 import com.cloudera.hue.livy.{LineBufferedProcess, LivyConf, Utils}
@@ -63,16 +63,7 @@ object InteractiveSessionYarn {
 
     val process = builder.start(AbsolutePath(livyJar(livyConf)), List(createInteractiveRequest.kind.toString))
 
-    val job = Future {
-      val job = client.getJobFromProcess(process)
-
-      // We don't need the process anymore.
-      process.destroy()
-
-      job
-    }
-
-    new InteractiveSessionYarn(id, createInteractiveRequest, job)
+    new InteractiveSessionYarn(id, client, process, createInteractiveRequest)
   }
 
   private def livyJar(livyConf: LivyConf) = {
@@ -85,13 +76,26 @@ object InteractiveSessionYarn {
 }
 
 private class InteractiveSessionYarn(id: Int,
-                                     createInteractiveRequest: CreateInteractiveRequest,
-                                     job: Future[Job]) extends InteractiveWebSession(id, createInteractiveRequest) {
+                                     client: Client,
+                                     process: SparkProcess,
+                                     createInteractiveRequest: CreateInteractiveRequest)
+  extends InteractiveWebSession(id, createInteractiveRequest) {
+
+  private val job = Future {
+    val job = client.getJobFromProcess(process)
+
+    job
+  }
+
   job.onFailure { case _ =>
     _state = Error()
   }
 
+  override def logLines() = process.inputLines
+
   override def stop(): Future[Unit] = {
+    process.destroy()
+
     super.stop().andThen {
       case _ =>
         try {

+ 1 - 1
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveWebSession.scala

@@ -31,7 +31,7 @@ import org.json4s.{DefaultFormats, Formats}
 import scala.concurrent.duration.Duration
 import scala.concurrent.{Future, _}
 
-class InteractiveWebSession(val id: Int, createInteractiveRequest: CreateInteractiveRequest) extends InteractiveSession with Logging {
+abstract class InteractiveWebSession(val id: Int, createInteractiveRequest: CreateInteractiveRequest) extends InteractiveSession with Logging {
 
   protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
   protected implicit def jsonFormats: Formats = DefaultFormats

+ 2 - 0
apps/spark/java/livy-server/src/test/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServletSpec.scala

@@ -40,6 +40,8 @@ class InteractiveSessionServletSpec extends ScalatraSuite with FunSpecLike {
 
     override def kind: Kind = Spark()
 
+    override def logLines() = IndexedSeq()
+
     override def state = _state
 
     override def stop(): Future[Unit] = ???