
[livy] Fix running "cat(3)" in SparkR

Erick Tryzelaar 10 years ago
commit b782c352d4

+ 1 - 1
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/sparkr/SparkRInterpreter.scala

@@ -33,7 +33,7 @@ import scala.io.Source
 private object SparkRInterpreter {
   val LIVY_END_MARKER = "----LIVY_END_OF_COMMAND----"
   val PRINT_MARKER = f"""print("$LIVY_END_MARKER")"""
-  val EXPECTED_OUTPUT = f"""\n$PRINT_MARKER\n[1] "$LIVY_END_MARKER""""
+  val EXPECTED_OUTPUT = f"""$PRINT_MARKER\n[1] "$LIVY_END_MARKER""""
 }
 
 private class SparkRInterpreter(process: Process)
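
Why the one-line change above fixes cat(3): the interpreter presumably scans the R child process's stdout for EXPECTED_OUTPUT to find where a statement's output ends. print() always terminates its output with a newline, but cat(3) writes just "3" with no trailing newline, so a marker that begins with "\n" never matches after a cat call. The sketch below is illustrative only (MarkerDemo and the simplified stdout string are assumptions, not Livy code); it contrasts the old and new marker against such a stream.

// MarkerDemo is an illustrative sketch, not part of the patch.
object MarkerDemo extends App {
  val LIVY_END_MARKER = "----LIVY_END_OF_COMMAND----"
  val PRINT_MARKER = f"""print("$LIVY_END_MARKER")"""
  // Marker before and after this commit.
  val OLD_EXPECTED_OUTPUT = f"""\n$PRINT_MARKER\n[1] "$LIVY_END_MARKER""""
  val NEW_EXPECTED_OUTPUT = f"""$PRINT_MARKER\n[1] "$LIVY_END_MARKER""""

  // Assumed (simplified) stdout after executing cat(3): "3" with no trailing
  // newline, immediately followed by the echoed marker command and its result.
  val stdoutAfterCat = "3" + PRINT_MARKER + "\n[1] \"" + LIVY_END_MARKER + "\""

  println(stdoutAfterCat.contains(OLD_EXPECTED_OUTPUT)) // false: no newline precedes the echoed marker
  println(stdoutAfterCat.contains(NEW_EXPECTED_OUTPUT)) // true
  // Everything before the marker is the statement's output: "3"
  println(stdoutAfterCat.substring(0, stdoutAfterCat.indexOf(NEW_EXPECTED_OUTPUT)))
}

The new test in SparkRSessionSpec.scala below exercises exactly this case, expecting "text/plain" -> "3" for cat(3) alongside the existing print() test.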

+ 17 - 1
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkRSessionSpec.scala

@@ -90,7 +90,7 @@ class SparkRSessionSpec extends BaseSessionSpec {
       result should equal (expectedResult)
     }
 
-    it("should capture stdout") {
+    it("should capture stdout from print") {
       val statement = session.execute("""print('Hello World')""")
       statement.id should equal (0)
 
@@ -106,6 +106,22 @@ class SparkRSessionSpec extends BaseSessionSpec {
       result should equal (expectedResult)
     }
 
+    it("should capture stdout from cat") {
+      val statement = session.execute("""cat(3)""")
+      statement.id should equal (0)
+
+      val result = Await.result(statement.result, Duration.Inf)
+      val expectedResult = Extraction.decompose(Map(
+        "status" -> "ok",
+        "execution_count" -> 0,
+        "data" -> Map(
+          "text/plain" -> "3"
+        )
+      ))
+
+      result should equal (expectedResult)
+    }
+
     it("should report an error if accessing an unknown variable") {
       val statement = session.execute("""x""")
       statement.id should equal (0)