
HUE-3308 [spark] Migrate Livy to external repository

Romain Rigaux, 9 years ago
Commit d7a2b9a835
99 changed files with 3 additions and 10,102 deletions
  1. apps/spark/Makefile (+0 -33)
  2. apps/spark/java/README.rst (+3 -751)
  3. apps/spark/java/bin/livy-repl (+0 -38)
  4. apps/spark/java/bin/livy-server (+0 -31)
  5. apps/spark/java/bin/livy-yarn-client (+0 -29)
  6. apps/spark/java/bin/livy-yarn-server (+0 -24)
  7. apps/spark/java/bin/setup-classpath (+0 -31)
  8. apps/spark/java/conf/livy-defaults.conf.template (+0 -40)
  9. apps/spark/java/conf/spark-user-configurable-options.template (+0 -124)
  10. apps/spark/java/dist.xml (+0 -37)
  11. apps/spark/java/livy-assembly/pom.xml (+0 -118)
  12. apps/spark/java/livy-core/pom.xml (+0 -99)
  13. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LineBufferedProcess.scala (+0 -47)
  14. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LineBufferedStream.scala (+0 -94)
  15. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LivyConf.scala (+0 -120)
  16. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/Logging.scala (+0 -55)
  17. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/Utils.scala (+0 -135)
  18. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/WebServer.scala (+0 -111)
  19. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/msgs.scala (+0 -64)
  20. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/Kind.scala (+0 -45)
  21. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/Session.scala (+0 -42)
  22. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionFactory.scala (+0 -30)
  23. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionManager.scala (+0 -110)
  24. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionState.scala (+0 -81)
  25. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/batch/BatchSession.scala (+0 -23)
  26. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/InteractiveSession.scala (+0 -59)
  27. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/Statement.scala (+0 -65)
  28. apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/StatementState.scala (+0 -35)
  29. apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/BaseInteractiveSessionSpec.scala (+0 -102)
  30. apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/SessionManagerSpec.scala (+0 -51)
  31. apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/interactive/StatementSpec.scala (+0 -77)
  32. apps/spark/java/livy-repl/pom.xml (+0 -218)
  33. apps/spark/java/livy-repl/src/main/resources/fake_R.sh (+0 -18)
  34. apps/spark/java/livy-repl/src/main/resources/fake_pyspark.sh (+0 -39)
  35. apps/spark/java/livy-repl/src/main/resources/fake_shell.py (+0 -446)
  36. apps/spark/java/livy-repl/src/main/resources/logback-access.xml (+0 -13)
  37. apps/spark/java/livy-repl/src/main/resources/logback.xml (+0 -12)
  38. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Interpreter.scala (+0 -50)
  39. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Main.scala (+0 -158)
  40. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Session.scala (+0 -144)
  41. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/WebApp.scala (+0 -146)
  42. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/package.scala (+0 -30)
  43. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/process/ProcessInterpreter.scala (+0 -131)
  44. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/python/PythonInterpreter.scala (+0 -189)
  45. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/scala/SparkInterpreter.scala (+0 -260)
  46. apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/sparkr/SparkRInterpreter.scala (+0 -250)
  47. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/BaseInterpreterSpec.scala (+0 -34)
  48. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/BaseSessionSpec.scala (+0 -50)
  49. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonInterpreterSpec.scala (+0 -213)
  50. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonSessionSpec.scala (+0 -204)
  51. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/ScalaInterpreterSpec.scala (+0 -128)
  52. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkRInterpreterSpec.scala (+0 -111)
  53. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkRSessionSpec.scala (+0 -185)
  54. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkSessionSpec.scala (+0 -197)
  55. apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/WebAppSpec.scala (+0 -116)
  56. apps/spark/java/livy-server/pom.xml (+0 -230)
  57. apps/spark/java/livy-server/src/main/resources/logback-access.xml (+0 -13)
  58. apps/spark/java/livy-server/src/main/resources/logback.xml (+0 -12)
  59. apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/Main.scala (+0 -179)
  60. apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/SessionServlet.scala (+0 -146)
  61. apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchSessionServlet.scala (+0 -74)
  62. apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServlet.scala (+0 -223)
  63. apps/spark/java/livy-server/src/test/scala/com/cloudera/hue/livy/server/batch/BatchServletSpec.scala (+0 -135)
  64. apps/spark/java/livy-server/src/test/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServletSpec.scala (+0 -142)
  65. apps/spark/java/livy-spark/pom.xml (+0 -145)
  66. apps/spark/java/livy-spark/src/main/resources/com/cloudera/hue/livy/spark/default-spark-user-configurable-options.conf (+0 -123)
  67. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/ConfigOptionNotAllowed.scala (+0 -32)
  68. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkManager.scala (+0 -99)
  69. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcess.scala (+0 -12)
  70. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcessBuilder.scala (+0 -299)
  71. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcessBuilderFactory.scala (+0 -31)
  72. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkUserConfigurableOptions.scala (+0 -42)
  73. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionFactory.scala (+0 -64)
  74. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionProcess.scala (+0 -75)
  75. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionProcessFactory.scala (+0 -30)
  76. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionYarn.scala (+0 -90)
  77. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionYarnFactory.scala (+0 -35)
  78. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/CreateBatchRequest.scala (+0 -38)
  79. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/CreateInteractiveRequest.scala (+0 -38)
  80. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionFactory.scala (+0 -150)
  81. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcess.scala (+0 -77)
  82. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcessFactory.scala (+0 -34)
  83. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionYarn.scala (+0 -77)
  84. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionYarnFactory.scala (+0 -43)
  85. apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveWebSession.scala (+0 -244)
  86. apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/SparkProcessBuilderSpec.scala (+0 -43)
  87. apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/batch/BatchProcessSpec.scala (+0 -71)
  88. apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcessSpec.scala (+0 -40)
  89. apps/spark/java/livy-yarn/pom.xml (+0 -66)
  90. apps/spark/java/livy-yarn/src/main/assembly/dist.xml (+0 -51)
  91. apps/spark/java/livy-yarn/src/main/bash/run-am.sh (+0 -21)
  92. apps/spark/java/livy-yarn/src/main/bash/run-class.sh (+0 -40)
  93. apps/spark/java/livy-yarn/src/main/bash/run-job.sh (+0 -19)
  94. apps/spark/java/livy-yarn/src/main/resources/logback.xml (+0 -12)
  95. apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/ApplicationState.scala (+0 -29)
  96. apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/Client.scala (+0 -78)
  97. apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/Job.scala (+0 -105)
  98. apps/spark/java/pom.xml (+0 -551)
  99. maven/pom.xml (+0 -1)

+ 0 - 33
apps/spark/Makefile

@@ -23,36 +23,3 @@ endif
 APP_NAME = spark
 include $(ROOT)/Makefile.sdk
 
-SPARK_ROOT := $(realpath .)
-JAVA_LIB = $(SPARK_ROOT)/java-lib
-
-LIVY_VERSION = 0.2.0-SNAPSHOT
-LIVY_DIR = $(SPARK_ROOT)/java
-LIVY_TARGET_DIR = $(LIVY_DIR)/livy-assembly/target/scala-2.10
-LIVY_ASSEMBLY_JAR = livy-assembly-$(LIVY_VERSION).jar
-LIVY_JAR = $(JAVA_LIB)/livy-assembly.jar
-
-HAVE_JAVA_SRC = $(wildcard $(LIVY_DIR))
-
-compile: $(LIVY_JAR)
-
-ifneq (,$(wildcard $(LIVY_DIR)))
-$(LIVY_JAR): $(shell find $(LIVY_DIR) -type f)
-	mkdir -p $(JAVA_LIB)
-	@echo "--- Building Desktop spark"
-	cd $(LIVY_DIR) && mvn clean install -DskipTests $(MAVEN_OPTIONS)
-	cp $(LIVY_TARGET_DIR)/$(LIVY_ASSEMBLY_JAR) $(LIVY_JAR)
-
-clean::
-	rm -Rf $(JAVA_LIB)
-	cd $(LIVY_DIR) && mvn clean $(MAVEN_OPTIONS) || :
-else
-$(LIVY_JAR):
-	$(error Cannot build spark jars without source)
-endif
-
-BDIST_EXCLUDES += \
-	--exclude=java
-
-SDIST_EXCLUDES += \
-	--exclude=java-lib

+ 3 - 751
apps/spark/java/README.rst

@@ -1,757 +1,9 @@
 Welcome to Livy
 ===============
 
-Livy is an open source REST interface for interacting with Apache Spark from anywhere. It supports executing snippets of code or programs in a Spark context that runs locally or in YARN.
+Livy is an open source REST interface for interacting with Apache Spark from anywhere. 
 
-* Interactive Scala, Python and R shells
-* Batch submissions in Scala, Java, Python
-* Multi users can share the same server (impersonation support)
-* Can be used for submitting jobs from anywhere with REST
-* Does not require any code change to your programs
+Livy was originally created in Hue and then moved to its own project: http://livy.io.
 
-The code is currently incubating in Hue but hopefully will eventually graduate in its top
-project. `Pull requests`_ are welcomed!
+Read more about Spark in Hue here: http://gethue.com/spark/.
 
-.. _Pull requests: https://github.com/cloudera/hue/pulls
-
-
-Quick Start
-===========
-
-Livy is used for powering the Spark snippets of the `Hadoop Notebook`_ of `Hue 3.8`_, which you can see the
-`implementation here`_.
-
-See the API documentation below and some curl examples:
-
-  * `Interactive shells`_
-  * `Batch jobs`_
-  * `Shared RDDs`_
-
-.. _Interactive shells: http://gethue.com/how-to-use-the-livy-spark-rest-job-server-for-interactive-spark/
-.. _Batch jobs: http://gethue.com/how-to-use-the-livy-spark-rest-job-server-api-for-sharing-spark-rdds-and-contexts/
-.. _Shared RDDs: http://gethue.com/how-to-use-the-livy-spark-rest-job-server-api-for-submitting-batch-jar-python-and-streaming-spark-jobs/
-.. _Hadoop Notebook: http://gethue.com/new-notebook-application-for-spark-sql/
-.. _Hue 3.8: http://gethue.com/hue-3-8-with-an-oozie-editor-revamp-better-performances-improved-spark-ui-is-out/
-.. _implementation here: https://github.com/cloudera/hue/blob/master/apps/spark/src/spark/job_server_api.py
-
-
-Prerequisites
-=============
-
-To build/run Livy, you will need:
-
-Debian/Ubuntu:
-  * mvn (from ``maven`` package or maven3 tarball)
-  * openjdk-7-jdk (or Oracle Java7 jdk)
-  * spark 1.4+ from (from `Apache Spark tarball`_)
-  * Python 2.6+
-  * R 3.x
-
-Redhat/CentOS:
-  * mvn (from ``maven`` package or maven3 tarball)
-  * java-1.7.0-openjdk (or Oracle Java7 jdk)
-  * spark 1.4+ (from `Apache Spark tarball`_)
-  * Python 2.6+
-  * R 3.x
-
-MacOS:
-  * Xcode command line tools
-  * Oracle's JDK 1.7+
-  * Maven (Homebrew)
-  * apache-spark 1.5 (Homebrew)
-  * Python 2.6+
-  * R 3.x
-
-
-
-.. _Apache Spark Tarball: https://spark.apache.org/downloads.html
-
-
-Building Livy
-=============
-
-Livy is currently built by the `Hue Build System`_, but it can also be built on
-its own (that is, without any other Hue dependency) with `Apache Maven`_. To
-check out and build Livy, run:
-
-.. code:: shell
-
-    % git clone git@github.com:cloudera/hue.git
-    % cd hue
-    % cd apps/spark/java
-    % mvn -DskipTests clean package
-
-By default Livy is built with the Cloudera distribution of Spark (currently
-based off Spark 1.5.0), but it is simple to support other versions, such as
-Spark 1.4.1, by compiling Livy with:
-
-.. code:: shell
-
-    % mvn -DskipTests -Dspark.version=1.4.1 clean package
-
-.. _Hue Build System: https://github.com/cloudera/hue/#getting-started
-.. _Apache Maven: http://maven.apache.org
-
-
-Running Tests
-=============
-
-In order to run the Livy Tests, first follow the instructions in `Building
-Livy`_. Then run:
-
-.. code:: shell
-
-    % export SPARK_HOME=/usr/lib/spark
-    % export HADOOP_CONF_DIR=/etc/hadoop/conf
-    % mvn test
-
-
-Running Livy
-============
-
-In order to run Livy with local sessions, first export these variables:
-
-.. code:: shell
-
-   % export SPARK_HOME=/usr/lib/spark
-   % export HADOOP_CONF_DIR=/etc/hadoop/conf
-
-Then start the server with:
-
-.. code:: shell
-
-    % ./bin/livy-server
-
-Or with YARN sessions by running:
-
-.. code:: shell
-
-   % env \
-     LIVY_SERVER_JAVA_OPTS="-Dlivy.server.session.factory=yarn" \
-     CLASSPATH=`hadoop classpath` \
-     $LIVY_HOME/bin/livy-server
-
-
-Livy Configuration
-==================
-
-The properties of the server can be modified by copying
-`livy-defaults.conf.template`_ and renaming it ``conf/livy-defaults.conf``. The
-Livy configuration directory can be placed in an alternative directory by defining
-``LIVY_CONF_DIR``.
-
-In particular, ``yarn`` mode (the default is a ``local`` process for development) can be set with:
-
-.. code:: shell
-
-    livy.server.session.factory = yarn
-
-.. _livy-defaults.conf.template: https://github.com/cloudera/hue/blob/master/apps/spark/java/conf/livy-defaults.conf.template
-
-Spark Configuration
-===================
-
-Livy's Spark sessions are configured through two mechanisms. The first is the local
-`Spark configuration`_. Create or modify the Spark configuration files as directed, and point
-Livy at this directory with:
-
-.. code:: shell
-
-    % env \
-      SPARK_CONF_DIR=... \
-      $LIVY_HOME/bin/livy-server
-
-The second mechanism is whitelisting `Spark configuration`_ options that can be set by the user
-creating a Spark session. This list can be created by copying
-`spark-user-configurable-options.template`_ to ``spark-user-configurable-options`` and listing
-the options the user may specify in the ``conf`` session field.
-
-*warning*: Be careful before enabling options. Some options may allow a malicious user to
-read files that are accessible by the Livy Server process user. Among other things, this might
-allow a user to access the Livy TLS private key, Kerberos tickets, or more.
-
-.. _Spark configuration: https://spark.apache.org/docs/latest/configuration.html
-.. _spark-user-configurable-options.template: https://github.com/cloudera/hue/blob/master/apps/spark/java/conf/spark-user-configurable-options.template
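
For illustration of the whitelist mechanism described above, a minimal sketch of a client passing a ``conf`` option, assuming a Livy server at the documented default ``localhost:8998`` and that ``spark.executor.memory`` has been added to ``spark-user-configurable-options``:

.. code:: python

    import json, requests

    host = 'http://localhost:8998'   # assumption: local server, default port
    headers = {'Content-Type': 'application/json'}

    # Each key in 'conf' must appear in the whitelist, or the server
    # rejects the session request.
    data = {'kind': 'spark', 'conf': {'spark.executor.memory': '2G'}}
    r = requests.post(host + '/sessions', data=json.dumps(data), headers=headers)
    print(r.status_code, r.json())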
-
-
-Spark Example
-=============
-
-Now let's see it in action by interacting with it in Python with the `Requests`_
-library. By default Livy runs on port 8998 (which can be changed with the
-``livy_server_port`` config option). We'll start off with a Spark session that
-takes Scala code:
-
-.. code:: shell
-    % sudo pip install requests
-
-.. code:: python
-
-    >>> import json, pprint, requests, textwrap
-    >>> host = 'http://localhost:8998'
-    >>> data = {'kind': 'spark'}
-    >>> headers = {'Content-Type': 'application/json'}
-    >>> r = requests.post(host + '/sessions', data=json.dumps(data), headers=headers)
-    >>> r.json()
-    {u'state': u'starting', u'id': 0, u'kind': u'spark'}
-
-Once the session has completed starting up, it transitions to the idle state:
-
-.. code:: python
-
-    >>> session_url = host + r.headers['location']
-    >>> r = requests.get(session_url, headers=headers)
-    >>> r.json()
-    {u'state': u'idle', u'id': 0, u'kind': u'spark'}
-
-Now we can execute Scala by passing in a simple JSON command:
-
-.. code:: python
-
-    >>> statements_url = session_url + '/statements'
-    >>> data = {'code': '1 + 1'}
-    >>> r = requests.post(statements_url, data=json.dumps(data), headers=headers)
-    >>> r.json()
-    {u'output': None, u'state': u'running', u'id': 0}
-
-If a statement takes longer than a few milliseconds to execute, Livy returns
-early and provides a URL that can be polled until it is complete:
-
-.. code:: python
-
-    >>> statement_url = host + r.headers['location']
-    >>> r = requests.get(statement_url, headers=headers)
-    >>> pprint.pprint(r.json())
-    [{u'id': 0,
-      u'output': {u'data': {u'text/plain': u'res0: Int = 2'},
-                  u'execution_count': 0,
-                  u'status': u'ok'},
-      u'state': u'available'}]
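
A client usually wraps this in a polling loop. A minimal sketch, reusing ``statement_url`` and ``headers`` from above; the one-second interval is an arbitrary choice:

.. code:: python

    import time

    # Poll until the statement leaves the 'running' state.
    while True:
        r = requests.get(statement_url, headers=headers)
        body = r.json()
        # The docs above show the polled response as a one-element list;
        # unwrap it if so.
        statement = body[0] if isinstance(body, list) else body
        if statement['state'] == 'available':
            break
        time.sleep(1)
    print(statement['output']['data']['text/plain'])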
-
-That was a pretty simple example. More interesting is using Spark to estimate
-Pi. This example is from the `Spark Examples`_:
-
-.. code:: python
-
-    >>> data = {
-    ...   'code': textwrap.dedent("""\
-    ...      val NUM_SAMPLES = 100000;
-    ...      val count = sc.parallelize(1 to NUM_SAMPLES).map { i =>
-    ...        val x = Math.random();
-    ...        val y = Math.random();
-    ...        if (x*x + y*y < 1) 1 else 0
-    ...      }.reduce(_ + _);
-    ...      println(\"Pi is roughly \" + 4.0 * count / NUM_SAMPLES)
-    ...      """)
-    ... }
-    >>> r = requests.post(statements_url, data=json.dumps(data), headers=headers)
-    >>> pprint.pprint(r.json())
-    {u'id': 1,
-     u'output': {u'data': {u'text/plain': u'Pi is roughly 3.14004\nNUM_SAMPLES: Int = 100000\ncount: Int = 78501'},
-                 u'execution_count': 1,
-                 u'status': u'ok'},
-     u'state': u'available'}
-
-Finally, let's close our session:
-
-.. code:: python
-
-    >>> session_url = 'http://localhost:8998/sessions/0'
-    >>> requests.delete(session_url, headers=headers)
-    <Response [204]>
-
-.. _Requests: http://docs.python-requests.org/en/latest/
-.. _Spark Examples: https://spark.apache.org/examples.html
-
-
-PySpark Example
-===============
-
-PySpark has the exact same API, just with a different initial command:
-
-.. code:: python
-
-    >>> data = {'kind': 'pyspark'}
-    >>> r = requests.post(host + '/sessions', data=json.dumps(data), headers=headers)
-    >>> r.json()
-    {u'id': 1, u'state': u'idle'}
-
-The Pi example from before can then be run as:
-
-.. code:: python
-
-    >>> data = {
-    ...   'code': textwrap.dedent("""\
-    ...     import random
-    ...     NUM_SAMPLES = 100000
-    ...     def sample(p):
-    ...       x, y = random.random(), random.random()
-    ...       return 1 if x*x + y*y < 1 else 0
-    ...
-    ...     count = sc.parallelize(xrange(0, NUM_SAMPLES)).map(sample) \
-    ...               .reduce(lambda a, b: a + b)
-    ...     print "Pi is roughly %f" % (4.0 * count / NUM_SAMPLES)
-    ...     """)
-    ... }
-    >>> r = requests.post(statements_url, data=json.dumps(data), headers=headers)
-    >>> pprint.pprint(r.json())
-    {u'id': 12,
-     u'output': {u'data': {u'text/plain': u'Pi is roughly 3.136000'},
-                 u'execution_count': 12,
-                 u'status': u'ok'},
-     u'state': u'running'}
-
-
-SparkR Example
-==============
-
-SparkR also has the same API:
-
-.. code:: python
-
-    >>> data = {'kind': 'sparkR'}
-    >>> r = requests.post(host + '/sessions', data=json.dumps(data), headers=headers)
-    >>> r.json()
-    {u'id': 1, u'state': u'idle'}
-
-The Pi example from before can then be run as:
-
-.. code:: python
-
-    >>> data = {
-    ...   'code': textwrap.dedent("""\
-    ...      n <- 100000
-    ...      piFunc <- function(elem) {
-    ...        rands <- runif(n = 2, min = -1, max = 1)
-    ...        val <- ifelse((rands[1]^2 + rands[2]^2) < 1, 1.0, 0.0)
-    ...        val
-    ...      }
-    ...      piFuncVec <- function(elems) {
-    ...        message(length(elems))
-    ...        rands1 <- runif(n = length(elems), min = -1, max = 1)
-    ...        rands2 <- runif(n = length(elems), min = -1, max = 1)
-    ...        val <- ifelse((rands1^2 + rands2^2) < 1, 1.0, 0.0)
-    ...        sum(val)
-    ...      }
-    ...      slices <- 2  # number of partitions; was undefined in the original example
-    ...      rdd <- parallelize(sc, 1:n, slices)
-    ...      count <- reduce(lapplyPartition(rdd, piFuncVec), sum)
-    ...      cat("Pi is roughly", 4.0 * count / n, "\n")
-    ...     """)
-    ... }
-    >>> r = requests.post(statements_url, data=json.dumps(data), headers=headers)
-    >>> pprint.pprint(r.json())
-    {u'id': 12,
-     u'output': {u'data': {u'text/plain': u'Pi is roughly 3.136000'},
-                 u'execution_count': 12,
-                 u'status': u'ok'},
-     u'state': u'running'}
-
-
-Community
-=========
-
- * User group: http://groups.google.com/a/cloudera.org/group/hue-user
- * Umbrella Jira: https://issues.cloudera.org/browse/HUE-2588
- * Pull requests: https://github.com/cloudera/hue/pulls
-
-
-REST API
-========
-
-GET /sessions
--------------
-
-Returns all the active interactive sessions.
-
-Response Body
-^^^^^^^^^^^^^
-
-+----------+-----------------+------+
-| name     | description     | type |
-+==========+=================+======+
-| sessions | `session`_ list | list |
-+----------+-----------------+------+
-
-
-POST /sessions
---------------
-
-Creates a new interactive Scala, Python, or R shell in the cluster.
-
-Request Body
-^^^^^^^^^^^^
-
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| name              | description                                                                    | type            |
-+===================+================================================================================+=================+
-| kind              | The session kind (required)                                                    | `session kind`_ |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| proxyUser         | The user to impersonate that will run this session (e.g. bob)                  | string          |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| jars              | Files to be placed on the java classpath                                       | list of paths   |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| pyFiles           | Files to be placed on the PYTHONPATH                                           | list of paths   |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| files             | Files to be placed in executor working directory                               | list of paths   |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| driverMemory      | Memory for driver (e.g. 1000M, 2G)                                             | string          |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| driverCores       | Number of cores used by driver (YARN mode only)                                | int             |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| executorMemory    | Memory for executor (e.g. 1000M, 2G)                                           | string          |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| executorCores     | Number of cores used by executor                                               | int             |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| totalExecutorCores| number of cluster cores used by executor (Standalone mode only)                | int             |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| numExecutors      | Number of executors (YARN mode only)                                           | int             |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| archives          | Archives to be uncompressed in the executor working directory (YARN mode only) | list of paths   |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| queue             | The YARN queue to submit to (YARN mode only)                                   | string          |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| name              | Name of the application                                                        | string          |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-| conf              | Spark configuration property                                                   | Map of key=val  |
-+-------------------+--------------------------------------------------------------------------------+-----------------+
-
-
-Response Body
-^^^^^^^^^^^^^
-
-The created `Session`_.
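
As a sketch, a request combining several of the optional fields above; all values are illustrative only:

.. code:: python

    import json, requests

    host = 'http://localhost:8998'
    headers = {'Content-Type': 'application/json'}

    data = {
        'kind': 'pyspark',        # required session kind
        'proxyUser': 'bob',       # run the session as this user
        'executorMemory': '2G',
        'numExecutors': 4,        # YARN mode only
    }
    r = requests.post(host + '/sessions', data=json.dumps(data), headers=headers)
    session = r.json()            # the created Session object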
-
-
-GET /sessions/{sessionId}
--------------------------
-
-Returns the session information.
-
-Response
-^^^^^^^^
-
-The `Session`_.
-
-
-DELETE /sessions/{sessionId}
-----------------------------
-
-Kill the `Session`_ job.
-
-
-GET /sessions/{sessionId}/logs
-------------------------------
-
-Get the log lines from this session.
-
-Request Parameters
-^^^^^^^^^^^^^^^^^^
-
-+------+-----------------------------+------+
-| name | description                 | type |
-+======+=============================+======+
-| from | offset                      | int  |
-+------+-----------------------------+------+
-| size | number of lines to return  | int  |
-+------+-----------------------------+------+
-
-Response Body
-^^^^^^^^^^^^^
-
-+------+-----------------------+-----------------+
-| name | description           | type            |
-+======+=======================+=================+
-| id   | The session id        | int             |
-+------+-----------------------+-----------------+
-| from | offset                | int             |
-+------+-----------------------+-----------------+
-| size | total amount of lines | int             |
-+------+-----------------------+-----------------+
-| log  | The log lines         | list of strings |
-+------+-----------------------+-----------------+
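
A sketch of paging through these logs with the ``from``/``size`` parameters; the session id and page size are illustrative:

.. code:: python

    import requests

    host = 'http://localhost:8998'
    session_id = 0                # illustrative session id

    # Fetch the first 100 log lines; repeat with a larger 'from' to page.
    r = requests.get('%s/sessions/%d/logs' % (host, session_id),
                     params={'from': 0, 'size': 100})
    for line in r.json()['log']:
        print(line)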
-
-
-GET /sessions/{sessionId}/statements
-------------------------------------
-
-Return all the statements in a session.
-
-Response Body
-^^^^^^^^^^^^^
-
-+------------+-------------------+------+
-| name       | description       | type |
-+============+===================+======+
-| statements | `statement`_ list | list |
-+------------+-------------------+------+
-
-
-POST /sessions/{sessionId}/statements
--------------------------------------
-
-Execute a statement in a session.
-
-Request Body
-^^^^^^^^^^^^
-
-+------+---------------------+--------+
-| name | description         | type   |
-+======+=====================+========+
-| code | The code to execute | string |
-+------+---------------------+--------+
-
-Response Body
-^^^^^^^^^^^^^
-
-The `statement`_ object.
-
-
-GET /batches
-------------
-
-Return all the active batch jobs.
-
-Response Body
-^^^^^^^^^^^^^
-
-+---------+---------------+------+
-| name    | description   | type |
-+=========+===============+======+
-| batches | `batch`_ list | list |
-+---------+---------------+------+
-
-
-POST /batches
--------------
-
-Request Body
-^^^^^^^^^^^^
-
-+----------------+---------------------------------------------------+-----------------+
-| name           | description                                       | type            |
-+================+===================================================+=================+
-| proxyUser      | The user to impersonate that will execute the job | string          |
-+----------------+---------------------------------------------------+-----------------+
-| file           | Archive holding the file                          | path (required) |
-+----------------+---------------------------------------------------+-----------------+
-| args           | Command line arguments                            | list of strings |
-+----------------+---------------------------------------------------+-----------------+
-| className      | Application's java/spark main class               | string          |
-+----------------+---------------------------------------------------+-----------------+
-| jars           | Files to be placed on the java classpath          | list of paths   |
-+----------------+---------------------------------------------------+-----------------+
-| pyFiles        | Files to be placed on the PYTHONPATH              | list of paths   |
-+----------------+---------------------------------------------------+-----------------+
-| files          | Files to be placed in executor working directory  | list of paths   |
-+----------------+---------------------------------------------------+-----------------+
-| driverMemory   | Memory for driver (e.g. 1000M, 2G)                | string          |
-+----------------+---------------------------------------------------+-----------------+
-| driverCores    | Number of cores used by driver                    | int             |
-+----------------+---------------------------------------------------+-----------------+
-| executorMemory | Memory for executor (e.g. 1000M, 2G)              | string          |
-+----------------+---------------------------------------------------+-----------------+
-| executorCores  | Number of cores used by executor                  | int             |
-+----------------+---------------------------------------------------+-----------------+
-| numExecutors   | Number of executors                               | int             |
-+----------------+---------------------------------------------------+-----------------+
-| archives       | Archives to be uncompressed (YARN mode only)      | list of paths   |
-+----------------+---------------------------------------------------+-----------------+
-| queue          | The YARN queue to submit to (YARN mode only)      | string          |
-+----------------+---------------------------------------------------+-----------------+
-| name           | Name of the application                           | string          |
-+----------------+---------------------------------------------------+-----------------+
-| conf           | Spark configuration property                      | Map of key=val  |
-+----------------+---------------------------------------------------+-----------------+
-
-
-Response Body
-^^^^^^^^^^^^^
-
-The created `Batch`_ object.
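
For illustration, a batch submission using a few of the fields above; the jar path and class name are hypothetical:

.. code:: python

    import json, requests

    host = 'http://localhost:8998'
    headers = {'Content-Type': 'application/json'}

    data = {
        'file': 'hdfs:///user/hue/share/example.jar',   # hypothetical jar (only required field)
        'className': 'com.example.SparkApp',            # hypothetical main class
        'args': ['100'],
        'executorMemory': '2G',
    }
    r = requests.post(host + '/batches', data=json.dumps(data), headers=headers)
    batch = r.json()              # the created Batch object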
-
-
-GET /batches/{batchId}
-----------------------
-
-Request Parameters
-^^^^^^^^^^^^^^^^^^
-
-+------+-----------------------------+------+
-| name | description                 | type |
-+======+=============================+======+
-| from | offset                      | int  |
-+------+-----------------------------+------+
-| size | number of lines to return  | int  |
-+------+-----------------------------+------+
-
-Response Body
-^^^^^^^^^^^^^
-
-+-------+-----------------------------+-----------------+
-| name  | description                 | type            |
-+=======+=============================+=================+
-| id    | The batch id                | int             |
-+-------+-----------------------------+-----------------+
-| state | The state of the batch      | `batch`_ state  |
-+-------+-----------------------------+-----------------+
-| log   | The output of the batch job | list of strings |
-+-------+-----------------------------+-----------------+
-
-
-DELETE /batches/{batchId}
--------------------------
-
-Kill the `Batch`_ job.
-
-
-GET /batches/{batchId}/logs
----------------------------
-
-Get the log lines from this batch.
-
-Request Parameters
-^^^^^^^^^^^^^^^^^^
-
-+------+-----------------------------+------+
-| name | description                 | type |
-+======+=============================+======+
-| from | offset                      | int  |
-+------+-----------------------------+------+
-| size | number of lines to return  | int  |
-+------+-----------------------------+------+
-
-Response Body
-^^^^^^^^^^^^^
-
-+------+-----------------------+-----------------+
-| name | description           | type            |
-+======+=======================+=================+
-| id   | The batch id          | int             |
-+------+-----------------------+-----------------+
-| from | offset                | int             |
-+------+-----------------------+-----------------+
-| size | total amount of lines | int             |
-+------+-----------------------+-----------------+
-| log  | The log lines         | list of strings |
-+------+-----------------------+-----------------+
-
-
-REST Objects
-============
-
-Session
--------
-
-A session represents an interactive shell.
-
-+----------------+--------------------------------------------------+----------------------------+
-| name           | description                                      | type                       |
-+================+==================================================+============================+
-| id             | The session id                                   | int                        |
-+----------------+--------------------------------------------------+----------------------------+
-| kind           | session kind (spark, pyspark, or sparkr)         | `session kind`_ (required) |
-+----------------+--------------------------------------------------+----------------------------+
-| log            | The log lines                                    | list of strings            |
-+----------------+--------------------------------------------------+----------------------------+
-| state          | The session state                                | string                     |
-+----------------+--------------------------------------------------+----------------------------+
-
-
-Session State
-^^^^^^^^^^^^^
-
-+-------------+----------------------------------+
-| name        | description                      |
-+=============+==================================+
-| not_started | session has not been started     |
-+-------------+----------------------------------+
-| starting    | session is starting              |
-+-------------+----------------------------------+
-| idle        | session is waiting for input     |
-+-------------+----------------------------------+
-| busy        | session is executing a statement |
-+-------------+----------------------------------+
-| error       | session errored out              |
-+-------------+----------------------------------+
-| dead        | session has exited               |
-+-------------+----------------------------------+
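
A sketch of waiting for a new session to leave the ``starting`` state, using the lifecycle above; the poll interval and host are assumptions:

.. code:: python

    import time, requests

    host = 'http://localhost:8998'

    def wait_for_session(session_id, interval=1):
        # Walk the lifecycle above: starting -> idle (or error/dead).
        while True:
            state = requests.get('%s/sessions/%d' % (host, session_id)).json()['state']
            if state in ('idle', 'error', 'dead'):
                return state
            time.sleep(interval)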
-
-Session Kind
-^^^^^^^^^^^^
-
-+---------+----------------------------------+
-| name    | description                      |
-+=========+==================================+
-| spark   | interactive scala/spark session  |
-+---------+----------------------------------+
-| pyspark | interactive python/spark session |
-+---------+----------------------------------+
-| sparkr  | interactive R/spark session      |
-+---------+----------------------------------+
-
-Statement
----------
-
-A statement represents the result of an executed statement.
-
-+--------+----------------------+---------------------+
-| name   | description          | type                |
-+========+======================+=====================+
-| id     | The statement id     | integer             |
-+--------+----------------------+---------------------+
-| state  | The execution state  | `statement state`_  |
-+--------+----------------------+---------------------+
-| output | The execution output | `statement output`_ |
-+--------+----------------------+---------------------+
-
-Statement State
-^^^^^^^^^^^^^^^
-
-+-----------+----------------------------------+
-| name      | description                      |
-+===========+==================================+
-| running   | Statement is currently executing |
-+-----------+----------------------------------+
-| available | Statement has a ready response   |
-+-----------+----------------------------------+
-| error     | Statement failed                 |
-+-----------+----------------------------------+
-
-Statement Output
-^^^^^^^^^^^^^^^^
-
-+-----------------+-------------------+----------------------------------+
-| name            | description       | type                             |
-+=================+===================+==================================+
-| status          | execution status  | string                           |
-+-----------------+-------------------+----------------------------------+
-| execution_count | a monotonically   | integer                          |
-|                 | increasing number |                                  |
-+-----------------+-------------------+----------------------------------+
-| data            | statement output  | an object mapping a mime type to |
-|                 |                   | the result. If the mime type is  |
-|                 |                   | ``application/json``, the value  |
-|                 |                   | will be a JSON value             |
-+-----------------+-------------------+----------------------------------+
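
A sketch of consuming this structure, dispatching on the mime-type keys in ``data`` as described above:

.. code:: python

    # Sketch: pick a value out of a statement 'output' object.
    def render_output(output):
        if output['status'] != 'ok':
            raise RuntimeError('statement failed: %r' % output)
        data = output['data']
        if 'application/json' in data:
            return data['application/json']   # already a parsed JSON value
        return data.get('text/plain', '')

    print(render_output({'status': 'ok',
                         'execution_count': 0,
                         'data': {'text/plain': 'res0: Int = 2'}}))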
-
-Batch
------
-
-+----------------+--------------------------------------------------+----------------------------+
-| name           | description                                      | type                       |
-+================+==================================================+============================+
-| id             | The session id                                   | int                        |
-+----------------+--------------------------------------------------+----------------------------+
-| kind           | session kind (spark, pyspark, or sparkr)         | `session kind`_ (required) |
-+----------------+--------------------------------------------------+----------------------------+
-| log            | The log lines                                    | list of strings            |
-+----------------+--------------------------------------------------+----------------------------+
-| state          | The session state                                | string                     |
-+----------------+--------------------------------------------------+----------------------------+
-
-
-License
-=======
-
-Apache License, Version 2.0
-http://www.apache.org/licenses/LICENSE-2.0

+ 0 - 38
apps/spark/java/bin/livy-repl

@@ -1,38 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Runs spark shell server.
-
-set -e
-
-export LIVY_HOME=$(cd $(dirname $0)/.. && pwd)
-cd $LIVY_HOME
-
-if [ "$SPARK_HOME" ]; then
-	SPARK_SUBMIT="$SPARK_HOME/bin/spark-submit"
-else
-	SPARK_SUBMIT=spark-submit
-fi
-
-source ./bin/setup-classpath
-
-exec $SPARK_SUBMIT \
-	--driver-java-options "$LIVY_REPL_JAVA_OPTS" \
-	--class com.cloudera.hue.livy.repl.Main \
-	"$ASSEMBLY_JAR" \
-	"$@"

+ 0 - 31
apps/spark/java/bin/livy-server

@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Runs Livy server.
-
-set -e
-
-export LIVY_HOME=$(cd $(dirname $0)/.. && pwd)
-cd $LIVY_HOME
-
-source ./bin/setup-classpath
-
-exec java \
-	$LIVY_SERVER_JAVA_OPTS \
-	-cp "$CLASSPATH" \
-	com.cloudera.hue.livy.server.Main "$@"

+ 0 - 29
apps/spark/java/bin/livy-yarn-client

@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Runs Livy server.
-
-set -e
-
-export LIVY_HOME=$(cd $(dirname $0)/.. && pwd)
-
-CLASSPATH="$LIVY_HOME/livy-server/target/lib/*:$LIVY_HOME/livy-server/target/livy-server-3.7.0-SNAPSHOT.jar:`hadoop classpath`:$CLASSPATH"
-
-exec java \
-	-cp "$CLASSPATH" \
-	com.cloudera.hue.livy.yarn.Main "$@"

+ 0 - 24
apps/spark/java/bin/livy-yarn-server

@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-export LIVY_HOME=$(cd $(dirname $0)/.. && pwd)
-export CLASSPATH=`hadoop classpath`
-
-exec $LIVY_HOME/bin/livy-server yarn "$@"

+ 0 - 31
apps/spark/java/bin/setup-classpath

@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-ASSEMBLY_DIR="$LIVY_HOME/livy-assembly/target/scala-2.10"
-
-for f in $ASSEMBLY_DIR/livy-assembly-*.jar; do
-	ASSEMBLY_JAR="$f"
-done
-
-if [[ ! -f "$ASSEMBLY_JAR" ]]; then
-	echo "failed to find $ASSEMBLY_JAR" 1>&2
-	echo "you need to build Livy before running this program" 1>&2
-	exit 1
-fi
-
-CLASSPATH="$ASSEMBLY_JAR:$CLASSPATH"

+ 0 - 40
apps/spark/java/conf/livy-defaults.conf.template

@@ -1,40 +0,0 @@
-# Specifies Livy's environment. May either be "production" or "development". In "development"
-# mode, Livy will enable debugging options, such as reporting possible routes on a 404.
-# defaults to development
-## livy.environment = development
-
-# Use this keystore for the SSL certificate and key.
-## livy.keystore =
-
-# Specify the keystore password.
-## livy.keystore.password =
-
-# What host address to start the server on. Defaults to 0.0.0.0. If using the
-# `yarn` factory mode, this address must be accessible from the YARN nodes.
-## livy.server.host = 0.0.0.0
-
-# What port to start the server on. Defaults to 8998.
-## livy.server.port = 8998
-
-# What session factory to use. The options are `process` and `yarn`.
-## livy.server.session.factory = process
-
-# What spark-submit executable path to use to submit spark applications. Defaults to
-# `spark-submit`.
-## livy.server.spark-submit = spark-submit
-
-# Time in milliseconds that Livy will wait before timing out an idle session.
-# Default is one hour.
-## livy.server.session.timeout = 3600000
-
-# Location to find the livy assembly. If not specified, livy will determine the
-# assembly from the local jarfile. If using `yarn` sessions, this may be on HDFS.
-## livy.yarn.jar = hdfs://localhost:8020/user/hue/share/lib/livy-assembly.jar
-
-# If livy should use proxy users when submitting a job.
-## livy.impersonation.enabled = true
-
-# Allow spark to reference files found in this directory. This may either be a local directory
-# or an hdfs:// directory. Be careful setting this option, as it may allow users to access
-# any file the Livy process can access, such as Livy's SSL certificate.
-## livy.files.dir = /var/run/livy/uploaded-files

+ 0 - 124
apps/spark/java/conf/spark-user-configurable-options.template

@@ -1,124 +0,0 @@
-// Application properties
-spark.app.name
-spark.driver.cores
-spark.driver.maxResultSize
-spark.driver.memory
-spark.executor.memory
-spark.extraListeners
-// spark.local.dir
-spark.logConf
-// spark.master
-
-// Runtime Environment
-spark.driver.extraClassPath
-// spark.driver.extraJavaOptions
-spark.driver.extraLibraryPath
-spark.driver.userClassPathFirst
-spark.executor.extraClassPath
-// spark.executor.extraJavaOptions
-spark.executor.extraLibraryPath
-spark.executor.instances
-spark.executor.logs.rolling.maxRetainedFiles
-spark.executor.logs.rolling.maxSize
-spark.executor.logs.rolling.strategy
-spark.executor.logs.rolling.time.interval
-spark.executor.userClassPathFirst
-// spark.executorEnv.[EnvironmentVariableName]
-spark.python.profile
-spark.python.profile.dump
-spark.python.worker.memory
-spark.python.worker.reuse
-spark.reducer.maxSizeInFlight
-
-// Shuffle Behavior
-spark.shuffle.blockTransferService
-spark.shuffle.compress
-spark.shuffle.consolidateFiles
-spark.shuffle.file.buffer
-spark.shuffle.io.maxRetries
-spark.shuffle.io.numConnectionsPerPeer
-spark.shuffle.io.preferDirectBufs
-spark.shuffle.io.retryWait
-spark.shuffle.manager
-spark.shuffle.memoryFraction
-spark.shuffle.service.enabled
-spark.shuffle.service.port
-spark.shuffle.sort.bypassMergeThreshold
-spark.shuffle.spill
-spark.shuffle.spill.compress
-
-// Spark UI
-spark.eventLog.compress
-spark.eventLog.dir
-spark.eventLog.enabled
-spark.ui.killEnabled
-spark.ui.port
-spark.ui.retainedJobs
-spark.ui.retainedStages
-spark.worker.ui.retainedExecutors
-spark.worker.ui.retainedDrivers
-
-// Compression and Serialization
-spark.broadcast.compress
-spark.closure.serializer
-spark.io.compression.codec
-spark.io.compression.lz4.blockSize
-spark.io.compression.snappy.blockSize
-spark.kryo.classesToRegister
-spark.kryo.referenceTracking
-spark.kryo.registrationRequired
-spark.kryo.registrator
-spark.kryoserializer.buffer.max
-spark.kryoserializer.buffer
-spark.rdd.compress
-spark.serializer
-spark.serializer.objectStreamReset
-
-// Execution Behavior
-spark.broadcast.blockSize
-spark.broadcast.factory
-spark.cleaner.ttl
-spark.executor.cores
-spark.totalExecutor.cores
-spark.default.parallelism
-spark.executor.heartbeatInterval
-spark.files.fetchTimeout
-spark.files.useFetchCache
-spark.files.overwrite
-spark.hadoop.cloneConf
-spark.hadoop.validateOutputSpecs
-spark.storage.memoryFraction
-spark.storage.memoryMapThreshold
-spark.storage.unrollFraction
-spark.externalBlockStore.blockManager
-spark.externalBlockStore.baseDir
-spark.externalBlockStore.url
-
-// Networking
-
-// Dynamic Allocation
-spark.dynamicAllocation.enabled
-spark.dynamicAllocation.executorIdleTimeout
-spark.dynamicAllocation.cachedExecutorIdleTimeout
-spark.dynamicAllocation.initialExecutors
-spark.dynamicAllocation.maxExecutors
-spark.dynamicAllocation.minExecutors
-spark.dynamicAllocation.schedulerBacklogTimeout
-spark.dynamicAllocation.sustainedSchedulerBacklogTimeout
-
-// Security
-
-// Encryption
-
-// Spark Streaming
-spark.streaming.backpressure.enabled
-spark.streaming.blockInterval
-spark.streaming.receiver.maxRate
-spark.streaming.receiver.writeAheadLog.enable
-spark.streaming.unpersist
-spark.streaming.kafka.maxRatePerPartition
-spark.streaming.kafka.maxRetries
-spark.streaming.ui.retainedBatches
-
-// SparkR
-spark.r.numRBackendThreads

+ 0 - 37
apps/spark/java/dist.xml

@@ -1,37 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
-  license agreements. See the NOTICE file distributed with this work for additional
-  information regarding copyright ownership. The ASF licenses this file to
-  you under the Apache License, Version 2.0 (the "License"); you may not use
-  this file except in compliance with the License. You may obtain a copy of
-  the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
-  by applicable law or agreed to in writing, software distributed under the
-  License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-  OF ANY KIND, either express or implied. See the License for the specific
-  language governing permissions and limitations under the License. -->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <id>dist</id>
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <includeBaseDirectory>false</includeBaseDirectory>
-
-    <dependencySets>
-        <dependencySet>
-            <outputDirectory>lib</outputDirectory>
-            <useProjectArtifact>false</useProjectArtifact>
-        </dependencySet>
-    </dependencySets>
-
-    <fileSets>
-        <fileSet>
-            <directory>${project.build.directory}</directory>
-            <outputDirectory>/lib</outputDirectory>
-            <includes>
-                <include>*.jar</include>
-            </includes>
-        </fileSet>
-    </fileSets>
-</assembly>

+ 0 - 118
apps/spark/java/livy-assembly/pom.xml

@@ -1,118 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <version>0.2.0-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>livy-assembly_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>pom</packaging>
-
-    <properties>
-        <livy.jar.dir>scala-${scala.binary.version}</livy.jar.dir>
-        <livy.jar.basename>livy-assembly-${project.version}.jar</livy.jar.basename>
-        <livy.jar>${project.build.directory}/${livy.jar.dir}/${livy.jar.basename}</livy.jar>
-    </properties>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-repl_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-yarn_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-server_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-deploy-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-install-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-
-                <configuration>
-                    <shadedArtifactAttached>false</shadedArtifactAttached>
-                    <outputFile>${livy.jar}</outputFile>
-
-                    <artifactSet>
-                        <includes>
-                            <include>*:*</include>
-                        </includes>
-                    </artifactSet>
-
-                    <filters>
-                        <filter>
-                            <artifact>*:*</artifact>
-                            <excludes>
-                                <exclude>META-INF/*.DSA</exclude>
-                                <exclude>META-INF/*.RSA</exclude>
-                                <exclude>META-INF/*.SF</exclude>
-                                <exclude>org/datanucleus/**</exclude>
-                            </excludes>
-                        </filter>
-                    </filters>
-
-                    <relocations>
-                        <relocation>
-                            <pattern>org.eclipse.jetty</pattern>
-                            <shadedPattern>com.cloudera.hue.jetty</shadedPattern>
-                            <excludes>
-                                <exclude>com/google/common/base/Absent*</exclude>
-                                <exclude>com/google/common/base/Function*</exclude>
-                                <exclude>com/google/common/base/Optional*</exclude>
-                                <exclude>com/google/common/base/Present*</exclude>
-                                <exclude>com/google/common/base/Supplier</exclude>
-                            </excludes>
-                        </relocation>
-                    </relocations>
-                </configuration>
-
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-
-        </plugins>
-    </build>
-
-</project>

+ 0 - 99
apps/spark/java/livy-core/pom.xml

@@ -1,99 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <version>0.2.0-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>livy-core_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>ch.qos.logback</groupId>
-            <artifactId>logback-access</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-util</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra_${scala.binary.version}</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.typesafe.akka</groupId>
-                    <artifactId>akka-actor_${scala.binary.version}</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>test-jar</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-
-</project>

+ 0 - 47
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LineBufferedProcess.scala

@@ -1,47 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-class LineBufferedProcess(process: Process) extends Logging {
-
-  private[this] val _inputStream = new LineBufferedStream(process.getInputStream)
-  private[this] val _errorStream = new LineBufferedStream(process.getErrorStream)
-
-  def inputLines: IndexedSeq[String] = _inputStream.lines
-  def errorLines: IndexedSeq[String] = _errorStream.lines
-
-  def inputIterator: Iterator[String] = _inputStream.iterator
-  def errorIterator: Iterator[String] = _errorStream.iterator
-
-  def destroy(): Unit = {
-    process.destroy()
-  }
-
-  /** Returns whether the process is still actively running. */
-  def isAlive: Boolean = Utils.isProcessAlive(process)
-
-  def exitValue(): Int = {
-    process.exitValue()
-  }
-
-  def waitFor(): Int = {
-    process.waitFor()
-  }
-}
-
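
A minimal usage sketch for the wrapper above: hypothetical caller code, assuming only the API shown here.

    import com.cloudera.hue.livy.LineBufferedProcess

    // Hypothetical: tail a child process without blocking the spawner.
    val process = new LineBufferedProcess(new ProcessBuilder("echo", "hello").start())

    // inputIterator blocks for new lines until stdout is closed;
    // inputLines is a non-blocking snapshot of everything read so far.
    process.inputIterator.foreach(println)
    process.waitFor()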

+ 0 - 94
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LineBufferedStream.scala

@@ -1,94 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import java.io.InputStream
-import java.util.concurrent.locks.ReentrantLock
-
-import scala.io.Source
-
-class LineBufferedStream(inputStream: InputStream) extends Logging {
-
-  private[this] var _lines: IndexedSeq[String] = IndexedSeq()
-
-  private[this] val _lock = new ReentrantLock()
-  private[this] val _condition = _lock.newCondition()
-  private[this] var _finished = false
-
-  private val thread = new Thread {
-    override def run() = {
-      val lines = Source.fromInputStream(inputStream).getLines()
-      for (line <- lines) {
-        _lock.lock()
-        try {
-          trace("stdout: ", line)
-          _lines = _lines :+ line
-          _condition.signalAll()
-        } finally {
-          _lock.unlock()
-        }
-      }
-
-      _lock.lock()
-      try {
-        _finished = true
-        _condition.signalAll()
-      } finally {
-        _lock.unlock()
-      }
-    }
-  }
-  thread.setDaemon(true)
-  thread.start()
-
-  def lines: IndexedSeq[String] = _lines
-
-  def iterator: Iterator[String] = {
-    new LinesIterator
-  }
-
-  private class LinesIterator extends Iterator[String] {
-    private[this] var index = 0
-
-    override def hasNext: Boolean = {
-      if (index < _lines.length) {
-        true
-      } else {
-        // Otherwise we might still have more data: wait until either a new
-        // line arrives or the stream finishes. Re-check in a loop to guard
-        // against spurious wakeups and against lines appended between the
-        // unlocked length check above and acquiring the lock.
-        _lock.lock()
-        try {
-          while (index >= _lines.length && !_finished) {
-            _condition.await()
-          }
-          index < _lines.length
-        } finally {
-          _lock.unlock()
-        }
-      }
-    }
-
-    override def next(): String = {
-      val line = _lines(index)
-      index += 1
-      line
-    }
-  }
-}

+ 0 - 120
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/LivyConf.scala

@@ -1,120 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import java.io.File
-import java.util.concurrent.ConcurrentHashMap
-
-import scala.collection.JavaConverters._
-
-object LivyConf {
-  val SESSION_FACTORY_KEY = "livy.server.session.factory"
-  val SPARK_HOME_KEY = "livy.server.spark-home"
-  val SPARK_SUBMIT_KEY = "livy.server.spark-submit"
-  val IMPERSONATION_ENABLED_KEY = "livy.impersonation.enabled"
-
-  sealed trait SessionKind
-  case class Process() extends SessionKind
-  case class Yarn() extends SessionKind
-}
-
-/**
- * Livy configuration, backed by a thread-safe map of string settings.
- * @param loadDefaults whether to also load values from the Java system properties
- */
-class LivyConf(loadDefaults: Boolean) {
-
-  import LivyConf._
-
-  /**
-   * Create a LivyConf that loads defaults from the system properties and the classpath.
-   * @return
-   */
-  def this() = this(true)
-
-  private val settings = new ConcurrentHashMap[String, String]
-
-  if (loadDefaults) {
-    for ((k, v) <- System.getProperties.asScala if k.startsWith("livy.")) {
-      settings.put(k, v)
-    }
-  }
-
-  /** Set a configuration variable */
-  def set(key: String, value: String): LivyConf = {
-    if (key == null) {
-      throw new NullPointerException("null key")
-    }
-
-    if (value == null) {
-      throw new NullPointerException("null value")
-    }
-
-    settings.put(key, value)
-    this
-  }
-
-  /** Set if a parameter is not already configured */
-  def setIfMissing(key: String, value: String): LivyConf = {
-    if (!settings.containsKey(key)) {
-      settings.put(key, value)
-    }
-    this
-  }
-
-  /** Get a configuration variable */
-  def get(key: String): String = getOption(key).getOrElse(throw new NoSuchElementException(key))
-
-  /** Get a configuration variable */
-  def get(key: String, default: String): String = getOption(key).getOrElse(default)
-
-  /** Get a parameter as an Option */
-  def getOption(key: String): Option[String] = Option(settings.get(key))
-
-  /** Get a parameter as a Boolean */
-  def getBoolean(key: String, default: Boolean) = getOption(key).map(_.toBoolean).getOrElse(default)
-
-  /** Get a parameter as an Int */
-  def getInt(key: String, default: Int) = getOption(key).map(_.toInt).getOrElse(default)
-
-  /** Return whether the configuration includes this setting */
-  def contains(key: String): Boolean = settings.containsKey(key)
-
-  /** Return the location of the spark home directory */
-  def sparkHome(): Option[String] = getOption(SPARK_HOME_KEY).orElse(sys.env.get("SPARK_HOME"))
-
-  /** Return the path to the spark-submit executable. */
-  def sparkSubmit(): String = {
-    getOption(SPARK_SUBMIT_KEY)
-      .orElse { sparkHome().map { _ + File.separator + "bin" + File.separator + "spark-submit" } }
-      .getOrElse("spark-submit")
-  }
-
-  def sessionKind(): SessionKind = getOption(SESSION_FACTORY_KEY).getOrElse("process") match {
-    case "process" => Process()
-    case "yarn" => Yarn()
-    case kind => throw new IllegalStateException(f"unknown kind $kind")
-  }
-
-  /** Return the filesystem root. Defaults to the local filesystem. */
-  def filesystemRoot(): String = sessionKind() match {
-    case Process() => "file://"
-    case Yarn() => "hdfs://"
-  }
-}
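
A short sketch of how the configuration class above was driven: hypothetical values, grounded only in the keys and accessors defined here.

    import com.cloudera.hue.livy.LivyConf

    // new LivyConf() picks up any livy.* Java system properties.
    val conf = new LivyConf().set(LivyConf.SESSION_FACTORY_KEY, "yarn")

    conf.sessionKind()     // Yarn()
    conf.filesystemRoot()  // "hdfs://" for Yarn sessions
    conf.sparkSubmit()     // $SPARK_HOME/bin/spark-submit unless overridden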

+ 0 - 55
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/Logging.scala

@@ -1,55 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import org.slf4j.LoggerFactory
-
-trait Logging {
-  lazy val logger = LoggerFactory.getLogger(this.getClass)
-
-  def trace(message: => Any) = {
-    if (logger.isTraceEnabled) {
-      logger.trace(message.toString)
-    }
-  }
-
-  def debug(message: => Any) = {
-    if (logger.isDebugEnabled) {
-      logger.debug(message.toString)
-    }
-  }
-
-  def info(message: => Any) = {
-    if (logger.isInfoEnabled) {
-      logger.info(message.toString)
-    }
-  }
-
-  def warn(message: => Any) = {
-    logger.warn(message.toString)
-  }
-
-  def error(message: => Any, t: Throwable) = {
-    logger.error(message.toString, t)
-  }
-
-  def error(message: => Any) = {
-    logger.error(message.toString)
-  }
-}
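
A sketch of the intended use of the trait above (the component name is made up): the by-name `message` parameters mean the log string is only built when the level is enabled.

    import com.cloudera.hue.livy.Logging

    class SessionReaper extends Logging {   // hypothetical component
      def reap(id: Int): Unit = {
        debug(s"reaping session $id")  // interpolation skipped unless DEBUG is on
      }
    }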

+ 0 - 135
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/Utils.scala

@@ -1,135 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import java.io.{File, FileInputStream, InputStreamReader}
-import java.util.Properties
-
-import scala.annotation.tailrec
-import scala.collection.JavaConversions._
-import scala.concurrent.TimeoutException
-import scala.concurrent.duration.Duration
-
-object Utils {
-  def getPropertiesFromFile(file: File): Map[String, String] = {
-    val inReader = new InputStreamReader(new FileInputStream(file), "UTF-8")
-    try {
-      val properties = new Properties()
-      properties.load(inReader)
-      properties.stringPropertyNames().map(k => (k, properties(k).trim())).toMap
-    } finally {
-      inReader.close()
-    }
-  }
-
-  def getLivyConfDir(env: Map[String, String] = sys.env): Option[File] = {
-    env.get("LIVY_CONF_DIR")
-      .orElse(env.get("LIVY_HOME").map(path => s"$path${File.separator}conf"))
-      .map(new File(_))
-      .filter(_.exists())
-  }
-
-  def getLivyConfigFile(name: String): Option[File] = {
-    getLivyConfDir().map(new File(_, name)).filter(_.exists())
-  }
-
-  def getLivyConfigFileOrError(name: String): File = {
-    getLivyConfigFile(name).getOrElse {
-      throw new Exception(s"$name does not exist")
-    }
-  }
-
-  def getDefaultPropertiesFile: Option[File] = {
-    getLivyConfigFile("livy-defaults.conf")
-  }
-
-  def loadDefaultLivyProperties(conf: LivyConf, filePath: String = null) = {
-    val file: Option[File] = Option(filePath)
-      .map(new File(_))
-      .orElse(getDefaultPropertiesFile)
-
-    file.foreach { f =>
-      getPropertiesFromFile(f)
-        .filterKeys(_.startsWith("livy."))
-        .foreach { case (k, v) =>
-          conf.setIfMissing(k, v)
-          sys.props.getOrElseUpdate(k, v)
-        }
-    }
-  }
-
-  def jarOfClass(cls: Class[_]): Option[String] = {
-    val uri = cls.getResource("/" + cls.getName.replace('.', '/') + ".class")
-    if (uri != null) {
-      val uriStr = uri.toString
-      if (uriStr.startsWith("jar:file:")) {
-        Some(uriStr.substring("jar:file:".length, uriStr.indexOf("!")))
-      } else {
-        None
-      }
-    } else {
-      None
-    }
-  }
-
-  /**
-   * Waits until an event has occurred within some time period, polling with an
-   * exponential backoff (capped at one second) to limit the poll calls.
-   *
-   * @param checkForEvent predicate that is polled until it returns true
-   * @param atMost maximum time to wait for the event
-   * @throws java.util.concurrent.TimeoutException if the event does not occur in time
-   * @throws java.lang.InterruptedException if the polling thread is interrupted
-   */
-  @throws(classOf[TimeoutException])
-  @throws(classOf[InterruptedException])
-  final def waitUntil(checkForEvent: () => Boolean, atMost: Duration) = {
-    val endTime = System.currentTimeMillis() + atMost.toMillis
-
-    @tailrec
-    def aux(count: Int): Unit = {
-      if (!checkForEvent()) {
-        val now = System.currentTimeMillis()
-
-        if (now < endTime) {
-          val sleepTime = Math.min(10 * (2 << (count - 1)), 1000)
-          Thread.sleep(sleepTime)
-          aux(count + 1)
-        } else {
-          throw new TimeoutException
-        }
-      }
-    }
-
-    aux(1)
-  }
-
-  /** Returns whether the process is still running */
-  def isProcessAlive(process: Process): Boolean = {
-    try {
-      process.exitValue()
-      false
-    } catch {
-      case _: IllegalThreadStateException =>
-        true
-    }
-  }
-
-}
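
A hypothetical poll using waitUntil above, waiting up to 30 seconds for a flag to flip under the capped exponential backoff:

    import java.util.concurrent.TimeUnit

    import com.cloudera.hue.livy.Utils

    import scala.concurrent.duration.Duration

    @volatile var ready = false  // hypothetical condition, flipped elsewhere
    Utils.waitUntil(() => ready, Duration(30, TimeUnit.SECONDS))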

+ 0 - 111
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/WebServer.scala

@@ -1,111 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import java.net.{InetAddress, InetSocketAddress}
-import javax.servlet.ServletContextListener
-
-import ch.qos.logback.access.jetty.RequestLogImpl
-import org.eclipse.jetty.server._
-import org.eclipse.jetty.server.handler.{HandlerCollection, RequestLogHandler}
-import org.eclipse.jetty.servlet.{ServletContextHandler, DefaultServlet}
-import org.eclipse.jetty.util.ssl.SslContextFactory
-import org.scalatra.servlet.AsyncSupport
-
-import scala.concurrent.ExecutionContext
-
-object WebServer {
-  val KeystoreKey = "livy.keystore"
-  val KeystorePasswordKey = "livy.keystore.password"
-}
-
-class WebServer(livyConf: LivyConf, var host: String, var port: Int) extends Logging {
-  val server = new Server()
-
-  server.setStopTimeout(1000)
-  server.setStopAtShutdown(true)
-
-  val connector = livyConf.getOption(WebServer.KeystoreKey) match {
-    case None =>
-      new ServerConnector(server)
-
-    case Some(keystore) =>
-      val https = new HttpConfiguration()
-      https.addCustomizer(new SecureRequestCustomizer())
-
-      val sslContextFactory = new SslContextFactory()
-      sslContextFactory.setKeyStorePath(keystore)
-      livyConf.getOption(WebServer.KeystorePasswordKey).foreach(sslContextFactory.setKeyStorePassword)
-      livyConf.getOption(WebServer.KeystorePasswordKey).foreach(sslContextFactory.setKeyManagerPassword)
-
-      new ServerConnector(server,
-        new SslConnectionFactory(sslContextFactory, "http/1.1"),
-        new HttpConnectionFactory(https))
-  }
-
-  connector.setHost(host)
-  connector.setPort(port)
-
-  server.setConnectors(Array(connector))
-
-  val context = new ServletContextHandler()
-
-  context.setContextPath("/")
-  context.addServlet(classOf[DefaultServlet], "/")
-  context.setAttribute(AsyncSupport.ExecutionContextKey, ExecutionContext.global)
-
-  val handlers = new HandlerCollection
-  handlers.addHandler(context)
-
-  // configure the access log
-  val requestLogHandler = new RequestLogHandler
-  val requestLog = new RequestLogImpl
-  requestLog.setResource("/logback-access.xml")
-  requestLogHandler.setRequestLog(requestLog)
-  handlers.addHandler(requestLogHandler)
-
-  server.setHandler(handlers)
-
-  def addEventListener(listener: ServletContextListener) = {
-    context.addEventListener(listener)
-  }
-
-  def start() = {
-    server.start()
-
-    val connector = server.getConnectors()(0).asInstanceOf[NetworkConnector]
-
-    if (host == "0.0.0.0") {
-      host = InetAddress.getLocalHost.getHostAddress
-    }
-    port = connector.getLocalPort
-
-    info("Starting server on %s" format port)
-  }
-
-  def join() = {
-    server.join()
-  }
-
-  def stop() = {
-    context.stop()
-    server.stop()
-  }
-}
-
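
A bootstrap sketch for the server above; the keystore path, password and port are made-up examples. Setting livy.keystore is what switches the connector to SSL.

    import com.cloudera.hue.livy.{LivyConf, WebServer}

    val conf = new LivyConf()
    conf.set(WebServer.KeystoreKey, "/etc/livy/keystore.jks")
    conf.set(WebServer.KeystorePasswordKey, "changeit")

    val server = new WebServer(conf, "0.0.0.0", 8998)  // hypothetical bind address
    server.start()
    server.join()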

+ 0 - 64
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/msgs.scala

@@ -1,64 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-sealed trait MsgType
-
-object MsgType {
-  case object execute_request extends MsgType
-  case object execute_reply extends MsgType
-}
-
-case class Msg[T <: Content](msg_type: MsgType, content: T)
-
-sealed trait Content
-
-case class ExecuteRequest(code: String) extends Content {
-  val msg_type = MsgType.execute_request
-}
-
-sealed trait ExecutionStatus
-object ExecutionStatus {
-  case object ok extends ExecutionStatus
-  case object error extends ExecutionStatus
-  case object abort extends ExecutionStatus
-}
-
-sealed trait ExecuteReply extends Content {
-  val msg_type = MsgType.execute_reply
-
-  val status: ExecutionStatus
-  val execution_count: Int
-}
-
-case class ExecuteReplyOk(execution_count: Int,
-                          payload: Map[String, String]) extends ExecuteReply {
-  val status = ExecutionStatus.ok
-}
-
-case class ExecuteReplyError(execution_count: Int,
-                             ename: String,
-                             evalue: String,
-                             traceback: List[String]) extends ExecuteReply {
-  val status = ExecutionStatus.error
-}
-
-case class ExecuteResponse(id: Int, input: Seq[String], output: Seq[String])
-
-case class ShutdownRequest() extends Content
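
A hypothetical request/reply pair built from the message types above:

    import com.cloudera.hue.livy._

    val request = Msg(MsgType.execute_request, ExecuteRequest("1 + 2"))
    val reply = Msg(MsgType.execute_reply,
      ExecuteReplyOk(execution_count = 0, payload = Map("text/plain" -> "3")))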

+ 0 - 45
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/Kind.scala

@@ -1,45 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import org.json4s.CustomSerializer
-import org.json4s.JsonAST.JString
-
-sealed trait Kind
-case class Spark() extends Kind {
-  override def toString = "spark"
-}
-
-case class PySpark() extends Kind {
-  override def toString = "pyspark"
-}
-
-case class SparkR() extends Kind {
-  override def toString = "sparkr"
-}
-
-case object SessionKindSerializer extends CustomSerializer[Kind](implicit formats => ( {
-  case JString("spark") | JString("scala") => Spark()
-  case JString("pyspark") | JString("python") => PySpark()
-  case JString("sparkr") | JString("r") => SparkR()
-}, {
-  case kind: Kind => JString(kind.toString)
-}
-  )
-)
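
A round-trip sketch for the serializer above, assuming json4s-jackson on the classpath: it accepts the aliases "scala", "python" and "r" on read, but always writes the canonical names.

    import com.cloudera.hue.livy.sessions._
    import org.json4s._
    import org.json4s.jackson.JsonMethods._

    implicit val formats: Formats = DefaultFormats + SessionKindSerializer

    parse("\"python\"").extract[Kind]                 // PySpark()
    compact(render(Extraction.decompose(PySpark()))) // "pyspark"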

+ 0 - 42
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/Session.scala

@@ -1,42 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import scala.concurrent.Future
-
-trait Session {
-  def id: Int
-
-  def lastActivity: Option[Long] = None
-
-  def stoppedTime: Option[Long] = {
-    state match {
-      case SessionState.Error(time) => Some(time)
-      case SessionState.Dead(time) => Some(time)
-      case SessionState.Success(time) => Some(time)
-      case _ => None
-    }
-  }
-
-  def state: SessionState
-
-  def stop(): Future[Unit]
-
-  def logLines(): IndexedSeq[String]
-}

+ 0 - 30
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionFactory.scala

@@ -1,30 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import org.json4s.{DefaultFormats, Formats, JValue}
-
-abstract class SessionFactory[S <: Session] {
-
-  protected implicit def jsonFormats: Formats = DefaultFormats
-
-  def create(id: Int, createRequest: JValue): S
-
-  def close(): Unit = {}
-}

+ 0 - 110
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionManager.scala

@@ -1,110 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import java.util.concurrent.atomic.AtomicInteger
-
-import com.cloudera.hue.livy.{LivyConf, Logging}
-import org.json4s.JValue
-
-import scala.collection.mutable
-import scala.concurrent.{ExecutionContext, Future}
-
-object SessionManager {
-  val SESSION_TIMEOUT = "livy.server.session.timeout"
-}
-
-class SessionManager[S <: Session](livyConf: LivyConf, factory: SessionFactory[S])
-  extends Logging {
-
-  private implicit def executor: ExecutionContext = ExecutionContext.global
-
-  private[this] final val _idCounter = new AtomicInteger()
-  private[this] final val _sessions = mutable.Map[Int, S]()
-
-  private[this] final val sessionTimeout = livyConf.getInt(SessionManager.SESSION_TIMEOUT, 1000 * 60 * 60)
-  private[this] final val garbageCollector = new GarbageCollector
-
-  garbageCollector.setDaemon(true)
-  garbageCollector.start()
-
-  def create(createRequest: JValue): S = {
-    val id = _idCounter.getAndIncrement
-    val session: S = factory.create(id, createRequest)
-
-    info("created session %s" format session.id)
-
-    synchronized {
-      _sessions.put(session.id, session)
-      session
-    }
-  }
-
-  def get(id: Int): Option[S] = _sessions.get(id)
-
-  def size(): Int = _sessions.size
-
-  def all(): Iterable[S] = _sessions.values
-
-  def delete(id: Int): Option[Future[Unit]] = {
-    get(id).map(delete)
-  }
-
-  def delete(session: S): Future[Unit] = {
-    session.stop().map { case _ =>
-      synchronized {
-        _sessions.remove(session.id)
-      }
-
-      ()
-    }
-  }
-
-  def shutdown(): Unit = { garbageCollector.shutdown() }
-
-  def collectGarbage(): Future[Iterable[Unit]] = {
-    def expired(session: Session): Boolean = {
-      session.lastActivity.orElse(session.stoppedTime) match {
-        case Some(lastActivity) =>
-          val currentTime = System.currentTimeMillis()
-          currentTime - lastActivity > sessionTimeout
-        case None =>
-          false
-      }
-    }
-
-    Future.sequence(all().filter(expired).map(delete))
-  }
-
-  private class GarbageCollector extends Thread("session gc thread") {
-
-    private var finished = false
-
-    override def run(): Unit = {
-      while (!finished) {
-        collectGarbage()
-        Thread.sleep(60 * 1000)
-      }
-    }
-
-    def shutdown(): Unit = {
-      finished = true
-    }
-  }
-}
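
A configuration sketch for the manager above: sessions whose last activity (or stop time) is older than livy.server.session.timeout, one hour by default and expressed in milliseconds, are reaped by the GC thread. The 15-minute value below is a made-up example.

    import com.cloudera.hue.livy.LivyConf
    import com.cloudera.hue.livy.sessions.SessionManager

    val conf = new LivyConf().set(SessionManager.SESSION_TIMEOUT, (15 * 60 * 1000).toString)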

+ 0 - 81
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/SessionState.scala

@@ -1,81 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-sealed trait SessionState {
-  /** Returns true if the State represents a process that can eventually execute commands */
-  def isActive: Boolean
-}
-
-object SessionState {
-
-  case class NotStarted() extends SessionState {
-    override def isActive = true
-
-    override def toString = "not_started"
-  }
-
-  case class Starting() extends SessionState {
-    override def isActive = true
-
-    override def toString = "starting"
-  }
-
-  case class Idle() extends SessionState {
-    override def isActive = true
-
-    override def toString = "idle"
-  }
-
-  case class Running() extends SessionState {
-    override def isActive = true
-
-    override def toString = "running"
-  }
-
-  case class Busy() extends SessionState {
-    override def isActive = true
-
-    override def toString = "busy"
-  }
-
-  case class ShuttingDown() extends SessionState {
-    override def isActive = false
-
-    override def toString = "shutting_down"
-  }
-
-  case class Error(time: Long = System.currentTimeMillis()) extends SessionState {
-    override def isActive = true
-
-    override def toString = "error"
-  }
-
-  case class Dead(time: Long = System.currentTimeMillis()) extends SessionState {
-    override def isActive = false
-
-    override def toString = "dead"
-  }
-
-  case class Success(time: Long = System.currentTimeMillis()) extends SessionState {
-    override def isActive = false
-
-    override def toString = "success"
-  }
-}
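
A hypothetical helper mirroring Session.stoppedTime above: only the terminal states carry a timestamp, and only they count as finished.

    import com.cloudera.hue.livy.sessions.SessionState

    def isFinished(state: SessionState): Boolean = state match {
      case SessionState.Error(_) | SessionState.Dead(_) | SessionState.Success(_) => true
      case _ => false
    }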

+ 0 - 23
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/batch/BatchSession.scala

@@ -1,23 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions.batch
-
-import com.cloudera.hue.livy.sessions.Session
-
-trait BatchSession extends Session

+ 0 - 59
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/InteractiveSession.scala

@@ -1,59 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions.interactive
-
-import java.net.URL
-import java.util.concurrent.TimeoutException
-
-import com.cloudera.hue.livy.{ExecuteRequest, Utils}
-import com.cloudera.hue.livy.sessions.{Kind, Session, SessionState}
-
-import scala.concurrent._
-import scala.concurrent.duration.Duration
-
-object InteractiveSession {
-  class SessionFailedToStart(msg: String) extends Exception(msg)
-
-  class StatementNotFound extends Exception
-}
-
-trait InteractiveSession extends Session {
-  def kind: Kind
-
-  def proxyUser: Option[String]
-
-  override def lastActivity: Option[Long]
-
-  def url: Option[URL]
-
-  def url_=(url: URL)
-
-  def executeStatement(content: ExecuteRequest): Statement
-
-  def statements: IndexedSeq[Statement]
-
-  def interrupt(): Future[Unit]
-
-  @throws(classOf[TimeoutException])
-  @throws(classOf[InterruptedException])
-  final def waitForStateChange(oldState: SessionState, atMost: Duration) = {
-    Utils.waitUntil({ () => state != oldState }, atMost)
-  }
-}
-

+ 0 - 65
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/Statement.scala

@@ -1,65 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions.interactive
-
-import com.cloudera.hue.livy.ExecuteRequest
-import org.json4s.JValue
-import org.json4s.JsonAST.{JArray, JField, JObject, JString}
-
-import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future}
-import scala.util.{Failure, Success}
-
-class Statement(val id: Int, val request: ExecuteRequest, _output: Future[JValue]) {
-  protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-
-  private[this] var _state: StatementState = StatementState.Running()
-
-  def state = _state
-
-  def output(from: Option[Int] = None, size: Option[Int] = None): Future[JValue] = {
-    _output.map { case output =>
-      if (from.isEmpty && size.isEmpty) {
-        output
-      } else {
-        val from_ = from.getOrElse(0)
-        val size_ = size.getOrElse(100)
-        val until = from_ + size_
-
-        output \ "data" match {
-          case JObject(JField("text/plain", JString(text)) :: Nil) =>
-            val lines = text.split('\n').slice(from_, until)
-            output.replace(
-              "data" :: "text/plain" :: Nil,
-              JString(lines.mkString("\n")))
-          case JObject(JField("application/json", JArray(items)) :: Nil) =>
-            output.replace(
-              "data" :: "application/json" :: Nil,
-              JArray(items.slice(from_, until)))
-          case _ =>
-            output
-        }
-      }
-    }
-  }
-
-  _output.onComplete {
-    case Success(_) => _state = StatementState.Available()
-    case Failure(_) => _state = StatementState.Error()
-  }
-}

+ 0 - 35
apps/spark/java/livy-core/src/main/scala/com/cloudera/hue/livy/sessions/interactive/StatementState.scala

@@ -1,35 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions.interactive
-
-sealed trait StatementState
-
-object StatementState {
-  case class Running() extends StatementState {
-    override def toString = "running"
-  }
-
-  case class Available() extends StatementState {
-    override def toString = "available"
-  }
-
-  case class Error() extends StatementState {
-    override def toString = "error"
-  }
-}

+ 0 - 102
apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/BaseInteractiveSessionSpec.scala

@@ -1,102 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.ExecuteRequest
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import org.json4s.{DefaultFormats, Extraction}
-import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
-
-import scala.concurrent.Await
-import scala.concurrent.duration.Duration
-
-abstract class BaseInteractiveSessionSpec extends FunSpec with Matchers with BeforeAndAfter {
-
-  implicit val formats = DefaultFormats
-
-  var session: InteractiveSession = null
-
-  def createSession(): InteractiveSession
-
-  before {
-    session = createSession()
-  }
-
-  after {
-    session.stop()
-  }
-
-  describe("A spark session") {
-    it("should start in the starting or idle state") {
-      session.state should (equal (SessionState.Starting()) or equal (SessionState.Idle()))
-    }
-
-    it("should eventually become the idle state") {
-      session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-      session.state should equal (SessionState.Idle())
-    }
-
-    it("should execute `1 + 2` == 3") {
-      session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-      val stmt = session.executeStatement(ExecuteRequest("1 + 2"))
-      val result = Await.result(stmt.output(), Duration.Inf)
-
-      val expectedResult = Extraction.decompose(Map(
-        "status" -> "ok",
-        "execution_count" -> 0,
-        "data" -> Map(
-          "text/plain" -> "3"
-        )
-      ))
-
-      result should equal (expectedResult)
-    }
-
-    it("should report an error if accessing an unknown variable") {
-      session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-      val stmt = session.executeStatement(ExecuteRequest("x"))
-      val result = Await.result(stmt.output(), Duration.Inf)
-      val expectedResult = Extraction.decompose(Map(
-        "status" -> "error",
-        "execution_count" -> 0,
-        "ename" -> "NameError",
-        "evalue" -> "name 'x' is not defined",
-        "traceback" -> List(
-          "Traceback (most recent call last):\n",
-          "NameError: name 'x' is not defined\n"
-        )
-      ))
-
-      result should equal (expectedResult)
-      session.state should equal (SessionState.Idle())
-    }
-
-    it("should error out the session if the interpreter dies") {
-      session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-      val stmt = session.executeStatement(ExecuteRequest("import os; os._exit(1)"))
-      Await.result(stmt.output(), Duration.Inf)
-      (session.state match {
-        case SessionState.Error(_) => true
-        case _ => false
-      }) should equal (true)
-    }
-  }
-}

+ 0 - 51
apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/SessionManagerSpec.scala

@@ -1,51 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions
-
-import com.cloudera.hue.livy.LivyConf
-import org.json4s.JsonAST.{JNothing, JValue}
-import org.scalatest.{FlatSpec, Matchers}
-
-import scala.concurrent.duration.Duration
-import scala.concurrent.{Await, Future}
-
-class SessionManagerSpec extends FlatSpec with Matchers {
-
-  class MockSession(val id: Int) extends Session {
-    override def stop(): Future[Unit] = Future.successful(())
-
-    override def logLines(): IndexedSeq[String] = IndexedSeq()
-
-    override def state: SessionState = SessionState.Success(0)
-  }
-
-  class MockSessionFactory extends SessionFactory[MockSession] {
-    override def create(id: Int, createRequest: JValue): MockSession = new MockSession(id)
-  }
-
-  it should "garbage collect old sessions" in {
-    val livyConf = new LivyConf()
-    livyConf.set(SessionManager.SESSION_TIMEOUT, "100")
-    val manager = new SessionManager(livyConf, new MockSessionFactory)
-    val session = manager.create(JNothing)
-    manager.get(session.id).isDefined should be(true)
-    Await.result(manager.collectGarbage(), Duration.Inf)
-    manager.get(session.id).isEmpty should be(true)
-  }
-}

+ 0 - 77
apps/spark/java/livy-core/src/test/scala/com/cloudera/hue/livy/sessions/interactive/StatementSpec.scala

@@ -1,77 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.sessions.interactive
-
-import com.cloudera.hue.livy.ExecuteRequest
-import org.json4s.JsonAST.JString
-import org.json4s.{DefaultFormats, Extraction}
-import org.scalatest.{FunSpec, Matchers}
-
-import scala.concurrent.duration.Duration
-import scala.concurrent.{Await, Future}
-
-class StatementSpec extends FunSpec with Matchers {
-
-  implicit val formats = DefaultFormats
-
-  describe("A statement") {
-    it("should support paging through text/plain data") {
-      val lines = List("1", "2", "3", "4", "5")
-      val rep = Extraction.decompose(Map(
-        "status" -> "ok",
-        "execution_count" -> 0,
-        "data" -> Map(
-          "text/plain" -> lines.mkString("\n")
-        )
-      ))
-      val stmt = new Statement(0, ExecuteRequest(""), Future.successful(rep))
-      var output = Await.result(stmt.output(), Duration.Inf)
-      output \ "data" \ "text/plain" should equal (JString(lines.mkString("\n")))
-
-      output = Await.result(stmt.output(Some(2)), Duration.Inf)
-      output \ "data" \ "text/plain" should equal (JString(lines.slice(2, lines.length).mkString("\n")))
-
-      output = Await.result(stmt.output(Some(2), Some(1)), Duration.Inf)
-      output \ "data" \ "text/plain" should equal (JString(lines.slice(2, 3).mkString("\n")))
-    }
-
-    it("should support paging through application/json arrays") {
-      val rep = Extraction.decompose(Map(
-        "status" -> "ok",
-        "execution_count" -> 0,
-        "data" -> Map(
-          "application/json" -> List(1, 2, 3, 4)
-        )
-      ))
-      val stmt = new Statement(0, ExecuteRequest(""), Future.successful(rep))
-      var output = Await.result(stmt.output(), Duration.Inf)
-      (output \ "data" \ "application/json").extract[List[Int]] should equal (List(1, 2, 3, 4))
-
-      output = Await.result(stmt.output(Some(2)), Duration.Inf)
-      (output \ "data" \ "application/json").extract[List[Int]] should equal (List(3, 4))
-
-      output = Await.result(stmt.output(Some(2), Some(1)), Duration.Inf)
-      (output \ "data" \ "application/json").extract[List[Int]] should equal (List(3))
-    }
-  }
-
-}

+ 0 - 218
apps/spark/java/livy-repl/pom.xml

@@ -1,218 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <relativePath>../pom.xml</relativePath>
-        <version>0.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>livy-repl_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.module</groupId>
-            <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-codec</groupId>
-            <artifactId>commons-codec</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpclient</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-repl_${scala.binary.version}</artifactId>
-            <version>${spark.version}</version>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <version>${spark.version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.httpcomponents</groupId>
-                    <artifactId>httpclient</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.httpcomponents</groupId>
-                    <artifactId>httpcore</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-compiler</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-reflect</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-test_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.xerial.snappy</groupId>
-            <artifactId>snappy-java</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>net.sf.py4j</groupId>
-            <artifactId>py4j</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>net.databinder.dispatch</groupId>
-            <artifactId>dispatch-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <mainClass>com.cloudera.hue.livy.repl.Main</mainClass>
-                        </manifest>
-                    </archive>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-            </plugin>
-
-            <plugin>
-                <groupId>org.scalatest</groupId>
-                <artifactId>scalatest-maven-plugin</artifactId>
-                <version>1.0</version>
-                <configuration>
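-                    <!-- A note on these test settings: they run the Scala tests against a
-                         local, UI-less Spark context that allows multiple contexts per JVM. -->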
-                    <systemProperties>
-                        <spark.app.name>Livy</spark.app.name>
-                        <spark.master>local</spark.master>
-                        <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
-                        <spark.ui.enabled>false</spark.ui.enabled>
-                        <settings.usejavacp.value>true</settings.usejavacp.value>
-                    </systemProperties>
-                </configuration>
-            </plugin>
-
-        </plugins>
-    </build>
-
-</project>

+ 0 - 18
apps/spark/java/livy-repl/src/main/resources/fake_R.sh

@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-exec R --slave "$@"

+ 0 - 39
apps/spark/java/livy-repl/src/main/resources/fake_pyspark.sh

@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-if [ -z "$SPARK_HOME" ]; then
-	echo "\$SPARK_HOME is not set" 1>&2
-	exit 1
-fi
-
-export SPARK_CONF_DIR="$SPARK_HOME/conf"
-
-source "$SPARK_HOME/bin/utils.sh"
-source "$SPARK_HOME/bin/load-spark-env.sh"
-
-export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
-
-for path in $(ls $SPARK_HOME/python/lib/*.zip); do
-	export PYTHONPATH="$path:$PYTHONPATH"
-done
-
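-# Use pyspark's shell bootstrap as the Python startup script, keeping the
-# previous value around in OLD_PYTHONSTARTUP.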
-export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
-export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
-
-exec python "$@"

+ 0 - 446
apps/spark/java/livy-repl/src/main/resources/fake_shell.py

@@ -1,446 +0,0 @@
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ast
-import cStringIO
-import datetime
-import decimal
-import json
-import logging
-import sys
-import traceback
-
-logging.basicConfig()
-LOG = logging.getLogger('fake_shell')
-
-global_dict = {}
-
-
-def execute_reply(status, content):
-    return {
-        'msg_type': 'execute_reply',
-        'content': dict(
-            content,
-            status=status,
-        )
-    }
-
-
-def execute_reply_ok(data):
-    return execute_reply('ok', {
-        'data': data,
-    })
-
-
-def execute_reply_error(exc_type, exc_value, tb):
-    LOG.error('execute_reply', exc_info=True)
-    return execute_reply('error', {
-        'ename': unicode(exc_type.__name__),
-        'evalue': unicode(exc_value),
-        'traceback': traceback.format_exception(exc_type, exc_value, tb, -1),
-    })
-
-
-def execute_reply_internal_error(message, exc_info=None):
-    LOG.error('execute_reply_internal_error', exc_info=exc_info)
-    return execute_reply('error', {
-        'ename': 'InternalError',
-        'evalue': message,
-        'traceback': [],
-    })
-
-
-class ExecutionError(Exception):
-    def __init__(self, exc_info):
-        self.exc_info = exc_info
-
-
-class NormalNode(object):
-    def __init__(self, code):
-        self.code = compile(code, '<stdin>', 'exec', ast.PyCF_ONLY_AST, 1)
-
-    def execute(self):
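-        # Run every statement except the last in 'exec' mode, then the final one
-        # in 'single' mode so its value is echoed, mimicking the interactive shell.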
-        to_run_exec, to_run_single = self.code.body[:-1], self.code.body[-1:]
-
-        try:
-            for node in to_run_exec:
-                mod = ast.Module([node])
-                code = compile(mod, '<stdin>', 'exec')
-                exec code in global_dict
-
-            for node in to_run_single:
-                mod = ast.Interactive([node])
-                code = compile(mod, '<stdin>', 'single')
-                exec code in global_dict
-        except:
-            # We don't need to log the exception because we're just executing user
-            # code and passing the error along.
-            raise ExecutionError(sys.exc_info())
-
-
-class UnknownMagic(Exception):
-    pass
-
-
-class MagicNode(object):
-    def __init__(self, line):
-        parts = line[1:].split(' ', 1)
-        if len(parts) == 1:
-            self.magic, self.rest = parts[0], ()
-        else:
-            self.magic, self.rest = parts[0], (parts[1],)
-
-    def execute(self):
-        if not self.magic:
-            raise UnknownMagic('magic command not specified')
-
-        try:
-            handler = magic_router[self.magic]
-        except KeyError:
-            raise UnknownMagic("unknown magic command '%s'" % self.magic)
-
-        return handler(*self.rest)
-
-
-def parse_code_into_nodes(code):
-    nodes = []
-    try:
-        nodes.append(NormalNode(code))
-    except SyntaxError:
-        # It's possible we hit a syntax error because of a magic command, which
-        # is not valid Python. Fall back to splitting the code apart and parsing
-        # the pieces individually.
-
-        # Split the code into chunks of normal code, and possibly magic code, which starts with
-        # a '%'.
-        normal = []
-        chunks = []
-        for i, line in enumerate(code.rstrip().split('\n')):
-            if line.startswith('%'):
-                if normal:
-                    chunks.append('\n'.join(normal))
-                    normal = []
-
-                chunks.append(line)
-            else:
-                normal.append(line)
-
-        if normal:
-            chunks.append('\n'.join(normal))
-
-        # Convert the chunks into AST nodes. Let exceptions propagate.
-        for chunk in chunks:
-            if chunk.startswith('%'):
-                nodes.append(MagicNode(chunk))
-            else:
-                nodes.append(NormalNode(chunk))
-
-    return nodes
-
-
-def execute_request(content):
-    try:
-        code = content['code']
-    except KeyError:
-        return execute_reply_internal_error(
-            'Malformed message: content object missing "code"', sys.exc_info()
-        )
-
-    try:
-        nodes = parse_code_into_nodes(code)
-    except SyntaxError:
-        exc_type, exc_value, tb = sys.exc_info()
-        return execute_reply_error(exc_type, exc_value, [])
-
-    result = None
-
-    try:
-        for node in nodes:
-            result = node.execute()
-    except UnknownMagic:
-        exc_type, exc_value, tb = sys.exc_info()
-        return execute_reply_error(exc_type, exc_value, [])
-    except ExecutionError, e:
-        return execute_reply_error(*e.exc_info)
-
-    if result is None:
-        result = {}
-
-    stdout = fake_stdout.getvalue()
-    fake_stdout.truncate(0)
-
-    stderr = fake_stderr.getvalue()
-    fake_stderr.truncate(0)
-
-    output = result.pop('text/plain', '')
-
-    if stdout:
-        output += stdout
-
-    if stderr:
-        output += stderr
-
-    output = output.rstrip()
-
-    # Only add the output if it exists, or if there are no other mimetypes in the result.
-    if output or not result:
-        result['text/plain'] = output.rstrip()
-
-    return execute_reply_ok(result)
-
-
-def magic_table_convert(value):
-    try:
-        converter = magic_table_types[type(value)]
-    except KeyError:
-        converter = magic_table_types[str]
-
-    return converter(value)
-
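-    // Start a Py4J gateway on an ephemeral port (0); the spawned Python process
-    // connects back to this JVM through PYSPARK_GATEWAY_PORT, set below.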
-
-def magic_table_convert_seq(items):
-    last_item_type = None
-    converted_items = []
-
-    for item in items:
-        item_type, item = magic_table_convert(item)
-
-        if last_item_type is None:
-            last_item_type = item_type
-        elif last_item_type != item_type:
-            raise ValueError('value has inconsistent types')
-
-        converted_items.append(item)
-
-    return 'ARRAY_TYPE', converted_items
-
-
-def magic_table_convert_map(m):
-    last_key_type = None
-    last_value_type = None
-    converted_items = {}
-
-    for key, value in m.iteritems():
-        key_type, key = magic_table_convert(key)
-        value_type, value = magic_table_convert(value)
-
-        if last_key_type is None:
-            last_key_type = key_type
-        elif last_key_type != key_type:
-            raise ValueError('key has inconsistent types')
-
-        if last_value_type is None:
-            last_value_type = value_type
-        elif last_value_type != value_type:
-            raise ValueError('value has inconsistent types')
-
-        converted_items[key] = value
-
-    return 'MAP_TYPE', converted_items
-
-
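-# Converters from Python types to the type tags used in the
-# application/vnd.livy.table.v1+json payload.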
-magic_table_types = {
-    type(None): lambda x: ('NULL_TYPE', x),
-    bool: lambda x: ('BOOLEAN_TYPE', x),
-    int: lambda x: ('INT_TYPE', x),
-    long: lambda x: ('BIGINT_TYPE', x),
-    float: lambda x: ('DOUBLE_TYPE', x),
-    str: lambda x: ('STRING_TYPE', str(x)),
-    unicode: lambda x: ('STRING_TYPE', x.encode('utf-8')),
-    datetime.date: lambda x: ('DATE_TYPE', str(x)),
-    datetime.datetime: lambda x: ('TIMESTAMP_TYPE', str(x)),
-    decimal.Decimal: lambda x: ('DECIMAL_TYPE', str(x)),
-    tuple: magic_table_convert_seq,
-    list: magic_table_convert_seq,
-    dict: magic_table_convert_map,
-}
-
-
-def magic_table(name):
-    try:
-        value = global_dict[name]
-    except KeyError:
-        exc_type, exc_value, tb = sys.exc_info()
-        return execute_reply_error(exc_type, exc_value, [])
-
-    if not isinstance(value, (list, tuple)):
-        value = [value]
-
-    headers = {}
-    data = []
-
-    for row in value:
-        cols = []
-        data.append(cols)
-
-        if not isinstance(row, (list, tuple, dict)):
-            row = [row]
-
-        if isinstance(row, (list, tuple)):
-            iterator = enumerate(row)
-        else:
-            iterator = sorted(row.iteritems())
-
-        for name, col in iterator:
-            col_type, col = magic_table_convert(col)
-
-            try:
-                header = headers[name]
-            except KeyError:
-                header = {
-                    'name': str(name),
-                    'type': col_type,
-                }
-                headers[name] = header
-            else:
-                # Reject columns that have a different type.
-                if header['type'] != col_type:
-                    exc_type = Exception
-                    exc_value = 'table rows have different types'
-                    return execute_reply_error(exc_type, exc_value, [])
-
-            cols.append(col)
-
-    headers = [v for k, v in sorted(headers.iteritems())]
-
-    return {
-        'application/vnd.livy.table.v1+json': {
-            'headers': headers,
-            'data': data,
-        }
-    }
-
-
-def magic_json(name):
-    try:
-        value = global_dict[name]
-    except KeyError:
-        exc_type, exc_value, tb = sys.exc_info()
-        return execute_reply_error(exc_type, exc_value, [])
-
-    return {
-        'application/json': value,
-    }
-
-
-def shutdown_request(_content):
-    sys.exit()
-
-
-magic_router = {
-    'table': magic_table,
-    'json': magic_json,
-}
-
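-# Dispatch table for the line-delimited JSON protocol: every line read from
-# stdin is a message whose 'msg_type' picks the handler (see main() below).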
-msg_type_router = {
-    'execute_request': execute_request,
-    'shutdown_request': shutdown_request,
-}
-
-fake_stdin = cStringIO.StringIO()
-fake_stdout = cStringIO.StringIO()
-fake_stderr = cStringIO.StringIO()
-
-
-def main():
-    sys_stdin = sys.stdin
-    sys_stdout = sys.stdout
-    sys_stderr = sys.stderr
-
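-    # Swap the real streams for in-memory buffers so output printed by user code
-    # is captured into execute replies; the protocol itself keeps using the real
-    # stdin/stdout saved above.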
-    sys.stdin = fake_stdin
-    sys.stdout = fake_stdout
-    sys.stderr = fake_stderr
-
-    try:
-        # Load spark into the context
-        exec 'from pyspark.shell import sc' in global_dict
-
-        print >> sys_stderr, fake_stdout.getvalue()
-        print >> sys_stderr, fake_stderr.getvalue()
-
-        fake_stdout.truncate(0)
-        fake_stderr.truncate(0)
-
-        print >> sys_stdout, 'READY'
-        sys_stdout.flush()
-
-        while True:
-            line = sys_stdin.readline()
-
-            if line == '':
-                break
-            elif line == '\n':
-                continue
-
-            try:
-                msg = json.loads(line)
-            except ValueError:
-                LOG.error('failed to parse message', exc_info=True)
-                continue
-
-            try:
-                msg_type = msg['msg_type']
-            except KeyError:
-                LOG.error('missing message type', exc_info=True)
-                continue
-
-            try:
-                content = msg['content']
-            except KeyError:
-                LOG.error('missing content', exc_info=True)
-                continue
-
-            if not isinstance(content, dict):
-                LOG.error('content is not a dictionary')
-                continue
-
-            try:
-                handler = msg_type_router[msg_type]
-            except KeyError:
-                LOG.error('unknown message type: %s', msg_type)
-                continue
-
-            response = handler(content)
-
-            try:
-                response = json.dumps(response)
-            except (ValueError, TypeError):
-                response = json.dumps({
-                    'msg_type': 'inspect_reply',
-                    'content': {
-                        'status': 'error',
-                        'ename': 'ValueError',
-                        'evalue': 'cannot json-ify %s' % response,
-                        'traceback': [],
-                    }
-                })
-
-            print >> sys_stdout, response
-            sys_stdout.flush()
-    finally:
-        if 'sc' in global_dict:
-            global_dict['sc'].stop()
-
-        sys.stdin = sys_stdin
-        sys.stdout = sys_stdout
-        sys.stderr = sys_stderr
-
-
-if __name__ == '__main__':
-    sys.exit(main())

+ 0 - 13
apps/spark/java/livy-repl/src/main/resources/logback-access.xml

@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-    <!-- always a good idea to activate OnConsoleStatusListener -->
-    <statusListener class="ch.qos.logback.core.status.OnConsoleStatusListener" />
-
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%h %l %u %user %date "%r" %s %b</pattern>
-        </encoder>
-    </appender>
-
-    <appender-ref ref="STDOUT" />
-</configuration>

+ 0 - 12
apps/spark/java/livy-repl/src/main/resources/logback.xml

@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
-        </encoder>
-    </appender>
-
-    <root level="info">
-        <appender-ref ref="STDOUT" />
-    </root>
-</configuration>

+ 0 - 50
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Interpreter.scala

@@ -1,50 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import org.json4s.JObject
-
-object Interpreter {
-  abstract class ExecuteResponse
-
-  case class ExecuteSuccess(content: JObject) extends ExecuteResponse
-  case class ExecuteError(ename: String,
-                          evalue: String,
-                          traceback: Seq[String] = Seq()) extends ExecuteResponse
-  case class ExecuteIncomplete() extends ExecuteResponse
-  case class ExecuteAborted(message: String) extends ExecuteResponse
-}
-
-trait Interpreter {
-  import Interpreter._
-
-  def kind: String
-
-  /** Start the Interpreter */
-  def start(): Unit
-
-  /**
-   * Execute the code and return the result as a Future as it may
-   * take some time to execute.
-   */
-  def execute(code: String): ExecuteResponse
-
-  /** Shut down the interpreter. */
-  def close(): Unit
-}

+ 0 - 158
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Main.scala

@@ -1,158 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import java.util.concurrent.TimeUnit
-import javax.servlet.ServletContext
-
-import com.cloudera.hue.livy.repl.python.PythonInterpreter
-import com.cloudera.hue.livy.repl.scala.SparkInterpreter
-import com.cloudera.hue.livy.repl.sparkr.SparkRInterpreter
-import com.cloudera.hue.livy.sessions.SessionState
-import com.cloudera.hue.livy.{LivyConf, Logging, WebServer}
-import dispatch._
-import org.json4s.jackson.Serialization.write
-import org.json4s.{DefaultFormats, Formats}
-import org.scalatra.LifeCycle
-import org.scalatra.servlet.ScalatraListener
-
-import _root_.scala.annotation.tailrec
-import _root_.scala.concurrent.duration._
-import _root_.scala.concurrent.{Await, ExecutionContext}
-
-object Main extends Logging {
-  val SESSION_KIND = "livy.repl.session.kind"
-  val CALLBACK_URL = "livy.repl.callbackUrl"
-  val PYSPARK_SESSION = "pyspark"
-  val SPARK_SESSION = "spark"
-  val SPARKR_SESSION = "sparkr"
-
-  def main(args: Array[String]): Unit = {
-
-    val host = sys.props.getOrElse("spark.livy.host", "0.0.0.0")
-    val port = sys.props.getOrElse("spark.livy.port", "8999").toInt
-    val callbackUrl = sys.props.get("spark.livy.callbackUrl")
-
-    if (args.length != 1) {
-      println("Must specify either `pyspark`/`spark`/`sparkr` for the session kind")
-      sys.exit(1)
-    }
-
-    val session_kind = args.head
-
-    session_kind match {
-      case PYSPARK_SESSION | SPARK_SESSION | SPARKR_SESSION =>
-      case _ =>
-        println("Unknown session kind: " + session_kind)
-        sys.exit(1)
-    }
-
-    val server = new WebServer(new LivyConf(), host, port)
-
-    server.context.setResourceBase("src/main/com/cloudera/hue/livy/repl")
-    server.context.addEventListener(new ScalatraListener)
-    server.context.setInitParameter(ScalatraListener.LifeCycleKey, classOf[ScalatraBootstrap].getCanonicalName)
-    server.context.setInitParameter(SESSION_KIND, session_kind)
-    callbackUrl.foreach(server.context.setInitParameter(CALLBACK_URL, _))
-
-    server.start()
-
-    try {
-      val replUrl = s"http://${server.host}:${server.port}"
-      System.setProperty("livy.repl.url", replUrl)
-
-      println(s"Starting livy-repl on $replUrl")
-      Console.flush()
-
-      server.join()
-      server.stop()
-    } finally {
-      // Make sure to close all our outstanding http requests.
-      Http.shutdown()
-    }
-  }
-}
-
-class ScalatraBootstrap extends LifeCycle with Logging {
-
-  protected implicit def executor: ExecutionContext = ExecutionContext.global
-  protected implicit def jsonFormats: Formats = DefaultFormats
-
-  var session: Session = null
-
-  override def init(context: ServletContext): Unit = {
-    try {
-      val interpreter = context.getInitParameter(Main.SESSION_KIND) match {
-        case Main.PYSPARK_SESSION => PythonInterpreter()
-        case Main.SPARK_SESSION => SparkInterpreter()
-        case Main.SPARKR_SESSION => SparkRInterpreter()
-      }
-
-      session = Session(interpreter)
-
-      context.mount(new WebApp(session), "/*")
-
-      // See if we should notify a callback URL that we have started.
-      Option(context.getInitParameter(Main.CALLBACK_URL)).foreach(notifyCallback)
-    } catch {
-      case e: Throwable =>
-        println(f"Exception thrown when initializing server: $e")
-        sys.exit(1)
-    }
-  }
-
-  override def destroy(context: ServletContext): Unit = {
-    if (session != null) {
-      session.close()
-    }
-  }
-
-  private def notifyCallback(callbackUrl: String): Unit = {
-    info(s"Notifying $callbackUrl that we're up")
-
-    Future {
-      session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-
-      // Wait for our url to be discovered.
-      val replUrl = waitForReplUrl()
-
-      var req = url(callbackUrl).setContentType("application/json", "UTF-8")
-      req = req << write(Map("url" -> replUrl))
-
-      val rep = Http(req OK as.String)
-      rep.onFailure {
-        case _ => System.exit(1)
-      }
-
-      Await.result(rep, Duration(10, TimeUnit.SECONDS))
-    }
-  }
-
-  /** Spin until the server has started and published its URL. */
-  @tailrec
-  private def waitForReplUrl(): String = {
-    val replUrl = System.getProperty("livy.repl.url")
-    if (replUrl == null) {
-      Thread.sleep(10)
-      waitForReplUrl()
-    } else {
-      replUrl
-    }
-  }
-}

+ 0 - 144
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/Session.scala

@@ -1,144 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import java.util.concurrent.Executors
-
-import com.cloudera.hue.livy.{Utils, Logging}
-import com.cloudera.hue.livy.sessions._
-import org.json4s.JsonDSL._
-import org.json4s.{JValue, DefaultFormats, Extraction}
-
-import _root_.scala.concurrent.duration.Duration
-import _root_.scala.concurrent.{TimeoutException, ExecutionContext, Future}
-
-object Session {
-  val STATUS = "status"
-  val OK = "ok"
-  val ERROR = "error"
-  val EXECUTION_COUNT = "execution_count"
-  val DATA = "data"
-  val ENAME = "ename"
-  val EVALUE = "evalue"
-  val TRACEBACK = "traceback"
-
-  def apply(interpreter: Interpreter): Session = new Session(interpreter)
-}
-
-class Session(interpreter: Interpreter)
-  extends Logging
-{
-  import Session._
-
-  private implicit val executor = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
-  private implicit val formats = DefaultFormats
-
-  private var _state: SessionState = SessionState.NotStarted()
-  private var _history = IndexedSeq[Statement]()
-
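-  // Start the interpreter asynchronously: the session moves from NotStarted
-  // through Starting to Idle, or to Error if startup fails.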
-  Future {
-    _state = SessionState.Starting()
-    interpreter.start()
-    _state = SessionState.Idle()
-  }.onFailure { case _ =>
-    _state = SessionState.Error(System.currentTimeMillis())
-  }
-
-  def kind: String = interpreter.kind
-
-  def state = _state
-
-  def history: IndexedSeq[Statement] = _history
-
-  def execute(code: String): Statement = synchronized {
-    val executionCount = _history.length
-    val statement = Statement(executionCount, Future { executeCode(executionCount, code) })
-    _history :+= statement
-    statement
-  }
-
-  def close(): Unit = {
-    executor.shutdown()
-    interpreter.close()
-  }
-
-  def clearHistory() = synchronized {
-    _history = IndexedSeq()
-  }
-
-  @throws(classOf[TimeoutException])
-  @throws(classOf[InterruptedException])
-  def waitForStateChange(oldState: SessionState, atMost: Duration) = {
-    Utils.waitUntil({ () => state != oldState }, atMost)
-  }
-
-  private def executeCode(executionCount: Int, code: String) = {
-    _state = SessionState.Busy()
-
-    try {
-
-      interpreter.execute(code) match {
-        case Interpreter.ExecuteSuccess(data) =>
-          _state = SessionState.Idle()
-
-          (STATUS -> OK) ~
-          (EXECUTION_COUNT -> executionCount) ~
-          (DATA -> data)
-        case Interpreter.ExecuteIncomplete() =>
-          _state = SessionState.Idle()
-
-          (STATUS -> ERROR) ~
-          (EXECUTION_COUNT -> executionCount) ~
-          (ENAME -> "Error") ~
-          (EVALUE -> "incomplete statement") ~
-          (TRACEBACK -> List())
-        case Interpreter.ExecuteError(ename, evalue, traceback) =>
-          _state = SessionState.Idle()
-
-          (STATUS -> ERROR) ~
-          (EXECUTION_COUNT -> executionCount) ~
-          (ENAME -> ename) ~
-          (EVALUE -> evalue) ~
-          (TRACEBACK -> traceback)
-        case Interpreter.ExecuteAborted(message) =>
-          _state = SessionState.Error(System.currentTimeMillis())
-
-          (STATUS -> ERROR) ~
-          (EXECUTION_COUNT -> executionCount) ~
-          (ENAME -> "Error") ~
-          (EVALUE -> f"Interpreter died:\n$message") ~
-          (TRACEBACK -> List())
-      }
-    } catch {
-      case e: Throwable =>
-        error("Exception when executing code", e)
-
-        _state = SessionState.Idle()
-
-        (STATUS -> ERROR) ~
-        (EXECUTION_COUNT -> executionCount) ~
-        (ENAME -> f"Internal Error: ${e.getClass.getName}") ~
-        (EVALUE -> e.getMessage) ~
-        (TRACEBACK -> List())
-    }
-  }
-}
-
-case class Statement(id: Int, result: Future[JValue])

+ 0 - 146
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/WebApp.scala

@@ -1,146 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.{ExecuteRequest, Logging}
-import com.cloudera.hue.livy.sessions._
-import com.fasterxml.jackson.core.JsonParseException
-import org.json4s.{DefaultFormats, JsonDSL, MappingException, _}
-import org.scalatra._
-import org.scalatra.json.JacksonJsonSupport
-
-import _root_.scala.concurrent.duration.Duration
-import _root_.scala.concurrent.{Await, ExecutionContext, Future, TimeoutException}
-
-object WebApp extends Logging
-
-class WebApp(session: Session) extends ScalatraServlet with FutureSupport with JacksonJsonSupport {
-
-  override protected implicit def executor: ExecutionContext = ExecutionContext.global
-  override protected implicit def jsonFormats: Formats = DefaultFormats ++ Serializers.Formats
-
-  before() {
-    contentType = formats("json")
-
-    session.state match {
-      case SessionState.ShuttingDown() => halt(500, "Shutting down")
-      case _ =>
-    }
-  }
-
-  get("/") {
-    Serializers.serializeSession(session)
-  }
-
-  post("/execute") {
-    val req = parsedBody.extract[ExecuteRequest]
-    Serializers.serializeStatement(session.execute(req.code))
-  }
-
-  get("/history") {
-    val from = params.get("from").map(_.toInt)
-    val size = params.get("size").map(_.toInt)
-
-    Serializers.serializeHistory(session.history, from, size)
-  }
-
-  get("/history/:statementId") {
-    val statementId = params("statementId").toInt
-
-    session.history.lift(statementId) match {
-      case Some(statement) => Serializers.serializeStatement(statement)
-      case None => NotFound("Statement not found")
-    }
-  }
-
-  delete("/") {
-    session.close()
-    Future {
-      Thread.sleep(1000)
-      System.exit(0)
-    }
-  }
-
-  error {
-    case e: JsonParseException => BadRequest(e.getMessage)
-    case e: MappingException => BadRequest(e.getMessage)
-    case e =>
-      WebApp.error("internal error", e)
-      InternalServerError(e.toString)
-  }
-}
-
-private object Serializers {
-  import JsonDSL._
-
-  def Formats: List[CustomSerializer[_]] = List(StatementSerializer)
-
-  def serializeSession(session: Session): JValue = {
-    Map("state" -> session.state.toString)
-  }
-
-  def serializeHistory(history: IndexedSeq[Statement],
-                       fromOpt: Option[Int],
-                       sizeOpt: Option[Int]): JValue = {
-    val size = sizeOpt.getOrElse(100)
-    var from = fromOpt.getOrElse(-1)
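-    // A negative 'from' means "return the last 'size' statements".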
-    if (from < 0) {
-      from = math.max(0, history.length - size)
-    }
-    val until = from + size
-
-    ("from", from) ~
-      ("total", history.length) ~
-      ("statements", history.view(from, until)
-        .map(serializeStatement(_, Duration(0, TimeUnit.SECONDS))))
-  }
-
-  def serializeStatement(statement: Statement,
-                         duration: Duration = Duration(10, TimeUnit.SECONDS)): JValue = {
-    val result = try {
-      Await.result(statement.result, duration)
-    } catch {
-      case _: TimeoutException => null
-    }
-
-    ("id", statement.id) ~ ("result", result)
-  }
-
-  case object SessionSerializer extends CustomSerializer[Session](
-    implicit formats => ( {
-      // We don't support deserialization.
-      PartialFunction.empty
-    }, {
-      case session: Session => serializeSession(session)
-    }
-      )
-  )
-
-  case object StatementSerializer extends CustomSerializer[Statement](
-    implicit formats => ( {
-      // We don't support deserialization.
-      PartialFunction.empty
-    }, {
-      case statement: Statement => serializeStatement(statement)
-    }
-      )
-  )
-}

+ 0 - 30
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/package.scala

@@ -1,30 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy
-
-import org.json4s.JField
-
-package object repl {
-  type MimeTypeMap = List[JField]
-
-  val APPLICATION_JSON = "application/json"
-  val APPLICATION_LIVY_TABLE_JSON = "application/vnd.livy.table.v1+json"
-  val IMAGE_PNG = "image/png"
-  val TEXT_PLAIN = "text/plain"
-}

+ 0 - 131
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/process/ProcessInterpreter.scala

@@ -1,131 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl.process
-
-import java.io.{BufferedReader, IOException, InputStreamReader, PrintWriter}
-import java.util.concurrent.locks.ReentrantLock
-
-import com.cloudera.hue.livy.{Utils, Logging}
-import com.cloudera.hue.livy.repl.Interpreter
-import org.json4s.JValue
-
-import scala.concurrent.Promise
-import scala.io.Source
-
-private sealed trait Request
-private case class ExecuteRequest(code: String, promise: Promise[JValue]) extends Request
-private case class ShutdownRequest(promise: Promise[Unit]) extends Request
-
-/**
- * Abstract base class for an interpreter that runs in a separate process.
- *
- * This type is not thread safe, so it must be protected by a mutex.
- *
- * @param process the child process hosting the interpreter
- */
-abstract class ProcessInterpreter(process: Process)
-  extends Interpreter
-  with Logging
-{
-  protected[this] val stdin = new PrintWriter(process.getOutputStream)
-  protected[this] val stdout = new BufferedReader(new InputStreamReader(process.getInputStream), 1)
-
-  override def start() = {
-    waitUntilReady()
-  }
-
-  override def execute(code: String): Interpreter.ExecuteResponse = {
-    try {
-      sendExecuteRequest(code)
-    } catch {
-      case e: Throwable =>
-        Interpreter.ExecuteError(e.getClass.getName, e.getMessage)
-    }
-  }
-
-  override def close(): Unit = {
-    if (Utils.isProcessAlive(process)) {
-      logger.info("Shutting down process")
-      sendShutdownRequest()
-
-      try {
-        process.getInputStream.close()
-        process.getOutputStream.close()
-      } catch {
-        case _: IOException =>
-      }
-
-      try {
-        process.destroy()
-      } finally {
-        logger.info("process has been shut down")
-      }
-    }
-  }
-
-  protected def sendExecuteRequest(request: String): Interpreter.ExecuteResponse
-
-  protected def sendShutdownRequest(): Unit = {}
-
-  protected def waitUntilReady(): Unit
-
-  private[this] val stderrLock = new ReentrantLock()
-  private[this] var stderrLines = Seq[String]()
-
-  protected def takeErrorLines(): String = {
-    stderrLock.lock()
-    try {
-      val lines = stderrLines
-      stderrLines = Seq()
-      lines.mkString("\n")
-    } finally {
-      stderrLock.unlock()
-    }
-  }
-
-  private[this] val stderrThread = new Thread("process stderr thread") {
-    override def run() = {
-      val lines = Source.fromInputStream(process.getErrorStream).getLines()
-
-      for (line <- lines) {
-        stderrLock.lock()
-        try {
-          stderrLines :+= line
-        } finally {
-          stderrLock.unlock()
-        }
-      }
-    }
-  }
-
-  stderrThread.setDaemon(true)
-  stderrThread.start()
-
-  private[this] val processWatcherThread = new Thread("process watcher thread") {
-    override def run() = {
-      val exitCode = process.waitFor()
-      if (exitCode != 0) {
-        error(f"Process has died with $exitCode")
-      }
-    }
-  }
-
-  processWatcherThread.setDaemon(true)
-  processWatcherThread.start()
-}

+ 0 - 189
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/python/PythonInterpreter.scala

@@ -1,189 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl.python
-
-import java.io._
-import java.lang.ProcessBuilder.Redirect
-import java.nio.file.{Paths, Files}
-
-import com.cloudera.hue.livy.Logging
-import com.cloudera.hue.livy.repl.Interpreter
-import com.cloudera.hue.livy.repl.process.ProcessInterpreter
-import org.json4s.JsonAST.JObject
-import org.json4s.jackson.JsonMethods._
-import org.json4s.jackson.Serialization.write
-import org.json4s.{DefaultFormats, JValue}
-import py4j.GatewayServer
-
-import scala.annotation.tailrec
-import scala.collection.JavaConversions._
-
-object PythonInterpreter extends Logging {
-  def apply(): Interpreter = {
-    val pythonExec = sys.env.getOrElse("PYSPARK_DRIVER_PYTHON", "python")
-
-    val gatewayServer = new GatewayServer(null, 0)
-    gatewayServer.start()
-
-    val builder = new ProcessBuilder(Seq(pythonExec, createFakeShell().toString))
-
-    val env = builder.environment()
-
-    val pythonPath = sys.env.getOrElse("PYTHONPATH", "")
-      .split(File.pathSeparator)
-      .++(findPySparkArchives())
-      .++(findPyFiles())
-
-    env.put("PYTHONPATH", pythonPath.mkString(File.pathSeparator))
-    env.put("PYTHONUNBUFFERED", "YES")
-    env.put("PYSPARK_GATEWAY_PORT", "" + gatewayServer.getListeningPort)
-    env.put("SPARK_HOME", sys.env.getOrElse("SPARK_HOME", "."))
-
-    builder.redirectError(Redirect.PIPE)
-
-    val process = builder.start()
-
-    new PythonInterpreter(process, gatewayServer)
-  }
-
-  private def findPySparkArchives(): Seq[String] = {
-    sys.env.get("PYSPARK_ARCHIVES_PATH")
-      .map(_.split(",").toSeq)
-      .getOrElse {
-        sys.env.get("SPARK_HOME").map { sparkHome =>
-          val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
-          val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
-          require(pyArchivesFile.exists(),
-            "pyspark.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-
-          val py4jFile = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
-            .iterator()
-            .next()
-            .toFile
-
-          require(py4jFile.exists(),
-            "py4j-*-src.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-          Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
-        }.getOrElse(Seq())
-      }
-  }
-
-  private def findPyFiles(): Seq[String] = {
-    val pyFiles = sys.props.getOrElse("spark.submit.pyFiles", "").split(",")
-
-    if (sys.env.getOrElse("SPARK_YARN_MODE", "") == "true") {
-      // In YARN mode, these files have been localized into the current directory.
-      pyFiles.map { file =>
-        val name = new File(file).getName
-        new File(name).getAbsolutePath
-      }
-    } else {
-      pyFiles
-    }
-  }
-
-  private def createFakeShell(): File = {
-    val source: InputStream = getClass.getClassLoader.getResourceAsStream("fake_shell.py")
-
-    val file = Files.createTempFile("", "").toFile
-    file.deleteOnExit()
-
-    val sink = new FileOutputStream(file)
-    val buf = new Array[Byte](1024)
-    var n = source.read(buf)
-
-    while (n > 0) {
-      sink.write(buf, 0, n)
-      n = source.read(buf)
-    }
-
-    source.close()
-    sink.close()
-
-    file
-  }
-}
-
-private class PythonInterpreter(process: Process, gatewayServer: GatewayServer)
-  extends ProcessInterpreter(process)
-  with Logging
-{
-  implicit val formats = DefaultFormats
-
-  override def kind = "pyspark"
-
-  override def close(): Unit = {
-    try {
-      super.close()
-    } finally {
-      gatewayServer.shutdown()
-    }
-  }
-
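-  // Block until the Python side prints its READY handshake (emitted by
-  // fake_shell.py once pyspark has been loaded).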
-  @tailrec
-  final override protected def waitUntilReady(): Unit = {
-    val line = stdout.readLine()
-    line match {
-      case null | "READY" =>
-      case _ => waitUntilReady()
-    }
-  }
-
-  override protected def sendExecuteRequest(code: String): Interpreter.ExecuteResponse = {
-    sendRequest(Map("msg_type" -> "execute_request", "content" -> Map("code" -> code))) match {
-      case Some(response) =>
-        assert((response \ "msg_type").extract[String] == "execute_reply")
-
-        val content = response \ "content"
-
-        (content \ "status").extract[String] match {
-          case "ok" =>
-            Interpreter.ExecuteSuccess((content \ "data").extract[JObject])
-          case "error" =>
-            val ename = (content \ "ename").extract[String]
-            val evalue = (content \ "evalue").extract[String]
-            val traceback = (content \ "traceback").extract[Seq[String]]
-
-            Interpreter.ExecuteError(ename, evalue, traceback)
-          case status =>
-            Interpreter.ExecuteError("Internal Error", f"Unknown status $status")
-        }
-      case None =>
-        Interpreter.ExecuteAborted(takeErrorLines())
-    }
-  }
-
-  override protected def sendShutdownRequest(): Unit = {
-    sendRequest(Map(
-      "msg_type" -> "shutdown_request",
-      "content" -> ()
-    )).foreach { case rep =>
-      warn(f"process failed to shut down while returning $rep")
-    }
-  }
-
-  private def sendRequest(request: Map[String, Any]): Option[JValue] = {
-    stdin.println(write(request))
-    stdin.flush()
-
-    Option(stdout.readLine()).map { case line =>
-      parse(line)
-    }
-  }
-}

+ 0 - 260
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/scala/SparkInterpreter.scala

@@ -1,260 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl.scala
-
-import java.io._
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.repl.Interpreter
-import org.apache.spark.rdd.RDD
-import org.apache.spark.repl.SparkIMain
-import org.apache.spark.{SparkConf, SparkContext}
-import org.json4s.JsonAST._
-import org.json4s.JsonDSL._
-import org.json4s.{DefaultFormats, Extraction}
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.{JPrintWriter, Results}
-
-
-object SparkInterpreter {
-  private val MAGIC_REGEX = "^%(\\w+)\\W*(.*)".r
-
-  def apply(): SparkInterpreter = { new SparkInterpreter }
-}
-
-/**
- * This represents a Spark interpreter. It is not thread safe.
- */
-class SparkInterpreter extends Interpreter {
-  import SparkInterpreter._
-
-  private implicit def formats = DefaultFormats
-
-  private val outputStream = new ByteArrayOutputStream()
-  private var sparkIMain: SparkIMain = _
-  private var sparkContext: SparkContext = _
-
-  def kind = "spark"
-
-  override def start() = {
-    require(sparkIMain == null && sparkContext == null)
-
-    val settings = new Settings()
-    settings.usejavacp.value = true
-
-    sparkIMain = new SparkIMain(settings, new JPrintWriter(outputStream, true))
-    sparkIMain.initializeSynchronous()
-
-    val sparkConf = new SparkConf(true)
-      .setAppName("Livy Spark shell")
-      .set("spark.repl.class.uri", sparkIMain.classServerUri)
-
-    sparkContext = SparkContext.getOrCreate(sparkConf)
-
-    sparkIMain.beQuietDuring {
-      sparkIMain.bind("sc", "org.apache.spark.SparkContext", sparkContext, List("""@transient"""))
-    }
-  }
-
-  override def execute(code: String): Interpreter.ExecuteResponse = {
-    require(sparkIMain != null && sparkContext != null)
-
-    executeLines(code.trim.split("\n").toList, Interpreter.ExecuteSuccess(JObject(
-      (repl.TEXT_PLAIN, JString(""))
-    )))
-  }
-
-  override def close(): Unit = synchronized {
-    if (sparkContext != null) {
-      sparkContext.stop()
-    }
-
-    if (sparkIMain != null) {
-      sparkIMain.close()
-      sparkIMain = null
-    }
-  }
-
-  private def executeMagic(magic: String, rest: String): Interpreter.ExecuteResponse = {
-    magic match {
-      case "json" => executeJsonMagic(rest)
-      case "table" => executeTableMagic(rest)
-      case _ =>
-        Interpreter.ExecuteError("UnknownMagic", f"Unknown magic command $magic")
-    }
-  }
-
-  private def executeJsonMagic(name: String): Interpreter.ExecuteResponse = {
-    try {
-      val value = sparkIMain.valueOfTerm(name) match {
-        case Some(obj: RDD[_]) => obj.asInstanceOf[RDD[_]].take(10)
-        case Some(obj) => obj
-        case None => return Interpreter.ExecuteError("NameError", f"Value $name does not exist")
-      }
-
-      Interpreter.ExecuteSuccess(JObject(
-        (repl.APPLICATION_JSON, Extraction.decompose(value))
-      ))
-    } catch {
-      case _: Throwable =>
-        Interpreter.ExecuteError("ValueError", "Failed to convert value into a JSON value")
-    }
-  }
-
-  private class TypesDoNotMatch extends Exception
-
-  private def convertTableType(value: JValue): String = {
-    value match {
-      case (JNothing | JNull) => "NULL_TYPE"
-      case JBool(_) => "BOOLEAN_TYPE"
-      case JString(_) => "STRING_TYPE"
-      case JInt(_) => "BIGINT_TYPE"
-      case JDouble(_) => "DOUBLE_TYPE"
-      case JDecimal(_) => "DECIMAL_TYPE"
-      case JArray(arr) =>
-        if (allSameType(arr.iterator)) {
-          "ARRAY_TYPE"
-        } else {
-          throw new TypesDoNotMatch
-        }
-      case JObject(obj) =>
-        if (allSameType(obj.iterator.map(_._2))) {
-          "MAP_TYPE"
-        } else {
-          throw new TypesDoNotMatch
-        }
-    }
-  }
-
-  private def allSameType(values: Iterator[JValue]): Boolean = {
-    if (values.hasNext) {
-      val type_name = convertTableType(values.next())
-      values.forall { case value => type_name.equals(convertTableType(value)) }
-    } else {
-      true
-    }
-  }
-
-  private def executeTableMagic(name: String): Interpreter.ExecuteResponse = {
-    val value = sparkIMain.valueOfTerm(name) match {
-      case Some(obj: RDD[_]) => obj.asInstanceOf[RDD[_]].take(10)
-      case Some(obj) => obj
-      case None => return Interpreter.ExecuteError("NameError", f"Value $name does not exist")
-    }
-
-    extractTableFromJValue(Extraction.decompose(value))
-  }
-
-  private def extractTableFromJValue(value: JValue): Interpreter.ExecuteResponse = {
-    // Convert the value into JSON and map it to a table.
-    val rows: List[JValue] = value match {
-      case JArray(arr) => arr
-      case _ => List(value)
-    }
-
-    try {
-      val headers = scala.collection.mutable.Map[String, Map[String, String]]()
-
-      val data = rows.map { case row =>
-        val cols: List[JField] = row match {
-          case JArray(arr: List[JValue]) =>
-            arr.zipWithIndex.map { case (v, index) => JField(index.toString, v) }
-          case JObject(obj) => obj.sortBy(_._1)
-          case value: JValue => List(JField("0", value))
-        }
-
-        cols.map { case (k, v) =>
-          val typeName = convertTableType(v)
-
-          headers.get(k) match {
-            case Some(header) =>
-              if (header.get("type").get != typeName) {
-                throw new TypesDoNotMatch
-              }
-            case None =>
-              headers.put(k, Map(
-                "type" -> typeName,
-                "name" -> k
-              ))
-          }
-
-          v
-        }
-      }
-
-      Interpreter.ExecuteSuccess(
-        repl.APPLICATION_LIVY_TABLE_JSON -> (
-          ("headers" -> headers.toSeq.sortBy(_._1).map(_._2)) ~ ("data" -> data)
-        ))
-    } catch {
-      case _: TypesDoNotMatch =>
-        Interpreter.ExecuteError("TypeError", "table rows have different types")
-    }
-  }
-
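-  // Execute the lines one at a time; on ExecuteIncomplete, join the current
-  // line with the next and retry so multi-line statements can complete.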
-  private def executeLines(lines: List[String], result: Interpreter.ExecuteResponse): Interpreter.ExecuteResponse = {
-    lines match {
-      case Nil => result
-      case head :: tail =>
-        val result = executeLine(head)
-
-        result match {
-          case Interpreter.ExecuteIncomplete() =>
-            tail match {
-              case Nil =>
-                result
-
-              case next :: nextTail =>
-                executeLines(head + "\n" + next :: nextTail, result)
-            }
-          case Interpreter.ExecuteError(_, _, _) =>
-            result
-
-          case _ =>
-            executeLines(tail, result)
-        }
-    }
-  }
-
-  private def executeLine(code: String): Interpreter.ExecuteResponse = {
-    code match {
-      case MAGIC_REGEX(magic, rest) =>
-        executeMagic(magic, rest)
-      case _ =>
-        scala.Console.withOut(outputStream) {
-          sparkIMain.interpret(code) match {
-            case Results.Success =>
-              Interpreter.ExecuteSuccess(
-                repl.TEXT_PLAIN -> readStdout()
-              )
-            case Results.Incomplete => Interpreter.ExecuteIncomplete()
-            case Results.Error => Interpreter.ExecuteError("Error", readStdout())
-          }
-        }
-    }
-  }
-
-  private def readStdout() = {
-    val output = outputStream.toString("UTF-8").trim
-    outputStream.reset()
-
-    output
-  }
-}

+ 0 - 250
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/sparkr/SparkRInterpreter.scala

@@ -1,250 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl.sparkr
-
-import java.io.{File, FileOutputStream}
-import java.lang.ProcessBuilder.Redirect
-import java.nio.file.Files
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.repl.Interpreter
-import com.cloudera.hue.livy.repl.process.ProcessInterpreter
-import org.apache.commons.codec.binary.Base64
-import org.json4s.JsonDSL._
-import org.json4s._
-
-import scala.annotation.tailrec
-import scala.collection.JavaConversions._
-
-object SparkRInterpreter {
-  private val LIVY_END_MARKER = "----LIVY_END_OF_COMMAND----"
-  private val PRINT_MARKER = f"""print("$LIVY_END_MARKER")"""
-  private val EXPECTED_OUTPUT = f"""[1] "$LIVY_END_MARKER""""
-
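-  // Commands matching one of these R plotting functions get wrapped in
-  // png()/dev.off() so the rendered image can be captured and returned
-  // (see sendExecuteRequest).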
-  private val PLOT_REGEX = (
-    "(" +
-      "(?:bagplot)|" +
-      "(?:barplot)|" +
-      "(?:boxplot)|" +
-      "(?:dotchart)|" +
-      "(?:hist)|" +
-      "(?:lines)|" +
-      "(?:pie)|" +
-      "(?:pie3D)|" +
-      "(?:plot)|" +
-      "(?:qqline)|" +
-      "(?:qqnorm)|" +
-      "(?:scatterplot)|" +
-      "(?:scatterplot3d)|" +
-      "(?:scatterplot\\.matrix)|" +
-      "(?:splom)|" +
-      "(?:stripchart)|" +
-      "(?:vioplot)" +
-    ")"
-    ).r.unanchored
-
-  def apply(): SparkRInterpreter = {
-    val executable = sparkRExecutable
-      .getOrElse(throw new Exception(f"Cannot find sparkR executable"))
-
-    val builder = new ProcessBuilder(Seq(executable.getAbsolutePath))
-
-    val env = builder.environment()
-    env.put("SPARK_HOME", sys.env.getOrElse("SPARK_HOME", "."))
-    env.put("SPARKR_DRIVER_R", createFakeShell().toString)
-
-    builder.redirectError(Redirect.PIPE)
-
-    val process = builder.start()
-
-    new SparkRInterpreter(process)
-  }
-
-  def sparkRExecutable: Option[File] = {
-    val executable = sys.env.getOrElse("SPARKR_DRIVER_R", "sparkR")
-    val executableFile = new File(executable)
-
-    if (executableFile.exists) {
-      Some(executableFile)
-    } else {
-      // see if sparkR is on the path.
-      val path: Option[String] = sys.env.get("PATH")
-      assume(path.isDefined, "PATH is not defined?")
-
-      path.get
-        .split(File.pathSeparator)
-        .map(new File(_, executable))
-        .find(_.exists)
-    }
-  }
-
-  private def createFakeShell(): File = {
-    val source = getClass.getClassLoader.getResourceAsStream("fake_R.sh")
-
-    val file = Files.createTempFile("", "").toFile
-    file.deleteOnExit()
-
-    val sink = new FileOutputStream(file)
-    val buf = new Array[Byte](1024)
-    var n = source.read(buf)
-
-    while (n > 0) {
-      sink.write(buf, 0, n)
-      n = source.read(buf)
-    }
-
-    source.close()
-    sink.close()
-
-    file.setExecutable(true)
-
-    file
-  }
-}
-
-class SparkRInterpreter(process: Process)
-  extends ProcessInterpreter(process)
-{
-  import SparkRInterpreter._
-
-  implicit val formats = DefaultFormats
-
-  private[this] var executionCount = 0
-
-  override def kind = "sparkR"
-
-  final override protected def waitUntilReady(): Unit = {
-    // Set the option to catch and ignore errors instead of halting.
-    sendExecuteRequest("options(error = dump.frames)")
-    executionCount = 0
-  }
-
-  override protected def sendExecuteRequest(command: String): Interpreter.ExecuteResponse = {
-    var code = command
-
-    // Create an image file if this command is trying to plot.
-    val tempFile = PLOT_REGEX.findFirstIn(code).map { _ =>
-      val tempFile = Files.createTempFile("", ".png")
-      val tempFileString = tempFile.toAbsolutePath
-
-      code = f"""png("$tempFileString")\n$code\ndev.off()"""
-
-      tempFile
-    }
-
-    try {
-      var content: JObject = repl.TEXT_PLAIN -> (sendRequest(code) + takeErrorLines())
-
-      // If we rendered anything, pass along the last image.
-      tempFile.foreach { file =>
-        val bytes = Files.readAllBytes(file)
-        if (bytes.nonEmpty) {
-          val image = Base64.encodeBase64String(bytes)
-          content = content ~ (repl.IMAGE_PNG -> image)
-        }
-      }
-
-      Interpreter.ExecuteSuccess(content)
-    } catch {
-      case e: Error =>
-        val message = Seq(e.output, takeErrorLines()).mkString("\n")
-        Interpreter.ExecuteError("Error", message)
-      case e: Exited =>
-        Interpreter.ExecuteAborted(takeErrorLines())
-    } finally {
-      tempFile.foreach(Files.delete)
-    }
-
-  }
-
-  private def sendRequest(code: String): String = {
-    stdin.println(code)
-    stdin.flush()
-
-    stdin.println(PRINT_MARKER)
-    stdin.flush()
-
-    readTo(EXPECTED_OUTPUT)
-  }
-
-  override protected def sendShutdownRequest() = {
-    stdin.println("q()")
-    stdin.flush()
-
-    while (stdout.readLine() != null) {}
-  }
-
-  @tailrec
-  private def readTo(marker: String, output: StringBuilder = StringBuilder.newBuilder): String = {
-    var char = readChar(output)
-
-    // Remove any ANSI color codes, which match the pattern "\u001b\\[[0-9;]*[mG]".
-    // It would be easier to do this with a regex, but there is no easy way to do so
-    // without copying the StringBuilder into a string for each character.
-    if (char == '\u001b') {
-      if (readChar(output) == '[') {
-        char = readDigits(output)
-
-        if (char == 'm' || char == 'G') {
-          output.delete(output.lastIndexOf('\u001b'), output.length)
-        }
-      }
-    }
-
-    if (output.endsWith(marker)) {
-      val result = output.toString()
-      result.substring(0, result.length - marker.length)
-        .stripPrefix("\n")
-        .stripSuffix("\n")
-    } else {
-      readTo(marker, output)
-    }
-  }
-
-  private def readChar(output: StringBuilder): Char = {
-    val byte = stdout.read()
-    if (byte == -1) {
-      throw new Exited(output.toString())
-    } else {
-      val char = byte.toChar
-      output.append(char)
-      char
-    }
-  }
-
-  @tailrec
-  private def readDigits(output: StringBuilder): Char = {
-    val byte = stdout.read()
-    if (byte == -1) {
-      throw new Exited(output.toString())
-    }
-
-    val char = byte.toChar
-
-    if (('0' to '9').contains(char)) {
-      output.append(char)
-      readDigits(output)
-    } else {
-      char
-    }
-  }
-
-  private class Exited(val output: String) extends Exception {}
-  private class Error(val output: String) extends Exception {}
-}
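
Worth pausing on readTo above: it strips ANSI escape sequences character by character so that the StringBuilder never has to be copied into a String per read. For comparison, a minimal sketch of the regex approach the inline comment alludes to, applicable once a chunk has already been buffered:

    // Sketch: strip ANSI color codes from an already-buffered String in one
    // pass. The character-by-character loop above exists to avoid the
    // per-character String copies this would require inside the read loop.
    val AnsiCode = "\u001b\\[[0-9;]*[mG]".r

    def stripAnsi(buffered: String): String = AnsiCode.replaceAllIn(buffered, "")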

+ 0 - 34
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/BaseInterpreterSpec.scala

@@ -1,34 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import org.scalatest.{FlatSpec, Matchers}
-
-abstract class BaseInterpreterSpec extends FlatSpec with Matchers {
-
-  def createInterpreter(): Interpreter
-
-  def withInterpreter(testCode: Interpreter => Any) = {
-    val interpreter = createInterpreter()
-    interpreter.start()
-    try {
-      testCode(interpreter)
-    } finally interpreter.close()
-  }
-}
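
BaseInterpreterSpec is a loan-pattern fixture: withInterpreter starts a fresh interpreter, lends it to the test body, and guarantees close() runs even when the test throws. A minimal sketch of a concrete spec plugging in, assuming the same package and JsonDSL imports as the specs below; MockInterpreter is the stub defined later in WebAppSpec, which always answers "1":

    import org.json4s.JsonDSL._

    class MockInterpreterSpec extends BaseInterpreterSpec {
      override def createInterpreter(): Interpreter = new MockInterpreter()

      it should "always answer 1" in withInterpreter { interpreter =>
        interpreter.execute("anything") should equal (
          Interpreter.ExecuteSuccess(repl.TEXT_PLAIN -> "1"))
      }
    }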

+ 0 - 50
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/BaseSessionSpec.scala

@@ -1,50 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import java.util.concurrent.TimeUnit
-import com.cloudera.hue.livy.sessions.SessionState
-import org.json4s.DefaultFormats
-import org.scalatest.{FlatSpec, Matchers}
-
-import _root_.scala.concurrent.duration.Duration
-
-abstract class BaseSessionSpec extends FlatSpec with Matchers {
-
-  implicit val formats = DefaultFormats
-
-  def withSession(testCode: Session => Any) = {
-    val session = Session(createInterpreter())
-    session.waitForStateChange(SessionState.NotStarted(), Duration(30, TimeUnit.SECONDS))
-    try {
-      testCode(session)
-    } finally session.close()
-  }
-
-  def createInterpreter(): Interpreter
-
-  it should "start in the starting or idle state" in withSession { session =>
-    session.state should (equal (SessionState.Starting()) or equal (SessionState.Idle()))
-  }
-
-  it should "eventually become the idle state" in withSession { session =>
-    session.waitForStateChange(SessionState.Starting(), Duration(30, TimeUnit.SECONDS))
-    session.state should equal (SessionState.Idle())
-  }
-}

+ 0 - 213
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonInterpreterSpec.scala

@@ -1,213 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.repl.python.PythonInterpreter
-import org.json4s.JsonDSL._
-import org.json4s.{DefaultFormats, JValue}
-
-class PythonInterpreterSpec extends BaseInterpreterSpec {
-
-  implicit val formats = DefaultFormats
-
-  override def createInterpreter() = PythonInterpreter()
-
-  it should "execute `1 + 2` == 3" in withInterpreter { interpreter =>
-    val response = interpreter.execute("1 + 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "execute multiple statements" in withInterpreter { interpreter =>
-    var response = interpreter.execute("x = 1")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> ""
-    ))
-
-    response = interpreter.execute("y = 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> ""
-    ))
-
-    response = interpreter.execute("x + y")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "execute multiple statements in one block" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """
-        |x = 1
-        |
-        |y = 2
-        |
-        |x + y
-      """.stripMargin)
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "parse a class" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """
-        |class Counter(object):
-        |   def __init__(self):
-        |       self.count = 0
-        |
-        |   def add_one(self):
-        |       self.count += 1
-        |
-        |   def add_two(self):
-        |       self.count += 2
-        |
-        |counter = Counter()
-        |counter.add_one()
-        |counter.add_two()
-        |counter.count
-      """.stripMargin)
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "do json magic" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """x = [[1, 'a'], [3, 'b']]
-        |%json x
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.APPLICATION_JSON -> List[JValue](
-        List[JValue](1, "a"),
-        List[JValue](3, "b")
-      )
-    ))
-  }
-
-  it should "do table magic" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """x = [[1, 'a'], [3, 'b']]
-        |%table x
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.APPLICATION_LIVY_TABLE_JSON -> (
-        ("headers" -> List(
-          ("type" -> "INT_TYPE") ~ ("name" -> "0"),
-          ("type" -> "STRING_TYPE") ~ ("name" -> "1")
-        )) ~
-          ("data" -> List(
-            List[JValue](1, "a"),
-            List[JValue](3, "b")
-          ))
-        )
-    ))
-  }
-
-  it should "allow magic inside statements" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """x = [[1, 'a'], [3, 'b']]
-        |%table x
-        |1 + 2
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "capture stdout" in withInterpreter { interpreter =>
-    val response = interpreter.execute("print 'Hello World'")
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "Hello World"
-    ))
-  }
-
-  it should "report an error if accessing an unknown variable" in withInterpreter { interpreter =>
-    val response = interpreter.execute("x")
-    response should equal(Interpreter.ExecuteError(
-      "NameError",
-      "name 'x' is not defined",
-      List(
-        "Traceback (most recent call last):\n",
-        "NameError: name 'x' is not defined\n"
-      )
-    ))
-  }
-
-  it should "report an error if empty magic command" in withInterpreter { interpreter =>
-    val response = interpreter.execute("%")
-    response should equal(Interpreter.ExecuteError(
-      "UnknownMagic",
-      "magic command not specified",
-      List("UnknownMagic: magic command not specified\n")
-    ))
-  }
-
-  it should "report an error if unknown magic command" in withInterpreter { interpreter =>
-    val response = interpreter.execute("%foo")
-    response should equal(Interpreter.ExecuteError(
-      "UnknownMagic",
-      "unknown magic command 'foo'",
-      List("UnknownMagic: unknown magic command 'foo'\n")
-    ))
-  }
-
-  it should "not execute part of the block if there is a syntax error" in withInterpreter { interpreter =>
-    var response = interpreter.execute(
-      """x = 1
-        |'
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteError(
-      "SyntaxError",
-      "EOL while scanning string literal (<stdin>, line 2)",
-      List(
-        "  File \"<stdin>\", line 2\n",
-        "    '\n",
-        "    ^\n",
-        "SyntaxError: EOL while scanning string literal\n"
-      )
-    ))
-
-    response = interpreter.execute("x")
-    response should equal(Interpreter.ExecuteError(
-      "NameError",
-      "name 'x' is not defined",
-      List(
-        "Traceback (most recent call last):\n",
-        "NameError: name 'x' is not defined\n"
-      )
-    ))
-  }
-
-  it should "execute spark commands" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """sc.parallelize(xrange(0, 2)).map(lambda i: i + 1).collect()""")
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "[1, 2]"
-    ))
-  }
-}
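
The %table expectations above pin down Livy's table wire format: column names are positional ("0", "1") and the types (INT_TYPE, STRING_TYPE) are evidently inferred from the row values. The same payload built with the json4s DSL outside a test, as a standalone sketch:

    import org.json4s._
    import org.json4s.JsonDSL._

    // application/vnd.livy.table.v1+json payload for [[1, 'a'], [3, 'b']]
    val tablePayload: JValue =
      ("headers" -> List(
        ("type" -> "INT_TYPE") ~ ("name" -> "0"),
        ("type" -> "STRING_TYPE") ~ ("name" -> "1"))) ~
      ("data" -> List(
        List[JValue](1, "a"),
        List[JValue](3, "b")))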

+ 0 - 204
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/PythonSessionSpec.scala

@@ -1,204 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl.python.PythonInterpreter
-import org.json4s.Extraction
-import org.json4s.JsonAST.JValue
-
-import _root_.scala.concurrent.Await
-import _root_.scala.concurrent.duration.Duration
-
-class PythonSessionSpec extends BaseSessionSpec {
-
-  override def createInterpreter() = PythonInterpreter()
-
-  it should "execute `1 + 2` == 3" in withSession { session =>
-    val statement = session.execute("1 + 2")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "execute `x = 1`, then `y = 2`, then `x + y`" in withSession { session =>
-    var statement = session.execute("x = 1")
-    statement.id should equal (0)
-
-    var result = Await.result(statement.result, Duration.Inf)
-    var expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> ""
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("y = 2")
-    statement.id should equal (1)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 1,
-      "data" -> Map(
-        "text/plain" -> ""
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("x + y")
-    statement.id should equal (2)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 2,
-      "data" -> Map(
-        "text/plain" -> "3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "do table magic" in withSession { session =>
-    val statement = session.execute("x = [[1, 'a'], [3, 'b']]\n%table x")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "application/vnd.livy.table.v1+json" -> Map(
-          "headers" -> List(
-            Map("type" -> "INT_TYPE", "name" -> "0"),
-            Map("type" -> "STRING_TYPE", "name" -> "1")),
-          "data" -> List(List(1, "a"), List(3, "b"))
-        )
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "capture stdout" in withSession { session =>
-    val statement = session.execute("""print 'Hello World'""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "Hello World"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "report an error if accessing an unknown variable" in withSession { session =>
-    val statement = session.execute("""x""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "error",
-      "execution_count" -> 0,
-      "traceback" -> List(
-        "Traceback (most recent call last):\n",
-        "NameError: name 'x' is not defined\n"
-      ),
-      "ename" -> "NameError",
-      "evalue" -> "name 'x' is not defined"
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "report an error if exception is thrown" in withSession { session =>
-    val statement = session.execute(
-      """def foo():
-        |    raise Exception()
-        |foo()
-        |""".stripMargin)
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "error",
-      "execution_count" -> 0,
-      "traceback" -> List(
-        "Traceback (most recent call last):\n",
-        "Exception\n"
-      ),
-      "ename" -> "Exception",
-      "evalue" -> ""
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "access the spark context" in withSession { session =>
-    val statement = session.execute("""sc""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val resultMap = result.extract[Map[String, JValue]]
-
-    // Manually extract the values since the SparkContext repr contains a memory address that changes between runs.
-    resultMap("status").extract[String] should equal ("ok")
-    resultMap("execution_count").extract[Int] should equal (0)
-
-    val data = resultMap("data").extract[Map[String, JValue]]
-    data("text/plain").extract[String] should include ("<pyspark.context.SparkContext object at")
-  }
-
-  it should "execute spark commands" in withSession { session =>
-    val statement = session.execute("""
-                                      |sc.parallelize(xrange(0, 2)).map(lambda i: i + 1).collect()
-                                      |""".stripMargin)
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "[1, 2]"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-}
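
Note the technique in the "access the spark context" test above: when exact equality would be brittle (the SparkContext repr embeds a memory address), the spec parses the result and asserts field by field. A self-contained sketch of that pattern with json4s (the JSON literal is illustrative):

    import org.json4s._
    import org.json4s.jackson.JsonMethods._

    implicit val formats = DefaultFormats

    val result = parse(
      """{"status": "ok", "execution_count": 0,
         "data": {"text/plain": "<pyspark.context.SparkContext object at 0x7f...>"}}""")

    val resultMap = result.extract[Map[String, JValue]]
    assert(resultMap("status").extract[String] == "ok")

    val data = resultMap("data").extract[Map[String, JValue]]
    assert(data("text/plain").extract[String].contains("SparkContext"))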

+ 0 - 128
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/ScalaInterpreterSpec.scala

@@ -1,128 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.repl.scala.SparkInterpreter
-import org.json4s.JsonDSL._
-import org.json4s.{DefaultFormats, JValue}
-
-class ScalaInterpreterSpec extends BaseInterpreterSpec {
-
-  implicit val formats = DefaultFormats
-
-  override def createInterpreter() = SparkInterpreter()
-
-  it should "execute `1 + 2` == 3" in withInterpreter { interpreter =>
-    val response = interpreter.execute("1 + 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "res0: Int = 3"
-    ))
-  }
-
-  it should "execute multiple statements" in withInterpreter { interpreter =>
-    var response = interpreter.execute("val x = 1")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "x: Int = 1"
-    ))
-
-    response = interpreter.execute("val y = 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "y: Int = 2"
-    ))
-
-    response = interpreter.execute("x + y")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "res0: Int = 3"
-    ))
-  }
-
-  it should "execute multiple statements in one block" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """
-        |val x = 1
-        |
-        |val y = 2
-        |
-        |x + y
-      """.stripMargin)
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "res2: Int = 3"
-    ))
-  }
-
-  it should "do table magic" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """val x = List(List(1, "a"), List(3, "b"))
-        |%table x
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.APPLICATION_LIVY_TABLE_JSON -> (
-        ("headers" -> List(
-          ("type" -> "BIGINT_TYPE") ~ ("name" -> "0"),
-          ("type" -> "STRING_TYPE") ~ ("name" -> "1")
-        )) ~
-          ("data" -> List(
-            List[JValue](1, "a"),
-            List[JValue](3, "b")
-          ))
-        )
-    ))
-  }
-
-  it should "allow magic inside statements" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """val x = List(List(1, "a"), List(3, "b"))
-        |%table x
-        |1 + 2
-      """.stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "res0: Int = 3"
-    ))
-  }
-
-  it should "capture stdout" in withInterpreter { interpreter =>
-    val response = interpreter.execute("println(\"Hello World\")")
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "Hello World"
-    ))
-  }
-
-  it should "report an error if accessing an unknown variable" in withInterpreter { interpreter =>
-    val response = interpreter.execute("x")
-    response should equal(Interpreter.ExecuteError(
-      "Error",
-      """<console>:8: error: not found: value x
-        |              x
-        |              ^""".stripMargin,
-      List()
-    ))
-  }
-
-  it should "execute spark commands" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """sc.parallelize(0 to 1).map { i => i+1 }.collect""".stripMargin)
-
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "res0: Array[Int] = Array(1, 2)"
-    ))
-  }
-}

+ 0 - 111
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkRInterpreterSpec.scala

@@ -1,111 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.repl.sparkr.SparkRInterpreter
-import org.json4s.JsonDSL._
-import org.json4s.{DefaultFormats, JValue}
-
-class SparkRInterpreterSpec extends BaseInterpreterSpec {
-
-  implicit val formats = DefaultFormats
-
-  override protected def withFixture(test: NoArgTest) = {
-    val sparkRExecutable = SparkRInterpreter.sparkRExecutable
-    assume(sparkRExecutable.isDefined, "Cannot find sparkR")
-    test()
-  }
-
-  override def createInterpreter() = {
-    SparkRInterpreter()
-  }
-
-  it should "execute `1 + 2` == 3" in withInterpreter { interpreter =>
-    val response = interpreter.execute("1 + 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "[1] 3"
-    ))
-  }
-
-  it should "execute multiple statements" in withInterpreter { interpreter =>
-    var response = interpreter.execute("x = 1")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> ""
-    ))
-
-    response = interpreter.execute("y = 2")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> ""
-    ))
-
-    response = interpreter.execute("x + y")
-    response should equal (Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "[1] 3"
-    ))
-  }
-
-  it should "execute multiple statements in one block" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """
-        |x = 1
-        |
-        |y = 2
-        |
-        |x + y
-      """.stripMargin)
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "[1] 3"
-    ))
-  }
-
-  it should "capture stdout" in withInterpreter { interpreter =>
-    val response = interpreter.execute("cat(3)")
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "3"
-    ))
-  }
-
-  it should "report an error if accessing an unknown variable" in withInterpreter { interpreter =>
-    val response = interpreter.execute("x")
-    response should equal(Interpreter.ExecuteSuccess(
-      repl.TEXT_PLAIN -> "Error: object 'x' not found"
-    ))
-  }
-
-  it should "execute spark commands" in withInterpreter { interpreter =>
-    val response = interpreter.execute(
-      """head(createDataFrame(sqlContext, faithful))""")
-
-    response match {
-      case Interpreter.ExecuteSuccess(map: JValue) =>
-        (map \ "text/plain").extract[String] should include (
-          """  eruptions waiting
-            |1     3.600      79
-            |2     1.800      54
-            |3     3.333      74
-            |4     2.283      62
-            |5     4.533      85
-            |6     2.883      55""".stripMargin)
-      case _ =>
-        throw new Exception("response is not a success")
-    }
-
-  }
-}
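
Both this spec and SparkRSessionSpec below override withFixture so that every test is canceled rather than failed when no sparkR executable is present: ScalaTest's assume throws TestCanceledException instead of TestFailedException. The same guard also works per test:

    it should "only run where sparkR is installed" in {
      // assume() cancels the test (instead of failing it) when false.
      assume(SparkRInterpreter.sparkRExecutable.isDefined, "Cannot find sparkR")
      // ... test body ...
    }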

+ 0 - 185
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkRSessionSpec.scala

@@ -1,185 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl.sparkr.SparkRInterpreter
-import org.json4s.Extraction
-import org.json4s.JsonAST.JValue
-import org.scalatest.BeforeAndAfterAll
-
-import _root_.scala.concurrent.Await
-import _root_.scala.concurrent.duration.Duration
-
-class SparkRSessionSpec extends BaseSessionSpec {
-
-  override protected def withFixture(test: NoArgTest) = {
-    val sparkRExecutable = SparkRInterpreter.sparkRExecutable
-    assume(sparkRExecutable.isDefined, "Cannot find sparkR")
-    test()
-  }
-
-  override def createInterpreter() = SparkRInterpreter()
-
-  it should "execute `1 + 2` == 3" in withSession { session =>
-    val statement = session.execute("1 + 2")
-    statement.id should equal(0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "[1] 3"
-      )
-    ))
-
-    result should equal(expectedResult)
-  }
-
-  it should "execute `x = 1`, then `y = 2`, then `x + y`" in withSession { session =>
-    var statement = session.execute("x = 1")
-    statement.id should equal (0)
-
-    var result = Await.result(statement.result, Duration.Inf)
-    var expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> ""
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("y = 2")
-    statement.id should equal (1)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 1,
-      "data" -> Map(
-        "text/plain" -> ""
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("x + y")
-    statement.id should equal (2)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 2,
-      "data" -> Map(
-        "text/plain" -> "[1] 3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "capture stdout from print" in withSession { session =>
-    val statement = session.execute("""print('Hello World')""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "[1] \"Hello World\""
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "capture stdout from cat" in withSession { session =>
-    val statement = session.execute("""cat(3)""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "report an error if accessing an unknown variable" in withSession { session =>
-    val statement = session.execute("""x""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "Error: object 'x' not found"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "access the spark context" in withSession { session =>
-    val statement = session.execute("""sc""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val resultMap = result.extract[Map[String, JValue]]
-
-    // Assert on the fields individually: the Java ref id in the repr may
-    // vary between runs.
-    resultMap("status").extract[String] should equal ("ok")
-    resultMap("execution_count").extract[Int] should equal (0)
-
-    val data = resultMap("data").extract[Map[String, JValue]]
-    data("text/plain").extract[String] should include ("Java ref type org.apache.spark.api.java.JavaSparkContext")
-  }
-
-  it should "execute spark commands" in withSession { session =>
-    val statement = session.execute("""
-                                      |head(createDataFrame(sqlContext, faithful))
-                                      |""".stripMargin)
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val resultMap = result.extract[Map[String, JValue]]
-
-    // Manually extract since sparkR outputs a lot of Spark logging information.
-    resultMap("status").extract[String] should equal ("ok")
-    resultMap("execution_count").extract[Int] should equal (0)
-
-    val data = resultMap("data").extract[Map[String, JValue]]
-    data("text/plain").extract[String] should include ("""  eruptions waiting
-                                                         |1     3.600      79
-                                                         |2     1.800      54
-                                                         |3     3.333      74
-                                                         |4     2.283      62
-                                                         |5     4.533      85
-                                                         |6     2.883      55""".stripMargin)
-  }
-}

+ 0 - 197
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/SparkSessionSpec.scala

@@ -1,197 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import com.cloudera.hue.livy.repl.scala.SparkInterpreter
-import org.json4s.Extraction
-import org.json4s.JsonAST.{JArray, JValue}
-
-import _root_.scala.concurrent.Await
-import _root_.scala.concurrent.duration.Duration
-
-class SparkSessionSpec extends BaseSessionSpec {
-
-  override def createInterpreter() = SparkInterpreter()
-
-  it should "execute `1 + 2` == 3" in withSession { session =>
-    val statement = session.execute("1 + 2")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "res0: Int = 3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "execute `x = 1`, then `y = 2`, then `x + y`" in withSession { session =>
-    var statement = session.execute("val x = 1")
-    statement.id should equal (0)
-
-    var result = Await.result(statement.result, Duration.Inf)
-    var expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "x: Int = 1"
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("val y = 2")
-    statement.id should equal (1)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 1,
-      "data" -> Map(
-        "text/plain" -> "y: Int = 2"
-      )
-    ))
-
-    result should equal (expectedResult)
-
-    statement = session.execute("x + y")
-    statement.id should equal (2)
-
-    result = Await.result(statement.result, Duration.Inf)
-    expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 2,
-      "data" -> Map(
-        "text/plain" -> "res0: Int = 3"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "capture stdout" in withSession { session =>
-    val statement = session.execute("""println("Hello World")""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "Hello World"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "report an error if accessing an unknown variable" in withSession { session =>
-    val statement = session.execute("""x""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "error",
-      "execution_count" -> 0,
-      "ename" -> "Error",
-      "evalue" ->
-        """<console>:8: error: not found: value x
-          |              x
-          |              ^""".stripMargin,
-      "traceback" -> List()
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "report an error if exception is thrown" in withSession { session =>
-    val statement = session.execute("""throw new Exception()""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val resultMap = result.extract[Map[String, JValue]]
-
-    // Manually extract the values since the line numbers in the exception could change.
-    resultMap("status").extract[String] should equal ("error")
-    resultMap("execution_count").extract[Int] should equal (0)
-    resultMap("ename").extract[String] should equal ("Error")
-    resultMap("evalue").extract[String] should include ("java.lang.Exception")
-    resultMap("traceback").extract[List[_]] should equal (List())
-  }
-
-  it should "access the spark context" in withSession { session =>
-    val statement = session.execute("""sc""")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-    val resultMap = result.extract[Map[String, JValue]]
-
-    // Manually extract the values since the SparkContext output includes an object address that could change.
-    resultMap("status").extract[String] should equal ("ok")
-    resultMap("execution_count").extract[Int] should equal (0)
-
-    val data = resultMap("data").extract[Map[String, JValue]]
-    data("text/plain").extract[String] should include ("res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext")
-  }
-
-  it should "execute spark commands" in withSession { session =>
-    val statement = session.execute(
-      """sc.parallelize(0 to 1).map{i => i+1}.collect""".stripMargin)
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "text/plain" -> "res0: Array[Int] = Array(1, 2)"
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-
-  it should "do table magic" in withSession { session =>
-    val statement = session.execute("val x = List((1, \"a\"), (3, \"b\"))\n%table x")
-    statement.id should equal (0)
-
-    val result = Await.result(statement.result, Duration.Inf)
-
-    val expectedResult = Extraction.decompose(Map(
-      "status" -> "ok",
-      "execution_count" -> 0,
-      "data" -> Map(
-        "application/vnd.livy.table.v1+json" -> Map(
-          "headers" -> List(
-            Map("type" -> "BIGINT_TYPE", "name" -> "_1"),
-            Map("type" -> "STRING_TYPE", "name" -> "_2")),
-          "data" -> List(List(1, "a"), List(3, "b"))
-        )
-      )
-    ))
-
-    result should equal (expectedResult)
-  }
-}

+ 0 - 116
apps/spark/java/livy-repl/src/test/scala/com/cloudera/hue/livy/repl/WebAppSpec.scala

@@ -1,116 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.repl
-
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.repl
-import com.cloudera.hue.livy.sessions._
-import org.json4s.JsonAST.{JArray, JString}
-import org.json4s.JsonDSL._
-import org.json4s.jackson.JsonMethods._
-import org.json4s.{JValue, DefaultFormats, Extraction}
-import org.scalatest.{BeforeAndAfter, FunSpecLike}
-import org.scalatra.test.scalatest.ScalatraSuite
-
-import _root_.scala.concurrent.duration.Duration
-import _root_.scala.concurrent.{Await, Future}
-
-class WebAppSpec extends ScalatraSuite with FunSpecLike with BeforeAndAfter {
-
-  implicit val formats = DefaultFormats
-
-  class MockInterpreter extends Interpreter {
-    override def kind: String = "mock"
-
-    override def start() = {}
-
-    override def execute(code: String) = {
-      Thread.sleep(1000)
-      Interpreter.ExecuteSuccess(repl.TEXT_PLAIN -> "1")
-    }
-
-    override def close() = {}
-  }
-
-  val interpreter = new MockInterpreter()
-  val session = new Session(interpreter)
-
-  val servlet = new WebApp(session)
-
-  addServlet(servlet, "/*")
-
-  describe("A session") {
-    it("GET / should return the session state") {
-      get("/") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody \ "state" should equal (JString("idle"))
-      }
-
-      session.execute("")
-
-      get("/") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody \ "state" should equal (JString("busy"))
-      }
-    }
-
-    it("GET /history with no history should be empty") {
-      get("/history") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        parse(body) should equal (
-          ("from", 0) ~
-            ("total", 0) ~
-            ("statements", JArray(List())))
-      }
-    }
-
-    it("GET /history with history should return something") {
-      Await.ready(session.execute("").result, Duration(10, TimeUnit.SECONDS))
-
-      get("/history") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        parse(body) should equal (
-          ("from" -> 0) ~
-          ("total" -> 1) ~
-          (
-            "statements" -> List[JValue](
-              ("id" -> 0) ~
-              ("result" ->
-                ("status" -> "ok") ~
-                ("execution_count" -> 0) ~
-                ("data" -> (repl.TEXT_PLAIN -> "1"))
-              )
-            )
-          )
-        )
-      }
-    }
-
-    after {
-      session.clearHistory()
-    }
-  }
-}
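
WebAppSpec exercises the (also deleted) WebApp servlet through ScalatraSuite's embedded Jetty. A minimal sketch of the two routes it pins down; stateName and the empty /history payload are inventions here, standing in for however the real WebApp serialized session state and paged statement history:

    import org.scalatra.ScalatraServlet
    import org.json4s.JsonDSL._
    import org.json4s.jackson.JsonMethods._

    class WebAppSketch(session: Session) extends ScalatraServlet {
      // Hypothetical: the spec expects lowercase state names ("idle", "busy").
      private def stateName: String =
        session.state.getClass.getSimpleName.stripSuffix("$").toLowerCase

      get("/") {
        contentType = "application/json"
        compact(render("state" -> stateName))
      }

      get("/history") {
        contentType = "application/json"
        compact(render(("from" -> 0) ~ ("total" -> 0) ~
          ("statements" -> List[String]())))
      }
    }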

+ 0 - 230
apps/spark/java/livy-server/pom.xml

@@ -1,230 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <relativePath>../pom.xml</relativePath>
-        <version>0.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>livy-server_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-spark_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-yarn_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.dropwizard.metrics</groupId>
-            <artifactId>metrics-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.dropwizard.metrics</groupId>
-            <artifactId>metrics-healthchecks</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpclient</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-metrics_${scala.binary.version}</artifactId>
-            <version>2.4.0.M3</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.typesafe.akka</groupId>
-                    <artifactId>akka-actor_${scala.binary.version}</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatra</groupId>
-            <artifactId>scalatra-test_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>net.databinder.dispatch</groupId>
-            <artifactId>dispatch-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>net.databinder.dispatch</groupId>
-            <artifactId>dispatch-json4s-jackson_${scala.binary.version}</artifactId>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <mainClass>com.cloudera.hue.livy.server.Main</mainClass>
-                        </manifest>
-                    </archive>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-            </plugin>
-
-            <plugin>
-                <groupId>org.scalatest</groupId>
-                <artifactId>scalatest-maven-plugin</artifactId>
-                <version>1.0</version>
-                <configuration>
-                    <systemProperties>
-                        <spark.master>local</spark.master>
-                        <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
-                        <spark.ui.enabled>false</spark.ui.enabled>
-                        <settings.usejavacp.value>true</settings.usejavacp.value>
-                        <livy.repl.jar>../livy-repl/target/livy-repl_${scala.binary.version}-${project.version}.jar</livy.repl.jar>
-                    </systemProperties>
-                </configuration>
-            </plugin>
-
-        </plugins>
-    </build>
-
-</project>
-

+ 0 - 13
apps/spark/java/livy-server/src/main/resources/logback-access.xml

@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-    <!-- it is always a good idea to activate OnConsoleStatusListener -->
-    <statusListener class="ch.qos.logback.core.status.OnConsoleStatusListener" />
-
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%h %l %u %user %date "%r" %s %b</pattern>
-        </encoder>
-    </appender>
-
-    <appender-ref ref="STDOUT" />
-</configuration>

+ 0 - 12
apps/spark/java/livy-server/src/main/resources/logback.xml

@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
-        </encoder>
-    </appender>
-
-    <root level="info">
-        <appender-ref ref="STDOUT" />
-    </root>
-</configuration>

+ 0 - 179
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/Main.scala

@@ -1,179 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server
-
-import java.io.{File, IOException}
-import javax.servlet.ServletContext
-
-import com.cloudera.hue.livy._
-import com.cloudera.hue.livy.server.batch.BatchSessionServlet
-import com.cloudera.hue.livy.server.interactive.InteractiveSessionServlet
-import com.cloudera.hue.livy.spark.SparkManager
-import org.scalatra._
-import org.scalatra.metrics.MetricsBootstrap
-import org.scalatra.metrics.MetricsSupportExtensions._
-import org.scalatra.servlet.ScalatraListener
-import org.slf4j.LoggerFactory
-
-object Main {
-
-  val SESSION_KIND = "livy-server.session.kind"
-  val PROCESS_SESSION = "process"
-  val YARN_SESSION = "yarn"
-  lazy val logger = LoggerFactory.getLogger(this.getClass)
-
-  def main(args: Array[String]): Unit = {
-    val livyConf = new LivyConf()
-    Utils.loadDefaultLivyProperties(livyConf)
-
-    val host = livyConf.get("livy.server.host", "0.0.0.0")
-    val port = livyConf.getInt("livy.server.port", 8998)
-
-    // Make sure the `spark-submit` program exists, otherwise much of livy won't work.
-    testSparkHome(livyConf)
-    testSparkSubmit(livyConf)
-
-    val server = new WebServer(livyConf, host, port)
-
-    server.context.setResourceBase("src/main/com/cloudera/hue/livy/server")
-    server.context.setInitParameter(ScalatraListener.LifeCycleKey, classOf[ScalatraBootstrap].getCanonicalName)
-    server.context.addEventListener(new ScalatraListener)
-
-    server.start()
-
-    try {
-      if (!sys.props.contains("livy.server.serverUrl")) {
-        sys.props("livy.server.serverUrl") = f"http://${server.host}:${server.port}"
-      }
-    } finally {
-      server.join()
-      server.stop()
-
-      // Make sure to close all our outstanding http requests.
-      dispatch.Http.shutdown()
-    }
-  }
-
-  /**
-   * Test that SPARK_HOME is configured and that the path exists.
-   */
-  private def testSparkHome(livyConf: LivyConf) = {
-    val sparkHome = livyConf.sparkHome().getOrElse {
-      System.err.println("Livy requires the SPARK_HOME environment variable")
-      sys.exit(1)
-    }
-
-    val sparkHomeFile = new File(sparkHome)
-
-    if (!sparkHomeFile.exists) {
-      System.err.println("SPARK_HOME path does not exist")
-      sys.exit(1)
-    }
-  }
-
-  /**
-   * Test that the configured `spark-submit` executable exists.
-   *
-   * @param livyConf
-   */
-  private def testSparkSubmit(livyConf: LivyConf) = {
-    try {
-      val versions_regex = (
-        """^(?:""" +
-          """(1\.3\.0)|""" +
-          """(1\.3\.1)|""" +
-          """(1\.4\.0)|""" +
-          """(1\.4\.1)|""" +
-          """(1\.5\.0)|""" +
-          """(1\.5\.1)""" +
-        """)(-.*)?"""
-      ).r
-
-      val version = sparkSubmitVersion(livyConf)
-
-      versions_regex.findFirstIn(version) match {
-        case Some(_) =>
-          logger.info(f"Using spark-submit version $version")
-        case None =>
-          logger.warn(f"Livy has not been tested with spark-submit version $version")
-      }
-    } catch {
-      case e: IOException =>
-        System.err.println("Failed to run spark-submit executable: " + e.toString)
-        System.exit(1)
-    }
-  }
-
-  /**
-   * Return the version of the configured `spark-submit` version.
-   *
-   * @param livyConf
-   * @return the version
-   */
-  private def sparkSubmitVersion(livyConf: LivyConf): String = {
-    val sparkSubmit = livyConf.sparkSubmit()
-    val pb = new ProcessBuilder(sparkSubmit, "--version")
-    pb.redirectErrorStream(true)
-    pb.redirectInput(ProcessBuilder.Redirect.PIPE)
-
-    val process = new LineBufferedProcess(pb.start())
-    val exitCode = process.waitFor()
-    val output = process.inputIterator.mkString("\n")
-
-    val regex = """version (.*)""".r.unanchored
-
-    output match {
-      case regex(version) => version
-      case _ => throw new IOException(f"Unable to determine spark-submit version [$exitCode]:\n$output")
-    }
-  }
-
-}
-
-class ScalatraBootstrap
-  extends LifeCycle
-  with Logging
-  with MetricsBootstrap {
-
-  var sparkManager: SparkManager = null
-
-  override def init(context: ServletContext): Unit = {
-    try {
-      val livyConf = new LivyConf()
-      sparkManager = SparkManager(livyConf)
-
-      context.mount(new InteractiveSessionServlet(sparkManager.interactiveManager), "/sessions/*")
-      context.mount(new BatchSessionServlet(sparkManager.batchManager), "/batches/*")
-      context.mountMetricsAdminServlet("/")
-
-      context.initParameters(org.scalatra.EnvironmentKey) = livyConf.get("livy.environment", "development")
-    } catch {
-      case e: Throwable =>
-        println(f"Exception thrown when initializing server: $e")
-        sys.exit(1)
-    }
-  }
-
-  override def destroy(context: ServletContext): Unit = {
-    if (sparkManager != null) {
-      sparkManager.shutdown()
-      sparkManager = null
-    }
-  }
-}
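
The removed version check above boils down to one unanchored capture over the `spark-submit --version` banner. A minimal standalone sketch of that parse (the banner text here is invented for illustration):

    import java.io.IOException

    object VersionParseSketch extends App {
      // Invented stand-in for `spark-submit --version` output.
      val output =
        """Welcome to Spark
          |version 1.5.1
          |Type --help for more information.""".stripMargin

      val regex = """version (.*)""".r.unanchored

      output match {
        case regex(version) => println(s"spark-submit version: $version")   // 1.5.1
        case _              => throw new IOException(s"Unable to determine version:\n$output")
      }
    }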

+ 0 - 146
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/SessionServlet.scala

@@ -1,146 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server
-
-import com.cloudera.hue.livy.Logging
-import com.cloudera.hue.livy.sessions.{SessionManager, Session}
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession.SessionFailedToStart
-import com.cloudera.hue.livy.spark.ConfigOptionNotAllowed
-import com.fasterxml.jackson.core.JsonParseException
-import org.json4s.JsonDSL._
-import org.json4s.{DefaultFormats, Formats, JValue, MappingException}
-import org.scalatra._
-import org.scalatra.json.JacksonJsonSupport
-
-import scala.concurrent.{ExecutionContext, Future}
-
-object SessionServlet extends Logging
-
-abstract class SessionServlet[S <: Session](sessionManager: SessionManager[S])
-  extends ScalatraServlet
-  with FutureSupport
-  with MethodOverride
-  with JacksonJsonSupport
-  with UrlGeneratorSupport
-{
-  override protected implicit def executor: ExecutionContext = ExecutionContext.global
-
-  override protected implicit def jsonFormats: Formats = DefaultFormats
-
-  protected def serializeSession(session: S): JValue
-
-  before() {
-    contentType = formats("json")
-  }
-
-  get("/") {
-    val from = params.get("from").map(_.toInt).getOrElse(0)
-    val size = params.get("size").map(_.toInt).getOrElse(100)
-
-    val sessions = sessionManager.all()
-
-    Map(
-      "from" -> from,
-      "total" -> sessionManager.size(),
-      "sessions" -> sessions.view(from, from + size).map(serializeSession)
-    )
-  }
-
-  val getSession = get("/:id") {
-    val id = params("id").toInt
-
-    sessionManager.get(id) match {
-      case None => NotFound("session not found")
-      case Some(session) => serializeSession(session)
-    }
-  }
-
-  get("/:id/state") {
-    val id = params("id").toInt
-
-    sessionManager.get(id) match {
-      case None => NotFound("batch not found")
-      case Some(batch) =>
-        ("id", batch.id) ~ ("state", batch.state.toString)
-    }
-  }
-
-  get("/:id/log") {
-    val id = params("id").toInt
-
-    sessionManager.get(id) match {
-      case None => NotFound("session not found")
-      case Some(session) =>
-        val from = params.get("from").map(_.toInt)
-        val size = params.get("size").map(_.toInt)
-        val (from_, total, logLines) = serializeLogs(session, from, size)
-
-        ("id", session.id) ~
-          ("from", from_) ~
-          ("total", total) ~
-          ("log", logLines)
-    }
-  }
-
-  delete("/:id") {
-    val id = params("id").toInt
-
-    sessionManager.delete(id) match {
-      case None => NotFound("session not found")
-      case Some(future) => new AsyncResult {
-        val is = future.map { case () => Ok(Map("msg" -> "deleted")) }
-      }
-    }
-  }
-
-  post("/") {
-    new AsyncResult {
-      val is = Future {
-        val session = sessionManager.create(parsedBody)
-        Created(session,
-          headers = Map("Location" -> url(getSession, "id" -> session.id.toString))
-        )
-      }
-    }
-  }
-
-  error {
-    case e: JsonParseException => BadRequest(e.getMessage)
-    case e: MappingException => BadRequest(e.getMessage)
-    case e: ConfigOptionNotAllowed => BadRequest(e.getMessage)
-    case e: SessionFailedToStart => InternalServerError(e.getMessage)
-    case e: dispatch.StatusCode => ActionResult(ResponseStatus(e.code), e.getMessage, Map.empty)
-    case e =>
-      SessionServlet.error("internal error", e)
-      InternalServerError(e.toString)
-  }
-
-  private def serializeLogs(session: S, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
-    val lines = session.logLines()
-
-    val size = sizeOpt.getOrElse(100)
-    var from = fromOpt.getOrElse(-1)
-    if (from < 0) {
-      from = math.max(0, lines.length - size)
-    }
-    val until = from + size
-
-    (from, lines.length, lines.view(from, until))
-  }
-}
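
`serializeLogs` above packs the paging rules into a few lines: `size` defaults to 100, and an absent or negative `from` snaps the window to the tail of the log. The same arithmetic, isolated into a runnable sketch:

    object LogWindowSketch extends App {
      // Mirrors serializeLogs: absent or negative `from` means "tail of the log".
      def window(lines: IndexedSeq[String], fromOpt: Option[Int], sizeOpt: Option[Int]) = {
        val size = sizeOpt.getOrElse(100)
        val from = fromOpt.filter(_ >= 0).getOrElse(math.max(0, lines.length - size))
        (from, lines.length, lines.slice(from, from + size))
      }

      val lines = (1 to 250).map(i => s"line $i")
      println(window(lines, None, None)._1)          // 150: window snaps to the last 100 lines
      println(window(lines, Some(0), Some(10))._3)   // Vector(line 1, ..., line 10)
    }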

+ 0 - 74
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/batch/BatchSessionServlet.scala

@@ -1,74 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server.batch
-
-import com.cloudera.hue.livy.Logging
-import com.cloudera.hue.livy.server.SessionServlet
-import com.cloudera.hue.livy.sessions.SessionManager
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import org.json4s.JsonDSL._
-import org.json4s._
-
-import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
-
-object BatchSessionServlet extends Logging
-
-class BatchSessionServlet(batchManager: SessionManager[BatchSession])
-  extends SessionServlet[BatchSession](batchManager)
-{
-  override protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-  override protected implicit def jsonFormats: Formats = DefaultFormats ++ Serializers.Formats
-
-  override protected def serializeSession(session: BatchSession) = Serializers.serializeBatch(session)
-
-}
-
-private object Serializers {
-
-  def Formats: List[CustomSerializer[_]] = List(BatchSerializer)
-
-  def serializeBatch(batch: BatchSession): JValue = {
-    ("id", batch.id) ~
-      ("state", batch.state.toString) ~
-      ("log", getLogs(batch, None, Some(10))._3)
-  }
-
-  def getLogs(batch: BatchSession, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
-    val lines = batch.logLines()
-
-    val size = sizeOpt.getOrElse(100)
-    var from = fromOpt.getOrElse(-1)
-    if (from < 0) {
-      from = math.max(0, lines.length - size)
-    }
-    val until = from + size
-
-    (from, lines.length, lines.view(from, until))
-  }
-
-  case object BatchSerializer extends CustomSerializer[BatchSession](
-    implicit formats => ( {
-    // We don't support deserialization.
-    PartialFunction.empty
-  }, {
-    case batch: BatchSession => serializeBatch(batch)
-  }
-    )
-  )
-}
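
The serializer objects above all follow one json4s idiom: a `CustomSerializer` whose deserialization side is `PartialFunction.empty`, making the type write-only over the wire. A minimal instance of the same idiom (the `Job` class is a hypothetical stand-in for `BatchSession`):

    import org.json4s._
    import org.json4s.JsonDSL._
    import org.json4s.jackson.Serialization.write

    object WriteOnlySerializerSketch extends App {
      // Hypothetical stand-in for BatchSession.
      class Job(val id: Int, val state: String)

      case object JobSerializer extends CustomSerializer[Job](implicit formats => (
        PartialFunction.empty,                                  // deserialization unsupported
        { case job: Job => ("id" -> job.id) ~ ("state" -> job.state) }
      ))

      implicit val formats: Formats = DefaultFormats + JobSerializer

      println(write(new Job(0, "success")))                     // {"id":0,"state":"success"}
    }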

+ 0 - 223
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServlet.scala

@@ -1,223 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server.interactive
-
-import java.net.URL
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.{ExecuteRequest, Logging}
-import com.cloudera.hue.livy.server.SessionServlet
-import com.cloudera.hue.livy.sessions._
-import com.cloudera.hue.livy.sessions.interactive.{InteractiveSession, Statement, StatementState}
-import org.json4s.JsonAST.JString
-import org.json4s._
-import org.scalatra._
-
-import scala.concurrent._
-import scala.concurrent.duration._
-
-object InteractiveSessionServlet extends Logging
-
-class InteractiveSessionServlet(sessionManager: SessionManager[InteractiveSession])
-  extends SessionServlet(sessionManager)
-{
-  override protected implicit def jsonFormats: Formats = DefaultFormats ++ Serializers.Formats
-
-  override protected def serializeSession(session: InteractiveSession) = Serializers.serializeSession(session)
-
-  post("/:sessionId/callback") {
-    val sessionId = params("sessionId").toInt
-    val callback = parsedBody.extract[CallbackRequest]
-
-    sessionManager.get(sessionId) match {
-      case Some(session) =>
-        if (session.state == SessionState.Starting()) {
-          session.url = new URL(callback.url)
-          Accepted()
-        } else if (session.state.isActive) {
-          Ok()
-        } else {
-          BadRequest("Session is in wrong state")
-        }
-      case None => NotFound("Session not found")
-    }
-  }
-
-  post("/:sessionId/stop") {
-    val sessionId = params("sessionId").toInt
-    sessionManager.get(sessionId) match {
-      case Some(session) =>
-        val future = session.stop()
-
-        new AsyncResult() { val is = for { _ <- future } yield NoContent() }
-      case None => NotFound("Session not found")
-    }
-  }
-
-  post("/:sessionId/interrupt") {
-    val sessionId = params("sessionId").toInt
-    sessionManager.get(sessionId) match {
-      case Some(session) =>
-        val future = for {
-          _ <- session.interrupt()
-        } yield Ok(Map("msg" -> "interrupted"))
-
-        // FIXME: this is silently eating exceptions.
-        new AsyncResult() { val is = future }
-      case None => NotFound("Session not found")
-    }
-  }
-
-  get("/:sessionId/statements") {
-    val sessionId = params("sessionId").toInt
-
-    sessionManager.get(sessionId) match {
-      case None => NotFound("Session not found")
-      case Some(session: InteractiveSession) =>
-        val from = params.get("from").map(_.toInt).getOrElse(0)
-        val size = params.get("size").map(_.toInt).getOrElse(session.statements.length)
-
-        Map(
-          "total_statements" -> session.statements.length,
-          "statements" -> session.statements.view(from, from + size)
-        )
-    }
-  }
-
-  val getStatement = get("/:sessionId/statements/:statementId") {
-    val sessionId = params("sessionId").toInt
-    val statementId = params("statementId").toInt
-
-    val from = params.get("from").map(_.toInt)
-    val size = params.get("size").map(_.toInt)
-
-    sessionManager.get(sessionId) match {
-      case None => NotFound("Session not found")
-      case Some(session) =>
-        session.statements.lift(statementId) match {
-          case None => NotFound("Statement not found")
-          case Some(statement) =>
-            Serializers.serializeStatement(statement, from, size)
-        }
-    }
-  }
-
-  post("/:sessionId/statements") {
-    val sessionId = params("sessionId").toInt
-    val req = parsedBody.extract[ExecuteRequest]
-
-    sessionManager.get(sessionId) match {
-      case Some(session) =>
-        val statement = session.executeStatement(req)
-
-        Created(statement,
-          headers = Map(
-            "Location" -> url(getStatement,
-              "sessionId" -> session.id.toString,
-              "statementId" -> statement.id.toString)))
-      case None => NotFound("Session not found")
-    }
-  }
-}
-
-private case class CallbackRequest(url: String)
-
-private object Serializers {
-  import JsonDSL._
-
-  def SessionFormats: List[CustomSerializer[_]] = List(SessionSerializer, SessionKindSerializer, SessionStateSerializer)
-  def StatementFormats: List[CustomSerializer[_]] = List(StatementSerializer, StatementStateSerializer)
-  def Formats: List[CustomSerializer[_]] = SessionFormats ++ StatementFormats
-
-  private def serializeSessionState(state: SessionState) = JString(state.toString)
-
-  private def serializeSessionKind(kind: Kind) = JString(kind.toString)
-
-  private def serializeStatementState(state: StatementState) = JString(state.toString)
-
-  def serializeSession(session: InteractiveSession): JValue = {
-    ("id", session.id) ~
-      ("state", serializeSessionState(session.state)) ~
-      ("kind", serializeSessionKind(session.kind)) ~
-      ("proxyUser", session.proxyUser) ~
-      ("log", getLogs(session, None, Some(10))._3)
-  }
-  
-  def getLogs(session: InteractiveSession, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
-    val lines = session.logLines()
-
-    val size = sizeOpt.getOrElse(100)
-    var from = fromOpt.getOrElse(-1)
-    if (from < 0) {
-      from = math.max(0, lines.length - size)
-    }
-    val until = from + size
-
-    (from, lines.length, lines.view(from, until))
-  }
-
-  def serializeStatement(statement: Statement, from: Option[Int], size: Option[Int]): JValue = {
-    // Wait up to 100 milliseconds to see if the statement has finished.
-    val output = try {
-      Await.result(statement.output(), Duration(100, TimeUnit.MILLISECONDS))
-    } catch {
-      case _: TimeoutException => null
-    }
-
-    ("id" -> statement.id) ~
-      ("state" -> serializeStatementState(statement.state)) ~
-      ("output" -> output)
-  }
-
-  case object SessionSerializer extends CustomSerializer[InteractiveSession](implicit formats => ( {
-    // We don't support deserialization.
-    PartialFunction.empty
-  }, {
-    case session: InteractiveSession =>
-      serializeSession(session)
-  }
-    )
-  )
-
-  case object SessionStateSerializer extends CustomSerializer[SessionState](implicit formats => ( {
-    // We don't support deserialization.
-    PartialFunction.empty
-  }, {
-    case state: SessionState => JString(state.toString)
-  }
-    )
-  )
-
-  case object StatementSerializer extends CustomSerializer[Statement](implicit formats => ( {
-    // We don't support deserialization.
-    PartialFunction.empty
-  }, {
-    case statement: Statement =>
-      serializeStatement(statement, None, None)
-  }))
-
-  case object StatementStateSerializer extends CustomSerializer[StatementState](implicit formats => ( {
-    // We don't support deserialization.
-    PartialFunction.empty
-  }, {
-    case state: StatementState => JString(state.toString)
-  }
-    )
-  )
-}
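
`serializeStatement` shows a small but deliberate trick: block for at most 100 ms on the statement's output future and serialize `null` if it has not finished, so polling clients never hang on a slow statement. The same pattern in isolation:

    import java.util.concurrent.TimeUnit
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration.Duration
    import scala.concurrent.{Await, Future, TimeoutException}

    object PollFutureSketch extends App {
      // Wait briefly for a result; report "still running" instead of blocking.
      def poll[A](f: Future[A]): Option[A] =
        try Some(Await.result(f, Duration(100, TimeUnit.MILLISECONDS)))
        catch { case _: TimeoutException => None }

      println(poll(Future.successful(42)))               // Some(42)
      println(poll(Future { Thread.sleep(5000); 42 }))   // None: not finished within 100 ms
    }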

+ 0 - 135
apps/spark/java/livy-server/src/test/scala/com/cloudera/hue/livy/server/batch/BatchServletSpec.scala

@@ -1,135 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server.batch
-
-import java.io.FileWriter
-import java.nio.file.{Files, Path}
-import java.util.concurrent.TimeUnit
-import com.cloudera.hue.livy.sessions.{SessionManager, SessionState}
-import com.cloudera.hue.livy.spark.SparkProcessBuilderFactory
-import com.cloudera.hue.livy.spark.batch.{BatchSessionProcessFactory, CreateBatchRequest}
-import com.cloudera.hue.livy.{LivyConf, Utils}
-import org.json4s.JsonAST.{JArray, JInt, JObject, JString}
-import org.json4s.jackson.JsonMethods._
-import org.json4s.jackson.Serialization.write
-import org.json4s.{DefaultFormats, Formats}
-import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSpecLike}
-import org.scalatra.test.scalatest.ScalatraSuite
-
-import scala.concurrent.duration.Duration
-
-class BatchServletSpec extends ScalatraSuite with FunSpecLike with BeforeAndAfterAll with BeforeAndAfter {
-
-  protected implicit def jsonFormats: Formats = DefaultFormats
-
-  val script: Path = {
-    val script = Files.createTempFile("livy-test", ".py")
-    script.toFile.deleteOnExit()
-    val writer = new FileWriter(script.toFile)
-    try {
-      writer.write(
-        """
-          |print "hello world"
-        """.stripMargin)
-    } finally {
-      writer.close()
-    }
-    script
-  }
-
-  val livyConf = new LivyConf()
-  val batchFactory = new BatchSessionProcessFactory(new SparkProcessBuilderFactory(livyConf))
-  val batchManager = new SessionManager(livyConf, batchFactory)
-  val servlet = new BatchSessionServlet(batchManager)
-
-  addServlet(servlet, "/*")
-
-  after {
-    batchManager.shutdown()
-  }
-
-  describe("Batch Servlet") {
-    it("should create and tear down a batch") {
-      get("/") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody \ "sessions" should equal (JArray(List()))
-      }
-
-      val createBatchRequest = write(CreateBatchRequest(
-        file = script.toString
-      ))
-
-      post("/", body = createBatchRequest, headers = Map("Content-Type" -> "application/json")) {
-        status should equal (201)
-        header("Content-Type") should include("application/json")
-        header("Location") should equal("/0")
-        val parsedBody = parse(body)
-        parsedBody \ "id" should equal (JInt(0))
-
-        val batch = batchManager.get(0)
-        batch should be (defined)
-      }
-
-      // Wait for the process to finish.
-      {
-        val batch = batchManager.get(0).get
-        Utils.waitUntil({ () => !batch.state.isActive }, Duration(10, TimeUnit.SECONDS))
-        (batch.state match {
-          case SessionState.Success(_) => true
-          case _ => false
-        }) should be (true)
-      }
-
-      get("/0") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody \ "id" should equal (JInt(0))
-        parsedBody \ "state" should equal (JString("success"))
-
-        val batch = batchManager.get(0)
-        batch should be (defined)
-      }
-
-      get("/0/log?size=1000") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody \ "id" should equal (JInt(0))
-        (parsedBody \ "log").extract[List[String]] should contain ("hello world")
-
-        val batch = batchManager.get(0)
-        batch should be (defined)
-      }
-
-      delete("/0") {
-        status should equal (200)
-        header("Content-Type") should include("application/json")
-        val parsedBody = parse(body)
-        parsedBody should equal (JObject(("msg", JString("deleted"))))
-
-        val batch = batchManager.get(0)
-        batch should not be defined
-      }
-    }
-  }
-
-}

+ 0 - 142
apps/spark/java/livy-server/src/test/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServletSpec.scala

@@ -1,142 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.server.interactive
-
-import java.net.URL
-import java.util.concurrent.atomic.AtomicInteger
-
-import com.cloudera.hue.livy.{ExecuteRequest, LivyConf}
-import com.cloudera.hue.livy.sessions._
-import com.cloudera.hue.livy.sessions.interactive.{InteractiveSession, Statement}
-import com.cloudera.hue.livy.spark.interactive.{CreateInteractiveRequest, InteractiveSessionFactory}
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
-import org.json4s.JsonAST.{JArray, JInt, JObject, JString}
-import org.json4s.jackson.JsonMethods._
-import org.json4s.jackson.Serialization.write
-import org.json4s.{DefaultFormats, Formats}
-import org.scalatest.FunSpecLike
-import org.scalatra.test.scalatest.ScalatraSuite
-
-import scala.concurrent.Future
-
-class InteractiveSessionServletSpec extends ScalatraSuite with FunSpecLike {
-
-  protected implicit def jsonFormats: Formats = DefaultFormats ++ Serializers.SessionFormats
-
-  class MockInteractiveSession(val id: Int) extends InteractiveSession {
-    var _state: SessionState = SessionState.Idle()
-
-    var _idCounter = new AtomicInteger()
-    var _statements = IndexedSeq[Statement]()
-
-    override def kind: Kind = Spark()
-
-    override def logLines() = IndexedSeq()
-
-    override def state = _state
-
-    override def stop(): Future[Unit] = Future.successful(())
-
-    override def url_=(url: URL): Unit = ???
-
-    override def executeStatement(executeRequest: ExecuteRequest): Statement = {
-      val id = _idCounter.getAndIncrement
-      val statement = new Statement(
-        id,
-        executeRequest,
-        Future.successful(JObject()))
-
-      _statements :+= statement
-
-      statement
-    }
-
-    override def proxyUser: Option[String] = None
-
-    override def url: Option[URL] = ???
-
-    override def statements: IndexedSeq[Statement] = _statements
-
-    override def interrupt(): Future[Unit] = ???
-  }
-
-  class MockInteractiveSessionFactory(processFactory: SparkProcessBuilderFactory)
-    extends InteractiveSessionFactory(processFactory) {
-
-    protected override def create(id: Int,
-                                  process: SparkProcess,
-                                  request: CreateInteractiveRequest): InteractiveSession = {
-      new MockInteractiveSession(id)
-    }
-  }
-
-  val livyConf = new LivyConf()
-  val processFactory = new SparkProcessBuilderFactory(livyConf)
-  val sessionManager = new SessionManager(livyConf, new MockInteractiveSessionFactory(processFactory))
-  val servlet = new InteractiveSessionServlet(sessionManager)
-
-  addServlet(servlet, "/*")
-
-  it("should setup and tear down an interactive session") {
-    get("/") {
-      status should equal(200)
-      header("Content-Type") should include("application/json")
-      val parsedBody = parse(body)
-      parsedBody \ "sessions" should equal(JArray(List()))
-    }
-
-    val createInteractiveRequest = write(CreateInteractiveRequest(
-      kind = Spark()
-    ))
-
-    post("/", body = createInteractiveRequest, headers = Map("Content-Type" -> "application/json")) {
-      status should equal (201)
-      header("Content-Type") should include("application/json")
-
-      header("Location") should equal("/0")
-      val parsedBody = parse(body)
-      parsedBody \ "id" should equal (JInt(0))
-
-      val session = sessionManager.get(0)
-      session should be (defined)
-    }
-
-    get("/0") {
-      status should equal (200)
-      header("Content-Type") should include("application/json")
-      val parsedBody = parse(body)
-      parsedBody \ "id" should equal (JInt(0))
-      parsedBody \ "state" should equal (JString("idle"))
-
-      val session = sessionManager.get(0)
-      session should be (defined)
-    }
-
-    delete("/0") {
-      status should equal (200)
-      header("Content-Type") should include("application/json")
-      val parsedBody = parse(body)
-      parsedBody should equal (JObject(("msg", JString("deleted"))))
-
-      val session = sessionManager.get(0)
-      session should not be defined
-    }
-  }
-
-}

+ 0 - 145
apps/spark/java/livy-spark/pom.xml

@@ -1,145 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <relativePath>../pom.xml</relativePath>
-        <version>0.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>livy-spark_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>com.cloudera.hue.livy</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.cloudera.hue.livy</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.cloudera.hue.livy</groupId>
-            <artifactId>livy-repl_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.cloudera.hue.livy</groupId>
-            <artifactId>livy-yarn_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>net.databinder.dispatch</groupId>
-            <artifactId>dispatch-core_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>net.databinder.dispatch</groupId>
-            <artifactId>dispatch-json4s-jackson_${scala.binary.version}</artifactId>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-            </plugin>
-
-            <plugin>
-                <groupId>org.scalatest</groupId>
-                <artifactId>scalatest-maven-plugin</artifactId>
-                <version>1.0</version>
-                <configuration>
-                    <systemProperties>
-                        <spark.master>local</spark.master>
-                        <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
-                        <spark.ui.enabled>false</spark.ui.enabled>
-                        <settings.usejavacp.value>true</settings.usejavacp.value>
-                        <livy.repl.jar>../livy-repl/target/livy-repl_${scala.binary.version}-${project.version}.jar</livy.repl.jar>
-                    </systemProperties>
-                </configuration>
-            </plugin>
-
-        </plugins>
-    </build>
-
-</project>
-

+ 0 - 123
apps/spark/java/livy-spark/src/main/resources/com/cloudera/hue/livy/spark/default-spark-user-configurable-options.conf

@@ -1,123 +0,0 @@
-// Application properties
-spark.app.name
-spark.driver.cores
-spark.driver.maxResultSize
-spark.driver.memory
-spark.executor.memory
-spark.extraListeners
-// spark.local.dir
-spark.logConf
-// spark.master
-
-// Runtime Environment
-spark.driver.extraClassPath
-// spark.driver.extraJavaOptions
-spark.driver.extraLibraryPath
-spark.driver.userClassPathFirst
-spark.executor.extraClassPath
-// spark.executor.extraJavaOptions
-spark.executor.extraLibraryPath
-spark.executor.logs.rolling.maxRetainedFiles
-spark.executor.logs.rolling.maxSize
-spark.executor.logs.rolling.strategy
-spark.executor.logs.rolling.time.interval
-spark.executor.userClassPathFirst
-// spark.executorEnv.[EnvironmentVariableName]
-spark.python.profile
-spark.python.profile.dump
-spark.python.worker.memory
-spark.python.worker.reuse
-spark.reducer.maxSizeInFlight
-
-// Shuffle Behavior
-spark.shuffle.blockTransferService
-spark.shuffle.compress
-spark.shuffle.consolidateFiles
-spark.shuffle.file.buffer
-spark.shuffle.io.maxRetries
-spark.shuffle.io.numConnectionsPerPeer
-spark.shuffle.io.preferDirectBufs
-spark.shuffle.io.retryWait
-spark.shuffle.manager
-spark.shuffle.memoryFraction
-spark.shuffle.service.enabled
-spark.shuffle.service.port
-spark.shuffle.sort.bypassMergeThreshold
-spark.shuffle.spill
-spark.shuffle.spill.compress
-
-// Spark UI
-spark.eventLog.compress
-spark.eventLog.dir
-spark.eventLog.enabled
-spark.ui.killEnabled
-spark.ui.port
-spark.ui.retainedJobs
-spark.ui.retainedStages
-spark.worker.ui.retainedExecutors
-spark.worker.ui.retainedDrivers
-
-// Compression and Serialization
-spark.broadcast.compress
-spark.closure.serializer
-spark.io.compression.codec
-spark.io.compression.lz4.blockSize
-spark.io.compression.snappy.blockSize
-spark.kryo.classesToRegister
-spark.kryo.referenceTracking
-spark.kryo.registrationRequired
-spark.kryo.registrator
-spark.kryoserializer.buffer.max
-spark.kryoserializer.buffer
-spark.rdd.compress
-spark.serializer
-spark.serializer.objectStreamReset
-
-// Execution Behavior
-spark.broadcast.blockSize
-spark.broadcast.factory
-spark.cleaner.ttl
-spark.executor.cores
-spark.totalExecutor.cores
-spark.default.parallelism
-spark.executor.heartbeatInterval
-spark.files.fetchTimeout
-spark.files.useFetchCache
-spark.files.overwrite
-spark.hadoop.cloneConf
-spark.hadoop.validateOutputSpecs
-spark.storage.memoryFraction
-spark.storage.memoryMapThreshold
-spark.storage.unrollFraction
-spark.externalBlockStore.blockManager
-spark.externalBlockStore.baseDir
-spark.externalBlockStore.url
-
-// Networking
-
-// Dynamic Allocation
-spark.dynamicAllocation.enabled
-spark.dynamicAllocation.executorIdleTimeout
-spark.dynamicAllocation.cachedExecutorIdleTimeout
-spark.dynamicAllocation.initialExecutors
-spark.dynamicAllocation.maxExecutors
-spark.dynamicAllocation.minExecutors
-spark.dynamicAllocation.schedulerBacklogTimeout
-spark.dynamicAllocation.sustainedSchedulerBacklogTimeout
-
-// Security
-
-// Encryption
-
-// Spark Streaming
-spark.streaming.backpressure.enabled
-spark.streaming.blockInterval
-spark.streaming.receiver.maxRate
-spark.streaming.receiver.writeAheadLog.enable
-spark.streaming.unpersist
-spark.streaming.kafka.maxRatePerPartition
-spark.streaming.kafka.maxRetries
-spark.streaming.ui.retainedBatches
-
-// SparkR
-spark.r.numRBackendThreads
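
The file above is plain text, one Spark property per line; `//` marks both section headers and keys that stay admin-only. A sketch of the parse that `SparkManager.loadSparkUserConfigurableOptions` (further down in this diff) applies to it, with the file contents inlined for illustration:

    import scala.io.Source

    object WhitelistParseSketch extends App {
      // Inline stand-in for the contents of the .conf file above.
      val conf =
        """// Application properties
          |spark.app.name
          |spark.driver.memory
          |// spark.master""".stripMargin

      val allowed: Set[String] =
        Source.fromString(conf).getLines()
          .map(_.trim)
          .filter(!_.startsWith("//"))    // drop section headers and locked-down keys
          .toSet

      println(allowed.contains("spark.driver.memory"))   // true
      println(allowed.contains("spark.master"))          // false: commented out above
    }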

+ 0 - 32
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/ConfigOptionNotAllowed.scala

@@ -1,32 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-/**
- * This exception is raised if this configuration option is not allowed to be set by a user.
- *
- * @param key The configuration key
- * @param value The configuration value
- *
- */
-case class ConfigOptionNotAllowed(key: String, value: String) extends Exception {
-  override def getMessage: String = {
-    s"Not allowed to specify the '$key' configuration variable"
-  }
-}
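
This exception is the enforcement point for the whitelist: `SparkProcessBuilder.conf` (below) throws it whenever a non-admin caller sets a key outside the user-configurable set. A compact sketch of that guard, with the builder reduced to the check itself:

    object ConfGuardSketch extends App {
      case class ConfigOptionNotAllowed(key: String, value: String) extends Exception(
        s"Not allowed to specify the '$key' configuration variable")

      val userConfigurable = Set("spark.driver.memory")

      def conf(key: String, value: String, admin: Boolean = false): Unit =
        if (admin || userConfigurable.contains(key)) println(s"accepted $key=$value")
        else throw ConfigOptionNotAllowed(key, value)

      conf("spark.driver.memory", "2g")             // accepted: whitelisted
      conf("spark.master", "local", admin = true)   // accepted: admin bypasses the whitelist
      try conf("spark.master", "local")             // rejected
      catch { case e: ConfigOptionNotAllowed => println(e.getMessage) }
    }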

+ 0 - 99
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkManager.scala

@@ -1,99 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-import com.cloudera.hue.livy.{Utils, LivyConf}
-import com.cloudera.hue.livy.LivyConf.{Process, Yarn}
-import com.cloudera.hue.livy.sessions.SessionManager
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.spark.batch.{BatchSessionProcessFactory, BatchSessionYarnFactory}
-import com.cloudera.hue.livy.spark.interactive.{InteractiveSessionProcessFactory, InteractiveSessionYarnFactory}
-import com.cloudera.hue.livy.yarn.Client
-
-import scala.io.Source
-
-object SparkManager {
-  def apply(livyConf: LivyConf): SparkManager = {
-    val userConfigurableOptions = loadSparkUserConfigurableOptions()
-    val processFactory = new SparkProcessBuilderFactory(livyConf, userConfigurableOptions)
-
-    livyConf.sessionKind() match {
-      case Process() => new SparkProcessManager(processFactory)
-      case Yarn() => new SparkYarnManager(processFactory)
-    }
-  }
-
-  private val SparkUserConfig = "spark-user-configurable-options.conf"
-  private val DefaultSparkUserConfig = "default-spark-user-configurable-options.conf"
-
-  private def loadSparkUserConfigurableOptions(): Set[String] = {
-    Utils.getLivyConfigFile(SparkUserConfig)
-      .map(Source.fromFile)
-      .orElse {
-        Option(getClass.getResourceAsStream(DefaultSparkUserConfig))
-          .map(Source.fromInputStream)
-      }
-      .map { source =>
-        source.getLines()
-          .map(_.trim)
-          .filter(!_.startsWith("//"))
-          .toSet
-      }
-      .getOrElse(Set())
-  }
-}
-
-trait SparkManager {
-  def batchManager: SessionManager[BatchSession]
-
-  def interactiveManager: SessionManager[InteractiveSession]
-
-  def shutdown()
-}
-
-private class SparkProcessManager(processFactory: SparkProcessBuilderFactory) extends SparkManager {
-  private[this] val batchFactory = new BatchSessionProcessFactory(processFactory)
-  private[this] val interactiveFactory = new InteractiveSessionProcessFactory(processFactory)
-
-  val batchManager = new SessionManager(processFactory.livyConf, batchFactory)
-
-  val interactiveManager = new SessionManager(processFactory.livyConf, interactiveFactory)
-
-  override def shutdown(): Unit = {
-    batchManager.shutdown()
-    interactiveManager.shutdown()
-  }
-}
-
-private class SparkYarnManager(processFactory: SparkProcessBuilderFactory) extends SparkManager {
-  private[this] val client = new Client(processFactory.livyConf)
-  private[this] val batchFactory = new BatchSessionYarnFactory(client, processFactory)
-  private[this] val interactiveFactory = new InteractiveSessionYarnFactory(client, processFactory)
-
-  val batchManager = new SessionManager(processFactory.livyConf, batchFactory)
-
-  val interactiveManager = new SessionManager(processFactory.livyConf, interactiveFactory)
-
-  override def shutdown(): Unit = {
-    batchManager.shutdown()
-    interactiveManager.shutdown()
-    client.close()
-  }
-}
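
`loadSparkUserConfigurableOptions` also encodes a lookup order worth noting: an explicit `spark-user-configurable-options.conf` in the Livy conf directory wins, otherwise the bundled classpath default is used, otherwise the whitelist is empty. A sketch of that fallback chain (object and resource names here are illustrative):

    import java.io.File
    import scala.io.Source

    object ConfigFallbackSketch extends App {
      // First match wins: explicit conf file, then bundled classpath resource, then empty.
      def load(confFile: Option[File], resource: String): Set[String] =
        confFile.map(Source.fromFile)
          .orElse(Option(getClass.getResourceAsStream(resource)).map(Source.fromInputStream))
          .map(_.getLines().map(_.trim).filter(!_.startsWith("//")).toSet)
          .getOrElse(Set.empty)

      println(load(None, "/no-such-default.conf"))   // Set(): both lookups missed
    }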

+ 0 - 12
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcess.scala

@@ -1,12 +0,0 @@
-package com.cloudera.hue.livy.spark
-
-import com.cloudera.hue.livy.LineBufferedProcess
-
-object SparkProcess {
-  def apply(process: Process): SparkProcess = {
-    new SparkProcess(process)
-  }
-}
-
-class SparkProcess(process: Process) extends LineBufferedProcess(process) {
-}

+ 0 - 299
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcessBuilder.scala

@@ -1,299 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-import com.cloudera.hue.livy.{LivyConf, Logging}
-
-import scala.collection.JavaConversions._
-import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
-
-object SparkProcessBuilder {
-  /**
-   * Represents a path that is either allowed to reference a local file, or must exist in our
-   * cache directory or on hdfs.
-   */
-  sealed trait Path
-  case class AbsolutePath(path: String) extends Path
-  case class RelativePath(path: String) extends Path
-}
-
-class SparkProcessBuilder(livyConf: LivyConf, userConfigurableOptions: Set[String]) extends Logging {
-  import SparkProcessBuilder._
-
-  private[this] val fsRoot = livyConf.filesystemRoot()
-
-  private[this] var _executable: Path = AbsolutePath(livyConf.sparkSubmit())
-  private[this] var _master: Option[String] = None
-  private[this] var _deployMode: Option[String] = None
-  private[this] var _className: Option[String] = None
-  private[this] var _name: Option[String] = Some("Livy")
-  private[this] var _jars: ArrayBuffer[Path] = ArrayBuffer()
-  private[this] var _pyFiles: ArrayBuffer[Path] = ArrayBuffer()
-  private[this] var _files: ArrayBuffer[Path] = ArrayBuffer()
-  private[this] val _conf = mutable.HashMap[String, String]()
-  private[this] var _driverClassPath: ArrayBuffer[String] = ArrayBuffer()
-  private[this] var _proxyUser: Option[String] = None
-
-  private[this] var _queue: Option[String] = None
-  private[this] var _archives: ArrayBuffer[Path] = ArrayBuffer()
-
-  private[this] var _env: ArrayBuffer[(String, String)] = ArrayBuffer()
-  private[this] var _redirectOutput: Option[ProcessBuilder.Redirect] = None
-  private[this] var _redirectError: Option[ProcessBuilder.Redirect] = None
-  private[this] var _redirectErrorStream: Option[Boolean] = None
-
-  def executable(executable: Path): SparkProcessBuilder = {
-    _executable = executable
-    this
-  }
-
-  def master(masterUrl: String): SparkProcessBuilder = {
-    _master = Some(masterUrl)
-    this
-  }
-
-  def deployMode(deployMode: String): SparkProcessBuilder = {
-    _deployMode = Some(deployMode)
-    this
-  }
-
-  def className(className: String): SparkProcessBuilder = {
-    _className = Some(className)
-    this
-  }
-
-  def name(name: String): SparkProcessBuilder = {
-    _name = Some(name)
-    this
-  }
-
-  def jar(jar: Path): SparkProcessBuilder = {
-    this._jars += jar
-    this
-  }
-
-  def jars(jars: Traversable[Path]): SparkProcessBuilder = {
-    this._jars ++= jars
-    this
-  }
-
-  def pyFile(pyFile: Path): SparkProcessBuilder = {
-    this._pyFiles += pyFile
-    this
-  }
-
-  def pyFiles(pyFiles: Traversable[Path]): SparkProcessBuilder = {
-    this._pyFiles ++= pyFiles
-    this
-  }
-
-  def file(file: Path): SparkProcessBuilder = {
-    this._files += file
-    this
-  }
-
-  def files(files: Traversable[Path]): SparkProcessBuilder = {
-    this._files ++= files
-    this
-  }
-
-  def conf(key: String): Option[String] = {
-    _conf.get(key)
-  }
-
-  def conf(key: String, value: String, admin: Boolean = false): SparkProcessBuilder = {
-    if (admin || userConfigurableOptions.contains(key)) {
-      this._conf(key) = value
-    } else {
-      throw new ConfigOptionNotAllowed(key, value)
-    }
-
-    this
-  }
-
-  def conf(conf: Traversable[(String, String)]): SparkProcessBuilder = {
-    conf.foreach { case (key, value) => this.conf(key, value) }
-    this
-  }
-
-  def driverJavaOptions(driverJavaOptions: String): SparkProcessBuilder = {
-    conf("spark.driver.extraJavaOptions", driverJavaOptions)
-  }
-
-  def driverClassPath(classPath: String): SparkProcessBuilder = {
-    _driverClassPath += classPath
-    this
-  }
-
-  def driverClassPaths(classPaths: Traversable[String]): SparkProcessBuilder = {
-    _driverClassPath ++= classPaths
-    this
-  }
-
-  def driverCores(driverCores: Int): SparkProcessBuilder = {
-    this.driverCores(driverCores.toString)
-  }
-
-  def driverMemory(driverMemory: String): SparkProcessBuilder = {
-    conf("spark.driver.memory", driverMemory)
-  }
-
-  def driverCores(driverCores: String): SparkProcessBuilder = {
-    conf("spark.driver.cores", driverCores)
-  }
-
-  def executorCores(executorCores: Int): SparkProcessBuilder = {
-    this.executorCores(executorCores.toString)
-  }
-
-  def executorCores(executorCores: String): SparkProcessBuilder = {
-    conf("spark.executor.cores", executorCores)
-  }
-
-  def totalExecutorCores(totalExecutorCores: Int): SparkProcessBuilder = {
-    this.totalExecutorCores(totalExecutorCores.toString)
-  }
-
-  def totalExecutorCores(totalExecutorCores: String): SparkProcessBuilder = {
-    conf("spark.totalExecutor.cores", totalExecutorCores)
-  }
-
-  def executorMemory(executorMemory: String): SparkProcessBuilder = {
-    conf("spark.executor.memory", executorMemory)
-  }
-
-  def numExecutors(numExecutors: Int): SparkProcessBuilder = {
-    this.numExecutors(numExecutors.toString)
-  }
-
-  def numExecutors(numExecutors: String): SparkProcessBuilder = {
-    this.conf("spark.executor.instances", numExecutors)
-  }
-
-  def proxyUser(proxyUser: String): SparkProcessBuilder = {
-    _proxyUser = Some(proxyUser)
-    this
-  }
-
-  def queue(queue: String): SparkProcessBuilder = {
-    _queue = Some(queue)
-    this
-  }
-
-  def archive(archive: Path): SparkProcessBuilder = {
-    _archives += archive
-    this
-  }
-
-  def archives(archives: Traversable[Path]): SparkProcessBuilder = {
-    archives.foreach(archive)
-    this
-  }
-
-  def env(key: String, value: String): SparkProcessBuilder = {
-    _env += ((key, value))
-    this
-  }
-
-  def redirectOutput(redirect: ProcessBuilder.Redirect): SparkProcessBuilder = {
-    _redirectOutput = Some(redirect)
-    this
-  }
-
-  def redirectError(redirect: ProcessBuilder.Redirect): SparkProcessBuilder = {
-    _redirectError = Some(redirect)
-    this
-  }
-
-  def redirectErrorStream(redirect: Boolean): SparkProcessBuilder = {
-    _redirectErrorStream = Some(redirect)
-    this
-  }
-
-  def start(file: Path, args: Traversable[String]): SparkProcess = {
-    var arguments = ArrayBuffer(fromPath(_executable))
-
-    def addOpt(option: String, value: Option[String]): Unit = {
-      value.foreach { v =>
-        arguments += option
-        arguments += v
-      }
-    }
-
-    def addList(option: String, values: Traversable[String]): Unit = {
-      if (values.nonEmpty) {
-        arguments += option
-        arguments += values.mkString(",")
-      }
-    }
-
-    addOpt("--master", _master)
-    addOpt("--deploy-mode", _deployMode)
-    addOpt("--name", _name)
-    addList("--jars", _jars.map(fromPath))
-    addList("--py-files", _pyFiles.map(fromPath))
-    addList("--files", _files.map(fromPath))
-    addOpt("--class", _className)
-    _conf.foreach { case (key, value) =>
-      arguments += "--conf"
-      arguments += f"$key=$value"
-    }
-    addList("--driver-class-path", _driverClassPath)
-
-    if (livyConf.getBoolean(LivyConf.IMPERSONATION_ENABLED_KEY, true)) {
-      addOpt("--proxy-user", _proxyUser)
-    }
-
-    addOpt("--queue", _queue)
-    addList("--archives", _archives.map(fromPath))
-
-    arguments += fromPath(file)
-    arguments ++= args
-
-    val argsString = arguments
-      .map("'" + _.replace("'", "\\'") + "'")
-      .mkString(" ")
-
-    info(s"Running $argsString")
-
-    val pb = new ProcessBuilder(arguments)
-    val env = pb.environment()
-
-    for ((key, value) <- _env) {
-      env.put(key, value)
-    }
-
-    _redirectOutput.foreach(pb.redirectOutput)
-    _redirectError.foreach(pb.redirectError)
-    _redirectErrorStream.foreach(pb.redirectErrorStream)
-
-    SparkProcess(pb.start())
-  }
-
-  private def fromPath(path: Path) = path match {
-    case AbsolutePath(p) => p
-    case RelativePath(p) =>
-      if (p.startsWith("hdfs://")) {
-        p
-      } else {
-        fsRoot + "/" + p
-      }
-  }
-}
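
The core of `start` above is mechanical flag assembly: `addOpt` emits a flag only when its value is set, `addList` joins repeated paths with commas, and every `_conf` entry becomes `--conf key=value`. That assembly, extracted into a runnable sketch with hard-coded sample values:

    import scala.collection.mutable.ArrayBuffer

    object SubmitArgsSketch extends App {
      val arguments = ArrayBuffer("spark-submit")

      def addOpt(option: String, value: Option[String]): Unit =
        value.foreach { v => arguments += option; arguments += v }

      def addList(option: String, values: Traversable[String]): Unit =
        if (values.nonEmpty) { arguments += option; arguments += values.mkString(",") }

      addOpt("--master", Some("yarn"))
      addOpt("--deploy-mode", None)    // absent option: no flag emitted
      addList("--jars", Seq("hdfs:///libs/a.jar", "hdfs:///libs/b.jar"))
      Map("spark.driver.memory" -> "2g").foreach { case (k, v) =>
        arguments += "--conf"; arguments += s"$k=$v"
      }
      arguments += "app.py"

      println(arguments.mkString(" "))
      // spark-submit --master yarn --jars hdfs:///libs/a.jar,hdfs:///libs/b.jar \
      //   --conf spark.driver.memory=2g app.py
    }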

+ 0 - 31
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkProcessBuilderFactory.scala

@@ -1,31 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-import com.cloudera.hue.livy.LivyConf
-
-class SparkProcessBuilderFactory(val livyConf: LivyConf, userConfigurableOptions: Set[String]) {
-  def this(livyConf: LivyConf) = {
-    this(livyConf, Set())
-  }
-
-  def builder() = {
-    new SparkProcessBuilder(livyConf, userConfigurableOptions)
-  }
-}

+ 0 - 42
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/SparkUserConfigurableOptions.scala

@@ -1,42 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-import java.io.File
-
-import com.cloudera.hue.livy.Utils
-
-import scala.io.Source
-
-object SparkUserConfigurableOptions {
-  def apply(): Set[String] = {
-    val file = Utils.getLivyConfDir()
-      .map(path => new File(path, "spark-user-configurable-options.conf"))
-      .get
-
-    apply(file)
-  }
-
-  def apply(configFile: File): Set[String] = {
-    Source.fromFile(configFile).getLines()
-      .map(_.trim)
-      .filterNot(_.startsWith("//"))
-      .toSet
-  }
-}

+ 0 - 64
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionFactory.scala

@@ -1,64 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import java.lang.ProcessBuilder.Redirect
-
-import com.cloudera.hue.livy.sessions.SessionFactory
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import com.cloudera.hue.livy.spark.SparkProcessBuilder.RelativePath
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilder, SparkProcessBuilderFactory}
-import org.json4s.JValue
-
-abstract class BatchSessionFactory(factory: SparkProcessBuilderFactory) extends SessionFactory[BatchSession] {
-  override def create(id: Int, createRequest: JValue) =
-    create(id, createRequest.extract[CreateBatchRequest])
-
-  def create(id: Int, request: CreateBatchRequest): BatchSession = {
-    val builder = sparkBuilder(request)
-    val process = builder.start(RelativePath(request.file), request.args)
-    create(id, process)
-  }
-
-  protected def create(id: Int, process: SparkProcess): BatchSession
-
-  protected def sparkBuilder(request: CreateBatchRequest): SparkProcessBuilder = {
-    val builder = factory.builder()
-    builder.conf(request.conf)
-    request.proxyUser.foreach(builder.proxyUser)
-    request.className.foreach(builder.className)
-    request.jars.map(RelativePath).foreach(builder.jar)
-    request.pyFiles.map(RelativePath).foreach(builder.pyFile)
-    request.files.map(RelativePath).foreach(builder.file)
-    request.driverMemory.foreach(builder.driverMemory)
-    request.driverCores.foreach(builder.driverCores)
-    request.executorMemory.foreach(builder.executorMemory)
-    request.executorCores.foreach(builder.executorCores)
-    request.totalExecutorCores.foreach(builder.totalExecutorCores)
-    request.numExecutors.foreach(builder.numExecutors)
-    request.archives.map(RelativePath).foreach(builder.archive)
-    request.queue.foreach(builder.queue)
-    request.name.foreach(builder.name)
-
-    builder.redirectOutput(Redirect.PIPE)
-    builder.redirectErrorStream(true)
-
-    builder
-  }
-}
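
The factory above maps every optional request field onto the process builder with Option.foreach, so absent fields are simply skipped. A minimal sketch of that pattern, with hypothetical names:

case class Request(name: Option[String] = None, queue: Option[String] = None)

class CommandBuilder {
  private val args = scala.collection.mutable.Buffer.empty[String]
  def name(n: String): Unit = args += ("--name=" + n)
  def queue(q: String): Unit = args += ("--queue=" + q)
  def build(): Seq[String] = args.toList
}

object OptionBuilderDemo extends App {
  val request = Request(name = Some("demo"))
  val builder = new CommandBuilder
  request.name.foreach(builder.name)   // applied: name is Some("demo")
  request.queue.foreach(builder.queue) // skipped: queue is None
  println(builder.build())             // List(--name=demo)
}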

+ 0 - 75
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionProcess.scala

@@ -1,75 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import com.cloudera.hue.livy.LineBufferedProcess
-import com.cloudera.hue.livy.sessions.SessionState
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import com.cloudera.hue.livy.spark.SparkProcess
-
-import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future}
-
-object BatchSessionProcess {
-  def apply(id: Int, process: SparkProcess): BatchSession = {
-    new BatchSessionProcess(id, process)
-  }
-}
-
-private class BatchSessionProcess(val id: Int,
-                                  process: LineBufferedProcess) extends BatchSession {
-  protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-
-  private[this] var _state: SessionState = SessionState.Running()
-
-  override def state: SessionState = _state
-
-  override def logLines(): IndexedSeq[String] = process.inputLines
-
-  override def stop(): Future[Unit] = {
-    Future {
-      destroyProcess()
-    }
-  }
-
-  private def destroyProcess() = {
-    if (process.isAlive) {
-      process.destroy()
-      reapProcess(process.waitFor())
-    }
-  }
-
-  private def reapProcess(exitCode: Int) = synchronized {
-    if (_state.isActive) {
-      if (exitCode == 0) {
-        _state = SessionState.Success()
-      } else {
-        _state = SessionState.Error()
-      }
-    }
-  }
-
-  /** Simple daemon thread to make sure we change state when the process exits. */
-  private[this] val thread = new Thread("Batch Process Reaper") {
-    override def run(): Unit = {
-      reapProcess(process.waitFor())
-    }
-  }
-  thread.setDaemon(true)
-  thread.start()
-}
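
The reaper thread above is a small but load-bearing pattern: a daemon thread blocks on waitFor() so the session state is updated even if nobody ever calls stop(). A standalone sketch (assumes a Unix-like system with a `true` binary on the PATH):

object ReaperDemo extends App {
  @volatile private var state = "running"

  // A short-lived child process standing in for spark-submit.
  private val process = new ProcessBuilder("true").start()

  // Record the terminal state exactly once, even if stop() and the reaper race.
  private def reap(exitCode: Int): Unit = synchronized {
    if (state == "running") {
      state = if (exitCode == 0) "success" else "error"
    }
  }

  private val reaper = new Thread("reaper") {
    override def run(): Unit = reap(process.waitFor())
  }
  reaper.setDaemon(true)
  reaper.start()

  reaper.join()
  println(state) // prints: success
}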

+ 0 - 30
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionProcessFactory.scala

@@ -1,30 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
-
-class BatchSessionProcessFactory(processFactory: SparkProcessBuilderFactory)
-  extends BatchSessionFactory(processFactory)
-{
-  protected override def create(id: Int, process: SparkProcess): BatchSession = {
-    BatchSessionProcess(id, process)
-  }
-}

+ 0 - 90
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionYarn.scala

@@ -1,90 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import com.cloudera.hue.livy.LineBufferedProcess
-import com.cloudera.hue.livy.sessions._
-import com.cloudera.hue.livy.sessions.batch.BatchSession
-import com.cloudera.hue.livy.spark.SparkProcess
-import com.cloudera.hue.livy.yarn._
-
-import scala.annotation.tailrec
-import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future}
-
-object BatchSessionYarn {
-  implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-
-  def apply(client: Client, id: Int, process: SparkProcess): BatchSession = {
-    val job = Future {
-      client.getJobFromProcess(process)
-    }
-    new BatchSessionYarn(id, process, job)
-  }
-}
-
-private class BatchSessionYarn(val id: Int, process: LineBufferedProcess, jobFuture: Future[Job]) extends BatchSession {
-
-  implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-
-  private var _state: SessionState = SessionState.Starting()
-
-  private var _jobThread: Thread = _
-
-  jobFuture.onComplete {
-    case util.Failure(_) =>
-      _state = SessionState.Error()
-
-    case util.Success(job) =>
-      _state = SessionState.Running()
-
-      _jobThread = new Thread {
-        override def run(): Unit = {
-          @tailrec
-          def aux(): Unit = {
-            if (_state == SessionState.Running()) {
-              Thread.sleep(5000)
-              job.getStatus match {
-                case ApplicationState.SuccessfulFinish() =>
-                  _state = SessionState.Success()
-                case ApplicationState.UnsuccessfulFinish() =>
-                  _state = SessionState.Error()
-                case _ => aux()
-              }
-            }
-          }
-
-          aux()
-        }
-      }
-      _jobThread.setDaemon(true)
-      _jobThread.start()
-  }
-
-  override def state: SessionState = _state
-
-  override def stop(): Future[Unit] = {
-    jobFuture.map { job =>
-      job.stop()
-      _state = SessionState.Success()
-      ()
-    }
-  }
-
-  override def logLines(): IndexedSeq[String] = process.inputLines
-}
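
The monitoring thread above polls YARN every five seconds until the application reaches a terminal state. A sketch of the same tail-recursive loop against a canned status source (the string states are stand-ins for job.getStatus):

import scala.annotation.tailrec

object StatusPollDemo extends App {
  private val statuses = Iterator("accepted", "running", "succeeded")

  @tailrec
  private def poll(): String = statuses.next() match {
    case "succeeded" => "success"
    case "failed"    => "error"
    case _ =>
      Thread.sleep(10) // the removed code slept 5000 ms between checks
      poll()
  }

  println(poll()) // prints: success
}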

+ 0 - 35
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/BatchSessionYarnFactory.scala

@@ -1,35 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
-import com.cloudera.hue.livy.yarn.Client
-
-class BatchSessionYarnFactory(client: Client, factory: SparkProcessBuilderFactory)
-  extends BatchSessionFactory(factory) {
-
-  protected override def create(id: Int, process: SparkProcess) =
-    BatchSessionYarn(client, id, process)
-
-  override def sparkBuilder(request: CreateBatchRequest) = {
-    val builder = super.sparkBuilder(request)
-    builder.master("yarn-cluster")
-    builder
-  }
-}

+ 0 - 38
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/batch/CreateBatchRequest.scala

@@ -1,38 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-case class CreateBatchRequest(
-    file: String,
-    proxyUser: Option[String] = None,
-    args: List[String] = List(),
-    className: Option[String] = None,
-    jars: List[String] = List(),
-    pyFiles: List[String] = List(),
-    files: List[String] = List(),
-    driverMemory: Option[String] = None,
-    driverCores: Option[Int] = None,
-    executorMemory: Option[String] = None,
-    executorCores: Option[Int] = None,
-    totalExecutorCores: Option[Int] = None,
-    numExecutors: Option[Int] = None,
-    archives: List[String] = List(),
-    queue: Option[String] = None,
-    name: Option[String] = None,
-    conf: Map[String, String] = Map())
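
Each field of this case class corresponds to a key in the JSON body of a batch-creation request, and the Option fields absorb missing keys. A minimal extraction sketch (assumes json4s on the classpath; BatchReq is a cut-down local stand-in, not the removed class):

import org.json4s.{DefaultFormats, Formats}
import org.json4s.jackson.JsonMethods.parse

case class BatchReq(file: String, args: List[String], className: Option[String])

object BatchReqDemo extends App {
  implicit val formats: Formats = DefaultFormats
  // className is absent from the payload, so it extracts as None.
  val req = parse("""{"file": "pi.py", "args": ["10"]}""").extract[BatchReq]
  println(req) // BatchReq(pi.py,List(10),None)
}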

+ 0 - 38
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/CreateInteractiveRequest.scala

@@ -1,38 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import com.cloudera.hue.livy.sessions.Kind
-
-case class CreateInteractiveRequest(
-    kind: Kind,
-    proxyUser: Option[String] = None,
-    jars: Option[String] = None,
-    pyFiles: List[String] = List(),
-    files: Option[String] = None,
-    driverMemory: Option[String] = None,
-    driverCores: Option[Int] = None,
-    executorMemory: Option[String] = None,
-    executorCores: Option[Int] = None,
-    totalExecutorCores: Option[Int] = None,
-    numExecutors: Option[Int] = None,
-    archives: List[String] = List(),
-    queue: Option[String] = None,
-    name: Option[String] = None,
-    conf: Map[String, String] = Map())

+ 0 - 150
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionFactory.scala

@@ -1,150 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import java.io.File
-import java.lang.ProcessBuilder.Redirect
-import java.nio.file.{Paths, Files}
-
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.sessions.{PySpark, SessionFactory, SessionKindSerializer}
-import com.cloudera.hue.livy.spark.SparkProcessBuilder.{AbsolutePath, RelativePath}
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilder, SparkProcessBuilderFactory}
-import com.cloudera.hue.livy.{LivyConf, Utils}
-import org.json4s.{DefaultFormats, Formats, JValue}
-
-object InteractiveSessionFactory {
-  private val LivyReplDriverClassPath = "livy.repl.driverClassPath"
-  private val LivyReplJar = "livy.repl.jar"
-  private val LivyServerUrl = "livy.server.serverUrl"
-  private val SparkDriverExtraJavaOptions = "spark.driver.extraJavaOptions"
-  private val SparkLivyCallbackUrl = "spark.livy.callbackUrl"
-  private val SparkLivyPort = "spark.livy.port"
-  private val SparkSubmitPyFiles = "spark.submit.pyFiles"
-  private val SparkYarnIsPython = "spark.yarn.isPython"
-}
-
-abstract class InteractiveSessionFactory(processFactory: SparkProcessBuilderFactory)
-  extends SessionFactory[InteractiveSession] {
-
-  import InteractiveSessionFactory._
-
-  override protected implicit def jsonFormats: Formats = DefaultFormats ++ List(SessionKindSerializer)
-
-  override def create(id: Int, createRequest: JValue) =
-    create(id, createRequest.extract[CreateInteractiveRequest])
-
-  def create(id: Int, request: CreateInteractiveRequest): InteractiveSession = {
-    val builder = sparkBuilder(id, request)
-    val kind = request.kind.toString
-    val process = builder.start(AbsolutePath(livyJar(processFactory.livyConf)), List(kind))
-
-    create(id, process, request)
-  }
-
-  protected def create(id: Int, process: SparkProcess, request: CreateInteractiveRequest): InteractiveSession
-
-  protected def sparkBuilder(id: Int, request: CreateInteractiveRequest): SparkProcessBuilder = {
-    val builder = processFactory.builder()
-
-    builder.className("com.cloudera.hue.livy.repl.Main")
-    builder.conf(request.conf)
-    request.archives.map(RelativePath).foreach(builder.archive)
-    request.driverCores.foreach(builder.driverCores)
-    request.driverMemory.foreach(builder.driverMemory)
-    request.executorCores.foreach(builder.executorCores)
-    request.totalExecutorCores.foreach(builder.totalExecutorCores)
-    request.executorMemory.foreach(builder.executorMemory)
-    request.numExecutors.foreach(builder.numExecutors)
-    request.files.foreach(_.split(",").map(RelativePath).foreach(builder.file))
-    request.jars.foreach(_.split(",").map(RelativePath).foreach(builder.jar))
-    request.proxyUser.foreach(builder.proxyUser)
-    request.queue.foreach(builder.queue)
-    request.name.foreach(builder.name)
-
-    request.kind match {
-      case PySpark() =>
-        builder.conf(SparkYarnIsPython, "true", admin = true)
-
-        // FIXME: Spark-1.4 seems to require us to manually upload the PySpark support files.
-        // We should only do this for Spark 1.4.x
-        val pySparkFiles = findPySparkArchives()
-        builder.files(pySparkFiles.map(AbsolutePath))
-
-        // We can't actually use `builder.pyFiles`, because livy-repl is a Jar, and
-        // spark-submit will reject it because it isn't a Python file. Instead we'll pass it
-        // through a special property that the livy-repl will use to expose these libraries in
-        // the Python shell.
-        builder.files(request.pyFiles.map(RelativePath))
-
-        builder.conf(SparkSubmitPyFiles, (pySparkFiles ++ request.pyFiles).mkString(","), admin = true)
-      case _ =>
-    }
-
-    sys.env.get("LIVY_REPL_JAVA_OPTS").foreach { replJavaOpts =>
-      val javaOpts = builder.conf(SparkDriverExtraJavaOptions) match {
-        case Some(javaOptions) => f"$javaOptions $replJavaOpts"
-        case None => replJavaOpts
-      }
-      builder.conf(SparkDriverExtraJavaOptions, javaOpts, admin = true)
-    }
-
-    processFactory.livyConf.getOption(LivyReplDriverClassPath)
-      .foreach(builder.driverClassPath)
-
-    sys.props.get(LivyServerUrl).foreach { serverUrl =>
-      val callbackUrl = f"$serverUrl/sessions/$id/callback"
-      builder.conf(SparkLivyCallbackUrl, callbackUrl, admin = true)
-    }
-
-    builder.conf(SparkLivyPort, "0", admin = true)
-
-    builder.redirectOutput(Redirect.PIPE)
-    builder.redirectErrorStream(true)
-
-    builder
-  }
-
-  private def livyJar(livyConf: LivyConf) = {
-    livyConf.getOption(LivyReplJar)
-      .getOrElse(Utils.jarOfClass(getClass).head)
-  }
-
-  private def findPySparkArchives(): Seq[String] = {
-    sys.env.get("PYSPARK_ARCHIVES_PATH")
-      .map(_.split(",").toSeq)
-      .getOrElse {
-        sys.env.get("SPARK_HOME").map { sparkHome =>
-          val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
-          val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
-          require(pyArchivesFile.exists(),
-            "pyspark.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-
-          val py4jFile = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
-            .iterator()
-            .next()
-            .toFile
-
-          require(py4jFile.exists(),
-            "py4j-*-src.zip not found in Spark environment; cannot run pyspark application in YARN mode.")
-          Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
-        }.getOrElse(Seq())
-      }
-  }
-}
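
The findPySparkArchives helper above resolves the PySpark support archives from the environment. A standalone sketch of the same lookup order (an explicit PYSPARK_ARCHIVES_PATH wins, else $SPARK_HOME/python/lib is searched), using a single glob in place of the two separate require checks:

import java.nio.file.{Files, Paths}
import scala.collection.JavaConverters._

object FindArchivesDemo {
  def findPySparkArchives(): Seq[String] =
    sys.env.get("PYSPARK_ARCHIVES_PATH")
      .map(_.split(",").toSeq)
      .orElse {
        sys.env.get("SPARK_HOME").map { sparkHome =>
          val pyLib = Paths.get(sparkHome, "python", "lib")
          val stream = Files.newDirectoryStream(pyLib, "{pyspark.zip,py4j-*-src.zip}")
          try stream.iterator().asScala.map(_.toString).toList
          finally stream.close()
        }
      }
      .getOrElse(Seq.empty)
}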

+ 0 - 77
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcess.scala

@@ -1,77 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import java.net.URL
-
-import com.cloudera.hue.livy.Logging
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.spark.SparkProcess
-
-import scala.annotation.tailrec
-import scala.concurrent.Future
-
-object InteractiveSessionProcess extends Logging {
-
-  def apply(id: Int,
-            process: SparkProcess,
-            createInteractiveRequest: CreateInteractiveRequest): InteractiveSession = {
-    new InteractiveSessionProcess(id, process, createInteractiveRequest)
-  }
-}
-
-private class InteractiveSessionProcess(id: Int,
-                                        process: SparkProcess,
-                                        request: CreateInteractiveRequest)
-  extends InteractiveWebSession(id, process, request) {
-
-  val stdoutThread = new Thread {
-    override def run() = {
-      val regex = """Starting livy-repl on (https?://.*)""".r
-
-      val lines = process.inputIterator
-
-      // Loop until we find the URL used to talk to livy-repl.
-      @tailrec
-      def readUntilURL(): Unit = {
-        if (lines.hasNext) {
-          val line = lines.next()
-
-          line match {
-            case regex(url_) => url = new URL(url_)
-            case _ => readUntilURL()
-          }
-        }
-      }
-
-      readUntilURL()
-    }
-  }
-
-  stdoutThread.setName("process session stdout reader")
-  stdoutThread.setDaemon(true)
-  stdoutThread.start()
-
-  override def stop(): Future[Unit] = {
-    super.stop().andThen { case r =>
-      stdoutThread.join()
-      r
-    }
-  }
-}

+ 0 - 34
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcessFactory.scala

@@ -1,34 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
-
-import scala.concurrent.ExecutionContext
-
-class InteractiveSessionProcessFactory(processFactory: SparkProcessBuilderFactory)
-  extends InteractiveSessionFactory(processFactory) {
-
-  implicit def executor: ExecutionContext = ExecutionContext.global
-
-  protected override def create(id: Int, process: SparkProcess, createInteractiveRequest: CreateInteractiveRequest): InteractiveSession = {
-    InteractiveSessionProcess(id, process, createInteractiveRequest)
-  }
-}

+ 0 - 77
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionYarn.scala

@@ -1,77 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.sessions.SessionState
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.spark.SparkProcess
-import com.cloudera.hue.livy.yarn.Client
-
-import scala.concurrent.duration._
-import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future}
-
-object InteractiveSessionYarn {
-  protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-
-  private lazy val regex = """Application report for (\w+)""".r.unanchored
-
-  def apply(client: Client,
-            id: Int,
-            process: SparkProcess,
-            request: CreateInteractiveRequest): InteractiveSession = {
-    new InteractiveSessionYarn(id, client, process, request)
-  }
-}
-
-private class InteractiveSessionYarn(id: Int,
-                                     client: Client,
-                                     process: SparkProcess,
-                                     request: CreateInteractiveRequest)
-  extends InteractiveWebSession(id, process, request) {
-
-  private val job = Future {
-    val job = client.getJobFromProcess(process)
-
-    job
-  }
-
-  job.onFailure { case _ =>
-    _state = SessionState.Error()
-  }
-
-  override def logLines() = process.inputLines
-
-  override def stop(): Future[Unit] = {
-    super.stop().andThen {
-      case _ =>
-        try {
-          val job_ = Await.result(job, Duration(10, TimeUnit.SECONDS))
-          job_.waitForFinish(10000).getOrElse {
-            job_.stop()
-          }
-        } catch {
-          case e: Throwable =>
-            _state = SessionState.Error()
-            throw e
-        }
-    }
-  }
-}

+ 0 - 43
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionYarnFactory.scala

@@ -1,43 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
-import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
-import com.cloudera.hue.livy.yarn.Client
-
-import scala.concurrent.ExecutionContext
-
-class InteractiveSessionYarnFactory(client: Client, processFactory: SparkProcessBuilderFactory)
-  extends InteractiveSessionFactory(processFactory) {
-
-  implicit def executor: ExecutionContext = ExecutionContext.global
-
-  protected override def create(id: Int,
-                                process: SparkProcess,
-                                request: CreateInteractiveRequest): InteractiveSession = {
-    InteractiveSessionYarn(client, id, process, request)
-  }
-
-  override def sparkBuilder(id: Int, request: CreateInteractiveRequest) = {
-    val builder = super.sparkBuilder(id, request)
-    builder.master("yarn-cluster")
-    builder
-  }
-}

+ 0 - 244
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveWebSession.scala

@@ -1,244 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import java.net.{ConnectException, URL}
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy._
-import com.cloudera.hue.livy.ExecuteRequest
-import com.cloudera.hue.livy.sessions._
-import com.cloudera.hue.livy.sessions.interactive.{Statement, InteractiveSession}
-import com.cloudera.hue.livy.spark.SparkProcess
-import dispatch._
-import org.json4s.JsonAST.{JNull, JString}
-import org.json4s.jackson.Serialization.write
-import org.json4s.{DefaultFormats, Formats, JValue}
-
-import scala.annotation.tailrec
-import scala.concurrent.duration.Duration
-import scala.concurrent.{Future, _}
-
-abstract class InteractiveWebSession(val id: Int,
-                                     process: SparkProcess,
-                                     request: CreateInteractiveRequest)
-  extends InteractiveSession
-  with Logging {
-
-  protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global
-  protected implicit def jsonFormats: Formats = DefaultFormats
-
-  protected[this] var _state: SessionState = SessionState.Starting()
-
-  private[this] var _lastActivity = Long.MaxValue
-  private[this] var _url: Option[URL] = None
-
-  private[this] var _executedStatements = 0
-  private[this] var _statements = IndexedSeq[Statement]()
-
-  override def kind = request.kind
-
-  override def logLines() = process.inputLines
-
-  override def proxyUser = request.proxyUser
-
-  override def url: Option[URL] = _url
-
-  override def url_=(url: URL) = {
-    ensureState(SessionState.Starting(), {
-      _state = SessionState.Idle()
-      _url = Some(url)
-    })
-  }
-
-  private def svc = {
-    val url = _url.head
-    dispatch.url(url.toString)
-  }
-
-  override def lastActivity: Option[Long] = Some(_lastActivity)
-
-  override def state: SessionState = _state
-
-  override def executeStatement(content: ExecuteRequest): Statement = {
-    ensureRunning {
-      _state = SessionState.Busy()
-      touchLastActivity()
-
-      val req = (svc / "execute").setContentType("application/json", "UTF-8") << write(content)
-
-      val future = Http(req OK as.json4s.Json).map { case resp: JValue =>
-        parseResponse(resp).getOrElse {
-          // The result isn't ready yet. Loop until it is.
-          val id = (resp \ "id").extract[Int]
-          waitForStatement(id)
-        }
-      }
-
-      val statement = new Statement(_executedStatements, content, future)
-
-      _executedStatements += 1
-      _statements = _statements :+ statement
-
-      statement
-    }
-  }
-
-  @tailrec
-  private def waitForStatement(id: Int): JValue = {
-    val req = (svc / "history" / id).setContentType("application/json", "UTF-8")
-    val resp = Await.result(Http(req OK as.json4s.Json), Duration.Inf)
-
-    parseResponse(resp) match {
-      case Some(result) => result
-      case None =>
-        Thread.sleep(1000)
-        waitForStatement(id)
-    }
-  }
-
-  private def parseResponse(response: JValue): Option[JValue] = {
-    response \ "result" match {
-      case JNull => None
-      case result =>
-        // If the response errored out, it's possible it took down the interpreter. Check if
-        // it's still running.
-        result \ "status" match {
-          case JString("error") =>
-            if (replErroredOut()) {
-              transition(SessionState.Error())
-            } else {
-              transition(SessionState.Idle())
-            }
-          case _ => transition(SessionState.Idle())
-        }
-
-        Some(result)
-    }
-  }
-
-  private def replErroredOut() = {
-    val req = svc.setContentType("application/json", "UTF-8")
-    val response = Await.result(Http(req OK as.json4s.Json), Duration.Inf)
-
-    response \ "state" match {
-      case JString("error") => true
-      case _ => false
-    }
-  }
-
-  override def statements: IndexedSeq[Statement] = _statements
-
-  override def interrupt(): Future[Unit] = {
-    stop()
-  }
-
-  override def stop(): Future[Unit] = {
-    val future: Future[Unit] = synchronized {
-      _state match {
-        case SessionState.Idle() =>
-          _state = SessionState.Busy()
-
-          Http(svc.DELETE OK as.String).either() match {
-            case (Right(_) | Left(_: ConnectException)) =>
-              // Make sure to eat any connection errors, because the repl may have shut down
-              // before it sent out an OK.
-              synchronized {
-                _state = SessionState.Dead()
-              }
-
-              Future.successful(())
-
-            case Left(t: Throwable) =>
-              Future.failed(t)
-          }
-        case SessionState.NotStarted() =>
-          Future {
-            waitForStateChange(SessionState.NotStarted(), Duration(10, TimeUnit.SECONDS))
-            stop()
-          }
-        case SessionState.Starting() =>
-          Future {
-            waitForStateChange(SessionState.Starting(), Duration(10, TimeUnit.SECONDS))
-            stop()
-          }
-        case SessionState.Busy() | SessionState.Running() =>
-          Future {
-            waitForStateChange(SessionState.Busy(), Duration(10, TimeUnit.SECONDS))
-            stop()
-          }
-        case SessionState.ShuttingDown() =>
-          Future {
-            waitForStateChange(SessionState.ShuttingDown(), Duration(10, TimeUnit.SECONDS))
-            stop()
-          }
-        case SessionState.Error(_) | SessionState.Dead(_) | SessionState.Success(_) =>
-          Future.successful(())
-      }
-    }
-
-    future.andThen { case r =>
-      process.waitFor()
-      r
-    }
-  }
-
-  private def transition(state: SessionState) = synchronized {
-    _state = state
-  }
-
-  private def touchLastActivity() = {
-    _lastActivity = System.currentTimeMillis()
-  }
-
-  private def ensureState[A](state: SessionState, f: => A) = {
-    synchronized {
-      if (_state == state) {
-        f
-      } else {
-        throw new IllegalStateException("Session is in state %s" format _state)
-      }
-    }
-  }
-
-  private def ensureRunning[A](f: => A) = {
-    synchronized {
-      _state match {
-        case SessionState.Idle() | SessionState.Busy() =>
-          f
-        case _ =>
-          throw new IllegalStateException("Session is in state %s" format _state)
-      }
-    }
-  }
-
-  // Error out the job if the process errors out.
-  Future {
-    if (process.waitFor() == 0) {
-      // Set the state to done if the session shut down before contacting us.
-      _state match {
-        case (SessionState.Dead(_) | SessionState.Error(_) | SessionState.Success(_)) =>
-        case _ =>
-          _state = SessionState.Success()
-      }
-    } else {
-      _state = SessionState.Error()
-    }
-  }
-}
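
The ensureState/ensureRunning guards above funnel every state check and transition through one synchronized block. A compact sketch of that guard pattern with a hypothetical two-state session:

class GuardedSession {
  private[this] var state: String = "idle"

  // Run f only when the session is in the expected state; the check and
  // any transition inside f happen under the same lock.
  private def ensureState[A](expected: String)(f: => A): A = synchronized {
    if (state == expected) f
    else throw new IllegalStateException("Session is in state " + state)
  }

  def execute(stmt: String): String = ensureState("idle") {
    state = "busy"
    try "ran: " + stmt
    finally state = "idle"
  }
}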

+ 0 - 43
apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/SparkProcessBuilderSpec.scala

@@ -1,43 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark
-
-import com.cloudera.hue.livy.LivyConf
-import org.scalatest.{FlatSpec, Matchers}
-
-class SparkProcessBuilderSpec
-  extends FlatSpec
-  with Matchers {
-
-  it should "not error if passed an allowed config option" in {
-    val allowed = Set("spark.app.name")
-    val builder = new SparkProcessBuilder(new LivyConf(), allowed)
-
-    builder.conf("spark.app.name", "hello")
-  }
-
-  it should "raise an exception if passed an illegal config option" in {
-    val allowed = Set("spark.app.name")
-    val builder = new SparkProcessBuilder(new LivyConf(), allowed)
-
-    intercept[ConfigOptionNotAllowed] {
-      builder.conf("spark.master", "local")
-    }
-  }
-}
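
This spec pins down the whitelist behavior: user-supplied options outside the allowed set must be rejected. A sketch of the enforcement side (hypothetical names; the admin flag mirrors the admin = true calls made by the session factories above):

class ConfigNotAllowed(key: String)
  extends Exception(key + " is not a user-configurable option")

class GuardedBuilder(allowed: Set[String]) {
  private val conf = scala.collection.mutable.Map.empty[String, String]

  // Server-set (admin) options bypass the whitelist; user options must be allowed.
  def set(key: String, value: String, admin: Boolean = false): Unit =
    if (admin || allowed.contains(key)) conf(key) = value
    else throw new ConfigNotAllowed(key)
}

With this sketch, new GuardedBuilder(Set("spark.app.name")).set("spark.master", "local") throws, which is exactly what the second test case asserts of the real builder.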

+ 0 - 71
apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/batch/BatchProcessSpec.scala

@@ -1,71 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.batch
-
-import java.io.FileWriter
-import java.nio.file.{Files, Path}
-import java.util.concurrent.TimeUnit
-
-import com.cloudera.hue.livy.sessions.SessionState
-import com.cloudera.hue.livy.spark.SparkProcessBuilderFactory
-import com.cloudera.hue.livy.{LivyConf, Utils}
-import org.scalatest.{BeforeAndAfterAll, FunSpec, ShouldMatchers}
-
-import scala.concurrent.duration.Duration
-
-class BatchProcessSpec
-  extends FunSpec
-  with BeforeAndAfterAll
-  with ShouldMatchers {
-
-  val script: Path = {
-    val script = Files.createTempFile("livy-test", ".py")
-    script.toFile.deleteOnExit()
-    val writer = new FileWriter(script.toFile)
-    try {
-      writer.write(
-        """
-          |print "hello world"
-        """.stripMargin)
-    } finally {
-      writer.close()
-    }
-    script
-  }
-
-  describe("A Batch process") {
-    it("should create a process") {
-      val req = CreateBatchRequest(
-        file = script.toString
-      )
-
-      val livyConf = new LivyConf()
-      val builder = new BatchSessionProcessFactory(new SparkProcessBuilderFactory(livyConf))
-      val batch = builder.create(0, req)
-
-      Utils.waitUntil({ () => !batch.state.isActive }, Duration(10, TimeUnit.SECONDS))
-      (batch.state match {
-        case SessionState.Success(_) => true
-        case _ => false
-      }) should be (true)
-
-      batch.logLines() should contain("hello world")
-    }
-  }
-}
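
The spec relies on Utils.waitUntil to block until the batch leaves an active state. A hypothetical re-implementation of such a helper, just to make the timing behavior concrete (the real signature in livy-core may differ):

import scala.concurrent.duration.Duration

object WaitUntil {
  // Spin until the predicate holds or the duration elapses.
  def waitUntil(condition: () => Boolean, timeout: Duration): Unit = {
    val deadline = System.currentTimeMillis() + timeout.toMillis
    while (!condition() && System.currentTimeMillis() < deadline) {
      Thread.sleep(100)
    }
    if (!condition())
      throw new java.util.concurrent.TimeoutException("condition not met within " + timeout)
  }
}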

+ 0 - 40
apps/spark/java/livy-spark/src/test/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionProcessSpec.scala

@@ -1,40 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.spark.interactive
-
-import com.cloudera.hue.livy.LivyConf
-import com.cloudera.hue.livy.sessions.{BaseInteractiveSessionSpec, PySpark}
-import com.cloudera.hue.livy.spark.SparkProcessBuilderFactory
-import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
-
-class InteractiveSessionProcessSpec
-  extends BaseInteractiveSessionSpec
-  with FunSpecLike
-  with Matchers
-  with BeforeAndAfter {
-
-  val livyConf = new LivyConf()
-  livyConf.set("livy.repl.driverClassPath", sys.props("java.class.path"))
-
-  def createSession() = {
-    val processFactory = new SparkProcessBuilderFactory(livyConf)
-    val interactiveFactory = new InteractiveSessionProcessFactory(processFactory)
-    interactiveFactory.create(0, CreateInteractiveRequest(kind = PySpark()))
-  }
-}

+ 0 - 66
apps/spark/java/livy-yarn/pom.xml

@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue.livy</groupId>
-        <artifactId>livy-main</artifactId>
-        <relativePath>../pom.xml</relativePath>
-        <version>0.2.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>livy-yarn_2.10</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <properties>
-        <PermGen>64m</PermGen>
-        <MaxPermGen>512m</MaxPermGen>
-    </properties>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>${project.groupId}</groupId>
-            <artifactId>livy-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-    </dependencies>
-
-</project>

+ 0 - 51
apps/spark/java/livy-yarn/src/main/assembly/dist.xml

@@ -1,51 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
-  license agreements. See the NOTICE file distributed with this work for additional
-  information regarding copyright ownership. The ASF licenses this file to
-  you under the Apache License, Version 2.0 (the "License"); you may not use
-  this file except in compliance with the License. You may obtain a copy of
-  the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
-  by applicable law or agreed to in writing, software distributed under the
-  License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-  OF ANY KIND, either express or implied. See the License for the specific
-  language governing permissions and limitations under the License. -->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-    <id>dist</id>
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-
-    <includeBaseDirectory>false</includeBaseDirectory>
-
-    <dependencySets>
-        <dependencySet>
-            <outputDirectory>lib</outputDirectory>
-            <useProjectArtifact>true</useProjectArtifact>
-
-            <useTransitiveFiltering>true</useTransitiveFiltering>
-        </dependencySet>
-    </dependencySets>
-
-    <fileSets>
-        <fileSet>
-            <directory>${basedir}/src/main/bash</directory>
-            <outputDirectory>/bin</outputDirectory>
-            <fileMode>0744</fileMode>
-            <includes>
-                <include>*</include>
-            </includes>
-        </fileSet>
-
-        <fileSet>
-            <directory>${basedir}/src/main/resources</directory>
-            <outputDirectory>/resources</outputDirectory>
-            <fileMode>0744</fileMode>
-            <includes>
-                <include>*</include>
-            </includes>
-        </fileSet>
-    </fileSets>
-
-</assembly>

+ 0 - 21
apps/spark/java/livy-yarn/src/main/bash/run-am.sh

@@ -1,21 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-[[ $JAVA_OPTS != *-server* ]] && export JAVA_OPTS="$JAVA_OPTS -server"
-
-exec $(dirname $0)/run-class.sh com.cloudera.hue.livy.yarn.AppMaster $@

+ 0 - 40
apps/spark/java/livy-yarn/src/main/bash/run-class.sh

@@ -1,40 +0,0 @@
-#!/bin/bash
-
-home_dir=`pwd`
-base_dir=$(dirname $0)/..
-cd $base_dir
-base_dir=`pwd`
-cd $home_dir
-
-HADOOP_YARN_HOME="${HADOOP_YARN_HOME:-$HOME/.livy}"
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_YARN_HOME/conf}"
-CLASSPATH="$HADOOP_CONF_DIR:$base_dir/lib/*"
-DEFAULT_LOGBACK_FILE="$base_dir/resources/logback.xml"
-
-#for file in $base_dir/lib/*.[jw]ar;
-#do
-#  CLASSPATH=$CLASSPATH:$file
-#done
-
-if [ -z "$JAVA_HOME" ]; then
-  JAVA="java"
-else
-  JAVA="$JAVA_HOME/bin/java"
-fi
-
-# Try to use 64-bit mode if the JVM supports it
-function check_and_enable_64_bit_mode {
-  $JAVA -d64 -version > /dev/null 2>&1
-  if [ $? -eq 0 ] ; then
-    JAVA_OPTS="$JAVA_OPTS -d64"
-  fi
-}
-
-# Check if 64-bit mode is already set. If not, try to enable it when supported
-[[ $JAVA_OPTS != *-d64* ]] && check_and_enable_64_bit_mode
-
-# Check if a logback configuration is specified. If not, fall back to resources/logback.xml
-[[ $JAVA_OPTS != *-Dlogback.configurationFile* && -f $DEFAULT_LOGBACK_FILE ]] && JAVA_OPTS="$JAVA_OPTS -Dlogback.configurationFile=file:$DEFAULT_LOGBACK_FILE"
-
-echo $JAVA $JAVA_OPTS -cp "$CLASSPATH" "$@"
-exec $JAVA $JAVA_OPTS -cp "$CLASSPATH" "$@"

+ 0 - 19
apps/spark/java/livy-yarn/src/main/bash/run-job.sh

@@ -1,19 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-exec $(dirname $0)/run-class.sh com.cloudera.hue.livy.yarn.Client $@

+ 0 - 12
apps/spark/java/livy-yarn/src/main/resources/logback.xml

@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-        <encoder>
-            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
-        </encoder>
-    </appender>
-
-    <root level="info">
-        <appender-ref ref="STDOUT" />
-    </root>
-</configuration>

+ 0 - 29
apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/ApplicationState.scala

@@ -1,29 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.yarn
-
-sealed trait ApplicationState
-
-object ApplicationState {
-  case class New() extends ApplicationState
-  case class Accepted() extends ApplicationState
-  case class Running() extends ApplicationState
-  case class SuccessfulFinish() extends ApplicationState
-  case class UnsuccessfulFinish() extends ApplicationState
-}
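
The removed trait models each state as a no-argument case class, so every state value is constructed with a call like New() and compared by case-class equality. An equivalent sketch with case objects keeps exhaustive matching while reusing singletons (hypothetical name to avoid clashing with the removed class):

sealed trait AppState
object AppState {
  case object New extends AppState
  case object Accepted extends AppState
  case object Running extends AppState
  case object SuccessfulFinish extends AppState
  case object UnsuccessfulFinish extends AppState
}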

+ 0 - 78
apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/Client.scala

@@ -1,78 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.yarn
-
-import java.io.File
-
-import com.cloudera.hue.livy.{LineBufferedProcess, LivyConf, Logging}
-import org.apache.hadoop.fs.Path
-import org.apache.hadoop.yarn.client.api.YarnClient
-import org.apache.hadoop.yarn.conf.YarnConfiguration
-import org.apache.hadoop.yarn.util.ConverterUtils
-
-import scala.annotation.tailrec
-import scala.concurrent.ExecutionContext
-
-object Client {
-  private lazy val regex = """Application report for (\w+)""".r.unanchored
-}
-
-class FailedToSubmitApplication extends Exception
-
-class Client(livyConf: LivyConf) extends Logging {
-  import Client._
-
-  protected implicit def executor: ExecutionContext = ExecutionContext.global
-
-  private[this] val yarnConf = new YarnConfiguration()
-  private[this] val yarnClient = YarnClient.createYarnClient()
-  val path = new Path(sys.env("HADOOP_CONF_DIR") + File.separator + YarnConfiguration.YARN_SITE_CONFIGURATION_FILE)
-  yarnConf.addResource(path)
-  val rm_address = yarnConf.get(YarnConfiguration.RM_ADDRESS)
-  info(s"Resource Manager address: $rm_address")
-
-  yarnClient.init(yarnConf)
-  yarnClient.start()
-
-  def getJobFromProcess(process: LineBufferedProcess): Job = {
-    parseApplicationId(process.inputIterator) match {
-      case Some(appId) => new Job(yarnClient, ConverterUtils.toApplicationId(appId))
-      case None => throw new FailedToSubmitApplication
-    }
-  }
-
-  def close() = {
-    yarnClient.close()
-  }
-
-  @tailrec
-  private def parseApplicationId(lines: Iterator[String]): Option[String] = {
-    if (lines.hasNext) {
-      val line = lines.next()
-      line match {
-        case regex(applicationId) => Some(applicationId)
-        case _ => parseApplicationId(lines)
-      }
-    } else {
-      None
-    }
-  }
-}
-
-
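
parseApplicationId above scans the spark-submit output for the YARN application id with an unanchored regex. The same extraction, runnable against a canned log line:

import scala.annotation.tailrec

object AppIdDemo extends App {
  private val regex = """Application report for (\w+)""".r.unanchored

  @tailrec
  private def parseApplicationId(lines: Iterator[String]): Option[String] =
    if (lines.hasNext) {
      lines.next() match {
        case regex(appId) => Some(appId)
        case _            => parseApplicationId(lines)
      }
    } else None

  val output = Iterator(
    "INFO yarn.Client: Application report for application_1450000000000_0001 (state: ACCEPTED)")
  println(parseApplicationId(output)) // Some(application_1450000000000_0001)
}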

+ 0 - 105
apps/spark/java/livy-yarn/src/main/scala/com/cloudera/hue/livy/yarn/Job.scala

@@ -1,105 +0,0 @@
-/*
- * Licensed to Cloudera, Inc. under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  Cloudera, Inc. licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.hue.livy.yarn
-
-import org.apache.hadoop.yarn.api.records.{FinalApplicationStatus, YarnApplicationState, ApplicationId}
-import org.apache.hadoop.yarn.client.api.YarnClient
-
-class Job(yarnClient: YarnClient, appId: ApplicationId) {
-  def waitForFinish(timeoutMs: Long): Option[ApplicationState] = {
-    val startTimeMs = System.currentTimeMillis()
-
-    while (System.currentTimeMillis() - startTimeMs < timeoutMs) {
-      val status = getStatus
-      status match {
-        case ApplicationState.SuccessfulFinish() | ApplicationState.UnsuccessfulFinish() =>
-          return Some(status)
-        case _ =>
-      }
-
-      Thread.sleep(1000)
-    }
-
-    None
-  }
-
-  def waitForStatus(status: ApplicationState, timeoutMs: Long): Option[ApplicationState] = {
-    val startTimeMs = System.currentTimeMillis()
-
-    while (System.currentTimeMillis() - startTimeMs < timeoutMs) {
-      if (getStatus == status) {
-        return Some(status)
-      }
-
-      Thread.sleep(1000)
-    }
-
-    None
-  }
-
-  def waitForRPC(timeoutMs: Long): Option[(String, Int)] = {
-    waitForStatus(ApplicationState.Running(), timeoutMs)
-
-    val startTimeMs = System.currentTimeMillis()
-
-    while (System.currentTimeMillis() - startTimeMs < timeoutMs) {
-      val statusResponse = yarnClient.getApplicationReport(appId)
-
-      (statusResponse.getHost, statusResponse.getRpcPort) match {
-        case ("N/A", _) | (_, -1) => Thread.sleep(1000)
-        case (hostname, port) => return Some((hostname, port))
-      }
-    }
-
-    None
-  }
-
-  def getHost: String = {
-    val statusResponse = yarnClient.getApplicationReport(appId)
-    statusResponse.getHost
-  }
-
-  def getPort: Int = {
-    val statusResponse = yarnClient.getApplicationReport(appId)
-    statusResponse.getRpcPort
-  }
-
-  def getStatus: ApplicationState = {
-    val statusResponse = yarnClient.getApplicationReport(appId)
-    convertState(statusResponse.getYarnApplicationState, statusResponse.getFinalApplicationStatus)
-  }
-
-  def stop(): Unit = {
-    yarnClient.killApplication(appId)
-  }
-
-  private def convertState(state: YarnApplicationState, status: FinalApplicationStatus): ApplicationState = {
-    (state, status) match {
-      case (YarnApplicationState.FINISHED, FinalApplicationStatus.SUCCEEDED) => ApplicationState.SuccessfulFinish()
-      case (YarnApplicationState.FINISHED, _) |
-           (YarnApplicationState.KILLED, _) |
-           (YarnApplicationState.FAILED, _) => ApplicationState.UnsuccessfulFinish()
-      case (YarnApplicationState.NEW, _) |
-           (YarnApplicationState.NEW_SAVING, _) |
-           (YarnApplicationState.SUBMITTED, _) => ApplicationState.New()
-      case (YarnApplicationState.RUNNING, _) => ApplicationState.Running()
-      case (YarnApplicationState.ACCEPTED, _) => ApplicationState.Accepted()
-    }
-  }
-}

+ 0 - 551
apps/spark/java/pom.xml

@@ -1,551 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.cloudera.hue</groupId>
-        <artifactId>hue-parent</artifactId>
-        <relativePath>../../../maven/pom.xml</relativePath>
-        <version>3.10.0-SNAPSHOT</version>
-    </parent>
-
-    <groupId>com.cloudera.hue.livy</groupId>
-    <artifactId>livy-main</artifactId>
-    <version>0.2.0-SNAPSHOT</version>
-    <packaging>pom</packaging>
-
-    <name>livy-main</name>
-    <description>livy-main</description>
-
-    <licenses>
-        <license>
-            <name>The Apache Software License, Version 2.0</name>
-            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-        </license>
-    </licenses>
-
-    <organization>
-        <name>Cloudera Inc</name>
-        <url>http://www.cloudera.com</url>
-    </organization>
-
-    <properties>
-        <commons-codec.version>1.9</commons-codec.version>
-        <dispatch.version>0.11.2</dispatch.version>
-        <httpclient.version>4.5</httpclient.version>
-        <httpcore.version>4.4.1</httpcore.version>
-        <jackson.version>2.4.4</jackson.version>
-        <jackson-module-scala.version>2.4.4</jackson-module-scala.version>
-        <javax.servlet-api.version>3.1.0</javax.servlet-api.version>
-        <jetty.version>9.2.10.v20150310</jetty.version>
-        <json4s.version>3.2.11</json4s.version>
-        <logback.version>1.1.2</logback.version>
-        <metrics.version>3.1.0</metrics.version>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <py4j.version>0.8.2.1</py4j.version>
-        <scala.binary.version>2.10</scala.binary.version>
-        <scala.compat.version>2.10</scala.compat.version>
-        <scala.version>2.10.4</scala.version>
-        <scalatest.version>2.2.4</scalatest.version>
-        <scalatra.version>2.3.0</scalatra.version>
-        <snappy-java.version>1.1.1.6</snappy-java.version>
-    </properties>
-
-    <modules>
-        <module>livy-assembly</module>
-        <module>livy-core</module>
-        <module>livy-repl</module>
-        <module>livy-server</module>
-        <module>livy-spark</module>
-        <module>livy-yarn</module>
-    </modules>
-
-    <dependencyManagement>
-        <dependencies>
-
-            <dependency>
-                <groupId>ch.qos.logback</groupId>
-                <artifactId>logback-access</artifactId>
-                <version>${logback.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>ch.qos.logback</groupId>
-                <artifactId>logback-classic</artifactId>
-                <version>${logback.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>com.fasterxml.jackson.core</groupId>
-                <artifactId>jackson-core</artifactId>
-                <version>${jackson.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>com.fasterxml.jackson.core</groupId>
-                <artifactId>jackson-databind</artifactId>
-                <version>${jackson.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>com.fasterxml.jackson.module</groupId>
-                <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-                <version>${jackson-module-scala.version}</version>
-                <scope>provided</scope>
-            </dependency>
-
-            <dependency>
-                <groupId>commons-codec</groupId>
-                <artifactId>commons-codec</artifactId>
-                <version>${commons-codec.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>io.dropwizard.metrics</groupId>
-                <artifactId>metrics-core</artifactId>
-                <version>${metrics.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>io.dropwizard.metrics</groupId>
-                <artifactId>metrics-healthchecks</artifactId>
-                <version>${metrics.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>javax.servlet</groupId>
-                <artifactId>javax.servlet-api</artifactId>
-                <version>${javax.servlet-api.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-common</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-client</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-yarn-api</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-yarn-client</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-yarn-common</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpcore</artifactId>
-                <version>${httpcore.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpclient</artifactId>
-                <version>${httpclient.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.spark</groupId>
-                <artifactId>spark-core_${scala.binary.version}</artifactId>
-                <version>${spark.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.spark</groupId>
-                <artifactId>spark-yarn_${scala.binary.version}</artifactId>
-                <version>${spark.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-http</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-continuation</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-servlet</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-server</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-util</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-plus</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.eclipse.jetty</groupId>
-                <artifactId>jetty-security</artifactId>
-                <version>${jetty.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.json4s</groupId>
-                <artifactId>json4s_${scala.binary.version}</artifactId>
-                <version>${json4s.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.json4s</groupId>
-                <artifactId>json4s-ast_${scala.binary.version}</artifactId>
-                <version>${json4s.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.json4s</groupId>
-                <artifactId>json4s-core_${scala.binary.version}</artifactId>
-                <version>${json4s.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.json4s</groupId>
-                <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-                <version>${json4s.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scala-lang</groupId>
-                <artifactId>scala-compiler</artifactId>
-                <version>${scala.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scala-lang</groupId>
-                <artifactId>scala-library</artifactId>
-                <version>${scala.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scala-lang</groupId>
-                <artifactId>scala-reflect</artifactId>
-                <version>${scala.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatest</groupId>
-                <artifactId>scalatest_${scala.binary.version}</artifactId>
-                <version>${scalatest.version}</version>
-                <scope>test</scope>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatra</groupId>
-                <artifactId>scalatra_${scala.binary.version}</artifactId>
-                <version>${scalatra.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatra</groupId>
-                <artifactId>scalatra-jetty_${scala.binary.version}</artifactId>
-                <version>${scalatra.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatra</groupId>
-                <artifactId>scalatra-json_${scala.binary.version}</artifactId>
-                <version>${scalatra.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatra</groupId>
-                <artifactId>scalatra-scalatest_${scala.binary.version}</artifactId>
-                <version>${scalatra.version}</version>
-                <scope>test</scope>
-            </dependency>
-
-            <dependency>
-                <groupId>org.scalatra</groupId>
-                <artifactId>scalatra-test_${scala.binary.version}</artifactId>
-                <version>${scalatra.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>org.xerial.snappy</groupId>
-                <artifactId>snappy-java</artifactId>
-                <version>${snappy-java.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>net.databinder.dispatch</groupId>
-                <artifactId>dispatch-core_${scala.binary.version}</artifactId>
-                <version>${dispatch.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>net.databinder.dispatch</groupId>
-                <artifactId>dispatch-json4s-jackson_${scala.binary.version}</artifactId>
-                <version>${dispatch.version}</version>
-            </dependency>
-
-            <dependency>
-                <groupId>net.sf.py4j</groupId>
-                <artifactId>py4j</artifactId>
-                <version>${py4j.version}</version>
-            </dependency>
-
-        </dependencies>
-    </dependencyManagement>
-
-    <build>
-        <pluginManagement>
-            <plugins>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-enforcer-plugin</artifactId>
-                    <version>1.0</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-compiler-plugin</artifactId>
-                    <version>2.5.1</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-site-plugin</artifactId>
-                    <version>3.3</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-javadoc-plugin</artifactId>
-                    <version>2.8.1</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-assembly-plugin</artifactId>
-                    <version>2.2</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-antrun-plugin</artifactId>
-                    <version>1.6</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.codehaus.mojo</groupId>
-                    <artifactId>build-helper-maven-plugin</artifactId>
-                    <version>1.8</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-maven-plugins</artifactId>
-                    <version>${hadoop.version}</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-jar-plugin</artifactId>
-                    <version>2.3.2</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-source-plugin</artifactId>
-                    <version>2.1.2</version>
-                </plugin>
-                <plugin>
-                    <groupId>com.atlassian.maven.plugins</groupId>
-                    <artifactId>maven-clover2-plugin</artifactId>
-                    <version>3.0.5</version>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.rat</groupId>
-                    <artifactId>apache-rat-plugin</artifactId>
-                    <version>0.8</version>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-deploy-plugin</artifactId>
-                    <version>2.8.2</version>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.apache.felix</groupId>
-                    <artifactId>maven-bundle-plugin</artifactId>
-                    <version>2.4.0</version>
-                </plugin>
-
-                <plugin>
-                    <groupId>net.alchim31.maven</groupId>
-                    <artifactId>scala-maven-plugin</artifactId>
-                    <version>3.2.2</version>
-                    <executions>
-                        <execution>
-                            <goals>
-                                <goal>compile</goal>
-                                <goal>testCompile</goal>
-                            </goals>
-                        </execution>
-                    </executions>
-                    <configuration>
-                        <scalaVersion>${scala.version}</scalaVersion>
-                        <args>
-                            <arg>-deprecation</arg>
-                            <arg>-feature</arg>
-                        </args>
-                    </configuration>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-install-plugin</artifactId>
-                    <version>2.5.2</version>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-shade-plugin</artifactId>
-                    <version>2.4.2</version>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-surefire-plugin</artifactId>
-                    <version>2.7</version>
-                    <configuration>
-                        <skipTests>true</skipTests>
-                    </configuration>
-                </plugin>
-
-                <plugin>
-                    <groupId>org.scalatest</groupId>
-                    <artifactId>scalatest-maven-plugin</artifactId>
-                    <version>1.0</version>
-                    <configuration>
-                        <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-                        <junitxml>.</junitxml>
-                        <filereports>WDF TestSuite.txt</filereports>
-                    </configuration>
-                    <executions>
-                        <execution>
-                            <id>test</id>
-                            <goals>
-                                <goal>test</goal>
-                            </goals>
-                        </execution>
-                    </executions>
-                </plugin>
-
-            </plugins>
-        </pluginManagement>
-
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>${sourceJavaVersion}</source>
-                    <target>${targetJavaVersion}</target>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-enforcer-plugin</artifactId>
-                <inherited>false</inherited>
-                <configuration>
-                    <rules>
-                        <requireMavenVersion>
-                            <version>[3.0.0,)</version>
-                        </requireMavenVersion>
-                        <requireJavaVersion>
-                            <version>[${minJavaVersion}.0,${maxJavaVersion}.1000]</version>
-                        </requireJavaVersion>
-                        <requireOS>
-                            <family>unix</family>
-                        </requireOS>
-                    </rules>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>clean</id>
-                        <goals>
-                            <goal>enforce</goal>
-                        </goals>
-                        <phase>pre-clean</phase>
-                    </execution>
-                    <execution>
-                        <id>default</id>
-                        <goals>
-                            <goal>enforce</goal>
-                        </goals>
-                        <phase>validate</phase>
-                    </execution>
-                    <execution>
-                        <id>site</id>
-                        <goals>
-                            <goal>enforce</goal>
-                        </goals>
-                        <phase>pre-site</phase>
-                    </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-
-    </build>
-
-    <reporting>
-        <plugins>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </reporting>
-
-</project>
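
Note: this parent POM existed purely as an aggregator and version hub for the six Livy modules listed above; all third-party versions (Scala 2.10.4, Jetty 9.2.x, Scalatra 2.3.0, json4s 3.2.11, and so on) were pinned once in <dependencyManagement> so the child poms could declare dependencies without a <version> element. After this commit that role presumably moves to the parent pom of the external Livy repository.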

+ 0 - 1
maven/pom.xml

@@ -39,7 +39,6 @@
   <properties>
     <hadoop-mr1.version>2.6.0-mr1-cdh5.5.0</hadoop-mr1.version>
     <hadoop.version>2.6.0-cdh5.5.0</hadoop.version>
-    <spark.version>1.5.0-cdh5.5.0</spark.version>
     <slf4j.version>1.6.1</slf4j.version>
     <commons-logging.version>1.0.4</commons-logging.version>
     <thrift.version>0.9.0</thrift.version>
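
Note: the spark.version property can be dropped from the shared maven/pom.xml because the Livy modules deleted above were, presumably, its only consumers (via the managed spark-core_${scala.binary.version} and spark-yarn_${scala.binary.version} dependencies); any in-tree module still referencing ${spark.version} after this commit would fail to resolve at build time.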