
HUE-3308 [spark] Moving out references to Livy as it moved to its own project

Romain Rigaux · 9 years ago · commit f211725

+ 2 - 29
apps/spark/src/spark/conf.py

@@ -28,12 +28,7 @@ from spark.settings import NICE_NAME
 LOG = logging.getLogger(__name__)
 
 
-LIVY_ASSEMBLY_JAR = Config(
-  key="livy_assembly_jar",
-  help=_t("Path to livy-assembly.jar"),
-  private=True,
-  default=os.path.join(os.path.dirname(__file__), "..", "..", "java-lib", "livy-assembly.jar"))
-
+# Livy
 LIVY_SERVER_HOST = Config(
   key="livy_server_host",
   help=_t("Host address of the Livy Server."),
@@ -44,30 +39,8 @@ LIVY_SERVER_PORT = Config(
   help=_t("Port of the Livy Server."),
   default="8998")
 
-LIVY_SERVER_SESSION_KIND = Config(
-  key="livy_server_session_kind",
-  help=_t("Configure livy to start in local 'process' mode, or 'yarn' workers."),
-  default="process")
-
-LIVY_YARN_JAR = Config(
-  key="livy_yarn_jar",
-  help=_t("Path to livy-assembly.jar inside HDFS"),
-  private=True)
-
-LIVY_IMPERSONATION_ENABLED = Config(
-  key="livy_impersonation_enabled",
-  help=_t("Use impersonation when submitting livy jobs"),
-  default=True,
-  type=coerce_bool)
-
-START_LIVY_SERVER = Config(
-  key="start_livy_server",
-  help=_t("Experimental option to launch livy"),
-  default=False,
-  type=coerce_bool,
-  private=True)
-
 
+# Spark SQL
 SQL_SERVER_HOST = Config(
   key="sql_server_host",
   help=_t("Host where SparkSQL server is running."),

+ 0 - 16
apps/spark/src/spark/management/__init__.py

@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.

+ 0 - 16
apps/spark/src/spark/management/commands/__init__.py

@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.

+ 0 - 82
apps/spark/src/spark/management/commands/livy_server.py

@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import subprocess
-
-from django.core.management.base import NoArgsCommand
-import spark.conf
-
-
-LOG = logging.getLogger(__name__)
-
-
-class Command(NoArgsCommand):
-  """
-  Starts livy server.
-  """
-
-  help = 'start livy server'
-
-  def handle(self, *args, **kwargs):
-    env = os.environ.copy()
-    classpath = env.get('CLASSPATH', '').split(os.pathsep)
-
-    jar = spark.conf.LIVY_ASSEMBLY_JAR.get()
-    classpath.insert(0, jar)
-
-    # Add the hadoop classpath if it's available.
-    try:
-      p = subprocess.Popen(['hadoop', 'classpath'], stdout=subprocess.PIPE)
-    except OSError:
-      pass
-    else:
-      hadoop_classpath = p.communicate()[0]
-      if p.wait() == 0:
-        classpath.append(hadoop_classpath)
-
-    args = [
-      "java",
-    ]
-
-    args.extend(("-cp", os.pathsep.join(classpath)))
-
-    server_host = spark.conf.LIVY_SERVER_HOST.get()
-    args.append("-Dlivy.server.host=" + server_host)
-
-    server_port = spark.conf.LIVY_SERVER_PORT.get()
-    args.append("-Dlivy.server.port=" + server_port)
-
-    session_factory = spark.conf.LIVY_SERVER_SESSION_KIND.get()
-    args.append("-Dlivy.server.session.factory=" + session_factory)
-
-    livy_yarn_jar = spark.conf.LIVY_YARN_JAR.get()
-    if livy_yarn_jar:
-      args.append("-Dlivy.yarn.jar=" + livy_yarn_jar)
-
-    if spark.conf.LIVY_IMPERSONATION_ENABLED.get():
-      args.append("-Dlivy.impersonation.enabled=true")
-    else:
-      args.append("-Dlivy.impersonation.enabled=false")
-
-    args.append("com.cloudera.hue.livy.server.Main")
-
-    LOG.info("Executing %r (%r) (%r)" % (args[0], args, env))
-
-    # Use exec, so that this takes only one process.
-    os.execvpe(args[0], args, env)
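With the embedded launcher removed, Livy is expected to run as its own service and Hue only talks to it over HTTP. A small connectivity-check sketch against Livy's REST API (not Hue code; assumes the requests package is available, and reuses the host/port settings kept in conf.py):

  # Sketch: verify an externally managed Livy server is reachable.
  import requests

  def livy_is_up(host, port):
    try:
      # Livy's REST API lists active sessions at GET /sessions.
      r = requests.get('http://%s:%s/sessions' % (host, port), timeout=5)
      return r.status_code == 200
    except requests.RequestException:
      return False

  print(livy_is_up('localhost', 8998))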

+ 0 - 41
apps/spark/src/spark/monkey_patches.py

@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-# Licensed to Cloudera, Inc. under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  Cloudera, Inc. licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import spark.conf
-
-def _start_livy_server():
-  import atexit
-  import subprocess
-  import sys
-  import time
-
-  p = subprocess.Popen([sys.executable, sys.argv[0], 'livy_server'])
-
-  def cleanup():
-    p.terminate()
-    for _ in xrange(5):
-      if p.poll() == None:
-        time.sleep(1)
-      else:
-        break
-    else:
-      p.kill()
-
-  atexit.register(cleanup)
-
-if spark.conf.START_LIVY_SERVER.get():
-  _start_livy_server()
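For reference, the pattern the removed monkey patch relied on is a child process supervised with an atexit cleanup hook. An illustrative rewrite of that same logic in Python 3 idioms (range instead of xrange, "is None" instead of "== None"); it is not part of this commit:

  # Sketch: start a child process and make sure it is stopped at interpreter exit.
  import atexit
  import subprocess
  import time

  def supervise(argv):
    p = subprocess.Popen(argv)

    def cleanup():
      p.terminate()              # ask the child to exit
      for _ in range(5):
        if p.poll() is None:     # still running, wait a bit
          time.sleep(1)
        else:
          break
      else:
        p.kill()                 # force-kill after ~5 seconds

    atexit.register(cleanup)
    return p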

+ 0 - 11
apps/spark/src/spark/urls.py

@@ -15,15 +15,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-# FIXME: This could be replaced with hooking into the `AppConfig.ready()`
-# signal in Django 1.7:
-#
-# https://docs.djangoproject.com/en/1.7/ref/applications/#django.apps.AppConfig.ready
-#
-# For now though we have to load in the monkey patches here because we know
-# this file has been loaded after `desktop.settings` has been loaded.
-import spark.monkey_patches
-
-
 urlpatterns = ()
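The removed FIXME pointed at the cleaner hook that Django 1.7 introduced: one-time startup code can live in AppConfig.ready() instead of being triggered by an import in urls.py. A sketch of that alternative (class name here is hypothetical):

  # Sketch: run startup hooks from AppConfig.ready() rather than urls.py.
  from django.apps import AppConfig

  class SparkConfig(AppConfig):
    name = 'spark'

    def ready(self):
      # Startup code (such as the removed monkey patches) would go here.
      pass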