
HUE-2962 [spark] Update spark properties to utilize new configuration format

Add SparkConfiguration to Default Configuration API
Jenny Kim, 9 years ago
parent commit 638b12755a
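
This change moves the Spark session properties to the richer per-property format already used by HiveConfiguration and ImpalaConfiguration, so Spark defaults can be saved and restored through the Default Configuration API. A minimal sketch of the new entry shape, using the field names from the SparkConfiguration diff further down (the concrete values here are illustrative):

# Sketch of one entry in the new configuration format: 'value' replaces the
# old 'default' field and 'multiple' marks list-valued properties.
example_property = {
    'name': 'executorMemory',
    'nice_name': 'Executor Memory',
    'help_text': 'Amount of memory to use per executor process in GB (Default: 1).',
    'type': 'jvm',
    'is_yarn': True,
    'multiple': False,
    'value': '1',
}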

+ 1 - 1
desktop/conf.dist/hue.ini

@@ -832,7 +832,7 @@
   ## thrift_version=7
 
   # A comma-separated list of white-listed Hive configuration properties that users are authorized to set.
-  # config_whitelist=hive.map.aggr,hive.exec.compress.output,hive.exec.parallel,hive.execution.engine,mapreduce.job.queuename
+  ## config_whitelist=hive.map.aggr,hive.exec.compress.output,hive.exec.parallel,hive.execution.engine,mapreduce.job.queuename
 
   # Override the default desktop username and password of the hue user used for authentications with other services.
   # e.g. Used for LDAP/PAM pass-through authentication.

+ 1 - 1
desktop/conf/pseudo-distributed.ini.tmpl

@@ -894,7 +894,7 @@
   ## auth_password=
 
   # A comma-separated list of white-listed Impala configuration properties that users are authorized to set.
-  # config_whitelist=debug_action,explain_level,mem_limit,optimize_partition_key_scans,query_timeout_s
+  ## config_whitelist=debug_action,explain_level,mem_limit,optimize_partition_key_scans,query_timeout_s
 
   [[ssl]]
     # SSL communication enabled for this server.

+ 16 - 0
desktop/core/src/desktop/configuration/__init__.py

@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 2 - 1
desktop/core/src/desktop/configuration/api.py

@@ -27,6 +27,7 @@ from desktop.lib.i18n import force_unicode
 from desktop.models import DefaultConfiguration
 
 from notebook.connectors.hiveserver2 import HiveConfiguration, ImpalaConfiguration
+from notebook.connectors.spark_shell import SparkConfiguration
 
 
 LOG = logging.getLogger(__name__)
@@ -51,7 +52,7 @@ def api_error_handler(func):
 def get_configurable():
   # TODO: Use metaclasses to self-register configurable apps
   app_configs = {}
-  config_classes = [HiveConfiguration, ImpalaConfiguration]
+  config_classes = [HiveConfiguration, ImpalaConfiguration, SparkConfiguration]
 
   for config_cls in config_classes:
     if not hasattr(config_cls, 'APP_NAME') or not hasattr(config_cls, 'PROPERTIES'):
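
The hasattr() guard above implies that get_configurable() keys each entry by APP_NAME and exposes its PROPERTIES. A hedged sketch of the aggregation once SparkConfiguration is registered (the response shape is an assumption; only the guard is shown in this hunk):

# Hedged sketch of how get_configurable() plausibly assembles its payload; the
# dict shape is illustrative, only APP_NAME/PROPERTIES usage is implied by the diff.
def get_configurable_sketch(config_classes):
    app_configs = {}
    for config_cls in config_classes:
        if not hasattr(config_cls, 'APP_NAME') or not hasattr(config_cls, 'PROPERTIES'):
            continue  # skip classes that do not expose configurable properties
        app_configs[config_cls.APP_NAME] = {'properties': config_cls.PROPERTIES}
    return app_configs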

+ 1 - 0
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -517,6 +517,7 @@ class HS2Api(Api):
     functions = snippet['properties'].get('functions', None)
     properties = session['properties']
 
+    # Get properties from session if not defined in snippet
     if not settings:
       settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)
 

+ 115 - 21
desktop/libs/notebook/src/notebook/connectors/spark_shell.py

@@ -19,15 +19,19 @@ import logging
 import re
 import time
 
-
-LOG = logging.getLogger(__name__)
-
-
 from django.utils.translation import ugettext as _
 
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import force_unicode
 from desktop.lib.rest.http_client import RestException
+from desktop.models import DefaultConfiguration
+
+from notebook.data_export import download as spark_download
+from notebook.connectors.base import Api, QueryError, SessionExpired, _get_snippet_session
+
+
+LOG = logging.getLogger(__name__)
+
 
 try:
   from spark.conf import LIVY_SERVER_SESSION_KIND
@@ -35,34 +39,124 @@ try:
 except ImportError, e:
   LOG.exception('Spark is not enabled')
 
-from notebook.data_export import download as spark_download
-from notebook.connectors.base import SessionExpired, _get_snippet_session, Api,\
-  QueryError
 
+class SparkConfiguration(object):
 
-class SparkApi(Api):
+  APP_NAME = 'spark'
 
   PROPERTIES = [
-    {'name': 'jars', 'nice_name': _('Jars'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-    {'name': 'files', 'nice_name': _('Files'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-    {'name': 'pyFiles', 'nice_name': _('pyFiles'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-
-    {'name': 'driverMemory', 'nice_name': _('Driver Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': False},
-
-    {'name': 'driverCores', 'nice_name': _('Driver Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'executorMemory', 'nice_name': _('Executors Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': True},
-    {'name': 'executorCores', 'nice_name': _('Executor Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'totalExecutorCores', 'nice_name': _('Total Executor Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'queue', 'nice_name': _('Queue'), 'default': '1', 'type': 'string', 'is_yarn': True},
-    {'name': 'archives', 'nice_name': _('Archives'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': True},
-    {'name': 'numExecutors', 'nice_name': _('Executors Numbers'), 'default': '1', 'type': 'number', 'is_yarn': True},
+    {
+      "name": "jars",
+      "nice_name": _("Jars"),
+      "help_text": _("Add one or more JAR files to the list of resources."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "files",
+      "nice_name": _("Files"),
+      "help_text": _("Files to be placed in the working directory of each executor."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "pyFiles",
+      "nice_name": _("pyFiles"),
+      "help_text": _("Python files to be placed in the working directory of each executor."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "driverMemory",
+      "nice_name": _("Driver Memory"),
+      "help_text": _("Amount of memory to use for the driver process in GB. (Default: 1). "),
+      "type": "jvm",
+      "is_yarn": False,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "totalExecutorCores",
+      "nice_name": _("Total Executor Cores"),
+      "help_text": _("number of cluster cores used by executor, only in standalone mode (Default: 1))"),
+      "type": "number",
+      "is_yarn": False,
+      "multiple": False,
+      "value": '1',
+    },
+    # YARN-only properties
+    {
+      "name": "driverCores",
+      "nice_name": _("Driver Cores"),
+      "help_text": _("Number of cores used by the driver, only in cluster mode (Default: 1)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "numExecutors",
+      "nice_name": _("Executors Number"),
+      "help_text": _("Number of executors to launch, only in cluster mode (Default: 2)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '2',
+    }, {
+      "name": "executorMemory",
+      "nice_name": _("Executor Memory"),
+      "help_text": _("Amount of memory to use per executor process in GB. (Default: 1)"),
+      "type": "jvm",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "executorCores",
+      "nice_name": _("Executor Cores"),
+      "help_text": _("Number of cores used by the driver, only in cluster mode (Default: 1)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "queue",
+      "nice_name": _("Queue"),
+      "help_text": _("The YARN queue to submit to, only in cluster mode (Default: default)"),
+      "type": "string",
+      "is_yarn": True,
+      "multiple": False,
+      "value": 'default',
+    }, {
+      "name": "archives",
+      "nice_name": _("Archives"),
+      "help_text": _("Archives to be extracted into the working directory of each executor, only in cluster mode."),
+      "type": "csv-hdfs-files",
+      "is_yarn": True,
+      "multiple": True,
+      "value": [],
+    }
   ]
 
+
+class SparkApi(Api):
+
   SPARK_UI_RE = re.compile("Started SparkUI at (http[s]?://([0-9a-zA-Z-_\.]+):(\d+))")
   YARN_JOB_RE = re.compile("tracking URL: (http[s]?://.+/)")
   STANDALONE_JOB_RE = re.compile("Got job (\d+)")
 
+  @staticmethod
+  def get_properties():
+    return SparkConfiguration.PROPERTIES
+
   def create_session(self, lang='scala', properties=None):
+    if not properties:
+      config = DefaultConfiguration.objects.get_configuration_for_user(app='spark', user=self.user)
+      if config is not None:
+        properties = config.properties_list
+      else:
+        properties = self.get_properties()
+
     props = dict([(p['name'], p['value']) for p in properties]) if properties is not None else {}
 
     props['kind'] = lang
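
With this change, create_session() resolves properties in order: the list passed in by the caller, then the user's saved default configuration for the 'spark' app, then the class defaults from get_properties(). A short sketch of that resolution and of flattening the list into the Livy payload (only the 'kind' key is confirmed by the diff; the rest is illustrative):

from notebook.connectors.spark_shell import SparkApi

# Hedged sketch of the resolution order implemented in create_session() above;
# 'user_config' stands in for DefaultConfiguration.objects.get_configuration_for_user(...).
def resolve_properties(caller_properties, user_config):
    if caller_properties:
        return caller_properties
    if user_config is not None:
        return user_config.properties_list  # per-user saved defaults
    return SparkApi.get_properties()        # falls back to SparkConfiguration.PROPERTIES

properties = resolve_properties(None, None)
props = dict((p['name'], p['value']) for p in properties)
props['kind'] = 'pyspark'  # e.g. a PySpark session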

+ 1 - 0
desktop/libs/notebook/src/notebook/static/notebook/css/notebook.css

@@ -990,6 +990,7 @@ h4.header {
 #helpModal .tab-content {
   min-height: 355px;
   max-height: 355px;
+}
 
 .tooltip {
   z-index: 3000 !important;

+ 1 - 1
desktop/libs/notebook/src/notebook/views.py

@@ -73,7 +73,7 @@ def notebook(request):
       'notebooks_json': json.dumps([notebook.get_data()]),
       'options_json': json.dumps({
           'languages': get_interpreters(request.user),
-          'session_properties': SparkApi.PROPERTIES,
+          'session_properties': SparkApi.get_properties(),
           'is_optimizer_enabled': has_optimizer(),
       }),
       'is_yarn_mode': is_yarn_mode,
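
On the template side, session_properties now comes from SparkApi.get_properties() instead of the class attribute that moved to SparkConfiguration. A minimal sketch of how the properties reach the page (only the session_properties key mirrors the hunk above; the rest is illustrative):

import json

from notebook.connectors.spark_shell import SparkApi

# The property dicts hold plain strings and lists, so they serialize directly
# into the options_json blob consumed by the notebook front end.
options_json = json.dumps({
    'session_properties': SparkApi.get_properties(),
})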