@@ -19,15 +19,19 @@ import logging
 import re
 import time
-
-LOG = logging.getLogger(__name__)
-
-
 from django.utils.translation import ugettext as _
 
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import force_unicode
 from desktop.lib.rest.http_client import RestException
+from desktop.models import DefaultConfiguration
+
+from notebook.data_export import download as spark_download
+from notebook.connectors.base import Api, QueryError, SessionExpired, _get_snippet_session
+
+
+LOG = logging.getLogger(__name__)
+
 
 
 try:
   from spark.conf import LIVY_SERVER_SESSION_KIND
@@ -35,34 +39,127 @@ try:
 except ImportError, e:
   LOG.exception('Spark is not enabled')
 
-from notebook.data_export import download as spark_download
-from notebook.connectors.base import SessionExpired, _get_snippet_session, Api,\
-  QueryError
 
 
+class SparkConfiguration(object):
 
-class SparkApi(Api):
+  APP_NAME = 'spark'
 
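+  # Session properties surfaced in the notebook UI; 'is_yarn' marks settings that only apply on YARN.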
   PROPERTIES = [
-    {'name': 'jars', 'nice_name': _('Jars'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-    {'name': 'files', 'nice_name': _('Files'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-    {'name': 'pyFiles', 'nice_name': _('pyFiles'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
-
-    {'name': 'driverMemory', 'nice_name': _('Driver Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': False},
-
-    {'name': 'driverCores', 'nice_name': _('Driver Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'executorMemory', 'nice_name': _('Executors Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': True},
-    {'name': 'executorCores', 'nice_name': _('Executor Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'totalExecutorCores', 'nice_name': _('Total Executor Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
-    {'name': 'queue', 'nice_name': _('Queue'), 'default': '1', 'type': 'string', 'is_yarn': True},
-    {'name': 'archives', 'nice_name': _('Archives'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': True},
-    {'name': 'numExecutors', 'nice_name': _('Executors Numbers'), 'default': '1', 'type': 'number', 'is_yarn': True},
+    {
+      "name": "jars",
+      "nice_name": _("Jars"),
+      "help_text": _("Add one or more JAR files to the list of resources."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "files",
+      "nice_name": _("Files"),
+      "help_text": _("Files to be placed in the working directory of each executor."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "pyFiles",
+      "nice_name": _("pyFiles"),
+      "help_text": _("Python files to be placed in the working directory of each executor."),
+      "type": "csv-hdfs-files",
+      "is_yarn": False,
+      "multiple": True,
+      "value": [],
+    }, {
+      "name": "driverMemory",
+      "nice_name": _("Driver Memory"),
+      "help_text": _("Amount of memory to use for the driver process in GB (Default: 1)."),
+      "type": "jvm",
+      "is_yarn": False,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "totalExecutorCores",
+      "nice_name": _("Total Executor Cores"),
+      "help_text": _("Number of cluster cores used by the executors, only in standalone mode (Default: 1)."),
+      "type": "number",
+      "is_yarn": False,
+      "multiple": False,
+      "value": '1',
+    },
+    # YARN-only properties
+    {
+      "name": "driverCores",
+      "nice_name": _("Driver Cores"),
+      "help_text": _("Number of cores used by the driver, only in cluster mode (Default: 1)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "numExecutors",
+      "nice_name": _("Number of Executors"),
+      "help_text": _("Number of executors to launch, only in cluster mode (Default: 2)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '2',
+    }, {
+      "name": "executorMemory",
+      "nice_name": _("Executor Memory"),
+      "help_text": _("Amount of memory to use per executor process in GB (Default: 1)."),
+      "type": "jvm",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "executorCores",
+      "nice_name": _("Executor Cores"),
+      "help_text": _("Number of cores used by each executor, only in cluster mode (Default: 1)"),
+      "type": "number",
+      "is_yarn": True,
+      "multiple": False,
+      "value": '1',
+    }, {
+      "name": "queue",
+      "nice_name": _("Queue"),
+      "help_text": _("The YARN queue to submit to, only in cluster mode (Default: default)"),
+      "type": "string",
+      "is_yarn": True,
+      "multiple": False,
+      "value": 'default',
+    }, {
+      "name": "archives",
+      "nice_name": _("Archives"),
+      "help_text": _("Archives to be extracted into the working directory of each executor, only in cluster mode."),
+      "type": "csv-hdfs-files",
+      "is_yarn": True,
+      "multiple": True,
+      "value": [],
+    }
   ]
 
+
+class SparkApi(Api):
+
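+  # Patterns for extracting the Spark UI URL, YARN tracking URL and standalone job id from session logs.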
   SPARK_UI_RE = re.compile("Started SparkUI at (http[s]?://([0-9a-zA-Z-_\.]+):(\d+))")
   YARN_JOB_RE = re.compile("tracking URL: (http[s]?://.+/)")
   STANDALONE_JOB_RE = re.compile("Got job (\d+)")
 
+  @staticmethod
+  def get_properties():
+    return SparkConfiguration.PROPERTIES
+
   def create_session(self, lang='scala', properties=None):
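+    # Prefer explicitly-passed properties, then the user's saved defaults, then the built-in list.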
+    if not properties:
+      config = DefaultConfiguration.objects.get_configuration_for_user(app='spark', user=self.user)
+      if config is not None:
+        properties = config.properties_list
+      else:
+        properties = self.get_properties()
+
     props = dict([(p['name'], p['value']) for p in properties]) if properties is not None else {}
 
     props['kind'] = lang