@@ -17,22 +17,15 @@

import json
import logging
-import sys
-
from collections import OrderedDict

from django.test.client import Client
from django.urls import reverse
+from django.utils.translation import gettext as _, gettext_lazy as _t

from desktop import appmanager
-from desktop.conf import is_oozie_enabled, has_connectors, is_cm_managed, ENABLE_UNIFIED_ANALYTICS, get_clusters
-from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, coerce_json_dict, coerce_bool, coerce_csv
-
-if sys.version_info[0] > 2:
- from django.utils.translation import gettext_lazy as _t, gettext as _
-else:
- from django.utils.translation import ugettext_lazy as _t, ugettext as _
-
+from desktop.conf import ENABLE_UNIFIED_ANALYTICS, get_clusters, has_connectors, is_oozie_enabled
+from desktop.lib.conf import Config, ConfigSection, UnspecifiedConfigSection, coerce_bool, coerce_csv, coerce_json_dict

LOG = logging.getLogger()

@@ -40,12 +33,7 @@ LOG = logging.getLogger()
INTERPRETERS_CACHE = None


-SHOW_NOTEBOOKS = Config(
- key="show_notebooks",
- help=_t("Show the notebook menu or not"),
- type=coerce_bool,
- default=True
-)
+SHOW_NOTEBOOKS = Config(key="show_notebooks", help=_t("Show the notebook menu or not"), type=coerce_bool, default=False)


def _remove_duplications(a_list):
@@ -56,24 +44,26 @@ def check_has_missing_permission(user, interpreter, user_apps=None):
# TODO: port to cluster config
if user_apps is None:
user_apps = appmanager.get_apps_dict(user) # Expensive method
- return (interpreter == 'hive' and 'hive' not in user_apps) or \
- (interpreter == 'impala' and 'impala' not in user_apps) or \
- (interpreter == 'pig' and 'pig' not in user_apps) or \
- (interpreter == 'solr' and 'search' not in user_apps) or \
- (interpreter in ('spark', 'pyspark', 'r', 'jar', 'py', 'sparksql') and 'spark' not in user_apps) or \
- (interpreter in ('java', 'spark2', 'mapreduce', 'shell', 'sqoop1', 'distcp') and 'oozie' not in user_apps)
+ return (
+ (interpreter == 'hive' and 'hive' not in user_apps)
+ or (interpreter == 'impala' and 'impala' not in user_apps)
+ or (interpreter == 'pig' and 'pig' not in user_apps)
+ or (interpreter == 'solr' and 'search' not in user_apps)
+ or (interpreter in ('spark', 'pyspark', 'r', 'jar', 'py', 'sparksql') and 'spark' not in user_apps)
+ or (interpreter in ('java', 'spark2', 'mapreduce', 'shell', 'sqoop1', 'distcp') and 'oozie' not in user_apps)
+ )


def _connector_to_interpreter(connector):
return {
- 'name': connector['nice_name'],
- 'type': connector['name'], # Aka id
- 'dialect': connector['dialect'],
- 'category': connector['category'],
- 'is_sql': connector['dialect_properties']['is_sql'],
- 'interface': connector['interface'],
- 'options': {setting['name']: setting['value'] for setting in connector['settings']},
- 'dialect_properties': connector['dialect_properties'],
+ 'name': connector['nice_name'],
+ 'type': connector['name'], # Aka id
+ 'dialect': connector['dialect'],
+ 'category': connector['category'],
+ 'is_sql': connector['dialect_properties']['is_sql'],
+ 'interface': connector['interface'],
+ 'options': {setting['name']: setting['value'] for setting in connector['settings']},
+ 'dialect_properties': connector['dialect_properties'],
}


@@ -94,21 +84,15 @@ def get_ordered_interpreters(user=None):

if has_connectors():
from desktop.lib.connectors.api import _get_installed_connectors
+
interpreters = [
- _connector_to_interpreter(connector)
- for connector in _get_installed_connectors(categories=['editor', 'catalogs'], user=user)
+ _connector_to_interpreter(connector) for connector in _get_installed_connectors(categories=['editor', 'catalogs'], user=user)
]
else:
if INTERPRETERS_CACHE is None:
- none_user = None # for getting full list of interpreters
- if is_cm_managed():
- extra_interpreters = INTERPRETERS.get() # Combine the other apps interpreters
- _default_interpreters(none_user)
- else:
- extra_interpreters = {}
-
- if not INTERPRETERS.get():
- _default_interpreters(none_user)
+ none_user = None # For getting full list of interpreters
+ extra_interpreters = INTERPRETERS.get() # Combine the other apps interpreters
+ _default_interpreters(none_user)

INTERPRETERS_CACHE = INTERPRETERS.get()
INTERPRETERS_CACHE.update(extra_interpreters)
@@ -131,16 +115,18 @@ def get_ordered_interpreters(user=None):

reordered_interpreters = interpreters_shown_on_wheel + [i for i in user_interpreters if i not in interpreters_shown_on_wheel]

- interpreters = [{
+ interpreters = [
+ {
'name': INTERPRETERS_CACHE[i].NAME.get(),
'type': i,
'interface': INTERPRETERS_CACHE[i].INTERFACE.get(),
- 'options': INTERPRETERS_CACHE[i].OPTIONS.get()
+ 'options': INTERPRETERS_CACHE[i].OPTIONS.get(),
}
for i in reordered_interpreters
]

- return [{
+ return [
+ {
"name": i.get('nice_name', i['name']),
'displayName': displayName(i.get('dialect', i['name']).lower(), i.get('nice_name', i['name'])),
"type": i['type'],
@@ -149,14 +135,15 @@ def get_ordered_interpreters(user=None):
'dialect': i.get('dialect', i['type']).lower(),
'dialect_properties': i.get('dialect_properties') or {}, # Empty when connectors off
'category': i.get('category', 'editor'),
- "is_sql": i.get('is_sql') or \
- i['interface'] in ["hiveserver2", "rdbms", "jdbc", "solr", "sqlalchemy", "ksql", "flink", "trino"] or \
- i['type'] in ["sql", "sparksql"],
- "is_catalog": i['interface'] in ["hms",],
+ "is_sql": i.get('is_sql')
+ or i['interface'] in ["hiveserver2", "rdbms", "jdbc", "solr", "sqlalchemy", "ksql", "flink", "trino"]
+ or i['type'] in ["sql", "sparksql"],
+ "is_catalog": i['interface'] in ["hms"],
}
for i in interpreters
]

+
def computes_for_dialect(dialect, user):
# import here due to avoid cyclic dependency
from beeswax.models import Namespace
@@ -168,6 +155,7 @@ def computes_for_dialect(dialect, user):

return ns_with_computes

+
# cf. admin wizard too

INTERPRETERS = UnspecifiedConfigSection(
@@ -177,69 +165,67 @@ INTERPRETERS = UnspecifiedConfigSection(
help=_t("Define the name and how to connect and execute the language."),
members=dict(
NAME=Config(
- "name",
- help=_t("The name of the snippet."),
- default="SQL",
- type=str,
+ "name",
+ help=_t("The name of the snippet."),
+ default="SQL",
+ type=str,
),
INTERFACE=Config(
- "interface",
- help="The backend connection to use to communicate with the server.",
- default="hiveserver2",
- type=str,
+ "interface",
+ help="The backend connection to use to communicate with the server.",
+ default="hiveserver2",
+ type=str,
),
- OPTIONS=Config(
- key='options',
- help=_t('Specific options for connecting to the server.'),
- type=coerce_json_dict,
- default='{}'
- )
- )
- )
+ OPTIONS=Config(key='options', help=_t('Specific options for connecting to the server.'), type=coerce_json_dict, default='{}'),
+ ),
+ ),
)

INTERPRETERS_SHOWN_ON_WHEEL = Config(
key="interpreters_shown_on_wheel",
- help=_t("Comma separated list of interpreters that should be shown on the wheel. "
- "This list takes precedence over the order in which the interpreter entries appear. "
- "Only the first 5 interpreters will appear on the wheel."),
+ help=_t(
+ "Comma separated list of interpreters that should be shown on the wheel. "
+ "This list takes precedence over the order in which the interpreter entries appear. "
+ "Only the first 5 interpreters will appear on the wheel."
+ ),
type=coerce_csv,
- default=[]
+ default=[],
+)
+
+ENABLE_ALL_INTERPRETERS = Config(
+ key="enable_all_interpreters",
+ help=_t("Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app."),
|
|
|
+ type=coerce_bool,
|
|
|
+ default=False,
|
|
|
)
|
|
|
|
|
|
DEFAULT_INTERPRETER = Config(
|
|
|
key="default_interpreter",
|
|
|
help=_t("Set the default interpreter for all users. Starred interpreters at user level will get more priority than the value below."),
|
|
|
type=str,
|
|
|
- default=''
|
|
|
+ default='',
|
|
|
)
|
|
|
|
|
|
DEFAULT_LIMIT = Config(
|
|
|
- "default_limit",
|
|
|
- help="Default limit to use in SELECT statements if not present. Set to 0 to disable.",
|
|
|
- default=5000,
|
|
|
- type=int
|
|
|
+ "default_limit", help="Default limit to use in SELECT statements if not present. Set to 0 to disable.", default=5000, type=int
|
|
|
)
|
|
|
|
|
|
ENABLE_DBPROXY_SERVER = Config(
|
|
|
key="enable_dbproxy_server",
|
|
|
help=_t("Main flag to override the automatic starting of the DBProxy server."),
|
|
|
type=coerce_bool,
|
|
|
- default=True
|
|
|
+ default=True,
|
|
|
)
|
|
|
|
|
|
DBPROXY_EXTRA_CLASSPATH = Config(
|
|
|
key="dbproxy_extra_classpath",
|
|
|
help=_t("Additional classes to put on the dbproxy classpath when starting. Values separated by ':'"),
|
|
|
type=str,
|
|
|
- default=''
|
|
|
+ default='',
|
|
|
)
|
|
|
|
|
|
ENABLE_QUERY_BUILDER = Config(
|
|
|
- key="enable_query_builder",
|
|
|
- help=_t("Flag to enable the SQL query builder of the table assist."),
|
|
|
- type=coerce_bool,
|
|
|
- default=False
|
|
|
+ key="enable_query_builder", help=_t("Flag to enable the SQL query builder of the table assist."), type=coerce_bool, default=False
|
|
|
)
|
|
|
|
|
|
# Note: requires Oozie app
|
|
|
@@ -247,42 +233,34 @@ ENABLE_QUERY_SCHEDULING = Config(
key="enable_query_scheduling",
help=_t("Flag to enable the creation of a coordinator for the current SQL query."),
type=coerce_bool,
- default=False
+ default=False,
)

ENABLE_EXTERNAL_STATEMENT = Config(
key="enable_external_statements",
help=_t("Flag to enable the selection of queries from files, saved queries into the editor or as snippet."),
type=coerce_bool,
- default=False
+ default=False,
)

ENABLE_BATCH_EXECUTE = Config(
key="enable_batch_execute",
help=_t("Flag to enable the bulk submission of queries as a background task through Oozie."),
type=coerce_bool,
- dynamic_default=is_oozie_enabled
+ dynamic_default=is_oozie_enabled,
)

-ENABLE_SQL_INDEXER = Config(
- key="enable_sql_indexer",
- help=_t("Flag to turn on the SQL indexer."),
- type=coerce_bool,
- default=False
-)
+ENABLE_SQL_INDEXER = Config(key="enable_sql_indexer", help=_t("Flag to turn on the SQL indexer."), type=coerce_bool, default=False)

ENABLE_PRESENTATION = Config(
- key="enable_presentation",
- help=_t("Flag to turn on the Presentation mode of the editor."),
- type=coerce_bool,
- default=True
+ key="enable_presentation", help=_t("Flag to turn on the Presentation mode of the editor."), type=coerce_bool, default=True
)

ENABLE_QUERY_ANALYSIS = Config(
key="enable_query_analysis",
help=_t("Flag to turn on the built-in hints on Impala queries in the editor."),
type=coerce_bool,
- default=False
+ default=False,
)


@@ -290,115 +268,78 @@ EXAMPLES = ConfigSection(
key='examples',
help=_t('Define which query and table examples can be automatically setup for the available dialects.'),
members=dict(
- AUTO_LOAD=Config(
- 'auto_load',
- help=_t('If installing the examples automatically at startup.'),
- type=coerce_bool,
- default=False
- ),
+ AUTO_LOAD=Config('auto_load', help=_t('If installing the examples automatically at startup.'), type=coerce_bool, default=False),
AUTO_OPEN=Config(
- 'auto_open',
- help=_t('If automatically loading the dialect example at Editor opening.'),
- type=coerce_bool,
- default=False
+ 'auto_open', help=_t('If automatically loading the dialect example at Editor opening.'), type=coerce_bool, default=False
),
- QUERIES=Config(
- 'queries',
- help='Names of the saved queries to install. All if empty.',
- type=coerce_csv,
- default=[]
- ),
- TABLES=Config(
- key='tables',
- help=_t('Names of the tables to install. All if empty.'),
- type=coerce_csv,
- default=[]
- )
- )
+ QUERIES=Config('queries', help='Names of the saved queries to install. All if empty.', type=coerce_csv, default=[]),
+ TABLES=Config(key='tables', help=_t('Names of the tables to install. All if empty.'), type=coerce_csv, default=[]),
+ ),
)

+
def _default_interpreters(user):
interpreters = []
apps = appmanager.get_apps_dict(user)

if 'hive' in apps:
from beeswax.hive_site import get_hive_execution_engine
+
interpreter_name = 'Impala' if get_hive_execution_engine() == 'impala' else 'Hive' # Until using a proper dialect for 'FENG'

- interpreters.append(('hive', {
- 'name': interpreter_name, 'interface': 'hiveserver2', 'options': {}
- }),)
+ interpreters.append(
+ ('hive', {'name': interpreter_name, 'interface': 'hiveserver2', 'options': {}}),
+ )

if 'impala' in apps:
- interpreters.append(('impala', {
- 'name': 'Impala', 'interface': 'hiveserver2', 'options': {}
- }),)
-
- if 'pig' in apps:
- interpreters.append(('pig', {
- 'name': 'Pig', 'interface': 'oozie', 'options': {}
- }))
-
- if 'oozie' in apps and 'jobsub' in apps:
- interpreters.extend((
- ('java', {
- 'name': 'Java', 'interface': 'oozie', 'options': {}
- }),
- ('spark2', {
- 'name': 'Spark', 'interface': 'oozie', 'options': {}
- }),
- ('mapreduce', {
- 'name': 'MapReduce', 'interface': 'oozie', 'options': {}
- }),
- ('shell', {
- 'name': 'Shell', 'interface': 'oozie', 'options': {}
- }),
- ('sqoop1', {
- 'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}
- }),
- ('distcp', {
- 'name': 'Distcp', 'interface': 'oozie', 'options': {}
- }),
- ))
-
- from dashboard.conf import get_properties # Cyclic dependency
- dashboards = get_properties()
- if dashboards.get('solr') and dashboards['solr']['analytics']:
- interpreters.append(('solr', {
- 'name': 'Solr SQL', 'interface': 'solr', 'options': {}
- }),)
-
- from desktop.models import Cluster # Cyclic dependency
- cluster = Cluster(user)
- if cluster and cluster.get_type() == 'dataeng':
- interpreters.append(('dataeng', {
- 'name': 'DataEng', 'interface': 'dataeng', 'options': {}
- }))
-
- if 'spark' in apps:
- interpreters.extend((
- ('spark', {
- 'name': 'Scala', 'interface': 'livy', 'options': {}
- }),
- ('pyspark', {
- 'name': 'PySpark', 'interface': 'livy', 'options': {}
- }),
- ('r', {
- 'name': 'R', 'interface': 'livy', 'options': {}
- }),
- ('jar', {
- 'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}
- }),
- ('py', {
- 'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
- }),
- ('text', {
- 'name': 'Text', 'interface': 'text', 'options': {}
- }),
- ('markdown', {
- 'name': 'Markdown', 'interface': 'text', 'options': {}
- })
- ))
+ interpreters.append(
+ ('impala', {'name': 'Impala', 'interface': 'hiveserver2', 'options': {}}),
+ )
+
+ # Other interpreters are behind a flag so they are not enabled by default. Users need to explicitly add configs to enable them,
+ # or they can enable this flag to revert to the old behavior and show all default interpreters for every whitelisted app.
+ if ENABLE_ALL_INTERPRETERS.get():
+ if 'pig' in apps:
+ interpreters.append(('pig', {'name': 'Pig', 'interface': 'oozie', 'options': {}}))
+
+ if 'oozie' in apps and 'jobsub' in apps:
+ interpreters.extend(
+ (
+ ('java', {'name': 'Java', 'interface': 'oozie', 'options': {}}),
+ ('spark2', {'name': 'Spark', 'interface': 'oozie', 'options': {}}),
+ ('mapreduce', {'name': 'MapReduce', 'interface': 'oozie', 'options': {}}),
+ ('shell', {'name': 'Shell', 'interface': 'oozie', 'options': {}}),
+ ('sqoop1', {'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}}),
+ ('distcp', {'name': 'Distcp', 'interface': 'oozie', 'options': {}}),
+ )
+ )
+
+ from dashboard.conf import get_properties # Cyclic dependency
+
+ dashboards = get_properties()
+ if dashboards.get('solr') and dashboards['solr']['analytics']:
+ interpreters.append(
+ ('solr', {'name': 'Solr SQL', 'interface': 'solr', 'options': {}}),
+ )
+
+ from desktop.models import Cluster # Cyclic dependency
+
+ cluster = Cluster(user)
+ if cluster and cluster.get_type() == 'dataeng':
+ interpreters.append(('dataeng', {'name': 'DataEng', 'interface': 'dataeng', 'options': {}}))
+
+ if 'spark' in apps:
+ interpreters.extend(
+ (
+ ('spark', {'name': 'Scala', 'interface': 'livy', 'options': {}}),
+ ('pyspark', {'name': 'PySpark', 'interface': 'livy', 'options': {}}),
+ ('r', {'name': 'R', 'interface': 'livy', 'options': {}}),
+ ('jar', {'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}}),
+ ('py', {'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}}),
+ ('text', {'name': 'Text', 'interface': 'text', 'options': {}}),
+ ('markdown', {'name': 'Markdown', 'interface': 'text', 'options': {}}),
+ )
+ )

INTERPRETERS.set_for_testing(OrderedDict(interpreters))

@@ -435,12 +376,7 @@ def config_validator(user, interpreters=None):
msg += ' Failed to authenticate, check authentication configurations.'

LOG.exception(msg)
- res.append(
- (
- '%(name)s - %(dialect)s (%(type)s)' % interpreter,
- _(msg) + (' %s' % trace[:100] + ('...' if len(trace) > 50 else ''))
- )
- )
+ res.append(('%(name)s - %(dialect)s (%(type)s)' % interpreter, _(msg) + (' %s' % trace[:100] + ('...' if len(trace) > 50 else ''))))

return res

@@ -467,16 +403,13 @@ def _excute_test_query(client, connector_id, interpreter=None):
"snippets": [{"id":"2b7d1f46-17a0-30af-efeb-33d4c29b1055","type":"%(connector_id)s","status":"running","statement":"select * from web_logs","properties":{"settings":[],"variables":[],"files":[],"functions":[]},"result":{"id":"b424befa-f4f5-8799-a0b4-79753f2552b1","type":"table","handle":{"log_context":null,"statements_count":1,"end":{"column":21,"row":0},"statement_id":0,"has_more_statements":false,"start":{"column":0,"row":0},"secret":"rVRWw7YPRGqPT7LZ/TeFaA==an","has_result_set":true,"statement":"select * from web_logs","operation_type":0,"modified_row_count":null,"guid":"7xm6+epkRx6dyvYvGNYePA==an"}},"lastExecuted": 1462554843817,"database":"default"}],
"uuid": "d9efdee1-ef25-4d43-b8f9-1a170f69a05a"
}
- """ % {
+ """ % { # noqa: E501
'connector_id': connector_id,
}
snippet = json.loads(notebook_json)['snippets'][0]
snippet['interpreter'] = interpreter

return client.post(
- reverse('notebook:api_sample_data', kwargs={'database': 'default', 'table': 'default'}), {
- 'notebook': notebook_json,
- 'snippet': json.dumps(snippet),
- 'is_async': json.dumps(True),
- 'operation': json.dumps('hello')
- })
+ reverse('notebook:api_sample_data', kwargs={'database': 'default', 'table': 'default'}),
+ {'notebook': notebook_json, 'snippet': json.dumps(snippet), 'is_async': json.dumps(True), 'operation': json.dumps('hello')},
+ )
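
Note on the behavior changes in this diff: the Pig, Oozie batch, Solr SQL, DataEng and Livy/Spark interpreters are now only registered when the new enable_all_interpreters flag is on, and show_notebooks now defaults to false. A minimal hue.ini sketch for restoring the previous behavior, assuming these keys bind under the standard [notebook] section this conf module is loaded from:

  [notebook]
  # Re-register the non-default interpreters for every whitelisted app
  # (key name taken from the ENABLE_ALL_INTERPRETERS Config added above).
  enable_all_interpreters=true
  # Show the notebook menu again; this diff flips SHOW_NOTEBOOKS' default to false.
  show_notebooks=true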