
[notebook] Remove import dependencies on Spark

Romain Rigaux 10 years ago
parent
commit
60b8218

+ 13 - 10
desktop/libs/notebook/src/notebook/connectors/hiveserver2.py

@@ -23,21 +23,24 @@ from django.core.urlresolvers import reverse
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import force_unicode
 
-from beeswax import data_export
-from beeswax.api import _autocomplete
-from beeswax.design import hql_query
-from beeswax import conf as beeswax_conf
-from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, QueryHistory, HiveServerQueryHistory
-from beeswax.server import dbms
-from beeswax.server.dbms import get_query_server_config, QueryServerException
-from beeswax.views import _parse_out_hadoop_jobs
-
 from notebook.connectors.base import Api, QueryError, QueryExpired
 
-
 LOG = logging.getLogger(__name__)
 
 
+try:
+  from beeswax import data_export
+  from beeswax.api import _autocomplete
+  from beeswax.design import hql_query
+  from beeswax import conf as beeswax_conf
+  from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, QueryHistory, HiveServerQueryHistory
+  from beeswax.server import dbms
+  from beeswax.server.dbms import get_query_server_config, QueryServerException
+  from beeswax.views import _parse_out_hadoop_jobs
+except ImportError, e:
+  LOG.exception('Hive and HiveServer2 interfaces are not enabled')
+
+
 def query_error_handler(func):
   def decorator(*args, **kwargs):
     try:
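
Note: the hiveserver2.py change moves the beeswax imports below the module-level LOG definition and wraps them in try/except, so the connector module still imports cleanly when the Hive app is blacklisted. A minimal sketch of the same guarded-import pattern, using a hypothetical optional_app module (and the Python 2.6+/3-compatible "except ImportError as e" spelling, where the commit itself uses the older Python 2 "except ImportError, e"):

    import logging

    # The logger must exist before the except handler below runs.
    LOG = logging.getLogger(__name__)

    try:
      import optional_app  # hypothetical optional dependency
      OPTIONAL_APP_ENABLED = True
    except ImportError as e:
      # exception() logs the message plus the current traceback.
      LOG.exception('optional_app is not enabled')
      OPTIONAL_APP_ENABLED = False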

+ 2 - 2
desktop/libs/notebook/src/notebook/connectors/pig_batch.py

@@ -31,8 +31,8 @@ try:
   from pig import api
   from pig.models import PigScript2, get_workflow_output, hdfs_link
   from oozie.views.dashboard import check_job_access_permission, check_job_edition_permission
-except ImportError, e:
-  LOG.exception('Pig application is not enabled')
+except Exception, e:
+  LOG.exception('Pig application is not enabled: %s' % e)
 
 
 class PigApi(Api):
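
Note: pig_batch.py already had a guard; the change broadens it from ImportError to Exception, which also catches errors raised while the imported modules initialize (not just a missing module), and interpolates the error into the log line. A sketch of why the broader catch matters, with a hypothetical module name:

    import logging

    LOG = logging.getLogger(__name__)

    try:
      # A module can import fine yet still fail in its own body,
      # e.g. reading missing configuration; ImportError alone would
      # not cover that case.
      import pig_like_app  # hypothetical
    except Exception as e:
      LOG.exception('Pig application is not enabled: %s' % e)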

+ 7 - 3
desktop/libs/notebook/src/notebook/connectors/spark_batch.py

@@ -17,12 +17,16 @@
 
 import logging
 
-from spark.job_server_api import get_api as get_spark_api
 
-from notebook.connectors.base import Api
+LOG = logging.getLogger(__name__)
 
 
-LOG = logging.getLogger(__name__)
+try:
+  from spark.job_server_api import get_api as get_spark_api
+except ImportError, e:
+  LOG.exception('Spark is not enabled')
+
+from notebook.connectors.base import Api
 
 
 class SparkBatchApi(Api):
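
Note: in spark_batch.py the reordering is the point: LOG = logging.getLogger(__name__) is hoisted above the try block because the except handler calls LOG.exception; with the old layout (logger defined after the imports) a failed import would raise NameError inside the handler itself. Sketch of the safe ordering:

    import logging

    # Define the logger first so the handler can use it.
    LOG = logging.getLogger(__name__)

    try:
      from spark.job_server_api import get_api as get_spark_api
    except ImportError as e:
      LOG.exception('Spark is not enabled')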

+ 9 - 5
desktop/libs/notebook/src/notebook/connectors/spark_shell.py

@@ -19,23 +19,27 @@ import logging
 import re
 import time
 
+
+LOG = logging.getLogger(__name__)
+
+
 from django.utils.translation import ugettext as _
 
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import force_unicode
 from desktop.lib.rest.http_client import RestException
 
-from spark.conf import LIVY_SERVER_SESSION_KIND
-from spark.job_server_api import get_api as get_spark_api
+try:
+  from spark.conf import LIVY_SERVER_SESSION_KIND
+  from spark.job_server_api import get_api as get_spark_api
+except ImportError, e:
+  LOG.exception('Spark is not enabled')
 
 from notebook.data_export import download as spark_download
 from notebook.connectors.base import SessionExpired, _get_snippet_session, Api,\
   QueryError
 
 
-LOG = logging.getLogger(__name__)
-
-
 class SparkApi(Api):
 
   PROPERTIES = [
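
Note: after a failed guarded import, names like get_spark_api and LIVY_SERVER_SESSION_KIND simply do not exist in the module, so code paths that use the Spark connector fail with NameError at call time rather than breaking the import of the whole module; that deferred failure is the intended degradation. One alternative, not what this commit does, is to bind a sentinel and fail with an explicit message (the helper below is hypothetical):

    try:
      from spark.job_server_api import get_api as get_spark_api
    except ImportError:
      get_spark_api = None  # sentinel instead of a missing name

    def _ensure_spark():
      # hypothetical guard: turns the eventual NameError into a clear error
      if get_spark_api is None:
        raise RuntimeError('Spark application is not enabled')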

+ 8 - 2
desktop/libs/notebook/src/notebook/views.py

@@ -24,7 +24,6 @@ from django.utils.translation import ugettext as _
 from desktop.lib.django_util import render, JsonResponse
 from desktop.lib.json_utils import JSONEncoderForHTML
 from desktop.models import Document2, Document
-from spark.conf import LIVY_SERVER_SESSION_KIND
 
 from notebook.decorators import check_document_access_permission, check_document_modify_permission
 from notebook.connectors.base import Notebook, get_api
@@ -51,6 +50,13 @@ def notebook(request):
   except:
     LOG.exception('failed to get autocomplete base url')
 
+  is_yarn_mode = False
+  try:
+    from spark.conf import LIVY_SERVER_SESSION_KIND
+    is_yarn_mode = LIVY_SERVER_SESSION_KIND.get()
+  except:
+    LOG.exception('Spark is not enabled')
+
   return render('notebook.mako', request, {
       'notebooks_json': json.dumps([notebook.get_data()]),
       'options_json': json.dumps({
@@ -58,7 +64,7 @@ def notebook(request):
           'session_properties': SparkApi.PROPERTIES
       }),
       'autocomplete_base_url': autocomplete_base_url,
-      'is_yarn_mode': LIVY_SERVER_SESSION_KIND.get()
+      'is_yarn_mode': is_yarn_mode
   })
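
Note: views.py takes a different route: instead of a module-level guard it imports the Spark conf lazily inside the view and falls back to is_yarn_mode = False, so the notebook page still renders when Spark is absent. The bare "except:" is broader than ImportError, trading precision for robustness. The same idea in isolation (the wrapper function is illustrative; LIVY_SERVER_SESSION_KIND is the real flag):

    import logging

    LOG = logging.getLogger(__name__)

    def _get_is_yarn_mode():
      # Default used when the Spark app and its conf are unavailable.
      is_yarn_mode = False
      try:
        from spark.conf import LIVY_SERVER_SESSION_KIND
        is_yarn_mode = LIVY_SERVER_SESSION_KIND.get()
      except Exception:
        LOG.exception('Spark is not enabled')
      return is_yarn_mode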