
Revert "[interpreters] Do not add all default interpreters of every whitelisted app (#2903)" (#2911)

This reverts commit 6e299b11ce5c60b1e2cad418e34f56f9d1c5998a.
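In practice, the revert drops the `enable_all_interpreters` flag again and restores gating on `is_cm_managed()`: on a Cloudera Manager managed cluster the default interpreters of every whitelisted app are appended to whatever is configured, while on an unmanaged cluster an explicit `[[interpreters]]` configuration is used verbatim. The sketch below is a minimal illustration of that restored selection logic, not the real `notebook/conf.py`; `ordered_interpreters`, its parameters, and the plain dicts standing in for `INTERPRETERS` / `_default_interpreters()` are hypothetical simplifications of the code shown in the diff further down.

```python
# Minimal sketch of the interpreter selection this revert restores
# (illustrative only; names below are not the real Hue config objects).
from collections import OrderedDict

def ordered_interpreters(configured, defaults, cm_managed):
    """Return the interpreter mapping used to build the editor list.

    configured -- interpreters explicitly set in hue.ini ([[interpreters]])
    defaults   -- interpreters derived from the whitelisted apps
    cm_managed -- True when the cluster is managed by Cloudera Manager
    """
    if cm_managed:
        # CM-managed: start from the app defaults and append what the
        # admin configured explicitly ("CM --> Append" in the tests).
        result = OrderedDict(defaults)
        result.update(configured)
        return result
    # Unmanaged: use the configured interpreters verbatim, falling back to
    # the defaults only when nothing was configured ("No CM --> Verbatim").
    return OrderedDict(configured) if configured else OrderedDict(defaults)

# Example mirroring the test_get_ordered_interpreters cases below:
defaults = OrderedDict([('hive', {'name': 'Hive'})])
configured = OrderedDict([('phoenix', {'name': 'Phoenix'})])
assert list(ordered_interpreters(configured, defaults, cm_managed=False)) == ['phoenix']
assert list(ordered_interpreters(configured, defaults, cm_managed=True)) == ['hive', 'phoenix']
```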
Harsh Gupta · 3 years ago
commit b2bab308e0

desktop/conf.dist/hue.ini  (+0, -3)

@@ -944,9 +944,6 @@ tls=no
 ## Flag to enable the bulk submission of queries as a background task through Oozie.
 # enable_batch_execute=true
 
-## Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app.
-# enable_all_interpreters=true
-
 ## Flag to turn on the SQL indexer.
 # enable_sql_indexer=false
 

desktop/conf/pseudo-distributed.ini.tmpl  (+0, -3)

@@ -927,9 +927,6 @@
   ## Flag to enable the bulk submission of queries as a background task through Oozie.
   # enable_batch_execute=false
 
-  ## Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app.
-  # enable_all_interpreters=true
-
   ## Flag to turn on the SQL indexer.
   # enable_sql_indexer=false
 

desktop/libs/notebook/src/notebook/api_tests.py  (+56, -131)

@@ -44,7 +44,7 @@ import notebook.connectors.hiveserver2
 from notebook.api import _historify
 from notebook.connectors.base import Notebook, QueryError, Api, QueryExpired
 from notebook.decorators import api_error_handler
-from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS, ENABLE_ALL_INTERPRETERS
+from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS
 
 
 if sys.version_info[0] > 2:
@@ -643,10 +643,6 @@ def test_get_interpreters_to_show():
           'name': 'Hive', 'displayName': 'Hive', 'interface': 'hiveserver2', 'type': 'hive', 'is_sql': True,
           'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
       }),
-      ('impala', {
-          'name': 'Impala', 'displayName': 'Impala', 'interface': 'hiveserver2', 'type': 'impala', 'is_sql': True,
-          'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'impala'
-      }),
       ('spark', {
           'name': 'Scala', 'displayName': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {},
           'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
@@ -674,10 +670,6 @@ def test_get_interpreters_to_show():
           'name': 'Hive', 'displayName': 'Hive', 'interface': 'hiveserver2', 'is_sql': True, 'type': 'hive',
           'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
       }),
-      ('impala', {
-          'name': 'Impala', 'displayName': 'Impala', 'interface': 'hiveserver2', 'type': 'impala', 'is_sql': True,
-          'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'impala'
-      }),
       ('spark', {
           'name': 'Scala', 'displayName': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {},
           'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
@@ -688,8 +680,7 @@ def test_get_interpreters_to_show():
     resets = [
       INTERPRETERS.set_for_testing(default_interpreters),
       APP_BLACKLIST.set_for_testing(''),
-      ENABLE_CONNECTORS.set_for_testing(False),
-      ENABLE_ALL_INTERPRETERS.set_for_testing(False)
+      ENABLE_CONNECTORS.set_for_testing(False)
     ]
     appmanager.DESKTOP_MODULES = []
     appmanager.DESKTOP_APPS = None
@@ -742,139 +733,73 @@ def test_get_ordered_interpreters():
 
   try:
     resets = [APP_BLACKLIST.set_for_testing('')]
-    flag_reset = ENABLE_ALL_INTERPRETERS.set_for_testing(False)
     appmanager.DESKTOP_MODULES = []
     appmanager.DESKTOP_APPS = None
     appmanager.load_apps(APP_BLACKLIST.get())
 
-    with patch('notebook.conf.appmanager.get_apps_dict') as get_apps_dict:
-      with patch('notebook.conf.has_connectors') as has_connectors:
-        get_apps_dict.return_value = {'hive': {}} # Impala blacklisted indirectly
-        has_connectors.return_value = False
-        notebook.conf.INTERPRETERS_CACHE = None
+    with patch('notebook.conf.is_cm_managed') as is_cm_managed:
+      with patch('notebook.conf.appmanager.get_apps_dict') as get_apps_dict:
+        with patch('notebook.conf.has_connectors') as has_connectors:
+          get_apps_dict.return_value = {'hive': {}}
+          has_connectors.return_value = False
+          notebook.conf.INTERPRETERS_CACHE = None
+
+          is_cm_managed.return_value = False
+
+          # No CM --> Verbatim
+          INTERPRETERS.set_for_testing(
+            OrderedDict((
+              ('phoenix', {
+                  'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
+              }),)
+            )
+          )
+          assert_equal(
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['phoenix']
+          )
+          assert_equal(  # Check twice because of cache
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['phoenix']
+          )
 
-        # No interpreters explicitly added
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-        )))
+          is_cm_managed.return_value = True
+          notebook.conf.INTERPRETERS_CACHE = None
 
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        assert_equal(  # Check twice because of cache
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
-
-        # Interpreter added explicitly
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-            ('phoenix', {
-                'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
-            }),)
+          # CM --> Append []
+          INTERPRETERS.set_for_testing(
+            OrderedDict(()
+            )
           )
-        )
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'phoenix']
-        )
-        assert_equal(  # Check twice
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'phoenix']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
-
-        # Add one of the spark editor explicitly when spark is blacklisted
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-            ('pyspark', {
-                'name': 'PySpark', 'interface': 'livy', 'dialect': 'pyspark'
-            }),)
+
+          assert_equal(
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['hive']
           )
-        )
-        # Explicitly added spark editor not seen when flag is False
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        assert_equal(  # Check twice because of cache
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
-
-        # Whitelist spark app and no explicit interpreter added
-        get_apps_dict.return_value = {'hive': {}, 'spark': {}}
-
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-        )))
-        # No spark interpreter because ENABLE_ALL_INTERPRETERS is currently False
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        assert_equal(  # Check twice because of cache
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
-
-        # Add one of the spark editor explicitly
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-            ('pyspark', {
-                'name': 'PySpark', 'interface': 'livy', 'dialect': 'pyspark'
-            }),)
+          assert_equal(  # Check twice
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['hive']
           )
-        )
-        # Explicitly added spark editor seen even when flag is False
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'pyspark']
-        )
-        assert_equal(  # Check twice because of cache
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'pyspark']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
 
-        flag_reset = ENABLE_ALL_INTERPRETERS.set_for_testing(True) # Check interpreters when flag is True
+          notebook.conf.INTERPRETERS_CACHE = None
 
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-        )))
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown']
-        )
-        assert_equal(  # Check twice because of cache
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown']
-        )
-        notebook.conf.INTERPRETERS_CACHE = None
-
-        # Interpreter added explicitly when flag is True
-        INTERPRETERS.set_for_testing(
-          OrderedDict((
-            ('phoenix', {
-              'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
-            }),)
+          # CM --> Append [Phoenix]
+          INTERPRETERS.set_for_testing(
+            OrderedDict((
+              ('phoenix', {
+                  'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
+              }),)
+            )
+          )
+          assert_equal(
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['hive', 'phoenix']
+          )
+          assert_equal(  # Check twice
+            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+            ['hive', 'phoenix']
           )
-        )
-        assert_equal(
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown', 'phoenix']
-        )
-        assert_equal(  # Check twice
-          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown', 'phoenix']
-        )
-
   finally:
-    flag_reset()
     for reset in resets:
       reset()
     appmanager.DESKTOP_MODULES = []

desktop/libs/notebook/src/notebook/conf.py  (+74, -76)

@@ -97,9 +97,15 @@ def get_ordered_interpreters(user=None):
     ]
   else:
     if INTERPRETERS_CACHE is None:
-      none_user = None # For getting full list of interpreters
-      extra_interpreters = INTERPRETERS.get()  # Combine the other apps interpreters
-      _default_interpreters(none_user)
+      none_user = None # for getting full list of interpreters
+      if is_cm_managed():
+        extra_interpreters = INTERPRETERS.get()  # Combine the other apps interpreters
+        _default_interpreters(none_user)
+      else:
+        extra_interpreters = {}
+
+      if not INTERPRETERS.get():
+        _default_interpreters(none_user)
 
       INTERPRETERS_CACHE = INTERPRETERS.get()
       INTERPRETERS_CACHE.update(extra_interpreters)
@@ -185,13 +191,6 @@ INTERPRETERS_SHOWN_ON_WHEEL = Config(
   default=[]
 )
 
-ENABLE_ALL_INTERPRETERS = Config(
-  key="enable_all_interpreters",
-  help=_t("Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app."),
-  type=coerce_bool,
-  default=True
-)
-
 DEFAULT_LIMIT = Config(
   "default_limit",
   help="Default limit to use in SELECT statements if not present. Set to 0 to disable.",
@@ -319,72 +318,71 @@ def _default_interpreters(user):
       'name': 'Impala', 'interface': 'hiveserver2', 'options': {}
     }),)
 
-  if ENABLE_ALL_INTERPRETERS.get():
-    if 'pig' in apps:
-      interpreters.append(('pig', {
-        'name': 'Pig', 'interface': 'oozie', 'options': {}
-      }))
-
-    if 'oozie' in apps and 'jobsub' in apps:
-      interpreters.extend((
-        ('java', {
-            'name': 'Java', 'interface': 'oozie', 'options': {}
-        }),
-        ('spark2', {
-            'name': 'Spark', 'interface': 'oozie', 'options': {}
-        }),
-        ('mapreduce', {
-            'name': 'MapReduce', 'interface': 'oozie', 'options': {}
-        }),
-        ('shell', {
-            'name': 'Shell', 'interface': 'oozie', 'options': {}
-        }),
-        ('sqoop1', {
-            'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}
-        }),
-        ('distcp', {
-            'name': 'Distcp', 'interface': 'oozie', 'options': {}
-        }),
-      ))
-
-    from dashboard.conf import get_properties  # Cyclic dependency
-    dashboards = get_properties()
-    if dashboards.get('solr') and dashboards['solr']['analytics']:
-      interpreters.append(('solr', {
-          'name': 'Solr SQL', 'interface': 'solr', 'options': {}
-      }),)
-
-    from desktop.models import Cluster  # Cyclic dependency
-    cluster = Cluster(user)
-    if cluster and cluster.get_type() == 'dataeng':
-      interpreters.append(('dataeng', {
-          'name': 'DataEng', 'interface': 'dataeng', 'options': {}
-      }))
-
-    if 'spark' in apps:
-      interpreters.extend((
-        ('spark', {
-            'name': 'Scala', 'interface': 'livy', 'options': {}
-        }),
-        ('pyspark', {
-            'name': 'PySpark', 'interface': 'livy', 'options': {}
-        }),
-        ('r', {
-            'name': 'R', 'interface': 'livy', 'options': {}
-        }),
-        ('jar', {
-            'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}
-        }),
-        ('py', {
-            'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
-        }),
-        ('text', {
-            'name': 'Text', 'interface': 'text', 'options': {}
-        }),
-        ('markdown', {
-            'name': 'Markdown', 'interface': 'text', 'options': {}
-        })
-      ))
+  if 'pig' in apps:
+    interpreters.append(('pig', {
+      'name': 'Pig', 'interface': 'oozie', 'options': {}
+    }))
+
+  if 'oozie' in apps and 'jobsub' in apps:
+    interpreters.extend((
+      ('java', {
+          'name': 'Java', 'interface': 'oozie', 'options': {}
+      }),
+      ('spark2', {
+          'name': 'Spark', 'interface': 'oozie', 'options': {}
+      }),
+      ('mapreduce', {
+          'name': 'MapReduce', 'interface': 'oozie', 'options': {}
+      }),
+      ('shell', {
+          'name': 'Shell', 'interface': 'oozie', 'options': {}
+      }),
+      ('sqoop1', {
+          'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}
+      }),
+      ('distcp', {
+          'name': 'Distcp', 'interface': 'oozie', 'options': {}
+      }),
+    ))
+
+  from dashboard.conf import get_properties  # Cyclic dependency
+  dashboards = get_properties()
+  if dashboards.get('solr') and dashboards['solr']['analytics']:
+    interpreters.append(('solr', {
+        'name': 'Solr SQL', 'interface': 'solr', 'options': {}
+    }),)
+
+  from desktop.models import Cluster  # Cyclic dependency
+  cluster = Cluster(user)
+  if cluster and cluster.get_type() == 'dataeng':
+    interpreters.append(('dataeng', {
+        'name': 'DataEng', 'interface': 'dataeng', 'options': {}
+    }))
+
+  if 'spark' in apps:
+    interpreters.extend((
+      ('spark', {
+          'name': 'Scala', 'interface': 'livy', 'options': {}
+      }),
+      ('pyspark', {
+          'name': 'PySpark', 'interface': 'livy', 'options': {}
+      }),
+      ('r', {
+          'name': 'R', 'interface': 'livy', 'options': {}
+      }),
+      ('jar', {
+          'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}
+      }),
+      ('py', {
+          'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
+      }),
+      ('text', {
+          'name': 'Text', 'interface': 'text', 'options': {}
+      }),
+      ('markdown', {
+          'name': 'Markdown', 'interface': 'text', 'options': {}
+      })
+    ))
 
   INTERPRETERS.set_for_testing(OrderedDict(interpreters))
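
With the flag gone, a non-CM deployment that wants only specific editors lists them explicitly and gets exactly that list (the "No CM --> Verbatim" case exercised in the tests above). Below is a hedged example of such a hue.ini fragment, assuming the standard `[notebook]` / `[[interpreters]]` layout; the mysql entry and its connection settings are illustrative placeholders, not taken from this commit:

```ini
[notebook]
  [[interpreters]]
    [[[hive]]]
      name=Hive
      interface=hiveserver2
    [[[mysql]]]
      # Hypothetical SQLAlchemy-backed editor; adjust url to your database.
      name=MySQL
      interface=sqlalchemy
      options='{"url": "mysql://user:password@host:3306/database"}'
```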