
[interpreters] Do not add all default interpreters of every whitelisted app (#2903)

- This ensures that if a user needs an interpreter X, they must add its config under [[interpreters]]. This way we won't show all the unneeded interpreters and confuse users whenever an app is whitelisted.
- The Hive and Impala editors are added by default and are not affected by this flag, but they honour app_blacklist.
- By default the flag is set to True. If users want to remove unnecessary editors of whitelisted apps, they need to set the flag to False and then, if required, add specific editors under [[interpreters]] instead of having every one of them pop up (see the sketch below).
- Fixed existing unit tests and added more unit tests for all scenarios.
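
A minimal hue.ini sketch of the opt-in workflow described above, assuming the flag sits in the [notebook] section alongside the other flags shown in this diff; the phoenix entry is purely a hypothetical example of an explicitly added interpreter:

  [notebook]
    # Assumption: turn off auto-adding the default interpreters of every whitelisted app.
    enable_all_interpreters=false

    [[interpreters]]
      # Hive and Impala are still added by default; anything else must be listed here.
      [[[phoenix]]]
        name=Phoenix
        interface=sqlalchemy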
Harsh Gupta 3 years ago
parent commit
6e299b11ce

+ 3 - 0
desktop/conf.dist/hue.ini

@@ -944,6 +944,9 @@ tls=no
 ## Flag to enable the bulk submission of queries as a background task through Oozie.
 # enable_batch_execute=true
 
+## Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app.
+# enable_all_interpreters=true
+
 ## Flag to turn on the SQL indexer.
 # enable_sql_indexer=false
 

+ 3 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -927,6 +927,9 @@
   ## Flag to enable the bulk submission of queries as a background task through Oozie.
   # enable_batch_execute=false
 
+  ## Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app.
+  # enable_all_interpreters=true
+
   ## Flag to turn on the SQL indexer.
   # enable_sql_indexer=false
 

+ 131 - 56
desktop/libs/notebook/src/notebook/api_tests.py

@@ -44,7 +44,7 @@ import notebook.connectors.hiveserver2
 from notebook.api import _historify
 from notebook.connectors.base import Notebook, QueryError, Api, QueryExpired
 from notebook.decorators import api_error_handler
-from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS
+from notebook.conf import get_ordered_interpreters, INTERPRETERS_SHOWN_ON_WHEEL, INTERPRETERS, ENABLE_ALL_INTERPRETERS
 
 
 if sys.version_info[0] > 2:
@@ -643,6 +643,10 @@ def test_get_interpreters_to_show():
           'name': 'Hive', 'displayName': 'Hive', 'interface': 'hiveserver2', 'type': 'hive', 'is_sql': True,
           'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
       }),
+      ('impala', {
+          'name': 'Impala', 'displayName': 'Impala', 'interface': 'hiveserver2', 'type': 'impala', 'is_sql': True,
+          'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'impala'
+      }),
       ('spark', {
           'name': 'Scala', 'displayName': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {},
           'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
@@ -670,6 +674,10 @@ def test_get_interpreters_to_show():
           'name': 'Hive', 'displayName': 'Hive', 'interface': 'hiveserver2', 'is_sql': True, 'type': 'hive',
           'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'hive'
       }),
+      ('impala', {
+          'name': 'Impala', 'displayName': 'Impala', 'interface': 'hiveserver2', 'type': 'impala', 'is_sql': True,
+          'options': {}, 'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'impala'
+      }),
       ('spark', {
           'name': 'Scala', 'displayName': 'Scala', 'interface': 'livy', 'type': 'spark', 'is_sql': False, 'options': {},
           'dialect_properties': {}, 'is_catalog': False, 'category': 'editor', 'dialect': 'scala'
@@ -680,7 +688,8 @@ def test_get_interpreters_to_show():
     resets = [
       INTERPRETERS.set_for_testing(default_interpreters),
       APP_BLACKLIST.set_for_testing(''),
-      ENABLE_CONNECTORS.set_for_testing(False)
+      ENABLE_CONNECTORS.set_for_testing(False),
+      ENABLE_ALL_INTERPRETERS.set_for_testing(False)
     ]
     appmanager.DESKTOP_MODULES = []
     appmanager.DESKTOP_APPS = None
@@ -733,73 +742,139 @@ def test_get_ordered_interpreters():
 
   try:
     resets = [APP_BLACKLIST.set_for_testing('')]
+    flag_reset = ENABLE_ALL_INTERPRETERS.set_for_testing(False)
     appmanager.DESKTOP_MODULES = []
     appmanager.DESKTOP_APPS = None
     appmanager.load_apps(APP_BLACKLIST.get())
 
-    with patch('notebook.conf.is_cm_managed') as is_cm_managed:
-      with patch('notebook.conf.appmanager.get_apps_dict') as get_apps_dict:
-        with patch('notebook.conf.has_connectors') as has_connectors:
-          get_apps_dict.return_value = {'hive': {}}
-          has_connectors.return_value = False
-          notebook.conf.INTERPRETERS_CACHE = None
-
-          is_cm_managed.return_value = False
-
-          # No CM --> Verbatim
-          INTERPRETERS.set_for_testing(
-            OrderedDict((
-              ('phoenix', {
-                  'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
-              }),)
-            )
-          )
-          assert_equal(
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['phoenix']
-          )
-          assert_equal(  # Check twice because of cache
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['phoenix']
-          )
+    with patch('notebook.conf.appmanager.get_apps_dict') as get_apps_dict:
+      with patch('notebook.conf.has_connectors') as has_connectors:
+        get_apps_dict.return_value = {'hive': {}} # Impala blacklisted indirectly
+        has_connectors.return_value = False
+        notebook.conf.INTERPRETERS_CACHE = None
 
-          is_cm_managed.return_value = True
-          notebook.conf.INTERPRETERS_CACHE = None
+        # No interpreters explicitly added
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+        )))
 
-          # CM --> Append []
-          INTERPRETERS.set_for_testing(
-            OrderedDict(()
-            )
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        assert_equal(  # Check twice because of cache
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
+
+        # Interpreter added explicitly
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+            ('phoenix', {
+                'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
+            }),)
           )
-
-          assert_equal(
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['hive']
+        )
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'phoenix']
+        )
+        assert_equal(  # Check twice
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'phoenix']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
+
+        # Add one of the spark editor explicitly when spark is blacklisted
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+            ('pyspark', {
+                'name': 'PySpark', 'interface': 'livy', 'dialect': 'pyspark'
+            }),)
           )
-          assert_equal(  # Check twice
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['hive']
+        )
+        # Explicitly added spark editor not seen when flag is False
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        assert_equal(  # Check twice because of cache
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
+
+        # Whitelist spark app and no explicit interpreter added
+        get_apps_dict.return_value = {'hive': {}, 'spark': {}}
+
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+        )))
+        # No spark interpreter because ENABLE_ALL_INTERPRETERS is currently False
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        assert_equal(  # Check twice because of cache
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
+
+        # Add one of the spark editor explicitly
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+            ('pyspark', {
+                'name': 'PySpark', 'interface': 'livy', 'dialect': 'pyspark'
+            }),)
           )
+        )
+        # Explicitly added spark editor seen even when flag is False
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'pyspark']
+        )
+        assert_equal(  # Check twice because of cache
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'pyspark']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
 
-          notebook.conf.INTERPRETERS_CACHE = None
+        flag_reset = ENABLE_ALL_INTERPRETERS.set_for_testing(True) # Check interpreters when flag is True
 
-          # CM --> Append [Phoenix]
-          INTERPRETERS.set_for_testing(
-            OrderedDict((
-              ('phoenix', {
-                  'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
-              }),)
-            )
-          )
-          assert_equal(
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['hive', 'phoenix']
-          )
-          assert_equal(  # Check twice
-            [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
-            ['hive', 'phoenix']
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+        )))
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown']
+        )
+        assert_equal(  # Check twice because of cache
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown']
+        )
+        notebook.conf.INTERPRETERS_CACHE = None
+
+        # Interpreter added explicitly when flag is True
+        INTERPRETERS.set_for_testing(
+          OrderedDict((
+            ('phoenix', {
+              'name': 'Phoenix', 'interface': 'sqlalchemy', 'dialect': 'phoenix'
+            }),)
           )
+        )
+        assert_equal(
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown', 'phoenix']
+        )
+        assert_equal(  # Check twice
+          [interpreter['dialect'] for interpreter in get_ordered_interpreters()],
+          ['hive', 'scala', 'pyspark', 'r', 'spark submit jar', 'spark submit python', 'text', 'markdown', 'phoenix']
+        )
+
   finally:
+    flag_reset()
     for reset in resets:
       reset()
     appmanager.DESKTOP_MODULES = []

+ 76 - 74
desktop/libs/notebook/src/notebook/conf.py

@@ -97,15 +97,9 @@ def get_ordered_interpreters(user=None):
     ]
   else:
     if INTERPRETERS_CACHE is None:
-      none_user = None # for getting full list of interpreters
-      if is_cm_managed():
-        extra_interpreters = INTERPRETERS.get()  # Combine the other apps interpreters
-        _default_interpreters(none_user)
-      else:
-        extra_interpreters = {}
-
-      if not INTERPRETERS.get():
-        _default_interpreters(none_user)
+      none_user = None # For getting full list of interpreters
+      extra_interpreters = INTERPRETERS.get()  # Combine the other apps interpreters
+      _default_interpreters(none_user)
 
       INTERPRETERS_CACHE = INTERPRETERS.get()
       INTERPRETERS_CACHE.update(extra_interpreters)
@@ -191,6 +185,13 @@ INTERPRETERS_SHOWN_ON_WHEEL = Config(
   default=[]
 )
 
+ENABLE_ALL_INTERPRETERS = Config(
+  key="enable_all_interpreters",
+  help=_t("Flag to enable all interpreters (Hive and Impala are added by default) related to every whitelisted app."),
+  type=coerce_bool,
+  default=True
+)
+
 DEFAULT_LIMIT = Config(
   "default_limit",
   help="Default limit to use in SELECT statements if not present. Set to 0 to disable.",
@@ -318,71 +319,72 @@ def _default_interpreters(user):
       'name': 'Impala', 'interface': 'hiveserver2', 'options': {}
     }),)
 
-  if 'pig' in apps:
-    interpreters.append(('pig', {
-      'name': 'Pig', 'interface': 'oozie', 'options': {}
-    }))
-
-  if 'oozie' in apps and 'jobsub' in apps:
-    interpreters.extend((
-      ('java', {
-          'name': 'Java', 'interface': 'oozie', 'options': {}
-      }),
-      ('spark2', {
-          'name': 'Spark', 'interface': 'oozie', 'options': {}
-      }),
-      ('mapreduce', {
-          'name': 'MapReduce', 'interface': 'oozie', 'options': {}
-      }),
-      ('shell', {
-          'name': 'Shell', 'interface': 'oozie', 'options': {}
-      }),
-      ('sqoop1', {
-          'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}
-      }),
-      ('distcp', {
-          'name': 'Distcp', 'interface': 'oozie', 'options': {}
-      }),
-    ))
-
-  from dashboard.conf import get_properties  # Cyclic dependency
-  dashboards = get_properties()
-  if dashboards.get('solr') and dashboards['solr']['analytics']:
-    interpreters.append(('solr', {
-        'name': 'Solr SQL', 'interface': 'solr', 'options': {}
-    }),)
-
-  from desktop.models import Cluster  # Cyclic dependency
-  cluster = Cluster(user)
-  if cluster and cluster.get_type() == 'dataeng':
-    interpreters.append(('dataeng', {
-        'name': 'DataEng', 'interface': 'dataeng', 'options': {}
-    }))
-
-  if 'spark' in apps:
-    interpreters.extend((
-      ('spark', {
-          'name': 'Scala', 'interface': 'livy', 'options': {}
-      }),
-      ('pyspark', {
-          'name': 'PySpark', 'interface': 'livy', 'options': {}
-      }),
-      ('r', {
-          'name': 'R', 'interface': 'livy', 'options': {}
-      }),
-      ('jar', {
-          'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}
-      }),
-      ('py', {
-          'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
-      }),
-      ('text', {
-          'name': 'Text', 'interface': 'text', 'options': {}
-      }),
-      ('markdown', {
-          'name': 'Markdown', 'interface': 'text', 'options': {}
-      })
-    ))
+  if ENABLE_ALL_INTERPRETERS.get():
+    if 'pig' in apps:
+      interpreters.append(('pig', {
+        'name': 'Pig', 'interface': 'oozie', 'options': {}
+      }))
+
+    if 'oozie' in apps and 'jobsub' in apps:
+      interpreters.extend((
+        ('java', {
+            'name': 'Java', 'interface': 'oozie', 'options': {}
+        }),
+        ('spark2', {
+            'name': 'Spark', 'interface': 'oozie', 'options': {}
+        }),
+        ('mapreduce', {
+            'name': 'MapReduce', 'interface': 'oozie', 'options': {}
+        }),
+        ('shell', {
+            'name': 'Shell', 'interface': 'oozie', 'options': {}
+        }),
+        ('sqoop1', {
+            'name': 'Sqoop 1', 'interface': 'oozie', 'options': {}
+        }),
+        ('distcp', {
+            'name': 'Distcp', 'interface': 'oozie', 'options': {}
+        }),
+      ))
+
+    from dashboard.conf import get_properties  # Cyclic dependency
+    dashboards = get_properties()
+    if dashboards.get('solr') and dashboards['solr']['analytics']:
+      interpreters.append(('solr', {
+          'name': 'Solr SQL', 'interface': 'solr', 'options': {}
+      }),)
+
+    from desktop.models import Cluster  # Cyclic dependency
+    cluster = Cluster(user)
+    if cluster and cluster.get_type() == 'dataeng':
+      interpreters.append(('dataeng', {
+          'name': 'DataEng', 'interface': 'dataeng', 'options': {}
+      }))
+
+    if 'spark' in apps:
+      interpreters.extend((
+        ('spark', {
+            'name': 'Scala', 'interface': 'livy', 'options': {}
+        }),
+        ('pyspark', {
+            'name': 'PySpark', 'interface': 'livy', 'options': {}
+        }),
+        ('r', {
+            'name': 'R', 'interface': 'livy', 'options': {}
+        }),
+        ('jar', {
+            'name': 'Spark Submit Jar', 'interface': 'livy-batch', 'options': {}
+        }),
+        ('py', {
+            'name': 'Spark Submit Python', 'interface': 'livy-batch', 'options': {}
+        }),
+        ('text', {
+            'name': 'Text', 'interface': 'text', 'options': {}
+        }),
+        ('markdown', {
+            'name': 'Markdown', 'interface': 'text', 'options': {}
+        })
+      ))
 
   INTERPRETERS.set_for_testing(OrderedDict(interpreters))
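
For reference, a minimal sketch (not part of the commit) of how the new flag interacts with get_ordered_interpreters, modeled directly on the updated tests in api_tests.py above; it assumes the same patched test environment (app list mocked, connectors disabled) that those tests set up:

  from collections import OrderedDict

  import notebook.conf
  from notebook.conf import ENABLE_ALL_INTERPRETERS, INTERPRETERS, get_ordered_interpreters

  # Assumption: apps and connectors are patched as in test_get_ordered_interpreters.
  flag_reset = ENABLE_ALL_INTERPRETERS.set_for_testing(False)
  interpreters_reset = INTERPRETERS.set_for_testing(OrderedDict())
  try:
      notebook.conf.INTERPRETERS_CACHE = None  # The cache must be cleared after each config change
      # With the flag off and no [[interpreters]] entries, only the defaults remain
      # (Hive, plus Impala when it is whitelisted).
      print([interpreter['dialect'] for interpreter in get_ordered_interpreters()])
  finally:
      flag_reset()
      interpreters_reset()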