
HUE-8330 [cluster] Only populate computes depending on the correct type of cluster

Romain 6 years ago
parent commit b230f1290f

+ 1 - 2
apps/impala/src/impala/server.py

@@ -57,7 +57,6 @@ def get_api(user, url):
 def _get_impala_server_url(session):
   properties = session.get_properties()
   http_addr = properties.get('coordinator_host', properties.get('http_addr'))
-  # Remove scheme if found
   http_addr = http_addr.replace('http://', '').replace('https://', '')
   return ('https://' if get_webserver_certificate_file() else 'http://') + http_addr
 
@@ -307,4 +306,4 @@ class ImpalaDaemonApi(object):
       'query_id': query_id
     }
 
-    return self._root.get('query_profile_encoded', params=params)
+    return self._root.get('query_profile_encoded', params=params)
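
For context, a minimal standalone sketch of the address normalization that _get_impala_server_url performs after this change; the function name, the sample properties and the has_webserver_cert flag are illustrative stand-ins for the real session properties and get_webserver_certificate_file().

# Sketch of the scheme handling in _get_impala_server_url (illustrative only).
def normalize_impala_server_url(properties, has_webserver_cert=False):
    # Prefer the explicit coordinator host, falling back to the legacy http_addr key.
    http_addr = properties.get('coordinator_host', properties.get('http_addr'))
    # Strip any scheme already present in the session property.
    http_addr = http_addr.replace('http://', '').replace('https://', '')
    # Re-add the scheme based on whether a webserver certificate is configured.
    return ('https://' if has_webserver_cert else 'http://') + http_addr

# A coordinator_host that already carries a scheme is normalized:
assert normalize_impala_server_url({'coordinator_host': 'https://impalad-0:25000'}) == 'http://impalad-0:25000'
assert normalize_impala_server_url({'http_addr': 'impalad-0:25000'}, has_webserver_cert=True) == 'https://impalad-0:25000'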

+ 14 - 36
desktop/core/src/desktop/api2.py

@@ -135,50 +135,28 @@ def get_context_namespaces(request, interface):
 
 @api_error_handler
 def get_context_computes(request, interface):
+  '''
+  Some clusters like Snowball can have multiple computes for a certain language (Hive, Impala...).
+  '''
   response = {}
   computes = []
 
   clusters = get_clusters(request.user).values()
-  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type']]
-
-  computes.extend([{
-      'id': cluster['id'],
-      'name': cluster['name'],
-      'namespace': cluster['id'],
-      'interface': interface,
-      'type': cluster['type']
-    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
-  ])
+  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type'] or 'snowball' in cluster['type']]
 
   if has_altus_clusters:
     if interface == 'impala' or interface == 'report':
       if IS_K8S_ONLY.get():
         dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
-      else:
-        dw_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
-
-      computes.extend([{
-          'id': cluster.get('crn'),
-          'name': cluster.get('clusterName'),
-          'status': cluster.get('status'),
-          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
-          'compute_end_point': IS_K8S_ONLY.get() and '%(publicHost)s' % cluster['coordinatorEndpoint'] or '',
-          'type': 'altus-dw'
-        } for cluster in dw_clusters if (cluster.get('status') == 'CREATED' and cluster.get('cdhVersion') >= 'CDH515') or (IS_K8S_ONLY.get() and 'TERMINAT' not in cluster['status'])]
-      )
-
-    if interface == 'oozie' or interface == 'spark2':
-      computes.extend([{
-          'id': cluster.get('crn'),
-          'name': cluster.get('clusterName'),
-          'status': cluster.get('status'),
-          'environmentType': cluster.get('environmentType'),
-          'serviceType': cluster.get('serviceType'),
-          'namespace': cluster.get('namespaceCrn'),
-          'type': 'altus-de'
-        } for cluster in DataEngApi(request.user).list_clusters()['clusters']]
-      )
-      # TODO if interface == 'spark2' keep only SPARK type
+        computes.extend([{
+            'id': cluster.get('crn'),
+            'name': cluster.get('clusterName'),
+            'status': cluster.get('status'),
+            'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
+            'compute_end_point': IS_K8S_ONLY.get() and '%(publicHost)s' % cluster['coordinatorEndpoint'] or '',
+            'type': 'altus-dw'
+          } for cluster in dw_clusters]
+        )
 
   response[interface] = computes
   response['status'] = 0
@@ -186,7 +164,7 @@ def get_context_computes(request, interface):
   return JsonResponse(response)
 
 
-# Deprecated
+# Deprecated, not used.
 @api_error_handler
 def get_context_clusters(request, interface):
   response = {}
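
For context, a hedged sketch of the JSON payload the rewritten get_context_computes would return for interface='impala' when one Data Warehouse cluster is listed; only the keys come from the diff above, while the crn, names and host are made-up placeholders.

# Hypothetical response shape for interface='impala' (placeholder values only).
example_response = {
    'impala': [{
        'id': 'crn:altus:dataware:...:cluster:example',        # cluster.get('crn')
        'name': 'example-dw-cluster',                           # cluster.get('clusterName')
        'status': 'CREATED',                                    # cluster.get('status')
        'namespace': 'crn:altus:dataware:...:cluster:example',  # namespaceCrn, falling back to crn
        'compute_end_point': 'coordinator.example.com',         # publicHost, only set when IS_K8S_ONLY
        'type': 'altus-dw'
    }],
    'status': 0
}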

+ 1 - 0
desktop/core/src/desktop/lib/connectors/api.py

@@ -107,6 +107,7 @@ AVAILABLE_CONNECTORS = _group_category_connectors(CONNECTOR_TYPES)
 # TODO: remove installed connectors that don't have a connector or are blacklisted
 # TODO: load back from DB and apply Category properties, e.g. defaults, interface, category, category_name...
 # TODO: connector groups: if we want one type (e.g. Hive) to show-up with multiple computes and the same saved query.
+# TODO: type --> name, type --> SQL language, e.g. mysql
 CONNECTOR_INSTANCES = [
   {'name': 'Impala', 'type': Impala().TYPE + '-1', 'connector_name': Impala().TYPE, 'interface': Impala().INTERFACE, 'settings': Impala().PROPERTIES, 'id': 1, 'category': 'engines', 'description': ''},
   {'name': 'Hive', 'type': Hive().TYPE + '-2', 'connector_name': Hive().TYPE, 'interface': Hive().INTERFACE, 'settings': Hive().PROPERTIES, 'id': 2, 'category': 'engines', 'description': ''},
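
For context, a hypothetical fully expanded view of the first CONNECTOR_INSTANCES entry; the concrete 'impala', 'hiveserver2' and empty-settings values are assumptions about what Impala().TYPE, Impala().INTERFACE and Impala().PROPERTIES resolve to, not values taken from this commit.

# One CONNECTOR_INSTANCES entry spelled out with assumed values (illustrative only).
example_connector_instance = {
    'name': 'Impala',
    'type': 'impala-1',          # Impala().TYPE + '-1' (see the type --> name TODO above)
    'connector_name': 'impala',  # assumed Impala().TYPE
    'interface': 'hiveserver2',  # assumed Impala().INTERFACE
    'settings': [],              # assumed Impala().PROPERTIES
    'id': 1,
    'category': 'engines',
    'description': ''
}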

+ 1 - 1
desktop/libs/notebook/src/notebook/connectors/base.py

@@ -349,7 +349,7 @@ def get_api(request, snippet):
   elif cluster and 'crn:altus:dataeng:' in cluster_name:
     interface = 'dataeng'
 
-  LOG.info('Selected cluster %s %s interface %s' % (cluster_name, cluster, interface))
+  LOG.debug('Selected connector %s %s interface=%s compute=%s' % (cluster_name, cluster, interface, snippet.get('compute')))
   snippet['interface'] = interface
 
   if interface.startswith('hiveserver2') or interface == 'hms':
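
For context, a hypothetical illustration of the message the new LOG.debug call would produce for an Altus Data Engineering compute; only the format string comes from the diff, the crn and the cluster/compute dicts are placeholder values.

# Placeholder values fed through the exact format string used above.
cluster_name = 'crn:altus:dataeng:us-west-1:12345:cluster:demo'
cluster = {'name': cluster_name, 'type': 'altus-de'}
interface = 'dataeng'
compute = {'name': cluster_name}
print('Selected connector %s %s interface=%s compute=%s' % (cluster_name, cluster, interface, compute))
# prints: Selected connector crn:altus:dataeng:... {'name': ..., 'type': 'altus-de'} interface=dataeng compute={'name': ...}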