Browse files

HUE-7258 [jb] Get conf via CM if Spnego enabled on Spark history server

Ying Chen 6 years ago
parent
commit
f18726d

+ 3 - 0
desktop/conf.dist/hue.ini

@@ -1039,6 +1039,9 @@
       # URL of the Spark History Server
       ## spark_history_server_url=http://localhost:18088
 
+      # Change this if your Spark History Server is Kerberos-secured
+      ## spark_history_server_security_enabled=false
+
       # In secure mode (HTTPS), if SSL certificates from YARN Rest APIs
       # have to be verified against certificate authority
       ## ssl_cert_ca_verify=True

+ 3 - 0
desktop/conf/pseudo-distributed.ini.tmpl

@@ -1040,6 +1040,9 @@
       # URL of the Spark History Server
       ## spark_history_server_url=http://localhost:18088
 
+      # Change this if your Spark History Server is Kerberos-secured
+      ## spark_history_server_security_enabled=false
+
       # In secure mode (HTTPS), if SSL certificates from YARN Rest APIs
       # have to be verified against certificate authority
       ## ssl_cert_ca_verify=True

+ 13 - 0
desktop/libs/hadoop/src/hadoop/conf.py

@@ -134,6 +134,16 @@ def get_spark_history_server_url():
   url = get_spark_history_server_from_cm()
   return url if url else 'http://localhost:18088'
 
+def get_spark_history_server_security_enabled():
+  """
+    Try to get the Spark History Server Kerberos/SPNEGO setting from the Cloudera Manager API, otherwise default to False
+  """
+  from metadata.conf import MANAGER
+  from metadata.manager_client import ManagerApi
+  if MANAGER.API_URL.get():
+    return ManagerApi().get_spark_history_server_security_enabled()
+  return False
+
 
 YARN_CLUSTERS = UnspecifiedConfigSection(
   "yarn_clusters",
@@ -170,6 +180,9 @@ YARN_CLUSTERS = UnspecifiedConfigSection(
       SPARK_HISTORY_SERVER_URL=Config("spark_history_server_url",
                   dynamic_default=get_spark_history_server_url,
                   help="URL of the Spark History Server"),
+      SPARK_HISTORY_SERVER_SECURITY_ENABLED=Config("spark_history_server_security_enabled",
+                  dynamic_default=get_spark_history_server_security_enabled,
+                  help="Is Spark History Server running with Kerberos authentication"),
       SSL_CERT_CA_VERIFY=Config("ssl_cert_ca_verify",
                   help="In secure mode (HTTPS), if SSL certificates from YARN Rest APIs have to be verified against certificate authority",
                   dynamic_default=default_ssl_validate,

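For reference, a minimal sketch of how the new flag is consumed: an explicit spark_history_server_security_enabled value in hue.ini takes precedence, otherwise the dynamic default calls get_spark_history_server_security_enabled(), which queries Cloudera Manager when MANAGER.API_URL is set and falls back to False. The 'default' cluster key below is the usual single-cluster name under [[yarn_clusters]] and is an assumption of this sketch, not part of the change.

# Sketch only, assuming the conventional 'default' entry under [[yarn_clusters]].
from hadoop.conf import YARN_CLUSTERS

security_enabled = YARN_CLUSTERS['default'].SPARK_HISTORY_SERVER_SECURITY_ENABLED.get()
print('Spark History Server SPNEGO enabled: %s' % security_enabled)
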
+ 1 - 1
desktop/libs/hadoop/src/hadoop/yarn/spark_history_server_api.py

@@ -51,7 +51,7 @@ def get_history_server_api():
         yarn_cluster = cluster.get_cluster_conf_for_job_submission()
         if yarn_cluster is None:
           raise PopupException(_('No Spark History Server is available.'))
-        API_CACHE = SparkHistoryServerApi(yarn_cluster.SPARK_HISTORY_SERVER_URL.get(), yarn_cluster.SECURITY_ENABLED.get(), yarn_cluster.SSL_CERT_CA_VERIFY.get())
+        API_CACHE = SparkHistoryServerApi(yarn_cluster.SPARK_HISTORY_SERVER_URL.get(), yarn_cluster.SPARK_HISTORY_SERVER_SECURITY_ENABLED.get(), yarn_cluster.SSL_CERT_CA_VERIFY.get())
     finally:
       API_CACHE_LOCK.release()
 

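The flag ultimately decides whether REST calls to the history server negotiate SPNEGO. The snippet below is an illustrative stand-in, not the SparkHistoryServerApi implementation; it assumes the requests and requests-kerberos packages and a valid Kerberos ticket in the environment.

# Illustrative only: what 'security_enabled' means for a call against the
# Spark History Server REST API (not the actual SparkHistoryServerApi code).
import requests
from requests_kerberos import HTTPKerberosAuth, OPTIONAL

def list_spark_applications(base_url, security_enabled, ssl_cert_ca_verify=True):
  # Negotiate Kerberos/SPNEGO only when the history server is secured.
  auth = HTTPKerberosAuth(mutual_authentication=OPTIONAL) if security_enabled else None
  response = requests.get('%s/api/v1/applications' % base_url.rstrip('/'),
                          auth=auth, verify=ssl_cert_ca_verify)
  response.raise_for_status()
  return response.json()
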
+ 35 - 18
desktop/libs/metadata/src/metadata/manager_client.py

@@ -82,7 +82,7 @@ class ManagerApi(object):
       raise ManagerApiException(e)
 
 
-  def get_spark_history_server_url(self, cluster_name=None):
+  def get_spark_history_server_configs(self, cluster_name=None):
     service_name = "SPARK_ON_YARN"
     shs_role_type = "SPARK_YARN_HISTORY_SERVER"
 
@@ -115,28 +115,45 @@ class ManagerApi(object):
             'spark_service_display_name': spark_service_display_name,
             'shs_server_name': shs_server_name
           }, params={'view': 'full'})['items']
+          return shs_server_hostId, shs_server_configs
+    except Exception, e:
+      LOG.warn("Check Spark History Server via ManagerApi: %s" % e)
 
-          shs_ui_port = None
-          shs_ssl_port = None
-          shs_ssl_enabled = None
-          for config in shs_server_configs:
-            if 'relatedName' in config and 'default' in config:
-              if config['relatedName'] == 'spark.history.ui.port':
-                shs_ui_port = config['default']
-              if config['relatedName'] == 'spark.ssl.historyServer.port':
-                shs_ssl_port = config['default']
-              if config['relatedName'] == 'spark.ssl.historyServer.enabled':
-                shs_ssl_enabled = config['default']
-          shs_ui_host = self._root.get('hosts/%(hostId)s' % {'hostId': shs_server_hostId})
-          shs_ui_hostname = shs_ui_host['hostname'] if shs_ui_host else None
-
-          return self.assemble_shs_url(shs_ui_hostname, shs_ui_port, shs_ssl_port, shs_ssl_enabled)
+    return None, None
 
-    except Exception, e:
-      LOG.warn("Check Spark history server via ManangerAPI: %s" % e)
+  def get_spark_history_server_url(self, cluster_name=None):
+    shs_server_hostId, shs_server_configs = self.get_spark_history_server_configs(cluster_name=cluster_name)
+
+    if shs_server_hostId and shs_server_configs:
+      shs_ui_port = None
+      shs_ssl_port = None
+      shs_ssl_enabled = None
+      for config in shs_server_configs:
+        if 'relatedName' in config and 'default' in config:
+          if config['relatedName'] == 'spark.history.ui.port':
+            shs_ui_port = config['default']
+          if config['relatedName'] == 'spark.ssl.historyServer.port':
+            shs_ssl_port = config['default']
+          if config['relatedName'] == 'spark.ssl.historyServer.enabled':
+            shs_ssl_enabled = config['default']
+      shs_ui_host = self._root.get('hosts/%(hostId)s' % {'hostId': shs_server_hostId})
+      shs_ui_hostname = shs_ui_host['hostname'] if shs_ui_host else None
+
+      return self.assemble_shs_url(shs_ui_hostname, shs_ui_port, shs_ssl_port, shs_ssl_enabled)
 
     return None
 
+  def get_spark_history_server_security_enabled(self, cluster_name=None):
+    shs_server_hostId, shs_server_configs = self.get_spark_history_server_configs(cluster_name=cluster_name)
+
+    if shs_server_configs:
+      for config in shs_server_configs:
+        if 'relatedName' in config and 'default' in config and config['relatedName'] == 'history_server_spnego_enabled':
+          shs_security_enabled = config['default']
+          return shs_security_enabled and shs_security_enabled == 'true'
+
+    return False
+
   def assemble_shs_url(self, shs_ui_hostname, shs_ui_port=None, shs_ssl_port=None, shs_ssl_enabled=None):
     if not shs_ui_hostname or not shs_ui_port or not shs_ssl_port or not shs_ssl_enabled:
       LOG.warn("Spark conf not found!")