HUE-8555 [cluster] Do not submit remote coordinator jobs by default

Romain Rigaux 7 years ago
parent
commit
d8ed540f18

+ 53 - 52
apps/oozie/src/oozie/views/editor2.py

@@ -23,7 +23,7 @@ from django.forms.formsets import formset_factory
 from django.shortcuts import redirect
 from django.utils.translation import ugettext as _
 
-from desktop.conf import USE_NEW_EDITOR
+from desktop.conf import USE_NEW_EDITOR, IS_MULTICLUSTER_ONLY, has_multi_cluster
 from desktop.lib import django_mako
 from desktop.lib.django_util import JsonResponse, render
 from desktop.lib.exceptions_renderable import PopupException
@@ -35,6 +35,7 @@ from desktop.models import Document, Document2
 from liboozie.credentials import Credentials
 from liboozie.oozie_api import get_oozie
 from liboozie.submission2 import Submission
+from metadata.conf import DEFAULT_PUBLIC_KEY
 from notebook.connectors.base import Notebook
 
 from oozie.decorators import check_document_access_permission, check_document_modify_permission,\
@@ -46,7 +47,6 @@ from oozie.models2 import Node, Workflow, Coordinator, Bundle, NODES, WORKFLOW_N
   _import_workspace, _save_workflow
 from oozie.utils import convert_to_server_timezone
 from oozie.views.editor import edit_workflow as old_edit_workflow, edit_coordinator as old_edit_coordinator, edit_bundle as old_edit_bundle
-from notebook.connectors.dataeng import DataEngApi
 
 
 LOG = logging.getLogger(__name__)
@@ -721,57 +721,58 @@ def submit_coordinator(request, doc_id):
 def _submit_coordinator(request, coordinator, mapping):
   try:
     wf = coordinator.workflow
-    mapping['auto-cluster'] = {
-      u'additionalClusterResourceTags': [],
-      u'automaticTerminationCondition': u'EMPTY_JOB_QUEUE', # or u'NONE'
-      u'cdhVersion': u'CDH514',
-      u'clouderaManagerPassword': u'guest',
-      u'clouderaManagerUsername': u'guest',
-      u'clusterName': u'analytics4', # Add time variable
-      u'computeWorkersConfiguration': {
-        u'bidUSDPerHr': 0,
-        u'groupSize': 0,
-        u'useSpot': False
-      },
-      u'environmentName': u'crn:altus:environments:us-west-1:12a0079b-1591-4ca0-b721-a446bda74e67:environment:analytics/236ebdda-18bd-428a-9d2b-cd6973d42946',
-      u'instanceBootstrapScript': u'',
-      u'instanceType': u'm4.xlarge',
-      u'jobSubmissionGroupName': u'',
-      u'jobs': [{
-          u'failureAction': u'INTERRUPT_JOB_QUEUE',
-          u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
-          u'sparkJob': {
-            u'applicationArguments': ['5'],
-            u'jars': [u's3a://datawarehouse-customer360/ETL/spark-examples.jar'],
-            u'mainClass': u'org.apache.spark.examples.SparkPi'
-          }
+    if IS_MULTICLUSTER_ONLY.get() and has_multi_cluster():
+      mapping['auto-cluster'] = {
+        u'additionalClusterResourceTags': [],
+        u'automaticTerminationCondition': u'EMPTY_JOB_QUEUE', # or u'NONE'
+        u'cdhVersion': u'CDH514',
+        u'clouderaManagerPassword': u'guest',
+        u'clouderaManagerUsername': u'guest',
+        u'clusterName': u'analytics4', # Add time variable
+        u'computeWorkersConfiguration': {
+          u'bidUSDPerHr': 0,
+          u'groupSize': 0,
+          u'useSpot': False
         },
-#         {
-#           u'failureAction': u'INTERRUPT_JOB_QUEUE',
-#           u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
-#           u'sparkJob': {
-#             u'applicationArguments': ['10'],
-#             u'jars': [u's3a://datawarehouse-customer360/ETL/spark-examples.jar'],
-#             u'mainClass': u'org.apache.spark.examples.SparkPi'
-#           }
-#         },
-#         {
-#           u'failureAction': u'INTERRUPT_JOB_QUEUE',
-#           u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
-#           u'sparkJob': {
-#             u'applicationArguments': [u'filesystems3.conf'],
-#             u'jars': [u's3a://datawarehouse-customer360/ETL/envelope-0.6.0-SNAPSHOT-c6.jar'],
-#             u'mainClass': u'com.cloudera.labs.envelope.EnvelopeMain',
-#             u'sparkArguments': u'--archives=s3a://datawarehouse-customer360/ETL/filesystems3.conf'
-#           }
-#         }
-      ],
-      u'namespaceName': u'crn:altus:sdx:us-west-1:12a0079b-1591-4ca0-b721-a446bda74e67:namespace:analytics/7ea35fe5-dbc9-4b17-92b1-97a1ab32e410',
-      u'publicKey': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDuTEfNIW8LEcVgprUrourbYjoW1RaTLhfzPnnBjJrg14koQrosl+s9phrpBBLTWmQuQdvy9iC2ma//gY5nz/7e+QuaeENhhoEiZn1PDBbFakD/AOjZXIu6DTEgCrOeXsQauFZKOkcFvrBGJC0qigYU3b8Eys4cun3RQ4S9WkDW6538wOSnsm6sXcL84KqbH+ay5gTk+lz3bi/6plALZMItbRz9IulXnLM4QfCwMxXTU/IjtnT+ltZVvKsWpfvDQ3Oyu/a6gK369iXcSP0e07KAzWiv2WYX46sNzZ8+de9ho1/VMaXnI4WrooV9lxByKWD+WsXkqtctT16VfxpX8CeR romain@unreal\\n',
-      u'serviceType': u'SPARK',
-      u'workersConfiguration': {},
-      u'workersGroupSize': u'3'
-    }
+        u'environmentName': u'crn:altus:environments:us-west-1:12a0079b-1591-4ca0-b721-a446bda74e67:environment:analytics/236ebdda-18bd-428a-9d2b-cd6973d42946',
+        u'instanceBootstrapScript': u'',
+        u'instanceType': u'm4.xlarge',
+        u'jobSubmissionGroupName': u'',
+        u'jobs': [{
+            u'failureAction': u'INTERRUPT_JOB_QUEUE',
+            u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
+            u'sparkJob': {
+              u'applicationArguments': ['5'],
+              u'jars': [u's3a://datawarehouse-customer360/ETL/spark-examples.jar'],
+              u'mainClass': u'org.apache.spark.examples.SparkPi'
+            }
+          },
+  #         {
+  #           u'failureAction': u'INTERRUPT_JOB_QUEUE',
+  #           u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
+  #           u'sparkJob': {
+  #             u'applicationArguments': ['10'],
+  #             u'jars': [u's3a://datawarehouse-customer360/ETL/spark-examples.jar'],
+  #             u'mainClass': u'org.apache.spark.examples.SparkPi'
+  #           }
+  #         },
+  #         {
+  #           u'failureAction': u'INTERRUPT_JOB_QUEUE',
+  #           u'name': u'a87e20d7-5c0d-49ee-ab37-625fa2803d51',
+  #           u'sparkJob': {
+  #             u'applicationArguments': [u'filesystems3.conf'],
+  #             u'jars': [u's3a://datawarehouse-customer360/ETL/envelope-0.6.0-SNAPSHOT-c6.jar'],
+  #             u'mainClass': u'com.cloudera.labs.envelope.EnvelopeMain',
+  #             u'sparkArguments': u'--archives=s3a://datawarehouse-customer360/ETL/filesystems3.conf'
+  #           }
+  #         }
+        ],
+        u'namespaceName': u'crn:altus:sdx:us-west-1:12a0079b-1591-4ca0-b721-a446bda74e67:namespace:analytics/7ea35fe5-dbc9-4b17-92b1-97a1ab32e410',
+        u'publicKey': DEFAULT_PUBLIC_KEY.get(),
+        u'serviceType': u'SPARK',
+        u'workersConfiguration': {},
+        u'workersGroupSize': u'3'
+      }
     wf_dir = Submission(request.user, wf, request.fs, request.jt, mapping, local_tz=coordinator.data['properties']['timezone']).deploy()
 
     properties = {'wf_application_path': request.fs.get_hdfs_path(wf_dir)}
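
The net effect of this hunk: the hardcoded Altus 'auto-cluster' specification is no longer attached to every coordinator submission. It is only added when Hue runs in multi-cluster-only mode and a remote cluster is actually configured, and the SSH public key now comes from configuration instead of being inlined. A minimal sketch of the new gating, with the full cluster spec elided (the helper name is illustrative, not from the diff; the real logic lives inline in _submit_coordinator()):

  from desktop.conf import IS_MULTICLUSTER_ONLY, has_multi_cluster
  from metadata.conf import DEFAULT_PUBLIC_KEY

  def _maybe_add_auto_cluster(mapping):
    # Attach the remote (Altus) cluster spec only when both gates are open.
    if IS_MULTICLUSTER_ONLY.get() and has_multi_cluster():
      mapping['auto-cluster'] = {
        u'serviceType': u'SPARK',
        u'publicKey': DEFAULT_PUBLIC_KEY.get(),  # previously a hardcoded key
        # ... remaining Altus cluster spec as in the diff above ...
      }
    return mapping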

+ 2 - 1
desktop/core/src/desktop/models.py

@@ -1560,6 +1560,7 @@ def get_cluster_config(user):
   return cluster_config.get_config()
 
 
+# Aka 'Altus'
 ANALYTIC_DB = 'altus'
 
 
@@ -1663,7 +1664,7 @@ class ClusterConfig():
     _interpreters = get_ordered_interpreters(self.user)
 
     if self.cluster_type == ANALYTIC_DB:
-      _interpreters = [interpreter for interpreter in _interpreters if interpreter['type'] == 'impala']
+      _interpreters = [interpreter for interpreter in _interpreters if interpreter['type'] in ('impala', 'hive', 'spark2', 'pyspark', 'mapreduce')]
 
     for interpreter in _interpreters:
       interpreters.append({
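
This widens the interpreter whitelist for the Altus/Analytic DB cluster type from Impala-only to also include Hive, Spark2, PySpark and MapReduce. Illustrative behavior on sample data (the constant name is hypothetical; the real list comes from get_ordered_interpreters(user)):

  ALLOWED_ANALYTIC_DB_TYPES = ('impala', 'hive', 'spark2', 'pyspark', 'mapreduce')

  interpreters = [{'type': 'impala'}, {'type': 'hive'}, {'type': 'pig'}, {'type': 'spark2'}]
  filtered = [i for i in interpreters if i['type'] in ALLOWED_ANALYTIC_DB_TYPES]
  # filtered keeps the impala, hive and spark2 entries; pig is dropped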

+ 1 - 1
desktop/core/src/desktop/templates/hue.mako

@@ -245,7 +245,7 @@ ${ hueIcons.symbols() }
         % if user.is_authenticated() and section != 'login' and (cluster != ANALYTIC_DB or IS_MULTICLUSTER_ONLY.get()):
         <div class="dropdown navbar-dropdown pull-right">
           % if IS_MULTICLUSTER_ONLY.get():
-            <!-- ko component: { name: 'hue-app-switcher', params: { onPrem: ko.observable(false) } } --><!-- /ko -->
+            ## <!-- ko component: { name: 'hue-app-switcher', params: { onPrem: ko.observable(false) } } --><!-- /ko -->
           % endif
 
           <%

+ 1 - 0
desktop/core/src/desktop/templates/hue_icons.mako

@@ -364,6 +364,7 @@
     <!-- ko case: 'security' --><i class="fa fa-fw fa-lock"></i><!-- /ko -->
     <!-- ko case: 'shell' --><i class="fa fa-fw fa-terminal"></i><!-- /ko -->
     <!-- ko case: 'spark' --><svg class="hi hi-fw"><use xlink:href="#hi-spark"></use></svg><!-- /ko -->
+    <!-- ko case: 'spark2' --><svg class="hi hi-fw"><use xlink:href="#hi-spark"></use></svg><!-- /ko -->
     <!-- ko case: 'sqoop' --><svg class="hi hi-fw"><use xlink:href="#hi-sqoop"></use></svg><!-- /ko -->
     <!-- ko case: 'sqoop1' --><svg class="hi hi-fw"><use xlink:href="#hi-sqoop"></use></svg><!-- /ko -->
     <!-- ko case: 'tables' --><i class="fa fa-fw fa-table"></i><!-- /ko -->

+ 1 - 1
desktop/core/src/desktop/templates/ko_components/ko_multi_cluster_sidebar.mako

@@ -130,7 +130,7 @@ from desktop.views import _ko
               url: '/dashboard/new_search'
             },{
               label: '${ _('Scheduler') }',
-              url: '/oozie/editor/workflow/new/'
+              url: '/oozie/editor/coordinator/new/'
             },{
               label: '${ _('Importer') }',
               url: '/indexer/importer'

+ 7 - 0
desktop/libs/metadata/src/metadata/conf.py

@@ -199,6 +199,13 @@ ALTUS = ConfigSection(
   )
 )
 
+DEFAULT_PUBLIC_KEY = Config(
+  key="default_publick_key",
+  help=_t("Public key used for cluster creation."),
+  type=str,
+  default=''
+)
+
 
 def get_navigator_auth_type():
   return NAVIGATOR.AUTH_TYPE.get().lower()
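
The new DEFAULT_PUBLIC_KEY entry replaces the SSH key that was previously hardcoded in editor2.py. A sketch of how it is read, assuming the standard Hue ini layout (note the key is declared as 'default_publick_key' in the diff, so that exact spelling is what an ini file would have to use):

  # In hue.ini (assumed section for metadata/conf.py settings):
  #
  #   [metadata]
  #   default_publick_key=ssh-rsa AAAA... user@host
  #
  from metadata.conf import DEFAULT_PUBLIC_KEY

  public_key = DEFAULT_PUBLIC_KEY.get()  # returns '' when unset (the declared default)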