
HUE-8536 [sqoop] Include hive-site.xml automatically when importing data to hive

Romain Rigaux 7 years ago
parent
commit
8e82a3ba16

+ 2 - 2
desktop/libs/indexer/src/indexer/indexers/rdbms.py

@@ -208,11 +208,11 @@ def run_sqoop(request, source, destination, start_time):
   elif destination_type == 'table':
     success_url = reverse('metastore:describe_table', kwargs={'database': destination_database_name, 'table': destination_table_name})
     if rdbms_all_tables_selected:
-      statement = 'import-all-tables %(statement)s --hive-import' % {
+      statement = 'import-all-tables %(statement)s --hive-import --delete-target-dir' % {
         'statement': statement
       }
     else:
-      statement = 'import %(statement)s --table %(rdbmsTableName)s --hive-import' % {
+      statement = 'import %(statement)s --table %(rdbmsTableName)s --hive-import --delete-target-dir' % {
         'statement': statement,
         'rdbmsTableName': rdbms_table_name
       }
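
For reference, a minimal sketch (not part of the patch) of what the %-formatting above produces; the JDBC connection options and table name are hypothetical. The added --delete-target-dir flag tells Sqoop to delete the HDFS target directory if it already exists, so re-running the import does not fail on a leftover directory from a previous run.

  # Hypothetical inputs, for illustration only
  statement = '--connect jdbc:mysql://db.example.com/sales --username hue'
  rdbms_table_name = 'orders'
  statement = 'import %(statement)s --table %(rdbmsTableName)s --hive-import --delete-target-dir' % {
    'statement': statement,
    'rdbmsTableName': rdbms_table_name
  }
  # statement is now:
  # 'import --connect jdbc:mysql://db.example.com/sales --username hue --table orders --hive-import --delete-target-dir'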

+ 2 - 1
desktop/libs/liboozie/src/liboozie/submission2.py

@@ -326,7 +326,8 @@ STORED AS TEXTFILE %s""" % (self.properties.get('send_result_path'), '\n\n\n'.jo
           statements = notebook.get_data()['snippets'][0]['statement_raw']
 
           self._create_file(deployment_dir, action.data['name'] + '.pig', statements)
-        elif action.data['type'] == 'spark' or action.data['type'] == 'spark-document':
+        elif action.data['type'] in ('spark', 'spark-document') or (
+              action.data['type'] in ('sqoop', 'sqoop-document') and '--hive-import' in action.data['properties']['statement']):
           if not [f for f in action.data.get('properties').get('files', []) if f.get('value').endswith('hive-site.xml')]:
             hive_site_lib = Hdfs.join(deployment_dir + '/lib/', 'hive-site.xml')
             hive_site_content = get_hive_site_content()
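
A minimal sketch (assumed names, not part of the patch) restating the condition introduced above: Spark actions, and Sqoop actions whose statement requests a Hive import, need hive-site.xml shipped to the workflow's lib/ directory, but only if no hive-site.xml file is already attached to the action.

  def needs_hive_site(action_data):
    # Hypothetical helper mirroring the new branch in submission2.py
    is_spark = action_data['type'] in ('spark', 'spark-document')
    is_hive_import = (
      action_data['type'] in ('sqoop', 'sqoop-document')
      and '--hive-import' in action_data['properties']['statement']
    )
    already_attached = any(
      f.get('value', '').endswith('hive-site.xml')
      for f in action_data.get('properties', {}).get('files', [])
    )
    return (is_spark or is_hive_import) and not already_attached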