
HUE-8997 [importer] Support transaction table

Jean-Francois Desjeans Gauthier 6 years ago
parent
commit
834f6f58d8

+ 1 - 1
apps/beeswax/src/beeswax/hive_site.py

@@ -181,7 +181,7 @@ def get_use_sasl():
 
 
 def has_concurrency_support():
-  '''For SQL transactions like INSERT, DELETE, UPDATE since Hive 3.'''
+  '''For SQL transactions like INSERT, DELETE, UPDATE since Hive 3. Possibly use 'set -v' in the future to obtain the properties hive.create.as.acid=true and hive.create.as.insert.only=true.'''
   return get_conf().get(_CNF_HIVE_SUPPORT_CONCURRENCY, 'TRUE').upper() == 'TRUE'
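
A minimal illustration of the check above (configuration values assumed): the constant maps to the standard hive.support.concurrency property, and the permissive 'TRUE' default means ACID support is assumed unless it is explicitly disabled.

  conf = {'hive.support.concurrency': 'true'}  # assumed shape of get_conf()'s result
  has_support = conf.get('hive.support.concurrency', 'TRUE').upper() == 'TRUE'  # True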
 
 

+ 15 - 0
apps/impala/src/impala/impala_flags.py

@@ -28,6 +28,8 @@ _SSL_SERVER_CERTIFICATE = '-ssl_server_certificate'
 _MAX_RESULT_CACHE_SIZE = '-max_result_cache_size'
 _AUTHORIZED_PROXY_USER_CONFIG = '-authorized_proxy_user_config'
 _PRINCIPAL = '-principal'
+_DEFAULT_QUERY_OPTIONS = '-default_query_options'
+_DEFAULT_TRANSACTIONAL_TYPE = 'default_transactional_type'
 
 
 def reset():
@@ -76,6 +78,19 @@ def is_impersonation_enabled():
   user_config = get_conf().get(_AUTHORIZED_PROXY_USER_CONFIG)
   return True if user_config and 'hue=' in user_config else False
 
+def default_query_option(option_name):
+  query_options = get_conf().get(_DEFAULT_QUERY_OPTIONS)
+  if not query_options:
+    return query_options
+  options = dict([option.split('=') for option in query_options.split(',')])
+  return options.get(option_name)
+
+def default_transactional_type():
+  return default_query_option(_DEFAULT_TRANSACTIONAL_TYPE)
+
+def is_transactional():
+  return default_transactional_type() is not None
+
 def is_kerberos_enabled():
   return get_conf().get(_PRINCIPAL) is not None
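
A quick example of how default_query_option() reads the Impala startup flag; the value format (comma-separated key=value pairs) is inferred from the split logic, and the option values below are only illustrative.

  query_options = 'default_transactional_type=insert_only,sync_ddl=true'
  options = dict(option.split('=') for option in query_options.split(','))
  options.get('default_transactional_type')  # -> 'insert_only', so is_transactional() returns True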
 

+ 0 - 9
desktop/libs/indexer/src/indexer/api3.py

@@ -362,15 +362,6 @@ def importer_submit(request):
     if source['path']:
       path = urllib_unquote(source['path'])
       source['path'] = request.fs.netnormpath(path)
-      parent_path = request.fs.parent_path(path)
-      stats = request.fs.stats(parent_path)
-      split = urlparse(path)
-      # Only for HDFS, import data and non-external table
-      if split.scheme in ('', 'hdfs') and destination['importData'] and destination['useDefaultLocation'] and oct(stats["mode"])[-1] != '7' and not request.POST.get('show_command'):
-        user_scratch_dir = request.fs.get_home_dir() + '/.scratchdir'
-        request.fs.do_as_user(request.user, request.fs.mkdir, user_scratch_dir, 0o0777)
-        request.fs.do_as_user(request.user, request.fs.rename, source['path'], user_scratch_dir)
-        source['path'] = user_scratch_dir + '/' + source['path'].split('/')[-1]
 
   if destination['ouputFormat'] in ('database', 'table'):
     destination['nonDefaultLocation'] = request.fs.netnormpath(destination['nonDefaultLocation']) if destination['nonDefaultLocation'] else destination['nonDefaultLocation']

+ 39 - 15
desktop/libs/indexer/src/indexer/indexers/sql.py

@@ -20,19 +20,23 @@ from builtins import object
 import logging
 import sys
 import urllib.request, urllib.error
+import uuid
 
 from django.contrib.auth.models import User
 from django.urls import reverse
 from django.utils.translation import ugettext as _
 
 from desktop.lib import django_mako
+from desktop.lib.exceptions_renderable import PopupException
+
 from notebook.models import make_notebook
 from azure.abfs.__init__ import abfspath
 
 if sys.version_info[0] > 2:
-  from urllib.parse import unquote as urllib_unquote
+  from urllib.parse import urlparse, unquote as urllib_unquote
 else:
   from urllib import unquote as urllib_unquote
+  from urlparse import urlparse
 
 LOG = logging.getLogger(__name__)
 
@@ -66,10 +70,14 @@ class SQLIndexer(object):
     comment = destination['description']
 
     source_path = urllib_unquote(source['path'])
+    load_data = destination['importData']
     external = not destination['useDefaultLocation']
     external_path = urllib_unquote(destination['nonDefaultLocation'])
 
-    load_data = destination['importData']
+    editor_type = destination['sourceType']
+    is_transactional = destination['isTransactional']
+    default_transactional_type = 'insert_only' if destination['isInsertOnly'] else 'default'
+
     skip_header = destination['hasHeader']
 
     primary_keys = destination['primaryKeys']
@@ -115,8 +123,8 @@ class SQLIndexer(object):
     "escapeChar"    = "\\\\"
     ''' % source['format']
 
-
-    if table_format in ('parquet', 'kudu'):
+    use_temp_table = table_format in ('parquet', 'orc', 'kudu') or is_transactional
+    if use_temp_table: # We'll be using a temp table to load data
       if load_data:
         table_name, final_table_name = 'hue__tmp_%s' % table_name, table_name
 
@@ -135,18 +143,33 @@ class SQLIndexer(object):
       collection_delimiter = None
       map_delimiter = None
 
-    if external or (load_data and table_format in ('parquet', 'kudu')):
+    if external or (load_data and table_format in ('parquet', 'orc', 'kudu')): # We'll use location to load data
       if not self.fs.isdir(external_path): # File selected
         external_path, external_file_name = self.fs.split(external_path)
 
         if len(self.fs.listdir(external_path)) > 1:
-          external_path = external_path + '/%s_table' % external_file_name # If dir not just the file, create data dir and move file there.
+          external_path = external_path + '/%s%s_table' % (external_file_name, str(uuid.uuid4()))  # If the directory holds more than the selected file, create a unique data dir and move the file there.
           self.fs.mkdir(external_path)
           self.fs.rename(source_path, external_path)
-    
+    elif load_data: # We'll use load data command
+      parent_path = self.fs.parent_path(source_path)
+      stats = self.fs.stats(parent_path)
+      split = urlparse(source_path)
+      # Only for HDFS, import data and non-external table
+      if split.scheme in ('', 'hdfs') and oct(stats["mode"])[-1] != '7':
+        user_scratch_dir = self.fs.get_home_dir() + '/.scratchdir/%s' % str(uuid.uuid4()) # Make sure it's unique.
+        self.fs.do_as_user(self.user, self.fs.mkdir, user_scratch_dir, 0o0777)
+        self.fs.do_as_user(self.user, self.fs.rename, source['path'], user_scratch_dir)
+        source_path = user_scratch_dir + '/' + source['path'].split('/')[-1]
+
     if external_path.lower().startswith("abfs"): #this is to check if its using an ABFS path
-      external_path = abfspath(external_path) 
-      
+      external_path = abfspath(external_path)
+
+    tbl_properties = {}
+    if skip_header:
+      tbl_properties['skip.header.line.count'] = '1'
+    tbl_properties['transactional'] = 'false' # The temp table is not transactional, but the final table can be if is_transactional. Table properties unknown to older Hive versions can safely be set without error.
+
     sql += django_mako.render_to_string("gen/create_table_statement.mako", {
         'table': {
             'name': table_name,
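
A short sketch of the staging fallback added above (mode value illustrative): when data is loaded into a managed table on HDFS and the source's parent directory is not world-writable, the file is first moved to a unique scratch directory under the user's home before LOAD DATA runs.

  mode = 0o40755                            # e.g. drwxr-xr-x on the parent directory
  needs_scratch_dir = oct(mode)[-1] != '7'  # True -> stage under ~/.scratchdir/<uuid>/
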
@@ -158,10 +181,10 @@ class SQLIndexer(object):
             'serde_name': serde_name,
             'serde_properties': serde_properties,
             'file_format': file_format,
-            'external': external or load_data and table_format in ('parquet', 'kudu'),
+            'external': external or load_data and table_format in ('parquet', 'orc', 'kudu'),
             'path': external_path,
-            'skip_header': skip_header,
             'primary_keys': primary_keys if table_format == 'kudu' and not load_data else [],
+            'tbl_properties': tbl_properties
          },
         'columns': columns,
         'partition_columns': partition_columns,
@@ -180,8 +203,8 @@ class SQLIndexer(object):
       db = dbms.get(self.user, query_server=query_server_config)
       sql += "\n\n%s;" % db.load_data(database, table_name, form_data, None, generate_ddl_only=True)
 
-    if load_data and table_format in ('parquet', 'kudu'):
-      file_format = table_format
+    if load_data and use_temp_table:
+      file_format = 'TextFile' if table_format == 'text' else table_format
       if table_format == 'kudu':
         columns_list = ['`%s`' % col for col in primary_keys + [col['name'] for col in destination['columns'] if col['name'] not in primary_keys and col['keep']]]
         extra_create_properties = """PRIMARY KEY (%(primary_keys)s)
@@ -196,6 +219,9 @@ class SQLIndexer(object):
       else:
         columns_list = ['*']
         extra_create_properties = 'STORED AS %(file_format)s' % {'file_format': file_format}
+        if is_transactional:
+          extra_create_properties += '\nTBLPROPERTIES("transactional"="true", "transactional_properties"="%s")' % default_transactional_type
+
       sql += '''\n\nCREATE TABLE `%(database)s`.`%(final_table_name)s`%(comment)s
         %(extra_create_properties)s
         AS SELECT %(columns_list)s
@@ -212,8 +238,6 @@ class SQLIndexer(object):
           'table_name': table_name
       }
 
-    editor_type = 'impala' if table_format == 'kudu' else destination['sourceType']
-
     on_success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': final_table_name}) + '?source_type=' + source_type
 
     return make_notebook(
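
Taken together, for a transactional text-format target the importer now emits a staged plan along these lines. This is an illustrative end result only: the table name, columns and paths are assumed, and the exact DDL comes from create_table_statement.mako and dbms.load_data().

  sql = '''
  CREATE TABLE `default`.`hue__tmp_sales` (...)
    ...
    TBLPROPERTIES("skip.header.line.count" = "1", "transactional" = "false");

  LOAD DATA INPATH '/user/demo/.scratchdir/<uuid>/sales.csv'
    INTO TABLE `default`.`hue__tmp_sales`;

  CREATE TABLE `default`.`sales`
    STORED AS TextFile
    TBLPROPERTIES("transactional"="true", "transactional_properties"="insert_only")
    AS SELECT * FROM `default`.`hue__tmp_sales`;
  '''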

File diff suppressed because it is too large
+ 8 - 1
desktop/libs/indexer/src/indexer/indexers/sql_tests.py


+ 2 - 2
desktop/libs/indexer/src/indexer/templates/gen/create_table_statement.mako

@@ -127,7 +127,7 @@ INPUTFORMAT ${table["input_format_class"] | n} OUTPUTFORMAT ${table["output_form
 % if table.get("external", False):
 LOCATION '${table["path"] | n}'
 % endif
-% if table.get("skip_header", False):
-TBLPROPERTIES("skip.header.line.count" = "1")
+% if table.get("tbl_properties"):
+TBLPROPERTIES(${ ', '.join(['"' + prop + '"' + ' = ' + '"' + table.get("tbl_properties").get(prop) + '"' for prop in table.get("tbl_properties")]) | n })
 % endif
 ;
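
The Mako expression above can be read as the following plain-Python equivalent; the property values are just an example.

  tbl_properties = {'skip.header.line.count': '1', 'transactional': 'false'}
  clause = 'TBLPROPERTIES(%s)' % ', '.join('"%s" = "%s"' % (k, v) for k, v in tbl_properties.items())
  # -> TBLPROPERTIES("skip.header.line.count" = "1", "transactional" = "false")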

+ 36 - 1
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -20,6 +20,8 @@
   from desktop import conf
   from desktop.views import commonheader, commonfooter, commonshare, commonimportexport, _ko
   from filebrowser.conf import SHOW_UPLOAD_BUTTON
+  from beeswax import hive_site
+  from impala import impala_flags
   from notebook.conf import ENABLE_SQL_INDEXER
 
   from indexer.conf import ENABLE_NEW_INDEXER, ENABLE_SQOOP, ENABLE_KAFKA, CONFIG_INDEXER_LIBS_PATH, ENABLE_SCALABLE_INDEXER, ENABLE_ALTUS, ENABLE_ENVELOPE, ENABLE_FIELD_EDITOR
@@ -628,6 +630,14 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
                   <input type="checkbox" data-bind="checked: useDefaultLocation"> ${_('Store in Default location')}
                 </label>
               </div>
+              <div class="control-group" data-bind="visible: isTransactionalVisible">
+                <label class="checkbox inline-block">
+                  <input type="checkbox" data-bind="checked: isTransactional"> ${_('Transactional table')}
+                </label>
+                <label class="checkbox inline-block" title="${_('Full transactional support available in Hive with ORC')}">
+                  <input type="checkbox" data-bind="checked: isInsertOnly, enable: isTransactionalUpdateEnabled"> ${_('Insert only')}
+                </label>
+              </div>
 
               <div class="control-group" data-bind="visible: !useDefaultLocation()">
                 <label for="path" class="control-label"><div>${ _('External location') }</div>
@@ -2393,11 +2403,21 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
       self.tableFormats = ko.pureComputed(function() {
         if (wizard.source.inputFormat() === 'kafka') {
           return [{'value': 'kudu', 'name': 'Kudu'}];
+        } else if (vm.sourceType == 'impala') { // Impala supports Kudu
+          return [
+            {'value': 'text', 'name': 'Text'},
+            {'value': 'parquet', 'name': 'Parquet'},
+            {'value': 'kudu', 'name': 'Kudu'},
+            {'value': 'csv', 'name': 'Csv'},
+            {'value': 'avro', 'name': 'Avro'},
+            {'value': 'json', 'name': 'Json'},
+            {'value': 'regexp', 'name': 'Regexp'},
+            {'value': 'orc', 'name': 'ORC'},
+          ];
         }
         return [
           {'value': 'text', 'name': 'Text'},
           {'value': 'parquet', 'name': 'Parquet'},
-          {'value': 'kudu', 'name': 'Kudu'},
           {'value': 'csv', 'name': 'Csv'},
           {'value': 'avro', 'name': 'Avro'},
           {'value': 'json', 'name': 'Json'},
@@ -2425,6 +2445,21 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
       self.useDefaultLocation = ko.observable(true);
       self.nonDefaultLocation = ko.observable('');
 
+      var isTransactionalVisibleImpala = '${ impala_flags.is_transactional() }'.toLowerCase() == 'true';
+      var isTransactionalVisibleHive = '${ hive_site.has_concurrency_support() }'.toLowerCase() == 'true';
+      var transactionalDefaultType = '${ impala_flags.default_transactional_type() }'.toLowerCase();
+
+      self.isTransactionalVisible = ko.observable((vm.sourceType == 'impala' && isTransactionalVisibleImpala) || (vm.sourceType == 'hive' && isTransactionalVisibleHive));
+      self.isTransactional = ko.observable(self.isTransactionalVisible());
+      self.isInsertOnly = ko.observable(true); // Impala doesn't yet have full support.
+      self.isTransactionalUpdateEnabled = ko.pureComputed(function() {
+        var enabled = self.tableFormat() == 'orc' && (vm.sourceType == 'hive' || (vm.sourceType == 'impala' && transactionalDefaultType.length && transactionalDefaultType != 'insert_only'));
+        if (!enabled) {
+          self.isInsertOnly(true);
+        }
+        return enabled;
+      });
+
       self.hasHeader = ko.observable(false);
 
       self.useCustomDelimiters = ko.observable(false);
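
Restating the isTransactionalUpdateEnabled rule above in plain Python (inputs assumed): full ACID, i.e. the "Insert only" checkbox unchecked, is only offered for ORC tables, and for Impala only when the cluster's default_transactional_type is something other than insert_only.

  def transactional_update_enabled(table_format, source_type, default_type):
      return table_format == 'orc' and (
          source_type == 'hive'
          or (source_type == 'impala' and bool(default_type) and default_type != 'insert_only'))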

Some files were not shown because too many files changed in this diff