
[Importer] Support large files (10 MB) for the local importer (#2801)

Ayush Goyal, 3 years ago
parent commit fe7f840823

+ 3 - 0
desktop/core/src/desktop/conf.py

@@ -642,6 +642,7 @@ def default_from_email():
     _default_from_email = "hue@" + fqdn
   return _default_from_email
 
+from indexer.conf import ENABLE_DIRECT_UPLOAD  # Imported here, not at the top of the file, to avoid a circular dependency
 
 def default_database_options():
   """Database type dependent options"""
@@ -649,6 +650,8 @@ def default_database_options():
     return {'threaded': True}
   elif DATABASE.ENGINE.get().endswith('sqlite3'):
     return {'timeout': 30}
+  elif DATABASE.ENGINE.get().endswith('mysql') and ENABLE_DIRECT_UPLOAD:     # Raise max_allowed_packet to 64 MB, since MySQL 5.7
+    return {'init_command': 'SET GLOBAL max_allowed_packet=67108864'}        # defaults it to only 4 MB
   else:
     return {}
 

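The options dict returned by default_database_options() is (assuming Hue's usual settings wiring) merged into Django's DATABASES['default']['OPTIONS'], and the MySQL driver runs init_command once per new connection. A minimal sketch of that wiring; the database name below is illustrative, not taken from this commit:

# Minimal sketch, assuming the dict above ends up in DATABASES OPTIONS
# (the mysqlclient driver executes 'init_command' on every new connection).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'hue',  # illustrative database name
        'OPTIONS': {
            'init_command': 'SET GLOBAL max_allowed_packet=67108864',  # 64 MB, as in the hunk above
        },
    },
}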
+ 3 - 0
desktop/core/src/desktop/settings.py

@@ -37,6 +37,7 @@ from desktop.lib.python_util import force_dict_to_strings
 
 from aws.conf import is_enabled as is_s3_enabled
 from azure.conf import is_abfs_enabled
+from indexer.conf import ENABLE_DIRECT_UPLOAD
 
 if sys.version_info[0] > 2:
   from django.utils.translation import gettext_lazy as _
@@ -117,6 +118,8 @@ USE_TZ = False
 # Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
 MEDIA_URL = ''
 
+if ENABLE_DIRECT_UPLOAD:
+  DATA_UPLOAD_MAX_MEMORY_SIZE = 67108864  # Raise the limit to 64 MB as the importer sends large POST requests
 
 ############################################################
 # Part 3: Django configuration
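Django raises RequestDataTooBig when the non-file portion of a request body exceeds DATA_UPLOAD_MAX_MEMORY_SIZE, so raising the limit to 64 MB is what lets the importer's large POST payloads through. A simplified illustration of that guard; check_post_body is a hypothetical helper, not Django's actual source:

# Simplified illustration of Django's body-size guard (see django.http.request).
from django.conf import settings
from django.core.exceptions import RequestDataTooBig

def check_post_body(content_length):
    # Hypothetical helper mirroring the check Django applies while reading POST data.
    limit = settings.DATA_UPLOAD_MAX_MEMORY_SIZE
    if limit is not None and content_length > limit:
        raise RequestDataTooBig('Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.')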

+ 18 - 9
desktop/libs/indexer/src/indexer/indexers/sql.py

@@ -315,6 +315,7 @@ class SQLIndexer(object):
     columns = [col for col_index, col in enumerate(destination['columns']) if col_index not in cols_to_remove]
 
     dialect = get_interpreter(source_type, self.user)['dialect']
+    tmp_table_name = table_name
 
     if dialect in ('hive', 'mysql'):
 
@@ -331,10 +332,11 @@ class SQLIndexer(object):
       }
 
     elif dialect == 'impala':
-      sql = '''CREATE TABLE IF NOT EXISTS %(database)s.%(table_name)s_tmp (
+      tmp_table_name = tmp_table_name + '_tmp'
+      sql = '''CREATE TABLE IF NOT EXISTS %(database)s.%(table_name)s (
 %(columns)s);\n''' % {
           'database': database,
-          'table_name': table_name,
+          'table_name': tmp_table_name,
           'columns': ',\n'.join(['  `%(name)s` string' % col for col in columns]),
       }                                                 # Impala does not implicitly cast between string and numeric or Boolean types.
 
@@ -355,23 +357,30 @@ class SQLIndexer(object):
           _csv_rows.append(tuple(row))
 
         if _csv_rows:
-          csv_rows = str(_csv_rows)[1:-1]
 
-          if dialect in ('hive', 'mysql'):
-            sql += '''\nINSERT INTO %(database)s.%(table_name)s VALUES %(csv_rows)s;\n'''% {
+          insert_sql = ""
+          for count in range(1 + len(_csv_rows)//5000):      # Insert 5000 rows per statement (value chosen through testing on
+            temp_list = _csv_rows[count*5000:(count+1)*5000] # sample files); inserting all rows at once can hit memory limits.
+            if len(temp_list) == 0:
+              break
+            csv_rows = str(temp_list)[1:-1]
+            insert_sql += '''\nINSERT INTO %(database)s.%(table_name)s VALUES %(csv_rows)s;\n'''% {
               'database': database,
-              'table_name': table_name,
+              'table_name': tmp_table_name,
               'csv_rows': csv_rows
             }
+
+          if dialect in ('hive', 'mysql'):
+            sql += insert_sql
+
           elif dialect == 'impala':
              # casting from string to boolean is not allowed in impala so string -> int -> bool
             sql_ = ',\n'.join([
               '  CAST ( `%(name)s` AS %(type)s ) `%(name)s`' % col if col['type'] != 'boolean' \
               else '  CAST ( CAST ( `%(name)s` AS TINYINT ) AS boolean ) `%(name)s`' % col for col in columns
             ])
-
-            sql += '''\nINSERT INTO %(database)s.%(table_name)s_tmp VALUES %(csv_rows)s;\n
-CREATE TABLE IF NOT EXISTS %(database)s.%(table_name)s
+            sql += insert_sql
+            sql += '''\nCREATE TABLE IF NOT EXISTS %(database)s.%(table_name)s
 AS SELECT\n%(sql_)s\nFROM  %(database)s.%(table_name)s_tmp;\n\nDROP TABLE IF EXISTS %(database)s.%(table_name)s_tmp;'''% {
               'database': database,
               'table_name': table_name,

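The batching loop added above caps each generated INSERT at 5000 rows, so the importer never builds a single statement holding the whole file. The same idea as a standalone sketch; the function name and signature are illustrative only:

# Standalone sketch of the 5000-rows-per-statement batching used in sql.py above.
CHUNK_SIZE = 5000  # value chosen by the patch author through testing

def build_insert_statements(database, table_name, rows, chunk_size=CHUNK_SIZE):
    # 'rows' is a list of tuples; str(batch)[1:-1] yields the same
    # "(a, b), (c, d)" VALUES payload the diff relies on.
    statements = []
    for start in range(0, len(rows), chunk_size):
        batch = rows[start:start + chunk_size]
        statements.append(
            'INSERT INTO %(database)s.%(table_name)s VALUES %(csv_rows)s;' % {
                'database': database,
                'table_name': table_name,
                'csv_rows': str(batch)[1:-1],
            }
        )
    return statements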
+ 2 - 2
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -3233,8 +3233,8 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
         var files = $('#inputfile')[0].files[0];
         fd.append('file', files);
         var file_size = files.size;
-        if (file_size > 200000) {
-          $.jHueNotify.warn("${ _('File size exceeds the supported size (200 KB).') }");
+        if (file_size > 10485760) {
+          $.jHueNotify.warn("${ _('File size exceeds the supported size (10 MB). Please use the S3, ABFS or HDFS browser to upload files.') }");
         } else {
           $.ajax({
             url:"/indexer/api/indexer/upload_local_file",
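For reference, both size limits introduced by this commit are plain mebibyte values; a quick sanity check of the constants (illustrative arithmetic only):

# Illustrative arithmetic for the limits used in this commit.
assert 10 * 1024 * 1024 == 10485760   # 10 MB client-side file-size check in importer.mako
assert 64 * 1024 * 1024 == 67108864   # 64 MB for DATA_UPLOAD_MAX_MEMORY_SIZE and max_allowed_packet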