
[Importer] Add hasHeader, file_url and a unit test to improve the code

ayush.goyal 4 years ago
parent
commit
6945f585fb

+ 30 - 27
desktop/libs/indexer/src/indexer/api3.py

@@ -25,6 +25,7 @@ import json
 import logging
 import urllib.error
 import sys
+import uuid
 
 from django.urls import reverse
 from django.views.decorators.http import require_POST
@@ -43,6 +44,7 @@ from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import smart_unicode
 from desktop.lib.python_util import check_encoding
 from desktop.models import Document2
+from desktop.settings import BASE_DIR
 from kafka.kafka_api import get_topics, get_topic_data
 from notebook.connectors.base import get_api, Notebook
 from notebook.decorators import api_error_handler
@@ -88,7 +90,6 @@ try:
 except ImportError as e:
   LOG.warning('Solr Search interface is not enabled')
 
-csv_data = []
 
 def _escape_white_space_characters(s, inverse=False):
   MAPPINGS = {
@@ -228,30 +229,35 @@ def guess_field_types(request):
   file_format = json.loads(request.POST.get('fileFormat', '{}'))
 
   if file_format['inputFormat'] == 'localfile':
+    path = file_format['path']
 
-    if len(csv_data) <= 5:
-      sample = csv_data[1:]
-    else:
-      sample = csv_data[1:5]
+    with open(BASE_DIR + path, 'r') as local_file:
 
-    column_row = csv_data[0]
+      reader = csv.reader(local_file)
+      csv_data = list(reader)
 
-    field_type_guesses = []
-    for count, col in enumerate(column_row):
-      column_samples = [sample_row[count] for sample_row in sample if len(sample_row) > count]
+      if file_format['format']['hasHeader']:
+        sample = csv_data[1:5]
+        column_row = csv_data[0]
+      else:
+        sample = csv_data[:4]
+        column_row = ['field_' + str(count + 1) for count, col in enumerate(sample[0])]
 
-      field_type_guess = guess_field_type_from_samples(column_samples)
-      field_type_guesses.append(field_type_guess)
+      field_type_guesses = []
+      for count, col in enumerate(column_row):
+        column_samples = [sample_row[count] for sample_row in sample if len(sample_row) > count]
+        field_type_guess = guess_field_type_from_samples(column_samples)
+        field_type_guesses.append(field_type_guess)
 
-    columns = [
-      Field(column_row[count], field_type_guesses[count]).to_dict()
-      for count, col in enumerate(column_row)
-    ]
+      columns = [
+        Field(column_row[count], field_type_guesses[count]).to_dict()
+        for count, col in enumerate(column_row)
+      ]
 
-    format_ = {
-      'columns': columns,
-      'sample': sample
-    }
+      format_ = {
+        'columns': columns,
+        'sample': sample
+      }
 
   elif file_format['inputFormat'] == 'file':
     indexer = MorphlineIndexer(request.user, request.fs)
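
When the CSV lacks a header row, the new branch synthesizes column names from the width of the first sample row. A minimal sketch of the sampling logic, using a made-up two-row CSV and leaving out Hue's own guess_field_type_from_samples() call:

    import csv, io

    def sample_csv(csv_text, has_header):
        # Mirrors the new guess_field_types() branch: read the whole file,
        # take up to four sample rows, and either use the header row or
        # synthesize field_1, field_2, ... names from the first data row.
        csv_data = list(csv.reader(io.StringIO(csv_text)))
        if has_header:
            sample, column_row = csv_data[1:5], csv_data[0]
        else:
            sample = csv_data[:4]
            column_row = ['field_' + str(i + 1) for i, _ in enumerate(sample[0])]
        return column_row, sample

    print(sample_csv("a,b\n1,2\n3,4\n", has_header=False))
    # (['field_1', 'field_2'], [['a', 'b'], ['1', '2'], ['3', '4']])
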
@@ -539,7 +545,6 @@ def importer_submit(request):
         request,
         source,
         destination,
-        csv_data[1:],
         start_time
       )
     else:
@@ -733,12 +738,10 @@ def upload_local_file(request):
 
   upload_file = request.FILES['inputfile']
   fs = FileSystemStorage()
-  name = fs.save(upload_file.name, upload_file)
-  reader = csv.reader(decode_utf8(upload_file))
-
-  csv_data.clear()
+  username = request.user.username
+  filename = "%s_%s.%s" % (username, uuid.uuid4(), 'csv')
+  name = fs.save(filename, upload_file)
 
-  for row in reader:
-    csv_data.append(row)
+  local_file_url = fs.url(name)
 
-  return JsonResponse({'file_url': fs.url(name)})
+  return JsonResponse({'local_file_url': local_file_url})
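
The upload endpoint no longer parses the file into the shared csv_data module global; it saves the upload through Django's FileSystemStorage under a per-user, collision-free name and returns its URL, so each request later re-reads its own file. A minimal sketch of the naming scheme (the username value is made up):

    import uuid

    username = 'ayush'
    # "<user>_<uuid4>.csv" keeps concurrent uploads, or repeated uploads of
    # the same file, from overwriting one another in the storage directory.
    filename = "%s_%s.%s" % (username, uuid.uuid4(), 'csv')
    print(filename)  # e.g. ayush_8f0e...csv

Dropping the module-level csv_data list also removes a race between concurrent importer sessions, since parsed rows no longer leak across requests.
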

+ 19 - 10
desktop/libs/indexer/src/indexer/indexers/sql.py

@@ -17,6 +17,7 @@
 from future import standard_library
 standard_library.install_aliases()
 from builtins import object
+import csv
 import logging
 import sys
 import urllib.request, urllib.error
@@ -33,6 +34,7 @@ from useradmin.models import User
 
 from desktop.lib import django_mako
 from desktop.lib.exceptions_renderable import PopupException
+from desktop.settings import BASE_DIR
 
 if sys.version_info[0] > 2:
   from urllib.parse import urlparse, unquote as urllib_unquote
@@ -266,7 +268,7 @@ class SQLIndexer(object):
         is_task=True
     )
 
-  def create_table_from_local_file(self, source, destination, csv_data, start_time=-1):
+  def create_table_from_local_file(self, source, destination, start_time=-1):
     if '.' in destination['name']:
       database, table_name = destination['name'].split('.', 1)
     else:
@@ -292,13 +294,20 @@ class SQLIndexer(object):
           'primary_keys': ', '.join(destination.get('indexerPrimaryKey'))
       }
 
-    for csv_row in csv_data:
-      sql += '''
-          INSERT INTO %(table_name)s VALUES %(csv_row)s;
-          ''' % {
-                  'table_name': table_name,
-                  'csv_row': tuple(csv_row)
-                }
+    path = source['path']
+    if path:
+      with open(BASE_DIR + path, 'r') as local_file:
+        reader = csv.reader(local_file)
+        list_of_tuples = list(map(tuple, reader))
+
+        if source['format']['hasHeader']:
+          list_of_tuples = list_of_tuples[1:]
+
+        csv_rows = str(list_of_tuples)[1:-1]
+        sql += '''INSERT INTO %(table_name)s VALUES %(csv_rows)s;''' % {
+            'table_name': table_name,
+            'csv_rows': csv_rows
+          }
 
     on_success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': final_table_name}) + \
         '?source_type=' + source_type
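
The per-row INSERT loop is replaced by one multi-row INSERT whose VALUES clause leans on Python's tuple repr. A sketch with made-up data showing what str(list_of_tuples)[1:-1] produces:

    import csv, io

    reader = csv.reader(io.StringIO("date,hour\n2021-01-01,5\n2021-01-02,6\n"))
    list_of_tuples = list(map(tuple, reader))[1:]  # hasHeader: drop the header row
    csv_rows = str(list_of_tuples)[1:-1]           # strip the enclosing [ ]
    print('INSERT INTO flights VALUES %s;' % csv_rows)
    # INSERT INTO flights VALUES ('2021-01-01', '5'), ('2021-01-02', '6');

Note that every value is rendered as a quoted Python string and nothing is escaped, so a field containing a quote would break the generated statement; the repr trick is compact but fragile.
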
@@ -358,8 +367,8 @@ def _create_table(request, source, destination, start_time=-1):
   else:
     return notebook.execute(request, batch=False)
 
-def _create_table_from_local(request, source, destination, csv_data, start_time=-1):
-  notebook = SQLIndexer(user=request.user, fs=request.fs).create_table_from_local_file(source, destination, csv_data, start_time)
+def _create_table_from_local(request, source, destination, start_time=-1):
+  notebook = SQLIndexer(user=request.user, fs=request.fs).create_table_from_local_file(source, destination, start_time)
 
   if request.POST.get('show_command'):
     return {'status': 0, 'commands': notebook.get_str()}

+ 49 - 0
desktop/libs/indexer/src/indexer/indexers/sql_tests.py

@@ -603,3 +603,52 @@ def test_create_ddl_with_abfs():
   finally:
     finish()
   assert_true(u"\'abfs://my-data@yingstorage.dfs.core.windows.net/test_data/cars.csv\'" in sql)
+
+
+def test_create_table_from_local():
+  source = {
+    'path': '',
+  }
+  destination = {
+    'name': 'default.test1',
+    'columns': [
+      {'name': 'date', 'type': 'timestamp'},
+      {'name': 'hour', 'type': 'bigint'},
+      {'name': 'minute', 'type': 'bigint'},
+      {'name': 'dep', 'type': 'bigint'},
+      {'name': 'arr', 'type': 'bigint'},
+      {'name': 'dep_delay', 'type': 'bigint'},
+      {'name': 'arr_delay', 'type': 'bigint'},
+      {'name': 'carrier', 'type': 'string'},
+      {'name': 'flight', 'type': 'bigint'},
+      {'name': 'dest', 'type': 'string'},
+      {'name': 'plane', 'type': 'string'},
+      {'name': 'cancelled', 'type': 'boolean'},
+      {'name': 'time', 'type': 'bigint'},
+      {'name': 'dist', 'type': 'bigint'},
+    ],
+    'indexerPrimaryKey': []
+  }
+  request = MockRequest(fs=MockFs())
+  sql = SQLIndexer(user=request.user, fs=request.fs).create_table_from_local_file(source, destination).get_str()
+
+  statement = '''USE default;
+
+CREATE TABLE IF NOT EXISTS test1 (
+  `date` timestamp,
+  `hour` bigint,
+  `minute` bigint,
+  `dep` bigint,
+  `arr` bigint,
+  `dep_delay` bigint,
+  `arr_delay` bigint,
+  `carrier` string,
+  `flight` bigint,
+  `dest` string,
+  `plane` string,
+  `cancelled` boolean,
+  `time` bigint,
+  `dist` bigint
+);'''
+
+  assert_equal(statement, sql)
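
Since the test passes 'path': '', the if path: branch in create_table_from_local_file() is skipped, so only the CREATE TABLE DDL is asserted; a test with a non-empty path would additionally see the multi-row INSERT built from the file's contents.
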

+ 11 - 17
desktop/libs/indexer/src/indexer/templates/importer.mako

@@ -3161,23 +3161,17 @@ ${ commonheader(_("Importer"), "indexer", user, request, "60px") | n,unicode }
           var files = $('#inputfile')[0].files[0];
           fd.append('inputfile', files);
           var file_size = files.size;
-          if (file_size < 30*1000) {
-            $.ajax({
-              url:"/indexer/api/indexer/upload_local_file",
-              type: 'post',
-              data: fd,
-              contentType:false,
-              cache: false,
-              processData:false,
-              success:function (response) {
-                viewModel.createWizard.source.path(response['file_url']);
-              }
-            
-            });
-          }
-          else {
-            alert("As of now File size must be less than 30KB");
-          }
+          $.ajax({
+            url:"/indexer/api/indexer/upload_local_file",
+            type: 'post',
+            data: fd,
+            contentType:false,
+            cache: false,
+            processData:false,
+            success:function (response) {
+              viewModel.createWizard.source.path(response['local_file_url']);
+            }
+          });
         };
 
       $('.importer-droppable').droppable({
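
With uploads now streamed to disk through FileSystemStorage instead of being parsed into memory, the client-side 30 KB cap and its alert are dropped, and the success callback reads the renamed local_file_url key from the JSON response.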