@@ -25,6 +25,7 @@ import json
 import logging
 import urllib.error
 import sys
+import uuid
 
 from django.urls import reverse
 from django.views.decorators.http import require_POST
@@ -43,6 +44,7 @@ from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.i18n import smart_unicode
 from desktop.lib.python_util import check_encoding
 from desktop.models import Document2
+from desktop.settings import BASE_DIR
 from kafka.kafka_api import get_topics, get_topic_data
 from notebook.connectors.base import get_api, Notebook
 from notebook.decorators import api_error_handler
@@ -88,7 +90,6 @@ try:
 except ImportError as e:
   LOG.warning('Solr Search interface is not enabled')
 
-csv_data = []
 
 def _escape_white_space_characters(s, inverse=False):
   MAPPINGS = {
@@ -228,30 +229,35 @@ def guess_field_types(request):
   file_format = json.loads(request.POST.get('fileFormat', '{}'))
 
   if file_format['inputFormat'] == 'localfile':
+    path = file_format['path']
 
-    if len(csv_data) <= 5:
-      sample = csv_data[1:]
-    else:
-      sample = csv_data[1:5]
+    with open(BASE_DIR + path, 'r') as local_file:
 
-    column_row = csv_data[0]
+      reader = csv.reader(local_file)
+      csv_data = list(reader)
 
-    field_type_guesses = []
-    for count, col in enumerate(column_row):
-      column_samples = [sample_row[count] for sample_row in sample if len(sample_row) > count]
+      if file_format['format']['hasHeader']:
+        sample = csv_data[1:5]
+        column_row = csv_data[0]
+      else:
+        sample = csv_data[:4]
+        column_row = ['field_' + str(count+1) for count, col in enumerate(sample[0])]
 
-      field_type_guess = guess_field_type_from_samples(column_samples)
-      field_type_guesses.append(field_type_guess)
+      field_type_guesses = []
+      for count, col in enumerate(column_row):
+        column_samples = [sample_row[count] for sample_row in sample if len(sample_row) > count]
+        field_type_guess = guess_field_type_from_samples(column_samples)
+        field_type_guesses.append(field_type_guess)
 
-    columns = [
-      Field(column_row[count], field_type_guesses[count]).to_dict()
-      for count, col in enumerate(column_row)
-    ]
+      columns = [
+        Field(column_row[count], field_type_guesses[count]).to_dict()
+        for count, col in enumerate(column_row)
+      ]
 
-    format_ = {
-      'columns': columns,
-      'sample': sample
-    }
+      format_ = {
+        'columns': columns,
+        'sample': sample
+      }
 
   elif file_format['inputFormat'] == 'file':
     indexer = MorphlineIndexer(request.user, request.fs)
@@ -539,7 +545,6 @@ def importer_submit(request):
       request,
       source,
       destination,
-      csv_data[1:],
       start_time
     )
   else:
@@ -733,12 +738,10 @@ def upload_local_file(request):
 
   upload_file = request.FILES['inputfile']
   fs = FileSystemStorage()
-  name = fs.save(upload_file.name, upload_file)
-  reader = csv.reader(decode_utf8(upload_file))
-
-  csv_data.clear()
+  username = request.user.username
+  filename = "%s_%s.%s" % (username, uuid.uuid4(), 'csv')
+  name = fs.save(filename, upload_file)
 
-  for row in reader:
-    csv_data.append(row)
+  local_file_url = fs.url(name)
 
-  return JsonResponse({'file_url': fs.url(name)})
+  return JsonResponse({'local_file_url': local_file_url})
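
The patch drops the module-level csv_data list (state shared across requests) in favour of saving each upload under a per-user, uuid-suffixed name and re-reading it from BASE_DIR + path when field types are guessed. For reference, a minimal standalone sketch of what the reworked localfile branch of guess_field_types() now does: read the uploaded CSV from disk, pick a small row sample, and guess a type per column. The guess_type callable and the plain column dicts below are assumed stand-ins for Hue's guess_field_type_from_samples() and Field(...).to_dict(); only the sampling and column-naming logic mirrors the patch.

# Standalone sketch, not the patched Hue code.
import csv

def sketch_guess_field_types(path, has_header, guess_type):
  # Read the whole uploaded CSV, as the patched branch does with BASE_DIR + path.
  with open(path, 'r') as local_file:
    csv_data = list(csv.reader(local_file))

  if has_header:
    sample = csv_data[1:5]      # up to four data rows after the header
    column_row = csv_data[0]    # header row supplies the column names
  else:
    sample = csv_data[:4]
    column_row = ['field_' + str(count+1) for count, col in enumerate(sample[0])]

  # One type guess per column, using only sample rows long enough to have that column.
  columns = []
  for count, col in enumerate(column_row):
    column_samples = [row[count] for row in sample if len(row) > count]
    columns.append({'name': col, 'type': guess_type(column_samples)})

  return {'columns': columns, 'sample': sample}

# Example (hypothetical path and trivial guesser):
# sketch_guess_field_types('/tmp/upload.csv', True, lambda values: 'string')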