
[indexer] Respect newlines in quotes

Abraham Elmahrek 11 years ago
parent commit 85e7ae5

+ 0 - 3
desktop/libs/indexer/src/indexer/controller.py

@@ -172,10 +172,7 @@ class CollectionManagerController(object):
           data = json.dumps([value for value in utils.field_values_from_log(fh, fields)])
           content_type = 'json'
         elif data_type == 'separated':
-          # 'data' first line should be headers.
           data = json.dumps([value for value in utils.field_values_from_separated_file(fh, kwargs.get('separator', ','), kwargs.get('quote_character', '"'), fields)], indent=2)
-          with open('/tmp/output.json', 'w') as f:
-            f.write(data)
           content_type = 'json'
         else:
           raise PopupException(_('Could not update index. Unknown type %s') % data_type)
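
The comment removed here ("'data' first line should be headers") described a contract that no longer holds: after this commit the header row is consumed inside field_values_from_separated_file itself, and the open('/tmp/output.json', ...) block was a leftover debugging dump. A rough sketch of what the remaining call produces, written in the module's Python 2 style; the payload and field list are hypothetical, and it assumes desktop/libs/indexer/src is on the Python path:

    import json
    import StringIO

    from indexer import utils

    # Hypothetical payload: one quoted field contains a newline.
    fh = StringIO.StringIO('id,comment\n1,"spans\ntwo lines"\n')
    fields = [{'name': 'id', 'type': 'string'},
              {'name': 'comment', 'type': 'text_general'}]

    # Same call shape as the controller; the header row comes from the stream itself.
    data = json.dumps(
        [value for value in utils.field_values_from_separated_file(fh, ',', '"', fields)],
        indent=2)
    # data is roughly: [{"id": "1", "comment": "spans\ntwo lines"}]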

+ 1 - 5
desktop/libs/indexer/src/indexer/templates/collections.mako

@@ -57,7 +57,7 @@ ${ commonheader(_('Collection Manager'), "indexer", user, "29px") | n,unicode }
 
 
 <div class="search-bar" style="height: 30px">
-  <h4><a href="#manager">${_('Collection Manager')}</a></h4>
+  <h4><a href="#manage">${_('Collection Manager')}</a></h4>
 </div>
 
 
@@ -493,10 +493,6 @@ vm.edit.sourceType.subscribe(function(value) {
   }
 });
 
-// $(window).on('hashchange', function() {
-//   vm.breadcrum(window.location.hash.substring(1));
-// });
-
 routie({
   "": function() {
     vm.breadcrum("manage");

+ 26 - 17
desktop/libs/indexer/src/indexer/utils.py

@@ -157,24 +157,35 @@ def field_values_from_separated_file(fh, delimiter, quote_character, fields=None
   else:
     boolean_fields = [field['name'] for field in fields if field['type'] in BOOLEAN_FIELD_TYPES]
 
-  csvfile = StringIO.StringIO()
   content = fh.read()
-  is_first = True
+  headers = None
   while content:
     last_newline = content.rfind('\n')
     if last_newline > -1:
-      if not is_first:
-        csvfile.write('\n')
-      csvfile.write(content[:last_newline])
-      content = content[last_newline+1:]
+      # If new line is quoted, skip this iteration and try again.
+      if content[:last_newline].count('"') % 2 != 0:
+        content += fh.read()
+        continue
+      else:
+        if headers is None:
+          csvfile = StringIO.StringIO(content[:last_newline])
+        else:
+          csvfile = StringIO.StringIO('\n' + content[:last_newline])
+        content = content[last_newline+1:] + fh.read()
     else:
-      if not is_first:
-        csvfile.write('\n')
-      csvfile.write(content[:])
-      content = ""
-    is_first = False
-    csvfile.seek(0)
-    reader = csv.DictReader(csvfile, delimiter=smart_str(delimiter), quotechar=smart_str(quote_character))
+      if headers is None:
+        csvfile = StringIO.StringIO(content)
+      else:
+        csvfile = StringIO.StringIO('\n' + content)
+      content = fh.read()
+
+    # First line is headers
+    if headers is None:
+      headers = next(csv.reader(csvfile, delimiter=smart_str(delimiter), quotechar=smart_str(quote_character)))
+
+      # Use dict reader
+    reader = csv.DictReader(csvfile, fieldnames=headers, delimiter=smart_str(delimiter), quotechar=smart_str(quote_character))
+
     remove_keys = None
     for row in reader:
       # Remove keys that aren't in collection
@@ -191,7 +202,8 @@ def field_values_from_separated_file(fh, delimiter, quote_character, fields=None
       if timestamp_fields:
         for key in timestamp_fields:
           if key in row:
-            row[key] = parse(row[key]).astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+            dt = parse(row[key])
+            row[key] = dt.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
 
       # Parse decimal
       if decimal_fields:
@@ -212,9 +224,6 @@ def field_values_from_separated_file(fh, delimiter, quote_character, fields=None
             row[key] = str(row[key]).lower() == "true"
 
       yield row
-    
-    csvfile.truncate()
-    content += fh.read()
 
 
 def field_values_from_log(fh, fields=[ {'name': 'message', 'type': 'text_general'}, {'name': 'tdate', 'type': 'timestamp'} ]):
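
The substance of the change is in field_values_from_separated_file: instead of flushing the buffer at every newline, it only cuts at the last newline whose preceding text contains an even number of quote characters, so a newline inside a quoted field never splits a record; the header row is read once from the first balanced chunk and handed to csv.DictReader as fieldnames. Below is a self-contained sketch of the same idea in Python 3 (io.StringIO in place of the module's StringIO.StringIO); the name rows_from_separated_stream and the chunk_size parameter are illustrative, not Hue's API:

    import csv
    import io


    def rows_from_separated_stream(fh, delimiter=',', quote_character='"', chunk_size=8192):
        """Yield dict rows from a text stream without splitting quoted newlines."""
        headers = None
        buf = fh.read(chunk_size)
        while buf:
            last_newline = buf.rfind('\n')
            # The buffer is only safe to parse up to its last newline, and only
            # if that newline is not inside an open quoted field (odd quote count).
            unsafe = last_newline == -1 or buf[:last_newline].count(quote_character) % 2 != 0
            more = fh.read(chunk_size)
            if unsafe and more:
                buf += more                      # grow the buffer and look again
                continue
            if unsafe:                           # end of stream: parse what is left
                chunk, buf = buf, ''
            else:
                chunk, buf = buf[:last_newline], buf[last_newline + 1:] + more

            csvfile = io.StringIO(chunk)
            if headers is None:
                # The first physical record of the stream carries the field names.
                headers = next(csv.reader(csvfile, delimiter=delimiter, quotechar=quote_character))
            for row in csv.DictReader(csvfile, fieldnames=headers,
                                      delimiter=delimiter, quotechar=quote_character):
                yield row


    if __name__ == '__main__':
        data = io.StringIO('id,comment\n1,"line one\nline two"\n2,plain\n')
        for row in rows_from_separated_stream(data, chunk_size=16):
            print(row)
        # {'id': '1', 'comment': 'line one\nline two'}
        # {'id': '2', 'comment': 'plain'}

The end-of-stream check is a deliberate choice in the sketch: once the reader is exhausted it parses whatever remains, even if a quote was never closed, rather than polling an empty stream for more data.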