
HUE-5659 [home] Ignore history dependencies when importing document from different cluster

The exported JSON file does not contain separate entries for history documents.
As a result, importing a document with history dependencies into a different
cluster fails because those entries are not present in the DB.
krish, 8 years ago
commit a874fa1
2 changed files with 43 additions and 4 deletions
  1. desktop/core/src/desktop/api2.py (+13, -4)
  2. desktop/core/src/desktop/tests_doc2.py (+30, -0)
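For context, each exported document is serialized with a `fields` dict whose `dependencies` entry is a list of `[uuid, version, is_history]` triples (visible in the hunk below). The following is an illustrative, hand-written payload, not a real export; the UUIDs and field values are hypothetical. It shows why a history dependency cannot be resolved on import: the history query is referenced by the workflow but has no entry of its own in the file.

```python
# Illustrative (hypothetical) excerpt of an exported documents payload.
# The workflow references a saved query and a history query, but only the
# saved query has its own entry -- history documents are not exported.
exported_documents = [
  {
    'fields': {
      'name': 'test.wf',
      'uuid': 'wf-uuid-1111',
      'version': 1,
      'is_history': False,
      'dependencies': [
        ['query1-uuid-2222', 1, False],  # saved query, exported below
        ['query2-uuid-3333', 1, True],   # history query, missing from this file
      ],
    },
  },
  {
    'fields': {
      'name': 'query1.sql',
      'uuid': 'query1-uuid-2222',
      'version': 1,
      'is_history': False,
      'dependencies': [],
    },
  },
]

# Before this commit, importing the payload on a cluster that has never seen
# 'query2-uuid-3333' raised "Cannot import document, dependency with UUID: ...
# not found." and the whole import failed.
```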

desktop/core/src/desktop/api2.py (+13, -4)

@@ -682,11 +682,20 @@ def _create_or_update_document_with_owner(doc, owner, uuids_map):
       doc['fields']['parent_directory'] = [home_dir.uuid, home_dir.version, home_dir.is_history]
 
   # Verify that dependencies exist, raise critical error if any dependency not found
+  # Ignore history dependencies
   if doc['fields']['dependencies']:
-    for uuid, version, is_history in doc['fields']['dependencies']:
-      if not uuid in uuids_map.keys() and \
-              not Document2.objects.filter(uuid=uuid, version=version, is_history=is_history).exists():
-        raise PopupException(_('Cannot import document, dependency with UUID: %s not found.') % uuid)
+    history_deps_list = []
+    for index, (uuid, version, is_history) in enumerate(doc['fields']['dependencies']):
+      if not uuid in uuids_map.keys() and not is_history and \
+              not Document2.objects.filter(uuid=uuid, version=version).exists():
+        raise PopupException(_('Cannot import document, dependency with UUID: %s not found.') % uuid)
+      elif is_history:
+        history_deps_list.insert(0, index) # Insert in decreasing order to facilitate delete
+        LOG.warn('History dependency with UUID: %s ignored while importing document %s' % (uuid, doc['fields']['name']))
+
+    # Delete history dependencies not found in the DB
+    for index in history_deps_list:
+      del doc['fields']['dependencies'][index]
 
   return doc
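The same dependency handling, condensed into a standalone sketch: the `Document2` ORM lookup is replaced by a plain set of known UUIDs, `print` stands in for `LOG.warn`, and the list is rebuilt rather than pruned by index in reverse order, which is equivalent for this purpose. Everything other than `uuids_map` and the `doc['fields']` keys is hypothetical.

```python
# Standalone sketch of the dependency handling added above: history
# dependencies are dropped with a warning, non-history dependencies must
# resolve either via uuids_map (documents in the same import) or via the
# target DB (stood in for here by a plain set of existing UUIDs).
def filter_dependencies(doc, uuids_map, existing_uuids):
  kept = []
  for uuid, version, is_history in doc['fields']['dependencies']:
    if is_history:
      print('History dependency with UUID: %s ignored while importing document %s'
            % (uuid, doc['fields']['name']))
      continue
    if uuid not in uuids_map and uuid not in existing_uuids:
      raise ValueError('Cannot import document, dependency with UUID: %s not found.' % uuid)
    kept.append([uuid, version, is_history])
  doc['fields']['dependencies'] = kept
  return doc


doc = {'fields': {'name': 'test.wf',
                  'dependencies': [['query1-uuid-2222', 1, False],
                                   ['query2-uuid-3333', 1, True]]}}
filter_dependencies(doc, uuids_map={'query1-uuid-2222': None}, existing_uuids=set())
assert doc['fields']['dependencies'] == [['query1-uuid-2222', 1, False]]
```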
 

desktop/core/src/desktop/tests_doc2.py (+30, -0)

@@ -993,3 +993,33 @@ class TestDocument2ImportExport(object):
     assert_equal(1, data['created_count'])
     assert_true('updated_count' in data)
     assert_equal(0, data['updated_count'])
+
+  def test_import_with_history_dependencies(self):
+    query1 = Document2.objects.create(name='query1.sql', type='query-hive', owner=self.user, data={},
+                                      parent_directory=self.home_dir)
+    query2 = Document2.objects.create(name='query2.sql', type='query-hive', owner=self.user, data={},
+                                      parent_directory=self.home_dir, is_history=True)
+    workflow = Document2.objects.create(name='test.wf', type='oozie-workflow2', owner=self.user, data={},
+                                        parent_directory=self.home_dir)
+    workflow.dependencies.add(query1)
+    workflow.dependencies.add(query2)
+
+    response = self.client.get('/desktop/api2/doc/export/', {'documents': json.dumps([workflow.id]), 'format': 'json'})
+    documents = response.content
+
+    # Delete the original documents from the DB so that the import re-creates them
+    query1.delete()
+    query2.delete()
+    workflow.delete()
+
+    response = self.client_not_me.post('/desktop/api2/doc/import/', {'documents': documents})
+    assert_true(Document2.objects.filter(name='query1.sql').exists())
+    assert_false(Document2.objects.filter(name='query2.sql').exists())
+
+    data = json.loads(response.content)
+    assert_true('count' in data)
+    assert_equal(2, data['count'])
+    assert_true('created_count' in data)
+    assert_equal(2, data['created_count'])
+    assert_true('updated_count' in data)
+    assert_equal(0, data['updated_count'])
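A possible follow-up check, not part of the commit, would verify that the re-imported workflow carries only the non-history dependency. This is a sketch that assumes the imported copy keeps the name `test.wf` and that `Document2.dependencies` is the same M2M relation used in the setup above.

```python
# Hypothetical extra assertions, continuing the test above: the imported
# workflow should exist and reference only the non-history query.
imported_wf = Document2.objects.get(name='test.wf')
assert_equal(1, imported_wf.dependencies.count())
assert_equal('query1.sql', imported_wf.dependencies.get().name)
```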