
HUE-3303 [core] PostgreSQL requires data update and alter table operations in separate transactions

Jenny Kim, 9 years ago
Parent commit: b3ea1ee
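
All four migration files below apply the same change: the duplicate cleanup and the following `db.create_unique()` call are each wrapped in their own explicit South transaction, because (per the commit message and the South docs linked in the hunks) PostgreSQL requires the data update and the ALTER TABLE to run in separate transactions. A minimal sketch of that pattern, assuming South 1.x on Python 2; the table and column names are copied from the 0013 diff and the cleanup body is elided, so this is not a drop-in replacement for the real migrations:

    from south.db import db
    from south.v2 import SchemaMigration


    class Migration(SchemaMigration):

        def forwards(self, orm):
            # Transaction 1: fix the data. This gets its own commit so a failure
            # here cannot interfere with the schema change below on PostgreSQL.
            try:
                db.start_transaction()
                # ... delete or renumber the duplicated rows here ...
                db.commit_transaction()
            except Exception, e:
                db.rollback_transaction()
                raise e

            # Transaction 2: apply the schema change on its own.
            try:
                db.start_transaction()
                db.create_unique(u'desktop_documenttag', ['owner_id', 'tag'])
                db.commit_transaction()
            except Exception, e:
                db.rollback_transaction()
                raise e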

+ 32 - 17
desktop/core/src/desktop/migrations/0013_auto__add_unique_documenttag_owner_tag.py

@@ -14,28 +14,43 @@ class Migration(SchemaMigration):
         # If there are duplicated document tags, we'll have an error when we
         # try to create this index. So to protect against that, we should
         # delete those documents before we create the index.
-        duplicated_records = DocumentTag.objects \
-            .values('owner_id', 'tag') \
-            .annotate(id_count=models.Count('id')) \
-            .filter(id_count__gt=1)
 
-        # Delete all but the first document.
-        for record in duplicated_records:
-            docs = DocumentTag.objects \
-                .values_list('id', flat=True) \
-                .filter(
-                    owner_id=record['owner_id'],
-                    tag=record['tag'],
-                )[1:]
+        # We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
+        # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
+        try:
+            db.start_transaction()
+            duplicated_records = DocumentTag.objects \
+                .values('owner_id', 'tag') \
+                .annotate(id_count=models.Count('id')) \
+                .filter(id_count__gt=1)
 
-            docs = list(docs)
+            # Delete all but the first document.
+            for record in duplicated_records:
+                docs = DocumentTag.objects \
+                    .values_list('id', flat=True) \
+                    .filter(
+                        owner_id=record['owner_id'],
+                        tag=record['tag'],
+                    )[1:]
 
-            logging.warn('Deleting tags %s' % docs)
+                docs = list(docs)
 
-            DocumentTag.objects.filter(id__in=docs).delete()
+                logging.warn('Deleting tags %s' % docs)
 
-        # Adding unique constraint on 'DocumentTag', fields ['owner', 'tag']
-        db.create_unique(u'desktop_documenttag', ['owner_id', 'tag'])
+                DocumentTag.objects.filter(id__in=docs).delete()
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
+
+        try:
+            db.start_transaction()
+            # Adding unique constraint on 'DocumentTag', fields ['owner', 'tag']
+            db.create_unique(u'desktop_documenttag', ['owner_id', 'tag'])
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
 
 
     def backwards(self, orm):

+ 32 - 17
desktop/core/src/desktop/migrations/0014_auto__add_unique_document_content_type_object_id.py

@@ -14,28 +14,43 @@ class Migration(SchemaMigration):
         # If there are duplicated documents, we'll have an error when we try to
         # create this index. So to protect against that, we should delete those
         # documents before we create the index.
-        duplicated_records = Document.objects \
-            .values('content_type_id', 'object_id') \
-            .annotate(id_count=models.Count('id')) \
-            .filter(id_count__gt=1)
 
-        # Delete all but the first document.
-        for record in duplicated_records:
-            docs = Document.objects \
-                .values_list('id', flat=True) \
-                .filter(
-                    content_type_id=record['content_type_id'],
-                    object_id=record['object_id'],
-                )[1:]
+        # We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
+        # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
+        try:
+            db.start_transaction()
+            duplicated_records = Document.objects \
+                .values('content_type_id', 'object_id') \
+                .annotate(id_count=models.Count('id')) \
+                .filter(id_count__gt=1)
 
-            docs = list(docs)
+            # Delete all but the first document.
+            for record in duplicated_records:
+                docs = Document.objects \
+                    .values_list('id', flat=True) \
+                    .filter(
+                        content_type_id=record['content_type_id'],
+                        object_id=record['object_id'],
+                    )[1:]
 
-            logging.warn('Deleting documents %s' % docs)
+                docs = list(docs)
 
-            Document.objects.filter(id__in=docs).delete()
+                logging.warn('Deleting documents %s' % docs)
 
-        # Adding unique constraint on 'Document', fields ['content_type', 'object_id']
-        db.create_unique(u'desktop_document', ['content_type_id', 'object_id'])
+                Document.objects.filter(id__in=docs).delete()
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
+
+        try:
+            db.start_transaction()
+            # Adding unique constraint on 'Document', fields ['content_type', 'object_id']
+            db.create_unique(u'desktop_document', ['content_type_id', 'object_id'])
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
 
 
     def backwards(self, orm):

+ 32 - 17
desktop/core/src/desktop/migrations/0015_auto__add_unique_documentpermission_doc_perms.py

@@ -14,28 +14,43 @@ class Migration(SchemaMigration):
         # If there are duplicated document permissions, we'll have an error
         # when we try to create this index. So to protect against that, we
         # should delete those documents before we create the index.
-        duplicated_records = DocumentPermission.objects \
-            .values('doc_id', 'perms') \
-            .annotate(id_count=models.Count('id')) \
-            .filter(id_count__gt=1)
 
-        # Delete all but the first document.
-        for record in duplicated_records:
-            docs = DocumentPermission.objects \
-                .values_list('id', flat=True) \
-                .filter(
-                    doc_id=record['doc_id'],
-                    perms=record['perms'],
-                )[1:]
+        # We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
+        # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
+        try:
+            db.start_transaction()
+            duplicated_records = DocumentPermission.objects \
+                .values('doc_id', 'perms') \
+                .annotate(id_count=models.Count('id')) \
+                .filter(id_count__gt=1)
 
-            docs = list(docs)
+            # Delete all but the first document.
+            for record in duplicated_records:
+                docs = DocumentPermission.objects \
+                    .values_list('id', flat=True) \
+                    .filter(
+                        doc_id=record['doc_id'],
+                        perms=record['perms'],
+                    )[1:]
 
-            logging.warn('Deleting permissions %s' % docs)
+                docs = list(docs)
 
-            DocumentPermission.objects.filter(id__in=docs).delete()
+                logging.warn('Deleting permissions %s' % docs)
 
-        # Adding unique constraint on 'DocumentPermission', fields ['doc', 'perms']
-        db.create_unique(u'desktop_documentpermission', ['doc_id', 'perms'])
+                DocumentPermission.objects.filter(id__in=docs).delete()
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
+
+        try:
+            db.start_transaction()
+            # Adding unique constraint on 'DocumentPermission', fields ['doc', 'perms']
+            db.create_unique(u'desktop_documentpermission', ['doc_id', 'perms'])
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
 
 
     def backwards(self, orm):

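To see whether a given database would even hit the duplicate-cleanup branch before applying the first three migrations, the same GROUP BY/HAVING queries can be run by hand. A hedged sketch, assuming a Hue checkout where `build/env/bin/hue shell` opens a Django shell and the models are importable from `desktop.models`; the queries themselves mirror the hunks above:

    # Run inside a Django shell (e.g. `build/env/bin/hue shell`); the model import
    # path and the shell command are assumptions, the queries copy the migrations.
    from django.db import models
    from desktop.models import DocumentTag, Document, DocumentPermission

    def dupes(qs, *fields):
        """Return value dicts for rows that would violate the new unique index."""
        return list(qs.values(*fields)
                      .annotate(id_count=models.Count('id'))
                      .filter(id_count__gt=1))

    print(dupes(DocumentTag.objects, 'owner_id', 'tag'))
    print(dupes(Document.objects, 'content_type_id', 'object_id'))
    print(dupes(DocumentPermission.objects, 'doc_id', 'perms'))
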
+ 56 - 41
desktop/core/src/desktop/migrations/0016_auto__add_unique_document2_uuid_version_is_history.py

@@ -27,51 +27,66 @@ class Migration(SchemaMigration):
         #
         # Note we reset the `order_by` to make sure that if a default ordering
         # is ever added, it's never included in the group by.
-        duplicated_records = Document2.objects \
-            .values('uuid', 'version', 'is_history') \
-            .annotate(id_count=models.Count('id')) \
-            .filter(id_count__gt=1) \
-            .order_by()
-
-        for record in duplicated_records:
-            # We found some duplicates, now actually fetch the duplicated
-            # documents for these values.
-            docs = Document2.objects \
-                .filter(
+
+        # We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
+        # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
+        try:
+            db.start_transaction()
+            duplicated_records = Document2.objects \
+                .values('uuid', 'version', 'is_history') \
+                .annotate(id_count=models.Count('id')) \
+                .filter(id_count__gt=1) \
+                .order_by()
+
+            for record in duplicated_records:
+                # We found some duplicates, now actually fetch the duplicated
+                # documents for these values.
+                docs = Document2.objects \
+                    .filter(
                     uuid=record['uuid'],
                     version=record['version'],
                     is_history=record['is_history'],
                 ) \
-                .order_by('-version', '-last_modified')
-
-            # Grab all but the first document, which we're preserving as the
-            # current version.
-            docs = list(docs[1:])
-
-            logging.warn('Modifying version number of these duplicated docs %s' %
-                [doc.id for doc in docs])
-
-            # Update all these document's version numbers. To be safe, we want
-            # to give them a unique negative number so there's no collision and
-            # also so they're easily discoverable.
-            version = Document2.objects \
-                .values_list('version') \
-                .filter(uuid=record['uuid']) \
-                .earliest('version')[0]
-
-            version = min(0, version) - 1
-
-            # Finally, update the version numbers.
-            for doc in docs:
-              doc.version = version
-
-              if not db.dry_run:
-                doc.save()
-
-              version -= 1
-
-        # Adding unique constraint on 'Document2', fields ['uuid', 'version', 'is_history']
-        db.create_unique(u'desktop_document2', ['uuid', 'version', 'is_history'])
+                    .order_by('-version', '-last_modified')
+
+                # Grab all but the first document, which we're preserving as the
+                # current version.
+                docs = list(docs[1:])
+
+                logging.warn('Modifying version number of these duplicated docs %s' %
+                             [doc.id for doc in docs])
+
+                # Update all these document's version numbers. To be safe, we want
+                # to give them a unique negative number so there's no collision and
+                # also so they're easily discoverable.
+                version = Document2.objects \
+                    .values_list('version') \
+                    .filter(uuid=record['uuid']) \
+                    .earliest('version')[0]
+
+                version = min(0, version) - 1
+
+                # Finally, update the version numbers.
+                for doc in docs:
+                    doc.version = version
+
+                    if not db.dry_run:
+                        doc.save()
+
+                    version -= 1
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
+
+        try:
+            db.start_transaction()
+            # Adding unique constraint on 'Document2', fields ['uuid', 'version', 'is_history']
+            db.create_unique(u'desktop_document2', ['uuid', 'version', 'is_history'])
+            db.commit_transaction()
+        except Exception, e:
+            db.rollback_transaction()
+            raise e
 
 
     def backwards(self, orm):
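
The 0016 change differs from the previous three: duplicated Document2 rows are renumbered with unique negative version numbers instead of being deleted, so they stay discoverable after the unique constraint is added. A toy, pure-Python sketch of that renumbering; the document ids and versions are made up, only the arithmetic mirrors the hunk above:

    # Hypothetical data: three Document2 rows share (uuid, version=1, is_history=False);
    # ids are listed newest first, matching .order_by('-version', '-last_modified').
    def renumber(duplicate_ids, earliest_version):
        """Return {doc_id: new_version}; the first (newest) row keeps its version."""
        new_versions = {}
        version = min(0, earliest_version) - 1   # start below zero so nothing collides
        for doc_id in duplicate_ids[1:]:
            new_versions[doc_id] = version
            version -= 1
        return new_versions

    print(renumber([10, 11, 12], earliest_version=1))   # -> {11: -1, 12: -2}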