[desktop] South doesn't like SQLite using transaction.atomic

This is because SQLite doesn't support atomic schema changes, so
South throws an error if you try to use a transaction.
Erick Tryzelaar 10 years ago
parent
commit
0ef8cd2649
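
For context (not part of this commit): the fix below simply drops the atomic() wrapper everywhere. A minimal sketch of an alternative that keeps the wrapper on backends that can handle schema changes inside a transaction; maybe_atomic is a hypothetical helper, while connection.vendor and transaction.atomic are standard Django APIs:

from contextlib import contextmanager

from django.db import connection, transaction

@contextmanager
def maybe_atomic():
    # Per the commit message, SQLite can't combine South's schema changes
    # with an open transaction, so fall back to plain autocommit there.
    if connection.vendor == 'sqlite':
        yield
    else:
        with transaction.atomic():
            yield

Each migration below could then open "with maybe_atomic():" instead of removing the block entirely; this commit takes the simpler route of dropping it.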

+ 15 - 16
desktop/core/src/desktop/migrations/0013_auto__add_unique_documenttag_owner_tag.py

@@ -14,26 +14,25 @@ class Migration(SchemaMigration):
         # If there are duplicated document tags, we'll have an error when we
         # try to create this index. So to protect against that, we should
         # delete those documents before we create the index.
-        with transaction.atomic():
-            duplicated_records = DocumentTag.objects \
-                .values('owner_id', 'tag') \
-                .annotate(id_count=models.Count('id')) \
-                .filter(id_count__gt=1)
+        duplicated_records = DocumentTag.objects \
+            .values('owner_id', 'tag') \
+            .annotate(id_count=models.Count('id')) \
+            .filter(id_count__gt=1)
 
-            # Delete all but the first document.
-            for record in duplicated_records:
-                docs = DocumentTag.objects \
-                    .values_list('id', flat=True) \
-                    .filter(
-                        owner_id=record['owner_id'],
-                        tag=record['tag'],
-                    )[1:]
+        # Delete all but the first document.
+        for record in duplicated_records:
+            docs = DocumentTag.objects \
+                .values_list('id', flat=True) \
+                .filter(
+                    owner_id=record['owner_id'],
+                    tag=record['tag'],
+                )[1:]
 
-                docs = list(docs)
+            docs = list(docs)
 
-                logging.warn('Deleting tags %s' % docs)
+            logging.warn('Deleting tags %s' % docs)
 
-                DocumentTag.objects.filter(id__in=docs).delete()
+            DocumentTag.objects.filter(id__in=docs).delete()
 
         # Adding unique constraint on 'DocumentTag', fields ['owner', 'tag']
         db.create_unique(u'desktop_documenttag', ['owner_id', 'tag'])
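
One detail each of the three deletion hunks preserves: the sliced queryset is materialized with list(docs) before the delete. A standalone sketch of the same keep-the-first-row pattern showing why (the DocumentTag import path is assumed from this repo's layout):

from django.db import models
from desktop.models import DocumentTag  # import path assumed from this repo

duplicated = (DocumentTag.objects
              .values('owner_id', 'tag')
              .annotate(id_count=models.Count('id'))
              .filter(id_count__gt=1))

for record in duplicated:
    # The [1:] slice keeps the queryset lazy; materializing it with list()
    # means the DELETE below receives a literal id list rather than a
    # subquery reading the very table it deletes from (which some backends,
    # e.g. MySQL, reject).
    ids = list(DocumentTag.objects
               .values_list('id', flat=True)
               .filter(owner_id=record['owner_id'], tag=record['tag'])[1:])
    DocumentTag.objects.filter(id__in=ids).delete()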

+ 15 - 16
desktop/core/src/desktop/migrations/0014_auto__add_unique_document_content_type_object_id.py

@@ -14,26 +14,25 @@ class Migration(SchemaMigration):
         # If there are duplicated documents, we'll have an error when we try to
         # create this index. So to protect against that, we should delete those
         # documents before we create the index.
-        with transaction.atomic():
-            duplicated_records = Document.objects \
-                .values('content_type_id', 'object_id') \
-                .annotate(id_count=models.Count('id')) \
-                .filter(id_count__gt=1)
+        duplicated_records = Document.objects \
+            .values('content_type_id', 'object_id') \
+            .annotate(id_count=models.Count('id')) \
+            .filter(id_count__gt=1)
 
-            # Delete all but the first document.
-            for record in duplicated_records:
-                docs = Document.objects \
-                    .values_list('id', flat=True) \
-                    .filter(
-                        content_type_id=record['content_type_id'],
-                        object_id=record['object_id'],
-                    )[1:]
+        # Delete all but the first document.
+        for record in duplicated_records:
+            docs = Document.objects \
+                .values_list('id', flat=True) \
+                .filter(
+                    content_type_id=record['content_type_id'],
+                    object_id=record['object_id'],
+                )[1:]
 
-                docs = list(docs)
+            docs = list(docs)
 
-                logging.warn('Deleting documents %s' % docs)
+            logging.warn('Deleting documents %s' % docs)
 
-                Document.objects.filter(id__in=docs).delete()
+            Document.objects.filter(id__in=docs).delete()
 
         # Adding unique constraint on 'Document', fields ['content_type', 'object_id']
         db.create_unique(u'desktop_document', ['content_type_id', 'object_id'])

+ 15 - 16
desktop/core/src/desktop/migrations/0015_auto__add_unique_documentpermission_doc_perms.py

@@ -14,26 +14,25 @@ class Migration(SchemaMigration):
         # If there are duplicated document permissions, we'll have an error
         # when we try to create this index. So to protect against that, we
         # should delete those documents before we create the index.
-        with transaction.atomic():
-            duplicated_records = DocumentPermission.objects \
-                .values('doc_id', 'perms') \
-                .annotate(id_count=models.Count('id')) \
-                .filter(id_count__gt=1)
+        duplicated_records = DocumentPermission.objects \
+            .values('doc_id', 'perms') \
+            .annotate(id_count=models.Count('id')) \
+            .filter(id_count__gt=1)
 
-            # Delete all but the first document.
-            for record in duplicated_records:
-                docs = DocumentPermission.objects \
-                    .values_list('id', flat=True) \
-                    .filter(
-                        doc_id=record['doc_id'],
-                        perms=record['perms'],
-                    )[1:]
+        # Delete all but the first document.
+        for record in duplicated_records:
+            docs = DocumentPermission.objects \
+                .values_list('id', flat=True) \
+                .filter(
+                    doc_id=record['doc_id'],
+                    perms=record['perms'],
+                )[1:]
 
-                docs = list(docs)
+            docs = list(docs)
 
-                logging.warn('Deleting permissions %s' % docs)
+            logging.warn('Deleting permissions %s' % docs)
 
-                DocumentPermission.objects.filter(id__in=docs).delete()
+            DocumentPermission.objects.filter(id__in=docs).delete()
 
         # Adding unique constraint on 'DocumentPermission', fields ['doc', 'perms']
         db.create_unique(u'desktop_documentpermission', ['doc_id', 'perms'])

+ 20 - 21
desktop/core/src/desktop/migrations/0016_auto__add_unique_document2_uuid_version_is_history.py

@@ -14,31 +14,30 @@ class Migration(SchemaMigration):
         # As opposed to Document1, we can't just delete Document2 documents if
         # there is a duplication because it actually holds data. So instead
         # we'll just find duplications and emit a better error message.
-        with transaction.atomic():
-            duplicated_records = Document2.objects \
-                .values('uuid', 'version', 'is_history') \
-                .annotate(id_count=models.Count('id')) \
-                .filter(id_count__gt=1)
+        duplicated_records = Document2.objects \
+            .values('uuid', 'version', 'is_history') \
+            .annotate(id_count=models.Count('id')) \
+            .filter(id_count__gt=1)
 
-            duplicated_records = list(duplicated_records)
-            duplicated_ids = []
+        duplicated_records = list(duplicated_records)
+        duplicated_ids = []
 
-            for record in duplicated_records:
-                docs = Document2.objects \
-                    .values_list('id', flat=True) \
-                    .filter(
-                        uuid=record['uuid'],
-                        version=record['version'],
-                        is_history=record['is_history'],
-                    )
+        for record in duplicated_records:
+            docs = Document2.objects \
+                .values_list('id', flat=True) \
+                .filter(
+                    uuid=record['uuid'],
+                    version=record['version'],
+                    is_history=record['is_history'],
+                )
 
-                duplicated_ids.extend(docs)
+            duplicated_ids.extend(docs)
 
-            if duplicated_records:
-                msg = 'Found duplicated Document2 records! %s. ' \
-                    'This will require manual merging of the records' % duplicated_ids
-                logging.error(msg)
-                raise RuntimeError(msg)
+        if duplicated_records:
+            msg = 'Found duplicated Document2 records! %s. ' \
+                'This will require manual merging of the records' % duplicated_ids
+            logging.error(msg)
+            raise RuntimeError(msg)
 
         # Adding unique constraint on 'Document2', fields ['uuid', 'version', 'is_history']
         db.create_unique(u'desktop_document2', ['uuid', 'version', 'is_history'])
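
If that RuntimeError fires during an upgrade, the duplicate Document2 rows have to be merged by hand before rerunning the migration. A hypothetical Django-shell snippet to list the conflicting rows first (import path assumed; not part of this commit):

from django.db.models import Count
from desktop.models import Document2  # import path assumed from this repo

dupes = (Document2.objects
         .values('uuid', 'version', 'is_history')
         .annotate(id_count=Count('id'))
         .filter(id_count__gt=1))
for d in dupes:
    # e.g. {'uuid': '...', 'version': 1, 'is_history': False, 'id_count': 2}
    print(d)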