HUE-3287 [core] Django 1.11 upgrade
- Adding the following modules:
asn1crypto-0.24.0
certifi-2018.1.18
cffi-1.11.5
cryptography-2.0
idna-2.6
ipaddress-1.0.19
pycparser-2.18
requests-2.18.4
requests-kerberos-0.12.0
six-1.11.0
urllib3-1.22

Prakash Ranade, 7 years ago
parent
commit d51d6fb821
100 changed files with 5,727 additions and 2,401 deletions
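The upgrade pulls in a TLS- and Kerberos-capable HTTP stack (requests, requests-kerberos, certifi, urllib3 and their dependencies). As a hedged illustration of what that stack provides once vendored, and not code from this commit (the URL below is a placeholder):

    import requests
    from requests_kerberos import HTTPKerberosAuth, OPTIONAL

    # certifi (also vendored here) supplies the CA bundle that requests verifies against.
    session = requests.Session()
    response = session.get(
        'https://namenode.example.com:50470/webhdfs/v1/?op=LISTSTATUS',  # placeholder URL
        auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL),
        verify=True,
    )
    response.raise_for_status()
    print(response.json())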
  1. + 19 - 0  desktop/core/ext-py/asn1crypto-0.24.0/LICENSE
  2. + 3 - 0  desktop/core/ext-py/asn1crypto-0.24.0/MANIFEST.in
  3. + 2 - 1  desktop/core/ext-py/asn1crypto-0.24.0/PKG-INFO
  4. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/__init__.py
  5. + 6 - 6  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_elliptic_curve.py
  6. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_errors.py
  7. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_ffi.py
  8. + 170 - 170  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_inet.py
  9. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_int.py
  10. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_iri.py
  11. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_ordereddict.py
  12. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_perf/__init__.py
  13. + 5 - 1  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_perf/_big_num_ctypes.py
  14. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_teletex_codec.py
  15. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_types.py
  16. + 35 - 14  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/algos.py
  17. + 74 - 57  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/cms.py
  18. + 359 - 216  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/core.py
  19. + 8 - 8  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/crl.py
  20. + 1 - 1  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/csr.py
  21. + 10 - 6  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/keys.py
  22. + 24 - 21  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/ocsp.py
  23. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/parser.py
  24. + 3 - 3  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pdf.py
  25. + 8 - 8  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pem.py
  26. + 4 - 4  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pkcs12.py
  27. + 13 - 13  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/tsp.py
  28. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/util.py
  29. + 2 - 2  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/version.py
  30. + 402 - 81  desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/x509.py
  31. + 339 - 0  desktop/core/ext-py/asn1crypto-0.24.0/changelog.md
  32. + 79 - 0  desktop/core/ext-py/asn1crypto-0.24.0/docs/pem.md
  33. + 23 - 0  desktop/core/ext-py/asn1crypto-0.24.0/docs/readme.md
  34. + 675 - 0  desktop/core/ext-py/asn1crypto-0.24.0/docs/universal_types.md
  35. + 232 - 0  desktop/core/ext-py/asn1crypto-0.24.0/readme.md
  36. + 0 - 1  desktop/core/ext-py/asn1crypto-0.24.0/setup.cfg
  37. + 0 - 0  desktop/core/ext-py/asn1crypto-0.24.0/setup.py
  38. + 21 - 0  desktop/core/ext-py/certifi-2018.1.18/LICENSE
  39. + 1 - 0  desktop/core/ext-py/certifi-2018.1.18/MANIFEST.in
  40. + 69 - 0  desktop/core/ext-py/certifi-2018.1.18/PKG-INFO
  41. + 46 - 0  desktop/core/ext-py/certifi-2018.1.18/README.rst
  42. + 3 - 0  desktop/core/ext-py/certifi-2018.1.18/certifi/__init__.py
  43. + 2 - 0  desktop/core/ext-py/certifi-2018.1.18/certifi/__main__.py
  44. + 0 - 937  desktop/core/ext-py/certifi-2018.1.18/certifi/cacert.pem
  45. + 37 - 0  desktop/core/ext-py/certifi-2018.1.18/certifi/core.py
  46. + 11 - 0  desktop/core/ext-py/certifi-2018.1.18/setup.cfg
  47. + 67 - 0  desktop/core/ext-py/certifi-2018.1.18/setup.py
  48. + 5 - 0  desktop/core/ext-py/cffi-1.11.5/AUTHORS
  49. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/LICENSE
  50. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/MANIFEST.in
  51. + 4 - 1  desktop/core/ext-py/cffi-1.11.5/PKG-INFO
  52. + 30 - 0  desktop/core/ext-py/cffi-1.11.5/README.md
  53. + 489 - 153  desktop/core/ext-py/cffi-1.11.5/c/_cffi_backend.c
  54. + 62 - 20  desktop/core/ext-py/cffi-1.11.5/c/call_python.c
  55. + 11 - 27  desktop/core/ext-py/cffi-1.11.5/c/cdlopen.c
  56. + 8 - 2  desktop/core/ext-py/cffi-1.11.5/c/cffi1_module.c
  57. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/cglob.c
  58. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/commontypes.c
  59. + 92 - 29  desktop/core/ext-py/cffi-1.11.5/c/ffi_obj.c
  60. + 7 - 3  desktop/core/ext-py/cffi-1.11.5/c/file_emulator.h
  61. + 93 - 45  desktop/core/ext-py/cffi-1.11.5/c/lib_obj.c
  62. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi.c
  63. + 3 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi.h
  64. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi_common.h
  65. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/fficonfig.h
  66. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffitarget.h
  67. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/prep_cif.c
  68. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/types.c
  69. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win32.c
  70. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win64.asm
  71. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win64.obj
  72. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/malloc_closure.h
  73. + 125 - 7  desktop/core/ext-py/cffi-1.11.5/c/minibuffer.h
  74. + 22 - 17  desktop/core/ext-py/cffi-1.11.5/c/misc_thread_common.h
  75. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/c/misc_thread_posix.h
  76. + 6 - 6  desktop/core/ext-py/cffi-1.11.5/c/misc_win32.h
  77. + 17 - 1  desktop/core/ext-py/cffi-1.11.5/c/parse_c_type.c
  78. + 37 - 4  desktop/core/ext-py/cffi-1.11.5/c/realize_c_type.c
  79. + 606 - 106  desktop/core/ext-py/cffi-1.11.5/c/test_c.py
  80. + 246 - 0  desktop/core/ext-py/cffi-1.11.5/c/wchar_helper.h
  81. + 149 - 0  desktop/core/ext-py/cffi-1.11.5/c/wchar_helper_3.h
  82. + 4 - 4  desktop/core/ext-py/cffi-1.11.5/cffi/__init__.py
  83. + 145 - 0  desktop/core/ext-py/cffi-1.11.5/cffi/_cffi_errors.h
  84. + 90 - 24  desktop/core/ext-py/cffi-1.11.5/cffi/_cffi_include.h
  85. + 31 - 64  desktop/core/ext-py/cffi-1.11.5/cffi/_embedding.h
  86. + 160 - 73  desktop/core/ext-py/cffi-1.11.5/cffi/api.py
  87. + 59 - 13  desktop/core/ext-py/cffi-1.11.5/cffi/backend_ctypes.py
  88. + 10 - 2  desktop/core/ext-py/cffi-1.11.5/cffi/cffi_opcode.py
  89. + 9 - 5  desktop/core/ext-py/cffi-1.11.5/cffi/commontypes.py
  90. + 123 - 71  desktop/core/ext-py/cffi-1.11.5/cffi/cparser.py
  91. + 23 - 0  desktop/core/ext-py/cffi-1.11.5/cffi/error.py
  92. + 19 - 13  desktop/core/ext-py/cffi-1.11.5/cffi/ffiplatform.py
  93. + 0 - 0  desktop/core/ext-py/cffi-1.11.5/cffi/lock.py
  94. + 49 - 40  desktop/core/ext-py/cffi-1.11.5/cffi/model.py
  95. + 5 - 1  desktop/core/ext-py/cffi-1.11.5/cffi/parse_c_type.h
  96. + 151 - 78  desktop/core/ext-py/cffi-1.11.5/cffi/recompiler.py
  97. + 46 - 3  desktop/core/ext-py/cffi-1.11.5/cffi/setuptools_ext.py
  98. + 17 - 12  desktop/core/ext-py/cffi-1.11.5/cffi/vengine_cpy.py
  99. + 13 - 9  desktop/core/ext-py/cffi-1.11.5/cffi/vengine_gen.py
  100. + 8 - 18  desktop/core/ext-py/cffi-1.11.5/cffi/verifier.py
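
Each of the new distributions is vendored under desktop/core/ext-py/, Hue's directory for bundled third-party Python packages. Purely as a hypothetical illustration of how such vendored source trees can be made importable (this is not Hue's actual build code, which handles ext-py packages itself; the paths and glob below are assumptions):

    import glob
    import os
    import sys

    # Hypothetical: add each vendored distribution root to sys.path so that the
    # package directory inside it (e.g. requests/, certifi/) becomes importable.
    EXT_PY = os.path.join('desktop', 'core', 'ext-py')
    for dist_dir in sorted(glob.glob(os.path.join(EXT_PY, '*-*'))):
        if dist_dir not in sys.path:
            sys.path.insert(0, dist_dir)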

+ 19 - 0
desktop/core/ext-py/asn1crypto-0.24.0/LICENSE

@@ -0,0 +1,19 @@
+Copyright (c) 2015-2017 Will Bond <will@wbond.net>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 3 - 0
desktop/core/ext-py/asn1crypto-0.24.0/MANIFEST.in

@@ -0,0 +1,3 @@
+include LICENSE
+include readme.md changelog.md
+recursive-include docs *.md

+ 2 - 1
desktop/core/ext-py/asn1crypto-0.22.0/PKG-INFO → desktop/core/ext-py/asn1crypto-0.24.0/PKG-INFO

@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: asn1crypto
-Version: 0.22.0
+Version: 0.24.0
 Summary: Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP
 Home-page: https://github.com/wbond/asn1crypto
 Author: wbond
 Author-email: will@wbond.net
 License: MIT
+Description-Content-Type: UNKNOWN
 Description: Docs for this project are maintained at https://github.com/wbond/asn1crypto#readme.
 Keywords: asn1 crypto pki x509 certificate rsa dsa ec dh
 Platform: UNKNOWN

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/__init__.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/__init__.py


+ 6 - 6
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_elliptic_curve.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_elliptic_curve.py

@@ -160,10 +160,10 @@ class PrimePoint():
 
         p = self.curve.p
 
-        l = ((other.y - self.y) * inverse_mod(other.x - self.x, p)) % p
+        l_ = ((other.y - self.y) * inverse_mod(other.x - self.x, p)) % p
 
-        x3 = (l * l - self.x - other.x) % p
-        y3 = (l * (self.x - x3) - self.y) % p
+        x3 = (l_ * l_ - self.x - other.x) % p
+        y3 = (l_ * (self.x - x3) - self.y) % p
 
         return PrimePoint(self.curve, x3, y3)
 
@@ -232,10 +232,10 @@ class PrimePoint():
         p = self.curve.p
         a = self.curve.a
 
-        l = ((3 * self.x * self.x + a) * inverse_mod(2 * self.y, p)) % p
+        l_ = ((3 * self.x * self.x + a) * inverse_mod(2 * self.y, p)) % p
 
-        x3 = (l * l - 2 * self.x) % p
-        y3 = (l * (self.x - x3) - self.y) % p
+        x3 = (l_ * l_ - 2 * self.x) % p
+        y3 = (l_ * (self.x - x3) - self.y) % p
 
         return PrimePoint(self.curve, x3, y3)
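
The only change to _elliptic_curve.py renames the local variable l to l_ (avoiding the ambiguous single-letter name); the arithmetic is untouched. For reference, the two hunks implement the standard affine point-addition and point-doubling formulas on a prime curve y^2 = x^3 + ax + b over GF(p):

    % point addition (P != Q), all operations reduced mod p
    \lambda = (y_2 - y_1)(x_2 - x_1)^{-1}, \quad x_3 = \lambda^2 - x_1 - x_2, \quad y_3 = \lambda(x_1 - x_3) - y_1 \pmod{p}
    % point doubling
    \lambda = (3x_1^2 + a)(2y_1)^{-1}, \quad x_3 = \lambda^2 - 2x_1, \quad y_3 = \lambda(x_1 - x_3) - y_1 \pmod{p}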
 

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_errors.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_errors.py


+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_ffi.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_ffi.py


+ 170 - 170
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_inet.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_inet.py

@@ -1,170 +1,170 @@
-# coding: utf-8
-from __future__ import unicode_literals, division, absolute_import, print_function
-
-import socket
-import struct
-
-from ._errors import unwrap
-from ._types import byte_cls, bytes_to_list, str_cls, type_name
-
-
-def inet_ntop(address_family, packed_ip):
-    """
-    Windows compatiblity shim for socket.inet_ntop().
-
-    :param address_family:
-        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
-
-    :param packed_ip:
-        A byte string of the network form of an IP address
-
-    :return:
-        A unicode string of the IP address
-    """
-
-    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
-        raise ValueError(unwrap(
-            '''
-            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
-            not %s
-            ''',
-            repr(socket.AF_INET),
-            repr(socket.AF_INET6),
-            repr(address_family)
-        ))
-
-    if not isinstance(packed_ip, byte_cls):
-        raise TypeError(unwrap(
-            '''
-            packed_ip must be a byte string, not %s
-            ''',
-            type_name(packed_ip)
-        ))
-
-    required_len = 4 if address_family == socket.AF_INET else 16
-    if len(packed_ip) != required_len:
-        raise ValueError(unwrap(
-            '''
-            packed_ip must be %d bytes long - is %d
-            ''',
-            required_len,
-            len(packed_ip)
-        ))
-
-    if address_family == socket.AF_INET:
-        return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
-
-    octets = struct.unpack(b'!HHHHHHHH', packed_ip)
-
-    runs_of_zero = {}
-    longest_run = 0
-    zero_index = None
-    for i, octet in enumerate(octets + (-1,)):
-        if octet != 0:
-            if zero_index is not None:
-                length = i - zero_index
-                if length not in runs_of_zero:
-                    runs_of_zero[length] = zero_index
-                longest_run = max(longest_run, length)
-                zero_index = None
-        elif zero_index is None:
-            zero_index = i
-
-    hexed = [hex(o)[2:] for o in octets]
-
-    if longest_run < 2:
-        return ':'.join(hexed)
-
-    zero_start = runs_of_zero[longest_run]
-    zero_end = zero_start + longest_run
-
-    return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
-
-
-def inet_pton(address_family, ip_string):
-    """
-    Windows compatiblity shim for socket.inet_ntop().
-
-    :param address_family:
-        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
-
-    :param ip_string:
-        A unicode string of an IP address
-
-    :return:
-        A byte string of the network form of the IP address
-    """
-
-    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
-        raise ValueError(unwrap(
-            '''
-            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
-            not %s
-            ''',
-            repr(socket.AF_INET),
-            repr(socket.AF_INET6),
-            repr(address_family)
-        ))
-
-    if not isinstance(ip_string, str_cls):
-        raise TypeError(unwrap(
-            '''
-            ip_string must be a unicode string, not %s
-            ''',
-            type_name(ip_string)
-        ))
-
-    if address_family == socket.AF_INET:
-        octets = ip_string.split('.')
-        error = len(octets) != 4
-        if not error:
-            ints = []
-            for o in octets:
-                o = int(o)
-                if o > 255 or o < 0:
-                    error = True
-                    break
-                ints.append(o)
-
-        if error:
-            raise ValueError(unwrap(
-                '''
-                ip_string must be a dotted string with four integers in the
-                range of 0 to 255, got %s
-                ''',
-                repr(ip_string)
-            ))
-
-        return struct.pack(b'!BBBB', *ints)
-
-    error = False
-    omitted = ip_string.count('::')
-    if omitted > 1:
-        error = True
-    elif omitted == 0:
-        octets = ip_string.split(':')
-        error = len(octets) != 8
-    else:
-        begin, end = ip_string.split('::')
-        begin_octets = begin.split(':')
-        end_octets = end.split(':')
-        missing = 8 - len(begin_octets) - len(end_octets)
-        octets = begin_octets + (['0'] * missing) + end_octets
-
-    if not error:
-        ints = []
-        for o in octets:
-            o = int(o, 16)
-            if o > 65535 or o < 0:
-                error = True
-                break
-            ints.append(o)
-
-        return struct.pack(b'!HHHHHHHH', *ints)
-
-    raise ValueError(unwrap(
-        '''
-        ip_string must be a valid ipv6 string, got %s
-        ''',
-        repr(ip_string)
-    ))
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import socket
+import struct
+
+from ._errors import unwrap
+from ._types import byte_cls, bytes_to_list, str_cls, type_name
+
+
+def inet_ntop(address_family, packed_ip):
+    """
+    Windows compatibility shim for socket.inet_ntop().
+
+    :param address_family:
+        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
+
+    :param packed_ip:
+        A byte string of the network form of an IP address
+
+    :return:
+        A unicode string of the IP address
+    """
+
+    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
+        raise ValueError(unwrap(
+            '''
+            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
+            not %s
+            ''',
+            repr(socket.AF_INET),
+            repr(socket.AF_INET6),
+            repr(address_family)
+        ))
+
+    if not isinstance(packed_ip, byte_cls):
+        raise TypeError(unwrap(
+            '''
+            packed_ip must be a byte string, not %s
+            ''',
+            type_name(packed_ip)
+        ))
+
+    required_len = 4 if address_family == socket.AF_INET else 16
+    if len(packed_ip) != required_len:
+        raise ValueError(unwrap(
+            '''
+            packed_ip must be %d bytes long - is %d
+            ''',
+            required_len,
+            len(packed_ip)
+        ))
+
+    if address_family == socket.AF_INET:
+        return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))
+
+    octets = struct.unpack(b'!HHHHHHHH', packed_ip)
+
+    runs_of_zero = {}
+    longest_run = 0
+    zero_index = None
+    for i, octet in enumerate(octets + (-1,)):
+        if octet != 0:
+            if zero_index is not None:
+                length = i - zero_index
+                if length not in runs_of_zero:
+                    runs_of_zero[length] = zero_index
+                longest_run = max(longest_run, length)
+                zero_index = None
+        elif zero_index is None:
+            zero_index = i
+
+    hexed = [hex(o)[2:] for o in octets]
+
+    if longest_run < 2:
+        return ':'.join(hexed)
+
+    zero_start = runs_of_zero[longest_run]
+    zero_end = zero_start + longest_run
+
+    return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:])
+
+
+def inet_pton(address_family, ip_string):
+    """
+    Windows compatibility shim for socket.inet_ntop().
+
+    :param address_family:
+        socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6
+
+    :param ip_string:
+        A unicode string of an IP address
+
+    :return:
+        A byte string of the network form of the IP address
+    """
+
+    if address_family not in set([socket.AF_INET, socket.AF_INET6]):
+        raise ValueError(unwrap(
+            '''
+            address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),
+            not %s
+            ''',
+            repr(socket.AF_INET),
+            repr(socket.AF_INET6),
+            repr(address_family)
+        ))
+
+    if not isinstance(ip_string, str_cls):
+        raise TypeError(unwrap(
+            '''
+            ip_string must be a unicode string, not %s
+            ''',
+            type_name(ip_string)
+        ))
+
+    if address_family == socket.AF_INET:
+        octets = ip_string.split('.')
+        error = len(octets) != 4
+        if not error:
+            ints = []
+            for o in octets:
+                o = int(o)
+                if o > 255 or o < 0:
+                    error = True
+                    break
+                ints.append(o)
+
+        if error:
+            raise ValueError(unwrap(
+                '''
+                ip_string must be a dotted string with four integers in the
+                range of 0 to 255, got %s
+                ''',
+                repr(ip_string)
+            ))
+
+        return struct.pack(b'!BBBB', *ints)
+
+    error = False
+    omitted = ip_string.count('::')
+    if omitted > 1:
+        error = True
+    elif omitted == 0:
+        octets = ip_string.split(':')
+        error = len(octets) != 8
+    else:
+        begin, end = ip_string.split('::')
+        begin_octets = begin.split(':')
+        end_octets = end.split(':')
+        missing = 8 - len(begin_octets) - len(end_octets)
+        octets = begin_octets + (['0'] * missing) + end_octets
+
+    if not error:
+        ints = []
+        for o in octets:
+            o = int(o, 16)
+            if o > 65535 or o < 0:
+                error = True
+                break
+            ints.append(o)
+
+        return struct.pack(b'!HHHHHHHH', *ints)
+
+    raise ValueError(unwrap(
+        '''
+        ip_string must be a valid ipv6 string, got %s
+        ''',
+        repr(ip_string)
+    ))
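
The _inet.py hunk rewrites the whole file (every line shows as changed, which usually indicates a line-ending normalization), but the only visible textual edit is the "compatiblity" to "compatibility" docstring fix; the shim logic is unchanged. A quick hedged round-trip check of what these shims do (asn1crypto._inet is a private module, so this is illustration only, not code from the commit):

    import socket
    from asn1crypto._inet import inet_ntop, inet_pton  # private module; illustration only

    packed = inet_pton(socket.AF_INET6, '2001:db8::1')
    assert inet_ntop(socket.AF_INET6, packed) == '2001:db8::1'
    assert inet_ntop(socket.AF_INET, inet_pton(socket.AF_INET, '10.0.0.1')) == '10.0.0.1'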

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_int.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_int.py


+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_iri.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_iri.py


+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_ordereddict.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_ordereddict.py


+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_perf/__init__.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_perf/__init__.py


+ 5 - 1
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_perf/_big_num_ctypes.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_perf/_big_num_ctypes.py

@@ -21,6 +21,8 @@ interfacing with libcrypto.
 
 from __future__ import unicode_literals, division, absolute_import, print_function
 
+import sys
+
 from ctypes import CDLL, c_int, c_char_p, c_void_p
 from ctypes.util import find_library
 
@@ -28,7 +30,9 @@ from .._ffi import LibraryNotFoundError, FFIEngineError
 
 
 try:
-    libcrypto_path = find_library('crypto')
+    # On Python 2, the unicode string here may raise a UnicodeDecodeError as it
+    # tries to join a bytestring path to the unicode name "crypto"
+    libcrypto_path = find_library(b'crypto' if sys.version_info < (3,) else 'crypto')
     if not libcrypto_path:
         raise LibraryNotFoundError('The library libcrypto could not be found')
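
The new comment above documents the actual fix: on Python 2, ctypes.util.find_library() can hit a UnicodeDecodeError while joining byte-string directory paths with a unicode library name, so the name is passed as bytes there. A minimal stand-alone sketch of the same pattern (not part of this commit; the printed result depends on the host system):

    import sys
    from ctypes.util import find_library

    # Mirror the workaround: byte string on Python 2, text string on Python 3.
    name = b'crypto' if sys.version_info < (3,) else 'crypto'
    print(find_library(name))  # e.g. a libcrypto shared-object name, or None if absent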
 

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_teletex_codec.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_teletex_codec.py


+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/_types.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/_types.py


+ 35 - 14
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/algos.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/algos.py

@@ -5,6 +5,7 @@ ASN.1 type classes for various algorithms using in various aspects of public
 key cryptography. Exports the following items:
 
  - AlgorithmIdentifier()
+ - AnyAlgorithmIdentifier()
  - DigestAlgorithm()
  - DigestInfo()
  - DSASignature()
@@ -182,8 +183,7 @@ class RSASSAPSSParams(Sequence):
             'hash_algorithm',
             DigestAlgorithm,
             {
-                'tag_type': 'explicit',
-                'tag': 0,
+                'explicit': 0,
                 'default': {'algorithm': 'sha1'},
             }
         ),
@@ -191,8 +191,7 @@ class RSASSAPSSParams(Sequence):
             'mask_gen_algorithm',
             MaskGenAlgorithm,
             {
-                'tag_type': 'explicit',
-                'tag': 1,
+                'explicit': 1,
                 'default': {
                     'algorithm': 'mgf1',
                     'parameters': {'algorithm': 'sha1'},
@@ -203,8 +202,7 @@ class RSASSAPSSParams(Sequence):
             'salt_length',
             Integer,
             {
-                'tag_type': 'explicit',
-                'tag': 2,
+                'explicit': 2,
                 'default': 20,
             }
         ),
@@ -212,8 +210,7 @@ class RSASSAPSSParams(Sequence):
             'trailer_field',
             TrailerField,
             {
-                'tag_type': 'explicit',
-                'tag': 3,
+                'explicit': 3,
                 'default': 'trailer_field_bc',
             }
         ),
@@ -481,8 +478,7 @@ class RSAESOAEPParams(Sequence):
             'hash_algorithm',
             DigestAlgorithm,
             {
-                'tag_type': 'explicit',
-                'tag': 0,
+                'explicit': 0,
                 'default': {'algorithm': 'sha1'}
             }
         ),
@@ -490,8 +486,7 @@ class RSAESOAEPParams(Sequence):
             'mask_gen_algorithm',
             MaskGenAlgorithm,
             {
-                'tag_type': 'explicit',
-                'tag': 1,
+                'explicit': 1,
                 'default': {
                     'algorithm': 'mgf1',
                     'parameters': {'algorithm': 'sha1'}
@@ -502,8 +497,7 @@ class RSAESOAEPParams(Sequence):
             'p_source_algorithm',
             PSourceAlgorithm,
             {
-                'tag_type': 'explicit',
-                'tag': 2,
+                'explicit': 2,
                 'default': {
                     'algorithm': 'p_specified',
                     'parameters': b''
@@ -1120,3 +1114,30 @@ class Pkcs5MacAlgorithm(Sequence):
 
 
 EncryptionAlgorithm._oid_specs['pbes2'] = Pbes2Params
+
+
+class AnyAlgorithmId(ObjectIdentifier):
+    _map = {}
+
+    def _setup(self):
+        _map = self.__class__._map
+        for other_cls in (EncryptionAlgorithmId, SignedDigestAlgorithmId, DigestAlgorithmId):
+            for oid, name in other_cls._map.items():
+                _map[oid] = name
+
+
+class AnyAlgorithmIdentifier(_ForceNullParameters, Sequence):
+    _fields = [
+        ('algorithm', AnyAlgorithmId),
+        ('parameters', Any, {'optional': True}),
+    ]
+
+    _oid_pair = ('algorithm', 'parameters')
+    _oid_specs = {}
+
+    def _setup(self):
+        Sequence._setup(self)
+        specs = self.__class__._oid_specs
+        for other_cls in (EncryptionAlgorithm, SignedDigestAlgorithm):
+            for oid, spec in other_cls._oid_specs.items():
+                specs[oid] = spec
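
The new AnyAlgorithmIdentifier merges the OID maps of the encryption, signed-digest, and digest algorithm identifiers, so callers can parse an algorithm structure without knowing its category in advance. A hedged usage sketch (standard asn1crypto API; sha256 is just an example value):

    from asn1crypto import algos

    # Serialize an ordinary digest AlgorithmIdentifier, then re-parse it generically.
    der = algos.DigestAlgorithm({'algorithm': 'sha256'}).dump()
    generic = algos.AnyAlgorithmIdentifier.load(der)
    print(generic['algorithm'].native)  # 'sha256'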

+ 74 - 57
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/cms.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/cms.py

@@ -15,6 +15,8 @@ compatible with PKCS#7. Exports the following items:
  - SignedData()
 
 Other type classes are defined that help compose the types listed above.
+
+Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types.
 """
 
 from __future__ import unicode_literals, division, absolute_import, print_function
@@ -99,6 +101,8 @@ class CMSAttributeType(ObjectIdentifier):
         '1.2.840.113549.1.9.6': 'counter_signature',
         # https://tools.ietf.org/html/rfc3161#page-20
         '1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token',
+        # https://tools.ietf.org/html/rfc6211#page-5
+        '1.2.840.113549.1.9.52': 'cms_algorithm_protection',
     }
 
 
@@ -123,6 +127,14 @@ class ContentType(ObjectIdentifier):
     }
 
 
+class CMSAlgorithmProtection(Sequence):
+    _fields = [
+        ('digest_algorithm', DigestAlgorithm),
+        ('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}),
+        ('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}),
+    ]
+
+
 class SetOfContentType(SetOf):
     _child_spec = ContentType
 
@@ -139,6 +151,10 @@ class SetOfAny(SetOf):
     _child_spec = Any
 
 
+class SetOfCMSAlgorithmProtection(SetOf):
+    _child_spec = CMSAlgorithmProtection
+
+
 class CMSAttribute(Sequence):
     _fields = [
         ('type', CMSAttributeType),
@@ -176,8 +192,8 @@ class AttCertVersion(Integer):
 
 class AttCertSubject(Choice):
     _alternatives = [
-        ('base_certificate_id', IssuerSerial, {'tag_type': 'explicit', 'tag': 0}),
-        ('subject_name', GeneralNames, {'tag_type': 'explicit', 'tag': 1}),
+        ('base_certificate_id', IssuerSerial, {'explicit': 0}),
+        ('subject_name', GeneralNames, {'explicit': 1}),
     ]
 
 
@@ -229,24 +245,24 @@ class ObjectDigestInfo(Sequence):
 
 class Holder(Sequence):
     _fields = [
-        ('base_certificate_id', IssuerSerial, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('entity_name', GeneralNames, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('object_digest_info', ObjectDigestInfo, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}),
+        ('entity_name', GeneralNames, {'implicit': 1, 'optional': True}),
+        ('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}),
     ]
 
 
 class V2Form(Sequence):
     _fields = [
         ('issuer_name', GeneralNames, {'optional': True}),
-        ('base_certificate_id', IssuerSerial, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('object_digest_info', ObjectDigestInfo, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}),
+        ('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}),
     ]
 
 
 class AttCertIssuer(Choice):
     _alternatives = [
         ('v1_form', GeneralNames),
-        ('v2_form', V2Form, {'tag_type': 'explicit', 'tag': 0}),
+        ('v2_form', V2Form, {'explicit': 0}),
     ]
 
 
@@ -264,7 +280,7 @@ class IetfAttrValues(SequenceOf):
 
 class IetfAttrSyntax(Sequence):
     _fields = [
-        ('policy_authority', GeneralNames, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}),
         ('values', IetfAttrValues),
     ]
 
@@ -287,8 +303,8 @@ class SetOfSvceAuthInfo(SetOf):
 
 class RoleSyntax(Sequence):
     _fields = [
-        ('role_authority', GeneralNames, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('role_name', GeneralName, {'tag_type': 'implicit', 'tag': 1}),
+        ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}),
+        ('role_name', GeneralName, {'implicit': 1}),
     ]
 
 
@@ -309,8 +325,8 @@ class ClassList(BitString):
 
 class SecurityCategory(Sequence):
     _fields = [
-        ('type', ObjectIdentifier, {'tag_type': 'implicit', 'tag': 0}),
-        ('value', Any, {'tag_type': 'implicit', 'tag': 1}),
+        ('type', ObjectIdentifier, {'implicit': 0}),
+        ('value', Any, {'implicit': 1}),
     ]
 
 
@@ -320,9 +336,9 @@ class SetOfSecurityCategory(SetOf):
 
 class Clearance(Sequence):
     _fields = [
-        ('policy_id', ObjectIdentifier, {'tag_type': 'implicit', 'tag': 0}),
-        ('class_list', ClassList, {'tag_type': 'implicit', 'tag': 1, 'default': 'unclassified'}),
-        ('security_categories', SetOfSecurityCategory, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('policy_id', ObjectIdentifier, {'implicit': 0}),
+        ('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}),
+        ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}),
     ]
 
 
@@ -366,8 +382,8 @@ class SetOfTimingMetrics(SetOf):
 class TimingPolicy(Sequence):
     _fields = [
         ('policy_id', SequenceOf, {'spec': ObjectIdentifier}),
-        ('max_offset', BigTime, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('max_delay', BigTime, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('max_offset', BigTime, {'explicit': 0, 'optional': True}),
+        ('max_delay', BigTime, {'explicit': 1, 'optional': True}),
     ]
 
 
@@ -452,10 +468,10 @@ class OtherCertificateFormat(Sequence):
 class CertificateChoices(Choice):
     _alternatives = [
         ('certificate', Certificate),
-        ('extended_certificate', ExtendedCertificate, {'tag_type': 'implicit', 'tag': 0}),
-        ('v1_attr_cert', AttributeCertificateV1, {'tag_type': 'implicit', 'tag': 1}),
-        ('v2_attr_cert', AttributeCertificateV2, {'tag_type': 'implicit', 'tag': 2}),
-        ('other', OtherCertificateFormat, {'tag_type': 'implicit', 'tag': 3}),
+        ('extended_certificate', ExtendedCertificate, {'implicit': 0}),
+        ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}),
+        ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}),
+        ('other', OtherCertificateFormat, {'implicit': 3}),
     ]
 
     def validate(self, class_, tag, contents):
@@ -491,7 +507,7 @@ class CertificateSet(SetOf):
 class ContentInfo(Sequence):
     _fields = [
         ('content_type', ContentType),
-        ('content', Any, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('content', Any, {'explicit': 0, 'optional': True}),
     ]
 
     _oid_pair = ('content_type', 'content')
@@ -505,7 +521,7 @@ class SetOfContentInfo(SetOf):
 class EncapsulatedContentInfo(Sequence):
     _fields = [
         ('content_type', ContentType),
-        ('content', ParsableOctetString, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('content', ParsableOctetString, {'explicit': 0, 'optional': True}),
     ]
 
     _oid_pair = ('content_type', 'content')
@@ -522,7 +538,7 @@ class IssuerAndSerialNumber(Sequence):
 class SignerIdentifier(Choice):
     _alternatives = [
         ('issuer_and_serial_number', IssuerAndSerialNumber),
-        ('subject_key_identifier', OctetString, {'tag_type': 'implicit', 'tag': 0}),
+        ('subject_key_identifier', OctetString, {'implicit': 0}),
     ]
 
 
@@ -536,7 +552,7 @@ class CertificateRevocationLists(SetOf):
 
 class SCVPReqRes(Sequence):
     _fields = [
-        ('request', ContentInfo, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('request', ContentInfo, {'explicit': 0, 'optional': True}),
         ('response', ContentInfo),
     ]
 
@@ -564,7 +580,7 @@ class OtherRevocationInfoFormat(Sequence):
 class RevocationInfoChoice(Choice):
     _alternatives = [
         ('crl', CertificateList),
-        ('other', OtherRevocationInfoFormat, {'tag_type': 'implciit', 'tag': 1}),
+        ('other', OtherRevocationInfoFormat, {'implicit': 1}),
     ]
 
 
@@ -577,10 +593,10 @@ class SignerInfo(Sequence):
         ('version', CMSVersion),
         ('sid', SignerIdentifier),
         ('digest_algorithm', DigestAlgorithm),
-        ('signed_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}),
         ('signature_algorithm', SignedDigestAlgorithm),
         ('signature', OctetString),
-        ('unsigned_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -593,8 +609,8 @@ class SignedData(Sequence):
         ('version', CMSVersion),
         ('digest_algorithms', DigestAlgorithms),
         ('encap_content_info', None),
-        ('certificates', CertificateSet, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('crls', RevocationInfoChoices, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
+        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
         ('signer_infos', SignerInfos),
     ]
 
@@ -619,15 +635,15 @@ class SignedData(Sequence):
 
 class OriginatorInfo(Sequence):
     _fields = [
-        ('certs', CertificateSet, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('crls', RevocationInfoChoices, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('certs', CertificateSet, {'implicit': 0, 'optional': True}),
+        ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}),
     ]
 
 
 class RecipientIdentifier(Choice):
     _alternatives = [
         ('issuer_and_serial_number', IssuerAndSerialNumber),
-        ('subject_key_identifier', OctetString, {'tag_type': 'implicit', 'tag': 0}),
+        ('subject_key_identifier', OctetString, {'implicit': 0}),
     ]
 
 
@@ -662,8 +678,8 @@ class KeyTransRecipientInfo(Sequence):
 class OriginatorIdentifierOrKey(Choice):
     _alternatives = [
         ('issuer_and_serial_number', IssuerAndSerialNumber),
-        ('subject_key_identifier', OctetString, {'tag_type': 'implicit', 'tag': 0}),
-        ('originator_key', PublicKeyInfo, {'tag_type': 'implicit', 'tag': 1}),
+        ('subject_key_identifier', OctetString, {'implicit': 0}),
+        ('originator_key', PublicKeyInfo, {'implicit': 1}),
     ]
 
 
@@ -685,7 +701,7 @@ class RecipientKeyIdentifier(Sequence):
 class KeyAgreementRecipientIdentifier(Choice):
     _alternatives = [
         ('issuer_and_serial_number', IssuerAndSerialNumber),
-        ('r_key_id', RecipientKeyIdentifier, {'tag_type': 'implicit', 'tag': 0}),
+        ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}),
     ]
 
 
@@ -703,8 +719,8 @@ class RecipientEncryptedKeys(SequenceOf):
 class KeyAgreeRecipientInfo(Sequence):
     _fields = [
         ('version', CMSVersion),
-        ('originator', OriginatorIdentifierOrKey, {'tag_type': 'explicit', 'tag': 0}),
-        ('ukm', OctetString, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('originator', OriginatorIdentifierOrKey, {'explicit': 0}),
+        ('ukm', OctetString, {'explicit': 1, 'optional': True}),
         ('key_encryption_algorithm', KeyEncryptionAlgorithm),
         ('recipient_encrypted_keys', RecipientEncryptedKeys),
     ]
@@ -730,7 +746,7 @@ class KEKRecipientInfo(Sequence):
 class PasswordRecipientInfo(Sequence):
     _fields = [
         ('version', CMSVersion),
-        ('key_derivation_algorithm', KdfAlgorithm, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}),
         ('key_encryption_algorithm', KeyEncryptionAlgorithm),
         ('encrypted_key', OctetString),
     ]
@@ -746,10 +762,10 @@ class OtherRecipientInfo(Sequence):
 class RecipientInfo(Choice):
     _alternatives = [
         ('ktri', KeyTransRecipientInfo),
-        ('kari', KeyAgreeRecipientInfo, {'tag_type': 'implicit', 'tag': 1}),
-        ('kekri', KEKRecipientInfo, {'tag_type': 'implicit', 'tag': 2}),
-        ('pwri', PasswordRecipientInfo, {'tag_type': 'implicit', 'tag': 3}),
-        ('ori', OtherRecipientInfo, {'tag_type': 'implicit', 'tag': 4}),
+        ('kari', KeyAgreeRecipientInfo, {'implicit': 1}),
+        ('kekri', KEKRecipientInfo, {'implicit': 2}),
+        ('pwri', PasswordRecipientInfo, {'implicit': 3}),
+        ('ori', OtherRecipientInfo, {'implicit': 4}),
     ]
 
 
@@ -761,17 +777,17 @@ class EncryptedContentInfo(Sequence):
     _fields = [
         ('content_type', ContentType),
         ('content_encryption_algorithm', EncryptionAlgorithm),
-        ('encrypted_content', OctetString, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}),
     ]
 
 
 class EnvelopedData(Sequence):
     _fields = [
         ('version', CMSVersion),
-        ('originator_info', OriginatorInfo, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
         ('recipient_infos', RecipientInfos),
         ('encrypted_content_info', EncryptedContentInfo),
-        ('unprotected_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -781,8 +797,8 @@ class SignedAndEnvelopedData(Sequence):
         ('recipient_infos', RecipientInfos),
         ('digest_algorithms', DigestAlgorithms),
         ('encrypted_content_info', EncryptedContentInfo),
-        ('certificates', CertificateSet, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('crls', CertificateRevocationLists, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('certificates', CertificateSet, {'implicit': 0, 'optional': True}),
+        ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}),
         ('signer_infos', SignerInfos),
     ]
 
@@ -818,35 +834,35 @@ class EncryptedData(Sequence):
     _fields = [
         ('version', CMSVersion),
         ('encrypted_content_info', EncryptedContentInfo),
-        ('unprotected_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
     ]
 
 
 class AuthenticatedData(Sequence):
     _fields = [
         ('version', CMSVersion),
-        ('originator_info', OriginatorInfo, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
         ('recipient_infos', RecipientInfos),
         ('mac_algorithm', HmacAlgorithm),
-        ('digest_algorithm', DigestAlgorithm, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}),
         # This does not require the _spec_callbacks approach of SignedData and
         # DigestedData since AuthenticatedData was not part of PKCS#7
         ('encap_content_info', EncapsulatedContentInfo),
-        ('auth_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
         ('mac', OctetString),
-        ('unauth_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 3, 'optional': True}),
+        ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}),
     ]
 
 
 class AuthEnvelopedData(Sequence):
     _fields = [
         ('version', CMSVersion),
-        ('originator_info', OriginatorInfo, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}),
         ('recipient_infos', RecipientInfos),
         ('auth_encrypted_content_info', EncryptedContentInfo),
-        ('auth_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}),
         ('mac', OctetString),
-        ('unauth_attrs', CMSAttributes, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}),
     ]
 
 
@@ -912,4 +928,5 @@ CMSAttribute._oid_specs = {
     'signing_time': SetOfTime,
     'counter_signature': SignerInfos,
     'signature_time_stamp_token': SetOfContentInfo,
+    'cms_algorithm_protection': SetOfCMSAlgorithmProtection,
 }
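
Most of the cms.py churn is mechanical: every field spec of the form {'tag_type': 'implicit'/'explicit', 'tag': n} becomes the shorter {'implicit': n} or {'explicit': n} form accepted by the reworked core.py below, and the 'implciit' typo in RevocationInfoChoice is fixed along the way. A small hedged sketch of the new field syntax; the Example class and its fields are hypothetical, not part of the commit:

    from asn1crypto.core import Integer, OctetString, Sequence

    class Example(Sequence):
        _fields = [
            # previously: ('payload', OctetString, {'tag_type': 'implicit', 'tag': 0, 'optional': True})
            ('payload', OctetString, {'implicit': 0, 'optional': True}),
            # previously: ('count', Integer, {'tag_type': 'explicit', 'tag': 1})
            ('count', Integer, {'explicit': 1}),
        ]

    print(Example({'payload': b'\x01', 'count': 2}).dump())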

+ 359 - 216
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/core.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/core.py

@@ -101,7 +101,7 @@ METHOD_NUM_TO_NAME_MAP = {
 }
 
 
-_OID_RE = re.compile('^\d+(\.\d+)*$')
+_OID_RE = re.compile(r'^\d+(\.\d+)*$')
 
 
 # A global tracker to ensure that _setup() is called for every class, even
@@ -183,13 +183,12 @@ class Asn1Value(object):
     # structures where a string value is encoded using an incorrect tag
     _bad_tag = None
 
-    # A unicode string or None - "explicit" or "implicit" for
-    # tagged values, None for normal
-    tag_type = None
+    # If the value has been implicitly tagged
+    implicit = False
 
-    # If "explicit"ly tagged, the class and tag for the wrapped header
-    explicit_class = None
-    explicit_tag = None
+    # If explicitly tagged, a tuple of 2-element tuples containing the
+    # class int and tag int, from innermost to outermost
+    explicit = None
 
     # The BER/DER header bytes
     _header = None
@@ -230,15 +229,29 @@ class Asn1Value(object):
         value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict)
         return value
 
-    def __init__(self, tag_type=None, class_=None, tag=None, optional=None, default=None, contents=None):
+    def __init__(self, explicit=None, implicit=None, no_explicit=False, tag_type=None, class_=None, tag=None,
+                 optional=None, default=None, contents=None):
         """
         The optional parameter is not used, but rather included so we don't
         have to delete it from the parameter dictionary when passing as keyword
         args
 
+        :param explicit:
+            An int tag number for explicit tagging, or a 2-element tuple of
+            class and tag.
+
+        :param implicit:
+            An int tag number for implicit tagging, or a 2-element tuple of
+            class and tag.
+
+        :param no_explicit:
+            If explicit tagging info should be removed from this instance.
+            Used internally to allow contructing the underlying value that
+            has been wrapped in an explicit tag.
+
         :param tag_type:
             None for normal values, or one of "implicit", "explicit" for tagged
-            values
+            values. Deprecated in favor of explicit and implicit params.
 
         :param class_:
             The class for the value - defaults to "universal" if tag_type is
@@ -247,10 +260,11 @@ class Asn1Value(object):
              - "application"
              - "context"
              - "private"
+            Deprecated in favor of explicit and implicit params.
 
         :param tag:
             The integer tag to override - usually this is used with tag_type or
-            class_
+            class_. Deprecated in favor of explicit and implicit params.
 
         :param optional:
             Dummy parameter that allows "optional" key in spec param dicts
@@ -262,53 +276,112 @@ class Asn1Value(object):
             A byte string of the encoded contents of the value
 
         :raises:
-            ValueError - when tag_type, class_ or tag are invalid values
+            ValueError - when implicit, explicit, tag_type, class_ or tag are invalid values
         """
 
         try:
             if self.__class__ not in _SETUP_CLASSES:
                 cls = self.__class__
+                # Allow explicit to be specified as a simple 2-element tuple
+                # instead of requiring the user make a nested tuple
+                if cls.explicit is not None and isinstance(cls.explicit[0], int_types):
+                    cls.explicit = (cls.explicit, )
                 if hasattr(cls, '_setup'):
                     self._setup()
                 _SETUP_CLASSES[cls] = True
 
+            # Normalize tagging values
+            if explicit is not None:
+                if isinstance(explicit, int_types):
+                    if class_ is None:
+                        class_ = 'context'
+                    explicit = (class_, explicit)
+                # Prevent both explicit and tag_type == 'explicit'
+                if tag_type == 'explicit':
+                    tag_type = None
+                    tag = None
+
+            if implicit is not None:
+                if isinstance(implicit, int_types):
+                    if class_ is None:
+                        class_ = 'context'
+                    implicit = (class_, implicit)
+                # Prevent both implicit and tag_type == 'implicit'
+                if tag_type == 'implicit':
+                    tag_type = None
+                    tag = None
+
+            # Convert old tag_type API to explicit/implicit params
             if tag_type is not None:
-                if tag_type not in ('implicit', 'explicit'):
+                if class_ is None:
+                    class_ = 'context'
+                if tag_type == 'explicit':
+                    explicit = (class_, tag)
+                elif tag_type == 'implicit':
+                    implicit = (class_, tag)
+                else:
                     raise ValueError(unwrap(
                         '''
                         tag_type must be one of "implicit", "explicit", not %s
                         ''',
                         repr(tag_type)
                     ))
-                self.tag_type = tag_type
 
-                if class_ is None:
-                    class_ = 'context'
+            if explicit is not None:
+                # Ensure we have a tuple of 2-element tuples
+                if len(explicit) == 2 and isinstance(explicit[1], int_types):
+                    explicit = (explicit, )
+                for class_, tag in explicit:
+                    invalid_class = None
+                    if isinstance(class_, int_types):
+                        if class_ not in CLASS_NUM_TO_NAME_MAP:
+                            invalid_class = class_
+                    else:
+                        if class_ not in CLASS_NAME_TO_NUM_MAP:
+                            invalid_class = class_
+                        class_ = CLASS_NAME_TO_NUM_MAP[class_]
+                    if invalid_class is not None:
+                        raise ValueError(unwrap(
+                            '''
+                            explicit class must be one of "universal", "application",
+                            "context", "private", not %s
+                            ''',
+                            repr(invalid_class)
+                        ))
+                    if tag is not None:
+                        if not isinstance(tag, int_types):
+                            raise TypeError(unwrap(
+                                '''
+                                explicit tag must be an integer, not %s
+                                ''',
+                                type_name(tag)
+                            ))
+                    if self.explicit is None:
+                        self.explicit = ((class_, tag), )
+                    else:
+                        self.explicit = self.explicit + ((class_, tag), )
+
+            elif implicit is not None:
+                class_, tag = implicit
                 if class_ not in CLASS_NAME_TO_NUM_MAP:
                     raise ValueError(unwrap(
                         '''
-                        class_ must be one of "universal", "application",
+                        implicit class must be one of "universal", "application",
                         "context", "private", not %s
                         ''',
                         repr(class_)
                     ))
-                class_ = CLASS_NAME_TO_NUM_MAP[class_]
-
                 if tag is not None:
                     if not isinstance(tag, int_types):
                         raise TypeError(unwrap(
                             '''
-                            tag must be an integer, not %s
+                            implicit tag must be an integer, not %s
                             ''',
                             type_name(tag)
                         ))
-
-                if tag_type == 'implicit':
-                    self.class_ = class_
-                    self.tag = tag
-                else:
-                    self.explicit_class = class_
-                    self.explicit_tag = tag
+                self.class_ = CLASS_NAME_TO_NUM_MAP[class_]
+                self.tag = tag
+                self.implicit = True
             else:
                 if class_ is not None:
                     if class_ not in CLASS_NUM_TO_NAME_MAP:
@@ -324,6 +397,9 @@ class Asn1Value(object):
                 if tag is not None:
                     self.tag = tag
 
+            if no_explicit:
+                self.explicit = None
+
             if contents is not None:
                 self.contents = contents
 
@@ -337,7 +413,7 @@ class Asn1Value(object):
 
     def __str__(self):
         """
-        Since str is differnt in Python 2 and 3, this calls the appropriate
+        Since str is different in Python 2 and 3, this calls the appropriate
         method, __unicode__() or __bytes__()
 
         :return:
@@ -389,11 +465,10 @@ class Asn1Value(object):
         """
 
         new_obj = self.__class__()
-        new_obj.tag_type = self.tag_type
         new_obj.class_ = self.class_
         new_obj.tag = self.tag
-        new_obj.explicit_class = self.explicit_class
-        new_obj.explicit_tag = self.explicit_tag
+        new_obj.implicit = self.implicit
+        new_obj.explicit = self.explicit
         return new_obj
 
     def __copy__(self):
@@ -434,21 +509,25 @@ class Asn1Value(object):
 
         return copy.deepcopy(self)
 
-    def retag(self, tag_type, tag):
+    def retag(self, tagging, tag=None):
         """
         Copies the object, applying a new tagging to it
 
-        :param tag_type:
-            A unicode string of "implicit" or "explicit"
+        :param tagging:
+            A dict containing the keys "explicit" and "implicit". Legacy
+            API allows a unicode string of "implicit" or "explicit".
 
         :param tag:
-            A integer tag number
+            A integer tag number. Only used when tagging is a unicode string.
 
         :return:
             An Asn1Value object
         """
 
-        new_obj = self.__class__(tag_type=tag_type, tag=tag)
+        # This is required to preserve the old API
+        if not isinstance(tagging, dict):
+            tagging = {tagging: tag}
+        new_obj = self.__class__(explicit=tagging.get('explicit'), implicit=tagging.get('implicit'))
         new_obj._copy(self, copy.deepcopy)
         return new_obj
 
@@ -495,8 +574,8 @@ class Asn1Value(object):
 
         prefix = '  ' * nest_level
 
-        # This interacts with Any and moves the tag, tag_type, _header, contents, _footer
-        # to the parsed value so duplicate data isn't present
+        # This interacts with Any and moves the tag, implicit, explicit, _header,
+        # contents, _footer to the parsed value so duplicate data isn't present
         has_parsed = hasattr(self, 'parsed')
 
         _basic_debug(prefix, self)
@@ -529,23 +608,15 @@ class Asn1Value(object):
                 self.method = 0
 
             header = _dump_header(self.class_, self.method, self.tag, self.contents)
-            trailer = b''
-
-            if self.tag_type == 'explicit':
-                container = Asn1Value()
-                container.method = 1
-                container.class_ = self.explicit_class
-                container.tag = self.explicit_tag
-                container.contents = header + self.contents + trailer
-                # Force the container to generate the header and footer
-                container.dump()
-                header = container._header + header
-                trailer += container._trailer
+
+            if self.explicit is not None:
+                for class_, tag in self.explicit:
+                    header = _dump_header(class_, 1, tag, header + self.contents) + header
 
             self._header = header
-            self._trailer = trailer
+            self._trailer = b''
 
-        return self._header + contents + self._trailer
+        return self._header + contents
 
 
 class ValueMap():
@@ -607,10 +678,9 @@ class Castable(object):
             ))
 
         new_obj = other_class()
-        new_obj.tag_type = self.tag_type
         new_obj.class_ = self.class_
-        new_obj.explicit_class = self.explicit_class
-        new_obj.explicit_tag = self.explicit_tag
+        new_obj.implicit = self.implicit
+        new_obj.explicit = self.explicit
         new_obj._header = self._header
         new_obj.contents = self.contents
         new_obj._trailer = self._trailer
@@ -627,7 +697,7 @@ class Constructable(object):
     """
 
     # Instance attribute indicating if an object was indefinite
-    # length when parsed  affects parsing and dumping
+    # length when parsed - affects parsing and dumping
     _indefinite = False
 
     # Class attribute that indicates the offset into self.contents
@@ -774,7 +844,7 @@ class Any(Asn1Value):
                 if not isinstance(value, Asn1Value):
                     raise TypeError(unwrap(
                         '''
-                        value must be an instance of Ans1Value, not %s
+                        value must be an instance of Asn1Value, not %s
                         ''',
                         type_name(value)
                     ))
@@ -835,11 +905,13 @@ class Any(Asn1Value):
 
         if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
             try:
-                passed_params = spec_params
-                if self.tag_type == 'explicit':
-                    passed_params = {} if not spec_params else spec_params.copy()
-                    passed_params['tag_type'] = self.tag_type
-                    passed_params['tag'] = self.explicit_tag
+                passed_params = spec_params or {}
+                _tag_type_to_explicit_implicit(passed_params)
+                if self.explicit is not None:
+                    if 'explicit' in passed_params:
+                        passed_params['explicit'] = self.explicit + passed_params['explicit']
+                    else:
+                        passed_params['explicit'] = self.explicit
                 contents = self._header + self.contents + self._trailer
                 parsed_value, _ = _parse_build(
                     contents,
@@ -850,8 +922,9 @@ class Any(Asn1Value):
 
                 # Once we've parsed the Any value, clear any attributes from this object
                 # since they are now duplicate
-                self.tag_type = None
                 self.tag = None
+                self.explicit = None
+                self.implicit = False
                 self._header = b''
                 self.contents = contents
                 self._trailer = b''
@@ -917,7 +990,7 @@ class Choice(Asn1Value):
     #
     # Option 2, same as Option 1, but with a dict of class params
     #
-    # ("name", Asn1ValueClass, {'tag_type': 'explicit', 'tag': 5})
+    # ("name", Asn1ValueClass, {'explicit': 5})
     _alternatives = None
 
     # A dict that maps tuples of (class_, tag) to an index in _alternatives
@@ -964,7 +1037,7 @@ class Choice(Asn1Value):
             cls._id_map[id_] = index
             cls._name_map[info[0]] = index
 
-    def __init__(self, name=None, value=None, tag_type=None, **kwargs):
+    def __init__(self, name=None, value=None, **kwargs):
         """
         Checks to ensure implicit tagging is not being used since it is
         incompatible with Choice, then forwards on to Asn1Value.__init__()
@@ -978,18 +1051,16 @@ class Choice(Asn1Value):
         :param value:
             The alternative value to set - used with name
 
-        :param tag_type:
-            The tag_type of the value - None, "implicit" or "explicit"
-
         :raises:
-            ValueError - when tag_type is "implicit"
+            ValueError - when implicit param is passed (or legacy tag_type param is "implicit")
         """
 
-        kwargs['tag_type'] = tag_type
+        _tag_type_to_explicit_implicit(kwargs)
+
         Asn1Value.__init__(self, **kwargs)
 
         try:
-            if tag_type == 'implicit':
+            if kwargs.get('implicit') is not None:
                 raise ValueError(unwrap(
                     '''
                     The Choice type can not be implicitly tagged even if in an
@@ -1119,8 +1190,8 @@ class Choice(Asn1Value):
 
         id_ = (class_, tag)
 
-        if self.tag_type == 'explicit':
-            if (self.explicit_class, self.explicit_tag) != id_:
+        if self.explicit is not None:
+            if self.explicit[-1] != id_:
                 raise ValueError(unwrap(
                     '''
                     %s was explicitly tagged, but the value provided does not
@@ -1202,10 +1273,10 @@ class Choice(Asn1Value):
 
         self.contents = self.chosen.dump(force=force)
         if self._header is None or force:
-            if self.tag_type == 'explicit':
-                self._header = _dump_header(self.explicit_class, 1, self.explicit_tag, self.contents)
-            else:
-                self._header = b''
+            self._header = b''
+            if self.explicit is not None:
+                for class_, tag in self.explicit:
+                    self._header = _dump_header(class_, 1, tag, self._header + self.contents) + self._header
         return self._header + self.contents
 
 
@@ -1287,7 +1358,7 @@ class Concat(object):
 
     def __str__(self):
         """
-        Since str is differnt in Python 2 and 3, this calls the appropriate
+        Since str is different in Python 2 and 3, this calls the appropriate
         method, __unicode__() or __bytes__()
 
         :return:
@@ -1615,7 +1686,7 @@ class Primitive(Asn1Value):
 
         # When tagging is going on, do the extra work of constructing new
         # objects to see if the dumped representation are the same
-        if self.tag_type is not None or other.tag_type is not None:
+        if self.implicit or self.explicit or other.implicit or other.explicit:
             return self.untag().dump() == other.untag().dump()
 
         return self.dump() == other.dump()
@@ -2256,7 +2327,7 @@ class IntegerBitString(Constructable, Castable, Primitive):
         Allows reconstructing indefinite length values
 
         :return:
-            A unicode string of bits – 1s and 0s
+            A unicode string of bits - 1s and 0s
         """
 
         extra_bits = int_from_bytes(self.contents[0:1])
@@ -3005,7 +3076,7 @@ class Sequence(Asn1Value):
     #
     # Option 2, same as Option 1, but with a dict of class params
     #
-    # ("name", Asn1ValueClass, {'tag_type': 'explicit', 'tag': 5})
+    # ("name", Asn1ValueClass, {'explicit': 5})
     _fields = []
 
     # A dict with keys being the name of a field and the value being a unicode
@@ -3389,10 +3460,10 @@ class Sequence(Asn1Value):
         :return:
             A tuple containing the following elements:
              - unicode string of the field name
-             - Ans1Value class of the field spec
+             - Asn1Value class of the field spec
              - Asn1Value class of the value spec
              - None or dict of params to pass to the field spec
-             - None or Asn1Value class indicating the value spec was derived fomr an OID or a spec callback
+             - None or Asn1Value class indicating the value spec was derived from an OID or a spec callback
         """
 
         name, field_spec, field_params = self._fields[index]
@@ -3460,7 +3531,7 @@ class Sequence(Asn1Value):
                 raise ValueError(unwrap(
                     '''
                     Can not set a native python value to %s, which has the
-                    choice type of %s – value must be an instance of Asn1Value
+                    choice type of %s - value must be an instance of Asn1Value
                     ''',
                     field_name,
                     type_name(value_spec)
@@ -3638,8 +3709,8 @@ class Sequence(Asn1Value):
         """
         Determines the spec to use for the field specified. Depending on how
         the spec is determined (_oid_pair or _spec_callbacks), it may be
-        necessary to set preceeding field values before calling this. Usually
-        specs, if dynamic, are controlled by a preceeding ObjectIdentifier
+        necessary to set preceding field values before calling this. Usually
+        specs, if dynamic, are controlled by a preceding ObjectIdentifier
         field.
 
         :param field_name:
@@ -3758,6 +3829,19 @@ class Sequence(Asn1Value):
         if force:
             self._set_contents(force=force)
 
+        if self._fields and self.children is not None:
+            for index, (field_name, _, params) in enumerate(self._fields):
+                if self.children[index] is not VOID:
+                    continue
+                if 'default' in params or 'optional' in params:
+                    continue
+                raise ValueError(unwrap(
+                    '''
+                    Field "%s" is missing from structure
+                    ''',
+                    field_name
+                ))
+
         return Asn1Value.dump(self)
 
 
@@ -3900,7 +3984,7 @@ class SequenceOf(Asn1Value):
                 raise ValueError(unwrap(
                     '''
                     Can not set a native python value to %s where the
-                    _child_spec is Any – value must be an instance of Asn1Value
+                    _child_spec is Any - value must be an instance of Asn1Value
                     ''',
                     type_name(self)
                 ))
@@ -3910,7 +3994,7 @@ class SequenceOf(Asn1Value):
                 raise ValueError(unwrap(
                     '''
                     Can not set a native python value to %s where the
-                    _child_spec is the choice type %s – value must be an
+                    _child_spec is the choice type %s - value must be an
                     instance of Asn1Value
                     ''',
                     type_name(self),
@@ -3927,13 +4011,10 @@ class SequenceOf(Asn1Value):
             return self._child_spec(value=value)
 
         params = {}
-        if self._child_spec.tag_type is not None:
-            params['tag_type'] = self._child_spec.tag_type
-            if params['tag_type'] == 'explicit':
-                params['tag'] = self._child_spec.explicit_tag
-            else:
-                params['tag'] = self._child_spec.tag
-
+        if self._child_spec.explicit:
+            params['explicit'] = self._child_spec.explicit
+        if self._child_spec.implicit:
+            params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
         return _fix_tagging(new_value, params)
 
     def __len__(self):
@@ -4738,19 +4819,20 @@ def _basic_debug(prefix, self):
         method_name = METHOD_NUM_TO_NAME_MAP.get(self.method)
         class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_)
 
-    if self.tag_type == 'explicit':
-        print(
-            '%s    %s tag %s (explicitly tagged)' %
-            (
-                prefix,
-                CLASS_NUM_TO_NAME_MAP.get(self.explicit_class),
-                self.explicit_tag
+    if self.explicit is not None:
+        for class_, tag in self.explicit:
+            print(
+                '%s    %s tag %s (explicitly tagged)' %
+                (
+                    prefix,
+                    CLASS_NUM_TO_NAME_MAP.get(class_),
+                    tag
+                )
             )
-        )
         if has_header:
             print('%s      %s %s %s' % (prefix, method_name, class_name, self.tag))
 
-    elif self.tag_type == 'implicit':
+    elif self.implicit:
         if has_header:
             print('%s    %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag))
 
@@ -4760,6 +4842,25 @@ def _basic_debug(prefix, self):
     print('%s  Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8')))
 
 
+def _tag_type_to_explicit_implicit(params):
+    """
+    Converts old-style "tag_type" and "tag" params to "explicit" and "implicit"
+
+    :param params:
+        A dict of parameters to convert from tag_type/tag to explicit/implicit
+    """
+
+    if 'tag_type' in params:
+        if params['tag_type'] == 'explicit':
+            params['explicit'] = (params.get('class', 2), params['tag'])
+        elif params['tag_type'] == 'implicit':
+            params['implicit'] = (params.get('class', 2), params['tag'])
+        del params['tag_type']
+        del params['tag']
+        if 'class' in params:
+            del params['class']
+
+
 def _fix_tagging(value, params):
     """
     Checks if a value is properly tagged based on the spec, and re/untags as
@@ -4775,26 +4876,28 @@ def _fix_tagging(value, params):
         An Asn1Value that is properly tagged
     """
 
-    if 'tag_type' in params:
-        required_tag_type = params['tag_type']
-        retag = False
+    _tag_type_to_explicit_implicit(params)
 
-        if required_tag_type != value.tag_type:
+    retag = False
+    if 'implicit' not in params:
+        if value.implicit is not False:
             retag = True
-
-        elif required_tag_type == 'explicit' and value.explicit_tag != params['tag']:
+    else:
+        if isinstance(params['implicit'], tuple):
+            class_, tag = params['implicit']
+        else:
+            tag = params['implicit']
+            class_ = 'context'
+        if value.implicit is False:
             retag = True
-
-        elif required_tag_type == 'implicit' and value.tag != params['tag']:
+        elif value.class_ != CLASS_NAME_TO_NUM_MAP[class_] or value.tag != tag:
             retag = True
 
-        if retag:
-            return value.retag(params['tag_type'], params['tag'])
-        return value
-
-    if value.tag_type:
-        return value.untag()
+    if params.get('explicit') != value.explicit:
+        retag = True
 
+    if retag:
+        return value.retag(params)
     return value
 
 
@@ -4820,9 +4923,22 @@ def _build_id_tuple(params, spec):
     required_class = spec.class_
     required_tag = spec.tag
 
-    tag_type = params.get('tag_type', spec.tag_type)
-    if tag_type is not None:
-        required_class = 2
+    _tag_type_to_explicit_implicit(params)
+
+    if 'explicit' in params:
+        if isinstance(params['explicit'], tuple):
+            required_class, required_tag = params['explicit']
+        else:
+            required_class = 2
+            required_tag = params['explicit']
+    elif 'implicit' in params:
+        if isinstance(params['implicit'], tuple):
+            required_class, required_tag = params['implicit']
+        else:
+            required_class = 2
+            required_tag = params['implicit']
+    if required_class is not None and not isinstance(required_class, int_types):
+        required_class = CLASS_NAME_TO_NUM_MAP[required_class]
 
     required_class = params.get('class_', required_class)
     required_tag = params.get('tag', required_tag)
@@ -4903,6 +5019,9 @@ def _build(class_, method, tag, header, contents, trailer, spec=None, spec_param
         An object of the type spec, or if not specified, a child of Asn1Value
     """
 
+    if spec_params is not None:
+        _tag_type_to_explicit_implicit(spec_params)
+
     if header is None:
         return VOID
 
@@ -4910,116 +5029,140 @@ def _build(class_, method, tag, header, contents, trailer, spec=None, spec_param
 
     # If an explicit specification was passed in, make sure it matches
     if spec is not None:
-        if spec_params:
-            value = spec(contents=contents, **spec_params)
-        else:
-            value = spec(contents=contents)
-
-        if spec is Any:
-            pass
-
-        elif value.tag_type == 'explicit':
-            if class_ != value.explicit_class:
-                raise ValueError(unwrap(
-                    '''
-                    Error parsing %s - explicitly-tagged class should have been
-                    %s, but %s was found
-                    ''',
-                    type_name(value),
-                    CLASS_NUM_TO_NAME_MAP.get(value.explicit_class),
-                    CLASS_NUM_TO_NAME_MAP.get(class_, class_)
-                ))
-            if method != 1:
-                raise ValueError(unwrap(
-                    '''
-                    Error parsing %s - explicitly-tagged method should have
-                    been %s, but %s was found
-                    ''',
-                    type_name(value),
-                    METHOD_NUM_TO_NAME_MAP.get(1),
-                    METHOD_NUM_TO_NAME_MAP.get(method, method)
-                ))
-            if tag != value.explicit_tag:
-                raise ValueError(unwrap(
-                    '''
-                    Error parsing %s - explicitly-tagged tag should have been
-                    %s, but %s was found
-                    ''',
-                    type_name(value),
-                    value.explicit_tag,
-                    tag
-                ))
-            original_value = value
-            info, _ = _parse(contents, len(contents))
-            value = _build(*info, spec=spec)
-            value._header = header + value._header
-            value._trailer += trailer or b''
-            value.tag_type = 'explicit'
-            value.explicit_class = original_value.explicit_class
-            value.explicit_tag = original_value.explicit_tag
+        # If there is explicit tagging and contents, we have to split
+        # the header and trailer off before we do the parsing
+        no_explicit = spec_params and 'no_explicit' in spec_params
+        if not no_explicit and (spec.explicit or (spec_params and 'explicit' in spec_params)):
+            if spec_params:
+                value = spec(**spec_params)
+            else:
+                value = spec()
+            original_explicit = value.explicit
+            explicit_info = reversed(original_explicit)
+            parsed_class = class_
+            parsed_method = method
+            parsed_tag = tag
+            to_parse = contents
+            explicit_header = header
+            explicit_trailer = trailer or b''
+            for expected_class, expected_tag in explicit_info:
+                if parsed_class != expected_class:
+                    raise ValueError(unwrap(
+                        '''
+                        Error parsing %s - explicitly-tagged class should have been
+                        %s, but %s was found
+                        ''',
+                        type_name(value),
+                        CLASS_NUM_TO_NAME_MAP.get(expected_class),
+                        CLASS_NUM_TO_NAME_MAP.get(parsed_class, parsed_class)
+                    ))
+                if parsed_method != 1:
+                    raise ValueError(unwrap(
+                        '''
+                        Error parsing %s - explicitly-tagged method should have
+                        been %s, but %s was found
+                        ''',
+                        type_name(value),
+                        METHOD_NUM_TO_NAME_MAP.get(1),
+                        METHOD_NUM_TO_NAME_MAP.get(parsed_method, parsed_method)
+                    ))
+                if parsed_tag != expected_tag:
+                    raise ValueError(unwrap(
+                        '''
+                        Error parsing %s - explicitly-tagged tag should have been
+                        %s, but %s was found
+                        ''',
+                        type_name(value),
+                        expected_tag,
+                        parsed_tag
+                    ))
+                info, _ = _parse(to_parse, len(to_parse))
+                parsed_class, parsed_method, parsed_tag, parsed_header, to_parse, parsed_trailer = info
+                explicit_header += parsed_header
+                explicit_trailer = parsed_trailer + explicit_trailer
+
+            value = _build(*info, spec=spec, spec_params={'no_explicit': True})
+            value._header = explicit_header
+            value._trailer = explicit_trailer
+            value.explicit = original_explicit
             header_set = True
-
-        elif isinstance(value, Choice):
-            value.validate(class_, tag, contents)
-            try:
-                # Force parsing the Choice now
-                value.contents = header + value.contents
-                header = b''
-                value.parse()
-            except (ValueError, TypeError) as e:
-                args = e.args[1:]
-                e.args = (e.args[0] + '\n    while parsing %s' % type_name(value),) + args
-                raise e
-
         else:
-            if class_ != value.class_:
-                raise ValueError(unwrap(
-                    '''
-                    Error parsing %s - class should have been %s, but %s was
-                    found
-                    ''',
-                    type_name(value),
-                    CLASS_NUM_TO_NAME_MAP.get(value.class_),
-                    CLASS_NUM_TO_NAME_MAP.get(class_, class_)
-                ))
-            if method != value.method:
-                # Allow parsing a primitive method as constructed if the value
-                # is indefinite length. This is to allow parsing BER.
-                ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
-                if not ber_indef or not isinstance(value, Constructable):
+            if spec_params:
+                value = spec(contents=contents, **spec_params)
+            else:
+                value = spec(contents=contents)
+
+            if spec is Any:
+                pass
+
+            elif isinstance(value, Choice):
+                value.validate(class_, tag, contents)
+                try:
+                    # Force parsing the Choice now
+                    value.contents = header + value.contents
+                    header = b''
+                    value.parse()
+                except (ValueError, TypeError) as e:
+                    args = e.args[1:]
+                    e.args = (e.args[0] + '\n    while parsing %s' % type_name(value),) + args
+                    raise e
+
+            else:
+                if class_ != value.class_:
                     raise ValueError(unwrap(
                         '''
-                        Error parsing %s - method should have been %s, but %s was found
+                        Error parsing %s - class should have been %s, but %s was
+                        found
                         ''',
                         type_name(value),
-                        METHOD_NUM_TO_NAME_MAP.get(value.method),
-                        METHOD_NUM_TO_NAME_MAP.get(method, method)
+                        CLASS_NUM_TO_NAME_MAP.get(value.class_),
+                        CLASS_NUM_TO_NAME_MAP.get(class_, class_)
+                    ))
+                if method != value.method:
+                    # Allow parsing a primitive method as constructed if the value
+                    # is indefinite length. This is to allow parsing BER.
+                    ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00'
+                    if not ber_indef or not isinstance(value, Constructable):
+                        raise ValueError(unwrap(
+                            '''
+                            Error parsing %s - method should have been %s, but %s was found
+                            ''',
+                            type_name(value),
+                            METHOD_NUM_TO_NAME_MAP.get(value.method),
+                            METHOD_NUM_TO_NAME_MAP.get(method, method)
+                        ))
+                    else:
+                        value.method = method
+                        value._indefinite = True
+                if tag != value.tag and tag != value._bad_tag:
+                    raise ValueError(unwrap(
+                        '''
+                        Error parsing %s - tag should have been %s, but %s was found
+                        ''',
+                        type_name(value),
+                        value.tag,
+                        tag
                     ))
-                else:
-                    value.method = method
-                    value._indefinite = True
-            if tag != value.tag and tag != value._bad_tag:
-                raise ValueError(unwrap(
-                    '''
-                    Error parsing %s - tag should have been %s, but %s was found
-                    ''',
-                    type_name(value),
-                    value.tag,
-                    tag
-                ))
 
     # For explicitly tagged, un-speced parsings, we use a generic container
     # since we will be parsing the contents and discarding the outer object
     # anyway a little further on
-    elif spec_params and 'tag_type' in spec_params and spec_params['tag_type'] == 'explicit':
+    elif spec_params and 'explicit' in spec_params:
         original_value = Asn1Value(contents=contents, **spec_params)
-        info, _ = _parse(contents, len(contents))
-        value = _build(*info, spec=spec)
+        original_explicit = original_value.explicit
+
+        to_parse = contents
+        explicit_header = header
+        explicit_trailer = trailer or b''
+        for expected_class, expected_tag in reversed(original_explicit):
+            info, _ = _parse(to_parse, len(to_parse))
+            _, _, _, parsed_header, to_parse, parsed_trailer = info
+            explicit_header += parsed_header
+            explicit_trailer = parsed_trailer + explicit_trailer
+        value = _build(*info, spec=spec, spec_params={'no_explicit': True})
         value._header = header + value._header
         value._trailer += trailer or b''
-        value.tag_type = 'explicit'
-        value.explicit_class = original_value.explicit_class
-        value.explicit_tag = original_value.explicit_tag
+        value.explicit = original_explicit
         header_set = True
 
     # If no spec was specified, allow anything and just process what
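
The core.py hunks above replace the legacy two-key tagging parameters ('tag_type' plus 'tag') with single 'explicit'/'implicit' keys, and _tag_type_to_explicit_implicit() converts the old spelling on the fly; the module diffs that follow simply move the vendored specs to the new spelling. A minimal sketch of the equivalence, using hypothetical OldStyle/NewStyle classes and assuming 0.24.0 behaves as these hunks describe:

    from asn1crypto.core import Integer, Sequence

    class OldStyle(Sequence):
        # Legacy spelling, still accepted via _tag_type_to_explicit_implicit()
        _fields = [
            ('counter', Integer, {'tag_type': 'explicit', 'tag': 0}),
        ]

    class NewStyle(Sequence):
        # Spelling used by the 0.24.0 specs in the files below
        _fields = [
            ('counter', Integer, {'explicit': 0}),
        ]

    # Both specs should serialize the same value to the same DER bytes
    assert OldStyle({'counter': 5}).dump() == NewStyle({'counter': 5}).dump()
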

+ 8 - 8
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/crl.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/crl.py

@@ -50,12 +50,12 @@ class Version(Integer):
 
 class IssuingDistributionPoint(Sequence):
     _fields = [
-        ('distribution_point', DistributionPointName, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('only_contains_user_certs', Boolean, {'tag_type': 'implicit', 'tag': 1, 'default': False}),
-        ('only_contains_ca_certs', Boolean, {'tag_type': 'implicit', 'tag': 2, 'default': False}),
-        ('only_some_reasons', ReasonFlags, {'tag_type': 'implicit', 'tag': 3, 'optional': True}),
-        ('indirect_crl', Boolean, {'tag_type': 'implicit', 'tag': 4, 'default': False}),
-        ('only_contains_attribute_certs', Boolean, {'tag_type': 'implicit', 'tag': 5, 'default': False}),
+        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
+        ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}),
+        ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}),
+        ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}),
+        ('indirect_crl', Boolean, {'implicit': 4, 'default': False}),
+        ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}),
     ]
 
 
@@ -278,9 +278,9 @@ class TbsCertList(Sequence):
         ('signature', SignedDigestAlgorithm),
         ('issuer', Name),
         ('this_update', Time),
-        ('next_update', Time),
+        ('next_update', Time, {'optional': True}),
         ('revoked_certificates', RevokedCertificates, {'optional': True}),
-        ('crl_extensions', TBSCertListExtensions, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}),
     ]
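
Besides the tagging spelling change, next_update on TbsCertList is now optional. That matters together with the stricter Sequence.dump() added in the core.py hunks above: a required field that was never set now raises instead of silently producing an incomplete encoding. A minimal sketch with a hypothetical Example class, assuming that behaviour:

    from asn1crypto.core import Integer, Sequence

    class Example(Sequence):
        _fields = [
            ('required_field', Integer),
            ('optional_field', Integer, {'optional': True}),
        ]

    try:
        Example({'optional_field': 1}).dump()
    except ValueError as e:
        print(e)  # e.g. Field "required_field" is missing from structure
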
 
 

+ 1 - 1
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/csr.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/csr.py

@@ -84,7 +84,7 @@ class CertificationRequestInfo(Sequence):
         ('version', Version),
         ('subject', Name),
         ('subject_pk_info', PublicKeyInfo),
-        ('attributes', CRIAttributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
     ]
 
 

+ 10 - 6
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/keys.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/keys.py

@@ -30,7 +30,7 @@ from ._elliptic_curve import (
 )
 from ._errors import unwrap
 from ._types import type_name, str_cls, byte_cls
-from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm
+from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, RSAESOAEPParams
 from .core import (
     Any,
     Asn1Value,
@@ -412,8 +412,8 @@ class ECPrivateKey(Sequence):
     _fields = [
         ('version', ECPrivateKeyVersion),
         ('private_key', IntegerOctetString),
-        ('parameters', ECDomainParameters, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('public_key', ECPointBitString, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('parameters', ECDomainParameters, {'explicit': 0, 'optional': True}),
+        ('public_key', ECPointBitString, {'explicit': 1, 'optional': True}),
     ]
 
 
@@ -497,7 +497,7 @@ class PrivateKeyInfo(Sequence):
         ('version', Integer),
         ('private_key_algorithm', PrivateKeyAlgorithm),
         ('private_key', ParsableOctetString),
-        ('attributes', Attributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('attributes', Attributes, {'implicit': 0, 'optional': True}),
     ]
 
     def _private_key_spec(self):
@@ -828,7 +828,7 @@ class PrivateKeyInfo(Sequence):
         Creates a fingerprint that can be compared with a public key to see if
         the two form a pair.
 
-        This fingerprint is not compatiable with fingerprints generated by any
+        This fingerprint is not compatible with fingerprints generated by any
         other software.
 
         :return:
@@ -930,6 +930,8 @@ class PublicKeyAlgorithmId(ObjectIdentifier):
     _map = {
         # https://tools.ietf.org/html/rfc3279#page-19
         '1.2.840.113549.1.1.1': 'rsa',
+        # https://tools.ietf.org/html/rfc3447#page-47
+        '1.2.840.113549.1.1.7': 'rsaes_oaep',
         # https://tools.ietf.org/html/rfc3279#page-18
         '1.2.840.10040.4.1': 'dsa',
         # https://tools.ietf.org/html/rfc3279#page-13
@@ -955,6 +957,7 @@ class PublicKeyAlgorithm(_ForceNullParameters, Sequence):
         'dsa': DSAParams,
         'ec': ECDomainParameters,
         'dh': DomainParameters,
+        'rsaes_oaep': RSAESOAEPParams,
     }
 
 
@@ -973,6 +976,7 @@ class PublicKeyInfo(Sequence):
         algorithm = self['algorithm']['algorithm'].native
         return {
             'rsa': RSAPublicKey,
+            'rsaes_oaep': RSAPublicKey,
             'dsa': Integer,
             # We override the field spec with ECPoint so that users can easily
             # decompose the byte string into the constituent X and Y coords
@@ -1191,7 +1195,7 @@ class PublicKeyInfo(Sequence):
         Creates a fingerprint that can be compared with a private key to see if
         the two form a pair.
 
-        This fingerprint is not compatiable with fingerprints generated by any
+        This fingerprint is not compatible with fingerprints generated by any
         other software.
 
         :return:
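
The keys.py hunks above register the id-RSAES-OAEP OID and route it to the existing RSAPublicKey and RSAESOAEPParams specs. A small sketch of the new mapping; map() and unmap() are existing helpers on ObjectIdentifier subclasses:

    from asn1crypto.keys import PublicKeyAlgorithmId

    print(PublicKeyAlgorithmId.map('1.2.840.113549.1.1.7'))   # rsaes_oaep
    print(PublicKeyAlgorithmId.unmap('rsaes_oaep'))           # 1.2.840.113549.1.1.7
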

+ 24 - 21
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/ocsp.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/ocsp.py

@@ -84,7 +84,7 @@ class RequestExtensions(SequenceOf):
 class Request(Sequence):
     _fields = [
         ('req_cert', CertId),
-        ('single_request_extensions', RequestExtensions, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}),
     ]
 
     _processed_extensions = False
@@ -192,10 +192,10 @@ class TBSRequestExtensions(SequenceOf):
 
 class TBSRequest(Sequence):
     _fields = [
-        ('version', Version, {'tag_type': 'explicit', 'tag': 0, 'default': 'v1'}),
-        ('requestor_name', GeneralName, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('version', Version, {'explicit': 0, 'default': 'v1'}),
+        ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}),
         ('request_list', Requests),
-        ('request_extensions', TBSRequestExtensions, {'tag_type': 'explicit', 'tag': 2, 'optional': True}),
+        ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}),
     ]
 
 
@@ -207,14 +207,14 @@ class Signature(Sequence):
     _fields = [
         ('signature_algorithm', SignedDigestAlgorithm),
         ('signature', OctetBitString),
-        ('certs', Certificates, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('certs', Certificates, {'explicit': 0, 'optional': True}),
     ]
 
 
 class OCSPRequest(Sequence):
     _fields = [
         ('tbs_request', TBSRequest),
-        ('optional_signature', Signature, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('optional_signature', Signature, {'explicit': 0, 'optional': True}),
     ]
 
     _processed_extensions = False
@@ -314,31 +314,31 @@ class OCSPResponseStatus(Enumerated):
 
 class ResponderId(Choice):
     _alternatives = [
-        ('by_name', Name, {'tag_type': 'explicit', 'tag': 1}),
-        ('by_key', OctetString, {'tag_type': 'explicit', 'tag': 2}),
+        ('by_name', Name, {'explicit': 1}),
+        ('by_key', OctetString, {'explicit': 2}),
     ]
 
 
 class RevokedInfo(Sequence):
     _fields = [
         ('revocation_time', GeneralizedTime),
-        ('revocation_reason', CRLReason, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}),
     ]
 
 
 class CertStatus(Choice):
     _alternatives = [
-        ('good', Null, {'tag_type': 'implicit', 'tag': 0}),
-        ('revoked', RevokedInfo, {'tag_type': 'implicit', 'tag': 1}),
-        ('unknown', Null, {'tag_type': 'implicit', 'tag': 2}),
+        ('good', Null, {'implicit': 0}),
+        ('revoked', RevokedInfo, {'implicit': 1}),
+        ('unknown', Null, {'implicit': 2}),
     ]
 
 
 class CrlId(Sequence):
     _fields = [
-        ('crl_url', IA5String, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('crl_num', Integer, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
-        ('crl_time', GeneralizedTime, {'tag_type': 'explicit', 'tag': 2, 'optional': True}),
+        ('crl_url', IA5String, {'explicit': 0, 'optional': True}),
+        ('crl_num', Integer, {'explicit': 1, 'optional': True}),
+        ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}),
     ]
 
 
@@ -351,6 +351,8 @@ class SingleResponseExtensionId(ObjectIdentifier):
         '2.5.29.21': 'crl_reason',
         '2.5.29.24': 'invalidity_date',
         '2.5.29.29': 'certificate_issuer',
+        # https://tools.ietf.org/html/rfc6962.html#page-13
+        '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list',
     }
 
 
@@ -368,6 +370,7 @@ class SingleResponseExtension(Sequence):
         'crl_reason': CRLReason,
         'invalidity_date': GeneralizedTime,
         'certificate_issuer': GeneralNames,
+        'signed_certificate_timestamp_list': OctetString,
     }
 
 
@@ -380,8 +383,8 @@ class SingleResponse(Sequence):
         ('cert_id', CertId),
         ('cert_status', CertStatus),
         ('this_update', GeneralizedTime),
-        ('next_update', GeneralizedTime, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('single_extensions', SingleResponseExtensions, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}),
+        ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}),
     ]
 
     _processed_extensions = False
@@ -526,11 +529,11 @@ class ResponseDataExtensions(SequenceOf):
 
 class ResponseData(Sequence):
     _fields = [
-        ('version', Version, {'tag_type': 'explicit', 'tag': 0, 'default': 'v1'}),
+        ('version', Version, {'explicit': 0, 'default': 'v1'}),
         ('responder_id', ResponderId),
         ('produced_at', GeneralizedTime),
         ('responses', Responses),
-        ('response_extensions', ResponseDataExtensions, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
+        ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}),
     ]
 
 
@@ -539,7 +542,7 @@ class BasicOCSPResponse(Sequence):
         ('tbs_response_data', ResponseData),
         ('signature_algorithm', SignedDigestAlgorithm),
         ('signature', OctetBitString),
-        ('certs', Certificates, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('certs', Certificates, {'explicit': 0, 'optional': True}),
     ]
 
 
@@ -558,7 +561,7 @@ class ResponseBytes(Sequence):
 class OCSPResponse(Sequence):
     _fields = [
         ('response_status', OCSPResponseStatus),
-        ('response_bytes', ResponseBytes, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
+        ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}),
     ]
 
     _processed_extensions = False
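
The ocsp.py hunks above convert the specs to the new tagging spelling and register the RFC 6962 signed_certificate_timestamp_list extension. A minimal sketch of one of the converted Choice alternatives, with made-up key bytes and assuming the name/value constructor shown in the core.py Choice hunks:

    from asn1crypto.ocsp import ResponderId

    # 'by_key' maps to an OctetString carried under a [2] EXPLICIT tag
    responder = ResponderId(name='by_key', value=b'\x01\x02\x03\x04')
    print(responder.name)    # by_key
    print(responder.dump())  # the OctetString wrapped in the explicit tag
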

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/parser.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/parser.py


+ 3 - 3
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/pdf.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pdf.py

@@ -63,9 +63,9 @@ class SequenceOfOtherRevInfo(SequenceOf):
 
 class RevocationInfoArchival(Sequence):
     _fields = [
-        ('crl', SequenceOfCertificateList, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('ocsp', SequenceOfOCSPResponse, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
-        ('other_rev_info', SequenceOfOtherRevInfo, {'tag_type': 'explicit', 'tag': 2, 'optional': True}),
+        ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}),
+        ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}),
+        ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}),
     ]
 
 

+ 8 - 8
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/pem.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pem.py

@@ -16,7 +16,7 @@ import re
 import sys
 
 from ._errors import unwrap
-from ._types import type_name, str_cls, byte_cls
+from ._types import type_name as _type_name, str_cls, byte_cls
 
 if sys.version_info < (3,):
     from cStringIO import StringIO as BytesIO
@@ -41,7 +41,7 @@ def detect(byte_string):
             '''
             byte_string must be a byte string, not %s
             ''',
-            type_name(byte_string)
+            _type_name(byte_string)
         ))
 
     return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1
@@ -51,15 +51,15 @@ def armor(type_name, der_bytes, headers=None):
     """
     Armors a DER-encoded byte string in PEM
 
-    :param der_bytes:
-        A byte string to be armored
-
     :param type_name:
         A unicode string that will be capitalized and placed in the header
         and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. This
         will appear as "-----BEGIN CERTIFICATE-----" and
         "-----END CERTIFICATE-----".
 
+    :param der_bytes:
+        A byte string to be armored
+
     :param headers:
         An OrderedDict of the header lines to write after the BEGIN line
 
@@ -71,7 +71,7 @@ def armor(type_name, der_bytes, headers=None):
         raise TypeError(unwrap(
             '''
             der_bytes must be a byte string, not %s
-            ''' % type_name(der_bytes)
+            ''' % _type_name(der_bytes)
         ))
 
     if not isinstance(type_name, str_cls):
@@ -79,7 +79,7 @@ def armor(type_name, der_bytes, headers=None):
             '''
             type_name must be a unicode string, not %s
             ''',
-            type_name(type_name)
+            _type_name(type_name)
         ))
 
     type_name = type_name.upper().encode('ascii')
@@ -132,7 +132,7 @@ def _unarmor(pem_bytes):
             '''
             pem_bytes must be a byte string, not %s
             ''',
-            type_name(pem_bytes)
+            _type_name(pem_bytes)
         ))
 
     # Valid states include: "trash", "headers", "body"
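
The pem.py hunks above rename the imported type_name helper to _type_name so it is no longer shadowed by armor()'s own type_name parameter, and reorder the docstring to match the real signature. A minimal usage sketch of that signature; the DER bytes are just a stand-in value:

    from asn1crypto import pem

    der_bytes = b'\x30\x03\x02\x01\x05'  # a tiny DER SEQUENCE holding INTEGER 5
    pem_block = pem.armor('EXAMPLE DATA', der_bytes)
    print(pem_block.decode('ascii'))
    # -----BEGIN EXAMPLE DATA-----
    # ...
    # -----END EXAMPLE DATA-----
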

+ 4 - 4
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/pkcs12.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/pkcs12.py

@@ -145,7 +145,7 @@ class CertId(ObjectIdentifier):
 class CertBag(Sequence):
     _fields = [
         ('cert_id', CertId),
-        ('cert_value', ParsableOctetString, {'tag_type': 'explicit', 'tag': 0}),
+        ('cert_value', ParsableOctetString, {'explicit': 0}),
     ]
 
     _oid_pair = ('cert_id', 'cert_value')
@@ -157,14 +157,14 @@ class CertBag(Sequence):
 class CrlBag(Sequence):
     _fields = [
         ('crl_id', ObjectIdentifier),
-        ('crl_value', OctetString, {'tag_type': 'explicit', 'tag': 0}),
+        ('crl_value', OctetString, {'explicit': 0}),
     ]
 
 
 class SecretBag(Sequence):
     _fields = [
         ('secret_type_id', ObjectIdentifier),
-        ('secret_value', OctetString, {'tag_type': 'explicit', 'tag': 0}),
+        ('secret_value', OctetString, {'explicit': 0}),
     ]
 
 
@@ -175,7 +175,7 @@ class SafeContents(SequenceOf):
 class SafeBag(Sequence):
     _fields = [
         ('bag_id', BagId),
-        ('bag_value', Any, {'tag_type': 'explicit', 'tag': 0}),
+        ('bag_value', Any, {'explicit': 0}),
         ('bag_attributes', Attributes, {'optional': True}),
     ]
 

+ 13 - 13
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/tsp.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/tsp.py

@@ -74,8 +74,8 @@ class MessageImprint(Sequence):
 class Accuracy(Sequence):
     _fields = [
         ('seconds', Integer, {'optional': True}),
-        ('millis', Integer, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('micros', Integer, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('millis', Integer, {'implicit': 0, 'optional': True}),
+        ('micros', Integer, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -101,8 +101,8 @@ class TSTInfo(Sequence):
         ('accuracy', Accuracy, {'optional': True}),
         ('ordering', Boolean, {'default': False}),
         ('nonce', Integer, {'optional': True}),
-        ('tsa', GeneralName, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('extensions', Extensions, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('tsa', GeneralName, {'explicit': 0, 'optional': True}),
+        ('extensions', Extensions, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -113,7 +113,7 @@ class TimeStampReq(Sequence):
         ('req_policy', ObjectIdentifier, {'optional': True}),
         ('nonce', Integer, {'optional': True}),
         ('cert_req', Boolean, {'default': False}),
-        ('extensions', Extensions, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('extensions', Extensions, {'implicit': 0, 'optional': True}),
     ]
 
 
@@ -201,9 +201,9 @@ class PartialHashtrees(SequenceOf):
 
 class ArchiveTimeStamp(Sequence):
     _fields = [
-        ('digest_algorithm', DigestAlgorithm, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('attributes', Attributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('reduced_hashtree', PartialHashtrees, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}),
+        ('attributes', Attributes, {'implicit': 1, 'optional': True}),
+        ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}),
         ('time_stamp', ContentInfo),
     ]
 
@@ -216,8 +216,8 @@ class EvidenceRecord(Sequence):
     _fields = [
         ('version', Version),
         ('digest_algorithms', DigestAlgorithms),
-        ('crypto_infos', Attributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('encryption_info', EncryptionInfo, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}),
+        ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}),
         ('archive_time_stamp_sequence', ArchiveTimeStampSequence),
     ]
 
@@ -231,9 +231,9 @@ class OtherEvidence(Sequence):
 
 class Evidence(Choice):
     _alternatives = [
-        ('tst_evidence', TimeStampTokenEvidence, {'tag_type': 'implicit', 'tag': 0}),
-        ('ers_evidence', EvidenceRecord, {'tag_type': 'implicit', 'tag': 1}),
-        ('other_evidence', OtherEvidence, {'tag_type': 'implicit', 'tag': 2}),
+        ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}),
+        ('ers_evidence', EvidenceRecord, {'implicit': 1}),
+        ('other_evidence', OtherEvidence, {'implicit': 2}),
     ]
 
 

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/util.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/util.py


+ 2 - 2
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/version.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/version.py

@@ -2,5 +2,5 @@
 from __future__ import unicode_literals, division, absolute_import, print_function
 
 
-__version__ = '0.22.0'
-__version_info__ = (0, 22, 0)
+__version__ = '0.24.0'
+__version_info__ = (0, 24, 0)

+ 402 - 81
desktop/core/ext-py/asn1crypto-0.22.0/asn1crypto/x509.py → desktop/core/ext-py/asn1crypto-0.24.0/asn1crypto/x509.py

@@ -15,6 +15,7 @@ Other type classes are defined that help compose the types listed above.
 
 from __future__ import unicode_literals, division, absolute_import, print_function
 
+from contextlib import contextmanager
 from encodings import idna  # noqa
 import hashlib
 import re
@@ -27,7 +28,7 @@ from ._errors import unwrap
 from ._iri import iri_to_uri, uri_to_iri
 from ._ordereddict import OrderedDict
 from ._types import type_name, str_cls, bytes_to_list
-from .algos import AlgorithmIdentifier, SignedDigestAlgorithm
+from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm
 from .core import (
     Any,
     BitString,
@@ -35,6 +36,7 @@ from .core import (
     Boolean,
     Choice,
     Concat,
+    Enumerated,
     GeneralizedTime,
     GeneralString,
     IA5String,
@@ -443,14 +445,47 @@ class KeyUsage(BitString):
 
 class PrivateKeyUsagePeriod(Sequence):
     _fields = [
-        ('not_before', GeneralizedTime, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('not_after', GeneralizedTime, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('not_before', GeneralizedTime, {'implicit': 0, 'optional': True}),
+        ('not_after', GeneralizedTime, {'implicit': 1, 'optional': True}),
     ]
 
 
+class NotReallyTeletexString(TeletexString):
+    """
+    OpenSSL (and probably some other libraries) puts ISO-8859-1
+    into TeletexString instead of ITU T.61. We use Windows-1252 when
+    decoding since it is a superset of ISO-8859-1, and less likely to
+    cause encoding issues, but we stay strict with encoding to prevent
+    us from creating bad data.
+    """
+
+    _decoding_encoding = 'cp1252'
+
+    def __unicode__(self):
+        """
+        :return:
+            A unicode string
+        """
+
+        if self.contents is None:
+            return ''
+        if self._unicode is None:
+            self._unicode = self._merge_chunks().decode(self._decoding_encoding)
+        return self._unicode
+
+
+@contextmanager
+def strict_teletex():
+    try:
+        NotReallyTeletexString._decoding_encoding = 'teletex'
+        yield
+    finally:
+        NotReallyTeletexString._decoding_encoding = 'cp1252'
+
+
 class DirectoryString(Choice):
     _alternatives = [
-        ('teletex_string', TeletexString),
+        ('teletex_string', NotReallyTeletexString),
         ('printable_string', PrintableString),
         ('universal_string', UniversalString),
         ('utf8_string', UTF8String),
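
The hunk above swaps TeletexString for NotReallyTeletexString inside DirectoryString and adds the strict_teletex() context manager for callers whose data really is ITU T.61. A minimal usage sketch; the certificate path is hypothetical:

    from asn1crypto import x509

    with open('/path/to/certificate.der', 'rb') as f:  # hypothetical path
        der_bytes = f.read()

    with x509.strict_teletex():
        # Inside the context manager, TeletexString values are decoded with the
        # real teletex codec instead of the Windows-1252 default described above
        cert = x509.Certificate.load(der_bytes)
        print(cert.subject.native)
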
@@ -483,6 +518,13 @@ class NameType(ObjectIdentifier):
         '2.5.4.46': 'dn_qualifier',
         '2.5.4.65': 'pseudonym',
         '2.5.4.97': 'organization_identifier',
+        # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
+        '2.23.133.2.1': 'tpm_manufacturer',
+        '2.23.133.2.2': 'tpm_model',
+        '2.23.133.2.3': 'tpm_version',
+        '2.23.133.2.4': 'platform_manufacturer',
+        '2.23.133.2.5': 'platform_model',
+        '2.23.133.2.6': 'platform_version',
         # https://tools.ietf.org/html/rfc2985#page-26
         '1.2.840.113549.1.9.1': 'email_address',
         # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
@@ -525,6 +567,12 @@ class NameType(ObjectIdentifier):
         'domain_component',
         'name_distinguisher',
         'organization_identifier',
+        'tpm_manufacturer',
+        'tpm_model',
+        'tpm_version',
+        'platform_manufacturer',
+        'platform_model',
+        'platform_version',
     ]
 
     @classmethod
@@ -582,6 +630,12 @@ class NameType(ObjectIdentifier):
             'domain_component': 'Domain Component',
             'name_distinguisher': 'Name Distinguisher',
             'organization_identifier': 'Organization Identifier',
+            'tpm_manufacturer': 'TPM Manufacturer',
+            'tpm_model': 'TPM Model',
+            'tpm_version': 'TPM Version',
+            'platform_manufacturer': 'Platform Manufacturer',
+            'platform_model': 'Platform Model',
+            'platform_version': 'Platform Version',
         }.get(self.native, self.native)
 
 
@@ -622,6 +676,12 @@ class NameTypeAndValue(Sequence):
         'domain_component': DNSName,
         'name_distinguisher': DirectoryString,
         'organization_identifier': DirectoryString,
+        'tpm_manufacturer': UTF8String,
+        'tpm_model': UTF8String,
+        'tpm_version': UTF8String,
+        'platform_manufacturer': UTF8String,
+        'platform_model': UTF8String,
+        'platform_version': UTF8String,
     }
 
     _prepped = None
@@ -920,7 +980,7 @@ class Name(Choice):
 
         :param use_printable:
             A bool - if PrintableString should be used for encoding instead of
-            UTF8String. This is for backwards compatiblity with old software.
+            UTF8String. This is for backwards compatibility with old software.
 
         :return:
             An x509.Name object
@@ -1096,7 +1156,7 @@ class Name(Choice):
 class AnotherName(Sequence):
     _fields = [
         ('type_id', ObjectIdentifier),
-        ('value', Any, {'tag_type': 'explicit', 'tag': 0}),
+        ('value', Any, {'explicit': 0}),
     ]
 
 
@@ -1129,19 +1189,19 @@ class PrivateDomainName(Choice):
 
 class PersonalName(Set):
     _fields = [
-        ('surname', PrintableString, {'tag_type': 'implicit', 'tag': 0}),
-        ('given_name', PrintableString, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('initials', PrintableString, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
-        ('generation_qualifier', PrintableString, {'tag_type': 'implicit', 'tag': 3, 'optional': True}),
+        ('surname', PrintableString, {'implicit': 0}),
+        ('given_name', PrintableString, {'implicit': 1, 'optional': True}),
+        ('initials', PrintableString, {'implicit': 2, 'optional': True}),
+        ('generation_qualifier', PrintableString, {'implicit': 3, 'optional': True}),
     ]
 
 
 class TeletexPersonalName(Set):
     _fields = [
-        ('surname', TeletexString, {'tag_type': 'implicit', 'tag': 0}),
-        ('given_name', TeletexString, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('initials', TeletexString, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
-        ('generation_qualifier', TeletexString, {'tag_type': 'implicit', 'tag': 3, 'optional': True}),
+        ('surname', TeletexString, {'implicit': 0}),
+        ('given_name', TeletexString, {'implicit': 1, 'optional': True}),
+        ('initials', TeletexString, {'implicit': 2, 'optional': True}),
+        ('generation_qualifier', TeletexString, {'implicit': 3, 'optional': True}),
     ]
 
 
@@ -1157,13 +1217,13 @@ class BuiltInStandardAttributes(Sequence):
     _fields = [
         ('country_name', CountryName, {'optional': True}),
         ('administration_domain_name', AdministrationDomainName, {'optional': True}),
-        ('network_address', NumericString, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('terminal_identifier', PrintableString, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('private_domain_name', PrivateDomainName, {'tag_type': 'explicit', 'tag': 2, 'optional': True}),
-        ('organization_name', PrintableString, {'tag_type': 'implicit', 'tag': 3, 'optional': True}),
-        ('numeric_user_identifier', NumericString, {'tag_type': 'implicit', 'tag': 4, 'optional': True}),
-        ('personal_name', PersonalName, {'tag_type': 'implicit', 'tag': 5, 'optional': True}),
-        ('organizational_unit_names', OrganizationalUnitNames, {'tag_type': 'implicit', 'tag': 6, 'optional': True}),
+        ('network_address', NumericString, {'implicit': 0, 'optional': True}),
+        ('terminal_identifier', PrintableString, {'implicit': 1, 'optional': True}),
+        ('private_domain_name', PrivateDomainName, {'explicit': 2, 'optional': True}),
+        ('organization_name', PrintableString, {'implicit': 3, 'optional': True}),
+        ('numeric_user_identifier', NumericString, {'implicit': 4, 'optional': True}),
+        ('personal_name', PersonalName, {'implicit': 5, 'optional': True}),
+        ('organizational_unit_names', OrganizationalUnitNames, {'implicit': 6, 'optional': True}),
     ]
 
 
@@ -1223,8 +1283,8 @@ class UnformattedPostalAddress(Set):
 
 class E1634Address(Sequence):
     _fields = [
-        ('number', NumericString, {'tag_type': 'implicit', 'tag': 0}),
-        ('sub_address', NumericString, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('number', NumericString, {'implicit': 0}),
+        ('sub_address', NumericString, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -1234,17 +1294,17 @@ class NAddresses(SetOf):
 
 class PresentationAddress(Sequence):
     _fields = [
-        ('p_selector', OctetString, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('s_selector', OctetString, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
-        ('t_selector', OctetString, {'tag_type': 'explicit', 'tag': 2, 'optional': True}),
-        ('n_addresses', NAddresses, {'tag_type': 'explicit', 'tag': 3}),
+        ('p_selector', OctetString, {'explicit': 0, 'optional': True}),
+        ('s_selector', OctetString, {'explicit': 1, 'optional': True}),
+        ('t_selector', OctetString, {'explicit': 2, 'optional': True}),
+        ('n_addresses', NAddresses, {'explicit': 3}),
     ]
 
 
 class ExtendedNetworkAddress(Choice):
     _alternatives = [
         ('e163_4_address', E1634Address),
-        ('psap_address', PresentationAddress, {'tag_type': 'implicit', 'tag': 0})
+        ('psap_address', PresentationAddress, {'implicit': 0})
     ]
 
 
@@ -1289,8 +1349,8 @@ class ExtensionAttributeType(Integer):
 
 class ExtensionAttribute(Sequence):
     _fields = [
-        ('extension_attribute_type', ExtensionAttributeType, {'tag_type': 'implicit', 'tag': 0}),
-        ('extension_attribute_value', Any, {'tag_type': 'explicit', 'tag': 1}),
+        ('extension_attribute_type', ExtensionAttributeType, {'implicit': 0}),
+        ('extension_attribute_value', Any, {'explicit': 1}),
     ]
 
     _oid_pair = ('extension_attribute_type', 'extension_attribute_value')
@@ -1335,22 +1395,22 @@ class ORAddress(Sequence):
 
 class EDIPartyName(Sequence):
     _fields = [
-        ('name_assigner', DirectoryString, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('party_name', DirectoryString, {'tag_type': 'implicit', 'tag': 1}),
+        ('name_assigner', DirectoryString, {'implicit': 0, 'optional': True}),
+        ('party_name', DirectoryString, {'implicit': 1}),
     ]
 
 
 class GeneralName(Choice):
     _alternatives = [
-        ('other_name', AnotherName, {'tag_type': 'implicit', 'tag': 0}),
-        ('rfc822_name', EmailAddress, {'tag_type': 'implicit', 'tag': 1}),
-        ('dns_name', DNSName, {'tag_type': 'implicit', 'tag': 2}),
-        ('x400_address', ORAddress, {'tag_type': 'implicit', 'tag': 3}),
-        ('directory_name', Name, {'tag_type': 'explicit', 'tag': 4}),
-        ('edi_party_name', EDIPartyName, {'tag_type': 'implicit', 'tag': 5}),
-        ('uniform_resource_identifier', URI, {'tag_type': 'implicit', 'tag': 6}),
-        ('ip_address', IPAddress, {'tag_type': 'implicit', 'tag': 7}),
-        ('registered_id', ObjectIdentifier, {'tag_type': 'implicit', 'tag': 8}),
+        ('other_name', AnotherName, {'implicit': 0}),
+        ('rfc822_name', EmailAddress, {'implicit': 1}),
+        ('dns_name', DNSName, {'implicit': 2}),
+        ('x400_address', ORAddress, {'implicit': 3}),
+        ('directory_name', Name, {'explicit': 4}),
+        ('edi_party_name', EDIPartyName, {'implicit': 5}),
+        ('uniform_resource_identifier', URI, {'implicit': 6}),
+        ('ip_address', IPAddress, {'implicit': 7}),
+        ('registered_id', ObjectIdentifier, {'implicit': 8}),
     ]
 
     def __ne__(self, other):
@@ -1417,16 +1477,16 @@ class BasicConstraints(Sequence):
 
 class AuthorityKeyIdentifier(Sequence):
     _fields = [
-        ('key_identifier', OctetString, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('authority_cert_issuer', GeneralNames, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('authority_cert_serial_number', Integer, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('key_identifier', OctetString, {'implicit': 0, 'optional': True}),
+        ('authority_cert_issuer', GeneralNames, {'implicit': 1, 'optional': True}),
+        ('authority_cert_serial_number', Integer, {'implicit': 2, 'optional': True}),
     ]
 
 
 class DistributionPointName(Choice):
     _alternatives = [
-        ('full_name', GeneralNames, {'tag_type': 'implicit', 'tag': 0}),
-        ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'tag_type': 'implicit', 'tag': 1}),
+        ('full_name', GeneralNames, {'implicit': 0}),
+        ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'implicit': 1}),
     ]
 
 
@@ -1447,8 +1507,8 @@ class ReasonFlags(BitString):
 class GeneralSubtree(Sequence):
     _fields = [
         ('base', GeneralName),
-        ('minimum', Integer, {'tag_type': 'implicit', 'tag': 0, 'default': 0}),
-        ('maximum', Integer, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('minimum', Integer, {'implicit': 0, 'default': 0}),
+        ('maximum', Integer, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -1458,16 +1518,16 @@ class GeneralSubtrees(SequenceOf):
 
 class NameConstraints(Sequence):
     _fields = [
-        ('permitted_subtrees', GeneralSubtrees, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('excluded_subtrees', GeneralSubtrees, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('permitted_subtrees', GeneralSubtrees, {'implicit': 0, 'optional': True}),
+        ('excluded_subtrees', GeneralSubtrees, {'implicit': 1, 'optional': True}),
     ]
 
 
 class DistributionPoint(Sequence):
     _fields = [
-        ('distribution_point', DistributionPointName, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
-        ('reasons', ReasonFlags, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('crl_issuer', GeneralNames, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
+        ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}),
+        ('reasons', ReasonFlags, {'implicit': 1, 'optional': True}),
+        ('crl_issuer', GeneralNames, {'implicit': 2, 'optional': True}),
     ]
 
     _url = False
@@ -1493,7 +1553,7 @@ class DistributionPoint(Sequence):
             for general_name in name.chosen:
                 if general_name.name == 'uniform_resource_identifier':
                     url = general_name.native
-                    if url[0:7] == 'http://':
+                    if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
                         self._url = url
                         break
 
@@ -1585,8 +1645,8 @@ class PolicyMappings(SequenceOf):
 
 class PolicyConstraints(Sequence):
     _fields = [
-        ('require_explicit_policy', Integer, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
-        ('inhibit_policy_mapping', Integer, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('require_explicit_policy', Integer, {'implicit': 0, 'optional': True}),
+        ('inhibit_policy_mapping', Integer, {'implicit': 1, 'optional': True}),
     ]
 
 
@@ -1650,6 +1710,8 @@ class KeyPurposeId(ObjectIdentifier):
         '1.3.6.1.4.1.311.10.3.12': 'microsoft_document_signing',
         '1.3.6.1.4.1.311.10.3.13': 'microsoft_lifetime_signing',
         '1.3.6.1.4.1.311.10.3.14': 'microsoft_mobile_device_software',
+        # https://support.microsoft.com/en-us/help/287547/object-ids-associated-with-microsoft-cryptography
+        '1.3.6.1.4.1.311.20.2.2': 'microsoft_smart_card_logon',
         # https://opensource.apple.com/source
         #  - /Security/Security-57031.40.6/Security/libsecurity_keychain/lib/SecPolicy.cpp
         #  - /libsecurity_cssm/libsecurity_cssm-36064/lib/oidsalg.c
@@ -1685,6 +1747,16 @@ class KeyPurposeId(ObjectIdentifier):
         '1.2.840.113625.100.1.32': 'apple_test_smp_encryption',
         '1.2.840.113635.100.1.33': 'apple_server_authentication',
         '1.2.840.113635.100.1.34': 'apple_pcs_escrow_service',
+        # http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.201-2.pdf
+        '2.16.840.1.101.3.6.8': 'piv_card_authentication',
+        '2.16.840.1.101.3.6.7': 'piv_content_signing',
+        # https://tools.ietf.org/html/rfc4556.html
+        '1.3.6.1.5.2.3.4': 'pkinit_kpclientauth',
+        '1.3.6.1.5.2.3.5': 'pkinit_kpkdc',
+        # https://www.adobe.com/devnet-docs/acrobatetk/tools/DigSig/changes.html
+        '1.2.840.113583.1.1.5': 'adobe_authentic_documents_trust',
+        # https://www.idmanagement.gov/wp-content/uploads/sites/1171/uploads/fpki-pivi-cert-profiles.pdf
+        '2.16.840.1.101.3.8.7': 'fpki_pivi_content_signing'
     }
 
 
@@ -1741,6 +1813,232 @@ class NetscapeCertificateType(BitString):
     }
 
 
+class Version(Integer):
+    _map = {
+        0: 'v1',
+        1: 'v2',
+        2: 'v3',
+    }
+
+
+class TPMSpecification(Sequence):
+    _fields = [
+        ('family', UTF8String),
+        ('level', Integer),
+        ('revision', Integer),
+    ]
+
+
+class SetOfTPMSpecification(SetOf):
+    _child_spec = TPMSpecification
+
+
+class TCGSpecificationVersion(Sequence):
+    _fields = [
+        ('major_version', Integer),
+        ('minor_version', Integer),
+        ('revision', Integer),
+    ]
+
+
+class TCGPlatformSpecification(Sequence):
+    _fields = [
+        ('version', TCGSpecificationVersion),
+        ('platform_class', OctetString),
+    ]
+
+
+class SetOfTCGPlatformSpecification(SetOf):
+    _child_spec = TCGPlatformSpecification
+
+
+class EKGenerationType(Enumerated):
+    _map = {
+        0: 'internal',
+        1: 'injected',
+        2: 'internal_revocable',
+        3: 'injected_revocable',
+    }
+
+
+class EKGenerationLocation(Enumerated):
+    _map = {
+        0: 'tpm_manufacturer',
+        1: 'platform_manufacturer',
+        2: 'ek_cert_signer',
+    }
+
+
+class EKCertificateGenerationLocation(Enumerated):
+    _map = {
+        0: 'tpm_manufacturer',
+        1: 'platform_manufacturer',
+        2: 'ek_cert_signer',
+    }
+
+
+class EvaluationAssuranceLevel(Enumerated):
+    _map = {
+        1: 'level1',
+        2: 'level2',
+        3: 'level3',
+        4: 'level4',
+        5: 'level5',
+        6: 'level6',
+        7: 'level7',
+    }
+
+
+class EvaluationStatus(Enumerated):
+    _map = {
+        0: 'designed_to_meet',
+        1: 'evaluation_in_progress',
+        2: 'evaluation_completed',
+    }
+
+
+class StrengthOfFunction(Enumerated):
+    _map = {
+        0: 'basic',
+        1: 'medium',
+        2: 'high',
+    }
+
+
+class URIReference(Sequence):
+    _fields = [
+        ('uniform_resource_identifier', IA5String),
+        ('hash_algorithm', DigestAlgorithm, {'optional': True}),
+        ('hash_value', BitString, {'optional': True}),
+    ]
+
+
+class CommonCriteriaMeasures(Sequence):
+    _fields = [
+        ('version', IA5String),
+        ('assurance_level', EvaluationAssuranceLevel),
+        ('evaluation_status', EvaluationStatus),
+        ('plus', Boolean, {'default': False}),
+        ('strengh_of_function', StrengthOfFunction, {'implicit': 0, 'optional': True}),
+        ('profile_oid', ObjectIdentifier, {'implicit': 1, 'optional': True}),
+        ('profile_url', URIReference, {'implicit': 2, 'optional': True}),
+        ('target_oid', ObjectIdentifier, {'implicit': 3, 'optional': True}),
+        ('target_uri', URIReference, {'implicit': 4, 'optional': True}),
+    ]
+
+
+class SecurityLevel(Enumerated):
+    _map = {
+        1: 'level1',
+        2: 'level2',
+        3: 'level3',
+        4: 'level4',
+    }
+
+
+class FIPSLevel(Sequence):
+    _fields = [
+        ('version', IA5String),
+        ('level', SecurityLevel),
+        ('plus', Boolean, {'default': False}),
+    ]
+
+
+class TPMSecurityAssertions(Sequence):
+    _fields = [
+        ('version', Version, {'default': 'v1'}),
+        ('field_upgradable', Boolean, {'default': False}),
+        ('ek_generation_type', EKGenerationType, {'implicit': 0, 'optional': True}),
+        ('ek_generation_location', EKGenerationLocation, {'implicit': 1, 'optional': True}),
+        ('ek_certificate_generation_location', EKCertificateGenerationLocation, {'implicit': 2, 'optional': True}),
+        ('cc_info', CommonCriteriaMeasures, {'implicit': 3, 'optional': True}),
+        ('fips_level', FIPSLevel, {'implicit': 4, 'optional': True}),
+        ('iso_9000_certified', Boolean, {'implicit': 5, 'default': False}),
+        ('iso_9000_uri', IA5String, {'optional': True}),
+    ]
+
+
+class SetOfTPMSecurityAssertions(SetOf):
+    _child_spec = TPMSecurityAssertions
+
+
+class SubjectDirectoryAttributeId(ObjectIdentifier):
+    _map = {
+        # https://tools.ietf.org/html/rfc2256#page-11
+        '2.5.4.52': 'supported_algorithms',
+        # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
+        '2.23.133.2.16': 'tpm_specification',
+        '2.23.133.2.17': 'tcg_platform_specification',
+        '2.23.133.2.18': 'tpm_security_assertions',
+        # https://tools.ietf.org/html/rfc3739#page-18
+        '1.3.6.1.5.5.7.9.1': 'pda_date_of_birth',
+        '1.3.6.1.5.5.7.9.2': 'pda_place_of_birth',
+        '1.3.6.1.5.5.7.9.3': 'pda_gender',
+        '1.3.6.1.5.5.7.9.4': 'pda_country_of_citizenship',
+        '1.3.6.1.5.5.7.9.5': 'pda_country_of_residence',
+        # https://holtstrom.com/michael/tools/asn1decoder.php
+        '1.2.840.113533.7.68.29': 'entrust_user_role',
+    }
+
+
+class SetOfGeneralizedTime(SetOf):
+    _child_spec = GeneralizedTime
+
+
+class SetOfDirectoryString(SetOf):
+    _child_spec = DirectoryString
+
+
+class SetOfPrintableString(SetOf):
+    _child_spec = PrintableString
+
+
+class SupportedAlgorithm(Sequence):
+    _fields = [
+        ('algorithm_identifier', AnyAlgorithmIdentifier),
+        ('intended_usage', KeyUsage, {'explicit': 0, 'optional': True}),
+        ('intended_certificate_policies', CertificatePolicies, {'explicit': 1, 'optional': True}),
+    ]
+
+
+class SetOfSupportedAlgorithm(SetOf):
+    _child_spec = SupportedAlgorithm
+
+
+class SubjectDirectoryAttribute(Sequence):
+    _fields = [
+        ('type', SubjectDirectoryAttributeId),
+        ('values', Any),
+    ]
+
+    _oid_pair = ('type', 'values')
+    _oid_specs = {
+        'supported_algorithms': SetOfSupportedAlgorithm,
+        'tpm_specification': SetOfTPMSpecification,
+        'tcg_platform_specification': SetOfTCGPlatformSpecification,
+        'tpm_security_assertions': SetOfTPMSecurityAssertions,
+        'pda_date_of_birth': SetOfGeneralizedTime,
+        'pda_place_of_birth': SetOfDirectoryString,
+        'pda_gender': SetOfPrintableString,
+        'pda_country_of_citizenship': SetOfPrintableString,
+        'pda_country_of_residence': SetOfPrintableString,
+    }
+
+    def _values_spec(self):
+        type_ = self['type'].native
+        if type_ in self._oid_specs:
+            return self._oid_specs[type_]
+        return SetOf
+
+    _spec_callbacks = {
+        'values': _values_spec
+    }
+
+
+class SubjectDirectoryAttributes(SequenceOf):
+    _child_spec = SubjectDirectoryAttribute
+
+
 class ExtensionId(ObjectIdentifier):
     _map = {
         '2.5.29.9': 'subject_directory_attributes',
@@ -1766,6 +2064,8 @@ class ExtensionId(ObjectIdentifier):
         '1.3.6.1.5.5.7.48.1.5': 'ocsp_no_check',
         '1.2.840.113533.7.65.0': 'entrust_version_extension',
         '2.16.840.1.113730.1.1': 'netscape_certificate_type',
+        # https://tools.ietf.org/html/rfc6962.html#page-14
+        '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list',
     }
 
 
@@ -1778,7 +2078,7 @@ class Extension(Sequence):
 
     _oid_pair = ('extn_id', 'extn_value')
     _oid_specs = {
-        'subject_directory_attributes': Attributes,
+        'subject_directory_attributes': SubjectDirectoryAttributes,
         'key_identifier': OctetString,
         'key_usage': KeyUsage,
         'private_key_usage_period': PrivateKeyUsagePeriod,
@@ -1800,6 +2100,7 @@ class Extension(Sequence):
         'ocsp_no_check': Null,
         'entrust_version_extension': EntrustVersionInfo,
         'netscape_certificate_type': NetscapeCertificateType,
+        'signed_certificate_timestamp_list': OctetString,
     }
 
 
@@ -1807,26 +2108,18 @@ class Extensions(SequenceOf):
     _child_spec = Extension
 
 
-class Version(Integer):
-    _map = {
-        0: 'v1',
-        1: 'v2',
-        2: 'v3',
-    }
-
-
 class TbsCertificate(Sequence):
     _fields = [
-        ('version', Version, {'tag_type': 'explicit', 'tag': 0, 'default': 'v1'}),
+        ('version', Version, {'explicit': 0, 'default': 'v1'}),
         ('serial_number', Integer),
         ('signature', SignedDigestAlgorithm),
         ('issuer', Name),
         ('validity', Validity),
         ('subject', Name),
         ('subject_public_key_info', PublicKeyInfo),
-        ('issuer_unique_id', OctetBitString, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
-        ('subject_unique_id', OctetBitString, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
-        ('extensions', Extensions, {'tag_type': 'explicit', 'tag': 3, 'optional': True}),
+        ('issuer_unique_id', OctetBitString, {'implicit': 1, 'optional': True}),
+        ('subject_unique_id', OctetBitString, {'implicit': 2, 'optional': True}),
+        ('extensions', Extensions, {'explicit': 3, 'optional': True}),
     ]
 
 
@@ -1856,6 +2149,7 @@ class Certificate(Sequence):
     _extended_key_usage_value = None
     _authority_information_access_value = None
     _subject_information_access_value = None
+    _private_key_usage_period_value = None
     _tls_feature_value = None
     _ocsp_no_check_value = None
     _issuer_serial = None
@@ -1901,6 +2195,20 @@ class Certificate(Sequence):
             self._set_extensions()
         return self._critical_extensions
 
+    @property
+    def private_key_usage_period_value(self):
+        """
+        This extension is used to constrain the period over which the subject
+        private key may be used
+
+        :return:
+            None or a PrivateKeyUsagePeriod object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._private_key_usage_period_value
+
     @property
     def subject_directory_attributes_value(self):
         """
@@ -1908,12 +2216,12 @@ class Certificate(Sequence):
         about the subject.
 
         :return:
-            None or an Attributes object
+            None or a SubjectDirectoryAttributes object
         """
 
         if not self._processed_extensions:
             self._set_extensions()
-        return self._key_identifier_value
+        return self._subject_directory_attributes
 
     @property
     def key_identifier_value(self):
@@ -2374,7 +2682,7 @@ class Certificate(Sequence):
                 if location.name != 'uniform_resource_identifier':
                     continue
                 url = location.native
-                if url.lower()[0:7] == 'http://':
+                if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')):
                     output.append(url)
         return output
 
@@ -2466,11 +2774,14 @@ class Certificate(Sequence):
     def self_signed(self):
         """
         :return:
-            A unicode string of "yes", "no" or "maybe". The "maybe" result will
-            be returned if the certificate does not contain a key identifier
-            extension, but is issued by the subject. In this case the
-            certificate signature will need to be verified using the subject
-            public key to determine a "yes" or "no" answer.
+            A unicode string of "no" or "maybe". The "maybe" result will
+            be returned if the certificate issuer and subject are the same.
+            If a key identifier and authority key identifier are present,
+            they will need to match otherwise "no" will be returned.
+
+            To verify if a certificate is truly self-signed, the signature
+            will need to be verified. See the certvalidator package for
+            one possible solution.
         """
 
         if self._self_signed is None:
@@ -2478,9 +2789,9 @@ class Certificate(Sequence):
             if self.self_issued:
                 if self.key_identifier:
                     if not self.authority_key_identifier:
-                        self._self_signed = 'yes'
+                        self._self_signed = 'maybe'
                     elif self.authority_key_identifier == self.key_identifier:
-                        self._self_signed = 'yes'
+                        self._self_signed = 'maybe'
                 else:
                     self._self_signed = 'maybe'
         return self._self_signed
@@ -2518,6 +2829,16 @@ class Certificate(Sequence):
             self._sha256 = hashlib.sha256(self.dump()).digest()
         return self._sha256
 
+    @property
+    def sha256_fingerprint(self):
+        """
+        :return:
+            A unicode string of the SHA-256 hash, formatted using hex encoding
+            with a space between each pair of characters, all uppercase
+        """
+
+        return ' '.join('%02X' % c for c in bytes_to_list(self.sha256))
+
     def is_valid_domain_ip(self, domain_ip):
         """
         Check if a domain name or IP address is valid according to the
@@ -2670,10 +2991,10 @@ class SequenceOfAlgorithmIdentifiers(SequenceOf):
 class CertificateAux(Sequence):
     _fields = [
         ('trust', KeyPurposeIdentifiers, {'optional': True}),
-        ('reject', KeyPurposeIdentifiers, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
+        ('reject', KeyPurposeIdentifiers, {'implicit': 0, 'optional': True}),
         ('alias', UTF8String, {'optional': True}),
         ('keyid', OctetString, {'optional': True}),
-        ('other', SequenceOfAlgorithmIdentifiers, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
+        ('other', SequenceOfAlgorithmIdentifiers, {'implicit': 1, 'optional': True}),
     ]
 
 

+ 339 - 0
desktop/core/ext-py/asn1crypto-0.24.0/changelog.md

@@ -0,0 +1,339 @@
+# changelog
+
+## 0.24.0
+
+ - `x509.Certificate().self_signed` will no longer return `"yes"` under any
+   circumstances. This helps prevent confusion since the library does not
+   verify the signature. Instead a library like oscrypto should be used
+   to confirm if a certificate is self-signed.
+ - Added various OIDs to `x509.KeyPurposeId()`
+ - Added `x509.Certificate().private_key_usage_period_value`
+ - Added structures for parsing common subject directory attributes for
+   X.509 certificates, including `x509.SubjectDirectoryAttribute()`
+ - Added `algos.AnyAlgorithmIdentifier()` for situations where an
+   algorithm identifier may contain a digest, signed digest or encryption
+   algorithm OID
+ - Fixed a bug with `x509.Certificate().subject_directory_attributes_value`
+   not returning the correct value
+ - Fixed a bug where explicitly-tagged fields in a `core.Sequence()` would
+   not function properly when the field had a default value
+ - Fixed a bug with type checking in `pem.armor()`
+
+## 0.23.0
+
+ - Backwards compatibility break: the `tag_type`, `explicit_tag` and
+   `explicit_class` attributes on `core.Asn1Value` no longer exist and were
+   replaced by the `implicit` and `explicit` attributes. Field param dicts
+   may use the new `explicit` and `implicit` keys, or the old `tag_type` and
+   `tag` keys. The attribute changes will likely to have little to no impact
+   since they were primarily an implementation detail.
+ - Teletex strings used inside of X.509 certificates are now interpreted
+   using Windows-1252 (a superset of ISO-8859-1). This enables compatibility
+   with certificates generated by OpenSSL. Strict parsing of Teletex strings
+   can be retained by using the `x509.strict_teletex()` context manager.
+ - Added support for nested explicit tagging, supporting values that are
+   defined with explicit tagging and then added as a field of another
+   structure using explicit tagging.
+ - Fixed a `UnicodeDecodeError` when trying to find the (optional) dependency
+   OpenSSL on Python 2
+ - Fixed `next_update` field of `crl.TbsCertList` to be optional
+ - Added the `x509.Certificate.sha256_fingerprint` property
+ - `x509.Certificate.ocsp_urls` and `x509.DistributionPoint.url` will now
+   return `https://`, `ldap://` and `ldaps://` URLs in addition to `http://`.
+ - Added CMS Attribute Protection definitions from RFC 6211
+ - Added OIDs from RFC 6962
+
+## 0.22.0
+
+ - Added `parser.peek()`
+ - Implemented proper support for BER-encoded indefinite length strings of
+   all kinds - `core.BitString`, `core.OctetString` and all of the `core`
+   classes that are natively represented as Python unicode strings
+ - Fixed a bug with encoding LDAP URLs in `x509.URI`
+ - Correct `x509.DNSName` to allow a leading `.`, such as when used with
+   `x509.NameConstraints`
+ - Fixed an issue with dumping the parsed contents of `core.Any` when
+   explicitly tagged
+ - Custom `setup.py clean` now accepts the short `-a` flag for compatibility
+
+## 0.21.1
+
+ - Fixed a regression where explicit tagging of a field containing a
+   `core.Choice` would result in an incorrect header
+ - Fixed a bug where an `IndexError` was being raised instead of a `ValueError`
+   when a value was truncated to not include enough bytes for the header
+ - Corrected the spec for the `value` field of `pkcs12.Attribute`
+ - Added support for `2.16.840.1.113894.746875.1.1` OID to
+   `pkcs12.AttributeType`
+
+## 0.21.0
+
+ - Added `core.load()` for loading standard, universal types without knowing
+   the spec beforehand
+ - Added a `strict` keyword arg to the various `load()` methods and functions in
+   `core` that checks for trailing data and raises a `ValueError` when found
+ - Added `asn1crypto.parser` submodule with `emit()` and `parse()` functions for
+   low-level integration
+ - Added `asn1crypto.version` for version introspection without side-effects
+ - Added `algos.DSASignature`
+ - Fixed a bug with the `_header` attribute of explicitly-tagged values only
+   containing the explicit tag header instead of both the explicit tag header
+   and the encapsulated value header
+
+## 0.20.0
+
+ - Added support for year 0
+ - Added the OID for unique identifier to `x509.NameType`
+ - Fixed a bug creating the native representation of a `core.BitString` with
+   leading null bytes
+ - Added a `.cast()` method to allow converting between different
+   representations of the same data, e.g. `core.BitString` and
+   `core.OctetBitString`
+
+## 0.19.0
+
+ - Force `algos.DigestAlgorithm` to encode `parameters` as `Null` when the
+   `algorithm` is `sha1`, `sha224`, `sha256`, `sha384` or `sha512` per RFC 4055
+ - Resolved an issue where a BER-encoded indefinite-length value could not be
+   properly parsed when embedded inside of a `core.Sequence` or `core.Set`
+ - Fix `x509.Name.build()` to properly handle dotted OID type values
+ - `core.Choice` can now be constructed from a single-element `dict` or a
+   two-element `tuple` to allow for better usability when constructing values
+   from native Python values
+ - All `core` objects can now be passed to `print()` without an exception being
+   raised
+
+## 0.18.5
+
+ - Don't fail importing if `ctypes` or `_ctypes` is not available
+
+## 0.18.4
+
+ - `core.Sequence` will now raise an exception when an unknown field is provided
+ - Prevent `UnicodeDecodeError` on Python 2 when calling
+   `core.OctetString.debug()`
+ - Corrected the default value for the `hash_algorithm` field of
+   `tsp.ESSCertIDv2`
+ - Fixed a bug constructing a `cms.SignedData` object
+ - Ensure that specific RSA OIDs are always paired with `parameters` set to
+   `core.Null`
+
+## 0.18.3
+
+ - Fixed DER encoding of `core.BitString` when a `_map` is specified (i.e. a
+   "named bit list") to omit trailing zero bits. This fixes compliance of
+   various `x509` structures with RFC 5280.
+ - Corrected a side effect in `keys.PrivateKeyInfo.wrap()` that would cause the
+   original `keys.ECPrivateKey` structure to become corrupt
+ - `core.IntegerOctetString` now correctly encodes the integer as an unsigned
+   value when converting to bytes. Previously decoding was unsigned, but
+   encoding was signed.
+ - Fix `util.int_from_bytes()` on Python 2 to return `0` from an empty byte
+   string
+
+## 0.18.2
+
+ - Allow `_perf` submodule to be removed from source tree when embedding
+
+## 0.18.1
+
+ - Fixed DER encoding of `core.Set` and `core.SetOf`
+ - Fixed a bug in `x509.Name.build()` that could generate invalid DER encoding
+ - Improved exception messages when parsing nested structures via the `.native`
+   attribute
+ - `algos.SignedDigestAlgorithm` now ensures the `parameters` are set to
+   `Null` when `algorithm` is `sha224_rsa`, `sha256_rsa`, `sha384_rsa` or
+   `sha512_rsa`, per RFC 4055
+ - Corrected the definition of `pdf.AdobeTimestamp` to mark the
+   `requires_auth` field as optional
+ - Add support for the OID `1.2.840.113549.1.9.16.2.14` to
+   `cms.CMSAttributeType`
+ - Improve attribute support for `cms.AttributeCertificateV2`
+ - Handle `cms.AttributeCertificateV2` when incorrectly tagged as
+   `cms.AttributeCertificateV1` in `cms.CertificateChoices`
+
+## 0.18.0
+
+ - Improved general parsing performance by 10-15%
+ - Add support for Windows XP
+ - Added `core.ObjectIdentifier.dotted` attribute to always return dotted
+   integer unicode string
+ - Added `core.ObjectIdentifier.map()` and `core.ObjectIdentifier.unmap()`
+   class methods to map dotted integer unicode strings to user-friendly unicode
+   strings and back
+ - Added various Apple OIDs to `x509.KeyPurposeId`
+ - Fixed a bug parsing nested indefinite-length-encoded values
+ - Fixed a bug with `x509.Certificate.issuer_alt_name_value` if it is the first
+   extension queried
+ - `keys.PublicKeyInfo.bit_size` and `keys.PrivateKeyInfo.bit_size` values are
+   now rounded up to the next closest multiple of 8
+
+## 0.17.1
+
+ - Fix a bug in `x509.URI` parsing IRIs containing explicit port numbers on
+   Python 3.x
+
+## 0.17.0
+
+ - Added `x509.TrustedCertificate` for handling OpenSSL auxiliary certificate
+   information appended after a certificate
+ - Added `core.Concat` class for situations such as `x509.TrustedCertificate`
+ - Allow "broken" X.509 certificates to use `core.IA5String` where an
+   `x509.DirectoryString` should be used instead
+ - Added `keys.PrivateKeyInfo.public_key_info` attribute
+ - Added a bunch of OIDs to `x509.KeyPurposeId`
+
+## 0.16.0
+
+ - Added DH key exchange structures: `algos.KeyExchangeAlgorithm`,
+   `algos.KeyExchangeAlgorithmId` and `algos.DHParameters`.
+ - Added DH public key support to `keys.PublicKeyInfo`,
+   `keys.PublicKeyAlgorithm` and `keys.PublicKeyAlgorithmId`. New structures
+   include `keys.DomainParameters` and `keys.ValidationParms`.
+
+## 0.15.1
+
+ - Fixed `cms.CMSAttributes` to be a `core.SetOf` instead of `core.SequenceOf`
+ - `cms.CMSAttribute` can now parse unknown attribute constructs without an
+   exception being raised
+ - `x509.PolicyMapping` now uses `x509.PolicyIdentifier` for field types
+ - Fixed `pdf.RevocationInfoArchival` so that all fields are now of the type
+   `core.SequenceOf` instead of a single value
+ - Added support for the `name_distinguisher`, `telephone_number` and
+   `organization_identifier` OIDs to `x509.Name`
+ - Fixed `x509.Name.native` to not accidentally create nested lists when three
+   or more values for a single type are part of the name
+ - `x509.Name.human_friendly` now reverses the order of fields when the data
+   in an `x509.Name` was encoded in most-specific to least-specific order, which
+   is the opposite of the standard way of least-specific to most-specific.
+ - `x509.NameType.human_friendly` no longer raises an exception when an
+   unknown OID is encountered
+ - Raise a `ValueError` when parsing a `core.Set` and an unknown field is
+   encountered
+
+## 0.15.0
+
+ - Added support for the TLS feature extension from RFC 7633
+ - `x509.Name.build()` now accepts a keyword parameter `use_printable` to force
+   string encoding to be `core.PrintableString` instead of `core.UTF8String`
+ - Added the functions `util.uri_to_iri()` and `util.iri_to_uri()`
+ - Changed `algos.SignedDigestAlgorithmId` to use the preferred OIDs when
+   mapping a unicode string name to an OID. Previously there were multiple OIDs
+   for some algorithms, and different OIDs would sometimes be selected due to
+   the fact that the `_map` `dict` is not ordered.
+
+## 0.14.1
+
+ - Fixed a bug generating `x509.Certificate.sha1_fingerprint` on Python 2
+
+## 0.14.0
+
+ - Added the `x509.Certificate.sha1_fingerprint` attribute
+
+## 0.13.0
+
+ - Backwards compatibility break: the native representation of some
+   `algos.EncryptionAlgorithmId` values changed. `aes128` became `aes128_cbc`,
+   `aes192` became `aes192_cbc` and `aes256` became `aes256_cbc`.
+ - Added more OIDs to `algos.EncryptionAlgorithmId`
+ - Added more OIDs to `cms.KeyEncryptionAlgorithmId`
+ - `x509.Name.human_friendly` now properly supports multiple values per
+   `x509.NameTypeAndValue` object
+ - Added `ocsp.OCSPResponse.basic_ocsp_response` and
+   `ocsp.OCSPResponse.response_data` properties
+ - Added `algos.EncryptionAlgorithm.encryption_mode` property
+ - Fixed a bug with parsing times containing timezone offsets in Python 3
+ - The `attributes` field of `csr.CertificationRequestInfo` is now optional,
+   for compatibility with other ASN.1 parsers
+
+## 0.12.2
+
+ - Correct `core.Sequence.__setitem__()` so that `core.VOID` is set for an
+   optional field when `None` is assigned
+
+## 0.12.1
+
+ - Fixed a `unicode`/`bytes` bug with `x509.URI.dump()` on Python 2
+
+## 0.12.0
+
+ - Backwards Compatibility Break: `core.NoValue` was renamed to `core.Void` and
+   a singleton was added as `core.VOID`
+ - 20-30% improvement in parsing performance
+ - `core.Void` now implements `__nonzero__`
+ - `core.Asn1Value.copy()` now performs a deep copy
+ - All `core` value classes are now compatible with the `copy` module
+ - `core.SequenceOf` and `core.SetOf` now implement `__contains__`
+ - Added `x509.Name.__len__()`
+ - Fixed a bug where `core.Choice.validate()` would not properly account for
+   explicit tagging
+ - `core.Choice.load()` now properly passes itself as the spec when parsing
+ - `x509.Certificate.crl_distribution_points` no longer throws an exception if
+   the `DistributionPoint` does not have a value for the `distribution_point`
+   field
+
+## 0.11.1
+
+ - Corrected `core.UTCTime` to interpret year <= 49 as 20xx and >= 50 as 19xx
+ - `keys.PublicKeyInfo.hash_algo` can now handle DSA keys without parameters
+ - Added `crl.CertificateList.sha256` and `crl.CertificateList.sha1`
+ - Fixed `x509.Name.build()` to properly encode `country_name`, `serial_number`
+   and `dn_qualifier` as `core.PrintableString` as specified in RFC 5280,
+   instead of `core.UTF8String`
+
+## 0.11.0
+
+ - Added Python 2.6 support
+ - Added ability to compare primitive type objects
+ - Implemented proper support for internationalized domains, URLs and email
+   addresses in `x509.Certificate`
+ - Comparing `x509.Name` and `x509.GeneralName` objects adheres to RFC 5280
+ - `x509.Certificate.self_signed` and `x509.Certificate.self_issued` no longer
+   require that certificate is for a CA
+ - Fixed `x509.Certificate.valid_domains` to adhere to RFC 6125
+ - Added `x509.Certificate.is_valid_domain_ip()`
+ - Added `x509.Certificate.sha1` and `x509.Certificate.sha256`
+ - Exposed `util.inet_ntop()` and `util.inet_pton()` for IP address encoding
+ - Improved exception messages for improper types to include type's module name
+
+## 0.10.1
+
+ - Fixed bug in `core.Sequence` affecting Python 2.7 and pypy
+
+## 0.10.0
+
+ - Added PEM encoding/decoding functionality
+ - `core.BitString` now uses item access instead of attributes for named bit
+   access
+ - `core.BitString.native` now uses a `set` of unicode strings when `_map` is
+   present
+ - Removed `core.Asn1Value.pprint()` method
+ - Added `core.ParsableOctetString` class
+ - Added `core.ParsableOctetBitString` class
+ - Added `core.Asn1Value.copy()` method
+ - Added `core.Asn1Value.debug()` method
+ - Added `core.SequenceOf.append()` method
+ - Added `core.Sequence.spec()` and `core.SequenceOf.spec()` methods
+ - Added correct IP address parsing to `x509.GeneralName`
+ - `x509.Name` and `x509.GeneralName` are now compared according to rules in
+   RFC 5280
+ - Added convenience attributes to:
+   - `algos.SignedDigestAlgorithm`
+   - `crl.CertificateList`
+   - `crl.RevokedCertificate`
+   - `keys.PublicKeyInfo`
+   - `ocsp.OCSPRequest`
+   - `ocsp.Request`
+   - `ocsp.OCSPResponse`
+   - `ocsp.SingleResponse`
+   - `x509.Certificate`
+   - `x509.Name`
+ - Added `asn1crypto.util` module with the following items:
+   - `int_to_bytes()`
+   - `int_from_bytes()`
+   - `timezone.utc`
+ - Added `setup.py clean` command
+
+## 0.9.0
+
+ - Initial release

+ 79 - 0
desktop/core/ext-py/asn1crypto-0.24.0/docs/pem.md

@@ -0,0 +1,79 @@
+# PEM Decoder and Encoder
+
+Oftentimes DER-encoded data is wrapped in PEM encoding. This allows the binary
+DER data to be identified and reliably sent over various communication channels.
+
+The `asn1crypto.pem` module includes three functions:
+
+ - `detect(byte_string)`
+ - `unarmor(pem_bytes, multiple=False)`
+ - `armor(type_name, der_bytes, headers=None)`
+
+## detect()
+
+The `detect()` function accepts a byte string and looks for a `BEGIN` block
+line. This is useful to determine if a byte string needs to be PEM-decoded
+before parsing.
+
+```python
+from asn1crypto import pem, x509
+
+with open('/path/to/cert', 'rb') as f:
+    der_bytes = f.read()
+    if pem.detect(der_bytes):
+        _, _, der_bytes = pem.unarmor(der_bytes)
+```
+
+## unarmor()
+
+The `unarmor()` function accepts a byte string and a flag that indicates if
+more than one PEM block may be contained in the byte string. The result is
+a three-element tuple.
+
+ - The first element is a unicode string of the type of PEM block. Examples
+   include: `CERTIFICATE`, `PRIVATE KEY`, `PUBLIC KEY`.
+ - The second element is a `dict` of PEM block headers. Headers are typically
+   only used by encrypted OpenSSL private keys, and are in the format
+   `Name: Value`.
+ - The third element is a byte string of the decoded block contents.
+
+```python
+from asn1crypto import pem, x509
+
+with open('/path/to/cert', 'rb') as f:
+    der_bytes = f.read()
+    if pem.detect(der_bytes):
+        type_name, headers, der_bytes = pem.unarmor(der_bytes)
+
+cert = x509.Certificate.load(der_bytes)
+```
+
+If the `multiple` keyword argument is set to `True`, a generator will be
+returned.
+
+```python
+from asn1crypto import pem, x509
+
+certs = []
+with open('/path/to/ca_certs', 'rb') as f:
+    for type_name, headers, der_bytes in pem.unarmor(f.read(), multiple=True):
+        certs.append(x509.Certificate.load(der_bytes))
+```
+
+## armor()
+
+The `armor()` function accepts three parameters: a unicode string of the block
+type name, a byte string to encode and an optional keyword argument `headers`,
+that should be a `dict` of headers to add after the `BEGIN` line. Headers are
+typically only used by encrypted OpenSSL private keys.
+
+```python
+from asn1crypto import pem, x509
+
+# cert is an instance of x509.Certificate
+
+with open('/path/to/cert', 'wb') as f:
+    der_bytes = cert.dump()
+    pem_bytes = pem.armor('CERTIFICATE', der_bytes)
+    f.write(pem_bytes)
+```

+ 23 - 0
desktop/core/ext-py/asn1crypto-0.24.0/docs/readme.md

@@ -0,0 +1,23 @@
+# asn1crypto Documentation
+
+The documentation for *asn1crypto* is composed of tutorials on basic usage and
+links to the source for the various pre-defined type classes.
+
+## Tutorials
+
+ - [Universal Types with BER/DER Decoder and DER Encoder](universal_types.md)
+ - [PEM Decoder and Encoder](pem.md)
+
+## Reference
+
+ - [Universal types](../asn1crypto/core.py), `asn1crypto.core`
+ - [Digest, HMAC, signed digest and encryption algorithms](../asn1crypto/algos.py), `asn1crypto.algos`
+ - [Private and public keys](../asn1crypto/keys.py), `asn1crypto.keys`
+ - [X.509 certificates](../asn1crypto/x509.py), `asn1crypto.x509`
+ - [Certificate revocation lists (CRLs)](../asn1crypto/crl.py), `asn1crypto.crl`
+ - [Online certificate status protocol (OCSP)](../asn1crypto/ocsp.py), `asn1crypto.ocsp`
+ - [Certificate signing requests (CSRs)](../asn1crypto/csr.py), `asn1crypto.csr`
+ - [Private key/certificate containers (PKCS#12)](../asn1crypto/pkcs12.py), `asn1crypto.pkcs12`
+ - [Cryptographic message syntax (CMS, PKCS#7)](../asn1crypto/cms.py), `asn1crypto.cms`
+ - [Time stamp protocol (TSP)](../asn1crypto/tsp.py), `asn1crypto.tsp`
+ - [PDF signatures](../asn1crypto/pdf.py), `asn1crypto.pdf`

+ 675 - 0
desktop/core/ext-py/asn1crypto-0.24.0/docs/universal_types.md

@@ -0,0 +1,675 @@
+# Universal Types with BER/DER Decoder and DER Encoder
+
+The *asn1crypto* library is a combination of universal type classes that
+implement BER/DER decoding and DER encoding, a PEM encoder and decoder, and a
+number of pre-built cryptographic type classes. This document covers the
+universal type classes.
+
+For a general overview of ASN.1 as used in cryptography, please see
+[A Layman's Guide to a Subset of ASN.1, BER, and DER](http://luca.ntop.org/Teaching/Appunti/asn1.html).
+
+This page contains the following sections:
+
+ - [Universal Types](#universal-types)
+ - [Basic Usage](#basic-usage)
+ - [Sequence](#sequence)
+ - [Set](#set)
+ - [SequenceOf](#sequenceof)
+ - [SetOf](#setof)
+ - [Integer](#integer)
+ - [Enumerated](#enumerated)
+ - [ObjectIdentifier](#objectidentifier)
+ - [BitString](#bitstring)
+ - [Strings](#strings)
+ - [UTCTime](#utctime)
+ - [GeneralizedTime](#generalizedtime)
+ - [Choice](#choice)
+ - [Any](#any)
+ - [Specification via OID](#specification-via-oid)
+ - [Explicit and Implicit Tagging](#explicit-and-implicit-tagging)
+
+## Universal Types
+
+For general purpose ASN.1 parsing, the `asn1crypto.core` module is used. It
+contains the following classes, that parse, represent and serialize all of the
+ASN.1 universal types:
+
+| Class              | Native Type                            | Implementation Notes                 |
+| ------------------ | -------------------------------------- | ------------------------------------ |
+| `Boolean`          | `bool`                                 |                                      |
+| `Integer`          | `int`                                  | may be `long` on Python 2            |
+| `BitString`        | `tuple` of `int` or `set` of `unicode` | `set` used if `_map` present         |
+| `OctetString`      | `bytes` (`str`)                        |                                      |
+| `Null`             | `None`                                 |                                      |
+| `ObjectIdentifier` | `str` (`unicode`)                      | string is dotted integer format      |
+| `ObjectDescriptor` |                                        | no native conversion                 |
+| `InstanceOf`       |                                        | no native conversion                 |
+| `Real`             |                                        | no native conversion                 |
+| `Enumerated`       | `str` (`unicode`)                      | `_map` must be set                   |
+| `UTF8String`       | `str` (`unicode`)                      |                                      |
+| `RelativeOid`      | `str` (`unicode`)                      | string is dotted integer format      |
+| `Sequence`         | `OrderedDict`                          |                                      |
+| `SequenceOf`       | `list`                                 |                                      |
+| `Set`              | `OrderedDict`                          |                                      |
+| `SetOf`            | `list`                                 |                                      |
+| `EmbeddedPdv`      | `OrderedDict`                          | no named field parsing               |
+| `NumericString`    | `str` (`unicode`)                      | no charset limitations               |
+| `PrintableString`  | `str` (`unicode`)                      | no charset limitations               |
+| `TeletexString`    | `str` (`unicode`)                      |                                      |
+| `VideotexString`   | `bytes` (`str`)                        | no unicode conversion                |
+| `IA5String`        | `str` (`unicode`)                      |                                      |
+| `UTCTime`          | `datetime.datetime`                    |                                      |
+| `GeneralizedTime`  | `datetime.datetime`                    | treated as UTC when no timezone      |
+| `GraphicString`    | `str` (`unicode`)                      | unicode conversion as latin1         |
+| `VisibleString`    | `str` (`unicode`)                      | no charset limitations               |
+| `GeneralString`    | `str` (`unicode`)                      | unicode conversion as latin1         |
+| `UniversalString`  | `str` (`unicode`)                      |                                      |
+| `CharacterString`  | `str` (`unicode`)                      | unicode conversion as latin1         |
+| `BMPString`        | `str` (`unicode`)                      |                                      |
+
+For *Native Type*, the Python 3 type is listed first, with the Python 2 type
+in parentheses.
+
+As mentioned next to some of the types, value parsing may not be implemented
+for types not currently used in cryptography (such as `ObjectDescriptor`,
+`InstanceOf` and `Real`). Additionally some of the string classes don't
+enforce character set limitations, and for some string types that accept all
+different encodings, the default encoding is set to latin1.
+
+In addition, there are a few overridden types where various specifications use
+a `BitString` or `OctetString` type to represent a different type. These
+include:
+
+| Class                | Native Type         | Implementation Notes            |
+| -------------------- | ------------------- | ------------------------------- |
+| `OctetBitString`     | `bytes` (`str`)     |                                 |
+| `IntegerBitString`   | `int`               | may be `long` on Python 2       |
+| `IntegerOctetString` | `int`               | may be `long` on Python 2       |
+
+For situations where the DER encoded bytes from one type are embedded in another,
+the `ParsableOctetString` and `ParsableOctetBitString` classes exist. These
+function the same as `OctetString` and `OctetBitString`, however they also
+have an attribute `.parsed` and a method `.parse()` that allows for
+parsing the content as ASN.1 structures.
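+
+As a hedged sketch of that parsing behavior, the DER encoding of an `Integer`
+can be wrapped in a `ParsableOctetString` and then re-parsed (the value here is
+just an assumed example):
+
+```python
+from asn1crypto.core import Integer, ParsableOctetString
+
+# wrap the DER encoding of an Integer inside an octet string
+wrapped = ParsableOctetString(Integer(5).dump())
+
+# .parse() interprets the contents using the supplied spec
+# Will print: 5
+print(wrapped.parse(Integer).native)
+```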
+
+All of these overrides can be used with the `cast()` method to convert between
+them. The only requirement is that the class being casted to has the same tag
+as the original class. No re-encoding is done, rather the contents are simply
+re-interpreted.
+
+```python
+from asn1crypto.core import BitString, OctetBitString, IntegerBitString
+
+bit = BitString((
+    0, 0, 0, 0, 0, 0, 0, 1,
+    0, 0, 0, 0, 0, 0, 1, 0,
+))
+
+# Will print (0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0)
+print(bit.native)
+
+octet = bit.cast(OctetBitString)
+
+# Will print b'\x01\x02'
+print(octet.native)
+
+i = bit.cast(IntegerBitString)
+
+# Will print 258
+print(i.native)
+```
+
+## Basic Usage
+
+All of the universal types implement four methods, a class method `.load()` and
+the instance methods `.dump()`, `.copy()` and `.debug()`.
+
+`.load()` accepts a byte string of DER or BER encoded data and returns an
+object of the class it was called on. `.dump()` returns the serialization of
+an object into DER encoding.
+
+```python
+from asn1crypto.core import Sequence
+
+parsed = Sequence.load(der_byte_string)
+serialized = parsed.dump()
+```
+
+By default, *asn1crypto* tries to be efficient and caches serialized data for
+better performance. If the input data is possibly BER encoded, but the output
+must be DER encoded, the `force` parameter may be used with `.dump()`.
+
+```python
+from asn1crypto.core import Sequence
+
+parsed = Sequence.load(der_byte_string)
+der_serialized = parsed.dump(force=True)
+```
+
+The `.copy()` method creates a deep copy of an object, allowing child fields to
+be modified without affecting the original.
+
+```python
+from asn1crypto.core import Sequence
+
+seq1 = Sequence.load(der_byte_string)
+seq2 = seq1.copy()
+seq2[0] = seq1[0] + 1
+if seq1[0] != seq2[0]:
+    print('Copies have distinct contents')
+```
+
+The `.debug()` method is available to help in situations where interaction with
+another ASN.1 serializer or parsing is not functioning as expected. Calling
+this method will print a tree structure with information about the header bytes,
+class, method, tag, special tagging, content bytes, native Python value, child
+fields and any sub-parsed values.
+
+```python
+from asn1crypto.core import Sequence
+
+parsed = Sequence.load(der_byte_string)
+parsed.debug()
+```
+
+In addition to the available methods, every instance has a `.native` property
+that converts the data into a native Python data type.
+
+```python
+from pprint import pprint
+from asn1crypto.core import Sequence
+
+parsed = Sequence.load(der_byte_string)
+pprint(parsed.native)
+```
+
+## Sequence
+
+One of the core structures when dealing with ASN.1 is the Sequence type. The
+`Sequence` class can handle fields with universal data types, however in most
+situations the `_fields` property will need to be set with the expected
+definition of each field in the Sequence.
+
+### Configuration
+
+The `_fields` property must be set to a `list` of 2-3 element `tuple`s. The
+first element in the tuple must be a unicode string of the field name. The
+second must be a type class - either a universal type, or a custom type. The
+third, and optional, element is a `dict` with parameters to pass to the type
+class for things like default values, marking the field as optional, or
+implicit/explicit tagging.
+
+```python
+from asn1crypto.core import Sequence, Integer, OctetString, IA5String
+
+class MySequence(Sequence):
+    _fields = [
+        ('field_one', Integer),
+        ('field_two', OctetString),
+        ('field_three', IA5String, {'optional': True}),
+    ]
+```
+
+Implicit and explicit tagging will be covered in more detail later, however
+the following are options that can be set for each field type class:
+
+ - `{'default': 1}` sets the field's default value to `1`, allowing it to be
+   omitted from the serialized form
+ - `{'optional': True}` sets the field to be optional, allowing it to be
+   omitted
+
+### Usage
+
+To access values of the sequence, use dict-like access via `[]` and use the
+name of the field:
+
+```python
+seq = MySequence.load(der_byte_string)
+print(seq['field_two'].native)
+```
+
+The values of fields can be set by assigning via `[]`. If the value assigned is
+of the correct type class, it will be used as-is. If the value is not of the
+correct type class, a new instance of that type class will be created and the
+value will be passed to the constructor.
+
+```python
+seq = MySequence.load(der_byte_string)
+# These statements will result in the same state
+seq['field_one'] = Integer(5)
+seq['field_one'] = 5
+```
+
+When fields are complex types such as `Sequence` or `SequenceOf`, there is no
+way to construct the value out of a native Python data type.
+
+### Optional Fields
+
+When a field is configured via the `optional` parameter and is not present in
+the `Sequence`, accessing it will return the `VOID` object. This is an object
+that is serialized to an empty byte string and returns `None` when `.native` is
+accessed.
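+
+A short sketch of this behavior, reusing the `MySequence` class from above and
+assuming `der_byte_string` encodes a sequence that omits `field_three`:
+
+```python
+seq = MySequence.load(der_byte_string)
+
+# the absent optional field is represented by the VOID object
+# Will print: None
+print(seq['field_three'].native)
+```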
+
+## Set
+
+The `Set` class is configured in the same way as `Sequence`, however it allows
+serialized fields to be in any order, per the ASN.1 standard.
+
+```python
+from asn1crypto.core import Set, Integer, OctetString, IA5String
+
+class MySet(Set):
+    _fields = [
+        ('field_one', Integer),
+        ('field_two', OctetString),
+        ('field_three', IA5String, {'optional': True}),
+    ]
+```
+
+## SequenceOf
+
+The `SequenceOf` class is used to allow for zero or more instances of a type.
+The class uses the `_child_spec` property to define the instance class type.
+
+```python
+from asn1crypto.core import SequenceOf, Integer
+
+class Integers(SequenceOf):
+    _child_spec = Integer
+```
+
+Values in the `SequenceOf` can be accessed via `[]` with an integer key. The
+length of the `SequenceOf` is determined via `len()`.
+
+```python
+values = Integers.load(der_byte_string)
+for i in range(0, len(values)):
+    print(values[i].native)
+```
+
+## SetOf
+
+The `SetOf` class is an exact duplicate of `SequenceOf`. According to the ASN.1
+standard, the difference is that a `SequenceOf` is explicitly ordered, however
+`SetOf` may be in any order. This is analogous to the difference between a
+Python `list` and a `set`.
+
+```python
+from asn1crypto.core import SetOf, Integer
+
+class Integers(SetOf):
+    _child_spec = Integer
+```
+
+## Integer
+
+The `Integer` class allows values to be *named*. An `Integer` with named values
+may contain any integer, however special values with names will be represented
+as those names when `.native` is called.
+
+Named values are configured via the `_map` property, which must be a `dict`
+with the keys being integers and the values being unicode strings.
+
+```python
+from asn1crypto.core import Integer
+
+class Version(Integer):
+    _map = {
+        1: 'v1',
+        2: 'v2',
+    }
+
+# Will print: "v1"
+print(Version(1).native)
+
+# Will print: 4
+print(Version(4).native)
+```
+
+## Enumerated
+
+The `Enumerated` class is almost identical to `Integer`, however only values in
+the `_map` property are valid.
+
+```python
+from asn1crypto.core import Enumerated
+
+class Version(Enumerated):
+    _map = {
+        1: 'v1',
+        2: 'v2',
+    }
+
+# Will print: "v1"
+print(Version(1).native)
+
+# Will raise a ValueError exception
+print(Version(4).native)
+```
+
+## ObjectIdentifier
+
+The `ObjectIdentifier` class represents values of the ASN.1 type of the same
+name. `ObjectIdentifier` instances are converted to a unicode string in a
+dotted-integer format when `.native` is accessed.
+
+While this standard conversion is a reasonable baseline, in most situations
+it will be more maintainable to map the OID strings to a unicode string
+containing a description of what the OID represents.
+
+The mapping of OID strings to name strings is configured via the `_map`
+property, which is a `dict` object with keys being unicode OID strings and
+values being unicode name strings.
+
+The `.dotted` attribute will always return a unicode string of the dotted
+integer form of the OID.
+
+The class methods `.map()` and `.unmap()` will convert a dotted integer unicode
+string to the user-friendly name, and vice-versa.
+
+```python
+from asn1crypto.core import ObjectIdentifier
+
+class MyType(ObjectIdentifier):
+    _map = {
+        '1.8.2.1.23': 'value_name',
+        '1.8.2.1.24': 'other_value',
+    }
+
+# Will print: "value_name"
+print(MyType('1.8.2.1.23').native)
+
+# Will print: "1.8.2.1.23"
+print(MyType('1.8.2.1.23').dotted)
+
+# Will print: "1.8.2.1.25"
+print(MyType('1.8.2.1.25').native)
+
+# Will print "value_name"
+print(MyType.map('1.8.2.1.23'))
+
+# Will print "1.8.2.1.23"
+print(MyType.unmap('value_name'))
+```
+
+## BitString
+
+When no `_map` is set for a `BitString` class, the native representation is a
+`tuple` of `int`s (being either `1` or `0`).
+
+```python
+from asn1crypto.core import BitString
+
+b1 = BitString((1, 0, 1))
+```
+
+Additionally, it is possible to set the `_map` property to a dict where the
+keys are bit indexes and the values are unicode string names. This allows
+checking the value of a given bit by item access, and the native representation
+becomes a `set` of unicode strings.
+
+```python
+from asn1crypto.core import BitString
+
+class MyFlags(BitString):
+    _map = {
+        0: 'edit',
+        1: 'delete',
+        2: 'manage_users',
+    }
+
+permissions = MyFlags({'edit', 'delete'})
+
+# This will be printed
+if permissions['edit'] and permissions['delete']:
+    print('Can edit and delete')
+
+# This will not
+if 'manage_users' in permissions.native:
+    print('Is admin')
+```
+
+## Strings
+
+ASN.1 contains quite a number of string types:
+
+| Type              | Standard Encoding                 | Implementation Encoding | Notes                                                                     |
+| ----------------- | --------------------------------- | ----------------------- | ------------------------------------------------------------------------- |
+| `UTF8String`      | UTF-8                             | UTF-8                   |                                                                           |
+| `NumericString`   | ASCII `[0-9 ]`                    | ISO 8859-1              | The implementation is a superset of supported characters                  |
+| `PrintableString` | ASCII `[a-zA-Z0-9 '()+,\\-./:=?]` | ISO 8859-1              | The implementation is a superset of supported characters                  |
+| `TeletexString`   | ITU T.61                          | Custom                  | The implementation is based off of https://en.wikipedia.org/wiki/ITU_T.61 |
+| `VideotexString`  | *?*                               | *None*                  | This has no set encoding, and is not used in cryptography                 |
+| `IA5String`       | ITU T.50 (very similar to ASCII)  | ISO 8859-1              | The implementation is a superset of supported characters                  |
+| `GraphicString`   | *                                 | ISO 8859-1              | This has no set encoding, but seems to often contain ISO 8859-1           |
+| `VisibleString`   | ASCII (printable)                 | ISO 8859-1              | The implementation is a superset of supported characters                  |
+| `GeneralString`   | *                                 | ISO 8859-1              | This has no set encoding, but seems to often contain ISO 8859-1           |
+| `UniversalString` | UTF-32                            | UTF-32                  |                                                                           |
+| `CharacterString` | *                                 | ISO 8859-1              | This has no set encoding, but seems to often contain ISO 8859-1           |
+| `BMPString`       | UTF-16                            | UTF-16                  |                                                                           |
+
+As noted in the table above, many of the implementations are supersets of the
+supported characters. This simplifies parsing, but puts the onus of using valid
+characters on the developer. However, in general `UTF8String`, `BMPString` or
+`UniversalString` should be preferred when a choice is given.
+
+All string types other than `VideotexString` are created from unicode strings.
+
+```python
+from asn1crypto.core import IA5String
+
+print(IA5String('Testing!').native)
+```
+
+## UTCTime
+
+The class `UTCTime` accepts a unicode string in one of the formats:
+
+ - `%y%m%d%H%MZ`
+ - `%y%m%d%H%M%SZ`
+ - `%y%m%d%H%M%z`
+ - `%y%m%d%H%M%S%z`
+
+or a `datetime.datetime` instance. See the
+[Python datetime strptime() reference](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)
+for details of the formats.
+
+When `.native` is accessed, it returns a `datetime.datetime` object with a
+`tzinfo` of `asn1crypto.util.timezone.utc`.
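+
+A minimal sketch using one of the formats listed above (the timestamp value is
+just an assumed example):
+
+```python
+from asn1crypto.core import UTCTime
+
+parsed = UTCTime('180226132500Z')
+
+# Will print: 2018-02-26 13:25:00+00:00
+print(parsed.native)
+```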
+
+## GeneralizedTime
+
+The class `GeneralizedTime` accepts a unicode string in one of the formats:
+
+ - `%Y%m%d%H`
+ - `%Y%m%d%H%M`
+ - `%Y%m%d%H%M%S`
+ - `%Y%m%d%H%M%S.%f`
+ - `%Y%m%d%HZ`
+ - `%Y%m%d%H%MZ`
+ - `%Y%m%d%H%M%SZ`
+ - `%Y%m%d%H%M%S.%fZ`
+ - `%Y%m%d%H%z`
+ - `%Y%m%d%H%M%z`
+ - `%Y%m%d%H%M%S%z`
+ - `%Y%m%d%H%M%S.%f%z`
+
+or a `datetime.datetime` instance. See the
+[Python datetime strptime() reference](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)
+for details of the formats.
+
+When `.native` is accessed, it returns a `datetime.datetime` object with a
+`tzinfo` of `asn1crypto.util.timezone.utc`. For formats where a timezone
+offset is specified (`[+-]\d{4}`), the time is converted to UTC. For
+times without a timezone, the time is assumed to be in UTC.
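+
+A minimal sketch showing the conversion to UTC (the timestamp and its `+0100`
+offset are just assumed examples):
+
+```python
+from asn1crypto.core import GeneralizedTime
+
+parsed = GeneralizedTime('20180226142500+0100')
+
+# The offset is normalized to UTC
+# Will print: 2018-02-26 13:25:00+00:00
+print(parsed.native)
+```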
+
+## Choice
+
+The `Choice` class allows handling ASN.1 Choice structures. The `_alternatives`
+property must be set to a `list` containing 2-3 element `tuple`s. The first
+element in the tuple is the alternative name. The second element is the type
+class for the alternative. The optional third element is a `dict` of
+parameters to pass to the type class constructor. This is used primarily for
+implicit and explicit tagging.
+
+```python
+from asn1crypto.core import Choice, Integer, OctetString, IA5String
+
+class MyChoice(Choice):
+    _alternatives = [
+        ('option_one', Integer),
+        ('option_two', OctetString),
+        ('option_three', IA5String),
+    ]
+```
+
+`Choice` objects have two extra properties, `.name` and `.chosen`. The `.name`
+property contains the name of the chosen alternative. The `.chosen` property
+contains the instance of the chosen type class.
+
+```python
+parsed = MyChoice.load(der_bytes)
+print(parsed.name)
+print(type(parsed.chosen))
+```
+
+The `.native` property and `.dump()` method work as with the universal type
+classes. Under the hood they just proxy the calls to the `.chosen` object.
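+
+A minimal sketch of this proxying, reusing the `MyChoice` class defined above:
+
+```python
+choice = MyChoice(name='option_one', value=5)
+
+# .native and .dump() are forwarded to the chosen Integer
+print(choice.native)
+encoded = choice.dump()
+
+# Loading the encoding picks the alternative back out by tag
+print(MyChoice.load(encoded).name)
+```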
+
+## Any
+
+The `Any` class implements the ASN.1 Any type, which allows any data type. By
+default objects of this class do not perform any parsing. However, the
+`.parse()` instance method allows parsing the contents of the `Any` object,
+either into a universal type, or to a specification pass in via the `spec`
+parameter.
+
+This type is not used as a top-level structure, but instead allows `Sequence`
+and `Set` objects to accept varying contents, usually based on some sort of
+`ObjectIdentifier`.
+
+```python
+from asn1crypto.core import Sequence, ObjectIdentifier, Any, Integer, OctetString
+
+class MySequence(Sequence):
+    _fields = [
+        ('type', ObjectIdentifier),
+        ('value', Any),
+    ]
+```
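+
+The following sketch builds a `MySequence`, re-loads it and then interprets
+the `Any` contents explicitly; the OID and the `Integer` spec are arbitrary
+example values:
+
+```python
+seq = MySequence()
+seq['type'] = '1.2.3.4'
+seq['value'] = Integer(42)
+
+parsed = MySequence.load(seq.dump())
+
+# The Any contents are left unparsed until .parse() is called
+print(parsed['value'].parse(spec=Integer).native)
+```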
+
+## Specification via OID
+
+Throughout the usage of ASN.1 in cryptography, a pattern is present where an
+`ObjectIdentifier` is used to determine which specification should be used to
+interpret another field in a `Sequence`. Usually the other field is an instance
+of `Any`, however occasionally it is an `OctetString` or `OctetBitString`.
+
+*asn1crypto* provides the `_oid_pair` and `_oid_specs` properties of the
+`Sequence` class to allow handling these situations.
+
+The `_oid_pair` is a tuple with two unicode string elements. The first is the
+name of the field that is an `ObjectIdentifier` and the second is the name of
+the field that has a variable specification based on the first field. *In
+situations where the value field should be an `OctetString` or `OctetBitString`,
+`ParsableOctetString` and `ParsableOctetBitString` will need to be used instead
+to allow for the sub-parsing of the contents.*
+
+The `_oid_specs` property is a `dict` object with `ObjectIdentifier` values as
+the keys (either dotted or mapped notation) and a type class as the value. When
+the first field in `_oid_pair` has a value equal to one of the keys in
+`_oid_specs`, then the corresponding type class will be used as the
+specification for the second field of `_oid_pair`.
+
+```python
+from asn1crypto.core import Sequence, ObjectIdentifier, Any, OctetString, Integer
+
+class MyId(ObjectIdentifier):
+    _map = {
+        '1.2.3.4': 'initialization_vector',
+        '1.2.3.5': 'iterations',
+    }
+
+class MySequence(Sequence):
+    _fields = [
+        ('type', MyId),
+        ('value', Any),
+    ]
+
+    _oid_pair = ('type', 'value')
+    _oid_specs = {
+        'initialization_vector': OctetString,
+        'iterations': Integer,
+    }
+```
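+
+A minimal usage sketch of the classes above; the values are arbitrary examples:
+
+```python
+seq = MySequence()
+seq['type'] = 'iterations'
+seq['value'] = 2048
+
+decoded = MySequence.load(seq.dump())
+
+# '1.2.3.5' maps to 'iterations', so the value is interpreted as an Integer
+print(decoded['type'].native)
+print(decoded['value'].native)
+```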
+
+## Explicit and Implicit Tagging
+
+When working with `Sequence`, `Set` and `Choice` it is often necessary to
+disambiguate between fields because of a number of factors:
+
+ - In `Sequence` the presence of an optional field must be determined by tag number
+ - In `Set`, each field must have a different tag number since they can be in any order
+ - In `Choice`, each alternative must have a different tag number to determine which is present
+
+The universal types all have unique tag numbers. However, if a `Sequence`, `Set`
+or `Choice` has more than one field with the same universal type, tagging allows
+a way to keep the semantics of the original type, but with a different tag
+number.
+
+Implicit tagging simply changes the tag number of a type to a different value.
+However, explicit tagging wraps the existing type in another tag with the
+specified tag number.
+
+In general, most situations allow for implicit tagging, with the notable
+exception that a `Choice` type field must always be explicitly tagged.
+Otherwise, using implicit tagging would modify the tag of the chosen
+alternative, breaking the mechanism by which `Choice` works.
+
+Here is an example of implicit and explicit tagging where explicit tagging on
+the `Sequence` allows a `Choice` type field to be optional, and where implicit
+tagging in the `Choice` structure allows disambiguating between two strings of
+the same type.
+
+```python
+from asn1crypto.core import Sequence, Choice, IA5String, UTCTime, ObjectIdentifier
+
+class Person(Choice):
+    _alternatives = [
+        ('name', IA5String),
+        ('email', IA5String, {'implicit': 0}),
+    ]
+
+class Record(Sequence):
+    _fields = [
+        ('id', ObjectIdentifier),
+        ('created', UTCTime),
+        ('creator', Person, {'explicit': 0, 'optional': True}),
+    ]
+```
+
+As is shown above, the keys `implicit` and `explicit` are used for tagging,
+and are passed to a type class constructor via the optional third element of
+a field or alternative tuple. Both parameters may be an integer tag number, or
+a 2-element tuple of string class name and integer tag.
+
+If a tagged value needs its tagging changed, the `.untag()` method can be used
+to create a copy of the object without explicit/implicit tagging. The `.retag()`
+method can be used to change the tagging. This method accepts one parameter, a
+dict with either or both of the keys `implicit` and `explicit`.
+
+```python
+person = Person(name='email', value='will@wbond.net')
+
+# Will display True
+print(person.implicit)
+
+# Will display False
+print(person.untag().implicit)
+
+# Will display 0
+print(person.tag)
+
+# Will display 1
+print(person.retag({'implicit': 1}).tag)
+```

+ 232 - 0
desktop/core/ext-py/asn1crypto-0.24.0/readme.md

@@ -0,0 +1,232 @@
+# asn1crypto
+
+A fast, pure Python library for parsing and serializing ASN.1 structures.
+
+ - [Features](#features)
+ - [Why Another Python ASN.1 Library?](#why-another-python-asn1-library)
+ - [Related Crypto Libraries](#related-crypto-libraries)
+ - [Current Release](#current-release)
+ - [Dependencies](#dependencies)
+ - [Installation](#installation)
+ - [License](#license)
+ - [Documentation](#documentation)
+ - [Continuous Integration](#continuous-integration)
+ - [Testing](#testing)
+ - [Development](#development)
+
+[![Travis CI](https://api.travis-ci.org/wbond/asn1crypto.svg?branch=master)](https://travis-ci.org/wbond/asn1crypto)
+[![AppVeyor](https://ci.appveyor.com/api/projects/status/github/wbond/asn1crypto?branch=master&svg=true)](https://ci.appveyor.com/project/wbond/asn1crypto)
+[![CircleCI](https://circleci.com/gh/wbond/asn1crypto.svg?style=shield)](https://circleci.com/gh/wbond/asn1crypto)
+[![Codecov](https://codecov.io/gh/wbond/asn1crypto/branch/master/graph/badge.svg)](https://codecov.io/gh/wbond/asn1crypto)
+[![PyPI](https://img.shields.io/pypi/v/asn1crypto.svg)](https://pypi.python.org/pypi/asn1crypto)
+
+## Features
+
+In addition to an ASN.1 BER/DER decoder and DER serializer, the project includes
+a bunch of ASN.1 structures for use with various common cryptography standards:
+
+| Standard               | Module                                      | Source                                                                                                                 |
+| ---------------------- | ------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- |
+| X.509                  | [`asn1crypto.x509`](asn1crypto/x509.py)     | [RFC 5280](https://tools.ietf.org/html/rfc5280)                                                                        |
+| CRL                    | [`asn1crypto.crl`](asn1crypto/crl.py)       | [RFC 5280](https://tools.ietf.org/html/rfc5280)                                                                        |
+| CSR                    | [`asn1crypto.csr`](asn1crypto/csr.py)       | [RFC 2986](https://tools.ietf.org/html/rfc2986), [RFC 2985](https://tools.ietf.org/html/rfc2985)                       |
+| OCSP                   | [`asn1crypto.ocsp`](asn1crypto/ocsp.py)     | [RFC 6960](https://tools.ietf.org/html/rfc6960)                                                                        |
+| PKCS#12                | [`asn1crypto.pkcs12`](asn1crypto/pkcs12.py) | [RFC 7292](https://tools.ietf.org/html/rfc7292)                                                                        |
+| PKCS#8                 | [`asn1crypto.keys`](asn1crypto/keys.py)     | [RFC 5208](https://tools.ietf.org/html/rfc5208)                                                                        |
+| PKCS#1 v2.1 (RSA keys) | [`asn1crypto.keys`](asn1crypto/keys.py)     | [RFC 3447](https://tools.ietf.org/html/rfc3447)                                                                        |
+| DSA keys               | [`asn1crypto.keys`](asn1crypto/keys.py)     | [RFC 3279](https://tools.ietf.org/html/rfc3279)                                                                        |
+| Elliptic curve keys    | [`asn1crypto.keys`](asn1crypto/keys.py)     | [SECG SEC1 V2](http://www.secg.org/sec1-v2.pdf)                                                                        |
+| PKCS#3 v1.4            | [`asn1crypto.algos`](asn1crypto/algos.py)   | [PKCS#3 v1.4](ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc)                                                        |
+| PKCS#5 v2.1            | [`asn1crypto.algos`](asn1crypto/algos.py)   | [PKCS#5 v2.1](http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf) |
+| CMS (and PKCS#7)       | [`asn1crypto.cms`](asn1crypto/cms.py)       | [RFC 5652](https://tools.ietf.org/html/rfc5652), [RFC 2315](https://tools.ietf.org/html/rfc2315)                       |
+| TSP                    | [`asn1crypto.tsp`](asn1crypto/tsp.py)       | [RFC 3161](https://tools.ietf.org/html/rfc3161)                                                                        |
+| PDF signatures         | [`asn1crypto.pdf`](asn1crypto/pdf.py)       | [PDF 1.7](http://wwwimages.adobe.com/content/dam/Adobe/en/devnet/pdf/pdfs/PDF32000_2008.pdf)                           |
+
+## Why Another Python ASN.1 Library?
+
+Python has long had the [pyasn1](https://pypi.python.org/pypi/pyasn1) and
+[pyasn1_modules](https://pypi.python.org/pypi/pyasn1-modules) available for
+parsing and serializing ASN.1 structures. While the project does include a
+comprehensive set of tools for parsing and serializing, the performance of the
+library can be very poor, especially when dealing with bit fields and parsing
+large structures such as CRLs.
+
+After spending extensive time using *pyasn1*, the following issues were
+identified:
+
+ 1. Poor performance
+ 2. Verbose, non-pythonic API
+ 3. Out-dated and incomplete definitions in *pyasn1-modules*
+ 4. No simple way to map data to native Python data structures
+ 5. No mechanism for overridden universal ASN.1 types
+
+The *pyasn1* API is largely method driven, and uses extensive configuration
+objects and lowerCamelCase names. There were no consistent options for
+converting types to native Python data structures. Since the project supports
+out-dated versions of Python, many newer language features are unavailable
+for use.
+
+Time was spent trying to profile issues with the performance, however the
+architecture made it hard to pin down the primary source of the poor
+performance. Attempts were made to improve performance by utilizing unreleased
+patches and delaying parsing using the `Any` type. Even with such changes, the
+performance was still unacceptably slow.
+
+Finally, a number of structures in the cryptographic space use universal data
+types such as `BitString` and `OctetString`, but interpret the data as other
+types. For instance, signatures are really byte strings, but are encoded as
+`BitString`. Elliptic curve keys use both `BitString` and `OctetString` to
+represent integers. Parsing these structures as the base universal types and
+then re-interpreting them wastes computation.
+
+*asn1crypto* uses the following techniques to improve performance, especially
+when extracting one or two fields from large, complex structures:
+
+ - Delayed parsing of byte string values
+ - Persistence of original ASN.1 encoded data until a value is changed
+ - Lazy loading of child fields
+ - Utilization of high-level Python stdlib modules
+
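+As a rough illustration of the effect (a sketch only, assuming a DER-encoded
+certificate in a hypothetical `certificate.der` file):
+
+```python
+from asn1crypto import x509
+
+with open('certificate.der', 'rb') as f:
+    der_bytes = f.read()
+
+cert = x509.Certificate.load(der_bytes)
+
+# Only the fields needed to reach the serial number are decoded
+print(cert.serial_number)
+
+# Nothing was modified, so the original encoding is returned as-is
+assert cert.dump() == der_bytes
+```
+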
+While there is no extensive performance test suite, the
+`CRLTests.test_parse_crl` test case was used to parse a 21MB CRL file on a
+late 2013 rMBP. *asn1crypto* parsed the certificate serial numbers in just
+under 8 seconds. With *pyasn1*, using definitions from *pyasn1-modules*, the
+same parsing took over 4,100 seconds.
+
+For smaller structures the performance difference can range from a few times
+faster to an order of magnitude or more.
+
+## Related Crypto Libraries
+
+*asn1crypto* is part of the modularcrypto family of Python packages:
+
+ - [asn1crypto](https://github.com/wbond/asn1crypto)
+ - [oscrypto](https://github.com/wbond/oscrypto)
+ - [csrbuilder](https://github.com/wbond/csrbuilder)
+ - [certbuilder](https://github.com/wbond/certbuilder)
+ - [crlbuilder](https://github.com/wbond/crlbuilder)
+ - [ocspbuilder](https://github.com/wbond/ocspbuilder)
+ - [certvalidator](https://github.com/wbond/certvalidator)
+
+## Current Release
+
+0.24.0 - [changelog](changelog.md)
+
+## Dependencies
+
+Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6 or pypy. *No third-party packages
+required.*
+
+## Installation
+
+```bash
+pip install asn1crypto
+```
+
+## License
+
+*asn1crypto* is licensed under the terms of the MIT license. See the
+[LICENSE](LICENSE) file for the exact license text.
+
+## Documentation
+
+The documentation for *asn1crypto* is composed of tutorials on basic usage and
+links to the source for the various pre-defined type classes.
+
+### Tutorials
+
+ - [Universal Types with BER/DER Decoder and DER Encoder](docs/universal_types.md)
+ - [PEM Encoder and Decoder](docs/pem.md)
+
+### Reference
+
+ - [Universal types](asn1crypto/core.py), `asn1crypto.core`
+ - [Digest, HMAC, signed digest and encryption algorithms](asn1crypto/algos.py), `asn1crypto.algos`
+ - [Private and public keys](asn1crypto/keys.py), `asn1crypto.keys`
+ - [X509 certificates](asn1crypto/x509.py), `asn1crypto.x509`
+ - [Certificate revocation lists (CRLs)](asn1crypto/crl.py), `asn1crypto.crl`
+ - [Online certificate status protocol (OCSP)](asn1crypto/ocsp.py), `asn1crypto.ocsp`
+ - [Certificate signing requests (CSRs)](asn1crypto/csr.py), `asn1crypto.csr`
+ - [Private key/certificate containers (PKCS#12)](asn1crypto/pkcs12.py), `asn1crypto.pkcs12`
+ - [Cryptographic message syntax (CMS, PKCS#7)](asn1crypto/cms.py), `asn1crypto.cms`
+ - [Time stamp protocol (TSP)](asn1crypto/tsp.py), `asn1crypto.tsp`
+ - [PDF signatures](asn1crypto/pdf.py), `asn1crypto.pdf`
+
+## Continuous Integration
+
+ - [Windows](https://ci.appveyor.com/project/wbond/asn1crypto/history) via AppVeyor
+ - [OS X](https://circleci.com/gh/wbond/asn1crypto) via CircleCI
+ - [Linux](https://travis-ci.org/wbond/asn1crypto/builds) via Travis CI
+ - [Test Coverage](https://codecov.io/gh/wbond/asn1crypto/commits) via Codecov
+
+## Testing
+
+Tests are written using `unittest` and require no third-party packages:
+
+```bash
+python run.py tests
+```
+
+To run only some tests, pass a regular expression as a parameter to `tests`.
+
+```bash
+python run.py tests ocsp
+```
+
+## Development
+
+To install the package used for linting, execute:
+
+```bash
+pip install --user -r requires/lint
+```
+
+The following command will run the linter:
+
+```bash
+python run.py lint
+```
+
+Support for code coverage can be installed via:
+
+```bash
+pip install --user -r requires/coverage
+```
+
+Coverage is measured by running:
+
+```bash
+python run.py coverage
+```
+
+To install the necessary packages for releasing a new version on PyPI, run:
+
+```bash
+pip install --user -r requires/release
+```
+
+Releases are created by:
+
+ - Making a git tag in [semver](http://semver.org/) format
+ - Running the command:
+
+   ```bash
+   python run.py release
+   ```
+
+Existing releases can be found at https://pypi.python.org/pypi/asn1crypto.
+
+## CI Tasks
+
+A task named `deps` exists to ensure a modern version of `pip` is installed,
+along with all necessary testing dependencies.
+
+The `ci` task runs `lint` (if flake8 is available for the version of Python) and
+`coverage` (or `tests` if coverage is not available for the version of Python).
+If the current directory is a clean git working copy, the coverage data is
+submitted to codecov.io.
+
+```bash
+python run.py deps
+python run.py ci
+```

+ 0 - 1
desktop/core/ext-py/asn1crypto-0.22.0/setup.cfg → desktop/core/ext-py/asn1crypto-0.24.0/setup.cfg

@@ -1,5 +1,4 @@
 [egg_info]
 tag_build = 
 tag_date = 0
-tag_svn_revision = 0
 

+ 0 - 0
desktop/core/ext-py/asn1crypto-0.22.0/setup.py → desktop/core/ext-py/asn1crypto-0.24.0/setup.py


+ 21 - 0
desktop/core/ext-py/certifi-2018.1.18/LICENSE

@@ -0,0 +1,21 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+Certificate data from Mozilla as of: Thu Nov  3 19:04:19 2011#
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt).  This file can be found in the mozilla source tree:
+http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.#
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $

+ 1 - 0
desktop/core/ext-py/certifi-2018.1.18/MANIFEST.in

@@ -0,0 +1 @@
+include MANIFEST.in README.rst LICENSE certifi/cacert.pem

+ 69 - 0
desktop/core/ext-py/certifi-2018.1.18/PKG-INFO

@@ -0,0 +1,69 @@
+Metadata-Version: 1.1
+Name: certifi
+Version: 2018.1.18
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: http://certifi.io/
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Description: Certifi: Python SSL Certificates
+        ================================
+        
+        `Certifi`_ is a carefully curated collection of Root Certificates for
+        validating the trustworthiness of SSL certificates while verifying the identity
+        of TLS hosts. It has been extracted from the `Requests`_ project.
+        
+        Installation
+        ------------
+        
+        ``certifi`` is available on PyPI. Simply install it with ``pip``::
+        
+            $ pip install certifi
+        
+        Usage
+        -----
+        
+        To reference the installed certificate authority (CA) bundle, you can use the
+        built-in function::
+        
+            >>> import certifi
+        
+            >>> certifi.where()
+            '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
+        
+        Enjoy!
+        
+        1024-bit Root Certificates
+        ~~~~~~~~~~~~~~~~~~~~~~~~~~
+        
+        Browsers and certificate authorities have concluded that 1024-bit keys are
+        unacceptably weak for certificates, particularly root certificates. For this
+        reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+        bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+        certificate from the same CA. Because Mozilla removed these certificates from
+        its bundle, ``certifi`` removed them as well.
+        
+        In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+        to intentionally re-add the 1024-bit roots back into your bundle. This was not
+        recommended in production and therefore was removed. To assist in migrating old
+        code, the function ``certifi.old_where()`` continues to exist as an alias of
+        ``certifi.where()``. Please update your code to use ``certifi.where()``
+        instead. ``certifi.old_where()`` will be removed in 2018.
+        
+        .. _`Certifi`: http://certifi.io/en/latest/
+        .. _`Requests`: http://docs.python-requests.org/en/latest/
+        
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6

+ 46 - 0
desktop/core/ext-py/certifi-2018.1.18/README.rst

@@ -0,0 +1,46 @@
+Certifi: Python SSL Certificates
+================================
+
+`Certifi`_ is a carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+    $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+    >>> import certifi
+
+    >>> certifi.where()
+    '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem'
+
+Enjoy!
+
+1024-bit Root Certificates
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Browsers and certificate authorities have concluded that 1024-bit keys are
+unacceptably weak for certificates, particularly root certificates. For this
+reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
+bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
+certificate from the same CA. Because Mozilla removed these certificates from
+its bundle, ``certifi`` removed them as well.
+
+In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
+to intentionally re-add the 1024-bit roots back into your bundle. This was not
+recommended in production and therefore was removed. To assist in migrating old
+code, the function ``certifi.old_where()`` continues to exist as an alias of
+``certifi.where()``. Please update your code to use ``certifi.where()``
+instead. ``certifi.old_where()`` will be removed in 2018.
+
+.. _`Certifi`: http://certifi.io/en/latest/
+.. _`Requests`: http://docs.python-requests.org/en/latest/

+ 3 - 0
desktop/core/ext-py/certifi-2018.1.18/certifi/__init__.py

@@ -0,0 +1,3 @@
+from .core import where, old_where
+
+__version__ = "2018.01.18"

+ 2 - 0
desktop/core/ext-py/certifi-2018.1.18/certifi/__main__.py

@@ -0,0 +1,2 @@
+from certifi import where
+print(where())

File diff suppressed because it is too large
+ 0 - 937
desktop/core/ext-py/certifi-2018.1.18/certifi/cacert.pem


+ 37 - 0
desktop/core/ext-py/certifi-2018.1.18/certifi/core.py

@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem.
+"""
+import os
+import warnings
+
+
+class DeprecatedBundleWarning(DeprecationWarning):
+    """
+    The weak security bundle is being deprecated. Please bother your service
+    provider to get them to stop using cross-signed roots.
+    """
+
+
+def where():
+    f = os.path.dirname(__file__)
+
+    return os.path.join(f, 'cacert.pem')
+
+
+def old_where():
+    warnings.warn(
+        "The weak security bundle has been removed. certifi.old_where() is now an alias "
+        "of certifi.where(). Please update your code to use certifi.where() instead. "
+        "certifi.old_where() will be removed in 2018.",
+        DeprecatedBundleWarning
+    )
+    return where()
+
+if __name__ == '__main__':
+    print(where())

+ 11 - 0
desktop/core/ext-py/certifi-2018.1.18/setup.cfg

@@ -0,0 +1,11 @@
+[bdist_wheel]
+universal = 1
+
+[metadata]
+license_file = LICENSE
+
+[egg_info]
+tag_build = 
+tag_date = 0
+tag_svn_revision = 0
+

+ 67 - 0
desktop/core/ext-py/certifi-2018.1.18/setup.py

@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import with_statement
+import re
+import os
+import sys
+
+# While I generally consider it an antipattern to try and support both
+# setuptools and distutils with a single setup.py, in this specific instance
+# where certifi is a dependency of setuptools, it can create a circular
+# dependency when projects attempt to unbundle stuff from setuptools and pip.
+# Though we don't really support that, it makes things easier if we do this and
+# should hopefully cause less issues for end users.
+try:
+    from setuptools import setup
+except ImportError:
+    from distutils.core import setup
+
+
+version_regex = r'__version__ = ["\']([^"\']*)["\']'
+with open('certifi/__init__.py', 'r') as f:
+    text = f.read()
+    match = re.search(version_regex, text)
+
+    if match:
+        VERSION = match.group(1)
+    else:
+        raise RuntimeError("No version number found!")
+
+if sys.argv[-1] == 'publish':
+    os.system('python setup.py sdist bdist_wheel upload')
+    sys.exit()
+
+required = []
+setup(
+    name='certifi',
+    version=VERSION,
+    description='Python package for providing Mozilla\'s CA Bundle.',
+    long_description=open('README.rst').read(),
+    author='Kenneth Reitz',
+    author_email='me@kennethreitz.com',
+    url='http://certifi.io/',
+    packages=[
+        'certifi',
+    ],
+    package_dir={'certifi': 'certifi'},
+    package_data={'certifi': ['*.pem']},
+    # data_files=[('certifi', ['certifi/cacert.pem'])],
+    include_package_data=True,
+    zip_safe=False,
+    license='MPL-2.0',
+    classifiers=(
+        'Development Status :: 5 - Production/Stable',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
+        'Natural Language :: English',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+    ),
+)

+ 5 - 0
desktop/core/ext-py/cffi-1.5.2/AUTHORS → desktop/core/ext-py/cffi-1.11.5/AUTHORS

@@ -1,3 +1,8 @@
 This package has been mostly done by Armin Rigo with help from
 Maciej Fijałkowski. The idea is heavily based (although not directly
 copied) from LuaJIT ffi by Mike Pall.
+
+
+Other contributors:
+
+  Google Inc.

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/LICENSE → desktop/core/ext-py/cffi-1.11.5/LICENSE


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/MANIFEST.in → desktop/core/ext-py/cffi-1.11.5/MANIFEST.in


+ 4 - 1
desktop/core/ext-py/cffi-1.5.2/PKG-INFO → desktop/core/ext-py/cffi-1.11.5/PKG-INFO

@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.5.2
+Version: 1.11.5
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
 Author-email: python-cffi@googlegroups.com
 License: MIT
+Description-Content-Type: UNKNOWN
 Description: 
         CFFI
         ====
@@ -27,5 +28,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.2
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy

+ 30 - 0
desktop/core/ext-py/cffi-1.11.5/README.md

@@ -0,0 +1,30 @@
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+Please see the [Documentation](http://cffi.readthedocs.org/), or the
+uncompiled version in the doc/ subdirectory.
+
+Download
+--------
+
+[Download page](https://bitbucket.org/cffi/cffi/downloads)
+
+Contact
+-------
+
+[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)
+
+Testing/development tips
+------------------------
+
+To run tests under CPython, run::
+
+    pip install pytest     # if you don't have py.test already
+    pip install pycparser
+    python setup.py build_ext -f -i
+    py.test c/ testing/
+
+If you run in another directory (either the tests or another program),
+you should use the environment variable ``PYTHONPATH=/path`` to point
+to the location that contains the ``_cffi_backend.so`` just compiled.

File diff suppressed because it is too large
+ 489 - 153
desktop/core/ext-py/cffi-1.11.5/c/_cffi_backend.c


+ 62 - 20
desktop/core/ext-py/cffi-1.5.2/c/call_python.c → desktop/core/ext-py/cffi-1.11.5/c/call_python.c

@@ -1,30 +1,57 @@
 
 static PyObject *_get_interpstate_dict(void)
 {
-    /* hack around to return a dict that is subinterpreter-local */
+    /* Hack around to return a dict that is subinterpreter-local.
+       Does not return a new reference.  Returns NULL in case of
+       error, but without setting any exception.  (If called late
+       during shutdown, we *can't* set an exception!)
+    */
+    static PyObject *attr_name = NULL;
+    PyThreadState *tstate;
+    PyObject *d, *builtins;
     int err;
-    PyObject *m, *modules = PyThreadState_GET()->interp->modules;
 
-    if (modules == NULL) {
-        PyErr_SetString(FFIError, "subinterpreter already gone?");
+    tstate = PyThreadState_GET();
+    if (tstate == NULL) {
+        /* no thread state! */
         return NULL;
     }
-    m = PyDict_GetItemString(modules, "_cffi_backend._extern_py");
-    if (m == NULL) {
-        m = PyModule_New("_cffi_backend._extern_py");
-        if (m == NULL)
-            return NULL;
-        err = PyDict_SetItemString(modules, "_cffi_backend._extern_py", m);
-        Py_DECREF(m);    /* sys.modules keeps one reference to m */
+
+    builtins = tstate->interp->builtins;
+    if (builtins == NULL) {
+        /* subinterpreter was cleared already, or is being cleared right now,
+           to a point that is too much for us to continue */
+        return NULL;
+    }
+
+    /* from there on, we know the (sub-)interpreter is still valid */
+
+    if (attr_name == NULL) {
+        attr_name = PyText_InternFromString("__cffi_backend_extern_py");
+        if (attr_name == NULL)
+            goto error;
+    }
+
+    d = PyDict_GetItem(builtins, attr_name);
+    if (d == NULL) {
+        d = PyDict_New();
+        if (d == NULL)
+            goto error;
+        err = PyDict_SetItem(builtins, attr_name, d);
+        Py_DECREF(d);    /* if successful, there is one ref left in builtins */
         if (err < 0)
-            return NULL;
+            goto error;
     }
-    return PyModule_GetDict(m);
+    return d;
+
+ error:
+    PyErr_Clear();    /* typically a MemoryError */
+    return NULL;
 }
 
 static PyObject *_ffi_def_extern_decorator(PyObject *outer_args, PyObject *fn)
 {
-    char *s;
+    const char *s;
     PyObject *error, *onerror, *infotuple, *old1;
     int index, err;
     const struct _cffi_global_s *g;
@@ -77,7 +104,7 @@ static PyObject *_ffi_def_extern_decorator(PyObject *outer_args, PyObject *fn)
     interpstate_dict = _get_interpstate_dict();
     if (interpstate_dict == NULL) {
         Py_DECREF(infotuple);
-        return NULL;
+        return PyErr_NoMemory();
     }
 
     externpy = (struct _cffi_externpy_s *)g->address;
@@ -119,7 +146,7 @@ static int _update_cache_to_call_python(struct _cffi_externpy_s *externpy)
 
     interpstate_dict = _get_interpstate_dict();
     if (interpstate_dict == NULL)
-        goto error;
+        return 4;    /* oops, shutdown issue? */
 
     interpstate_key = PyLong_FromVoidPtr((void *)externpy);
     if (interpstate_key == NULL)
@@ -150,7 +177,20 @@ static int _update_cache_to_call_python(struct _cffi_externpy_s *externpy)
 #if (defined(WITH_THREAD) && !defined(_MSC_VER) &&   \
      !defined(__amd64__) && !defined(__x86_64__) &&   \
      !defined(__i386__) && !defined(__i386))
-# define read_barrier()  __sync_synchronize()
+# if defined(HAVE_SYNC_SYNCHRONIZE)
+#   define read_barrier()  __sync_synchronize()
+# elif defined(_AIX)
+#   define read_barrier()  __lwsync()
+# elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
+#   include <mbarrier.h>
+#   define read_barrier()  __compiler_barrier()
+# elif defined(__hpux)
+#   define read_barrier()  _Asm_mf()
+# else
+#   define read_barrier()  /* missing */
+#   warning "no definition for read_barrier(), missing synchronization for\
+ multi-thread initialization in embedded mode"
+# endif
 #else
 # define read_barrier()  (void)0
 #endif
@@ -194,9 +234,10 @@ static void cffi_call_python(struct _cffi_externpy_s *externpy, char *args)
     save_errno();
 
     /* We need the infotuple here.  We could always go through
-       interp->modules['..'][externpy], but to avoid the extra dict
+       _update_cache_to_call_python(), but to avoid the extra dict
        lookups, we cache in (reserved1, reserved2) the last seen pair
-       (interp->modules, infotuple).
+       (interp->modules, infotuple).  The first item in this tuple is
+       a random PyObject that identifies the subinterpreter.
     */
     if (externpy->reserved1 == NULL) {
         /* Not initialized!  We didn't call @ffi.def_extern() on this
@@ -219,8 +260,9 @@ static void cffi_call_python(struct _cffi_externpy_s *externpy, char *args)
     if (err) {
         static const char *msg[] = {
             "no code was attached to it yet with @ffi.def_extern()",
-            "got internal exception (out of memory / shutdown issue)",
+            "got internal exception (out of memory?)",
             "@ffi.def_extern() was not called in the current subinterpreter",
+            "got internal exception (shutdown issue?)",
         };
         fprintf(stderr, "extern \"Python\": function %s() called, "
                         "but %s.  Returning 0.\n", externpy->name, msg[err-1]);

+ 11 - 27
desktop/core/ext-py/cffi-1.5.2/c/cdlopen.c → desktop/core/ext-py/cffi-1.11.5/c/cdlopen.c

@@ -1,6 +1,7 @@
 /* ffi.dlopen() interface with dlopen()/dlsym()/dlclose() */
 
-static void *cdlopen_fetch(PyObject *libname, void *libhandle, char *symbol)
+static void *cdlopen_fetch(PyObject *libname, void *libhandle,
+                           const char *symbol)
 {
     void *address;
 
@@ -39,35 +40,18 @@ static int cdlopen_close(PyObject *libname, void *libhandle)
 
 static PyObject *ffi_dlopen(PyObject *self, PyObject *args)
 {
-    char *filename_or_null, *printable_filename;
+    char *modname;
+    PyObject *temp, *result = NULL;
     void *handle;
-    int flags = 0;
-
-    if (PyTuple_GET_SIZE(args) == 0 || PyTuple_GET_ITEM(args, 0) == Py_None) {
-        PyObject *dummy;
-        if (!PyArg_ParseTuple(args, "|Oi:load_library",
-                              &dummy, &flags))
-            return NULL;
-        filename_or_null = NULL;
-    }
-    else if (!PyArg_ParseTuple(args, "et|i:load_library",
-                          Py_FileSystemDefaultEncoding, &filename_or_null,
-                          &flags))
-        return NULL;
-
-    if ((flags & (RTLD_NOW | RTLD_LAZY)) == 0)
-        flags |= RTLD_NOW;
-    printable_filename = filename_or_null ? filename_or_null : "<None>";
 
-    handle = dlopen(filename_or_null, flags);
-    if (handle == NULL) {
-        const char *error = dlerror();
-        PyErr_Format(PyExc_OSError, "cannot load library '%s': %s",
-                     printable_filename, error);
-        return NULL;
+    handle = b_do_dlopen(args, &modname, &temp);
+    if (handle != NULL)
+    {
+        result = (PyObject *)lib_internal_new((FFIObject *)self,
+                                              modname, handle);
     }
-    return (PyObject *)lib_internal_new((FFIObject *)self,
-                                        printable_filename, handle);
+    Py_XDECREF(temp);
+    return result;
 }
 
 static PyObject *ffi_dlclose(PyObject *self, PyObject *args)

+ 8 - 2
desktop/core/ext-py/cffi-1.5.2/c/cffi1_module.c → desktop/core/ext-py/cffi-1.11.5/c/cffi1_module.c

@@ -2,8 +2,9 @@
 #include "parse_c_type.c"
 #include "realize_c_type.c"
 
-#define CFFI_VERSION_MIN    0x2601
-#define CFFI_VERSION_MAX    0x27FF
+#define CFFI_VERSION_MIN            0x2601
+#define CFFI_VERSION_CHAR16CHAR32   0x2801
+#define CFFI_VERSION_MAX            0x28FF
 
 typedef struct FFIObject_s FFIObject;
 typedef struct LibObject_s LibObject;
@@ -45,6 +46,9 @@ static int init_ffi_lib(PyObject *m)
         if (PyDict_SetItemString(FFI_Type.tp_dict, "CData",
                                  (PyObject *)&CData_Type) < 0)
             return -1;
+        if (PyDict_SetItemString(FFI_Type.tp_dict, "buffer",
+                                 (PyObject *)&MiniBuffer_Type) < 0)
+            return -1;
 
         for (i = 0; all_dlopen_flags[i].name != NULL; i++) {
             x = PyInt_FromLong(all_dlopen_flags[i].value);
@@ -180,6 +184,8 @@ static PyObject *b_init_cffi_1_0_external_module(PyObject *self, PyObject *arg)
     num_exports = 25;
     if (ctx->flags & 1)    /* set to mean that 'extern "Python"' is used */
         num_exports = 26;
+    if (version >= CFFI_VERSION_CHAR16CHAR32)
+        num_exports = 28;
     memcpy(exports, (char *)cffi_exports, num_exports * sizeof(void *));
 
     /* make the module object */

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/cglob.c → desktop/core/ext-py/cffi-1.11.5/c/cglob.c


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/commontypes.c → desktop/core/ext-py/cffi-1.11.5/c/commontypes.c


+ 92 - 29
desktop/core/ext-py/cffi-1.5.2/c/ffi_obj.c → desktop/core/ext-py/cffi-1.11.5/c/ffi_obj.c

@@ -92,7 +92,7 @@ static PyObject *ffiobj_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 /* forward, declared in cdlopen.c because it's mostly useful for this case */
 static int ffiobj_init(PyObject *self, PyObject *args, PyObject *kwds);
 
-static PyObject *ffi_fetch_int_constant(FFIObject *ffi, char *name,
+static PyObject *ffi_fetch_int_constant(FFIObject *ffi, const char *name,
                                         int recursion)
 {
     int index;
@@ -145,7 +145,7 @@ static PyObject *ffi_fetch_int_constant(FFIObject *ffi, char *name,
 #define ACCEPT_ALL      (ACCEPT_STRING | ACCEPT_CTYPE | ACCEPT_CDATA)
 #define CONSIDER_FN_AS_FNPTR  8
 
-static CTypeDescrObject *_ffi_bad_type(FFIObject *ffi, char *input_text)
+static CTypeDescrObject *_ffi_bad_type(FFIObject *ffi, const char *input_text)
 {
     size_t length = strlen(input_text);
     char *extra;
@@ -188,7 +188,7 @@ static CTypeDescrObject *_ffi_type(FFIObject *ffi, PyObject *arg,
         PyObject *x = PyDict_GetItem(types_dict, arg);
 
         if (x == NULL) {
-            char *input_text = PyText_AS_UTF8(arg);
+            const char *input_text = PyText_AS_UTF8(arg);
             int err, index = parse_c_type(&ffi->info, input_text);
             if (index < 0)
                 return _ffi_bad_type(ffi, input_text);
@@ -257,22 +257,20 @@ PyDoc_STRVAR(ffi_sizeof_doc,
 static PyObject *ffi_sizeof(FFIObject *self, PyObject *arg)
 {
     Py_ssize_t size;
-    CTypeDescrObject *ct = _ffi_type(self, arg, ACCEPT_ALL);
-    if (ct == NULL)
-        return NULL;
-
-    size = ct->ct_size;
 
     if (CData_Check(arg)) {
-        CDataObject *cd = (CDataObject *)arg;
-        if (cd->c_type->ct_flags & CT_ARRAY)
-            size = get_array_length(cd) * cd->c_type->ct_itemdescr->ct_size;
+        size = direct_sizeof_cdata((CDataObject *)arg);
     }
-
-    if (size < 0) {
-        PyErr_Format(FFIError, "don't know the size of ctype '%s'",
-                     ct->ct_name);
-        return NULL;
+    else {
+        CTypeDescrObject *ct = _ffi_type(self, arg, ACCEPT_ALL);
+        if (ct == NULL)
+            return NULL;
+        size = ct->ct_size;
+        if (size < 0) {
+            PyErr_Format(FFIError, "don't know the size of ctype '%s'",
+                         ct->ct_name);
+            return NULL;
+        }
     }
     return PyInt_FromSsize_t(size);
 }
@@ -459,20 +457,25 @@ PyDoc_STRVAR(ffi_string_doc,
 #define ffi_string  b_string     /* ffi_string() => b_string()
                                     from _cffi_backend.c */
 
-PyDoc_STRVAR(ffi_buffer_doc,
-"Return a read-write buffer object that references the raw C data\n"
-"pointed to by the given 'cdata'.  The 'cdata' must be a pointer or an\n"
-"array.  Can be passed to functions expecting a buffer, or directly\n"
-"manipulated with:\n"
+PyDoc_STRVAR(ffi_unpack_doc,
+"Unpack an array of C data of the given length,\n"
+"returning a Python string/unicode/list.\n"
+"\n"
+"If 'cdata' is a pointer to 'char', returns a byte string.\n"
+"It does not stop at the first null.  This is equivalent to:\n"
+"ffi.buffer(cdata, length)[:]\n"
+"\n"
+"If 'cdata' is a pointer to 'wchar_t', returns a unicode string.\n"
+"'length' is measured in wchar_t's; it is not the size in bytes.\n"
 "\n"
-"    buf[:]          get a copy of it in a regular string, or\n"
-"    buf[idx]        as a single character\n"
-"    buf[:] = ...\n"
-"    buf[idx] = ...  change the content");
+"If 'cdata' is a pointer to anything else, returns a list of\n"
+"'length' items.  This is a faster equivalent to:\n"
+"[cdata[i] for i in range(length)]");
 
-#define ffi_buffer  b_buffer     /* ffi_buffer() => b_buffer()
+#define ffi_unpack  b_unpack     /* ffi_unpack() => b_unpack()
                                     from _cffi_backend.c */
 
+
 PyDoc_STRVAR(ffi_offsetof_doc,
 "Return the offset of the named field inside the given structure or\n"
 "array, which must be given as a C type name.  You can give several\n"
@@ -689,7 +692,7 @@ PyDoc_STRVAR(ffi_from_handle_doc,
 PyDoc_STRVAR(ffi_from_buffer_doc,
 "Return a <cdata 'char[]'> that points to the data of the given Python\n"
 "object, which must support the buffer interface.  Note that this is\n"
-"not meant to be used on the built-in types str, unicode, or bytearray\n"
+"not meant to be used on the built-in types str or unicode\n"
 "(you can build 'char[]' arrays explicitly) but only on objects\n"
 "containing large quantities of raw data in some other format, like\n"
 "'array.array' or numpy arrays.");
@@ -702,7 +705,12 @@ static PyObject *ffi_from_buffer(PyObject *self, PyObject *arg)
 PyDoc_STRVAR(ffi_gc_doc,
 "Return a new cdata object that points to the same data.\n"
 "Later, when this new cdata object is garbage-collected,\n"
-"'destructor(old_cdata_object)' will be called.");
+"'destructor(old_cdata_object)' will be called.\n"
+"\n"
+"The optional 'size' gives an estimate of the size, used to\n"
+"trigger the garbage collection more eagerly.  So far only used\n"
+"on PyPy.  It tells the GC that the returned object keeps alive\n"
+"roughly 'size' bytes of external memory.");
 
 #define ffi_gc  b_gcp     /* ffi_gc() => b_gcp()
                              from _cffi_backend.c */
@@ -862,6 +870,60 @@ static PyObject *ffi_int_const(FFIObject *self, PyObject *args, PyObject *kwds)
     return x;
 }
 
+PyDoc_STRVAR(ffi_list_types_doc,
+"Returns the user type names known to this FFI instance.\n"
+"This returns a tuple containing three lists of names:\n"
+"(typedef_names, names_of_structs, names_of_unions)");
+
+static PyObject *ffi_list_types(FFIObject *self, PyObject *noargs)
+{
+    Py_ssize_t i, n1 = self->types_builder.ctx.num_typenames;
+    Py_ssize_t n23 = self->types_builder.ctx.num_struct_unions;
+    PyObject *o, *lst[3] = {NULL, NULL, NULL}, *result = NULL;
+
+    lst[0] = PyList_New(n1);
+    if (lst[0] == NULL)
+        goto error;
+    lst[1] = PyList_New(0);
+    if (lst[1] == NULL)
+        goto error;
+    lst[2] = PyList_New(0);
+    if (lst[2] == NULL)
+        goto error;
+
+    for (i = 0; i < n1; i++) {
+        o = PyText_FromString(self->types_builder.ctx.typenames[i].name);
+        if (o == NULL)
+            goto error;
+        PyList_SET_ITEM(lst[0], i, o);
+    }
+
+    for (i = 0; i < n23; i++) {
+        const struct _cffi_struct_union_s *s;
+        int err, index;
+
+        s = &self->types_builder.ctx.struct_unions[i];
+        if (s->name[0] == '$')
+            continue;
+
+        o = PyText_FromString(s->name);
+        if (o == NULL)
+            goto error;
+        index = (s->flags & _CFFI_F_UNION) ? 2 : 1;
+        err = PyList_Append(lst[index], o);
+        Py_DECREF(o);
+        if (err < 0)
+            goto error;
+    }
+    result = PyTuple_Pack(3, lst[0], lst[1], lst[2]);
+    /* fall-through */
+ error:
+    Py_XDECREF(lst[2]);
+    Py_XDECREF(lst[1]);
+    Py_XDECREF(lst[0]);
+    return result;
+}
+
 PyDoc_STRVAR(ffi_memmove_doc,
 "ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.\n"
 "\n"
@@ -1015,7 +1077,6 @@ static PyObject *ffi_init_once(FFIObject *self, PyObject *args, PyObject *kwds)
 static PyMethodDef ffi_methods[] = {
  {"addressof",  (PyCFunction)ffi_addressof,  METH_VARARGS, ffi_addressof_doc},
  {"alignof",    (PyCFunction)ffi_alignof,    METH_O,       ffi_alignof_doc},
- {"buffer",     (PyCFunction)ffi_buffer,     METH_VKW,     ffi_buffer_doc},
  {"def_extern", (PyCFunction)ffi_def_extern, METH_VKW,     ffi_def_extern_doc},
  {"callback",   (PyCFunction)ffi_callback,   METH_VKW,     ffi_callback_doc},
  {"cast",       (PyCFunction)ffi_cast,       METH_VARARGS, ffi_cast_doc},
@@ -1030,6 +1091,7 @@ static PyMethodDef ffi_methods[] = {
 #endif
  {"init_once",  (PyCFunction)ffi_init_once,  METH_VKW,     ffi_init_once_doc},
  {"integer_const",(PyCFunction)ffi_int_const,METH_VKW,     ffi_int_const_doc},
+ {"list_types", (PyCFunction)ffi_list_types, METH_NOARGS,  ffi_list_types_doc},
  {"memmove",    (PyCFunction)ffi_memmove,    METH_VKW,     ffi_memmove_doc},
  {"new",        (PyCFunction)ffi_new,        METH_VKW,     ffi_new_doc},
 {"new_allocator",(PyCFunction)ffi_new_allocator,METH_VKW,ffi_new_allocator_doc},
@@ -1038,6 +1100,7 @@ static PyMethodDef ffi_methods[] = {
  {"sizeof",     (PyCFunction)ffi_sizeof,     METH_O,       ffi_sizeof_doc},
  {"string",     (PyCFunction)ffi_string,     METH_VKW,     ffi_string_doc},
  {"typeof",     (PyCFunction)ffi_typeof,     METH_O,       ffi_typeof_doc},
+ {"unpack",     (PyCFunction)ffi_unpack,     METH_VKW,     ffi_unpack_doc},
  {NULL}
 };
 

+ 7 - 3
desktop/core/ext-py/cffi-1.5.2/c/file_emulator.h → desktop/core/ext-py/cffi-1.11.5/c/file_emulator.h

@@ -31,9 +31,9 @@ static void _close_file_capsule(PyObject *ob_capsule)
 static FILE *PyFile_AsFile(PyObject *ob_file)
 {
     PyObject *ob, *ob_capsule = NULL, *ob_mode = NULL;
-    FILE *f = NULL;
+    FILE *f;
     int fd;
-    char *mode;
+    const char *mode;
 
     ob = PyObject_CallMethod(ob_file, "flush", NULL);
     if (ob == NULL)
@@ -80,7 +80,11 @@ static FILE *PyFile_AsFile(PyObject *ob_file)
         if (PyObject_SetAttrString(ob_file, "__cffi_FILE", ob_capsule) < 0)
             goto fail;
     }
-    return PyCapsule_GetPointer(ob_capsule, "FILE");
+    else {
+        f = PyCapsule_GetPointer(ob_capsule, "FILE");
+    }
+    Py_DECREF(ob_capsule);   /* assumes still at least one reference */
+    return f;
 
  fail:
     Py_XDECREF(ob_mode);

+ 93 - 45
desktop/core/ext-py/cffi-1.5.2/c/lib_obj.c → desktop/core/ext-py/cffi-1.11.5/c/lib_obj.c

@@ -3,8 +3,12 @@
    module originally created by recompile().
 
    A Lib object is special in the sense that it has a custom
-   __getattr__ which returns C globals, functions and constants.  It
-   raises AttributeError for anything else, even attrs like '__class__'.
+   __getattr__ which returns C globals, functions and constants.  The
+   original idea was to raise AttributeError for anything else, even
+   attrs like '__class__', but it breaks various things; now, standard
+   attrs are returned, but in the unlikely case where a user cdef()s
+   the same name, then the standard attr is hidden (and the various
+   things like introspection might break).
 
    A Lib object has got a reference to the _cffi_type_context_s
    structure, which is used to create lazily the objects returned by
@@ -15,9 +19,8 @@ struct CPyExtFunc_s {
     PyMethodDef md;
     void *direct_fn;
     int type_index;
+    char doc[1];
 };
-static const char cpyextfunc_doc[] =
-    "direct call to the C function of the same name";
 
 struct LibObject_s {
     PyObject_HEAD
@@ -30,18 +33,22 @@ struct LibObject_s {
 
 static struct CPyExtFunc_s *_cpyextfunc_get(PyObject *x)
 {
-    struct CPyExtFunc_s *exf;
+    PyObject *y;
+    LibObject *lo;
+    PyCFunctionObject *fo;
 
     if (!PyCFunction_Check(x))
         return NULL;
-    if (!LibObject_Check(PyCFunction_GET_SELF(x)))
+    y = PyCFunction_GET_SELF(x);
+    if (!LibObject_Check(y))
         return NULL;
 
-    exf = (struct CPyExtFunc_s *)(((PyCFunctionObject *)x) -> m_ml);
-    if (exf->md.ml_doc != cpyextfunc_doc)
+    fo = (PyCFunctionObject *)x;
+    lo = (LibObject *)y;
+    if (lo->l_libname != fo->m_module)
         return NULL;
 
-    return exf;
+    return (struct CPyExtFunc_s *)(fo->m_ml);
 }
 
 static PyObject *_cpyextfunc_type(LibObject *lib, struct CPyExtFunc_s *exf)
@@ -78,15 +85,17 @@ static PyObject *_cpyextfunc_type_index(PyObject *x)
 }
 
 static void cdlopen_close_ignore_errors(void *libhandle);  /* forward */
-static void *cdlopen_fetch(PyObject *libname, void *libhandle, char *symbol);
+static void *cdlopen_fetch(PyObject *libname, void *libhandle,
+                           const char *symbol);
 
 static void lib_dealloc(LibObject *lib)
 {
+    PyObject_GC_UnTrack(lib);
     cdlopen_close_ignore_errors(lib->l_libhandle);
     Py_DECREF(lib->l_dict);
     Py_DECREF(lib->l_libname);
     Py_DECREF(lib->l_ffi);
-    PyObject_Del(lib);
+    PyObject_GC_Del(lib);
 }
 
 static int lib_traverse(LibObject *lib, visitproc visit, void *arg)
@@ -111,56 +120,82 @@ static PyObject *lib_build_cpython_func(LibObject *lib,
        built.  The C extension code can then assume that they are,
        by calling _cffi_type().
     */
-    CTypeDescrObject *ct;
+    PyObject *result = NULL;
+    CTypeDescrObject **pfargs = NULL;
+    CTypeDescrObject *fresult;
+    Py_ssize_t nargs = 0;
     struct CPyExtFunc_s *xfunc;
     int i, type_index = _CFFI_GETARG(g->type_op);
     _cffi_opcode_t *opcodes = lib->l_types_builder->ctx.types;
+    static const char *const format = ";\n\nCFFI C function from %s.lib";
+    const char *libname = PyText_AS_UTF8(lib->l_libname);
+    struct funcbuilder_s funcbuilder;
+
+    /* return type: */
+    fresult = realize_c_func_return_type(lib->l_types_builder, opcodes,
+                                       type_index);
+    if (fresult == NULL)
+        goto error;
 
-    if ((((uintptr_t)opcodes[type_index]) & 1) == 0) {
-        /* the function type was already built.  No need to force
-           the arg and return value to be built again. */
-    }
-    else {
-        assert(_CFFI_GETOP(opcodes[type_index]) == _CFFI_OP_FUNCTION);
-
-        /* return type: */
-        ct = realize_c_type(lib->l_types_builder, opcodes,
-                            _CFFI_GETARG(opcodes[type_index]));
+    /* argument types: */
+    /* note that if the arguments are already built, they have a
+       pointer in the 'opcodes' array, and GETOP() returns a
+       random even value.  But OP_FUNCTION_END is odd, so the
+       condition below still works correctly. */
+    i = type_index + 1;
+    while (_CFFI_GETOP(opcodes[i]) != _CFFI_OP_FUNCTION_END)
+        i++;
+    pfargs = alloca(sizeof(CTypeDescrObject *) * (i - type_index - 1));
+    i = type_index + 1;
+    while (_CFFI_GETOP(opcodes[i]) != _CFFI_OP_FUNCTION_END) {
+        CTypeDescrObject *ct = realize_c_type(lib->l_types_builder, opcodes, i);
         if (ct == NULL)
-            return NULL;
-        Py_DECREF(ct);
-
-        /* argument types: */
-        i = type_index + 1;
-        while (_CFFI_GETOP(opcodes[i]) != _CFFI_OP_FUNCTION_END) {
-            ct = realize_c_type(lib->l_types_builder, opcodes, i);
-            if (ct == NULL)
-                return NULL;
-            Py_DECREF(ct);
-            i++;
-        }
+            goto error;
+        pfargs[nargs++] = ct;
+        i++;
     }
 
+    memset(&funcbuilder, 0, sizeof(funcbuilder));
+    if (fb_build_name(&funcbuilder, g->name, pfargs, nargs, fresult, 0) < 0)
+        goto error;
+
     /* xxx the few bytes of memory we allocate here leak, but it's a
        minor concern because it should only occur for CPYTHON_BLTN.
        There is one per real C function in a CFFI C extension module.
        CPython never unloads its C extension modules anyway.
     */
-    xfunc = PyMem_Malloc(sizeof(struct CPyExtFunc_s));
+    xfunc = PyMem_Malloc(sizeof(struct CPyExtFunc_s) +
+                         funcbuilder.nb_bytes +
+                         strlen(format) + strlen(libname));
     if (xfunc == NULL) {
         PyErr_NoMemory();
-        return NULL;
+        goto error;
     }
     memset((char *)xfunc, 0, sizeof(struct CPyExtFunc_s));
     assert(g->address);
     xfunc->md.ml_meth = (PyCFunction)g->address;
     xfunc->md.ml_flags = flags;
     xfunc->md.ml_name = g->name;
-    xfunc->md.ml_doc = cpyextfunc_doc;
+    xfunc->md.ml_doc = xfunc->doc;
     xfunc->direct_fn = g->size_or_direct_fn;
     xfunc->type_index = type_index;
 
-    return PyCFunction_NewEx(&xfunc->md, (PyObject *)lib, lib->l_libname);
+    /* build the docstring */
+    funcbuilder.bufferp = xfunc->doc;
+    if (fb_build_name(&funcbuilder, g->name, pfargs, nargs, fresult, 0) < 0)
+        goto error;
+    sprintf(funcbuilder.bufferp - 1, format, libname);
+    /* done building the docstring */
+
+    result = PyCFunction_NewEx(&xfunc->md, (PyObject *)lib, lib->l_libname);
+    /* fall-through */
+ error:
+    Py_XDECREF(fresult);
+    while (nargs > 0) {
+        --nargs;
+        Py_DECREF(pfargs[nargs]);
+    }
+    return result;
 }
 
 static PyObject *lib_build_and_cache_attr(LibObject *lib, PyObject *name,
@@ -172,7 +207,7 @@ static PyObject *lib_build_and_cache_attr(LibObject *lib, PyObject *name,
     const struct _cffi_global_s *g;
     CTypeDescrObject *ct;
     builder_c_t *types_builder = lib->l_types_builder;
-    char *s = PyText_AsUTF8(name);
+    const char *s = PyText_AsUTF8(name);
     if (s == NULL)
         return NULL;
 
@@ -459,7 +494,7 @@ static PyObject *_lib_dict(LibObject *lib)
 
 static PyObject *lib_getattr(LibObject *lib, PyObject *name)
 {
-    char *p;
+    const char *p;
     PyObject *x;
     LIB_GET_OR_CACHE_ADDR(x, lib, name, goto missing);
 
@@ -470,6 +505,7 @@ static PyObject *lib_getattr(LibObject *lib, PyObject *name)
     return x;
 
  missing:
+    /*** ATTRIBUTEERROR IS SET HERE ***/
     p = PyText_AsUTF8(name);
     if (p == NULL)
         return NULL;
@@ -483,15 +519,27 @@ static PyObject *lib_getattr(LibObject *lib, PyObject *name)
     }
     if (strcmp(p, "__class__") == 0) {
         PyErr_Clear();
-        x = (PyObject *)Py_TYPE(lib);
+        x = (PyObject *)&PyModule_Type;
+        /* ^^^ used to be Py_TYPE(lib).  But HAAAAAACK!  That makes
+           help() behave correctly.  I couldn't find a more reasonable
+           way.  Urgh. */
         Py_INCREF(x);
         return x;
     }
-    /* this hack is for Python 3.5 */
+    /* this hack is for Python 3.5, and also to give a more 
+       module-like behavior */
     if (strcmp(p, "__name__") == 0) {
         PyErr_Clear();
-        return lib_repr(lib);
+        return PyText_FromFormat("%s.lib", PyText_AS_UTF8(lib->l_libname));
+    }
+#if PY_MAJOR_VERSION >= 3
+    if (strcmp(p, "__loader__") == 0 || strcmp(p, "__spec__") == 0) {
+        /* some more module-like behavior hacks */
+        PyErr_Clear();
+        Py_INCREF(Py_None);
+        return Py_None;
     }
+#endif
     return NULL;
 }
 
@@ -545,7 +593,7 @@ static PyTypeObject Lib_Type = {
     (getattrofunc)lib_getattr,                  /* tp_getattro */
     (setattrofunc)lib_setattr,                  /* tp_setattro */
     0,                                          /* tp_as_buffer */
-    Py_TPFLAGS_DEFAULT,                         /* tp_flags */
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
     0,                                          /* tp_doc */
     (traverseproc)lib_traverse,                 /* tp_traverse */
     0,                                          /* tp_clear */
@@ -577,7 +625,7 @@ static LibObject *lib_internal_new(FFIObject *ffi, char *module_name,
     if (dict == NULL)
         goto err2;
 
-    lib = PyObject_New(LibObject, &Lib_Type);
+    lib = (LibObject *)PyType_GenericAlloc(&Lib_Type, 0);
     if (lib == NULL)
         goto err3;
 

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/ffi.c → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi.c


+ 3 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/ffi.h → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi.h

@@ -231,6 +231,9 @@ ffi_prep_closure_loc (ffi_closure*,
 		  void *user_data,
 		  void *codeloc);
 
+/* AR: for cffi we need the following API, and not the _loc version */
+#define ffi_prep_closure(a,b,c,d)  ffi_prep_closure_loc(a,b,c,d,a)
+
 typedef struct {
   char tramp[FFI_TRAMPOLINE_SIZE];
 

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/ffi_common.h → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffi_common.h


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/fficonfig.h → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/fficonfig.h


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/ffitarget.h → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/ffitarget.h


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/prep_cif.c → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/prep_cif.c


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/types.c → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/types.c


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/win32.c → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win32.c


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/win64.asm → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win64.asm


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/libffi_msvc/win64.obj → desktop/core/ext-py/cffi-1.11.5/c/libffi_msvc/win64.obj


+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/malloc_closure.h → desktop/core/ext-py/cffi-1.11.5/c/malloc_closure.h


+ 125 - 7
desktop/core/ext-py/cffi-1.5.2/c/minibuffer.h → desktop/core/ext-py/cffi-1.11.5/c/minibuffer.h

@@ -56,14 +56,17 @@ static int mb_ass_item(MiniBufferObj *self, Py_ssize_t idx, PyObject *other)
     }
 }
 
+/* forward: from _cffi_backend.c */
+static int _fetch_as_buffer(PyObject *x, Py_buffer *view, int writable_only);
+
 static int mb_ass_slice(MiniBufferObj *self,
                         Py_ssize_t left, Py_ssize_t right, PyObject *other)
 {
-    const void *buffer;
-    Py_ssize_t buffer_len, count;
+    Py_ssize_t count;
     Py_ssize_t size = self->mb_size;
+    Py_buffer src_view;
 
-    if (PyObject_AsReadBuffer(other, &buffer, &buffer_len) < 0)
+    if (_fetch_as_buffer(other, &src_view, 0) < 0)
         return -1;
 
     if (left < 0)     left = 0;
@@ -71,12 +74,14 @@ static int mb_ass_slice(MiniBufferObj *self,
     if (left > right) left = right;
 
     count = right - left;
-    if (count != buffer_len) {
+    if (count != src_view.len) {
+        PyBuffer_Release(&src_view);
         PyErr_SetString(PyExc_ValueError,
                         "right operand length must match slice length");
         return -1;
     }
-    memcpy(self->mb_data + left, buffer, count);
+    memcpy(self->mb_data + left, src_view.buf, count);
+    PyBuffer_Release(&src_view);
     return 0;
 }
 
@@ -155,8 +160,91 @@ mb_clear(MiniBufferObj *ob)
     return 0;
 }
 
+static PyObject *
+mb_richcompare(PyObject *self, PyObject *other, int op)
+{
+    Py_ssize_t self_size, other_size;
+    Py_buffer self_bytes, other_bytes;
+    PyObject *res;
+    Py_ssize_t minsize;
+    int cmp, rc;
+
+    /* Bytes can be compared to anything that supports the (binary)
+       buffer API.  Except that a comparison with Unicode is always an
+       error, even if the comparison is for equality. */
+    rc = PyObject_IsInstance(self, (PyObject*)&PyUnicode_Type);
+    if (!rc)
+        rc = PyObject_IsInstance(other, (PyObject*)&PyUnicode_Type);
+    if (rc < 0)
+        return NULL;
+    if (rc) {
+        Py_INCREF(Py_NotImplemented);
+        return Py_NotImplemented;
+    }
+
+    if (PyObject_GetBuffer(self, &self_bytes, PyBUF_SIMPLE) != 0) {
+        PyErr_Clear();
+        Py_INCREF(Py_NotImplemented);
+        return Py_NotImplemented;
+
+    }
+    self_size = self_bytes.len;
+
+    if (PyObject_GetBuffer(other, &other_bytes, PyBUF_SIMPLE) != 0) {
+        PyErr_Clear();
+        PyBuffer_Release(&self_bytes);
+        Py_INCREF(Py_NotImplemented);
+        return Py_NotImplemented;
+
+    }
+    other_size = other_bytes.len;
+
+    if (self_size != other_size && (op == Py_EQ || op == Py_NE)) {
+        /* Shortcut: if the lengths differ, the objects differ */
+        cmp = (op == Py_NE);
+    }
+    else {
+        minsize = self_size;
+        if (other_size < minsize)
+            minsize = other_size;
+
+        cmp = memcmp(self_bytes.buf, other_bytes.buf, minsize);
+        /* In ISO C, memcmp() guarantees to use unsigned bytes! */
+
+        if (cmp == 0) {
+            if (self_size < other_size)
+                cmp = -1;
+            else if (self_size > other_size)
+                cmp = 1;
+        }
+
+        switch (op) {
+        case Py_LT: cmp = cmp <  0; break;
+        case Py_LE: cmp = cmp <= 0; break;
+        case Py_EQ: cmp = cmp == 0; break;
+        case Py_NE: cmp = cmp != 0; break;
+        case Py_GT: cmp = cmp >  0; break;
+        case Py_GE: cmp = cmp >= 0; break;
+        }
+    }
+
+    res = cmp ? Py_True : Py_False;
+    PyBuffer_Release(&self_bytes);
+    PyBuffer_Release(&other_bytes);
+    Py_INCREF(res);
+    return res;
+}
+
 #if PY_MAJOR_VERSION >= 3
 /* pfffffffffffff pages of copy-paste from listobject.c */
+
+/* pfffffffffffff#2: the PySlice_GetIndicesEx() *macro* should not
+   be called, because C extension modules compiled with it differ
+   on ABI between 3.6.0, 3.6.1 and 3.6.2. */
+#if PY_VERSION_HEX < 0x03070000 && defined(PySlice_GetIndicesEx) && !defined(PYPY_VERSION)
+#undef PySlice_GetIndicesEx
+#endif
+
 static PyObject *mb_subscript(MiniBufferObj *self, PyObject *item)
 {
     if (PyIndex_Check(item)) {
@@ -238,6 +326,22 @@ static PyMappingMethods mb_as_mapping = {
 # define MINIBUF_TPFLAGS (Py_TPFLAGS_HAVE_GETCHARBUFFER | Py_TPFLAGS_HAVE_NEWBUFFER)
 #endif
 
+PyDoc_STRVAR(ffi_buffer_doc,
+"ffi.buffer(cdata[, byte_size]):\n"
+"Return a read-write buffer object that references the raw C data\n"
+"pointed to by the given 'cdata'.  The 'cdata' must be a pointer or an\n"
+"array.  Can be passed to functions expecting a buffer, or directly\n"
+"manipulated with:\n"
+"\n"
+"    buf[:]          get a copy of it in a regular string, or\n"
+"    buf[idx]        as a single character\n"
+"    buf[:] = ...\n"
+"    buf[idx] = ...  change the content");
+
+static PyObject *            /* forward, implemented in _cffi_backend.c */
+b_buffer_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+
+
 static PyTypeObject MiniBuffer_Type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "_cffi_backend.buffer",
@@ -268,11 +372,25 @@ static PyTypeObject MiniBuffer_Type = {
     &mb_as_buffer,                              /* tp_as_buffer */
     (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
         MINIBUF_TPFLAGS),                       /* tp_flags */
-    0,                                          /* tp_doc */
+    ffi_buffer_doc,                             /* tp_doc */
     (traverseproc)mb_traverse,                  /* tp_traverse */
     (inquiry)mb_clear,                          /* tp_clear */
-    0,                                          /* tp_richcompare */
+    (richcmpfunc)mb_richcompare,                /* tp_richcompare */
     offsetof(MiniBufferObj, mb_weakreflist),    /* tp_weaklistoffset */
+    0,                                          /* tp_iter */
+    0,                                          /* tp_iternext */
+    0,                                          /* tp_methods */
+    0,                                          /* tp_members */
+    0,                                          /* tp_getset */
+    0,                                          /* tp_base */
+    0,                                          /* tp_dict */
+    0,                                          /* tp_descr_get */
+    0,                                          /* tp_descr_set */
+    0,                                          /* tp_dictoffset */
+    0,                                          /* tp_init */
+    0,                                          /* tp_alloc */
+    b_buffer_new,                               /* tp_new */
+    0,                                          /* tp_free */
 };
 
 static PyObject *minibuffer_new(char *data, Py_ssize_t size,
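
The minibuffer changes above give ffi.buffer a docstring, a tp_new slot (so the buffer type itself is constructible), and rich comparison against any object that exposes the buffer protocol. A minimal usage sketch, assuming cffi 1.11.x is installed:

    import cffi

    ffi = cffi.FFI()
    p = ffi.new("char[]", b"hi there")   # char[9], includes the terminating NUL
    buf = ffi.buffer(p)                  # read-write view over the raw C bytes
    assert buf[:] == b"hi there\x00"
    buf[0:2] = b"HI"                     # slice assignment writes through to p
    assert ffi.string(p) == b"HI there"
    assert buf == b"HI there\x00"        # comparison provided by mb_richcompare
    assert type(buf) is ffi.buffer       # ffi.buffer is the buffer type itself (tp_new above)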

+ 22 - 17
desktop/core/ext-py/cffi-1.5.2/c/misc_thread_common.h → desktop/core/ext-py/cffi-1.11.5/c/misc_thread_common.h

@@ -29,12 +29,7 @@ static void cffi_thread_shutdown(void *p)
     struct cffi_tls_s *tls = (struct cffi_tls_s *)p;
 
     if (tls->local_thread_state != NULL) {
-        /* We need to re-acquire the GIL temporarily to free the
-           thread state.  I hope it is not a problem to do it in
-           a thread-local destructor.
-        */
-        PyEval_RestoreThread(tls->local_thread_state);
-        PyThreadState_DeleteCurrent();
+        PyThreadState_Delete(tls->local_thread_state);
     }
     free(tls);
 }
@@ -67,23 +62,33 @@ static void restore_errno_only(void)
 #endif
 
 
-/* Seems that CPython 3.5.1 made our job harder.  Did not find out how
-   to do that without these hacks.  We can't use PyThreadState_GET(),
-   because that calls PyThreadState_Get() which fails an assert if the
-   result is NULL. */
-#if PY_MAJOR_VERSION >= 3 && !defined(_Py_atomic_load_relaxed)
-                             /* this was abruptly un-defined in 3.5.1 */
-void *volatile _PyThreadState_Current;
-   /* XXX simple volatile access is assumed atomic */
-#  define _Py_atomic_load_relaxed(pp)  (*(pp))
+/* MESS.  We can't use PyThreadState_GET(), because that calls
+   PyThreadState_Get() which fails an assert if the result is NULL.
+   
+   * in Python 2.7 and <= 3.4, the variable _PyThreadState_Current
+     is directly available, so use that.
+
+   * in Python 3.5, the variable is available too, but it might be
+     the case that the headers don't define it (this changed in 3.5.1).
+     In case we're compiling with 3.5.x with x >= 1, we need to
+     manually define this variable.
+
+   * in Python >= 3.6 there is _PyThreadState_UncheckedGet().
+     It was added in 3.5.2 but should never be used in 3.5.x
+     because it is not available in 3.5.0 or 3.5.1.
+*/
+#if PY_VERSION_HEX >= 0x03050100 && PY_VERSION_HEX < 0x03060000
+PyAPI_DATA(void *volatile) _PyThreadState_Current;
 #endif
 
 static PyThreadState *get_current_ts(void)
 {
-#if PY_MAJOR_VERSION >= 3
+#if PY_VERSION_HEX >= 0x03060000
+    return _PyThreadState_UncheckedGet();
+#elif defined(_Py_atomic_load_relaxed)
     return (PyThreadState*)_Py_atomic_load_relaxed(&_PyThreadState_Current);
 #else
-    return _PyThreadState_Current;
+    return (PyThreadState*)_PyThreadState_Current;  /* assume atomic read */
 #endif
 }
 

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/c/misc_thread_posix.h → desktop/core/ext-py/cffi-1.11.5/c/misc_thread_posix.h


+ 6 - 6
desktop/core/ext-py/cffi-1.5.2/c/misc_win32.h → desktop/core/ext-py/cffi-1.11.5/c/misc_win32.h

@@ -192,7 +192,12 @@ static PyObject *b_getwinerror(PyObject *self, PyObject *args, PyObject *kwds)
 
 static void *dlopen(const char *filename, int flag)
 {
-    return (void *)LoadLibrary(filename);
+    return (void *)LoadLibraryA(filename);
+}
+
+static void *dlopenW(const wchar_t *filename)
+{
+    return (void *)LoadLibraryW(filename);
 }
 
 static void *dlsym(void *handle, const char *symbol)
@@ -234,8 +239,3 @@ static const char *dlerror(void)
     sprintf(buf, "error 0x%x", (unsigned int)dw);
     return buf;
 }
-
-/************************************************************/
-/* obscure */
-
-#define ffi_prep_closure(a,b,c,d)  ffi_prep_closure_loc(a,b,c,d,a)
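
misc_win32.h now routes narrow paths through LoadLibraryA explicitly and adds a dlopenW wrapper around LoadLibraryW, presumably so that library paths containing non-ANSI characters can be opened on Windows. A hedged sketch of the intended user-visible effect (the DLL name below is only an assumed example):

    import sys
    import cffi

    ffi = cffi.FFI()
    if sys.platform == "win32":
        # A unicode path can reach LoadLibraryW instead of being forced through
        # the ANSI code page; "kernel32.dll" is just an illustrative name.
        lib = ffi.dlopen(u"kernel32.dll")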

+ 17 - 1
desktop/core/ext-py/cffi-1.5.2/c/parse_c_type.c → desktop/core/ext-py/cffi-1.11.5/c/parse_c_type.c

@@ -25,7 +25,7 @@ enum token_e {
     /* keywords */
     TOK__BOOL,
     TOK_CHAR,
-    //TOK__COMPLEX,
+    TOK__COMPLEX,
     TOK_CONST,
     TOK_DOUBLE,
     TOK_ENUM,
@@ -159,6 +159,7 @@ static void next_token(token_t *tok)
         if (tok->size == 5 && !memcmp(p, "_Bool", 5))  tok->kind = TOK__BOOL;
         if (tok->size == 7 && !memcmp(p,"__cdecl",7))  tok->kind = TOK_CDECL;
         if (tok->size == 9 && !memcmp(p,"__stdcall",9))tok->kind = TOK_STDCALL;
+        if (tok->size == 8 && !memcmp(p,"_Complex",8)) tok->kind = TOK__COMPLEX;
         break;
     case 'c':
         if (tok->size == 4 && !memcmp(p, "char", 4))   tok->kind = TOK_CHAR;
@@ -492,6 +493,7 @@ int search_standard_typename(const char *p, size_t size)
 
     case '1':
         if (size == 8 && !memcmp(p, "uint16", 6)) return _CFFI_PRIM_UINT16;
+        if (size == 8 && !memcmp(p, "char16", 6)) return _CFFI_PRIM_CHAR16;
         break;
 
     case '2':
@@ -500,6 +502,7 @@ int search_standard_typename(const char *p, size_t size)
 
     case '3':
         if (size == 8 && !memcmp(p, "uint32", 6)) return _CFFI_PRIM_UINT32;
+        if (size == 8 && !memcmp(p, "char32", 6)) return _CFFI_PRIM_CHAR32;
         break;
 
     case '4':
@@ -601,6 +604,7 @@ static int parse_complete(token_t *tok)
 {
     unsigned int t0;
     _cffi_opcode_t t1;
+    _cffi_opcode_t t1complex;
     int modifiers_length, modifiers_sign;
 
  qualifiers:
@@ -656,6 +660,8 @@ static int parse_complete(token_t *tok)
         break;
     }
 
+    t1complex = 0;
+
     if (modifiers_length || modifiers_sign) {
 
         switch (tok->kind) {
@@ -666,6 +672,7 @@ static int parse_complete(token_t *tok)
         case TOK_STRUCT:
         case TOK_UNION:
         case TOK_ENUM:
+        case TOK__COMPLEX:
             return parse_error(tok, "invalid combination of types");
 
         case TOK_DOUBLE:
@@ -719,9 +726,11 @@ static int parse_complete(token_t *tok)
             break;
         case TOK_FLOAT:
             t1 = _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_FLOAT);
+            t1complex = _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_FLOATCOMPLEX);
             break;
         case TOK_DOUBLE:
             t1 = _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_DOUBLE);
+            t1complex = _CFFI_OP(_CFFI_OP_PRIMITIVE, _CFFI_PRIM_DOUBLECOMPLEX);
             break;
         case TOK_IDENTIFIER:
         {
@@ -788,6 +797,13 @@ static int parse_complete(token_t *tok)
         }
         next_token(tok);
     }
+    if (tok->kind == TOK__COMPLEX)
+    {
+        if (t1complex == 0)
+            return parse_error(tok, "_Complex type combination unsupported");
+        t1 = t1complex;
+        next_token(tok);
+    }
 
     return parse_sequel(tok, write_ds(tok, t1));
 }
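
parse_c_type.c now tokenizes _Complex and maps float _Complex / double _Complex onto the new complex primitive opcodes, and the backend grows matching primitive types. A minimal sketch using the low-level _cffi_backend API, mirroring test_complex_types further down:

    from _cffi_backend import (new_primitive_type, new_pointer_type,
                               new_array_type, newp, cast)

    BComplex = new_primitive_type("double _Complex")   # the new complex primitive
    c = cast(BComplex, 1.25 + 2.5j)
    assert complex(c) == 1.25 + 2.5j

    arr = newp(new_array_type(new_pointer_type(BComplex), 4), None)
    arr[1] = 12.34 + 56.78j                  # items read back as Python complex
    assert arr[1] == 12.34 + 56.78j          # exact for double; float would round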

+ 37 - 4
desktop/core/ext-py/cffi-1.5.2/c/realize_c_type.c → desktop/core/ext-py/cffi-1.11.5/c/realize_c_type.c

@@ -151,6 +151,10 @@ static PyObject *build_primitive_type(int num)
         "uint_fast64_t",
         "intmax_t",
         "uintmax_t",
+        "float _Complex",
+        "double _Complex",
+        "char16_t",
+        "char32_t",
     };
     PyObject *x;
 
@@ -265,8 +269,11 @@ realize_c_type(builder_c_t *builder, _cffi_opcode_t opcodes[], int index)
     PyObject *x = realize_c_type_or_func(builder, opcodes, index);
     if (x == NULL || CTypeDescr_Check(x))
         return (CTypeDescrObject *)x;
-    else
-        return unexpected_fn_type(x);
+    else {
+        unexpected_fn_type(x);
+        Py_DECREF(x);
+        return NULL;
+    }
 }
 
 static void _realize_name(char *target, const char *prefix, const char *srcname)
@@ -645,6 +652,32 @@ realize_c_type_or_func(builder_c_t *builder,
     return x;
 };
 
+static CTypeDescrObject *
+realize_c_func_return_type(builder_c_t *builder,
+                           _cffi_opcode_t opcodes[], int index)
+{
+    PyObject *x;
+    _cffi_opcode_t op = opcodes[index];
+
+    if ((((uintptr_t)op) & 1) == 0) {
+        /* already built: assert that it is a function and fish
+           for the return type */
+        x = (PyObject *)op;
+        assert(PyTuple_Check(x));   /* from _CFFI_OP_FUNCTION */
+        x = PyTuple_GET_ITEM(x, 0);
+        assert(CTypeDescr_Check(x));
+        assert(((CTypeDescrObject *)x)->ct_flags & CT_FUNCTIONPTR);
+        x = PyTuple_GET_ITEM(((CTypeDescrObject *)x)->ct_stuff, 1);
+        assert(CTypeDescr_Check(x));
+        Py_INCREF(x);
+        return (CTypeDescrObject *)x;
+    }
+    else {
+        assert(_CFFI_GETOP(op) == _CFFI_OP_FUNCTION);
+        return realize_c_type(builder, opcodes, _CFFI_GETARG(opcodes[index]));
+    }
+}
+
 static int do_realize_lazy_struct(CTypeDescrObject *ct)
 {
     /* This is called by force_lazy_struct() in _cffi_backend.c */
@@ -704,13 +737,13 @@ static int do_realize_lazy_struct(CTypeDescrObject *ct)
                 return -1;
             }
 
-            if (fld->field_offset == (size_t)-1) {
+            if (ctf != NULL && fld->field_offset == (size_t)-1) {
                 /* unnamed struct, with field positions and sizes entirely
                    determined by complete_struct_or_union() and not checked.
                    Or, bitfields (field_size >= 0), similarly not checked. */
                 assert(fld->field_size == (size_t)-1 || fbitsize >= 0);
             }
-            else if (detect_custom_layout(ct, SF_STD_FIELD_POS,
+            else if (ctf == NULL || detect_custom_layout(ct, SF_STD_FIELD_POS,
                                      ctf->ct_size, fld->field_size,
                                      "wrong size for field '",
                                      fld->name, "'") < 0) {
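
Alongside the complex primitives, realize_c_type.c registers char16_t and char32_t. A short sketch of their semantics, mirroring test_char16, test_char32 and test_wchar_variants_mix in test_c.py below:

    from _cffi_backend import (new_primitive_type, new_pointer_type,
                               new_array_type, newp, cast, sizeof)

    BChar16 = new_primitive_type("char16_t")
    BChar32 = new_primitive_type("char32_t")
    assert sizeof(BChar16) == 2 and sizeof(BChar32) == 4
    assert int(cast(BChar16, -1)) == 0xffff        # always unsigned

    # A non-BMP character needs two char16_t units (a surrogate pair) but one char32_t:
    a16 = newp(new_array_type(new_pointer_type(BChar16), None), u"\U00012345")
    a32 = newp(new_array_type(new_pointer_type(BChar32), None), u"\U00012345")
    assert len(a16) == 3 and len(a32) == 2         # both lengths include the terminating 0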

+ 606 - 106
desktop/core/ext-py/cffi-1.5.2/c/test_c.py → desktop/core/ext-py/cffi-1.11.5/c/test_c.py

@@ -12,9 +12,9 @@ from _cffi_backend import _testfunc, _get_types, _get_common_types, __version__
 # ____________________________________________________________
 
 import sys
-assert __version__ == "1.5.2", ("This test_c.py file is for testing a version"
-                                " of cffi that differs from the one that we"
-                                " get from 'import _cffi_backend'")
+assert __version__ == "1.11.5", ("This test_c.py file is for testing a version"
+                                 " of cffi that differs from the one that we"
+                                 " get from 'import _cffi_backend'")
 if sys.version_info < (3,):
     type_or_class = "type"
     mandatory_b_prefix = ''
@@ -27,6 +27,7 @@ if sys.version_info < (3,):
                                        .replace(r'\\U', r'\U'))
     u = U()
     str2bytes = str
+    strict_compare = False
 else:
     type_or_class = "class"
     long = int
@@ -38,6 +39,7 @@ else:
     bitem2bchr = bytechr
     u = ""
     str2bytes = lambda s: bytes(s, "ascii")
+    strict_compare = True
 
 def size_of_int():
     BInt = new_primitive_type("int")
@@ -59,6 +61,10 @@ def find_and_load_library(name, flags=RTLD_NOW):
         path = None
     else:
         path = ctypes.util.find_library(name)
+        if path is None and name == 'c':
+            assert sys.platform == 'win32'
+            assert sys.version_info >= (3,)
+            py.test.skip("dlopen(None) cannot work on Windows with Python 3")
     return load_library(path, flags)
 
 def test_load_library():
@@ -88,8 +94,8 @@ def test_new_primitive_type():
     assert repr(p) == "<ctype 'signed char'>"
 
 def check_dir(p, expected):
-    got = set(name for name in dir(p) if not name.startswith('_'))
-    assert got == set(expected)
+    got = [name for name in dir(p) if not name.startswith('_')]
+    assert got == sorted(expected)
 
 def test_inspect_primitive_type():
     p = new_primitive_type("signed char")
@@ -106,11 +112,11 @@ def test_cast_to_signed_char():
     x = cast(p, -66 + (1<<199)*256)
     assert repr(x) == "<cdata 'signed char' -66>"
     assert int(x) == -66
-    assert (x == cast(p, -66)) is False
-    assert (x != cast(p, -66)) is True
+    assert (x == cast(p, -66)) is True
+    assert (x != cast(p, -66)) is False
     q = new_primitive_type("short")
-    assert (x == cast(q, -66)) is False
-    assert (x != cast(q, -66)) is True
+    assert (x == cast(q, -66)) is True
+    assert (x != cast(q, -66)) is False
 
 def test_sizeof_type():
     py.test.raises(TypeError, sizeof, 42.5)
@@ -152,9 +158,13 @@ def test_float_types():
     INF = 1E200 * 1E200
     for name in ["float", "double"]:
         p = new_primitive_type(name)
-        assert bool(cast(p, 0))
+        assert bool(cast(p, 0)) is False      # since 1.7
+        assert bool(cast(p, -0.0)) is False   # since 1.7
+        assert bool(cast(p, 1e-42)) is True
+        assert bool(cast(p, -1e-42)) is True
         assert bool(cast(p, INF))
         assert bool(cast(p, -INF))
+        assert bool(cast(p, float("nan")))
         assert int(cast(p, -150)) == -150
         assert int(cast(p, 61.91)) == 61
         assert long(cast(p, 61.91)) == 61
@@ -171,7 +181,7 @@ def test_float_types():
             assert float(cast(p, 1.1)) != 1.1     # rounding error
             assert float(cast(p, 1E200)) == INF   # limited range
 
-        assert cast(p, -1.1) != cast(p, -1.1)
+        assert cast(p, -1.1) == cast(p, -1.1)
         assert repr(float(cast(p, -0.0))) == '-0.0'
         assert float(cast(p, b'\x09')) == 9.0
         assert float(cast(p, u+'\x09')) == 9.0
@@ -179,42 +189,62 @@ def test_float_types():
         py.test.raises(TypeError, cast, p, None)
 
 def test_complex_types():
-    py.test.skip("later")
     INF = 1E200 * 1E200
     for name in ["float", "double"]:
-        p = new_primitive_type("_Complex " + name)
-        assert bool(cast(p, 0))
+        p = new_primitive_type(name + " _Complex")
+        assert bool(cast(p, 0)) is False
         assert bool(cast(p, INF))
         assert bool(cast(p, -INF))
-        assert bool(cast(p, 0j))
+        assert bool(cast(p, 0j)) is False
         assert bool(cast(p, INF*1j))
         assert bool(cast(p, -INF*1j))
+        # "can't convert complex to float", like CPython's "float(0j)"
         py.test.raises(TypeError, int, cast(p, -150))
         py.test.raises(TypeError, long, cast(p, -150))
         py.test.raises(TypeError, float, cast(p, -150))
         assert complex(cast(p, 1.25)) == 1.25
         assert complex(cast(p, 1.25j)) == 1.25j
-        assert float(cast(p, INF*1j)) == INF*1j
-        assert float(cast(p, -INF)) == -INF
+        assert complex(cast(p, complex(0,INF))) == complex(0,INF)
+        assert complex(cast(p, -INF)) == -INF
         if name == "float":
             assert complex(cast(p, 1.1j)) != 1.1j         # rounding error
             assert complex(cast(p, 1E200+3j)) == INF+3j   # limited range
-            assert complex(cast(p, 3+1E200j)) == 3+INF*1j # limited range
+            assert complex(cast(p, complex(3,1E200))) == complex(3,INF) # limited range
 
-        assert cast(p, -1.1j) != cast(p, -1.1j)
+        assert cast(p, -1.1j) == cast(p, -1.1j)
         assert repr(complex(cast(p, -0.0)).real) == '-0.0'
-        assert repr(complex(cast(p, -0j))) == '-0j'
-        assert complex(cast(p, '\x09')) == 9.0
-        assert complex(cast(p, True)) == 1.0
+        #assert repr(complex(cast(p, -0j))) == '-0j'   # http://bugs.python.org/issue29602
+        assert complex(cast(p, b'\x09')) == 9.0 + 0j
+        assert complex(cast(p, u+'\x09')) == 9.0 + 0j
+        assert complex(cast(p, True)) == 1.0 + 0j
         py.test.raises(TypeError, cast, p, None)
         #
-        py.test.raises(cast, new_primitive_type(name), 1+2j)
-    py.test.raises(cast, new_primitive_type("int"), 1+2j)
+        py.test.raises(TypeError, cast, new_primitive_type(name), 1+0j)
+        #
+        for basetype in ["char", "int", "uint64_t", "float",
+                         "double", "long double"]:
+            baseobj = cast(new_primitive_type(basetype), 65)
+            py.test.raises(TypeError, complex, baseobj)
+        #
+        BArray = new_array_type(new_pointer_type(p), 10)
+        x = newp(BArray, None)
+        x[5] = 12.34 + 56.78j
+        assert type(x[5]) is complex
+        assert abs(x[5] - (12.34 + 56.78j)) < 1e-5
+        assert (x[5] == 12.34 + 56.78j) == (name == "double")  # rounding error
+        #
+        class Foo:
+            def __complex__(self):
+                return 2 + 3j
+        assert complex(Foo()) == 2 + 3j
+        assert complex(cast(p, Foo())) == 2 + 3j
+    py.test.raises(TypeError, cast, new_primitive_type("int"), 1+0j)
 
 def test_character_type():
     p = new_primitive_type("char")
-    assert bool(cast(p, '\x00'))
-    assert cast(p, '\x00') != cast(p, -17*256)
+    assert bool(cast(p, 'A')) is True
+    assert bool(cast(p, '\x00')) is False    # since 1.7
+    assert cast(p, '\x00') == cast(p, -17*256)
     assert int(cast(p, 'A')) == 65
     assert long(cast(p, 'A')) == 65
     assert type(int(cast(p, 'A'))) is int
@@ -371,29 +401,12 @@ def test_load_standard_library():
     x = find_and_load_library(None)
     BVoidP = new_pointer_type(new_void_type())
     assert x.load_function(BVoidP, 'strcpy')
-    py.test.raises(KeyError, x.load_function,
+    py.test.raises(AttributeError, x.load_function,
                    BVoidP, 'xxx_this_function_does_not_exist')
     # the next one is from 'libm', not 'libc', but we assume
     # that it is already loaded too, so it should work
     assert x.load_function(BVoidP, 'sqrt')
 
-def test_hash_differences():
-    BChar = new_primitive_type("char")
-    BInt = new_primitive_type("int")
-    BFloat = new_primitive_type("float")
-    for i in range(1, 20):
-        x1 = cast(BChar, chr(i))
-        x2 = cast(BInt, i)
-        if hash(x1) != hash(x2):
-            break
-    else:
-        raise AssertionError("hashes are equal")
-    for i in range(1, 20):
-        if hash(cast(BFloat, i)) != hash(float(i)):
-            break
-    else:
-        raise AssertionError("hashes are equal")
-
 def test_no_len_on_nonarray():
     p = new_primitive_type("int")
     py.test.raises(TypeError, len, cast(p, 42))
@@ -582,6 +595,19 @@ def test_array_sub():
     e = py.test.raises(TypeError, "q - a")
     assert str(e.value) == "cannot subtract cdata 'short *' and cdata 'int *'"
 
+def test_ptr_sub_unaligned():
+    BInt = new_primitive_type("int")
+    BIntPtr = new_pointer_type(BInt)
+    a = cast(BIntPtr, 1240)
+    for bi in range(1430, 1438):
+        b = cast(BIntPtr, bi)
+        if ((bi - 1240) % size_of_int()) == 0:
+            assert b - a == (bi - 1240) // size_of_int()
+            assert a - b == (1240 - bi) // size_of_int()
+        else:
+            py.test.raises(ValueError, "b - a")
+            py.test.raises(ValueError, "a - b")
+
 def test_cast_primitive_from_cdata():
     p = new_primitive_type("int")
     n = cast(p, cast(p, -42))
@@ -730,8 +756,14 @@ def test_struct_instance():
     BInt = new_primitive_type("int")
     BStruct = new_struct_type("struct foo")
     BStructPtr = new_pointer_type(BStruct)
-    p = cast(BStructPtr, 0)
-    py.test.raises(AttributeError, "p.a1")    # opaque
+    p = cast(BStructPtr, 42)
+    e = py.test.raises(AttributeError, "p.a1")    # opaque
+    assert str(e.value) == ("cdata 'struct foo *' points to an opaque type: "
+                            "cannot read fields")
+    e = py.test.raises(AttributeError, "p.a1 = 10")    # opaque
+    assert str(e.value) == ("cdata 'struct foo *' points to an opaque type: "
+                            "cannot write fields")
+
     complete_struct_or_union(BStruct, [('a1', BInt, -1),
                                        ('a2', BInt, -1)])
     p = newp(BStructPtr, None)
@@ -742,8 +774,29 @@ def test_struct_instance():
     assert s.a2 == 123
     py.test.raises(OverflowError, "s.a1 = sys.maxsize+1")
     assert s.a1 == 0
-    py.test.raises(AttributeError, "p.foobar")
-    py.test.raises(AttributeError, "s.foobar")
+    e = py.test.raises(AttributeError, "p.foobar")
+    assert str(e.value) == "cdata 'struct foo *' has no field 'foobar'"
+    e = py.test.raises(AttributeError, "p.foobar = 42")
+    assert str(e.value) == "cdata 'struct foo *' has no field 'foobar'"
+    e = py.test.raises(AttributeError, "s.foobar")
+    assert str(e.value) == "cdata 'struct foo' has no field 'foobar'"
+    e = py.test.raises(AttributeError, "s.foobar = 42")
+    assert str(e.value) == "cdata 'struct foo' has no field 'foobar'"
+    j = cast(BInt, 42)
+    e = py.test.raises(AttributeError, "j.foobar")
+    assert str(e.value) == "cdata 'int' has no attribute 'foobar'"
+    e = py.test.raises(AttributeError, "j.foobar = 42")
+    assert str(e.value) == "cdata 'int' has no attribute 'foobar'"
+    j = cast(new_pointer_type(BInt), 42)
+    e = py.test.raises(AttributeError, "j.foobar")
+    assert str(e.value) == "cdata 'int *' has no attribute 'foobar'"
+    e = py.test.raises(AttributeError, "j.foobar = 42")
+    assert str(e.value) == "cdata 'int *' has no attribute 'foobar'"
+    pp = newp(new_pointer_type(BStructPtr), p)
+    e = py.test.raises(AttributeError, "pp.a1")
+    assert str(e.value) == "cdata 'struct foo * *' has no attribute 'a1'"
+    e = py.test.raises(AttributeError, "pp.a1 = 42")
+    assert str(e.value) == "cdata 'struct foo * *' has no attribute 'a1'"
 
 def test_union_instance():
     BInt = new_primitive_type("int")
@@ -878,6 +931,15 @@ def test_call_function_0():
     py.test.raises(OverflowError, f, 128, 0)
     py.test.raises(OverflowError, f, 0, 128)
 
+def test_call_function_0_pretend_bool_result():
+    BSignedChar = new_primitive_type("signed char")
+    BBool = new_primitive_type("_Bool")
+    BFunc0 = new_function_type((BSignedChar, BSignedChar), BBool, False)
+    f = cast(BFunc0, _testfunc(0))
+    assert f(40, -39) is True
+    assert f(40, -40) is False
+    py.test.raises(ValueError, f, 40, 2)
+
 def test_call_function_1():
     BInt = new_primitive_type("int")
     BLong = new_primitive_type("long")
@@ -1040,6 +1102,17 @@ def test_call_function_23_bis():
     res = f(b"foo")
     assert res == 1000 * ord(b'f')
 
+def test_call_function_23_bool_array():
+    # declaring the function as int(_Bool*)
+    BBool = new_primitive_type("_Bool")
+    BBoolP = new_pointer_type(BBool)
+    BInt = new_primitive_type("int")
+    BFunc23 = new_function_type((BBoolP,), BInt, False)
+    f = cast(BFunc23, _testfunc(23))
+    res = f(b"\x01\x01")
+    assert res == 1000
+    py.test.raises(ValueError, f, b"\x02\x02")
+
 def test_cannot_pass_struct_with_array_of_length_0():
     BInt = new_primitive_type("int")
     BArray0 = new_array_type(new_pointer_type(BInt), 0)
@@ -1066,6 +1139,34 @@ def test_call_function_9():
     BSShort = new_primitive_type("short")
     assert f(3, cast(BSChar, -3), cast(BUChar, 200), cast(BSShort, -5)) == 192
 
+def test_call_function_24():
+    BFloat = new_primitive_type("float")
+    BFloatComplex = new_primitive_type("float _Complex")
+    BFunc3 = new_function_type((BFloat, BFloat), BFloatComplex, False)
+    if 0:   # libffi returning nonsense silently, so logic disabled for now
+        f = cast(BFunc3, _testfunc(24))
+        result = f(1.25, 5.1)
+        assert type(result) == complex
+        assert result.real == 1.25   # exact
+        assert (result.imag != 2*5.1) and (abs(result.imag - 2*5.1) < 1e-5) # inexact
+    else:
+        f = cast(BFunc3, _testfunc(9))
+        py.test.raises(NotImplementedError, f, 12.3, 34.5)
+
+def test_call_function_25():
+    BDouble = new_primitive_type("double")
+    BDoubleComplex = new_primitive_type("double _Complex")
+    BFunc3 = new_function_type((BDouble, BDouble), BDoubleComplex, False)
+    if 0:   # libffi returning nonsense silently, so logic disabled for now
+        f = cast(BFunc3, _testfunc(25))
+        result = f(1.25, 5.1)
+        assert type(result) == complex
+        assert result.real == 1.25   # exact
+        assert (result.imag != 2*5.1) and (abs(result.imag - 2*5.1) < 1e-10) # inexact
+    else:
+        f = cast(BFunc3, _testfunc(9))
+        py.test.raises(NotImplementedError, f, 12.3, 34.5)
+
 def test_cannot_call_with_a_autocompleted_struct():
     BSChar = new_primitive_type("signed char")
     BDouble = new_primitive_type("double")
@@ -1077,9 +1178,13 @@ def test_cannot_call_with_a_autocompleted_struct():
     BFunc = new_function_type((BStruct,), BDouble)   # internally not callable
     dummy_func = cast(BFunc, 42)
     e = py.test.raises(NotImplementedError, dummy_func, "?")
-    msg = ("ctype \'struct foo\' not supported as argument (it is a struct "
-           'declared with "...;", but the C calling convention may depend on '
-           'the missing fields)')
+    msg = ("ctype 'struct foo' not supported as argument.  It is a struct "
+           'declared with "...;", but the C calling convention may depend '
+           "on the missing fields; or, it contains anonymous struct/unions.  "
+           "Such structs are only supported as argument if the function is "
+           "'API mode' and non-variadic (i.e. declared inside ffibuilder."
+           "cdef()+ffibuilder.set_source() and not taking a final '...' "
+           "argument)")
     assert str(e.value) == msg
 
 def test_new_charp():
@@ -1835,7 +1940,11 @@ def test_string_byte():
         assert string(a, 8).startswith(b'ABC')  # may contain additional garbage
 
 def test_string_wchar():
-    BWChar = new_primitive_type("wchar_t")
+    for typename in ["wchar_t", "char16_t", "char32_t"]:
+        _test_string_wchar_variant(typename)
+
+def _test_string_wchar_variant(typename):
+    BWChar = new_primitive_type(typename)
     assert string(cast(BWChar, 42)) == u+'*'
     assert string(cast(BWChar, 0x4253)) == u+'\u4253'
     assert string(cast(BWChar, 0)) == u+'\x00'
@@ -1997,22 +2106,45 @@ def test_cast_with_functionptr():
     py.test.raises(TypeError, newp, BStructPtr, [cast(BFunc2, 0)])
 
 def test_wchar():
-    BWChar = new_primitive_type("wchar_t")
+    _test_wchar_variant("wchar_t")
+    if sys.platform.startswith("linux"):
+        BWChar = new_primitive_type("wchar_t")
+        assert sizeof(BWChar) == 4
+        # wchar_t is often signed on Linux, but not always (e.g. on ARM)
+        assert int(cast(BWChar, -1)) in (-1, 4294967295)
+
+def test_char16():
+    BChar16 = new_primitive_type("char16_t")
+    assert sizeof(BChar16) == 2
+    _test_wchar_variant("char16_t")
+    assert int(cast(BChar16, -1)) == 0xffff       # always unsigned
+
+def test_char32():
+    BChar32 = new_primitive_type("char32_t")
+    assert sizeof(BChar32) == 4
+    _test_wchar_variant("char32_t")
+    assert int(cast(BChar32, -1)) == 0xffffffff   # always unsigned
+
+def _test_wchar_variant(typename):
+    BWChar = new_primitive_type(typename)
     BInt = new_primitive_type("int")
     pyuni4 = {1: True, 2: False}[len(u+'\U00012345')]
     wchar4 = {2: False, 4: True}[sizeof(BWChar)]
-    assert str(cast(BWChar, 0x45)) == "<cdata 'wchar_t' %s'E'>" % (
-        mandatory_u_prefix,)
-    assert str(cast(BWChar, 0x1234)) == "<cdata 'wchar_t' %s'\u1234'>" % (
-        mandatory_u_prefix,)
-    if wchar4:
-        if not _hacked_pypy_uni4():
+    assert str(cast(BWChar, 0x45)) == "<cdata '%s' %s'E'>" % (
+        typename, mandatory_u_prefix)
+    assert str(cast(BWChar, 0x1234)) == "<cdata '%s' %s'\u1234'>" % (
+        typename, mandatory_u_prefix)
+    if not _hacked_pypy_uni4():
+        if wchar4:
             x = cast(BWChar, 0x12345)
-            assert str(x) == "<cdata 'wchar_t' %s'\U00012345'>" % (
-                mandatory_u_prefix,)
+            assert str(x) == "<cdata '%s' %s'\U00012345'>" % (
+                typename, mandatory_u_prefix)
             assert int(x) == 0x12345
-    else:
-        assert not pyuni4
+        else:
+            x = cast(BWChar, 0x18345)
+            assert str(x) == "<cdata '%s' %s'\u8345'>" % (
+                typename, mandatory_u_prefix)
+            assert int(x) == 0x8345
     #
     BWCharP = new_pointer_type(BWChar)
     BStruct = new_struct_type("struct foo_s")
@@ -2027,9 +2159,9 @@ def test_wchar():
     s.a1 = u+'\u1234'
     assert s.a1 == u+'\u1234'
     if pyuni4:
-        assert wchar4
-        s.a1 = u+'\U00012345'
-        assert s.a1 == u+'\U00012345'
+        if wchar4:
+            s.a1 = u+'\U00012345'
+            assert s.a1 == u+'\U00012345'
     elif wchar4:
         if not _hacked_pypy_uni4():
             s.a1 = cast(BWChar, 0x12345)
@@ -2064,17 +2196,17 @@ def test_wchar():
         py.test.raises(IndexError, 'a[4]')
     #
     w = cast(BWChar, 'a')
-    assert repr(w) == "<cdata 'wchar_t' %s'a'>" % mandatory_u_prefix
+    assert repr(w) == "<cdata '%s' %s'a'>" % (typename, mandatory_u_prefix)
     assert str(w) == repr(w)
     assert string(w) == u+'a'
     assert int(w) == ord('a')
     w = cast(BWChar, 0x1234)
-    assert repr(w) == "<cdata 'wchar_t' %s'\u1234'>" % mandatory_u_prefix
+    assert repr(w) == "<cdata '%s' %s'\u1234'>" % (typename, mandatory_u_prefix)
     assert str(w) == repr(w)
     assert string(w) == u+'\u1234'
     assert int(w) == 0x1234
     w = cast(BWChar, u+'\u8234')
-    assert repr(w) == "<cdata 'wchar_t' %s'\u8234'>" % mandatory_u_prefix
+    assert repr(w) == "<cdata '%s' %s'\u8234'>" % (typename, mandatory_u_prefix)
     assert str(w) == repr(w)
     assert string(w) == u+'\u8234'
     assert int(w) == 0x8234
@@ -2082,8 +2214,8 @@ def test_wchar():
     assert repr(w) == "<cdata 'int' 4660>"
     if wchar4 and not _hacked_pypy_uni4():
         w = cast(BWChar, u+'\U00012345')
-        assert repr(w) == "<cdata 'wchar_t' %s'\U00012345'>" % (
-            mandatory_u_prefix,)
+        assert repr(w) == "<cdata '%s' %s'\U00012345'>" % (
+            typename, mandatory_u_prefix)
         assert str(w) == repr(w)
         assert string(w) == u+'\U00012345'
         assert int(w) == 0x12345
@@ -2110,7 +2242,7 @@ def test_wchar():
     py.test.raises(RuntimeError, string, q)
     #
     def cb(p):
-        assert repr(p).startswith("<cdata 'wchar_t *' 0x")
+        assert repr(p).startswith("<cdata '%s *' 0x" % typename)
         return len(string(p))
     BFunc = new_function_type((BWCharP,), BInt, False)
     f = callback(BFunc, cb, -42)
@@ -2123,6 +2255,27 @@ def test_wchar():
         x = cast(BWChar, -1)
         py.test.raises(ValueError, string, x)
 
+def test_wchar_variants_mix():
+    BWChar  = new_primitive_type("wchar_t")
+    BChar16 = new_primitive_type("char16_t")
+    BChar32 = new_primitive_type("char32_t")
+    assert int(cast(BChar32, cast(BChar16, -2))) == 0xfffe
+    assert int(cast(BWChar, cast(BChar16, -2))) == 0xfffe
+    assert int(cast(BChar16, cast(BChar32, 0x0001f345))) == 0xf345
+    assert int(cast(BChar16, cast(BWChar, 0x0001f345))) == 0xf345
+    #
+    BChar16A = new_array_type(new_pointer_type(BChar16), None)
+    BChar32A = new_array_type(new_pointer_type(BChar32), None)
+    x = cast(BChar32, 'A')
+    py.test.raises(TypeError, newp, BChar16A, [x])
+    x = cast(BChar16, 'A')
+    py.test.raises(TypeError, newp, BChar32A, [x])
+    #
+    a = newp(BChar16A, u+'\U00012345')
+    assert len(a) == 3
+    a = newp(BChar32A, u+'\U00012345')
+    assert len(a) == 2   # even if the Python unicode string above is 2 chars
+
 def test_keepalive_struct():
     # exception to the no-keepalive rule: p=newp(BStructPtr) returns a
     # pointer owning the memory, and p[0] returns a pointer to the
@@ -2192,12 +2345,17 @@ def test_cmp():
     BVoidP = new_pointer_type(new_void_type())
     p = newp(BIntP, 123)
     q = cast(BInt, 124)
-    py.test.raises(TypeError, "p < q")
-    py.test.raises(TypeError, "p <= q")
     assert (p == q) is False
     assert (p != q) is True
-    py.test.raises(TypeError, "p > q")
-    py.test.raises(TypeError, "p >= q")
+    assert (q == p) is False
+    assert (q != p) is True
+    if strict_compare:
+        py.test.raises(TypeError, "p < q")
+        py.test.raises(TypeError, "p <= q")
+        py.test.raises(TypeError, "q < p")
+        py.test.raises(TypeError, "q <= p")
+        py.test.raises(TypeError, "p > q")
+        py.test.raises(TypeError, "p >= q")
     r = cast(BVoidP, p)
     assert (p <  r) is False
     assert (p <= r) is True
@@ -2230,6 +2388,7 @@ def test_buffer():
     buf = buffer(c)
     assert repr(buf).startswith('<_cffi_backend.buffer object at 0x')
     assert bytes(buf) == b"hi there\x00"
+    assert type(buf) is buffer
     if sys.version_info < (3,):
         assert str(buf) == "hi there\x00"
         assert unicode(buf) == u+"hi there\x00"
@@ -2518,6 +2677,25 @@ def test_nested_anonymous_struct():
     assert d[2][1].bitshift == -1
     assert d[2][1].bitsize == -1
 
+def test_nested_anonymous_struct_2():
+    BInt = new_primitive_type("int")
+    BStruct = new_struct_type("struct foo")
+    BInnerUnion = new_union_type("union bar")
+    complete_struct_or_union(BInnerUnion, [('a1', BInt, -1),
+                                           ('a2', BInt, -1)])
+    complete_struct_or_union(BStruct, [('b1', BInt, -1),
+                                       ('', BInnerUnion, -1),
+                                       ('b2', BInt, -1)])
+    assert sizeof(BInnerUnion) == sizeof(BInt)
+    assert sizeof(BStruct) == sizeof(BInt) * 3
+    fields = [(name, fld.offset, fld.flags) for (name, fld) in BStruct.fields]
+    assert fields == [
+        ('b1', 0 * sizeof(BInt), 0),
+        ('a1', 1 * sizeof(BInt), 0),
+        ('a2', 1 * sizeof(BInt), 1),
+        ('b2', 2 * sizeof(BInt), 0),
+    ]
+
 def test_sizeof_union():
     # a union has the largest alignment of its members, and a total size
     # that is the largest of its items *possibly further aligned* if
@@ -2569,7 +2747,8 @@ def test_bool():
     BBoolP = new_pointer_type(BBool)
     assert int(cast(BBool, False)) == 0
     assert int(cast(BBool, True)) == 1
-    assert bool(cast(BBool, False)) is True    # warning!
+    assert bool(cast(BBool, False)) is False    # since 1.7
+    assert bool(cast(BBool, True)) is True
     assert int(cast(BBool, 3)) == 1
     assert int(cast(BBool, long(3))) == 1
     assert int(cast(BBool, long(10)**4000)) == 1
@@ -2586,13 +2765,38 @@ def test_bool():
     py.test.raises(OverflowError, newp, BBoolP, 2)
     py.test.raises(OverflowError, newp, BBoolP, -1)
     BCharP = new_pointer_type(new_primitive_type("char"))
-    p = newp(BCharP, b'X')
+    p = newp(BCharP, b'\x01')
     q = cast(BBoolP, p)
-    assert q[0] == ord(b'X')
+    assert q[0] is True
+    p = newp(BCharP, b'\x00')
+    q = cast(BBoolP, p)
+    assert q[0] is False
     py.test.raises(TypeError, string, cast(BBool, False))
     BDouble = new_primitive_type("double")
     assert int(cast(BBool, cast(BDouble, 0.1))) == 1
     assert int(cast(BBool, cast(BDouble, 0.0))) == 0
+    BBoolA = new_array_type(BBoolP, None)
+    p = newp(BBoolA, b'\x01\x00')
+    assert p[0] is True
+    assert p[1] is False
+
+def test_bool_forbidden_cases():
+    BBool = new_primitive_type("_Bool")
+    BBoolP = new_pointer_type(BBool)
+    BBoolA = new_array_type(BBoolP, None)
+    BCharP = new_pointer_type(new_primitive_type("char"))
+    p = newp(BCharP, b'X')
+    q = cast(BBoolP, p)
+    py.test.raises(ValueError, "q[0]")
+    py.test.raises(TypeError, newp, BBoolP, b'\x00')
+    assert newp(BBoolP, 0)[0] is False
+    assert newp(BBoolP, 1)[0] is True
+    py.test.raises(OverflowError, newp, BBoolP, 2)
+    py.test.raises(OverflowError, newp, BBoolP, -1)
+    py.test.raises(ValueError, newp, BBoolA, b'\x00\x01\x02')
+    py.test.raises(OverflowError, newp, BBoolA, [0, 1, 2])
+    py.test.raises(TypeError, string, newp(BBoolP, 1))
+    py.test.raises(TypeError, string, newp(BBoolA, [1]))
 
 def test_typeoffsetof():
     BChar = new_primitive_type("char")
@@ -2691,10 +2895,19 @@ def test_newp_from_bytearray_doesnt_work():
     BCharArray = new_array_type(
         new_pointer_type(new_primitive_type("char")), None)
     py.test.raises(TypeError, newp, BCharArray, bytearray(b"foo"))
-    p = newp(BCharArray, 4)
-    buffer(p)[:] = bytearray(b"foo\x00")
-    assert len(p) == 4
-    assert list(p) == [b"f", b"o", b"o", b"\x00"]
+    p = newp(BCharArray, 5)
+    buffer(p)[:] = bytearray(b"foo.\x00")
+    assert len(p) == 5
+    assert list(p) == [b"f", b"o", b"o", b".", b"\x00"]
+    p[1:3] = bytearray(b"XY")
+    assert list(p) == [b"f", b"X", b"Y", b".", b"\x00"]
+
+def test_string_assignment_to_byte_array():
+    BByteArray = new_array_type(
+        new_pointer_type(new_primitive_type("unsigned char")), None)
+    p = newp(BByteArray, 5)
+    p[0:3] = bytearray(b"XYZ")
+    assert list(p) == [ord("X"), ord("Y"), ord("Z"), 0, 0]
 
 # XXX hack
 if sys.version_info >= (3,):
@@ -3125,17 +3338,19 @@ def test_struct_array_no_length():
     assert d[1][0] == 'y'
     assert d[1][1].type is BArray
     assert d[1][1].offset == size_of_int()
-    assert d[1][1].bitshift == -1
+    assert d[1][1].bitshift == -2
     assert d[1][1].bitsize == -1
     #
     p = newp(new_pointer_type(BStruct))
     p.x = 42
     assert p.x == 42
-    assert typeof(p.y) is BIntP
+    assert typeof(p.y) is BArray
+    assert len(p.y) == 0
     assert p.y == cast(BIntP, p) + 1
     #
     p = newp(new_pointer_type(BStruct), [100])
     assert p.x == 100
+    assert len(p.y) == 0
     #
     # Tests for
     #    ffi.new("struct_with_var_array *", [field.., [the_array_items..]])
@@ -3150,6 +3365,10 @@ def test_struct_array_no_length():
             p.y[0] = 200
             assert p.y[2] == 0
             p.y[2] = 400
+        assert len(p.y) == 3
+        assert len(p[0].y) == 3
+        assert len(buffer(p)) == sizeof(BInt) * 4
+        assert sizeof(p[0]) == sizeof(BInt) * 4
         plist.append(p)
     for i in range(20):
         p = plist[i]
@@ -3157,13 +3376,31 @@ def test_struct_array_no_length():
         assert p.y[0] == 200
         assert p.y[1] == i
         assert p.y[2] == 400
-        assert list(p.y[0:3]) == [200, i, 400]
+        assert list(p.y) == [200, i, 400]
     #
     # the following assignment works, as it normally would, for any array field
-    p.y = [500, 600]
-    assert list(p.y[0:3]) == [500, 600, 400]
+    p.y = [501, 601]
+    assert list(p.y) == [501, 601, 400]
+    p[0].y = [500, 600]
+    assert list(p[0].y) == [500, 600, 400]
+    assert repr(p) == "<cdata 'foo *' owning %d bytes>" % (
+        sizeof(BStruct) + 3 * sizeof(BInt),)
+    assert repr(p[0]) == "<cdata 'foo' owning %d bytes>" % (
+        sizeof(BStruct) + 3 * sizeof(BInt),)
+    assert sizeof(p[0]) == sizeof(BStruct) + 3 * sizeof(BInt)
+    #
+    # from a non-owning pointer, we can't get the length
+    q = cast(new_pointer_type(BStruct), p)
+    assert q.y[0] == 500
+    assert q[0].y[0] == 500
+    py.test.raises(TypeError, len, q.y)
+    py.test.raises(TypeError, len, q[0].y)
+    assert typeof(q.y) is BIntP
+    assert typeof(q[0].y) is BIntP
+    assert sizeof(q[0]) == sizeof(BStruct)
     #
     # error cases
+    py.test.raises(IndexError, "p.y[4]")
     py.test.raises(TypeError, "p.y = cast(BIntP, 0)")
     py.test.raises(TypeError, "p.y = 15")
     py.test.raises(TypeError, "p.y = None")
@@ -3228,6 +3465,33 @@ def test_struct_array_no_length_explicit_position():
         assert p.x[5] == 60
         assert p.x[6] == 70
 
+def test_struct_array_not_aligned():
+    # struct a { int x; char y; char z[]; };
+    # ends up of size 8, but 'z' is at offset 5
+    BChar = new_primitive_type("char")
+    BInt = new_primitive_type("int")
+    BCharP = new_pointer_type(BChar)
+    BArray = new_array_type(BCharP, None)
+    BStruct = new_struct_type("foo")
+    complete_struct_or_union(BStruct, [('x', BInt),
+                                       ('y', BChar),
+                                       ('z', BArray)])
+    assert sizeof(BStruct) == 2 * size_of_int()
+    def offsetof(BType, fieldname):
+        return typeoffsetof(BType, fieldname)[1]
+    base = offsetof(BStruct, 'z')
+    assert base == size_of_int() + 1
+    #
+    p = newp(new_pointer_type(BStruct), {'z': 3})
+    assert sizeof(p[0]) == base + 3
+    q = newp(new_pointer_type(BStruct), {'z': size_of_int()})
+    assert sizeof(q) == size_of_ptr()
+    assert sizeof(q[0]) == base + size_of_int()
+    assert len(p.z) == 3
+    assert len(p[0].z) == 3
+    assert len(q.z) == size_of_int()
+    assert len(q[0].z) == size_of_int()
+
 def test_ass_slice():
     BChar = new_primitive_type("char")
     BArray = new_array_type(new_pointer_type(BChar), None)
@@ -3238,14 +3502,15 @@ def test_ass_slice():
     py.test.raises(TypeError, "p[1:5] = u+'XYZT'")
     py.test.raises(TypeError, "p[1:5] = [1, 2, 3, 4]")
     #
-    BUniChar = new_primitive_type("wchar_t")
-    BArray = new_array_type(new_pointer_type(BUniChar), None)
-    p = newp(BArray, u+"foobar")
-    p[2:5] = [u+"*", u+"Z", u+"T"]
-    p[1:3] = u+"XY"
-    assert list(p) == [u+"f", u+"X", u+"Y", u+"Z", u+"T", u+"r", u+"\x00"]
-    py.test.raises(TypeError, "p[1:5] = b'XYZT'")
-    py.test.raises(TypeError, "p[1:5] = [1, 2, 3, 4]")
+    for typename in ["wchar_t", "char16_t", "char32_t"]:
+        BUniChar = new_primitive_type(typename)
+        BArray = new_array_type(new_pointer_type(BUniChar), None)
+        p = newp(BArray, u+"foobar")
+        p[2:5] = [u+"*", u+"Z", u+"T"]
+        p[1:3] = u+"XY"
+        assert list(p) == [u+"f", u+"X", u+"Y", u+"Z", u+"T", u+"r", u+"\x00"]
+        py.test.raises(TypeError, "p[1:5] = b'XYZT'")
+        py.test.raises(TypeError, "p[1:5] = [1, 2, 3, 4]")
 
 def test_void_p_arithmetic():
     BVoid = new_void_type()
@@ -3322,30 +3587,55 @@ def test_from_buffer():
     cast(p, c)[1] += 500
     assert list(a) == [10000, 20500, 30000]
 
-def test_from_buffer_not_str_unicode_bytearray():
+def test_from_buffer_not_str_unicode():
     BChar = new_primitive_type("char")
     BCharP = new_pointer_type(BChar)
     BCharA = new_array_type(BCharP, None)
-    py.test.raises(TypeError, from_buffer, BCharA, b"foo")
+    p1 = from_buffer(BCharA, b"foo")
+    assert p1 == from_buffer(BCharA, b"foo")
+    import gc; gc.collect()
+    assert p1 == from_buffer(BCharA, b"foo")
     py.test.raises(TypeError, from_buffer, BCharA, u+"foo")
-    py.test.raises(TypeError, from_buffer, BCharA, bytearray(b"foo"))
     try:
         from __builtin__ import buffer
     except ImportError:
         pass
     else:
-        py.test.raises(TypeError, from_buffer, BCharA, buffer(b"foo"))
-        py.test.raises(TypeError, from_buffer, BCharA, buffer(u+"foo"))
-        py.test.raises(TypeError, from_buffer, BCharA,
-                       buffer(bytearray(b"foo")))
+        # Python 2 only
+        contents = from_buffer(BCharA, buffer(b"foo"))
+        assert len(contents) == len(p1)
+        for i in range(len(contents)):
+            assert contents[i] == p1[i]
+        p4 = buffer(u+"foo")
+        contents = from_buffer(BCharA, buffer(u+"foo"))
+        assert len(contents) == len(p4)
+        for i in range(len(contents)):
+            assert contents[i] == p4[i]
     try:
         from __builtin__ import memoryview
     except ImportError:
         pass
     else:
-        py.test.raises(TypeError, from_buffer, BCharA, memoryview(b"foo"))
-        py.test.raises(TypeError, from_buffer, BCharA,
-                       memoryview(bytearray(b"foo")))
+        contents = from_buffer(BCharA, memoryview(b"foo"))
+        assert len(contents) == len(p1)
+        for i in range(len(contents)):
+            assert contents[i] == p1[i]
+
+
+def test_from_buffer_bytearray():
+    a = bytearray(b"xyz")
+    BChar = new_primitive_type("char")
+    BCharP = new_pointer_type(BChar)
+    BCharA = new_array_type(BCharP, None)
+    p = from_buffer(BCharA, a)
+    assert typeof(p) is BCharA
+    assert len(p) == 3
+    assert repr(p) == "<cdata 'char[]' buffer len 3 from 'bytearray' object>"
+    assert p[2] == b"z"
+    p[2] = b"."
+    assert a[2] == ord(".")
+    a[2] = ord("?")
+    assert p[2] == b"?"
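
At the ffi level this means ffi.from_buffer() now accepts plain byte strings and bytearrays instead of rejecting them, and writes through the resulting cdata are visible in the original object. A short sketch, assuming cffi 1.11.x:

    import cffi

    ffi = cffi.FFI()
    ba = bytearray(b"xyz")
    p = ffi.from_buffer(ba)              # zero-copy: p aliases the bytearray's storage
    assert len(p) == 3
    p[0] = b"X"
    assert ba == bytearray(b"Xyz")

    q = ffi.from_buffer(b"abc")          # read-only buffers such as bytes work too
    assert q[0] == b"a"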
 
 def test_from_buffer_more_cases():
     try:
@@ -3525,3 +3815,213 @@ def test_get_common_types():
     d = {}
     _get_common_types(d)
     assert d['bool'] == '_Bool'
+
+def test_unpack():
+    BChar = new_primitive_type("char")
+    BArray = new_array_type(new_pointer_type(BChar), 10)   # char[10]
+    p = newp(BArray, b"abc\x00def")
+    p0 = p
+    assert unpack(p, 10) == b"abc\x00def\x00\x00\x00"
+    assert unpack(p+1, 5) == b"bc\x00de"
+
+    for typename in ["wchar_t", "char16_t", "char32_t"]:
+        BWChar = new_primitive_type(typename)
+        BArray = new_array_type(new_pointer_type(BWChar), 10)   # wchar_t[10]
+        p = newp(BArray, u"abc\x00def")
+        assert unpack(p, 10) == u"abc\x00def\x00\x00\x00"
+
+    for typename, samples in [
+            ("uint8_t",  [0, 2**8-1]),
+            ("uint16_t", [0, 2**16-1]),
+            ("uint32_t", [0, 2**32-1]),
+            ("uint64_t", [0, 2**64-1]),
+            ("int8_t",  [-2**7, 2**7-1]),
+            ("int16_t", [-2**15, 2**15-1]),
+            ("int32_t", [-2**31, 2**31-1]),
+            ("int64_t", [-2**63, 2**63-1]),
+            ("_Bool", [False, True]),
+            ("float", [0.0, 10.5]),
+            ("double", [12.34, 56.78]),
+            ]:
+        BItem = new_primitive_type(typename)
+        BArray = new_array_type(new_pointer_type(BItem), 10)
+        p = newp(BArray, samples)
+        result = unpack(p, len(samples))
+        assert result == samples
+        for i in range(len(samples)):
+            assert result[i] == p[i] and type(result[i]) is type(p[i])
+            assert (type(result[i]) is bool) == (type(samples[i]) is bool)
+    #
+    BInt = new_primitive_type("int")
+    py.test.raises(TypeError, unpack, p)
+    py.test.raises(TypeError, unpack, b"foobar", 6)
+    py.test.raises(TypeError, unpack, cast(BInt, 42), 1)
+    #
+    BPtr = new_pointer_type(BInt)
+    random_ptr = cast(BPtr, -424344)
+    other_ptr = cast(BPtr, 54321)
+    BArray = new_array_type(new_pointer_type(BPtr), None)
+    lst = unpack(newp(BArray, [random_ptr, other_ptr]), 2)
+    assert lst == [random_ptr, other_ptr]
+    #
+    BFunc = new_function_type((BInt, BInt), BInt, False)
+    BFuncPtr = new_pointer_type(BFunc)
+    lst = unpack(newp(new_array_type(BFuncPtr, None), 2), 2)
+    assert len(lst) == 2
+    assert not lst[0] and not lst[1]
+    assert typeof(lst[0]) is BFunc
+    #
+    BStruct = new_struct_type("foo")
+    BStructPtr = new_pointer_type(BStruct)
+    e = py.test.raises(ValueError, unpack, cast(BStructPtr, 42), 5)
+    assert str(e.value) == "'foo *' points to items of unknown size"
+    complete_struct_or_union(BStruct, [('a1', BInt, -1),
+                                       ('a2', BInt, -1)])
+    array_of_structs = newp(new_array_type(BStructPtr, None), [[4,5], [6,7]])
+    lst = unpack(array_of_structs, 2)
+    assert typeof(lst[0]) is BStruct
+    assert lst[0].a1 == 4 and lst[1].a2 == 7
+    #
+    py.test.raises(RuntimeError, unpack, cast(new_pointer_type(BChar), 0), 0)
+    py.test.raises(RuntimeError, unpack, cast(new_pointer_type(BChar), 0), 10)
+    #
+    py.test.raises(ValueError, unpack, p0, -1)
+    py.test.raises(ValueError, unpack, p, -1)
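
The backend unpack() exercised above is exposed as ffi.unpack(); unlike ffi.string() it copies a fixed number of items and does not stop at the first NUL. A short sketch, assuming cffi 1.11.x:

    import cffi

    ffi = cffi.FFI()
    p = ffi.new("char[]", b"abc\x00def")
    assert ffi.string(p) == b"abc"             # stops at the first NUL
    assert ffi.unpack(p, 7) == b"abc\x00def"   # copies exactly 7 bytes

    q = ffi.new("int[]", [10, 20, 30])
    assert ffi.unpack(q, 3) == [10, 20, 30]    # non-char items come back as a list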
+
+def test_cdata_dir():
+    BInt = new_primitive_type("int")
+    p = cast(BInt, 42)
+    check_dir(p, [])
+    p = newp(new_array_type(new_pointer_type(BInt), None), 5)
+    check_dir(p, [])
+    BStruct = new_struct_type("foo")
+    p = cast(new_pointer_type(BStruct), 0)
+    check_dir(p, [])    # opaque
+    complete_struct_or_union(BStruct, [('a2', BInt, -1),
+                                       ('a1', BInt, -1)])
+    check_dir(p, ['a1', 'a2'])   # always sorted
+    p = newp(new_pointer_type(BStruct), None)
+    check_dir(p, ['a1', 'a2'])
+    check_dir(p[0], ['a1', 'a2'])
+    pp = newp(new_pointer_type(new_pointer_type(BStruct)), p)
+    check_dir(pp, [])
+    check_dir(pp[0], ['a1', 'a2'])
+    check_dir(pp[0][0], ['a1', 'a2'])
+
+def test_char_pointer_conversion():
+    import warnings
+    assert __version__.startswith(("1.8", "1.9", "1.10", "1.11")), (
+        "consider turning the warning into an error")
+    BCharP = new_pointer_type(new_primitive_type("char"))
+    BIntP = new_pointer_type(new_primitive_type("int"))
+    BVoidP = new_pointer_type(new_void_type())
+    BUCharP = new_pointer_type(new_primitive_type("unsigned char"))
+    z1 = cast(BCharP, 0)
+    z2 = cast(BIntP, 0)
+    z3 = cast(BVoidP, 0)
+    z4 = cast(BUCharP, 0)
+    with warnings.catch_warnings(record=True) as w:
+        newp(new_pointer_type(BIntP), z1)    # warn
+        assert len(w) == 1
+        newp(new_pointer_type(BVoidP), z1)   # fine
+        assert len(w) == 1
+        newp(new_pointer_type(BCharP), z2)   # warn
+        assert len(w) == 2
+        newp(new_pointer_type(BVoidP), z2)   # fine
+        assert len(w) == 2
+        newp(new_pointer_type(BCharP), z3)   # fine
+        assert len(w) == 2
+        newp(new_pointer_type(BIntP), z3)    # fine
+        assert len(w) == 2
+        newp(new_pointer_type(BCharP), z4)   # fine (ignore signedness here)
+        assert len(w) == 2
+        newp(new_pointer_type(BUCharP), z1)  # fine (ignore signedness here)
+        assert len(w) == 2
+        newp(new_pointer_type(BUCharP), z3)  # fine
+        assert len(w) == 2
+    # check that the warnings are associated with lines in this file
+    assert w[1].lineno == w[0].lineno + 4
+
+def test_primitive_comparison():
+    def assert_eq(a, b):
+        assert (a == b) is True
+        assert (b == a) is True
+        assert (a != b) is False
+        assert (b != a) is False
+        assert (a < b) is False
+        assert (a <= b) is True
+        assert (a > b) is False
+        assert (a >= b) is True
+        assert (b < a) is False
+        assert (b <= a) is True
+        assert (b > a) is False
+        assert (b >= a) is True
+        assert hash(a) == hash(b)
+    def assert_lt(a, b, check_hash=True):
+        assert (a == b) is False
+        assert (b == a) is False
+        assert (a != b) is True
+        assert (b != a) is True
+        assert (a < b) is True
+        assert (a <= b) is True
+        assert (a > b) is False
+        assert (a >= b) is False
+        assert (b < a) is False
+        assert (b <= a) is False
+        assert (b > a) is True
+        assert (b >= a) is True
+        if check_hash:
+            assert hash(a) != hash(b)    # (or at least, it is unlikely)
+    def assert_gt(a, b, check_hash=True):
+        assert_lt(b, a, check_hash)
+    def assert_ne(a, b):
+        assert (a == b) is False
+        assert (b == a) is False
+        assert (a != b) is True
+        assert (b != a) is True
+        if strict_compare:
+            py.test.raises(TypeError, "a < b")
+            py.test.raises(TypeError, "a <= b")
+            py.test.raises(TypeError, "a > b")
+            py.test.raises(TypeError, "a >= b")
+            py.test.raises(TypeError, "b < a")
+            py.test.raises(TypeError, "b <= a")
+            py.test.raises(TypeError, "b > a")
+            py.test.raises(TypeError, "b >= a")
+        elif a < b:
+            assert_lt(a, b)
+        else:
+            assert_lt(b, a)
+    assert_eq(5, 5)
+    assert_lt(3, 5)
+    assert_ne('5', 5)
+    #
+    t1 = new_primitive_type("char")
+    t2 = new_primitive_type("int")
+    t3 = new_primitive_type("unsigned char")
+    t4 = new_primitive_type("unsigned int")
+    t5 = new_primitive_type("float")
+    t6 = new_primitive_type("double")
+    assert_eq(cast(t1, 65), b'A')
+    assert_lt(cast(t1, 64), b'\x99')
+    assert_gt(cast(t1, 200), b'A')
+    assert_ne(cast(t1, 65), 65)
+    assert_eq(cast(t2, -25), -25)
+    assert_lt(cast(t2, -25), -24)
+    assert_gt(cast(t2, -25), -26)
+    assert_eq(cast(t3, 65), 65)
+    assert_ne(cast(t3, 65), b'A')
+    assert_ne(cast(t3, 65), cast(t1, 65))
+    assert_gt(cast(t4, -1), -1, check_hash=False)
+    assert_gt(cast(t4, -1), cast(t2, -1), check_hash=False)
+    assert_gt(cast(t4, -1), 99999)
+    assert_eq(cast(t4, -1), 256 ** size_of_int() - 1)
+    assert_eq(cast(t5, 3.0), 3)
+    assert_eq(cast(t5, 3.5), 3.5)
+    assert_lt(cast(t5, 3.3), 3.3)   # imperfect rounding
+    assert_eq(cast(t6, 3.3), 3.3)
+    assert_eq(cast(t5, 3.5), cast(t6, 3.5))
+    assert_lt(cast(t5, 3.1), cast(t6, 3.1))   # imperfect rounding
+    assert_eq(cast(t5, 7.0), cast(t3, 7))
+    assert_lt(cast(t5, 3.1), 3.101)
+    assert_gt(cast(t5, 3.1), 3)
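
The tests above exercise the backend-level unpack() primitive and the new value-based comparison/hashing of primitive cdata objects. As a quick orientation, a minimal usage sketch at the public ffi level (the ffi object and the array contents are illustrative, not taken from this test file):

    # Hedged sketch: typical use of ffi.unpack() on a stock cffi >= 1.11 install.
    from cffi import FFI

    ffi = FFI()
    arr = ffi.new("int[]", [10, 20, 30])
    assert ffi.unpack(arr, 3) == [10, 20, 30]      # list of 3 ints
    buf = ffi.new("char[]", b"abc\x00def")
    assert ffi.unpack(buf, 7) == b"abc\x00def"     # does not stop at the first NUL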

+ 246 - 0
desktop/core/ext-py/cffi-1.11.5/c/wchar_helper.h

@@ -0,0 +1,246 @@
+/*
+ * wchar_t helpers
+ */
+
+typedef uint16_t cffi_char16_t;
+typedef uint32_t cffi_char32_t;
+
+
+#if Py_UNICODE_SIZE == 2
+
+/* Before Python 2.7, PyUnicode_FromWideChar cannot convert wchar_t
+   values greater than 65535 into surrogate pairs (two unicode
+   characters).  Even the Python 2.7 version does not detect wchar_t
+   values outside range(0x110000), and just returns nonsense.
+
+   From cffi 1.11 we can't use it anyway, because we need a version
+   that takes char32_t input.
+*/
+static PyObject *
+_my_PyUnicode_FromChar32(const cffi_char32_t *w, Py_ssize_t size)
+{
+    PyObject *unicode;
+    register Py_ssize_t i;
+    Py_ssize_t alloc;
+    const cffi_char32_t *orig_w;
+
+    alloc = size;
+    orig_w = w;
+    for (i = size; i > 0; i--) {
+        if (*w > 0xFFFF)
+            alloc++;
+        w++;
+    }
+    w = orig_w;
+    unicode = PyUnicode_FromUnicode(NULL, alloc);
+    if (!unicode)
+        return NULL;
+
+    /* Copy the wchar_t data into the new object */
+    {
+        register Py_UNICODE *u;
+        u = PyUnicode_AS_UNICODE(unicode);
+        for (i = size; i > 0; i--) {
+            if (*w > 0xFFFF) {
+                cffi_char32_t ordinal;
+                if (*w > 0x10FFFF) {
+                    PyErr_Format(PyExc_ValueError,
+                                 "char32_t out of range for "
+                                 "conversion to unicode: 0x%x", (int)*w);
+                    Py_DECREF(unicode);
+                    return NULL;
+                }
+                ordinal = *w++;
+                ordinal -= 0x10000;
+                *u++ = 0xD800 | (ordinal >> 10);
+                *u++ = 0xDC00 | (ordinal & 0x3FF);
+            }
+            else
+                *u++ = *w++;
+        }
+    }
+    return unicode;
+}
+
+static PyObject *
+_my_PyUnicode_FromChar16(const cffi_char16_t *w, Py_ssize_t size)
+{
+    return PyUnicode_FromUnicode((const Py_UNICODE *)w, size);
+}
+
+#else   /* Py_UNICODE_SIZE == 4 */
+
+static PyObject *
+_my_PyUnicode_FromChar32(const cffi_char32_t *w, Py_ssize_t size)
+{
+    return PyUnicode_FromUnicode((const Py_UNICODE *)w, size);
+}
+
+static PyObject *
+_my_PyUnicode_FromChar16(const cffi_char16_t *w, Py_ssize_t size)
+{
+    /* 'size' is the length of the 'w' array */
+    PyObject *result = PyUnicode_FromUnicode(NULL, size);
+
+    if (result != NULL) {
+        Py_UNICODE *u_base = PyUnicode_AS_UNICODE(result);
+        Py_UNICODE *u = u_base;
+
+        if (size == 1) {      /* performance only */
+            *u = (cffi_char32_t)*w;
+        }
+        else {
+            while (size > 0) {
+                cffi_char32_t ch = *w++;
+                size--;
+                if (0xD800 <= ch && ch <= 0xDBFF && size > 0) {
+                    cffi_char32_t ch2 = *w;
+                    if (0xDC00 <= ch2 && ch2 <= 0xDFFF) {
+                        ch = (((ch & 0x3FF)<<10) | (ch2 & 0x3FF)) + 0x10000;
+                        w++;
+                        size--;
+                    }
+                }
+                *u++ = ch;
+            }
+            if (PyUnicode_Resize(&result, u - u_base) < 0) {
+                Py_DECREF(result);
+                return NULL;
+            }
+        }
+    }
+    return result;
+}
+
+#endif
+
+
+#define IS_SURROGATE(u)   (0xD800 <= (u)[0] && (u)[0] <= 0xDBFF &&   \
+                           0xDC00 <= (u)[1] && (u)[1] <= 0xDFFF)
+#define AS_SURROGATE(u)   (0x10000 + (((u)[0] - 0xD800) << 10) +     \
+                                     ((u)[1] - 0xDC00))
+
+static int
+_my_PyUnicode_AsSingleChar16(PyObject *unicode, cffi_char16_t *result,
+                             char *err_got)
+{
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    if (PyUnicode_GET_SIZE(unicode) != 1) {
+        sprintf(err_got, "unicode string of length %zd",
+                PyUnicode_GET_SIZE(unicode));
+        return -1;
+    }
+#if Py_UNICODE_SIZE == 4
+    if (((unsigned int)u[0]) > 0xFFFF)
+    {
+        sprintf(err_got, "larger-than-0xFFFF character");
+        return -1;
+    }
+#endif
+    *result = (cffi_char16_t)u[0];
+    return 0;
+}
+
+static int
+_my_PyUnicode_AsSingleChar32(PyObject *unicode, cffi_char32_t *result,
+                             char *err_got)
+{
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    if (PyUnicode_GET_SIZE(unicode) == 1) {
+        *result = (cffi_char32_t)u[0];
+        return 0;
+    }
+#if Py_UNICODE_SIZE == 2
+    if (PyUnicode_GET_SIZE(unicode) == 2 && IS_SURROGATE(u)) {
+        *result = AS_SURROGATE(u);
+        return 0;
+    }
+#endif
+    sprintf(err_got, "unicode string of length %zd",
+            PyUnicode_GET_SIZE(unicode));
+    return -1;
+}
+
+static Py_ssize_t _my_PyUnicode_SizeAsChar16(PyObject *unicode)
+{
+    Py_ssize_t length = PyUnicode_GET_SIZE(unicode);
+    Py_ssize_t result = length;
+
+#if Py_UNICODE_SIZE == 4
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    Py_ssize_t i;
+
+    for (i=0; i<length; i++) {
+        if (u[i] > 0xFFFF)
+            result++;
+    }
+#endif
+    return result;
+}
+
+static Py_ssize_t _my_PyUnicode_SizeAsChar32(PyObject *unicode)
+{
+    Py_ssize_t length = PyUnicode_GET_SIZE(unicode);
+    Py_ssize_t result = length;
+
+#if Py_UNICODE_SIZE == 2
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    Py_ssize_t i;
+
+    for (i=0; i<length-1; i++) {
+        if (IS_SURROGATE(u+i))
+            result--;
+    }
+#endif
+    return result;
+}
+
+static int _my_PyUnicode_AsChar16(PyObject *unicode,
+                                  cffi_char16_t *result,
+                                  Py_ssize_t resultlen)
+{
+    Py_ssize_t len = PyUnicode_GET_SIZE(unicode);
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    Py_ssize_t i;
+    for (i=0; i<len; i++) {
+#if Py_UNICODE_SIZE == 2
+        cffi_char16_t ordinal = u[i];
+#else
+        cffi_char32_t ordinal = u[i];
+        if (ordinal > 0xFFFF) {
+            if (ordinal > 0x10FFFF) {
+                PyErr_Format(PyExc_ValueError,
+                             "unicode character out of range for "
+                             "conversion to char16_t: 0x%x", (int)ordinal);
+                return -1;
+            }
+            ordinal -= 0x10000;
+            *result++ = 0xD800 | (ordinal >> 10);
+            *result++ = 0xDC00 | (ordinal & 0x3FF);
+            continue;
+        }
+#endif
+        *result++ = ordinal;
+    }
+    return 0;
+}
+
+static int _my_PyUnicode_AsChar32(PyObject *unicode,
+                                  cffi_char32_t *result,
+                                  Py_ssize_t resultlen)
+{
+    Py_UNICODE *u = PyUnicode_AS_UNICODE(unicode);
+    Py_ssize_t i;
+    for (i=0; i<resultlen; i++) {
+        cffi_char32_t ordinal = *u;
+#if Py_UNICODE_SIZE == 2
+        if (IS_SURROGATE(u)) {
+            ordinal = AS_SURROGATE(u);
+            u++;
+        }
+#endif
+        result[i] = ordinal;
+        u++;
+    }
+    return 0;
+}
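
The helpers above hand-roll the standard UTF-16 surrogate arithmetic: subtract 0x10000, put the high 10 bits into a 0xD800-based unit and the low 10 bits into a 0xDC00-based unit (and the reverse in AS_SURROGATE). A small worked sketch of the same math in Python, with made-up helper names, for one astral code point:

    # Illustrative only: the encode/decode math behind AS_SURROGATE and the
    # _my_PyUnicode_FromChar32 loop, for a single code point.
    def to_surrogate_pair(cp):
        assert 0x10000 <= cp <= 0x10FFFF
        cp -= 0x10000
        return 0xD800 | (cp >> 10), 0xDC00 | (cp & 0x3FF)

    def from_surrogate_pair(hi, lo):
        return 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00)

    hi, lo = to_surrogate_pair(0x1F600)            # U+1F600
    assert (hi, lo) == (0xD83D, 0xDE00)
    assert from_surrogate_pair(hi, lo) == 0x1F600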

+ 149 - 0
desktop/core/ext-py/cffi-1.11.5/c/wchar_helper_3.h

@@ -0,0 +1,149 @@
+/*
+ * wchar_t helpers, version CPython >= 3.3.
+ *
+ * CPython 3.3 added support for sys.maxunicode == 0x10FFFF on all
+ * platforms, even ones with wchar_t limited to 2 bytes.  As such,
+ * this code here works from the outside like wchar_helper.h in the
+ * case Py_UNICODE_SIZE == 4, but the implementation is very different.
+ */
+
+typedef uint16_t cffi_char16_t;
+typedef uint32_t cffi_char32_t;
+
+
+static PyObject *
+_my_PyUnicode_FromChar32(const cffi_char32_t *w, Py_ssize_t size)
+{
+    return PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, w, size);
+}
+
+static PyObject *
+_my_PyUnicode_FromChar16(const cffi_char16_t *w, Py_ssize_t size)
+{
+    /* are there any surrogate pairs, and if so, how many? */
+    Py_ssize_t i, count_surrogates = 0;
+    for (i = 0; i < size - 1; i++) {
+        if (0xD800 <= w[i] && w[i] <= 0xDBFF &&
+                0xDC00 <= w[i+1] && w[i+1] <= 0xDFFF)
+            count_surrogates++;
+    }
+    if (count_surrogates == 0) {
+        /* no, fast path */
+        return PyUnicode_FromKindAndData(PyUnicode_2BYTE_KIND, w, size);
+    }
+    else
+    {
+        PyObject *result = PyUnicode_New(size - count_surrogates, 0x10FFFF);
+        Py_UCS4 *data;
+        assert(PyUnicode_KIND(result) == PyUnicode_4BYTE_KIND);
+        data = PyUnicode_4BYTE_DATA(result);
+
+        for (i = 0; i < size; i++)
+        {
+            cffi_char32_t ch = w[i];
+            if (0xD800 <= ch && ch <= 0xDBFF && i < size - 1) {
+                cffi_char32_t ch2 = w[i + 1];
+                if (0xDC00 <= ch2 && ch2 <= 0xDFFF) {
+                    ch = (((ch & 0x3FF)<<10) | (ch2 & 0x3FF)) + 0x10000;
+                    i++;
+                }
+            }
+            *data++ = ch;
+        }
+        return result;
+    }
+}
+
+static int
+_my_PyUnicode_AsSingleChar16(PyObject *unicode, cffi_char16_t *result,
+                             char *err_got)
+{
+    cffi_char32_t ch;
+    if (PyUnicode_GET_LENGTH(unicode) != 1) {
+        sprintf(err_got, "unicode string of length %zd",
+                PyUnicode_GET_LENGTH(unicode));
+        return -1;
+    }
+    ch = PyUnicode_READ_CHAR(unicode, 0);
+
+    if (ch > 0xFFFF)
+    {
+        sprintf(err_got, "larger-than-0xFFFF character");
+        return -1;
+    }
+    *result = (cffi_char16_t)ch;
+    return 0;
+}
+
+static int
+_my_PyUnicode_AsSingleChar32(PyObject *unicode, cffi_char32_t *result,
+                             char *err_got)
+{
+    if (PyUnicode_GET_LENGTH(unicode) != 1) {
+        sprintf(err_got, "unicode string of length %zd",
+                PyUnicode_GET_LENGTH(unicode));
+        return -1;
+    }
+    *result = PyUnicode_READ_CHAR(unicode, 0);
+    return 0;
+}
+
+static Py_ssize_t _my_PyUnicode_SizeAsChar16(PyObject *unicode)
+{
+    Py_ssize_t length = PyUnicode_GET_LENGTH(unicode);
+    Py_ssize_t result = length;
+    unsigned int kind = PyUnicode_KIND(unicode);
+
+    if (kind == PyUnicode_4BYTE_KIND)
+    {
+        Py_UCS4 *data = PyUnicode_4BYTE_DATA(unicode);
+        Py_ssize_t i;
+        for (i = 0; i < length; i++) {
+            if (data[i] > 0xFFFF)
+                result++;
+        }
+    }
+    return result;
+}
+
+static Py_ssize_t _my_PyUnicode_SizeAsChar32(PyObject *unicode)
+{
+    return PyUnicode_GET_LENGTH(unicode);
+}
+
+static int _my_PyUnicode_AsChar16(PyObject *unicode,
+                                  cffi_char16_t *result,
+                                  Py_ssize_t resultlen)
+{
+    Py_ssize_t len = PyUnicode_GET_LENGTH(unicode);
+    unsigned int kind = PyUnicode_KIND(unicode);
+    void *data = PyUnicode_DATA(unicode);
+    Py_ssize_t i;
+
+    for (i = 0; i < len; i++) {
+        cffi_char32_t ordinal = PyUnicode_READ(kind, data, i);
+        if (ordinal > 0xFFFF) {
+            if (ordinal > 0x10FFFF) {
+                PyErr_Format(PyExc_ValueError,
+                             "unicode character out of range for "
+                             "conversion to char16_t: 0x%x", (int)ordinal);
+                return -1;
+            }
+            ordinal -= 0x10000;
+            *result++ = 0xD800 | (ordinal >> 10);
+            *result++ = 0xDC00 | (ordinal & 0x3FF);
+        }
+        else
+            *result++ = ordinal;
+    }
+    return 0;
+}
+
+static int _my_PyUnicode_AsChar32(PyObject *unicode,
+                                  cffi_char32_t *result,
+                                  Py_ssize_t resultlen)
+{
+    if (PyUnicode_AsUCS4(unicode, (Py_UCS4 *)result, resultlen, 0) == NULL)
+        return -1;
+    return 0;
+}
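
_my_PyUnicode_SizeAsChar16() above returns the UTF-16 code-unit count: one unit per character, plus one extra unit for every character above U+FFFF. A quick way to cross-check that number from Python (a sketch, not part of the test suite) is to encode to UTF-16 and divide by two:

    # Sketch: char16 length == number of UTF-16 code units.
    def size_as_char16(s):
        return len(s.encode('utf-16-le')) // 2

    assert size_as_char16(u"abc") == 3
    assert size_as_char16(u"a\U0001F600b") == 4    # the astral char needs 2 units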

+ 4 - 4
desktop/core/ext-py/cffi-1.5.2/cffi/__init__.py → desktop/core/ext-py/cffi-1.11.5/cffi/__init__.py

@@ -1,11 +1,11 @@
 __all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
            'FFIError']
 
-from .api import FFI, CDefError, FFIError
-from .ffiplatform import VerificationError, VerificationMissing
+from .api import FFI
+from .error import CDefError, FFIError, VerificationError, VerificationMissing
 
-__version__ = "1.5.2"
-__version_info__ = (1, 5, 2)
+__version__ = "1.11.5"
+__version_info__ = (1, 11, 5)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
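
With the exception classes now re-exported from the new cffi.error module and the version bumped, the public import surface is unchanged; a minimal check (illustrative, not part of the package):

    # Sketch: names re-exported at the top level after this change.
    from cffi import FFI, CDefError, FFIError, VerificationError
    import cffi
    assert cffi.__version__ == "1.11.5"
    assert cffi.__version_info__ == (1, 11, 5)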

+ 145 - 0
desktop/core/ext-py/cffi-1.11.5/cffi/_cffi_errors.h

@@ -0,0 +1,145 @@
+#ifndef CFFI_MESSAGEBOX
+# ifdef _MSC_VER
+#  define CFFI_MESSAGEBOX  1
+# else
+#  define CFFI_MESSAGEBOX  0
+# endif
+#endif
+
+
+#if CFFI_MESSAGEBOX
+/* Windows only: capture initialization errors from the Python-CFFI
+   embedding logic and display them in a background thread with
+   MessageBox.  The idea is that if the whole program closes as a
+   result of this problem, then it is likely already a console program
+   and you can read the stderr output in the console too.  If it is
+   not a console program, then it will likely show its own dialog to
+   complain, or at least not close abruptly, and in that case the
+   background thread should stay alive.
+*/
+static void *volatile _cffi_bootstrap_text;
+
+static PyObject *_cffi_start_error_capture(void)
+{
+    PyObject *result = NULL;
+    PyObject *x, *m, *bi;
+
+    if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
+            (void *)1, NULL) != NULL)
+        return (PyObject *)1;
+
+    m = PyImport_AddModule("_cffi_error_capture");
+    if (m == NULL)
+        goto error;
+
+    result = PyModule_GetDict(m);
+    if (result == NULL)
+        goto error;
+
+#if PY_MAJOR_VERSION >= 3
+    bi = PyImport_ImportModule("builtins");
+#else
+    bi = PyImport_ImportModule("__builtin__");
+#endif
+    if (bi == NULL)
+        goto error;
+    PyDict_SetItemString(result, "__builtins__", bi);
+    Py_DECREF(bi);
+
+    x = PyRun_String(
+        "import sys\n"
+        "class FileLike:\n"
+        "  def write(self, x):\n"
+        "    of.write(x)\n"
+        "    self.buf += x\n"
+        "fl = FileLike()\n"
+        "fl.buf = ''\n"
+        "of = sys.stderr\n"
+        "sys.stderr = fl\n"
+        "def done():\n"
+        "  sys.stderr = of\n"
+        "  return fl.buf\n",   /* make sure the returned value stays alive */
+        Py_file_input,
+        result, result);
+    Py_XDECREF(x);
+
+ error:
+    if (PyErr_Occurred())
+    {
+        PyErr_WriteUnraisable(Py_None);
+        PyErr_Clear();
+    }
+    return result;
+}
+
+#pragma comment(lib, "user32.lib")
+
+static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
+{
+    Sleep(666);    /* may be interrupted if the whole process is closing */
+#if PY_MAJOR_VERSION >= 3
+    MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
+                L"Python-CFFI error",
+                MB_OK | MB_ICONERROR);
+#else
+    MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
+                "Python-CFFI error",
+                MB_OK | MB_ICONERROR);
+#endif
+    _cffi_bootstrap_text = NULL;
+    return 0;
+}
+
+static void _cffi_stop_error_capture(PyObject *ecap)
+{
+    PyObject *s;
+    void *text;
+
+    if (ecap == (PyObject *)1)
+        return;
+
+    if (ecap == NULL)
+        goto error;
+
+    s = PyRun_String("done()", Py_eval_input, ecap, ecap);
+    if (s == NULL)
+        goto error;
+
+    /* Show a dialog box, but in a background thread, and
+       never show multiple dialog boxes at once. */
+#if PY_MAJOR_VERSION >= 3
+    text = PyUnicode_AsWideCharString(s, NULL);
+#else
+    text = PyString_AsString(s);
+#endif
+
+    _cffi_bootstrap_text = text;
+
+    if (text != NULL)
+    {
+        HANDLE h;
+        h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
+                         NULL, 0, NULL);
+        if (h != NULL)
+            CloseHandle(h);
+    }
+    /* decref the string, but it should stay alive as 'fl.buf'
+       in the small module above.  It will really be freed only if
+       we later get another similar error.  So it's a leak of at
+       most one copy of the small module.  That's fine for this
+       situation which is usually a "fatal error" anyway. */
+    Py_DECREF(s);
+    PyErr_Clear();
+    return;
+
+  error:
+    _cffi_bootstrap_text = NULL;
+    PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif
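
The PyRun_String snippet above swaps sys.stderr for a small tee-like object so that the error text both reaches the console and can later be handed to MessageBoxW. The same capture trick written as plain Python (a sketch of the pattern, not the embedded code itself):

    # Sketch: tee stderr into a buffer, then restore it and return the text.
    import sys

    class FileLike(object):
        def __init__(self, of):
            self.of, self.buf = of, ''
        def write(self, x):
            self.of.write(x)
            self.buf += x

    _orig = sys.stderr
    sys.stderr = FileLike(_orig)

    def done():
        captured = sys.stderr.buf
        sys.stderr = _orig
        return captured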

+ 90 - 24
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/_cffi_include.h → desktop/core/ext-py/cffi-1.11.5/cffi/_cffi_include.h

@@ -1,4 +1,30 @@
 #define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
+   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+   the same works for the other two macros.  Py_DEBUG implies them,
+   but not the other way around.
+
+   Issue #350 is still open: on Windows, the code here causes it to link
+   with PYTHON36.DLL (for example) instead of PYTHON3.DLL.  A fix was
+   attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
+   does not make PYTHON3.DLL available, and so the "correctly" compiled
+   version would not run inside a virtualenv.  We will re-apply the fix
+   after virtualenv has been fixed for some time.  For explanation, see
+   issue #355.  For a workaround if you want PYTHON3.DLL and don't worry
+   about virtualenv, see issue #350.  See also 'py_limited_api' in
+   setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+#  include <pyconfig.h>
+#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
+#    define Py_LIMITED_API
+#  endif
+#endif
+
 #include <Python.h>
 #ifdef __cplusplus
 extern "C" {
@@ -42,7 +68,9 @@ extern "C" {
 #  include <stdint.h>
 # endif
 # if _MSC_VER < 1800   /* MSVC < 2013 */
-   typedef unsigned char _Bool;
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
 # endif
 #else
 # include <stdint.h>
@@ -57,6 +85,12 @@ extern "C" {
 # define _CFFI_UNUSED_FN  /* nothing */
 #endif
 
+#ifdef __cplusplus
+# ifndef _Bool
+   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
 /**********  CPython-specific section  **********/
 #ifndef PYPY_VERSION
 
@@ -71,6 +105,7 @@ extern "C" {
 #define _cffi_from_c_ulong PyLong_FromUnsignedLong
 #define _cffi_from_c_longlong PyLong_FromLongLong
 #define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
 
 #define _cffi_to_c_double PyFloat_AsDouble
 #define _cffi_to_c_float PyFloat_AsDouble
@@ -117,9 +152,9 @@ extern "C" {
 #define _cffi_to_c_char                                                  \
                  ((int(*)(PyObject *))_cffi_exports[9])
 #define _cffi_from_c_pointer                                             \
-    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
 #define _cffi_to_c_pointer                                               \
-    ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
+    ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
 #define _cffi_get_struct_layout                                          \
     not used any more
 #define _cffi_restore_errno                                              \
@@ -129,35 +164,40 @@ extern "C" {
 #define _cffi_from_c_char                                                \
     ((PyObject *(*)(char))_cffi_exports[15])
 #define _cffi_from_c_deref                                               \
-    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
 #define _cffi_to_c                                                       \
-    ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
+    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
 #define _cffi_from_c_struct                                              \
-    ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
+    ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
 #define _cffi_to_c_wchar_t                                               \
-    ((wchar_t(*)(PyObject *))_cffi_exports[19])
+    ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
 #define _cffi_from_c_wchar_t                                             \
-    ((PyObject *(*)(wchar_t))_cffi_exports[20])
+    ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
 #define _cffi_to_c_long_double                                           \
     ((long double(*)(PyObject *))_cffi_exports[21])
 #define _cffi_to_c__Bool                                                 \
     ((_Bool(*)(PyObject *))_cffi_exports[22])
 #define _cffi_prepare_pointer_call_argument                              \
-    ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
+    ((Py_ssize_t(*)(struct _cffi_ctypedescr *,                           \
+                    PyObject *, char **))_cffi_exports[23])
 #define _cffi_convert_array_from_object                                  \
-    ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
+    ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
 #define _CFFI_CPIDX  25
 #define _cffi_call_python                                                \
     ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
-#define _CFFI_NUM_EXPORTS 26
+#define _cffi_to_c_wchar3216_t                                           \
+    ((int(*)(PyObject *))_cffi_exports[26])
+#define _cffi_from_c_wchar3216_t                                         \
+    ((PyObject *(*)(int))_cffi_exports[27])
+#define _CFFI_NUM_EXPORTS 28
 
-typedef struct _ctypedescr CTypeDescrObject;
+struct _cffi_ctypedescr;
 
 static void *_cffi_exports[_CFFI_NUM_EXPORTS];
 
 #define _cffi_type(index)   (                           \
     assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
-    (CTypeDescrObject *)_cffi_types[index])
+    (struct _cffi_ctypedescr *)_cffi_types[index])
 
 static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
                             const struct _cffi_type_context_s *ctx)
@@ -190,20 +230,46 @@ static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
     return NULL;
 }
 
-_CFFI_UNUSED_FN
-static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected,
-                                    const char *fnname)
+
+#ifdef HAVE_WCHAR_H
+typedef wchar_t _cffi_wchar_t;
+#else
+typedef uint16_t _cffi_wchar_t;   /* same random pick as _cffi_backend.c */
+#endif
+
+_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
+{
+    if (sizeof(_cffi_wchar_t) == 2)
+        return (uint16_t)_cffi_to_c_wchar_t(o);
+    else
+        return (uint16_t)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
+{
+    if (sizeof(_cffi_wchar_t) == 2)
+        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+    else
+        return _cffi_from_c_wchar3216_t((int)x);
+}
+
+_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
+{
+    if (sizeof(_cffi_wchar_t) == 4)
+        return (int)_cffi_to_c_wchar_t(o);
+    else
+        return (int)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(int x)
 {
-    if (PyTuple_GET_SIZE(args_tuple) != expected) {
-        PyErr_Format(PyExc_TypeError,
-                     "%.150s() takes exactly %zd arguments (%zd given)",
-                     fnname, expected, PyTuple_GET_SIZE(args_tuple));
-        return NULL;
-    }
-    return &PyTuple_GET_ITEM(args_tuple, 0);   /* pointer to the first item,
-                                                  the others follow */
+    if (sizeof(_cffi_wchar_t) == 4)
+        return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+    else
+        return _cffi_from_c_wchar3216_t(x);
 }
 
+
 /**********  end CPython-specific section  **********/
 #else
 _CFFI_UNUSED_FN
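
At the Python level, the char16_t/char32_t conversion helpers defined above are what make those types behave like wchar_t: values cross the boundary as 1-character unicode strings, with surrogate handling done on the C side. A hedged usage sketch (assumes a stock cffi 1.11 install; the names below are illustrative):

    # Sketch: char32_t arrays round-trip unicode text, including astral chars.
    from cffi import FFI

    ffi = FFI()
    buf = ffi.new("char32_t[]", u"\U0001F600")     # accepted since cffi 1.11
    assert ffi.string(buf) == u"\U0001F600"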

+ 31 - 64
desktop/core/ext-py/cffi-1.5.2/cffi/_embedding.h → desktop/core/ext-py/cffi-1.11.5/cffi/_embedding.h

@@ -1,7 +1,12 @@
 
 /***** Support code for embedding *****/
 
-#if defined(_MSC_VER)
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
 #  define CFFI_DLLEXPORT  __declspec(dllexport)
 #elif defined(__GNUC__)
 #  define CFFI_DLLEXPORT  __attribute__((visibility("default")))
@@ -109,6 +114,8 @@ static void _cffi_release_reentrant_mutex(void)
 /**********  CPython-specific section  **********/
 #ifndef PYPY_VERSION
 
+#include "_cffi_errors.h"
+
 
 #define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]
 
@@ -139,32 +146,6 @@ static int _cffi_initialize_python(void)
     PyGILState_STATE state;
     PyObject *pycode=NULL, *global_dict=NULL, *x;
 
-#if PY_MAJOR_VERSION >= 3
-    /* see comments in _cffi_carefully_make_gil() about the
-       Python2/Python3 difference 
-    */
-#else
-    /* Acquire the GIL.  We have no threadstate here.  If Python is 
-       already initialized, it is possible that there is already one
-       existing for this thread, but it is not made current now.
-    */
-    PyEval_AcquireLock();
-
-    _cffi_py_initialize();
-
-    /* The Py_InitializeEx() sometimes made a threadstate for us, but
-       not always.  Indeed Py_InitializeEx() could be called and do
-       nothing.  So do we have a threadstate, or not?  We don't know,
-       but we can replace it with NULL in all cases.
-    */
-    (void)PyThreadState_Swap(NULL);
-
-    /* Now we can release the GIL and re-acquire immediately using the
-       logic of PyGILState(), which handles making or installing the
-       correct threadstate.
-    */
-    PyEval_ReleaseLock();
-#endif
     state = PyGILState_Ensure();
 
     /* Call the initxxx() function from the present module.  It will
@@ -220,8 +201,16 @@ static int _cffi_initialize_python(void)
         /* Print as much information as potentially useful.
            Debugging load-time failures with embedding is not fun
         */
+        PyObject *ecap;
         PyObject *exception, *v, *tb, *f, *modules, *mod;
         PyErr_Fetch(&exception, &v, &tb);
+        ecap = _cffi_start_error_capture();
+        f = PySys_GetObject((char *)"stderr");
+        if (f != NULL && f != Py_None) {
+            PyFile_WriteString(
+                "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
+        }
+
         if (exception != NULL) {
             PyErr_NormalizeException(&exception, &v, &tb);
             PyErr_Display(exception, v, tb);
@@ -230,10 +219,9 @@ static int _cffi_initialize_python(void)
         Py_XDECREF(v);
         Py_XDECREF(tb);
 
-        f = PySys_GetObject((char *)"stderr");
         if (f != NULL && f != Py_None) {
             PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
-                               "\ncompiled with cffi version: 1.5.2"
+                               "\ncompiled with cffi version: 1.11.5"
                                "\n_cffi_backend module: ", f);
             modules = PyImport_GetModuleDict();
             mod = PyDict_GetItemString(modules, "_cffi_backend");
@@ -249,6 +237,7 @@ static int _cffi_initialize_python(void)
             PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
             PyFile_WriteString("\n\n", f);
         }
+        _cffi_stop_error_capture(ecap);
     }
     result = -1;
     goto done;
@@ -263,16 +252,14 @@ static int _cffi_carefully_make_gil(void)
        that we don't hold the GIL before (if it exists), and we don't
        hold it afterwards.
 
-       What it really does is completely different in Python 2 and 
-       Python 3.
-
-    Python 2
-    ========
+       (What it really does used to be completely different in Python 2
+       and Python 3, with the Python 2 solution avoiding the spin-lock
+       around the Py_InitializeEx() call.  However, after recent changes
+       to CPython 2.7 (issue #358) it no longer works.  So we use the
+       Python 3 solution everywhere.)
 
-       Initialize the GIL, without initializing the rest of Python,
-       by calling PyEval_InitThreads().
-
-       PyEval_InitThreads() must not be called concurrently at all.
+       This initializes Python by calling Py_InitializeEx().
+       Important: this must not be called concurrently at all.
        So we use a global variable as a simple spin lock.  This global
        variable must be from 'libpythonX.Y.so', not from this
        cffi-based extension module, because it must be shared from
@@ -282,18 +269,6 @@ static int _cffi_carefully_make_gil(void)
        string "ENDMARKER".  We change it temporarily to point to the
        next character in that string.  (Yes, I know it's REALLY
        obscure.)
-
-    Python 3
-    ========
-
-       In Python 3, PyEval_InitThreads() cannot be called before
-       Py_InitializeEx() any more.  So this function calls
-       Py_InitializeEx() first.  It uses the same obscure logic to
-       make sure we never call it concurrently.
-
-       Arguably, this is less good on the spinlock, because
-       Py_InitializeEx() takes much longer to run than
-       PyEval_InitThreads().  But I didn't find a way around it.
     */
 
 #ifdef WITH_THREAD
@@ -317,8 +292,7 @@ static int _cffi_carefully_make_gil(void)
     }
 #endif
 
-#if PY_MAJOR_VERSION >= 3
-    /* Python 3: call Py_InitializeEx() */
+    /* call Py_InitializeEx() */
     {
         PyGILState_STATE state = PyGILState_UNLOCKED;
         if (!Py_IsInitialized())
@@ -329,17 +303,6 @@ static int _cffi_carefully_make_gil(void)
         PyEval_InitThreads();
         PyGILState_Release(state);
     }
-#else
-    /* Python 2: call PyEval_InitThreads() */
-# ifdef WITH_THREAD
-    if (!PyEval_ThreadsInitialized()) {
-        PyEval_InitThreads();    /* makes the GIL */
-        PyEval_ReleaseLock();    /* then release it */
-    }
-    /* else: there is already a GIL, but we still needed to do the
-       spinlock dance to make sure that we see it as fully ready */
-# endif
-#endif
 
 #ifdef WITH_THREAD
     /* release the lock */
@@ -366,7 +329,7 @@ static struct _cffi_pypy_init_s {
     const char *code;
 } _cffi_pypy_init = {
     _CFFI_MODULE_NAME,
-    _CFFI_PYTHON_STARTUP_FUNC,
+    (void(*)(const void *[]))_CFFI_PYTHON_STARTUP_FUNC,
     _CFFI_PYTHON_STARTUP_CODE,
 };
 
@@ -515,3 +478,7 @@ static int cffi_start_python(void)
 #undef cffi_compare_and_swap
 #undef cffi_write_barrier
 #undef cffi_read_barrier
+
+#ifdef __cplusplus
+}
+#endif
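
The "carefully make the GIL" comment above describes a simple once-only initialization protocol: spin on a shared flag, call Py_InitializeEx() if nobody has yet, then release. An illustrative-only Python rendering of that idea (the real code uses a byte inside libpython plus real compare-and-swap and memory-barrier primitives, not a Python lock):

    # Sketch: initialize exactly once, never concurrently.
    import threading, time

    _flag = threading.Lock()
    _done = [False]

    def carefully_initialize(initialize_once):
        while not _flag.acquire(False):        # spin, mirroring the C spin lock
            time.sleep(0)
        try:
            if not _done[0]:
                initialize_once()              # stands in for Py_InitializeEx()
                _done[0] = True
        finally:
            _flag.release()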

+ 160 - 73
desktop/core/ext-py/cffi-1.5.2/cffi/api.py → desktop/core/ext-py/cffi-1.11.5/cffi/api.py

@@ -1,5 +1,7 @@
 import sys, types
 from .lock import allocate_lock
+from .error import CDefError
+from . import model
 
 try:
     callable
@@ -15,17 +17,6 @@ except NameError:
     basestring = str
 
 
-class FFIError(Exception):
-    pass
-
-class CDefError(Exception):
-    def __str__(self):
-        try:
-            line = 'line %d: ' % (self.args[1].coord.line,)
-        except (AttributeError, TypeError, IndexError):
-            line = ''
-        return '%s%s' % (line, self.args[0])
-
 
 class FFI(object):
     r'''
@@ -49,18 +40,27 @@ class FFI(object):
         """Create an FFI instance.  The 'backend' argument is used to
         select a non-default backend, mostly for tests.
         """
-        from . import cparser, model
         if backend is None:
             # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
             # _cffi_backend.so compiled.
             import _cffi_backend as backend
             from . import __version__
-            assert backend.__version__ == __version__, \
-               "version mismatch, %s != %s" % (backend.__version__, __version__)
+            if backend.__version__ != __version__:
+                # bad version!  Try to be as explicit as possible.
+                if hasattr(backend, '__file__'):
+                    # CPython
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r.  The two versions should be equal; check your installation." % (
+                        __version__, __file__,
+                        backend.__version__, backend.__file__))
+                else:
+                    # PyPy
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  This interpreter comes with a built-in '_cffi_backend' module, which is version %s.  The two versions should be equal; check your installation." % (
+                        __version__, __file__, backend.__version__))
             # (If you insist you can also try to pass the option
             # 'backend=backend_ctypes.CTypesBackend()', but don't
             # rely on it!  It's probably not going to work well.)
 
+        from . import cparser
         self._backend = backend
         self._lock = allocate_lock()
         self._parser = cparser.Parser()
@@ -75,9 +75,10 @@ class FFI(object):
         self._init_once_cache = {}
         self._cdef_version = None
         self._embedding = None
+        self._typecache = model.get_typecache(backend)
         if hasattr(backend, 'set_ffi'):
             backend.set_ffi(self)
-        for name in backend.__dict__:
+        for name in list(backend.__dict__):
             if name.startswith('RTLD_'):
                 setattr(self, name, getattr(backend, name))
         #
@@ -93,6 +94,7 @@ class FFI(object):
             # ctypes backend: attach these constants to the instance
             self.NULL = self.cast(self.BVoidP, 0)
             self.CData, self.CType = backend._get_types()
+        self.buffer = backend.buffer
 
     def cdef(self, csource, override=False, packed=False):
         """Parse the given C source.  This registers all declared functions,
@@ -141,6 +143,13 @@ class FFI(object):
             self._libraries.append(lib)
         return lib
 
+    def dlclose(self, lib):
+        """Close a library obtained with ffi.dlopen().  After this call,
+        access to functions or variables from the library will fail
+        (possibly with a segmentation fault).
+        """
+        type(lib).__cffi_close__(lib)
+
     def _typeof_locked(self, cdecl):
         # call me with the lock!
         key = cdecl
@@ -212,7 +221,7 @@ class FFI(object):
 
     def offsetof(self, cdecl, *fields_or_indexes):
         """Return the offset of the named field inside the given
-        structure or array, which must be given as a C type name.  
+        structure or array, which must be given as a C type name.
         You can give several field names in case of nested structures.
         You can also give numeric values which correspond to array
         items, in case of an array type.
@@ -299,24 +308,41 @@ class FFI(object):
         """
         return self._backend.string(cdata, maxlen)
 
-    def buffer(self, cdata, size=-1):
-        """Return a read-write buffer object that references the raw C data
-        pointed to by the given 'cdata'.  The 'cdata' must be a pointer or
-        an array.  Can be passed to functions expecting a buffer, or directly
-        manipulated with:
+    def unpack(self, cdata, length):
+        """Unpack an array of C data of the given length,
+        returning a Python string/unicode/list.
+
+        If 'cdata' is a pointer to 'char', returns a byte string.
+        It does not stop at the first null.  This is equivalent to:
+        ffi.buffer(cdata, length)[:]
+
+        If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
+        'length' is measured in wchar_t's; it is not the size in bytes.
 
-            buf[:]          get a copy of it in a regular string, or
-            buf[idx]        as a single character
-            buf[:] = ...
-            buf[idx] = ...  change the content
+        If 'cdata' is a pointer to anything else, returns a list of
+        'length' items.  This is a faster equivalent to:
+        [cdata[i] for i in range(length)]
         """
-        return self._backend.buffer(cdata, size)
+        return self._backend.unpack(cdata, length)
+
+   #def buffer(self, cdata, size=-1):
+   #    """Return a read-write buffer object that references the raw C data
+   #    pointed to by the given 'cdata'.  The 'cdata' must be a pointer or
+   #    an array.  Can be passed to functions expecting a buffer, or directly
+   #    manipulated with:
+   #
+   #        buf[:]          get a copy of it in a regular string, or
+   #        buf[idx]        as a single character
+   #        buf[:] = ...
+   #        buf[idx] = ...  change the content
+   #    """
+   #    note that 'buffer' is a type, set on this instance by __init__
 
     def from_buffer(self, python_buffer):
         """Return a <cdata 'char[]'> that points to the data of the
         given Python object, which must support the buffer interface.
-        Note that this is not meant to be used on the built-in types str,
-        unicode, or bytearray (you can build 'char[]' arrays explicitly)
+        Note that this is not meant to be used on the built-in types
+        str or unicode (you can build 'char[]' arrays explicitly)
         but only on objects containing large quantities of raw data
         in some other format, like 'array.array' or numpy arrays.
         """
@@ -375,25 +401,17 @@ class FFI(object):
             replace_with = ' ' + replace_with
         return self._backend.getcname(cdecl, replace_with)
 
-    def gc(self, cdata, destructor):
+    def gc(self, cdata, destructor, size=0):
         """Return a new cdata object that points to the same
         data.  Later, when this new cdata object is garbage-collected,
         'destructor(old_cdata_object)' will be called.
+
+        The optional 'size' gives an estimate of the size, used to
+        trigger the garbage collection more eagerly.  So far only used
+        on PyPy.  It tells the GC that the returned object keeps alive
+        roughly 'size' bytes of external memory.
         """
-        try:
-            gcp = self._backend.gcp
-        except AttributeError:
-            pass
-        else:
-            return gcp(cdata, destructor)
-        #
-        with self._lock:
-            try:
-                gc_weakrefs = self.gc_weakrefs
-            except AttributeError:
-                from .gc_weakref import GcWeakrefs
-                gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self)
-            return gc_weakrefs.build(cdata, destructor)
+        return self._backend.gcp(cdata, destructor, size)
 
     def _get_cached_btype(self, type):
         assert self._lock.acquire(False) is False
@@ -448,7 +466,6 @@ class FFI(object):
         return self._backend.getwinerror(code)
 
     def _pointer_to(self, ctype):
-        from . import model
         with self._lock:
             return model.pointer_cache(self, ctype)
 
@@ -458,7 +475,12 @@ class FFI(object):
         field or array item in the structure or array, recursively in
         case of nested structures.
         """
-        ctype = self._backend.typeof(cdata)
+        try:
+            ctype = self._backend.typeof(cdata)
+        except TypeError:
+            if '__addressof__' in type(cdata).__dict__:
+                return type(cdata).__addressof__(cdata, *fields_or_indexes)
+            raise
         if fields_or_indexes:
             ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
         else:
@@ -550,21 +572,27 @@ class FFI(object):
                 lst.append(value)
         #
         if '__pypy__' in sys.builtin_module_names:
+            import os
             if sys.platform == "win32":
-                # we need 'libpypy-c.lib'.  Right now, distributions of
-                # pypy contain it as 'include/python27.lib'.  You need
-                # to manually copy it back to 'libpypy-c.lib'.  XXX Will
-                # be fixed in the next pypy release.
-                pythonlib = "libpypy-c"
+                # we need 'libpypy-c.lib'.  Current distributions of
+                # pypy (>= 4.1) contain it as 'libs/python27.lib'.
+                pythonlib = "python27"
                 if hasattr(sys, 'prefix'):
-                    ensure('library_dirs', sys.prefix)
+                    ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
             else:
                 # we need 'libpypy-c.{so,dylib}', which should be by
-                # default located in 'sys.prefix/bin'
-                pythonlib = "pypy-c"
+                # default located in 'sys.prefix/bin' for installed
+                # systems.
+                if sys.version_info < (3,):
+                    pythonlib = "pypy-c"
+                else:
+                    pythonlib = "pypy3-c"
                 if hasattr(sys, 'prefix'):
-                    import os
                     ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
+            # On uninstalled PyPy builds, libpypy-c is typically found
+            # in .../pypy/goal/.
+            if hasattr(sys, 'prefix'):
+                ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
         else:
             if sys.platform == "win32":
                 template = "python%d%d"
@@ -587,11 +615,15 @@ class FFI(object):
             ensure('extra_link_args', '/MANIFEST')
 
     def set_source(self, module_name, source, source_extension='.c', **kwds):
+        import os
         if hasattr(self, '_assigned_source'):
             raise ValueError("set_source() cannot be called several times "
                              "per ffi object")
         if not isinstance(module_name, basestring):
             raise TypeError("'module_name' must be a string")
+        if os.sep in module_name or (os.altsep and os.altsep in module_name):
+            raise ValueError("'module_name' must not contain '/': use a dotted "
+                             "name to make a 'package.module' location")
         self._assigned_source = (str(module_name), source,
                                  source_extension, kwds)
 
@@ -645,7 +677,7 @@ class FFI(object):
         recompile(self, module_name, source,
                   c_file=filename, call_c_compiler=False, **kwds)
 
-    def compile(self, tmpdir='.', verbose=0, target=None):
+    def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
         """The 'target' argument gives the final file name of the
         compiled DLL.  Use '*' to force distutils' choice, suitable for
         regular CPython C API modules.  Use a file name ending in '.*'
@@ -662,7 +694,7 @@ class FFI(object):
         module_name, source, source_extension, kwds = self._assigned_source
         return recompile(self, module_name, source, tmpdir=tmpdir,
                          target=target, source_extension=source_extension,
-                         compiler_verbose=verbose, **kwds)
+                         compiler_verbose=verbose, debug=debug, **kwds)
 
     def init_once(self, func, tag):
         # Read _init_once_cache[tag], which is either (False, lock) if
@@ -718,26 +750,54 @@ class FFI(object):
         raise ValueError("ffi.def_extern() is only available on API-mode FFI "
                          "objects")
 
+    def list_types(self):
+        """Returns the user type names known to this FFI instance.
+        This returns a tuple containing three lists of names:
+        (typedef_names, names_of_structs, names_of_unions)
+        """
+        typedefs = []
+        structs = []
+        unions = []
+        for key in self._parser._declarations:
+            if key.startswith('typedef '):
+                typedefs.append(key[8:])
+            elif key.startswith('struct '):
+                structs.append(key[7:])
+            elif key.startswith('union '):
+                unions.append(key[6:])
+        typedefs.sort()
+        structs.sort()
+        unions.sort()
+        return (typedefs, structs, unions)
+
 
 def _load_backend_lib(backend, name, flags):
+    import os
     if name is None:
         if sys.platform != "win32":
             return backend.load_library(None, flags)
         name = "c"    # Windows: load_library(None) fails, but this works
-                      # (backward compatibility hack only)
-    try:
-        if '.' not in name and '/' not in name:
-            raise OSError("library not found: %r" % (name,))
-        return backend.load_library(name, flags)
-    except OSError:
-        import ctypes.util
-        path = ctypes.util.find_library(name)
-        if path is None:
-            raise     # propagate the original OSError
-        return backend.load_library(path, flags)
+                      # on Python 2 (backward compatibility hack only)
+    first_error = None
+    if '.' in name or '/' in name or os.sep in name:
+        try:
+            return backend.load_library(name, flags)
+        except OSError as e:
+            first_error = e
+    import ctypes.util
+    path = ctypes.util.find_library(name)
+    if path is None:
+        if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
+            raise OSError("dlopen(None) cannot work on Windows for Python 3 "
+                          "(see http://bugs.python.org/issue23606)")
+        msg = ("ctypes.util.find_library() did not manage "
+               "to locate a library called %r" % (name,))
+        if first_error is not None:
+            msg = "%s.  Additionally, %s" % (first_error, msg)
+        raise OSError(msg)
+    return backend.load_library(path, flags)
 
 def _make_ffi_library(ffi, libname, flags):
-    import os
     backend = ffi._backend
     backendlib = _load_backend_lib(backend, libname, flags)
     #
@@ -745,10 +805,7 @@ def _make_ffi_library(ffi, libname, flags):
         key = 'function ' + name
         tp, _ = ffi._parser._declarations[key]
         BType = ffi._get_cached_btype(tp)
-        try:
-            value = backendlib.load_function(BType, name)
-        except KeyError as e:
-            raise AttributeError('%s: %s' % (name, e))
+        value = backendlib.load_function(BType, name)
         library.__dict__[name] = value
     #
     def accessor_variable(name):
@@ -761,6 +818,21 @@ def _make_ffi_library(ffi, libname, flags):
             lambda self: read_variable(BType, name),
             lambda self, value: write_variable(BType, name, value)))
     #
+    def addressof_var(name):
+        try:
+            return addr_variables[name]
+        except KeyError:
+            with ffi._lock:
+                if name not in addr_variables:
+                    key = 'variable ' + name
+                    tp, _ = ffi._parser._declarations[key]
+                    BType = ffi._get_cached_btype(tp)
+                    if BType.kind != 'array':
+                        BType = model.pointer_cache(ffi, BType)
+                    p = backendlib.load_function(BType, name)
+                    addr_variables[name] = p
+            return addr_variables[name]
+    #
     def accessor_constant(name):
         raise NotImplementedError("non-integer constant '%s' cannot be "
                                   "accessed from a dlopen() library" % (name,))
@@ -770,12 +842,12 @@ def _make_ffi_library(ffi, libname, flags):
     #
     accessors = {}
     accessors_version = [False]
+    addr_variables = {}
     #
     def update_accessors():
         if accessors_version[0] is ffi._cdef_version:
             return
         #
-        from . import model
         for key, (tp, _) in ffi._parser._declarations.items():
             if not isinstance(tp, model.EnumType):
                 tag, name = key.split(' ', 1)
@@ -821,6 +893,21 @@ def _make_ffi_library(ffi, libname, flags):
             with ffi._lock:
                 update_accessors()
                 return accessors.keys()
+        def __addressof__(self, name):
+            if name in library.__dict__:
+                return library.__dict__[name]
+            if name in FFILibrary.__dict__:
+                return addressof_var(name)
+            make_accessor(name)
+            if name in library.__dict__:
+                return library.__dict__[name]
+            if name in FFILibrary.__dict__:
+                return addressof_var(name)
+            raise AttributeError("cffi library has no function or "
+                                 "global variable named '%s'" % (name,))
+        def __cffi_close__(self):
+            backendlib.close_lib()
+            self.__dict__.clear()
     #
     if libname is not None:
         try:
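
Several of the additions to api.py above are new public entry points (unpack, dlclose, the 'size' hint to gc, list_types). A short hedged sketch of how they are meant to be called; the library name "m" and the 4096 size are placeholders, and the dlopen part assumes a Unix-like system:

    # Sketch of the new FFI surface added above.
    from cffi import FFI

    ffi = FFI()
    ffi.cdef("double sqrt(double);")
    lib = ffi.dlopen("m")                       # C math library (Unix)
    assert lib.sqrt(4.0) == 2.0
    ffi.dlclose(lib)                            # new: explicitly close the library

    p = ffi.gc(ffi.new("char[]", 4096), lambda x: None, size=4096)  # new 'size' hint
    typedefs, structs, unions = ffi.list_types()                    # new introspection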

+ 59 - 13
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/backend_ctypes.py → desktop/core/ext-py/cffi-1.11.5/cffi/backend_ctypes.py

@@ -112,11 +112,20 @@ class CTypesData(object):
     def _make_cmp(name):
         cmpfunc = getattr(operator, name)
         def cmp(self, other):
-            if isinstance(other, CTypesData):
+            v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
+            w_is_ptr = (isinstance(other, CTypesData) and
+                           not isinstance(other, CTypesGenericPrimitive))
+            if v_is_ptr and w_is_ptr:
                 return cmpfunc(self._convert_to_address(None),
                                other._convert_to_address(None))
-            else:
+            elif v_is_ptr or w_is_ptr:
                 return NotImplemented
+            else:
+                if isinstance(self, CTypesGenericPrimitive):
+                    self = self._value
+                if isinstance(other, CTypesGenericPrimitive):
+                    other = other._value
+                return cmpfunc(self, other)
         cmp.func_name = name
         return cmp
 
@@ -128,7 +137,7 @@ class CTypesData(object):
     __ge__ = _make_cmp('__ge__')
 
     def __hash__(self):
-        return hash(type(self)) ^ hash(self._convert_to_address(None))
+        return hash(self._convert_to_address(None))
 
     def _to_string(self, maxlen):
         raise TypeError("string(): %r" % (self,))
@@ -137,14 +146,8 @@ class CTypesData(object):
 class CTypesGenericPrimitive(CTypesData):
     __slots__ = []
 
-    def __eq__(self, other):
-        return self is other
-
-    def __ne__(self, other):
-        return self is not other
-
     def __hash__(self):
-        return object.__hash__(self)
+        return hash(self._value)
 
     def _get_own_repr(self):
         return repr(self._from_ctypes(self._value))
@@ -205,9 +208,7 @@ class CTypesGenericPtr(CTypesData):
 
     def __nonzero__(self):
         return bool(self._address)
-    
-    def __bool__(self):
-        return bool(self._address)
+    __bool__ = __nonzero__
 
     @classmethod
     def _to_ctypes(cls, value):
@@ -460,6 +461,12 @@ class CTypesBackend(object):
                         return x._value
                     raise TypeError("character expected, got %s" %
                                     type(x).__name__)
+                def __nonzero__(self):
+                    return ord(self._value) != 0
+            else:
+                def __nonzero__(self):
+                    return self._value != 0
+            __bool__ = __nonzero__
 
             if kind == 'float':
                 @staticmethod
@@ -993,6 +1000,45 @@ class CTypesBackend(object):
         assert onerror is None   # XXX not implemented
         return BType(source, error)
 
+    _weakref_cache_ref = None
+
+    def gcp(self, cdata, destructor, size=0):
+        if self._weakref_cache_ref is None:
+            import weakref
+            class MyRef(weakref.ref):
+                def __eq__(self, other):
+                    myref = self()
+                    return self is other or (
+                        myref is not None and myref is other())
+                def __ne__(self, other):
+                    return not (self == other)
+                def __hash__(self):
+                    try:
+                        return self._hash
+                    except AttributeError:
+                        self._hash = hash(self())
+                        return self._hash
+            self._weakref_cache_ref = {}, MyRef
+        weak_cache, MyRef = self._weakref_cache_ref
+
+        if destructor is None:
+            try:
+                del weak_cache[MyRef(cdata)]
+            except KeyError:
+                raise TypeError("Can remove destructor only on a object "
+                                "previously returned by ffi.gc()")
+            return None
+
+        def remove(k):
+            cdata, destructor = weak_cache.pop(k, (None, None))
+            if destructor is not None:
+                destructor(cdata)
+
+        new_cdata = self.cast(self.typeof(cdata), cdata)
+        assert new_cdata is not cdata
+        weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
+        return new_cdata
+
     typeof = type
 
     def getcname(self, BType, replace_with):
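
The gcp() implementation above rebuilds ffi.gc() for the ctypes backend out of weakref callbacks: the weak reference itself is the dictionary key (which keeps it alive), and its callback pops the entry and runs the destructor once the referent is collected. A stripped-down sketch of the same pattern with made-up names:

    # Sketch: weakref-callback based destructors, as in gcp() above.
    import weakref

    _pending = {}                               # weakref -> (payload, destructor)

    def gc_like(obj, payload, destructor):
        def on_collect(ref):
            kept_payload, kept_destructor = _pending.pop(ref)
            kept_destructor(kept_payload)
        _pending[weakref.ref(obj, on_collect)] = (payload, destructor)
        return obj

    freed = []
    class Resource(object):
        pass
    r = gc_like(Resource(), "handle-42", freed.append)
    del r                                       # callback fires when collected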

+ 10 - 2
desktop/core/ext-py/cffi-1.5.2/cffi/cffi_opcode.py → desktop/core/ext-py/cffi-1.11.5/cffi/cffi_opcode.py

@@ -1,3 +1,4 @@
+from .error import VerificationError
 
 class CffiOp(object):
     def __init__(self, op, arg):
@@ -19,7 +20,6 @@ class CffiOp(object):
                                     % (self.arg,))
             return format_four_bytes(value)
         if isinstance(self.arg, str):
-            from .ffiplatform import VerificationError
             raise VerificationError("cannot emit to Python: %r" % (self.arg,))
         return format_four_bytes((self.arg << 8) | self.op)
 
@@ -105,8 +105,12 @@ PRIM_INT_FAST64    = 44
 PRIM_UINT_FAST64   = 45
 PRIM_INTMAX        = 46
 PRIM_UINTMAX       = 47
+PRIM_FLOATCOMPLEX  = 48
+PRIM_DOUBLECOMPLEX = 49
+PRIM_CHAR16        = 50
+PRIM_CHAR32        = 51
 
-_NUM_PRIM          = 48
+_NUM_PRIM          = 52
 _UNKNOWN_PRIM          = -1
 _UNKNOWN_FLOAT_PRIM    = -2
 _UNKNOWN_LONG_DOUBLE   = -3
@@ -128,8 +132,12 @@ PRIMITIVE_TO_INDEX = {
     'float':              PRIM_FLOAT,
     'double':             PRIM_DOUBLE,
     'long double':        PRIM_LONGDOUBLE,
+    'float _Complex':     PRIM_FLOATCOMPLEX,
+    'double _Complex':    PRIM_DOUBLECOMPLEX,
     '_Bool':              PRIM_BOOL,
     'wchar_t':            PRIM_WCHAR,
+    'char16_t':           PRIM_CHAR16,
+    'char32_t':           PRIM_CHAR32,
     'int8_t':             PRIM_INT8,
     'uint8_t':            PRIM_UINT8,
     'int16_t':            PRIM_INT16,
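
The four added opcodes cover the char16_t/char32_t and float/double _Complex primitives; _NUM_PRIM grows accordingly, and parse_c_type.h further below mirrors the same constants on the C side. A sketch of cdef() declarations that now map onto the new PRIM_* indices (function names are made up):

import cffi

ffibuilder = cffi.FFI()
ffibuilder.cdef("""
    char32_t first_char32(const char32_t *s);    /* -> PRIM_CHAR32 */
    double _Complex conj2(double _Complex z);    /* -> PRIM_DOUBLECOMPLEX */
""")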

+ 9 - 5
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/commontypes.py → desktop/core/ext-py/cffi-1.11.5/cffi/commontypes.py

@@ -1,5 +1,6 @@
 import sys
-from . import api, model
+from . import model
+from .error import FFIError
 
 
 COMMON_TYPES = {}
@@ -31,12 +32,15 @@ def resolve_common_type(parser, commontype):
         elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
             result, quals = model.PrimitiveType(cdecl), 0
         elif cdecl == 'set-unicode-needed':
-            raise api.FFIError("The Windows type %r is only available after "
-                               "you call ffi.set_unicode()" % (commontype,))
+            raise FFIError("The Windows type %r is only available after "
+                           "you call ffi.set_unicode()" % (commontype,))
         else:
             if commontype == cdecl:
-                raise api.FFIError("Unsupported type: %r.  Please file a bug "
-                                   "if you think it should be." % (commontype,))
+                raise FFIError(
+                    "Unsupported type: %r.  Please look at "
+        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
+                    "and file an issue if you think this type should really "
+                    "be supported." % (commontype,))
             result, quals = parser.parse_type_and_quals(cdecl)   # recursive
 
         assert isinstance(result, model.BaseTypeByIdentity)

+ 123 - 71
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/cparser.py → desktop/core/ext-py/cffi-1.11.5/cffi/cparser.py

@@ -1,5 +1,6 @@
-from . import api, model
+from . import model
 from .commontypes import COMMON_TYPES, resolve_common_type
+from .error import FFIError, CDefError
 try:
     from . import _pycparser as pycparser
 except ImportError:
@@ -15,6 +16,7 @@ try:
 except ImportError:
     lock = None
 
+CDEF_SOURCE_STRING = "<cdef source string>"
 _r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                         re.DOTALL | re.MULTILINE)
 _r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
@@ -29,9 +31,13 @@ _r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
 _r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
 _r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
 _r_cdecl = re.compile(r"\b__cdecl\b")
-_r_extern_python = re.compile(r'\bextern\s*"Python"\s*.')
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
 _r_star_const_space = re.compile(       # matches "* const "
     r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
 
 def _get_parser():
     global _parser_cache
@@ -88,6 +94,12 @@ def _preprocess_extern_python(csource):
     #     void __cffi_extern_python_start;
     #     int foo(int);
     #     void __cffi_extern_python_stop;
+    #
+    # input: `extern "Python+C" int foo(int);`
+    # output:
+    #     void __cffi_extern_python_plus_c_start;
+    #     int foo(int);
+    #     void __cffi_extern_python_stop;
     parts = []
     while True:
         match = _r_extern_python.search(csource)
@@ -98,12 +110,15 @@ def _preprocess_extern_python(csource):
         #print ''.join(parts)+csource
         #print '=>'
         parts.append(csource[:match.start()])
-        parts.append('void __cffi_extern_python_start; ')
+        if 'C' in match.group(1):
+            parts.append('void __cffi_extern_python_plus_c_start; ')
+        else:
+            parts.append('void __cffi_extern_python_start; ')
         if csource[endpos] == '{':
             # grouping variant
             closing = csource.find('}', endpos)
             if closing < 0:
-                raise api.CDefError("'extern \"Python\" {': no '}' found")
+                raise CDefError("'extern \"Python\" {': no '}' found")
             if csource.find('{', endpos + 1, closing) >= 0:
                 raise NotImplementedError("cannot use { } inside a block "
                                           "'extern \"Python\" { ... }'")
@@ -113,7 +128,7 @@ def _preprocess_extern_python(csource):
             # non-grouping variant
             semicolon = csource.find(';', endpos)
             if semicolon < 0:
-                raise api.CDefError("'extern \"Python\": no ';' found")
+                raise CDefError("'extern \"Python\": no ';' found")
             parts.append(csource[endpos:semicolon+1])
             csource = csource[semicolon+1:]
         parts.append(' void __cffi_extern_python_stop;')
@@ -169,6 +184,10 @@ def _preprocess(csource):
             assert csource[p:p+3] == '...'
             csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
                                                  csource[p+3:])
+    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
     # Replace all remaining "..." with the same name, "__dotdotdot__",
     # which is declared with a typedef for the purpose of C parsing.
     return csource.replace('...', ' __dotdotdot__ '), macros
@@ -240,14 +259,21 @@ class Parser(object):
                 ctn.discard(name)
         typenames += sorted(ctn)
         #
-        csourcelines = ['typedef int %s;' % typename for typename in typenames]
-        csourcelines.append('typedef int __dotdotdot__;')
+        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+        for typename in typenames:
+            csourcelines.append('typedef int %s;' % typename)
+        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+                            ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
         csourcelines.append(csource)
-        csource = '\n'.join(csourcelines)
+        fullcsource = '\n'.join(csourcelines)
         if lock is not None:
             lock.acquire()     # pycparser is not thread-safe...
         try:
-            ast = _get_parser().parse(csource)
+            ast = _get_parser().parse(fullcsource)
         except pycparser.c_parser.ParseError as e:
             self.convert_pycparser_error(e, csource)
         finally:
@@ -257,17 +283,17 @@ class Parser(object):
         return ast, macros, csource
 
     def _convert_pycparser_error(self, e, csource):
-        # xxx look for ":NUM:" at the start of str(e) and try to interpret
-        # it as a line number
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+        # and interpret that as a line number.  This will not work if
+        # the user gives explicit ``# NUM "FILE"`` directives.
         line = None
         msg = str(e)
-        if msg.startswith(':') and ':' in msg[1:]:
-            linenum = msg[1:msg.find(':',1)]
-            if linenum.isdigit():
-                linenum = int(linenum, 10)
-                csourcelines = csource.splitlines()
-                if 1 <= linenum <= len(csourcelines):
-                    line = csourcelines[linenum-1]
+        match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
+        if match:
+            linenum = int(match.group(1), 10)
+            csourcelines = csource.splitlines()
+            if 1 <= linenum <= len(csourcelines):
+                line = csourcelines[linenum-1]
         return line
 
     def convert_pycparser_error(self, e, csource):
@@ -278,7 +304,7 @@ class Parser(object):
             msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
         else:
             msg = 'parse error\n%s' % (msg,)
-        raise api.CDefError(msg)
+        raise CDefError(msg)
 
     def parse(self, csource, override=False, packed=False, dllexport=False):
         prev_options = self._options
@@ -300,33 +326,45 @@ class Parser(object):
         for decl in iterator:
             if decl.name == '__dotdotdot__':
                 break
+        else:
+            assert 0
+        current_decl = None
         #
         try:
-            self._inside_extern_python = False
+            self._inside_extern_python = '__cffi_extern_python_stop'
             for decl in iterator:
+                current_decl = decl
                 if isinstance(decl, pycparser.c_ast.Decl):
                     self._parse_decl(decl)
                 elif isinstance(decl, pycparser.c_ast.Typedef):
                     if not decl.name:
-                        raise api.CDefError("typedef does not declare any name",
-                                            decl)
+                        raise CDefError("typedef does not declare any name",
+                                        decl)
                     quals = 0
-                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
-                            and decl.type.type.names[-1] == '__dotdotdot__'):
+                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
+                            decl.type.type.names[-1].startswith('__dotdotdot')):
                         realtype = self._get_unknown_type(decl)
                     elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
                           isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
                           isinstance(decl.type.type.type,
                                      pycparser.c_ast.IdentifierType) and
-                          decl.type.type.type.names == ['__dotdotdot__']):
-                        realtype = model.unknown_ptr_type(decl.name)
+                          decl.type.type.type.names[-1].startswith('__dotdotdot')):
+                        realtype = self._get_unknown_ptr_type(decl)
                     else:
                         realtype, quals = self._get_type_and_quals(
-                            decl.type, name=decl.name)
+                            decl.type, name=decl.name, partial_length_ok=True)
                     self._declare('typedef ' + decl.name, realtype, quals=quals)
+                elif decl.__class__.__name__ == 'Pragma':
+                    pass    # skip pragma, only in pycparser 2.15
                 else:
-                    raise api.CDefError("unrecognized construct", decl)
-        except api.FFIError as e:
+                    raise CDefError("unexpected <%s>: this construct is valid "
+                                    "C but not valid in cdef()" %
+                                    decl.__class__.__name__, decl)
+        except CDefError as e:
+            if len(e.args) == 1:
+                e.args = e.args + (current_decl,)
+            raise
+        except FFIError as e:
             msg = self._convert_pycparser_error(e, csource)
             if msg:
                 e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
@@ -336,7 +374,7 @@ class Parser(object):
         if key in self._int_constants:
             if self._int_constants[key] == val:
                 return     # ignore identical double declarations
-            raise api.FFIError(
+            raise FFIError(
                 "multiple declarations of constant: %s" % (key,))
         self._int_constants[key] = val
 
@@ -363,7 +401,7 @@ class Parser(object):
             elif value == '...':
                 self._declare('macro ' + key, value)
             else:
-                raise api.CDefError(
+                raise CDefError(
                     'only supports one of the following syntax:\n'
                     '  #define %s ...     (literally dot-dot-dot)\n'
                     '  #define %s NUMBER  (with NUMBER an integer'
@@ -376,8 +414,10 @@ class Parser(object):
         tp = self._get_type_pointer(tp, quals)
         if self._options.get('dllexport'):
             tag = 'dllexport_python '
-        elif self._inside_extern_python:
+        elif self._inside_extern_python == '__cffi_extern_python_start':
             tag = 'extern_python '
+        elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
+            tag = 'extern_python_plus_c '
         else:
             tag = 'function '
         self._declare(tag + decl.name, tp)
@@ -396,8 +436,8 @@ class Parser(object):
             elif isinstance(node, pycparser.c_ast.Enum):
                 self._get_struct_union_enum_type('enum', node)
             elif not decl.name:
-                raise api.CDefError("construct does not declare any variable",
-                                    decl)
+                raise CDefError("construct does not declare any variable",
+                                decl)
             #
             if decl.name:
                 tp, quals = self._get_type_and_quals(node,
@@ -421,12 +461,10 @@ class Parser(object):
                     # hack: `extern "Python"` in the C source is replaced
                     # with "void __cffi_extern_python_start;" and
                     # "void __cffi_extern_python_stop;"
-                    self._inside_extern_python = not self._inside_extern_python
-                    assert self._inside_extern_python == (
-                        decl.name == '__cffi_extern_python_start')
+                    self._inside_extern_python = decl.name
                 else:
-                    if self._inside_extern_python:
-                        raise api.CDefError(
+                    if self._inside_extern_python !='__cffi_extern_python_stop':
+                        raise CDefError(
                             "cannot declare constants or "
                             "variables with 'extern \"Python\"'")
                     if (quals & model.Q_CONST) and not tp.is_array_type:
@@ -442,7 +480,7 @@ class Parser(object):
         assert not macros
         exprnode = ast.ext[-1].type.args.params[0]
         if isinstance(exprnode, pycparser.c_ast.ID):
-            raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
+            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
         return self._get_type_and_quals(exprnode.type)
 
     def _declare(self, name, obj, included=False, quals=0):
@@ -451,7 +489,7 @@ class Parser(object):
             if prevobj is obj and prevquals == quals:
                 return
             if not self._options.get('override'):
-                raise api.FFIError(
+                raise FFIError(
                     "multiple declarations of %s (for interactive usage, "
                     "try cdef(xx, override=True))" % (name,))
         assert '__dotdotdot__' not in name.split()
@@ -539,7 +577,7 @@ class Parser(object):
                 if ident == 'void':
                     return model.void_type, quals
                 if ident == '__dotdotdot__':
-                    raise api.FFIError(':%d: bad usage of "..."' %
+                    raise FFIError(':%d: bad usage of "..."' %
                             typenode.coord.line)
                 tp0, quals0 = resolve_common_type(self, ident)
                 return tp0, (quals | quals0)
@@ -571,14 +609,14 @@ class Parser(object):
             return self._get_struct_union_enum_type('union', typenode, name,
                                                     nested=True), 0
         #
-        raise api.FFIError(":%d: bad or unsupported type declaration" %
+        raise FFIError(":%d: bad or unsupported type declaration" %
                 typenode.coord.line)
 
     def _parse_function_type(self, typenode, funcname=None):
         params = list(getattr(typenode.args, 'params', []))
         for i, arg in enumerate(params):
             if not hasattr(arg, 'type'):
-                raise api.CDefError("%s arg %d: unknown type '%s'"
+                raise CDefError("%s arg %d: unknown type '%s'"
                     " (if you meant to use the old C syntax of giving"
                     " untyped arguments, it is not supported)"
                     % (funcname or 'in expression', i + 1,
@@ -592,7 +630,7 @@ class Parser(object):
         if ellipsis:
             params.pop()
             if not params:
-                raise api.CDefError(
+                raise CDefError(
                     "%s: a function with only '(...)' as argument"
                     " is not correct C" % (funcname or 'in expression'))
         args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
@@ -693,7 +731,7 @@ class Parser(object):
             return tp
         #
         if tp.fldnames is not None:
-            raise api.CDefError("duplicate declaration of struct %s" % name)
+            raise CDefError("duplicate declaration of struct %s" % name)
         fldnames = []
         fldtypes = []
         fldbitsize = []
@@ -737,7 +775,7 @@ class Parser(object):
 
     def _make_partial(self, tp, nested):
         if not isinstance(tp, model.StructOrUnion):
-            raise api.CDefError("%s cannot be partial" % (tp,))
+            raise CDefError("%s cannot be partial" % (tp,))
         if not tp.has_c_name() and not nested:
             raise NotImplementedError("%s is partial but has no C name" %(tp,))
         tp.partial = True
@@ -757,7 +795,7 @@ class Parser(object):
                     len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
                 return ord(s[-2])
             else:
-                raise api.CDefError("invalid constant %r" % (s,))
+                raise CDefError("invalid constant %r" % (s,))
         #
         if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                 exprnode.op == '+'):
@@ -771,14 +809,27 @@ class Parser(object):
                 exprnode.name in self._int_constants):
             return self._int_constants[exprnode.name]
         #
-        if partial_length_ok:
-            if (isinstance(exprnode, pycparser.c_ast.ID) and
+        if (isinstance(exprnode, pycparser.c_ast.ID) and
                     exprnode.name == '__dotdotdotarray__'):
+            if partial_length_ok:
                 self._partial_length = True
                 return '...'
+            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
+                           "the actual array length in this context"
+                           % exprnode.coord.line)
+        #
+        if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
+                exprnode.op == '+'):
+            return (self._parse_constant(exprnode.left) +
+                    self._parse_constant(exprnode.right))
         #
-        raise api.FFIError(":%d: unsupported expression: expected a "
-                           "simple numeric constant" % exprnode.coord.line)
+        if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and
+                exprnode.op == '-'):
+            return (self._parse_constant(exprnode.left) -
+                    self._parse_constant(exprnode.right))
+        #
+        raise FFIError(":%d: unsupported expression: expected a "
+                       "simple numeric constant" % exprnode.coord.line)
 
     def _build_enum_type(self, explicit_name, decls):
         if decls is not None:
@@ -816,24 +867,25 @@ class Parser(object):
 
     def _get_unknown_type(self, decl):
         typenames = decl.type.type.names
-        assert typenames[-1] == '__dotdotdot__'
-        if len(typenames) == 1:
+        if typenames == ['__dotdotdot__']:
             return model.unknown_type(decl.name)
 
-        if (typenames[:-1] == ['float'] or
-            typenames[:-1] == ['double']):
-            # not for 'long double' so far
-            result = model.UnknownFloatType(decl.name)
-        else:
-            for t in typenames[:-1]:
-                if t not in ['int', 'short', 'long', 'signed',
-                             'unsigned', 'char']:
-                    raise api.FFIError(':%d: bad usage of "..."' %
-                                       decl.coord.line)
-            result = model.UnknownIntegerType(decl.name)
-
-        if self._uses_new_feature is None:
-            self._uses_new_feature = "'typedef %s... %s'" % (
-                ' '.join(typenames[:-1]), decl.name)
-
-        return result
+        if typenames == ['__dotdotdotint__']:
+            if self._uses_new_feature is None:
+                self._uses_new_feature = "'typedef int... %s'" % decl.name
+            return model.UnknownIntegerType(decl.name)
+
+        if typenames == ['__dotdotdotfloat__']:
+            # note: not for 'long double' so far
+            if self._uses_new_feature is None:
+                self._uses_new_feature = "'typedef float... %s'" % decl.name
+            return model.UnknownFloatType(decl.name)
+
+        raise FFIError(':%d: unsupported usage of "..." in typedef'
+                       % decl.coord.line)
+
+    def _get_unknown_ptr_type(self, decl):
+        if decl.type.type.type.names == ['__dotdotdot__']:
+            return model.unknown_ptr_type(decl.name)
+        raise FFIError(':%d: unsupported usage of "..." in typedef'
+                       % decl.coord.line)
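
Taken together, the parser changes accept several new cdef() constructs: extern "Python+C" callbacks, "int ..."/"float ..." typedefs whose concrete C type is resolved by the compiler, and simple +/- arithmetic in constant expressions, with parse errors now reported against the "<cdef source string>" pseudo-file. A sketch with made-up names (API/out-of-line mode):

import cffi

ffibuilder = cffi.FFI()
ffibuilder.cdef("""
    typedef int... ino_t;                  /* integer of unknown size/signedness */
    typedef double... real_t;              /* "float..."/"double..." variant */
    extern "Python+C" int on_event(int);   /* callback also callable from C code */
    struct packet { unsigned char payload[512 + 16]; };   /* '+' now allowed */
""")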

+ 23 - 0
desktop/core/ext-py/cffi-1.11.5/cffi/error.py

@@ -0,0 +1,23 @@
+
+class FFIError(Exception):
+    pass
+
+class CDefError(Exception):
+    def __str__(self):
+        try:
+            current_decl = self.args[1]
+            filename = current_decl.coord.file
+            linenum = current_decl.coord.line
+            prefix = '%s:%d: ' % (filename, linenum)
+        except (AttributeError, TypeError, IndexError):
+            prefix = ''
+        return '%s%s' % (prefix, self.args[0])
+
+class VerificationError(Exception):
+    """ An error raised when verification fails
+    """
+
+class VerificationMissing(Exception):
+    """ An error raised when incomplete structures are passed into
+    cdef, but no verification has been done
+    """
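
cffi/error.py is new in this layout: FFIError, CDefError, VerificationError and VerificationMissing now live in one module instead of being spread over cffi.api and cffi.ffiplatform, and CDefError.__str__() can point at the offending declaration. Sketch of catching them from user code:

import cffi
from cffi.error import CDefError

ffi = cffi.FFI()
try:
    ffi.cdef("this is not valid C;")
except CDefError as e:
    print("cdef() rejected the source:", e)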

+ 19 - 13
desktop/core/ext-py/cffi-1.5.2/cffi/ffiplatform.py → desktop/core/ext-py/cffi-1.11.5/cffi/ffiplatform.py

@@ -1,32 +1,25 @@
 import sys, os
-
-
-class VerificationError(Exception):
-    """ An error raised when verification fails
-    """
-
-class VerificationMissing(Exception):
-    """ An error raised when incomplete structures are passed into
-    cdef, but no verification has been done
-    """
+from .error import VerificationError
 
 
 LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
                       'extra_objects', 'depends']
 
 def get_extension(srcfilename, modname, sources=(), **kwds):
+    _hack_at_distutils()
     from distutils.core import Extension
     allsources = [srcfilename]
     for src in sources:
         allsources.append(os.path.normpath(src))
     return Extension(name=modname, sources=allsources, **kwds)
 
-def compile(tmpdir, ext, compiler_verbose=0):
+def compile(tmpdir, ext, compiler_verbose=0, debug=None):
     """Compile a C extension module using distutils."""
 
+    _hack_at_distutils()
     saved_environ = os.environ.copy()
     try:
-        outputfilename = _build(tmpdir, ext, compiler_verbose)
+        outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
         outputfilename = os.path.abspath(outputfilename)
     finally:
         # workaround for a distutils bugs where some env vars can
@@ -36,7 +29,7 @@ def compile(tmpdir, ext, compiler_verbose=0):
                 os.environ[key] = value
     return outputfilename
 
-def _build(tmpdir, ext, compiler_verbose=0):
+def _build(tmpdir, ext, compiler_verbose=0, debug=None):
     # XXX compact but horrible :-(
     from distutils.core import Distribution
     import distutils.errors, distutils.log
@@ -44,6 +37,9 @@ def _build(tmpdir, ext, compiler_verbose=0):
     dist = Distribution({'ext_modules': [ext]})
     dist.parse_config_files()
     options = dist.get_option_dict('build_ext')
+    if debug is None:
+        debug = sys.flags.debug
+    options['debug'] = ('ffiplatform', debug)
     options['force'] = ('ffiplatform', True)
     options['build_lib'] = ('ffiplatform', tmpdir)
     options['build_temp'] = ('ffiplatform', tmpdir)
@@ -119,3 +115,13 @@ def flatten(x):
     f = cStringIO.StringIO()
     _flatten(x, f)
     return f.getvalue()
+
+def _hack_at_distutils():
+    # Windows-only workaround for some configurations: see
+    # https://bugs.python.org/issue23246 (Python 2.7 with 
+    # a specific MS compiler suite download)
+    if sys.platform == "win32":
+        try:
+            import setuptools    # for side-effects, patches distutils
+        except ImportError:
+            pass
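
compile() now forwards a debug flag down to distutils' build_ext (defaulting to sys.flags.debug) and, on Windows, imports setuptools first as a workaround for https://bugs.python.org/issue23246. From the public API this surfaces as the debug argument of FFI.compile(); a sketch with a made-up module name:

import cffi

ffibuilder = cffi.FFI()
ffibuilder.cdef("int add(int, int);")
ffibuilder.set_source("_example_debug",
                      "static int add(int a, int b) { return a + b; }")

if __name__ == "__main__":
    # verbose/debug are passed through ffiplatform.compile() to _build()
    ffibuilder.compile(verbose=True, debug=True)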

+ 0 - 0
desktop/core/ext-py/cffi-1.5.2/cffi/lock.py → desktop/core/ext-py/cffi-1.11.5/cffi/lock.py


+ 49 - 40
desktop/core/ext-py/cffi-1.5.2/cffi/model.py → desktop/core/ext-py/cffi-1.11.5/cffi/model.py

@@ -1,8 +1,8 @@
-import types, sys
+import types
 import weakref
 
 from .lock import allocate_lock
-
+from .error import CDefError, VerificationError, VerificationMissing
 
 # type qualifiers
 Q_CONST    = 0x01
@@ -39,7 +39,6 @@ class BaseTypeByIdentity(object):
         replace_with = qualify(quals, replace_with)
         result = result.replace('&', replace_with)
         if '$' in result:
-            from .ffiplatform import VerificationError
             raise VerificationError(
                 "cannot generate '%s' in %s: unknown type name"
                 % (self._get_c_name(), context))
@@ -96,7 +95,8 @@ void_type = VoidType()
 
 
 class BasePrimitiveType(BaseType):
-    pass
+    def is_complex_type(self):
+        return False
 
 
 class PrimitiveType(BasePrimitiveType):
@@ -117,9 +117,13 @@ class PrimitiveType(BasePrimitiveType):
         'float':              'f',
         'double':             'f',
         'long double':        'f',
+        'float _Complex':     'j',
+        'double _Complex':    'j',
         '_Bool':              'i',
         # the following types are not primitive in the C sense
         'wchar_t':            'c',
+        'char16_t':           'c',
+        'char32_t':           'c',
         'int8_t':             'i',
         'uint8_t':            'i',
         'int16_t':            'i',
@@ -164,6 +168,8 @@ class PrimitiveType(BasePrimitiveType):
         return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
     def is_float_type(self):
         return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
+    def is_complex_type(self):
+        return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
 
     def build_backend_type(self, ffi, finishlist):
         return global_cache(self, ffi, 'new_primitive_type', self.name)
@@ -223,9 +229,8 @@ class RawFunctionType(BaseFunctionType):
     is_raw_function = True
 
     def build_backend_type(self, ffi, finishlist):
-        from . import api
-        raise api.CDefError("cannot render the type %r: it is a function "
-                            "type, not a pointer-to-function type" % (self,))
+        raise CDefError("cannot render the type %r: it is a function "
+                        "type, not a pointer-to-function type" % (self,))
 
     def as_function_pointer(self):
         return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
@@ -307,9 +312,8 @@ class ArrayType(BaseType):
 
     def build_backend_type(self, ffi, finishlist):
         if self.length == '...':
-            from . import api
-            raise api.CDefError("cannot render the type %r: unknown length" %
-                                (self,))
+            raise CDefError("cannot render the type %r: unknown length" %
+                            (self,))
         self.item.get_cached_btype(ffi, finishlist)   # force the item BType
         BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
         return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
@@ -348,21 +352,20 @@ class StructOrUnion(StructOrUnionOrEnum):
         self.fldquals = fldquals
         self.build_c_name_with_marker()
 
-    def has_anonymous_struct_fields(self):
-        if self.fldtypes is None:
-            return False
-        for name, type in zip(self.fldnames, self.fldtypes):
-            if name == '' and isinstance(type, StructOrUnion):
-                return True
-        return False
+    def anonymous_struct_fields(self):
+        if self.fldtypes is not None:
+            for name, type in zip(self.fldnames, self.fldtypes):
+                if name == '' and isinstance(type, StructOrUnion):
+                    yield type
 
-    def enumfields(self):
+    def enumfields(self, expand_anonymous_struct_union=True):
         fldquals = self.fldquals
         if fldquals is None:
             fldquals = (0,) * len(self.fldnames)
         for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
                                               self.fldbitsize, fldquals):
-            if name == '' and isinstance(type, StructOrUnion):
+            if (name == '' and isinstance(type, StructOrUnion)
+                    and expand_anonymous_struct_union):
                 # nested anonymous struct/union
                 for result in type.enumfields():
                     yield result
@@ -455,13 +458,11 @@ class StructOrUnion(StructOrUnionOrEnum):
         self.completed = 2
 
     def _verification_error(self, msg):
-        from .ffiplatform import VerificationError
         raise VerificationError(msg)
 
     def check_not_partial(self):
         if self.partial and self.fixedlayout is None:
-            from . import ffiplatform
-            raise ffiplatform.VerificationMissing(self._get_c_name())
+            raise VerificationMissing(self._get_c_name())
 
     def build_backend_type(self, ffi, finishlist):
         self.check_not_partial()
@@ -499,8 +500,7 @@ class EnumType(StructOrUnionOrEnum):
 
     def check_not_partial(self):
         if self.partial and not self.partial_resolved:
-            from . import ffiplatform
-            raise ffiplatform.VerificationMissing(self._get_c_name())
+            raise VerificationMissing(self._get_c_name())
 
     def build_backend_type(self, ffi, finishlist):
         self.check_not_partial()
@@ -514,15 +514,20 @@ class EnumType(StructOrUnionOrEnum):
         if self.baseinttype is not None:
             return self.baseinttype.get_cached_btype(ffi, finishlist)
         #
-        from . import api
         if self.enumvalues:
             smallest_value = min(self.enumvalues)
             largest_value = max(self.enumvalues)
         else:
             import warnings
-            warnings.warn("%r has no values explicitly defined; next version "
-                          "will refuse to guess which integer type it is "
-                          "meant to be (unsigned/signed, int/long)"
+            try:
+                # XXX!  The goal is to ensure that the warnings.warn()
+                # will not suppress the warning.  We want to get it
+                # several times if we reach this point several times.
+                __warningregistry__.clear()
+            except NameError:
+                pass
+            warnings.warn("%r has no values explicitly defined; "
+                          "guessing that it is equivalent to 'unsigned int'"
                           % self._get_c_name())
             smallest_value = largest_value = 0
         if smallest_value < 0:   # needs a signed type
@@ -543,8 +548,8 @@ class EnumType(StructOrUnionOrEnum):
         if (smallest_value >= ((-1) << (8*size2-1)) and
             largest_value < (1 << (8*size2-sign))):
             return btype2
-        raise api.CDefError("%s values don't all fit into either 'long' "
-                            "or 'unsigned long'" % self._get_c_name())
+        raise CDefError("%s values don't all fit into either 'long' "
+                        "or 'unsigned long'" % self._get_c_name())
 
 def unknown_type(name, structname=None):
     if structname is None:
@@ -562,22 +567,26 @@ def unknown_ptr_type(name, structname=None):
 
 
 global_lock = allocate_lock()
+_typecache_cffi_backend = weakref.WeakValueDictionary()
+
+def get_typecache(backend):
+    # returns _typecache_cffi_backend if backend is the _cffi_backend
+    # module, or type(backend).__typecache if backend is an instance of
+    # CTypesBackend (or some FakeBackend class during tests)
+    if isinstance(backend, types.ModuleType):
+        return _typecache_cffi_backend
+    with global_lock:
+        if not hasattr(type(backend), '__typecache'):
+            type(backend).__typecache = weakref.WeakValueDictionary()
+        return type(backend).__typecache
 
 def global_cache(srctype, ffi, funcname, *args, **kwds):
     key = kwds.pop('key', (funcname, args))
     assert not kwds
     try:
-        return ffi._backend.__typecache[key]
+        return ffi._typecache[key]
     except KeyError:
         pass
-    except AttributeError:
-        # initialize the __typecache attribute, either at the module level
-        # if ffi._backend is a module, or at the class level if ffi._backend
-        # is some instance.
-        if isinstance(ffi._backend, types.ModuleType):
-            ffi._backend.__typecache = weakref.WeakValueDictionary()
-        else:
-            type(ffi._backend).__typecache = weakref.WeakValueDictionary()
     try:
         res = getattr(ffi._backend, funcname)(*args)
     except NotImplementedError as e:
@@ -585,7 +594,7 @@ def global_cache(srctype, ffi, funcname, *args, **kwds):
     # note that setdefault() on WeakValueDictionary is not atomic
     # and contains a rare bug (http://bugs.python.org/issue19542);
     # we have to use a lock and do it ourselves
-    cache = ffi._backend.__typecache
+    cache = ffi._typecache
     with global_lock:
         res1 = cache.get(key)
         if res1 is None:
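
The per-backend type cache replaces the old __typecache attribute trick: get_typecache() hands out one module-level WeakValueDictionary when the backend is the compiled _cffi_backend module (or one per backend class otherwise), so FFI instances built on the same backend share their parsed ctype objects. Sketch:

import cffi

ffi1, ffi2 = cffi.FFI(), cffi.FFI()
# both instances use the same backend, hence the same type cache
print(ffi1.typeof("long[4]") is ffi2.typeof("long[4]"))   # expected: True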

+ 5 - 1
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/parse_c_type.h → desktop/core/ext-py/cffi-1.11.5/cffi/parse_c_type.h

@@ -79,8 +79,12 @@ typedef void *_cffi_opcode_t;
 #define _CFFI_PRIM_UINT_FAST64  45
 #define _CFFI_PRIM_INTMAX       46
 #define _CFFI_PRIM_UINTMAX      47
+#define _CFFI_PRIM_FLOATCOMPLEX 48
+#define _CFFI_PRIM_DOUBLECOMPLEX 49
+#define _CFFI_PRIM_CHAR16       50
+#define _CFFI_PRIM_CHAR32       51
 
-#define _CFFI__NUM_PRIM         48
+#define _CFFI__NUM_PRIM         52
 #define _CFFI__UNKNOWN_PRIM           (-1)
 #define _CFFI__UNKNOWN_FLOAT_PRIM     (-2)
 #define _CFFI__UNKNOWN_LONG_DOUBLE    (-3)

+ 151 - 78
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/recompiler.py → desktop/core/ext-py/cffi-1.11.5/cffi/recompiler.py

@@ -1,9 +1,11 @@
 import os, sys, io
 from . import ffiplatform, model
+from .error import VerificationError
 from .cffi_opcode import *
 
-VERSION = "0x2601"
-VERSION_EMBEDDED = "0x2701"
+VERSION_BASE = 0x2601
+VERSION_EMBEDDED = 0x2701
+VERSION_CHAR16CHAR32 = 0x2801
 
 
 class GlobalExpr:
@@ -125,6 +127,10 @@ class Recompiler:
         self.ffi = ffi
         self.module_name = module_name
         self.target_is_python = target_is_python
+        self._version = VERSION_BASE
+
+    def needs_version(self, ver):
+        self._version = max(self._version, ver)
 
     def collect_type_table(self):
         self._typesdict = {}
@@ -211,7 +217,7 @@ class Recompiler:
                 method = getattr(self, '_generate_cpy_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in recompile(): %r" % name)
             try:
                 self._current_quals = quals
@@ -275,6 +281,8 @@ class Recompiler:
     def write_c_source_to_f(self, f, preamble):
         self._f = f
         prnt = self._prnt
+        if self.ffi._embedding is not None:
+            prnt('#define _CFFI_USE_EMBEDDING')
         #
         # first the '#include' (actually done by inlining the file's content)
         lines = self._rel_readlines('_cffi_include.h')
@@ -287,8 +295,9 @@ class Recompiler:
         base_module_name = self.module_name.split('.')[-1]
         if self.ffi._embedding is not None:
             prnt('#define _CFFI_MODULE_NAME  "%s"' % (self.module_name,))
-            prnt('#define _CFFI_PYTHON_STARTUP_CODE  %s' %
-                 (self._string_literal(self.ffi._embedding),))
+            prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
+            self._print_string_literal_in_array(self.ffi._embedding)
+            prnt('0 };')
             prnt('#ifdef PYPY_VERSION')
             prnt('# define _CFFI_PYTHON_STARTUP_FUNC  _cffi_pypyinit_%s' % (
                 base_module_name,))
@@ -300,10 +309,10 @@ class Recompiler:
                 base_module_name,))
             prnt('#endif')
             lines = self._rel_readlines('_embedding.h')
+            i = lines.index('#include "_cffi_errors.h"\n')
+            lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
             prnt(''.join(lines))
-            version = VERSION_EMBEDDED
-        else:
-            version = VERSION
+            self.needs_version(VERSION_EMBEDDED)
         #
         # then paste the C source given by the user, verbatim.
         prnt('/************************************************************/')
@@ -352,12 +361,12 @@ class Recompiler:
                     included_module_name, included_source = (
                         ffi_to_include._assigned_source[:2])
                 except AttributeError:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "ffi object %r includes %r, but the latter has not "
                         "been prepared with set_source()" % (
                             self.ffi, ffi_to_include,))
                 if included_source is None:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "not implemented yet: ffi.include() of a Python-based "
                         "ffi inside a C-based ffi")
                 prnt('  "%s",' % (included_module_name,))
@@ -389,6 +398,10 @@ class Recompiler:
         prnt()
         #
         # the init function
+        prnt('#ifdef __GNUC__')
+        prnt('#  pragma GCC visibility push(default)  /* for -fvisibility= */')
+        prnt('#endif')
+        prnt()
         prnt('#ifdef PYPY_VERSION')
         prnt('PyMODINIT_FUNC')
         prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
@@ -398,8 +411,11 @@ class Recompiler:
             prnt('        _cffi_call_python_org = '
                  '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
             prnt('    }')
-        prnt('    p[0] = (const void *)%s;' % version)
+        prnt('    p[0] = (const void *)0x%x;' % self._version)
         prnt('    p[1] = &_cffi_type_context;')
+        prnt('#if PY_MAJOR_VERSION >= 3')
+        prnt('    return NULL;')
+        prnt('#endif')
         prnt('}')
         # on Windows, distutils insists on putting init_cffi_xyz in
         # 'export_symbols', so instead of fighting it, just give up and
@@ -416,17 +432,22 @@ class Recompiler:
         prnt('PyMODINIT_FUNC')
         prnt('PyInit_%s(void)' % (base_module_name,))
         prnt('{')
-        prnt('  return _cffi_init("%s", %s, &_cffi_type_context);' % (
-            self.module_name, version))
+        prnt('  return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+            self.module_name, self._version))
         prnt('}')
         prnt('#else')
         prnt('PyMODINIT_FUNC')
         prnt('init%s(void)' % (base_module_name,))
         prnt('{')
-        prnt('  _cffi_init("%s", %s, &_cffi_type_context);' % (
-            self.module_name, version))
+        prnt('  _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+            self.module_name, self._version))
         prnt('}')
         prnt('#endif')
+        prnt()
+        prnt('#ifdef __GNUC__')
+        prnt('#  pragma GCC visibility pop')
+        prnt('#endif')
+        self._version = None
 
     def _to_py(self, x):
         if isinstance(x, str):
@@ -454,18 +475,19 @@ class Recompiler:
                 included_module_name, included_source = (
                     ffi_to_include._assigned_source[:2])
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "ffi object %r includes %r, but the latter has not "
                     "been prepared with set_source()" % (
                         self.ffi, ffi_to_include,))
             if included_source is not None:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented yet: ffi.include() of a C-based "
                     "ffi inside a Python-based ffi")
             prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
         prnt()
         prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
-        prnt("    _version = %s," % (VERSION,))
+        prnt("    _version = 0x%x," % (self._version,))
+        self._version = None
         #
         # the '_types' keyword argument
         self.cffi_types = tuple(self.cffi_types)    # don't change any more
@@ -495,7 +517,7 @@ class Recompiler:
 
     def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
         extraarg = ''
-        if isinstance(tp, model.BasePrimitiveType):
+        if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
             if tp.is_integer_type() and tp.name != '_Bool':
                 converter = '_cffi_to_c_int'
                 extraarg = ', %s' % tp.name
@@ -504,8 +526,11 @@ class Recompiler:
                 # double' here, and _cffi_to_c_double would loose precision
                 converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
             else:
-                converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
+                cname = tp.get_c_name('')
+                converter = '(%s)_cffi_to_c_%s' % (cname,
                                                    tp.name.replace(' ', '_'))
+                if cname in ('char16_t', 'char32_t'):
+                    self.needs_version(VERSION_CHAR16CHAR32)
             errvalue = '-1'
         #
         elif isinstance(tp, model.PointerType):
@@ -513,8 +538,10 @@ class Recompiler:
                                                     tovar, errcode)
             return
         #
-        elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
-            # a struct (not a struct pointer) as a function argument
+        elif (isinstance(tp, model.StructOrUnionOrEnum) or
+              isinstance(tp, model.BasePrimitiveType)):
+            # a struct (not a struct pointer) as a function argument;
+            # or, a complex (the same code works)
             self._prnt('  if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
                       % (tovar, self._gettypenum(tp), fromvar))
             self._prnt('    %s;' % errcode)
@@ -555,12 +582,15 @@ class Recompiler:
 
     def _convert_expr_from_c(self, tp, var, context):
         if isinstance(tp, model.BasePrimitiveType):
-            if tp.is_integer_type():
+            if tp.is_integer_type() and tp.name != '_Bool':
                 return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
             elif isinstance(tp, model.UnknownFloatType):
                 return '_cffi_from_c_double(%s)' % (var,)
-            elif tp.name != 'long double':
-                return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
+            elif tp.name != 'long double' and not tp.is_complex_type():
+                cname = tp.name.replace(' ', '_')
+                if cname in ('char16_t', 'char32_t'):
+                    self.needs_version(VERSION_CHAR16CHAR32)
+                return '_cffi_from_c_%s(%s)' % (cname, var)
             else:
                 return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
                     var, self._gettypenum(tp))
@@ -570,7 +600,7 @@ class Recompiler:
         elif isinstance(tp, model.ArrayType):
             return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
                 var, self._gettypenum(model.PointerType(tp.item)))
-        elif isinstance(tp, model.StructType):
+        elif isinstance(tp, model.StructOrUnion):
             if tp.fldnames is None:
                 raise TypeError("'%s' is used as %s, but is opaque" % (
                     tp._get_c_name(), context))
@@ -585,8 +615,11 @@ class Recompiler:
     # ----------
     # typedefs
 
+    def _typedef_type(self, tp, name):
+        return self._global_type(tp, "(*(%s *)0)" % (name,))
+
     def _generate_cpy_typedef_collecttype(self, tp, name):
-        self._do_collect_type(tp)
+        self._do_collect_type(self._typedef_type(tp, name))
 
     def _generate_cpy_typedef_decl(self, tp, name):
         pass
@@ -596,6 +629,7 @@ class Recompiler:
         self._lsts["typename"].append(TypenameExpr(name, type_index))
 
     def _generate_cpy_typedef_ctx(self, tp, name):
+        tp = self._typedef_type(tp, name)
         self._typedef_ctx(tp, name)
         if getattr(tp, "origin", None) == "unknown_type":
             self._struct_ctx(tp, tp.name, approxname=None)
@@ -683,13 +717,11 @@ class Recompiler:
             rng = range(len(tp.args))
             for i in rng:
                 prnt('  PyObject *arg%d;' % i)
-            prnt('  PyObject **aa;')
             prnt()
-            prnt('  aa = _cffi_unpack_args(args, %d, "%s");' % (len(rng), name))
-            prnt('  if (aa == NULL)')
+            prnt('  if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
+                name, len(rng), len(rng),
+                ', '.join(['&arg%d' % i for i in rng])))
             prnt('    return NULL;')
-            for i in rng:
-                prnt('  arg%d = aa[%d];' % (i, i))
         prnt()
         #
         for i, type in enumerate(tp.args):
@@ -721,21 +753,26 @@ class Recompiler:
         #
         # the PyPy version: need to replace struct/union arguments with
         # pointers, and if the result is a struct/union, insert a first
-        # arg that is a pointer to the result.
+        # arg that is a pointer to the result.  We also do that for
+        # complex args and return type.
+        def need_indirection(type):
+            return (isinstance(type, model.StructOrUnion) or
+                    (isinstance(type, model.PrimitiveType) and
+                     type.is_complex_type()))
         difference = False
         arguments = []
         call_arguments = []
         context = 'argument of %s' % name
         for i, type in enumerate(tp.args):
             indirection = ''
-            if isinstance(type, model.StructOrUnion):
+            if need_indirection(type):
                 indirection = '*'
                 difference = True
             arg = type.get_c_name(' %sx%d' % (indirection, i), context)
             arguments.append(arg)
             call_arguments.append('%sx%d' % (indirection, i))
         tp_result = tp.result
-        if isinstance(tp_result, model.StructOrUnion):
+        if need_indirection(tp_result):
             context = 'result of %s' % name
             arg = tp_result.get_c_name(' *result', context)
             arguments.insert(0, arg)
@@ -799,6 +836,10 @@ class Recompiler:
 
     def _struct_collecttype(self, tp):
         self._do_collect_type(tp)
+        if self.target_is_python:
+            # also requires nested anon struct/unions in ABI mode, recursively
+            for fldtype in tp.anonymous_struct_fields():
+                self._struct_collecttype(fldtype)
 
     def _struct_decl(self, tp, cname, approxname):
         if tp.fldtypes is None:
@@ -814,7 +855,7 @@ class Recompiler:
             try:
                 if ftype.is_integer_type() or fbitsize >= 0:
                     # accept all integers, but complain on float or double
-                    prnt("  (void)((p->%s) << 1);  /* check that '%s.%s' is "
+                    prnt("  (void)((p->%s) | 0);  /* check that '%s.%s' is "
                          "an integer */" % (fname, cname, fname))
                     continue
                 # only accept exactly the type declared, except that '[]'
@@ -827,7 +868,7 @@ class Recompiler:
                 prnt('  { %s = &p->%s; (void)tmp; }' % (
                     ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                     fname))
-            except ffiplatform.VerificationError as e:
+            except VerificationError as e:
                 prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
@@ -847,7 +888,7 @@ class Recompiler:
                  named_ptr not in self.ffi._parser._included_declarations)):
             if tp.fldtypes is None:
                 pass    # opaque
-            elif tp.partial or tp.has_anonymous_struct_fields():
+            elif tp.partial or any(tp.anonymous_struct_fields()):
                 pass    # field layout obtained silently from the C compiler
             else:
                 flags.append("_CFFI_F_CHECK_FIELDS")
@@ -859,9 +900,12 @@ class Recompiler:
         flags = '|'.join(flags) or '0'
         c_fields = []
         if reason_for_not_expanding is None:
-            enumfields = list(tp.enumfields())
+            expand_anonymous_struct_union = not self.target_is_python
+            enumfields = list(tp.enumfields(expand_anonymous_struct_union))
             for fldname, fldtype, fbitsize, fqual in enumfields:
                 fldtype = self._field_type(tp, fldname, fldtype)
+                self._check_not_opaque(fldtype,
+                                       "field '%s.%s'" % (tp.name, fldname))
                 # cname is None for _add_missing_struct_unions() only
                 op = OP_NOOP
                 if fbitsize >= 0:
@@ -911,6 +955,13 @@ class Recompiler:
                             first_field_index, c_fields))
         self._seen_struct_unions.add(tp)
 
+    def _check_not_opaque(self, tp, location):
+        while isinstance(tp, model.ArrayType):
+            tp = tp.item
+        if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
+            raise TypeError(
+                "%s is of an opaque type (not declared in cdef())" % location)
+
     def _add_missing_struct_unions(self):
         # not very nice, but some struct declarations might be missing
         # because they don't have any known C name.  Check that they are
@@ -981,7 +1032,7 @@ class Recompiler:
     def _generate_cpy_const(self, is_int, name, tp=None, category='const',
                             check_value=None):
         if (category, name) in self._seen_constants:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "duplicate declaration of %s '%s'" % (category, name))
         self._seen_constants.add((category, name))
         #
@@ -991,7 +1042,7 @@ class Recompiler:
             prnt('static int %s(unsigned long long *o)' % funcname)
             prnt('{')
             prnt('  int n = (%s) <= 0;' % (name,))
-            prnt('  *o = (unsigned long long)((%s) << 0);'
+            prnt('  *o = (unsigned long long)((%s) | 0);'
                  '  /* check that %s is an integer */' % (name, name))
             if check_value is not None:
                 if check_value > 0:
@@ -1080,7 +1131,7 @@ class Recompiler:
     def _generate_cpy_macro_ctx(self, tp, name):
         if tp == '...':
             if self.target_is_python:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "cannot use the syntax '...' in '#define %s ...' when "
                     "using the ABI mode" % (name,))
             check_value = None
@@ -1145,11 +1196,11 @@ class Recompiler:
     def _generate_cpy_extern_python_collecttype(self, tp, name):
         assert isinstance(tp, model.FunctionPtrType)
         self._do_collect_type(tp)
+    _generate_cpy_dllexport_python_collecttype = \
+      _generate_cpy_extern_python_plus_c_collecttype = \
+      _generate_cpy_extern_python_collecttype
 
-    def _generate_cpy_dllexport_python_collecttype(self, tp, name):
-        self._generate_cpy_extern_python_collecttype(tp, name)
-
-    def _generate_cpy_extern_python_decl(self, tp, name, dllexport=False):
+    def _extern_python_decl(self, tp, name, tag_and_space):
         prnt = self._prnt
         if isinstance(tp.result, model.VoidType):
             size_of_result = '0'
@@ -1158,7 +1209,7 @@ class Recompiler:
             size_of_result = '(int)sizeof(%s)' % (
                 tp.result.get_c_name('', context),)
         prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
-        prnt('  { "%s", %s };' % (name, size_of_result))
+        prnt('  { "%s.%s", %s };' % (self.module_name, name, size_of_result))
         prnt()
         #
         arguments = []
@@ -1184,11 +1235,7 @@ class Recompiler:
             size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
                 tp.result.get_c_name(''), size_of_a,
                 tp.result.get_c_name(''), size_of_a)
-        if dllexport:
-            tag = 'CFFI_DLLEXPORT'
-        else:
-            tag = 'static'
-        prnt('%s %s' % (tag, tp.result.get_c_name(name_and_arguments)))
+        prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
         prnt('{')
         prnt('  char a[%s];' % size_of_a)
         prnt('  char *p = a;')
@@ -1206,12 +1253,18 @@ class Recompiler:
         prnt()
         self._num_externpy += 1
 
+    def _generate_cpy_extern_python_decl(self, tp, name):
+        self._extern_python_decl(tp, name, 'static ')
+
     def _generate_cpy_dllexport_python_decl(self, tp, name):
-        self._generate_cpy_extern_python_decl(tp, name, dllexport=True)
+        self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
+
+    def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
+        self._extern_python_decl(tp, name, '')
 
     def _generate_cpy_extern_python_ctx(self, tp, name):
         if self.target_is_python:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "cannot use 'extern \"Python\"' in the ABI mode")
         if tp.ellipsis:
             raise NotImplementedError("a vararg function is extern \"Python\"")
@@ -1220,20 +1273,22 @@ class Recompiler:
         self._lsts["global"].append(
             GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))
 
-    def _generate_cpy_dllexport_python_ctx(self, tp, name):
-        self._generate_cpy_extern_python_ctx(tp, name)
-
-    def _string_literal(self, s):
-        def _char_repr(c):
-            # escape with a '\' the characters '\', '"' or (for trigraphs) '?'
-            if c in '\\"?': return '\\' + c
-            if ' ' <= c < '\x7F': return c
-            if c == '\n': return '\\n'
-            return '\\%03o' % ord(c)
-        lines = []
+    _generate_cpy_dllexport_python_ctx = \
+      _generate_cpy_extern_python_plus_c_ctx = \
+      _generate_cpy_extern_python_ctx
+
+    def _print_string_literal_in_array(self, s):
+        prnt = self._prnt
+        prnt('// # NB. this is not a string because of a size limit in MSVC')
         for line in s.splitlines(True):
-            lines.append('"%s"' % ''.join([_char_repr(c) for c in line]))
-        return ' \\\n'.join(lines)
+            prnt(('// ' + line).rstrip())
+            printed_line = ''
+            for c in line:
+                if len(printed_line) >= 76:
+                    prnt(printed_line)
+                    printed_line = ''
+                printed_line += '%d,' % (ord(c),)
+            prnt(printed_line)
 
     # ----------
     # emitting the opcodes for individual types
@@ -1247,7 +1302,7 @@ class Recompiler:
 
     def _emit_bytecode_UnknownIntegerType(self, tp, index):
         s = ('_cffi_prim_int(sizeof(%s), (\n'
-             '           ((%s)-1) << 0 /* check that %s is an integer type */\n'
+             '           ((%s)-1) | 0 /* check that %s is an integer type */\n'
              '         ) <= 0)' % (tp.name, tp.name, tp.name))
         self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
 
@@ -1291,7 +1346,7 @@ class Recompiler:
         if tp.length is None:
             self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
         elif tp.length == '...':
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "type %s badly placed: the '...' array length can only be "
                 "used on global arrays or on fields of structures" % (
                     str(tp).replace('/*...*/', '...'),))
@@ -1319,7 +1374,9 @@ else:
                 s = s.encode('ascii')
             super(NativeIO, self).write(s)
 
-def _make_c_or_py_source(ffi, module_name, preamble, target_file):
+def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
+    if verbose:
+        print("generating %s" % (target_file,))
     recompiler = Recompiler(ffi, module_name,
                             target_is_python=(preamble is None))
     recompiler.collect_type_table()
@@ -1331,6 +1388,8 @@ def _make_c_or_py_source(ffi, module_name, preamble, target_file):
         with open(target_file, 'r') as f1:
             if f1.read(len(output) + 1) != output:
                 raise IOError
+        if verbose:
+            print("(already up-to-date)")
         return False     # already up-to-date
     except IOError:
         tmp_file = '%s.~%d' % (target_file, os.getpid())
@@ -1343,12 +1402,14 @@ def _make_c_or_py_source(ffi, module_name, preamble, target_file):
             os.rename(tmp_file, target_file)
         return True
 
-def make_c_source(ffi, module_name, preamble, target_c_file):
+def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
     assert preamble is not None
-    return _make_c_or_py_source(ffi, module_name, preamble, target_c_file)
+    return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
+                                verbose)
 
-def make_py_source(ffi, module_name, target_py_file):
-    return _make_c_or_py_source(ffi, module_name, None, target_py_file)
+def make_py_source(ffi, module_name, target_py_file, verbose=False):
+    return _make_c_or_py_source(ffi, module_name, None, target_py_file,
+                                verbose)
 
 def _modname_to_file(outputdir, modname, extension):
     parts = modname.split('.')
@@ -1413,7 +1474,7 @@ def _patch_for_target(patchlist, target):
 
 def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
               c_file=None, source_extension='.c', extradir=None,
-              compiler_verbose=1, target=None, **kwds):
+              compiler_verbose=1, target=None, debug=None, **kwds):
     if not isinstance(module_name, str):
         module_name = module_name.encode('ascii')
     if ffi._windows_unicode:
@@ -1438,7 +1499,8 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
                 target = '*'
         #
         ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
-        updated = make_c_source(ffi, module_name, preamble, c_file)
+        updated = make_c_source(ffi, module_name, preamble, c_file,
+                                verbose=compiler_verbose)
         if call_c_compiler:
             patchlist = []
             cwd = os.getcwd()
@@ -1447,8 +1509,15 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
                     _patch_for_embedding(patchlist)
                 if target != '*':
                     _patch_for_target(patchlist, target)
+                if compiler_verbose:
+                    if tmpdir == '.':
+                        msg = 'the current directory is'
+                    else:
+                        msg = 'setting the current directory to'
+                    print('%s %r' % (msg, os.path.abspath(tmpdir)))
                 os.chdir(tmpdir)
-                outputfilename = ffiplatform.compile('.', ext, compiler_verbose)
+                outputfilename = ffiplatform.compile('.', ext,
+                                                     compiler_verbose, debug)
             finally:
                 os.chdir(cwd)
                 _unpatch_meths(patchlist)
@@ -1458,7 +1527,8 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
     else:
         if c_file is None:
             c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
-        updated = make_py_source(ffi, module_name, c_file)
+        updated = make_py_source(ffi, module_name, c_file,
+                                 verbose=compiler_verbose)
         if call_c_compiler:
             return c_file
         else:
@@ -1484,4 +1554,7 @@ def _verify(ffi, module_name, preamble, *args, **kwds):
     def typeof_disabled(*args, **kwds):
         raise NotImplementedError
     ffi._typeof = typeof_disabled
+    for name in dir(ffi):
+        if not name.startswith('_') and not hasattr(module.ffi, name):
+            setattr(ffi, name, NotImplemented)
     return module.lib
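
The verbose plumbing added above (make_c_source()/make_py_source() gain a verbose flag, recompile() gains debug=) surfaces through cffi's public FFI.compile() API. A minimal sketch, assuming cffi 1.11.5 and a working C compiler; the module name _verbose_example and the add() function are illustrative only:

    # sketch: verbose is forwarded down to make_c_source(), which now prints
    # "generating <file>" or "(already up-to-date)", and on to
    # ffiplatform.compile(), which also echoes the working directory.
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int add(int, int);")
    ffibuilder.set_source("_verbose_example",
                          "int add(int a, int b) { return a + b; }")

    if __name__ == "__main__":
        # recompile() additionally grew a debug= keyword (see the hunk above)
        ffibuilder.compile(verbose=True)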

+ 46 - 3
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/setuptools_ext.py → desktop/core/ext-py/cffi-1.11.5/cffi/setuptools_ext.py

@@ -1,4 +1,5 @@
 import os
+import sys
 
 try:
     basestring
@@ -69,16 +70,47 @@ def add_cffi_module(dist, mod_spec):
     else:
         _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
 
+def _set_py_limited_api(Extension, kwds):
+    """
+    Add py_limited_api to kwds if setuptools >= 26 is in use.
+    Do not alter the setting if it already exists.
+    Setuptools takes care of ignoring the flag on Python 2 and PyPy.
+
+    CPython itself should ignore the flag in a debugging version
+    (by not listing .abi3.so in the extensions it supports), but
+    it doesn't so far, creating troubles.  That's why we check
+    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
+    of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
+
+    On Windows, it's better not to use py_limited_api until issue #355
+    can be resolved (by having virtualenv copy PYTHON3.DLL).  See also
+    the start of _cffi_include.h.
+    """
+    if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
+            and sys.platform != 'win32'):
+        import setuptools
+        try:
+            setuptools_major_version = int(setuptools.__version__.partition('.')[0])
+            if setuptools_major_version >= 26:
+                kwds['py_limited_api'] = True
+        except ValueError:  # certain development versions of setuptools
+            # If we don't know the version number of setuptools, we
+            # try to set 'py_limited_api' anyway.  At worst, we get a
+            # warning.
+            kwds['py_limited_api'] = True
+    return kwds
 
 def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
     from distutils.core import Extension
-    from distutils.command.build_ext import build_ext
+    # We are a setuptools extension. Need this build_ext for py_limited_api.
+    from setuptools.command.build_ext import build_ext
     from distutils.dir_util import mkpath
     from distutils import log
     from cffi import recompiler
 
     allsources = ['$PLACEHOLDER']
     allsources.extend(kwds.pop('sources', []))
+    kwds = _set_py_limited_api(Extension, kwds)
     ext = Extension(name=module_name, sources=allsources, **kwds)
 
     def make_mod(tmpdir, pre_run=None):
@@ -116,8 +148,8 @@ def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
 
 def _add_py_module(dist, ffi, module_name):
     from distutils.dir_util import mkpath
-    from distutils.command.build_py import build_py
-    from distutils.command.build_ext import build_ext
+    from setuptools.command.build_py import build_py
+    from setuptools.command.build_ext import build_ext
     from distutils import log
     from cffi import recompiler
 
@@ -137,6 +169,17 @@ def _add_py_module(dist, ffi, module_name):
             generate_mod(os.path.join(self.build_lib, *module_path))
     dist.cmdclass['build_py'] = build_py_make_mod
 
+    # distutils and setuptools have no notion I could find of a
+    # generated python module.  If we don't add module_name to
+    # dist.py_modules, then things mostly work but there are some
+    # combination of options (--root and --record) that will miss
+    # the module.  So we add it here, which gives a few apparently
+    # harmless warnings about not finding the file outside the
+    # build directory.
+    if dist.py_modules is None:
+        dist.py_modules = []
+    dist.py_modules.append(module_name)
+
     # the following is only for "build_ext -i"
     base_class_2 = dist.cmdclass.get('build_ext', build_ext)
     class build_ext_make_mod(base_class_2):
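
The _set_py_limited_api() helper and the switch to setuptools' build_ext above only apply when a project routes its build through cffi's setuptools integration. A minimal sketch of such a setup.py, with hypothetical package and builder-module names; on CPython 3 with setuptools >= 26, outside Windows and non-debug builds, the resulting extension should get py_limited_api=True and an abi3-tagged filename:

    # sketch: setup.py entry point that reaches add_cffi_module() above
    from setuptools import setup

    setup(
        name="example-cffi-pkg",                       # hypothetical
        setup_requires=["cffi>=1.0.0"],
        cffi_modules=["example_build.py:ffibuilder"],  # hypothetical builder module
        install_requires=["cffi>=1.0.0"],
    )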

+ 17 - 12
desktop/core/ext-py/cffi-1.5.2/cffi/vengine_cpy.py → desktop/core/ext-py/cffi-1.11.5/cffi/vengine_cpy.py

@@ -2,7 +2,8 @@
 # DEPRECATED: implementation for ffi.verify()
 #
 import sys, imp
-from . import model, ffiplatform
+from . import model
+from .error import VerificationError
 
 
 class VCPythonEngine(object):
@@ -155,7 +156,7 @@ class VCPythonEngine(object):
                                           self.verifier.modulefilename)
             except ImportError as e:
                 error = "importing %r: %s" % (self.verifier.modulefilename, e)
-                raise ffiplatform.VerificationError(error)
+                raise VerificationError(error)
             finally:
                 if hasattr(sys, "setdlopenflags"):
                     sys.setdlopenflags(previous_flags)
@@ -185,7 +186,7 @@ class VCPythonEngine(object):
             def __dir__(self):
                 return FFILibrary._cffi_dir + list(self.__dict__)
         library = FFILibrary()
-        if module._cffi_setup(lst, ffiplatform.VerificationError, library):
+        if module._cffi_setup(lst, VerificationError, library):
             import warnings
             warnings.warn("reimporting %r might overwrite older definitions"
                           % (self.verifier.get_module_name()))
@@ -212,7 +213,7 @@ class VCPythonEngine(object):
                 method = getattr(self, '_generate_cpy_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in verify(): %r" % name)
             try:
                 method(tp, realname)
@@ -295,7 +296,7 @@ class VCPythonEngine(object):
 
     def _convert_expr_from_c(self, tp, var, context):
         if isinstance(tp, model.PrimitiveType):
-            if tp.is_integer_type():
+            if tp.is_integer_type() and tp.name != '_Bool':
                 return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
             elif tp.name != 'long double':
                 return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
@@ -308,7 +309,7 @@ class VCPythonEngine(object):
         elif isinstance(tp, model.ArrayType):
             return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
                 var, self._gettypenum(model.PointerType(tp.item)))
-        elif isinstance(tp, model.StructType):
+        elif isinstance(tp, model.StructOrUnion):
             if tp.fldnames is None:
                 raise TypeError("'%s' is used as %s, but is opaque" % (
                     tp._get_c_name(), context))
@@ -485,7 +486,7 @@ class VCPythonEngine(object):
                     prnt('  { %s = &p->%s; (void)tmp; }' % (
                         ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                         fname))
-                except ffiplatform.VerificationError as e:
+                except VerificationError as e:
                     prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         prnt('static PyObject *')
@@ -550,7 +551,7 @@ class VCPythonEngine(object):
             # check that the layout sizes and offsets match the real ones
             def check(realvalue, expectedvalue, msg):
                 if realvalue != expectedvalue:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "%s (we have %d, but C compiler says %d)"
                         % (msg, expectedvalue, realvalue))
             ffi = self.ffi
@@ -771,7 +772,7 @@ class VCPythonEngine(object):
                 BItemType = self.ffi._get_cached_btype(tp.item)
                 length, rest = divmod(size, self.ffi.sizeof(BItemType))
                 if rest != 0:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "bad size: %r does not seem to be an array of %s" %
                         (name, tp.item))
                 tp = tp.resolve_length(length)
@@ -807,7 +808,8 @@ cffimod_header = r'''
 #include <stddef.h>
 
 /* this block of #ifs should be kept exactly identical between
-   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
 #if defined(_MSC_VER)
 # include <malloc.h>   /* for alloca() */
 # if _MSC_VER < 1600   /* MSVC < 2010 */
@@ -841,11 +843,13 @@ cffimod_header = r'''
 #  include <stdint.h>
 # endif
 # if _MSC_VER < 1800   /* MSVC < 2013 */
-   typedef unsigned char _Bool;
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
 # endif
 #else
 # include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
 #  include <alloca.h>
 # endif
 #endif
@@ -868,6 +872,7 @@ cffimod_header = r'''
 #define _cffi_from_c_ulong PyLong_FromUnsignedLong
 #define _cffi_from_c_longlong PyLong_FromLongLong
 #define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
 
 #define _cffi_to_c_double PyFloat_AsDouble
 #define _cffi_to_c_float PyFloat_AsDouble
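
Two of the changes above affect how _Bool round-trips through the deprecated ffi.verify() engine: _convert_expr_from_c() no longer pushes _Bool through the plain integer path, and the new _cffi_from_c__Bool macro maps it to PyBool_FromLong. A minimal sketch with a hypothetical C helper, assuming a C99 compiler is available:

    # sketch: a _Bool return value should now arrive as a real Python bool
    import cffi

    ffi = cffi.FFI()
    ffi.cdef("_Bool is_positive(int);")
    lib = ffi.verify("_Bool is_positive(int x) { return x > 0; }")

    print(lib.is_positive(3))    # expected: True (via PyBool_FromLong)
    print(lib.is_positive(-1))   # expected: False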

+ 13 - 9
desktop/core/ext-py/cffi-1.5.2/cffi/vengine_gen.py → desktop/core/ext-py/cffi-1.11.5/cffi/vengine_gen.py

@@ -4,7 +4,8 @@
 import sys, os
 import types
 
-from . import model, ffiplatform
+from . import model
+from .error import VerificationError
 
 
 class VGenericEngine(object):
@@ -102,7 +103,7 @@ class VGenericEngine(object):
                 method = getattr(self, '_generate_gen_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in verify(): %r" % name)
             try:
                 method(tp, realname)
@@ -281,7 +282,7 @@ class VGenericEngine(object):
                     prnt('  { %s = &p->%s; (void)tmp; }' % (
                         ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                         fname))
-                except ffiplatform.VerificationError as e:
+                except VerificationError as e:
                     prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         self.export_symbols.append(layoutfuncname)
@@ -344,7 +345,7 @@ class VGenericEngine(object):
             # check that the layout sizes and offsets match the real ones
             def check(realvalue, expectedvalue, msg):
                 if realvalue != expectedvalue:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "%s (we have %d, but C compiler says %d)"
                         % (msg, expectedvalue, realvalue))
             ffi = self.ffi
@@ -498,7 +499,7 @@ class VGenericEngine(object):
             error = self.ffi.string(p)
             if sys.version_info >= (3,):
                 error = str(error, 'utf-8')
-            raise ffiplatform.VerificationError(error)
+            raise VerificationError(error)
 
     def _enum_funcname(self, prefix, name):
         # "$enum_$1" => "___D_enum____D_1"
@@ -591,7 +592,7 @@ class VGenericEngine(object):
                 BItemType = self.ffi._get_cached_btype(tp.item)
                 length, rest = divmod(size, self.ffi.sizeof(BItemType))
                 if rest != 0:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "bad size: %r does not seem to be an array of %s" %
                         (name, tp.item))
                 tp = tp.resolve_length(length)
@@ -626,7 +627,8 @@ cffimod_header = r'''
 #include <sys/types.h>   /* XXX for ssize_t on some platforms */
 
 /* this block of #ifs should be kept exactly identical between
-   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
 #if defined(_MSC_VER)
 # include <malloc.h>   /* for alloca() */
 # if _MSC_VER < 1600   /* MSVC < 2010 */
@@ -660,11 +662,13 @@ cffimod_header = r'''
 #  include <stdint.h>
 # endif
 # if _MSC_VER < 1800   /* MSVC < 2013 */
-   typedef unsigned char _Bool;
+#  ifndef __cplusplus
+    typedef unsigned char _Bool;
+#  endif
 # endif
 #else
 # include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
 #  include <alloca.h>
 # endif
 #endif

+ 8 - 18
desktop/core/ext-py/cryptography-1.3.1/.eggs/cffi-1.5.2-py2.7-macosx-10.11-x86_64.egg/cffi/verifier.py → desktop/core/ext-py/cffi-1.11.5/cffi/verifier.py

@@ -4,6 +4,7 @@
 import sys, os, binascii, shutil, io
 from . import __version_verifier_modules__
 from . import ffiplatform
+from .error import VerificationError
 
 if sys.version_info >= (3, 3):
     import importlib.machinery
@@ -25,16 +26,6 @@ else:
                 s = s.encode('ascii')
             super(NativeIO, self).write(s)
 
-def _hack_at_distutils():
-    # Windows-only workaround for some configurations: see
-    # https://bugs.python.org/issue23246 (Python 2.7 with 
-    # a specific MS compiler suite download)
-    if sys.platform == "win32":
-        try:
-            import setuptools    # for side-effects, patches distutils
-        except ImportError:
-            pass
-
 
 class Verifier(object):
 
@@ -42,7 +33,7 @@ class Verifier(object):
                  ext_package=None, tag='', force_generic_engine=False,
                  source_extension='.c', flags=None, relative_to=None, **kwds):
         if ffi._parser._uses_new_feature:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "feature not supported with ffi.verify(), but only "
                 "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
         self.ffi = ffi
@@ -83,7 +74,7 @@ class Verifier(object):
         which can be tweaked beforehand."""
         with self.ffi._lock:
             if self._has_source and file is None:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "source code already written")
             self._write_source(file)
 
@@ -92,7 +83,7 @@ class Verifier(object):
         This produces a dynamic link library in 'self.modulefilename'."""
         with self.ffi._lock:
             if self._has_module:
-                raise ffiplatform.VerificationError("module already compiled")
+                raise VerificationError("module already compiled")
             if not self._has_source:
                 self._write_source()
             self._compile_module()
@@ -125,7 +116,7 @@ class Verifier(object):
         return basename
 
     def get_extension(self):
-        _hack_at_distutils() # backward compatibility hack
+        ffiplatform._hack_at_distutils() # backward compatibility hack
         if not self._has_source:
             with self.ffi._lock:
                 if not self._has_source:
@@ -310,7 +301,6 @@ def _get_so_suffixes():
     return suffixes
 
 def _ensure_dir(filename):
-    try:
-        os.makedirs(os.path.dirname(filename))
-    except OSError:
-        pass
+    dirname = os.path.dirname(filename)
+    if dirname and not os.path.isdir(dirname):
+        os.makedirs(dirname)
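
Across this file and the engines above, VerificationError now comes from cffi.error instead of ffiplatform. Callers that catch the exception should import it from its new module; a minimal sketch with a hypothetical cdef whose verification is expected to fail:

    # sketch: catching the relocated exception
    import cffi
    from cffi.error import VerificationError

    ffi = cffi.FFI()
    ffi.cdef("int missing_function(void);")
    try:
        lib = ffi.verify("")   # no C definition; compiling or importing should fail
    except VerificationError as exc:
        print("ffi.verify() failed:", exc)

As a side note on the same hunk, the rewritten _ensure_dir() only creates a directory that is actually missing and no longer swallows every OSError, so genuine failures such as permission errors now propagate to the caller.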

Some files are not shown because too many files were changed in this diff.