diff -Nru protobuf-2.6.1/debian/changelog protobuf-2.6.1/debian/changelog --- protobuf-2.6.1/debian/changelog 2015-08-26 22:38:00.000000000 +0200 +++ protobuf-2.6.1/debian/changelog 2018-04-11 17:57:46.000000000 +0200 @@ -1,3 +1,9 @@ +protobuf (2.6.1-1.3ubuntu1) xenial; urgency=medium + + * Build python3 package. (LP: #1735160) + + -- Andrea Azzarone Thu, 11 Jan 2018 09:57:43 +0000 + protobuf (2.6.1-1.3) unstable; urgency=medium * Non-maintainer upload. diff -Nru protobuf-2.6.1/debian/control protobuf-2.6.1/debian/control --- protobuf-2.6.1/debian/control 2015-08-06 08:47:50.000000000 +0200 +++ protobuf-2.6.1/debian/control 2018-04-11 17:57:46.000000000 +0200 @@ -1,7 +1,8 @@ Source: protobuf Section: devel Priority: optional -Maintainer: Robert Edmonds +Maintainer: Ubuntu Developers +XSBC-Original-Maintainer: Robert Edmonds Uploaders: Iustin Pop Build-Depends: # Debian build system @@ -17,6 +18,11 @@ , libpython-all-dev (>= 2.7) , python-setuptools , python-google-apputils + , python3-all (>= 3.3) + , libpython3-all-dev (>= 3.3) + , python3-setuptools + , python3-google-apputils + , python3-six # Manpage generator , xmlto # Tests @@ -181,6 +187,27 @@ need the protoc tool (in the protobuf-compiler package) to compile your definition to Python classes, and then the modules in this package will allow you to use those classes in your programs. + +Package: python3-protobuf +Architecture: any +Section: python +Depends: ${shlibs:Depends}, ${python3:Depends}, ${misc:Depends} +Description: Python 3 bindings for protocol buffers + Protocol buffers are a flexible, efficient, automated mechanism for + serializing structured data - similar to XML, but smaller, faster, and + simpler. You define how you want your data to be structured once, then you can + use special generated source code to easily write and read your structured + data to and from a variety of data streams and using a variety of languages. + You can even update your data structure without breaking deployed programs + that are compiled against the "old" format. + . + Google uses Protocol Buffers for almost all of its internal RPC protocols and + file formats. + . + This package contains the Python 3 bindings for the protocol buffers. You will + need the protoc tool (in the protobuf-compiler package) to compile your + definition to Python classes, and then the modules in this package will allow + you to use those classes in your programs. Package: libprotobuf-java Architecture: all diff -Nru protobuf-2.6.1/debian/patches/fix-long-int-bugs.patch protobuf-2.6.1/debian/patches/fix-long-int-bugs.patch --- protobuf-2.6.1/debian/patches/fix-long-int-bugs.patch 1970-01-01 01:00:00.000000000 +0100 +++ protobuf-2.6.1/debian/patches/fix-long-int-bugs.patch 2018-04-11 19:21:10.000000000 +0200 @@ -0,0 +1,114 @@ +Description: Fixing some long/int bugs +Origin: backport, https://github.com/google/protobuf/commit/fe7d9379df3ce7c951bc0652a451413cff02382a +Bug-Ubuntu: https://bugs.launchpad.net/ubuntu/+source/protobuf/+bug/1735160 +Last-Update: 2018-01-10 + +--- protobuf-2.6.1.orig/python/google/protobuf/internal/decoder.py ++++ protobuf-2.6.1/python/google/protobuf/internal/decoder.py +@@ -86,6 +86,9 @@ + + import six + ++if six.PY3: ++ long = int ++ + from google.protobuf.internal import encoder + from google.protobuf.internal import wire_format + from google.protobuf import message +@@ -157,8 +160,8 @@ + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. 
+ +-_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +-_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, int) ++_DecodeVarint = _VarintDecoder((1 << 64) - 1, long) ++_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long) + + # Use these versions for values which must be limited to 32 bits. + _DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +--- protobuf-2.6.1.orig/python/google/protobuf/internal/reflection_test.py ++++ protobuf-2.6.1/python/google/protobuf/internal/reflection_test.py +@@ -621,17 +621,17 @@ + TestGetAndDeserialize('optional_int32', 1, int) + TestGetAndDeserialize('optional_int32', 1 << 30, int) + TestGetAndDeserialize('optional_uint32', 1 << 30, int) ++ try: ++ integer_64 = long ++ except NameError: # Python3 ++ integer_64 = int + if struct.calcsize('L') == 4: + # Python only has signed ints, so 32-bit python can't fit an uint32 + # in an int. +- TestGetAndDeserialize('optional_uint32', 1 << 31, int) ++ TestGetAndDeserialize('optional_uint32', 1 << 31, integer_64) + else: + # 64-bit python can fit uint32 inside an int + TestGetAndDeserialize('optional_uint32', 1 << 31, int) +- try: +- integer_64 = long +- except NameError: # Python3 +- integer_64 = int + TestGetAndDeserialize('optional_int64', 1 << 30, integer_64) + TestGetAndDeserialize('optional_int64', 1 << 60, integer_64) + TestGetAndDeserialize('optional_uint64', 1 << 30, integer_64) +--- protobuf-2.6.1.orig/python/google/protobuf/internal/type_checkers.py ++++ protobuf-2.6.1/python/google/protobuf/internal/type_checkers.py +@@ -49,6 +49,9 @@ + + import six + ++if six.PY3: ++ long = int ++ + from google.protobuf.internal import decoder + from google.protobuf.internal import encoder + from google.protobuf.internal import wire_format +@@ -181,13 +184,13 @@ + class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 +- _TYPE = int ++ _TYPE = long + + + class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 +- _TYPE = int ++ _TYPE = long + + + # Type-checkers for all scalar CPPTYPEs. +@@ -197,9 +200,9 @@ + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker( +- float, int, int), ++ float, int, long), + _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker( +- float, int, int), ++ float, int, long), + _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int), + _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes), + } +--- protobuf-2.6.1.orig/python/google/protobuf/text_format.py ++++ protobuf-2.6.1/python/google/protobuf/text_format.py +@@ -39,6 +39,9 @@ + + import six + ++if six.PY3: ++ long = int ++ + from google.protobuf.internal import type_checkers + from google.protobuf import descriptor + from google.protobuf import text_encoding +@@ -772,7 +775,7 @@ + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. + if is_long: +- result = int(text, 0) ++ result = long(text, 0) + else: + result = int(text, 0) + except ValueError: diff -Nru protobuf-2.6.1/debian/patches/python-modernize.patch protobuf-2.6.1/debian/patches/python-modernize.patch --- protobuf-2.6.1/debian/patches/python-modernize.patch 1970-01-01 01:00:00.000000000 +0100 +++ protobuf-2.6.1/debian/patches/python-modernize.patch 2018-04-11 17:57:46.000000000 +0200 @@ -0,0 +1,1088 @@ +Description: Prepare for Python2-Python3 straddle. 
+Origin: backport, https://github.com/google/protobuf/commit/f336d4b7a5c1d369ed508e513d482c885705e939 +Bug-Ubuntu: https://bugs.launchpad.net/ubuntu/+source/protobuf/+bug/1735160 +Last-Update: 2018-01-10 + +--- protobuf-2.6.1.orig/python/google/protobuf/descriptor.py ++++ protobuf-2.6.1/python/google/protobuf/descriptor.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-# Needs to stay compatible with Python 2.5 due to GAE. +-# + # Copyright 2007 Google Inc. All Rights Reserved. + + """Descriptors essentially contain exactly the information found in a .proto +@@ -846,4 +844,4 @@ + + desc_name = '.'.join(full_message_name) + return Descriptor(desc_proto.name, desc_name, None, None, fields, +- nested_types.values(), enum_types.values(), []) ++ list(nested_types.values()), list(enum_types.values()), []) +--- protobuf-2.6.1.orig/python/google/protobuf/descriptor_pool.py ++++ protobuf-2.6.1/python/google/protobuf/descriptor_pool.py +@@ -57,8 +57,6 @@ + + __author__ = 'matthewtoia@google.com (Matt Toia)' + +-import sys +- + from google.protobuf import descriptor + from google.protobuf import descriptor_database + from google.protobuf import text_encoding +@@ -175,8 +173,7 @@ + + try: + file_proto = self._internal_db.FindFileByName(file_name) +- except KeyError: +- _, error, _ = sys.exc_info() #PY25 compatible for GAE. ++ except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: +@@ -211,8 +208,7 @@ + + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) +- except KeyError: +- _, error, _ = sys.exc_info() #PY25 compatible for GAE. ++ except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: +@@ -282,9 +278,9 @@ + # file proto. + for dependency in built_deps: + scope.update(self._ExtractSymbols( +- dependency.message_types_by_name.values())) ++ list(dependency.message_types_by_name.values()))) + scope.update((_PrefixWithDot(enum.full_name), enum) +- for enum in dependency.enum_types_by_name.values()) ++ for enum in list(dependency.enum_types_by_name.values())) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( +--- protobuf-2.6.1.orig/python/google/protobuf/internal/cpp_message.py ++++ protobuf-2.6.1/python/google/protobuf/internal/cpp_message.py +@@ -34,8 +34,12 @@ + + __author__ = 'petar@google.com (Petar Petrov)' + +-import copy_reg ++import collections + import operator ++ ++import six ++import six.moves.copyreg ++ + from google.protobuf.internal import _net_proto2___python + from google.protobuf.internal import enum_type_wrapper + from google.protobuf import message +@@ -146,7 +150,7 @@ + def __eq__(self, other): + if self is other: + return True +- if not operator.isSequenceType(other): ++ if not isinstance(other, collections.Sequence): + raise TypeError( + 'Can only compare repeated scalar fields against sequences.') + # We are presumably comparing against some other sequence type. +@@ -259,7 +263,7 @@ + index_key = lambda i: key(self[i]) + + # Sort the list of current indexes by the underlying object. +- indexes = range(len(self)) ++ indexes = list(range(len(self))) + indexes.sort(cmp=cmp, key=index_key, reverse=reverse) + + # Apply the transposition. 
+@@ -385,7 +389,7 @@ + _AddInitMethod(message_descriptor, cls) + _AddMessageMethods(message_descriptor, cls) + _AddPropertiesForExtensions(message_descriptor, cls) +- copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__())) ++ six.moves.copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__())) + + + def _AddDescriptors(message_descriptor, dictionary): +@@ -400,7 +404,7 @@ + dictionary['__descriptors'][field.name] = GetFieldDescriptor( + field.full_name) + +- dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [ ++ dictionary['__slots__'] = list(dictionary['__descriptors'].keys()) + [ + '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS'] + + +@@ -420,7 +424,7 @@ + def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary): + """Adds class attributes for the nested extensions.""" + extension_dict = message_descriptor.extensions_by_name +- for extension_name, extension_field in extension_dict.iteritems(): ++ for extension_name, extension_field in extension_dict.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + +@@ -474,7 +478,7 @@ + self._HACK_REFCOUNTS = self + self._composite_fields = {} + +- for field_name, field_value in kwargs.iteritems(): ++ for field_name, field_value in kwargs.items(): + field_cdescriptor = self.__descriptors.get(field_name, None) + if not field_cdescriptor: + raise ValueError('Protocol message has no "%s" field.' % field_name) +@@ -538,7 +542,7 @@ + + def Clear(self): + cmessages_to_release = [] +- for field_name, child_field in self._composite_fields.iteritems(): ++ for field_name, child_field in self._composite_fields.items(): + child_cdescriptor = self.__descriptors[field_name] + # TODO(anuraag): Support clearing repeated message fields as well. + if (child_cdescriptor.label != _LABEL_REPEATED and +@@ -631,7 +635,7 @@ + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + + # Attach the local methods to the message class. +- for key, value in locals().copy().iteritems(): ++ for key, value in locals().copy().items(): + if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): + setattr(cls, key, value) + +@@ -658,6 +662,6 @@ + def _AddPropertiesForExtensions(message_descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extension_dict = message_descriptor.extensions_by_name +- for extension_name, extension_field in extension_dict.iteritems(): ++ for extension_name, extension_field in extension_dict.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) +--- protobuf-2.6.1.orig/python/google/protobuf/internal/decoder.py ++++ protobuf-2.6.1/python/google/protobuf/internal/decoder.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#PY25 compatible for GAE. +-# + # Copyright 2009 Google Inc. All Rights Reserved. + + """Code for decoding protocol buffer primitives. +@@ -85,8 +83,9 @@ + __author__ = 'kenton@google.com (Kenton Varda)' + + import struct +-import sys ##PY25 +-_PY2 = sys.version_info[0] < 3 ##PY25 ++ ++import six ++ + from google.protobuf.internal import encoder + from google.protobuf.internal import wire_format + from google.protobuf import message +@@ -114,14 +113,11 @@ + decoder returns a (value, new_pos) pair. 
+ """ + +- local_ord = ord +- py2 = _PY2 ##PY25 +-##!PY25 py2 = str is bytes + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: +- b = local_ord(buffer[pos]) if py2 else buffer[pos] ++ b = six.indexbytes(buffer, pos) + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): +@@ -137,14 +133,11 @@ + def _SignedVarintDecoder(mask, result_type): + """Like _VarintDecoder() but decodes signed values.""" + +- local_ord = ord +- py2 = _PY2 ##PY25 +-##!PY25 py2 = str is bytes + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: +- b = local_ord(buffer[pos]) if py2 else buffer[pos] ++ b = six.indexbytes(buffer, pos) + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): +@@ -164,8 +157,8 @@ + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. + +-_DecodeVarint = _VarintDecoder((1 << 64) - 1, long) +-_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long) ++_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) ++_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, int) + + # Use these versions for values which must be limited to 32 bits. + _DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +@@ -183,10 +176,8 @@ + use that, but not in Python. + """ + +- py2 = _PY2 ##PY25 +-##!PY25 py2 = str is bytes + start = pos +- while (ord(buffer[pos]) if py2 else buffer[pos]) & 0x80: ++ while six.indexbytes(buffer, pos) & 0x80: + pos += 1 + pos += 1 + return (buffer[start:pos], pos) +@@ -301,7 +292,6 @@ + """ + + local_unpack = struct.unpack +- b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1') ##PY25 + + def InnerDecode(buffer, pos): + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign +@@ -312,17 +302,12 @@ + # If this value has all its exponent bits set, then it's non-finite. + # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. +- if ((float_bytes[3:4] in b('\x7F\xFF')) ##PY25 +-##!PY25 if ((float_bytes[3:4] in b'\x7F\xFF') +- and (float_bytes[2:3] >= b('\x80'))): ##PY25 +-##!PY25 and (float_bytes[2:3] >= b'\x80')): ++ if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... +- if float_bytes[0:3] != b('\x00\x00\x80'): ##PY25 +-##!PY25 if float_bytes[0:3] != b'\x00\x00\x80': ++ if float_bytes[0:3] != b'\x00\x00\x80': + return (_NAN, new_pos) + # If sign bit is set... +- if float_bytes[3:4] == b('\xFF'): ##PY25 +-##!PY25 if float_bytes[3:4] == b'\xFF': ++ if float_bytes[3:4] == b'\xFF': + return (_NEG_INF, new_pos) + return (_POS_INF, new_pos) + +@@ -341,7 +326,6 @@ + """ + + local_unpack = struct.unpack +- b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1') ##PY25 + + def InnerDecode(buffer, pos): + # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign +@@ -352,12 +336,9 @@ + # If this value has all its exponent bits set and at least one significand + # bit set, it's not a number. In Python 2.4, struct.unpack will treat it + # as inf or -inf. To avoid that, we treat it specially. 
+-##!PY25 if ((double_bytes[7:8] in b'\x7F\xFF') +-##!PY25 and (double_bytes[6:7] >= b'\xF0') +-##!PY25 and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): +- if ((double_bytes[7:8] in b('\x7F\xFF')) ##PY25 +- and (double_bytes[6:7] >= b('\xF0')) ##PY25 +- and (double_bytes[0:7] != b('\x00\x00\x00\x00\x00\x00\xF0'))): ##PY25 ++ if ((double_bytes[7:8] in b'\x7F\xFF') ++ and (double_bytes[6:7] >= b'\xF0') ++ and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (_NAN, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert +@@ -480,12 +461,12 @@ + """Returns a decoder for a string field.""" + + local_DecodeVarint = _DecodeVarint +- local_unicode = unicode ++ local_unicode = six.text_type + + def _ConvertToUnicode(byte_str): + try: + return local_unicode(byte_str, 'utf-8') +- except UnicodeDecodeError, e: ++ except UnicodeDecodeError as e: + # add more information to the error message and re-raise it. + e.reason = '%s in field: %s' % (e, key.full_name) + raise +--- protobuf-2.6.1.orig/python/google/protobuf/internal/encoder.py ++++ protobuf-2.6.1/python/google/protobuf/internal/encoder.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#PY25 compatible for GAE. +-# + # Copyright 2009 Google Inc. All Rights Reserved. + + """Code for encoding protocol message primitives. +@@ -71,8 +69,9 @@ + __author__ = 'kenton@google.com (Kenton Varda)' + + import struct +-import sys ##PY25 +-_PY2 = sys.version_info[0] < 3 ##PY25 ++ ++import six ++ + from google.protobuf.internal import wire_format + + +@@ -346,16 +345,14 @@ + def _VarintEncoder(): + """Return an encoder for a basic varint value (does not include tag).""" + +- local_chr = _PY2 and chr or (lambda x: bytes((x,))) ##PY25 +-##!PY25 local_chr = chr if bytes is str else lambda x: bytes((x,)) + def EncodeVarint(write, value): + bits = value & 0x7f + value >>= 7 + while value: +- write(local_chr(0x80|bits)) ++ write(six.int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 +- return write(local_chr(bits)) ++ return write(six.int2byte(bits)) + + return EncodeVarint + +@@ -364,18 +361,16 @@ + """Return an encoder for a basic signed varint value (does not include + tag).""" + +- local_chr = _PY2 and chr or (lambda x: bytes((x,))) ##PY25 +-##!PY25 local_chr = chr if bytes is str else lambda x: bytes((x,)) + def EncodeSignedVarint(write, value): + if value < 0: + value += (1 << 64) + bits = value & 0x7f + value >>= 7 + while value: +- write(local_chr(0x80|bits)) ++ write(six.int2byte(0x80|bits)) + bits = value & 0x7f + value >>= 7 +- return write(local_chr(bits)) ++ return write(six.int2byte(bits)) + + return EncodeSignedVarint + +@@ -390,8 +385,7 @@ + + pieces = [] + _EncodeVarint(pieces.append, value) +- return "".encode("latin1").join(pieces) ##PY25 +-##!PY25 return b"".join(pieces) ++ return b"".join(pieces) + + + def TagBytes(field_number, wire_type): +@@ -529,33 +523,26 @@ + format: The format string to pass to struct.pack(). + """ + +- b = _PY2 and (lambda x:x) or (lambda x:x.encode('latin1')) ##PY25 + value_size = struct.calcsize(format) + if value_size == 4: + def EncodeNonFiniteOrRaise(write, value): + # Remember that the serialized form uses little-endian byte order. 
+ if value == _POS_INF: +- write(b('\x00\x00\x80\x7F')) ##PY25 +-##!PY25 write(b'\x00\x00\x80\x7F') ++ write(b'\x00\x00\x80\x7F') + elif value == _NEG_INF: +- write(b('\x00\x00\x80\xFF')) ##PY25 +-##!PY25 write(b'\x00\x00\x80\xFF') ++ write(b'\x00\x00\x80\xFF') + elif value != value: # NaN +- write(b('\x00\x00\xC0\x7F')) ##PY25 +-##!PY25 write(b'\x00\x00\xC0\x7F') ++ write(b'\x00\x00\xC0\x7F') + else: + raise + elif value_size == 8: + def EncodeNonFiniteOrRaise(write, value): + if value == _POS_INF: +- write(b('\x00\x00\x00\x00\x00\x00\xF0\x7F')) ##PY25 +-##!PY25 write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') ++ write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') + elif value == _NEG_INF: +- write(b('\x00\x00\x00\x00\x00\x00\xF0\xFF')) ##PY25 +-##!PY25 write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') ++ write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') + elif value != value: # NaN +- write(b('\x00\x00\x00\x00\x00\x00\xF8\x7F')) ##PY25 +-##!PY25 write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') ++ write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') + else: + raise + else: +@@ -631,10 +618,8 @@ + def BoolEncoder(field_number, is_repeated, is_packed): + """Returns an encoder for a boolean field.""" + +-##!PY25 false_byte = b'\x00' +-##!PY25 true_byte = b'\x01' +- false_byte = '\x00'.encode('latin1') ##PY25 +- true_byte = '\x01'.encode('latin1') ##PY25 ++ false_byte = b'\x00' ++ true_byte = b'\x01' + if is_packed: + tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) + local_EncodeVarint = _EncodeVarint +@@ -770,8 +755,7 @@ + } + } + """ +- start_bytes = "".encode("latin1").join([ ##PY25 +-##!PY25 start_bytes = b"".join([ ++ start_bytes = b"".join([ + TagBytes(1, wire_format.WIRETYPE_START_GROUP), + TagBytes(2, wire_format.WIRETYPE_VARINT), + _VarintBytes(field_number), +--- protobuf-2.6.1.orig/python/google/protobuf/internal/generator_test.py ++++ protobuf-2.6.1/python/google/protobuf/internal/generator_test.py +@@ -294,7 +294,7 @@ + self.assertSameElements( + nested_names, + [field.name for field in desc.oneofs[0].fields]) +- for field_name, field_desc in desc.fields_by_name.iteritems(): ++ for field_name, field_desc in desc.fields_by_name.items(): + if field_name in nested_names: + self.assertIs(desc.oneofs[0], field_desc.containing_oneof) + else: +--- protobuf-2.6.1.orig/python/google/protobuf/internal/message_factory_test.py ++++ protobuf-2.6.1/python/google/protobuf/internal/message_factory_test.py +@@ -107,14 +107,14 @@ + self.assertContainsSubset( + ['google.protobuf.python.internal.Factory2Message', + 'google.protobuf.python.internal.Factory1Message'], +- messages.keys()) ++ list(messages.keys())) + self._ExerciseDynamicClass( + messages['google.protobuf.python.internal.Factory2Message']) + self.assertContainsSubset( + ['google.protobuf.python.internal.Factory2Message.one_more_field', + 'google.protobuf.python.internal.another_field'], +- (messages['google.protobuf.python.internal.Factory1Message'] +- ._extensions_by_name.keys())) ++ (list(messages['google.protobuf.python.internal.Factory1Message'] ++ ._extensions_by_name.keys()))) + factory_msg1 = messages['google.protobuf.python.internal.Factory1Message'] + msg1 = messages['google.protobuf.python.internal.Factory1Message']() + ext1 = factory_msg1._extensions_by_name[ +--- protobuf-2.6.1.orig/python/google/protobuf/internal/python_message.py ++++ protobuf-2.6.1/python/google/protobuf/internal/python_message.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
POSSIBILITY OF SUCH DAMAGE. + +-# Keep it Python2.5 compatible for GAE. +-# + # Copyright 2007 Google Inc. All Rights Reserved. + # + # This code is meant to work on Python 2.4 and above only. +@@ -54,19 +52,14 @@ + + __author__ = 'robinson@google.com (Will Robinson)' + ++from io import BytesIO + import sys +-if sys.version_info[0] < 3: +- try: +- from cStringIO import StringIO as BytesIO +- except ImportError: +- from StringIO import StringIO as BytesIO +- import copy_reg as copyreg +-else: +- from io import BytesIO +- import copyreg + import struct + import weakref + ++import six ++import six.moves.copyreg as copyreg ++ + # We use "as" to avoid name collisions with variables. + from google.protobuf.internal import containers + from google.protobuf.internal import decoder +@@ -237,7 +230,7 @@ + + def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extension_dict = descriptor.extensions_by_name +- for extension_name, extension_field in extension_dict.iteritems(): ++ for extension_name, extension_field in extension_dict.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + +@@ -323,7 +316,7 @@ + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) +- for field_name, field_value in kwargs.iteritems(): ++ for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError("%s() got an unexpected keyword argument '%s'" % +@@ -546,7 +539,7 @@ + def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extension_dict = descriptor.extensions_by_name +- for extension_name, extension_field in extension_dict.iteritems(): ++ for extension_name, extension_field in extension_dict.items(): + constant_name = extension_name.upper() + "_FIELD_NUMBER" + setattr(cls, constant_name, extension_field.number) + +@@ -601,7 +594,7 @@ + """Helper for _AddMessageMethods().""" + + def ListFields(self): +- all_fields = [item for item in self._fields.iteritems() if _IsPresent(item)] ++ all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + +@@ -845,7 +838,7 @@ + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') +- except struct.error, e: ++ except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString +@@ -945,7 +938,7 @@ + name = field.name + + if field.label == _FieldDescriptor.LABEL_REPEATED: +- for i in xrange(len(value)): ++ for i in range(len(value)): + element = value[i] + prefix = "%s[%d]." 
% (name, i) + sub_errors = element.FindInitializationErrors() +@@ -975,7 +968,7 @@ + + fields = self._fields + +- for field, value in msg._fields.iteritems(): ++ for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: +--- protobuf-2.6.1.orig/python/google/protobuf/internal/reflection_test.py ++++ protobuf-2.6.1/python/google/protobuf/internal/reflection_test.py +@@ -42,6 +42,8 @@ + import operator + import struct + ++import six ++ + from google.apputils import basetest + from google.protobuf import unittest_import_pb2 + from google.protobuf import unittest_mset_pb2 +@@ -469,7 +471,7 @@ + proto.repeated_string.extend(['foo', 'bar']) + proto.repeated_string.extend([]) + proto.repeated_string.append('baz') +- proto.repeated_string.extend(str(x) for x in xrange(2)) ++ proto.repeated_string.extend(str(x) for x in range(2)) + proto.optional_int32 = 21 + proto.repeated_bool # Access but don't set anything; should not be listed. + self.assertEqual( +@@ -622,14 +624,18 @@ + if struct.calcsize('L') == 4: + # Python only has signed ints, so 32-bit python can't fit an uint32 + # in an int. +- TestGetAndDeserialize('optional_uint32', 1 << 31, long) ++ TestGetAndDeserialize('optional_uint32', 1 << 31, int) + else: + # 64-bit python can fit uint32 inside an int + TestGetAndDeserialize('optional_uint32', 1 << 31, int) +- TestGetAndDeserialize('optional_int64', 1 << 30, long) +- TestGetAndDeserialize('optional_int64', 1 << 60, long) +- TestGetAndDeserialize('optional_uint64', 1 << 30, long) +- TestGetAndDeserialize('optional_uint64', 1 << 60, long) ++ try: ++ integer_64 = long ++ except NameError: # Python3 ++ integer_64 = int ++ TestGetAndDeserialize('optional_int64', 1 << 30, integer_64) ++ TestGetAndDeserialize('optional_int64', 1 << 60, integer_64) ++ TestGetAndDeserialize('optional_uint64', 1 << 30, integer_64) ++ TestGetAndDeserialize('optional_uint64', 1 << 60, integer_64) + + def testSingleScalarBoundsChecking(self): + def TestMinAndMaxIntegers(field_name, expected_min, expected_max): +@@ -755,18 +761,18 @@ + + def testEnum_KeysAndValues(self): + self.assertEqual(['FOREIGN_FOO', 'FOREIGN_BAR', 'FOREIGN_BAZ'], +- unittest_pb2.ForeignEnum.keys()) ++ list(unittest_pb2.ForeignEnum.keys())) + self.assertEqual([4, 5, 6], +- unittest_pb2.ForeignEnum.values()) ++ list(unittest_pb2.ForeignEnum.values())) + self.assertEqual([('FOREIGN_FOO', 4), ('FOREIGN_BAR', 5), + ('FOREIGN_BAZ', 6)], +- unittest_pb2.ForeignEnum.items()) ++ list(unittest_pb2.ForeignEnum.items())) + + proto = unittest_pb2.TestAllTypes() +- self.assertEqual(['FOO', 'BAR', 'BAZ', 'NEG'], proto.NestedEnum.keys()) +- self.assertEqual([1, 2, 3, -1], proto.NestedEnum.values()) ++ self.assertEqual(['FOO', 'BAR', 'BAZ', 'NEG'], list(proto.NestedEnum.keys())) ++ self.assertEqual([1, 2, 3, -1], list(proto.NestedEnum.values())) + self.assertEqual([('FOO', 1), ('BAR', 2), ('BAZ', 3), ('NEG', -1)], +- proto.NestedEnum.items()) ++ list(proto.NestedEnum.items())) + + def testRepeatedScalars(self): + proto = unittest_pb2.TestAllTypes() +@@ -805,7 +811,7 @@ + self.assertEqual([5, 25, 20, 15, 30], proto.repeated_int32[:]) + + # Test slice assignment with an iterator +- proto.repeated_int32[1:4] = (i for i in xrange(3)) ++ proto.repeated_int32[1:4] = (i for i in range(3)) + self.assertEqual([5, 0, 1, 2, 30], proto.repeated_int32) + + # Test slice assignment. 
+@@ -1008,9 +1014,8 @@ + containing_type=None, nested_types=[], enum_types=[], + fields=[foo_field_descriptor], extensions=[], + options=descriptor_pb2.MessageOptions()) +- class MyProtoClass(message.Message): ++ class MyProtoClass(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)): + DESCRIPTOR = mydescriptor +- __metaclass__ = reflection.GeneratedProtocolMessageType + myproto_instance = MyProtoClass() + self.assertEqual(0, myproto_instance.foo_field) + self.assertTrue(not myproto_instance.HasField('foo_field')) +@@ -1050,14 +1055,13 @@ + new_field.label = descriptor_pb2.FieldDescriptorProto.LABEL_REPEATED + + desc = descriptor.MakeDescriptor(desc_proto) +- self.assertTrue(desc.fields_by_name.has_key('name')) +- self.assertTrue(desc.fields_by_name.has_key('year')) +- self.assertTrue(desc.fields_by_name.has_key('automatic')) +- self.assertTrue(desc.fields_by_name.has_key('price')) +- self.assertTrue(desc.fields_by_name.has_key('owners')) ++ self.assertTrue('name' in desc.fields_by_name) ++ self.assertTrue('year' in desc.fields_by_name) ++ self.assertTrue('automatic' in desc.fields_by_name) ++ self.assertTrue('price' in desc.fields_by_name) ++ self.assertTrue('owners' in desc.fields_by_name) + +- class CarMessage(message.Message): +- __metaclass__ = reflection.GeneratedProtocolMessageType ++ class CarMessage(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)): + DESCRIPTOR = desc + + prius = CarMessage() +@@ -1660,14 +1664,14 @@ + setattr, proto, 'optional_bytes', u'unicode object') + + # Check that the default value is of python's 'unicode' type. +- self.assertEqual(type(proto.optional_string), unicode) ++ self.assertEqual(type(proto.optional_string), six.text_type) + +- proto.optional_string = unicode('Testing') ++ proto.optional_string = six.text_type('Testing') + self.assertEqual(proto.optional_string, str('Testing')) + + # Assign a value of type 'str' which can be encoded in UTF-8. + proto.optional_string = str('Testing') +- self.assertEqual(proto.optional_string, unicode('Testing')) ++ self.assertEqual(proto.optional_string, six.text_type('Testing')) + + # Try to assign a 'str' value which contains bytes that aren't 7-bit ASCII. 
+ self.assertRaises(ValueError, +@@ -1715,7 +1719,7 @@ + bytes_read = message2.MergeFromString(raw.item[0].message) + self.assertEqual(len(raw.item[0].message), bytes_read) + +- self.assertEqual(type(message2.str), unicode) ++ self.assertEqual(type(message2.str), six.text_type) + self.assertEqual(message2.str, test_utf8) + + # The pure Python API throws an exception on MergeFromString(), +@@ -1739,7 +1743,7 @@ + def testBytesInTextFormat(self): + proto = unittest_pb2.TestAllTypes(optional_bytes=b'\x00\x7f\x80\xff') + self.assertEqual(u'optional_bytes: "\\000\\177\\200\\377"\n', +- unicode(proto)) ++ six.text_type(proto)) + + def testEmptyNestedMessage(self): + proto = unittest_pb2.TestAllTypes() +@@ -2289,7 +2293,7 @@ + test_util.SetAllFields(first_proto) + serialized = first_proto.SerializeToString() + +- for truncation_point in xrange(len(serialized) + 1): ++ for truncation_point in range(len(serialized) + 1): + try: + second_proto = unittest_pb2.TestAllTypes() + unknown_fields = unittest_pb2.TestEmptyMessage() +@@ -2887,8 +2891,7 @@ + msg_descriptor = descriptor.MakeDescriptor( + file_descriptor.message_type[0]) + +- class MessageClass(message.Message): +- __metaclass__ = reflection.GeneratedProtocolMessageType ++ class MessageClass(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)): + DESCRIPTOR = msg_descriptor + msg = MessageClass() + msg_str = ( +--- protobuf-2.6.1.orig/python/google/protobuf/internal/text_format_test.py ++++ protobuf-2.6.1/python/google/protobuf/internal/text_format_test.py +@@ -36,9 +36,10 @@ + + import re + ++import six ++ + from google.apputils import basetest + from google.protobuf import text_format +-from google.protobuf.internal import api_implementation + from google.protobuf.internal import test_util + from google.protobuf import unittest_pb2 + from google.protobuf import unittest_mset_pb2 +@@ -138,7 +139,7 @@ + 'repeated_string: "\\303\\274\\352\\234\\237"\n') + + def testPrintExoticUnicodeSubclass(self): +- class UnicodeSub(unicode): ++ class UnicodeSub(six.text_type): + pass + message = unittest_pb2.TestAllTypes() + message.repeated_string.append(UnicodeSub(u'\u00fc\ua71f')) +--- protobuf-2.6.1.orig/python/google/protobuf/internal/type_checkers.py ++++ protobuf-2.6.1/python/google/protobuf/internal/type_checkers.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#PY25 compatible for GAE. +-# + # Copyright 2008 Google Inc. All Rights Reserved. + + """Provides type checking routines. +@@ -49,9 +47,8 @@ + + __author__ = 'robinson@google.com (Will Robinson)' + +-import sys ##PY25 +-if sys.version < '2.6': bytes = str ##PY25 +-from google.protobuf.internal import api_implementation ++import six ++ + from google.protobuf.internal import decoder + from google.protobuf.internal import encoder + from google.protobuf.internal import wire_format +@@ -111,9 +108,9 @@ + """Checker used for integer fields. 
Performs type-check and range check.""" + + def CheckValue(self, proposed_value): +- if not isinstance(proposed_value, (int, long)): ++ if not isinstance(proposed_value, six.integer_types): + message = ('%.1024r has type %s, but expected one of: %s' % +- (proposed_value, type(proposed_value), (int, long))) ++ (proposed_value, type(proposed_value), six.integer_types)) + raise TypeError(message) + if not self._MIN <= proposed_value <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) +@@ -132,9 +129,9 @@ + self._enum_type = enum_type + + def CheckValue(self, proposed_value): +- if not isinstance(proposed_value, (int, long)): ++ if not isinstance(proposed_value, six.integer_types): + message = ('%.1024r has type %s, but expected one of: %s' % +- (proposed_value, type(proposed_value), (int, long))) ++ (proposed_value, type(proposed_value), six.integer_types)) + raise TypeError(message) + if proposed_value not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) +@@ -149,9 +146,9 @@ + """ + + def CheckValue(self, proposed_value): +- if not isinstance(proposed_value, (bytes, unicode)): ++ if not isinstance(proposed_value, (bytes, six.text_type)): + message = ('%.1024r has type %s, but expected one of: %s' % +- (proposed_value, type(proposed_value), (bytes, unicode))) ++ (proposed_value, type(proposed_value), (bytes, six.text_type))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is in 7-bit ASCII +@@ -184,13 +181,13 @@ + class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 +- _TYPE = long ++ _TYPE = int + + + class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 +- _TYPE = long ++ _TYPE = int + + + # Type-checkers for all scalar CPPTYPEs. +@@ -200,9 +197,9 @@ + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker( +- float, int, long), ++ float, int, int), + _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker( +- float, int, long), ++ float, int, int), + _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int), + _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes), + } +--- protobuf-2.6.1.orig/python/google/protobuf/message_factory.py ++++ protobuf-2.6.1/python/google/protobuf/message_factory.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#PY25 compatible for GAE. +-# + # Copyright 2012 Google Inc. All Rights Reserved. + + """Provides a factory class for generating dynamic messages. 
+@@ -43,7 +41,6 @@ + + __author__ = 'matthewtoia@google.com (Matt Toia)' + +-import sys ##PY25 + from google.protobuf import descriptor_database + from google.protobuf import descriptor_pool + from google.protobuf import message +@@ -75,8 +72,7 @@ + """ + if descriptor.full_name not in self._classes: + descriptor_name = descriptor.name +- if sys.version_info[0] < 3: ##PY25 +-##!PY25 if str is bytes: # PY2 ++ if str is bytes: # PY2 + descriptor_name = descriptor.name.encode('ascii', 'ignore') + result_class = reflection.GeneratedProtocolMessageType( + descriptor_name, +@@ -111,7 +107,7 @@ + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) +- for name, msg in file_desc.message_types_by_name.iteritems(): ++ for name, msg in file_desc.message_types_by_name.items(): + if file_desc.package: + full_name = '.'.join([file_desc.package, name]) + else: +@@ -128,7 +124,7 @@ + # ignore the registration if the original was the same, or raise + # an error if they were different. + +- for name, extension in file_desc.extensions_by_name.iteritems(): ++ for name, extension in file_desc.extensions_by_name.items(): + if extension.containing_type.full_name not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type.full_name] +--- protobuf-2.6.1.orig/python/google/protobuf/text_encoding.py ++++ protobuf-2.6.1/python/google/protobuf/text_encoding.py +@@ -27,16 +27,13 @@ + # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +- +-#PY25 compatible for GAE. +-# + """Encoding related utilities.""" +- + import re +-import sys ##PY25 ++ ++import six + + # Lookup table for utf8 +-_cescape_utf8_to_str = [chr(i) for i in xrange(0, 256)] ++_cescape_utf8_to_str = [chr(i) for i in range(0, 256)] + _cescape_utf8_to_str[9] = r'\t' # optional escape + _cescape_utf8_to_str[10] = r'\n' # optional escape + _cescape_utf8_to_str[13] = r'\r' # optional escape +@@ -46,9 +43,9 @@ + _cescape_utf8_to_str[92] = r'\\' # necessary escape + + # Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +-_cescape_byte_to_str = ([r'\%03o' % i for i in xrange(0, 32)] + +- [chr(i) for i in xrange(32, 127)] + +- [r'\%03o' % i for i in xrange(127, 256)]) ++_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + ++ [chr(i) for i in range(32, 127)] + ++ [r'\%03o' % i for i in range(127, 256)]) + _cescape_byte_to_str[9] = r'\t' # optional escape + _cescape_byte_to_str[10] = r'\n' # optional escape + _cescape_byte_to_str[13] = r'\r' # optional escape +@@ -75,7 +72,7 @@ + """ + # PY3 hack: make Ord work for str and bytes: + # //platforms/networking/data uses unicode here, hence basestring. +- Ord = ord if isinstance(text, basestring) else lambda x: x ++ Ord = ord if isinstance(text, six.string_types) else lambda x: x + if as_utf8: + return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text) + return ''.join(_cescape_byte_to_str[Ord(c)] for c in text) +@@ -100,8 +97,7 @@ + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + +- if sys.version_info[0] < 3: ##PY25 +-##!PY25 if str is bytes: # PY2 ++ if str is bytes: # PY2 + return result.decode('string_escape') + result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result) + return (result.encode('ascii') # Make it bytes to allow decode. 
+--- protobuf-2.6.1.orig/python/google/protobuf/text_format.py ++++ protobuf-2.6.1/python/google/protobuf/text_format.py +@@ -28,8 +28,6 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-#PY25 compatible for GAE. +-# + # Copyright 2007 Google Inc. All Rights Reserved. + + """Contains routines for printing protocol messages in text format.""" +@@ -39,6 +37,8 @@ + import cStringIO + import re + ++import six ++ + from google.protobuf.internal import type_checkers + from google.protobuf import descriptor + from google.protobuf import text_encoding +@@ -189,7 +189,7 @@ + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') +- if isinstance(value, unicode): ++ if isinstance(value, six.text_type): + out_value = value.encode('utf-8') + else: + out_value = value +@@ -499,7 +499,7 @@ + def _PopLine(self): + while len(self._current_line) <= self._column: + try: +- self._current_line = self._lines.next() ++ self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False +@@ -569,7 +569,7 @@ + """ + try: + result = ParseInteger(self.token, is_signed=True, is_long=False) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -585,7 +585,7 @@ + """ + try: + result = ParseInteger(self.token, is_signed=False, is_long=False) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -601,7 +601,7 @@ + """ + try: + result = ParseInteger(self.token, is_signed=True, is_long=True) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -617,7 +617,7 @@ + """ + try: + result = ParseInteger(self.token, is_signed=False, is_long=True) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -633,7 +633,7 @@ + """ + try: + result = ParseFloat(self.token) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -649,7 +649,7 @@ + """ + try: + result = ParseBool(self.token) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -665,8 +665,8 @@ + """ + the_bytes = self.ConsumeByteString() + try: +- return unicode(the_bytes, 'utf-8') +- except UnicodeDecodeError, e: ++ return six.text_type(the_bytes, 'utf-8') ++ except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): +@@ -681,8 +681,7 @@ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in ('\'', '"'): + the_list.append(self._ConsumeSingleByteString()) +- return ''.encode('latin1').join(the_list) ##PY25 +-##!PY25 return b''.join(the_list) ++ return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. 
+@@ -700,7 +699,7 @@ + + try: + result = text_encoding.CUnescape(text[1:-1]) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -708,7 +707,7 @@ + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) +- except ValueError, e: ++ except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result +@@ -773,7 +772,7 @@ + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. + if is_long: +- result = long(text, 0) ++ result = int(text, 0) + else: + result = int(text, 0) + except ValueError: diff -Nru protobuf-2.6.1/debian/patches/series protobuf-2.6.1/debian/patches/series --- protobuf-2.6.1/debian/patches/series 2014-10-24 19:56:18.000000000 +0200 +++ protobuf-2.6.1/debian/patches/series 2018-04-11 17:57:46.000000000 +0200 @@ -1 +1,4 @@ debian-changes +python-modernize.patch +use-io-bytesio.patch +fix-long-int-bugs.patch diff -Nru protobuf-2.6.1/debian/patches/use-io-bytesio.patch protobuf-2.6.1/debian/patches/use-io-bytesio.patch --- protobuf-2.6.1/debian/patches/use-io-bytesio.patch 1970-01-01 01:00:00.000000000 +0100 +++ protobuf-2.6.1/debian/patches/use-io-bytesio.patch 2018-04-11 17:57:46.000000000 +0200 @@ -0,0 +1,36 @@ +Description: Use 'io.BytesIO' rather than 'cStringIO.StringIO'. +Origin: backport, https://github.com/google/protobuf/commit/47ee4d37c17db8e97fe5b15cf918ab56ff93bb18 +Bug-Ubuntu: https://bugs.launchpad.net/ubuntu/+source/protobuf/+bug/1735160 +Last-Update: 2018-01-10 + +--- protobuf-2.6.1.orig/python/google/protobuf/internal/encoder.py ++++ protobuf-2.6.1/python/google/protobuf/internal/encoder.py +@@ -43,7 +43,7 @@ + sizer takes a value of this field's type and computes its byte size. The + encoder takes a writer function and a value. It encodes the value into byte + strings and invokes the writer function to write those strings. Typically the +-writer function is the write() method of a cStringIO. ++writer function is the write() method of a BytesIO. + + We try to do as much work as possible when constructing the writer and the + sizer rather than when calling them. In particular: +--- protobuf-2.6.1.orig/python/google/protobuf/text_format.py ++++ protobuf-2.6.1/python/google/protobuf/text_format.py +@@ -34,7 +34,7 @@ + + __author__ = 'kenton@google.com (Kenton Varda)' + +-import cStringIO ++import io + import re + + import six +@@ -89,7 +89,7 @@ + Returns: + A string of the text formatted protocol buffer message. + """ +- out = cStringIO.StringIO() ++ out = io.BytesIO() + PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, diff -Nru protobuf-2.6.1/debian/rules protobuf-2.6.1/debian/rules --- protobuf-2.6.1/debian/rules 2015-08-26 22:37:55.000000000 +0200 +++ protobuf-2.6.1/debian/rules 2018-04-11 17:57:46.000000000 +0200 @@ -7,7 +7,7 @@ %: - dh $@ --with autoreconf,python2 + dh $@ --with autoreconf,python2,python3 override_dh_auto_build-arch: dh_auto_build --arch @@ -16,7 +16,9 @@ xmlto man debian/protoc.xml # Python build. + cp -rf python python3 cd python && python setup.py build --cpp_implementation + cd python3 && python3 setup.py build --cpp_implementation override_dh_auto_build-indep: dh_auto_build --indep @@ -34,6 +36,13 @@ cd python && for python in $(shell pyversions -r); do \ $$python setup.py test --cpp_implementation; \ done + + # Python3 test. 
+ set -e; \ + export LD_LIBRARY_PATH=$(CURDIR)/src/.libs; \ + cd python3 && for python in $(shell py3versions -r); do \ + $$python setup.py test --cpp_implementation; \ + done endif override_dh_auto_test-indep: @@ -49,6 +58,9 @@ done rm -rf python/protobuf.egg-info + # Python3 clean. + rm -rf python3 + override_dh_auto_clean-indep: dh_auto_clean --indep @@ -68,6 +80,14 @@ done find $(CURDIR)/debian/python-protobuf -name 'protobuf-*-nspkg.pth' -delete + # Python3 install. + cd python3 && for python in $(shell py3versions -r); do \ + $$python setup.py install --cpp_implementation \ + --install-layout=deb --no-compile \ + --root=$(CURDIR)/debian/python3-protobuf; \ + done + find $(CURDIR)/debian/python3-protobuf -name 'protobuf-*-nspkg.pth' -delete + override_dh_auto_install-indep: dh_auto_install --indep
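
The three new patches and the rules changes above all rely on one Python 2/3 straddle pattern: alias long to int when running on Python 3, and go through six helpers (six.indexbytes, six.int2byte, six.text_type) so a single source tree serves both interpreters. The sketch below condenses that pattern as it appears in the decoder/encoder hunks; it is an illustration only, not part of the debdiff, and it omits the 32/64-bit masking and truncation handling of the real protobuf internals.

    # Illustrative sketch of the 2/3 straddle used by the patches above.
    # Not part of the debdiff; omits masking and error handling.
    import six

    if six.PY3:
        long = int  # Python 3 has no separate 'long'; alias it for the 64-bit paths.

    def decode_varint(buf, pos):
        """Decode an unsigned varint from buf at pos; return (value, new_pos)."""
        result = 0
        shift = 0
        while True:
            b = six.indexbytes(buf, pos)   # yields an int for both PY2 str and PY3 bytes
            result |= (b & 0x7F) << shift
            pos += 1
            if not (b & 0x80):             # high bit clear: last byte of the varint
                return long(result), pos
            shift += 7

    def encode_varint(value):
        """Encode a non-negative integer as a varint byte string."""
        pieces = []
        bits = value & 0x7F
        value >>= 7
        while value:
            pieces.append(six.int2byte(0x80 | bits))  # a single byte on both PY2 and PY3
            bits = value & 0x7F
            value >>= 7
        pieces.append(six.int2byte(bits))
        return b"".join(pieces)

    if __name__ == "__main__":
        encoded = encode_varint(1 << 40)
        assert decode_varint(encoded, 0) == (1 << 40, len(encoded))

Aliasing long rather than rewriting every call site is what keeps fix-long-int-bugs.patch small: the 64-bit value checkers can keep "_TYPE = long" and behave identically on both interpreters.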