[Python-checkins] r80929 - in python/branches/release31-maint: Lib/test/test_zlib.py Misc/NEWS Modules/zlibmodule.c

antoine.pitrou python-checkins at python.org
Fri May 7 19:08:54 CEST 2010


Author: antoine.pitrou
Date: Fri May 7 19:08:54 2010
New Revision: 80929
Log:
Merged revisions 80928 via svnmerge from 
svn+ssh://pythondev@svn.python.org/python/branches/py3k
................
 r80928 | antoine.pitrou | 2010年05月07日 19:04:02 +0200 (Fri, 07 May 2010) | 11 lines
 
 Merged revisions 80926 via svnmerge from 
 svn+ssh://pythondev@svn.python.org/python/trunk
 
 ........
 r80926 | antoine.pitrou | 2010年05月07日 18:50:34 +0200 (Fri, 07 May 2010) | 5 lines
 
 Issue #8571: Fix an internal error when compressing or decompressing a
 chunk larger than 1GB with the zlib module's compressor and decompressor
 objects.
 ........
................
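
For context, a minimal sketch of the code path this fixes (illustrative only, not part of
the commit; the chunk size and compression level are arbitrary). Before the change, passing
a single chunk larger than 1GB to a compressor or decompressor object failed with an
internal error; with the widened counters it round-trips:

    import zlib

    # A chunk just over 1 GiB; running this needs several GiB of free memory.
    data = b'x' * ((1 << 30) + (1 << 20))

    c = zlib.compressobj(1)
    compressed = c.compress(data) + c.flush()

    d = zlib.decompressobj()
    assert d.decompress(compressed) + d.flush() == data
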
Modified:
 python/branches/release31-maint/ (props changed)
 python/branches/release31-maint/Lib/test/test_zlib.py
 python/branches/release31-maint/Misc/NEWS
 python/branches/release31-maint/Modules/zlibmodule.c
Modified: python/branches/release31-maint/Lib/test/test_zlib.py
==============================================================================
--- python/branches/release31-maint/Lib/test/test_zlib.py	(original)
+++ python/branches/release31-maint/Lib/test/test_zlib.py	Fri May 7 19:08:54 2010
@@ -2,6 +2,7 @@
 from test import support
 import binascii
 import random
+from test.support import precisionbigmemtest, _1G
 
 zlib = support.import_module('zlib')
 
@@ -93,8 +94,39 @@
         self.assertRaises(ValueError, zlib.decompressobj().flush, -1)
 
 
+class BaseCompressTestCase(object):
+    def check_big_compress_buffer(self, size, compress_func):
+        _1M = 1024 * 1024
+        fmt = "%%0%dx" % (2 * _1M)
+        # Generate 10MB worth of random, and expand it by repeating it.
+        # The assumption is that zlib's memory is not big enough to exploit
+        # such spread out redundancy.
+        data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
+                         for i in range(10)])
+        data = data * (size // len(data) + 1)
+        try:
+            compress_func(data)
+        finally:
+            # Release memory
+            data = None
+
+    def check_big_decompress_buffer(self, size, decompress_func):
+        data = b'x' * size
+        try:
+            compressed = zlib.compress(data, 1)
+        finally:
+            # Release memory
+            data = None
+        data = decompress_func(compressed)
+        # Sanity check
+        try:
+            self.assertEqual(len(data), size)
+            self.assertEqual(len(data.strip(b'x')), 0)
+        finally:
+            data = None
 
-class CompressTestCase(unittest.TestCase):
+
+class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
     # Test compression in one go (whole message compression)
     def test_speech(self):
         x = zlib.compress(HAMLET_SCENE)
@@ -108,9 +140,19 @@
         for ob in x, bytearray(x):
             self.assertEqual(zlib.decompress(ob), data)
 
+    # Memory use of the following functions takes into account overallocation
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    def test_big_compress_buffer(self, size):
+        compress = lambda s: zlib.compress(s, 1)
+        self.check_big_compress_buffer(size, compress)
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    def test_big_decompress_buffer(self, size):
+        self.check_big_decompress_buffer(size, zlib.decompress)
 
 
-class CompressObjectTestCase(unittest.TestCase):
+class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
     # Test compression object
     def test_pair(self):
         # straightforward compress/decompress objects
@@ -399,6 +441,21 @@
         d.flush()
         self.assertRaises(ValueError, d.copy)
 
+    # Memory use of the following functions takes into account overallocation
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=3)
+    def test_big_compress_buffer(self, size):
+        c = zlib.compressobj(1)
+        compress = lambda s: c.compress(s) + c.flush()
+        self.check_big_compress_buffer(size, compress)
+
+    @precisionbigmemtest(size=_1G + 1024 * 1024, memuse=2)
+    def test_big_decompress_buffer(self, size):
+        d = zlib.decompressobj()
+        decompress = lambda s: d.decompress(s) + d.flush()
+        self.check_big_decompress_buffer(size, decompress)
+
+
 def genblock(seed, length, step=1024, generator=random):
     """length-byte stream of random data from a seed (in step-byte blocks)."""
     if seed is not None:
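
A note on the new tests above: they are bigmem tests, so they are skipped unless the test
runner is given a memory limit (for example via regrtest's -M option). The memuse value is
roughly the number of bytes of peak memory needed per byte of size; a sketch of the implied
requirement (illustrative arithmetic only, not part of the commit):

    # size/memuse as used in the decorators above
    _1G = 1024 * 1024 * 1024
    size = _1G + 1024 * 1024      # just over 1 GiB, enough to cross the old 1GB limit
    memuse = 3                    # roughly: input, intermediate copies, output (overallocation included)
    print("approx. peak memory needed: %.1f GiB" % (size * memuse / _1G))

so the compression tests want on the order of 3GB of free memory and the decompression
tests about 2GB.
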
Modified: python/branches/release31-maint/Misc/NEWS
==============================================================================
--- python/branches/release31-maint/Misc/NEWS	(original)
+++ python/branches/release31-maint/Misc/NEWS	Fri May 7 19:08:54 2010
@@ -40,6 +40,10 @@
 Library
 -------
 
+- Issue #8571: Fix an internal error when compressing or decompressing a
+  chunk larger than 1GB with the zlib module's compressor and decompressor
+  objects.
+
 - Issue #8573: asyncore _strerror() function might throw ValueError.
 
 - Issue #8483: asyncore.dispatcher's __getattr__ method produced confusing
Modified: python/branches/release31-maint/Modules/zlibmodule.c
==============================================================================
--- python/branches/release31-maint/Modules/zlibmodule.c	(original)
+++ python/branches/release31-maint/Modules/zlibmodule.c	Fri May 7 19:08:54 2010
@@ -396,7 +396,8 @@
 static PyObject *
 PyZlib_objcompress(compobject *self, PyObject *args)
 {
-    int err, inplen, length = DEFAULTALLOC;
+    int err, inplen;
+    Py_ssize_t length = DEFAULTALLOC;
     PyObject *RetVal;
     Py_buffer pinput;
     Byte *input;
@@ -477,8 +478,8 @@
 static PyObject *
 PyZlib_objdecompress(compobject *self, PyObject *args)
 {
-    int err, inplen, old_length, length = DEFAULTALLOC;
-    int max_length = 0;
+    int err, inplen, max_length = 0;
+    Py_ssize_t old_length, length = DEFAULTALLOC;
     PyObject *RetVal;
     Py_buffer pinput;
     Byte *input;
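
The C change above is the heart of the fix: the running output-buffer length is now a
Py_ssize_t instead of a C int. These functions grow their output buffer by doubling it, and
with a 32-bit signed int that doubling overflows once the buffer reaches 1GB, which is
consistent with the internal error described in the log message. A small illustration of
the arithmetic (assumes a 32-bit C int and the doubling strategy just described):

    # Why a C `int` length cannot represent the doubled buffer size
    INT_MAX = 2**31 - 1
    length = 1 << 30              # a 1 GiB output buffer about to be doubled
    assert length <= INT_MAX      # still fits in a signed 32-bit int
    assert length * 2 > INT_MAX   # the doubled size no longer does
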


More information about the Python-checkins mailing list
