[Python-checkins] python/dist/src/Lib/test test_zlib.py,1.25,1.26

nascheme at users.sourceforge.net nascheme at users.sourceforge.net
Sat Jun 5 15:34:30 EDT 2004


Update of /cvsroot/python/python/dist/src/Lib/test
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv28247/Lib/test
Modified Files:
	test_zlib.py 
Log Message:
Remove a number of tests that differ only in input data size. No bug
appears to have motivated their inclusion, and the chance of them
triggering a problem seems unlikely. Refactor to reduce code duplication.
Rename 'hamlet_scene' to 'HAMLET_SCENE'. Test is much faster now. Closes #960995.
Index: test_zlib.py
===================================================================
RCS file: /cvsroot/python/python/dist/src/Lib/test/test_zlib.py,v
retrieving revision 1.25
retrieving revision 1.26
diff -C2 -d -r1.25 -r1.26
*** test_zlib.py	5 Jun 2004 19:02:52 -0000	1.25
--- test_zlib.py	5 Jun 2004 19:34:28 -0000	1.26
***************
*** 75,97 ****
 # Test compression in one go (whole message compression)
 def test_speech(self):
! # decompress(compress(data)) better be data
! x = zlib.compress(hamlet_scene)
! self.assertEqual(zlib.decompress(x), hamlet_scene)
! 
! def test_speech8(self):
! # decompress(compress(data)) better be data -- more compression chances
! data = hamlet_scene * 8
! x = zlib.compress(data)
! self.assertEqual(zlib.decompress(x), data)
! 
! def test_speech16(self):
! # decompress(compress(data)) better be data -- more compression chances
! data = hamlet_scene * 16
! x = zlib.compress(data)
! self.assertEqual(zlib.decompress(x), data)
 
 def test_speech128(self):
! # decompress(compress(data)) better be data -- more compression chances
! data = hamlet_scene * 8 * 16
 x = zlib.compress(data)
 self.assertEqual(zlib.decompress(x), data)
--- 75,84 ----
 # Test compression in one go (whole message compression)
 def test_speech(self):
! x = zlib.compress(HAMLET_SCENE)
! self.assertEqual(zlib.decompress(x), HAMLET_SCENE)
 
 def test_speech128(self):
! # compress more data
! data = HAMLET_SCENE * 128
 x = zlib.compress(data)
 self.assertEqual(zlib.decompress(x), data)
***************
*** 102,121 ****
 class CompressObjectTestCase(unittest.TestCase):
 # Test compression object
- def test_pairsmall(self):
- # use compress object in straightforward manner, decompress w/ object
- data = hamlet_scene
- co = zlib.compressobj()
- x1 = co.compress(data)
- x2 = co.flush()
- self.assertRaises(zlib.error, co.flush) # second flush should not work
- dco = zlib.decompressobj()
- y1 = dco.decompress(x1 + x2)
- y2 = dco.flush()
- self.assertEqual(data, y1 + y2)
- 
 def test_pair(self):
! # straightforward compress/decompress objects, more compression
! data = hamlet_scene * 8 * 16
! co = zlib.compressobj(zlib.Z_BEST_COMPRESSION, zlib.DEFLATED)
 x1 = co.compress(data)
 x2 = co.flush()
--- 89,96 ----
 class CompressObjectTestCase(unittest.TestCase):
 # Test compression object
 def test_pair(self):
! # straightforward compress/decompress objects
! data = HAMLET_SCENE * 128
! co = zlib.compressobj()
 x1 = co.compress(data)
 x2 = co.flush()
***************
*** 134,147 ****
 strategy = zlib.Z_FILTERED
 co = zlib.compressobj(level, method, wbits, memlevel, strategy)
! x1 = co.compress(hamlet_scene)
 x2 = co.flush()
 dco = zlib.decompressobj(wbits)
 y1 = dco.decompress(x1 + x2)
 y2 = dco.flush()
! self.assertEqual(hamlet_scene, y1 + y2)
 
 def test_compressincremental(self):
 # compress object in steps, decompress object as one-shot
! data = hamlet_scene * 8 * 16
 co = zlib.compressobj()
 bufs = []
--- 109,122 ----
 strategy = zlib.Z_FILTERED
 co = zlib.compressobj(level, method, wbits, memlevel, strategy)
! x1 = co.compress(HAMLET_SCENE)
 x2 = co.flush()
 dco = zlib.decompressobj(wbits)
 y1 = dco.decompress(x1 + x2)
 y2 = dco.flush()
! self.assertEqual(HAMLET_SCENE, y1 + y2)
 
 def test_compressincremental(self):
 # compress object in steps, decompress object as one-shot
! data = HAMLET_SCENE * 128
 co = zlib.compressobj()
 bufs = []
***************
*** 156,166 ****
 self.assertEqual(data, y1 + y2)
 
! def test_decompressincremental(self):
 # compress object in steps, decompress object in steps
! data = hamlet_scene * 8 * 16
 co = zlib.compressobj()
 bufs = []
! for i in range(0, len(data), 256):
! bufs.append(co.compress(data[i:i+256]))
 bufs.append(co.flush())
 combuf = ''.join(bufs)
--- 131,142 ----
 self.assertEqual(data, y1 + y2)
 
! def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
 # compress object in steps, decompress object in steps
! source = source or HAMLET_SCENE
! data = source * 128
 co = zlib.compressobj()
 bufs = []
! for i in range(0, len(data), cx):
! bufs.append(co.compress(data[i:i+cx]))
 bufs.append(co.flush())
 combuf = ''.join(bufs)
***************
*** 170,267 ****
 dco = zlib.decompressobj()
 bufs = []
! for i in range(0, len(combuf), 128):
! bufs.append(dco.decompress(combuf[i:i+128]))
 self.assertEqual('', dco.unconsumed_tail, ########
 "(A) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
! bufs.append(dco.flush())
 self.assertEqual('', dco.unconsumed_tail, ########
! "(B) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
 self.assertEqual(data, ''.join(bufs))
 # Failure means: "decompressobj with init options failed"
 
! def test_decompinc(self,sizes=[128],flush=True,source=None,cx=256,dcx=64):
! # compress object in steps, decompress object in steps, loop sizes
! source = source or hamlet_scene
! for reps in sizes:
! data = source * reps
! co = zlib.compressobj()
! bufs = []
! for i in range(0, len(data), cx):
! bufs.append(co.compress(data[i:i+cx]))
! bufs.append(co.flush())
! combuf = ''.join(bufs)
! 
! self.assertEqual(data, zlib.decompress(combuf))
! 
! dco = zlib.decompressobj()
! bufs = []
! for i in range(0, len(combuf), dcx):
! bufs.append(dco.decompress(combuf[i:i+dcx]))
! self.assertEqual('', dco.unconsumed_tail, ########
! "(A) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
! if flush:
! bufs.append(dco.flush())
! else:
! while True:
! chunk = dco.decompress('')
! if chunk:
! bufs.append(chunk)
! else:
! break
! self.assertEqual('', dco.unconsumed_tail, ########
! "(B) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
! self.assertEqual(data, ''.join(bufs))
! # Failure means: "decompressobj with init options failed"
! 
! def test_decompimax(self,sizes=[128],flush=True,source=None,cx=256,dcx=64):
! # compress in steps, decompress in length-restricted steps, loop sizes
! source = source or hamlet_scene
! for reps in sizes:
! # Check a decompression object with max_length specified
! data = source * reps
! co = zlib.compressobj()
! bufs = []
! for i in range(0, len(data), cx):
! bufs.append(co.compress(data[i:i+cx]))
! bufs.append(co.flush())
! combuf = ''.join(bufs)
! self.assertEqual(data, zlib.decompress(combuf),
! 'compressed data failure')
! 
! dco = zlib.decompressobj()
! bufs = []
! cb = combuf
! while cb:
! #max_length = 1 + len(cb)//10
! chunk = dco.decompress(cb, dcx)
! self.failIf(len(chunk) > dcx,
! 'chunk too big (%d>%d)' % (len(chunk), dcx))
! bufs.append(chunk)
! cb = dco.unconsumed_tail
! if flush:
! bufs.append(dco.flush())
! else:
! while True:
! chunk = dco.decompress('', dcx)
! self.failIf(len(chunk) > dcx,
! 'chunk too big in tail (%d>%d)' % (len(chunk), dcx))
! if chunk:
! bufs.append(chunk)
! else:
! break
! self.assertEqual(len(data), len(''.join(bufs)))
! self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')
 
! def test_decompressmaxlen(self):
 # Check a decompression object with max_length specified
! data = hamlet_scene * 8 * 16
 co = zlib.compressobj()
 bufs = []
! for i in range(0, len(data), 256):
! bufs.append(co.compress(data[i:i+256]))
 bufs.append(co.flush())
 combuf = ''.join(bufs)
--- 146,181 ----
 dco = zlib.decompressobj()
 bufs = []
! for i in range(0, len(combuf), dcx):
! bufs.append(dco.decompress(combuf[i:i+dcx]))
 self.assertEqual('', dco.unconsumed_tail, ########
 "(A) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
! if flush:
! bufs.append(dco.flush())
! else:
! while True:
! chunk = dco.decompress('')
! if chunk:
! bufs.append(chunk)
! else:
! break
 self.assertEqual('', dco.unconsumed_tail, ########
! "(B) uct should be '': not %d long" %
! len(dco.unconsumed_tail))
 self.assertEqual(data, ''.join(bufs))
 # Failure means: "decompressobj with init options failed"
 
! def test_decompincflush(self):
! self.test_decompinc(flush=True)
 
! def test_decompimax(self, source=None, cx=256, dcx=64):
! # compress in steps, decompress in length-restricted steps
! source = source or HAMLET_SCENE
 # Check a decompression object with max_length specified
! data = source * 128
 co = zlib.compressobj()
 bufs = []
! for i in range(0, len(data), cx):
! bufs.append(co.compress(data[i:i+cx]))
 bufs.append(co.flush())
 combuf = ''.join(bufs)
***************
*** 273,290 ****
 cb = combuf
 while cb:
! max_length = 1 + len(cb)//10
! chunk = dco.decompress(cb, max_length)
! self.failIf(len(chunk) > max_length,
! 'chunk too big (%d>%d)' % (len(chunk),max_length))
 bufs.append(chunk)
 cb = dco.unconsumed_tail
 bufs.append(dco.flush())
- self.assertEqual(len(data), len(''.join(bufs)))
 self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')
 
! def test_decompressmaxlenflushless(self):
! # identical to test_decompressmaxlen except flush is replaced
! # with an equivalent. This works and other fails on (eg) 2.2.2
! data = hamlet_scene * 8 * 16
 co = zlib.compressobj()
 bufs = []
--- 187,202 ----
 cb = combuf
 while cb:
! #max_length = 1 + len(cb)//10
! chunk = dco.decompress(cb, dcx)
! self.failIf(len(chunk) > dcx,
! 'chunk too big (%d>%d)' % (len(chunk), dcx))
 bufs.append(chunk)
 cb = dco.unconsumed_tail
 bufs.append(dco.flush())
 self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')
 
! def test_decompressmaxlen(self, flush=False):
! # Check a decompression object with max_length specified
! data = HAMLET_SCENE * 128
 co = zlib.compressobj()
 bufs = []
***************
*** 294,298 ****
 combuf = ''.join(bufs)
 self.assertEqual(data, zlib.decompress(combuf),
! 'compressed data mismatch')
 
 dco = zlib.decompressobj()
--- 206,210 ----
 combuf = ''.join(bufs)
 self.assertEqual(data, zlib.decompress(combuf),
! 'compressed data failure')
 
 dco = zlib.decompressobj()
***************
*** 306,319 ****
 bufs.append(chunk)
 cb = dco.unconsumed_tail
! 
! #bufs.append(dco.flush())
! while len(chunk):
! chunk = dco.decompress('', max_length)
! self.failIf(len(chunk) > max_length,
! 'chunk too big (%d>%d)' % (len(chunk),max_length))
! bufs.append(chunk)
! 
 self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')
 
 def test_maxlenmisc(self):
 # Misc tests of max_length
--- 218,234 ----
 bufs.append(chunk)
 cb = dco.unconsumed_tail
! if flush:
! bufs.append(dco.flush())
! else:
! while chunk:
! chunk = dco.decompress('', max_length)
! self.failIf(len(chunk) > max_length,
! 'chunk too big (%d>%d)' % (len(chunk),max_length))
! bufs.append(chunk)
 self.assertEqual(data, ''.join(bufs), 'Wrong data retrieved')
 
+ def test_decompressmaxlenflush(self):
+ self.test_decompressmaxlen(flush=True)
+ 
 def test_maxlenmisc(self):
 # Misc tests of max_length
***************
*** 328,332 ****
 sync_opt = [getattr(zlib, opt) for opt in sync_opt
 if hasattr(zlib, opt)]
! data = hamlet_scene * 8
 
 for sync in sync_opt:
--- 243,247 ----
 sync_opt = [getattr(zlib, opt) for opt in sync_opt
 if hasattr(zlib, opt)]
! data = HAMLET_SCENE * 8
 
 for sync in sync_opt:
***************
*** 350,354 ****
 
 # Create compressor and decompressor objects
! co = zlib.compressobj(9)
 dco = zlib.decompressobj()
 
--- 265,269 ----
 
 # Create compressor and decompressor objects
! co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
 dco = zlib.decompressobj()
 
***************
*** 376,396 ****
 self.assertEqual(expanded, data, "17K random source doesn't match")
 
- def test_manydecompinc(self):
- # Run incremental decompress test for a large range of sizes
- self.test_decompinc(sizes=[1<<n for n in range(8)],
- flush=True, cx=32, dcx=4)
- 
- def test_manydecompimax(self):
- # Run incremental decompress maxlen test for a large range of sizes
- # avoid the flush bug
- self.test_decompimax(sizes=[1<<n for n in range(8)],
- flush=False, cx=32, dcx=4)
- 
- def test_manydecompimaxflush(self):
- # Run incremental decompress maxlen test for a large range of sizes
- # avoid the flush bug
- self.test_decompimax(sizes=[1<<n for n in range(8)],
- flush=True, cx=32, dcx=4)
- 
 
 def genblock(seed, length, step=1024, generator=random):
--- 291,294 ----
***************
*** 418,422 ****
 
 
! hamlet_scene = """
 LAERTES
 
--- 316,320 ----
 
 
! HAMLET_SCENE = """
 LAERTES
 


More information about the Python-checkins mailing list

AltStyle によって変換されたページ (->オリジナル) /