Mercurial > ~astiob > upreckon > hgweb
view zipfiles/zipfile321.diff @ 199:1de2ea435d93
The default match mode is now 'literal'
I must have mixed something up when I wrote '$%', because dollar
and percent signs are not replaced in the 'tests' and 'dummies'
configuration variables.
author | Oleg Oshmyan <chortos@inbox.lv> |
---|---|
date | Tue, 16 Aug 2011 23:05:03 +0300 |
parents | 8196d2c0d6f8 |
children |
line wrap: on
line source
--- /usr/local/lib/python3.2/zipfile.py 2011-07-14 16:08:50.000000000 +0300 +++ zipfile32.py 2011-07-14 18:31:32.000000000 +0300 @@ -22,8 +22,14 @@ zlib = None crc32 = binascii.crc32 +try: + import bz2 # We may need its compression method +except ImportError: + bz2 = None + __all__ = ["BadZipFile", "BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", - "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile"] + "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", + "ZIP_BZIP2"] class BadZipFile(Exception): pass @@ -45,6 +51,7 @@ class LargeZipFile(Exception): # constants for Zip file compression methods ZIP_STORED = 0 ZIP_DEFLATED = 8 +ZIP_BZIP2 = 12 # Other ZIP compression methods not supported # Below are some formats and associated data for reading/writing headers using @@ -483,6 +490,9 @@ def __init__(self, fileobj, mode, zi if self._compress_type == ZIP_DEFLATED: self._decompressor = zlib.decompressobj(-15) + elif self._compress_type == ZIP_BZIP2: + self._decompressor = bz2.BZ2Decompressor() + self.MIN_READ_SIZE = 900000 self._unconsumed = b'' self._readbuffer = b'' @@ -641,6 +651,20 @@ def read1(self, n): self._update_crc(data, eof=eof) self._readbuffer = self._readbuffer[self._offset:] + data self._offset = 0 + elif (len(self._unconsumed) > 0 and n > len_readbuffer and + self._compress_type == ZIP_BZIP2): + try: + data = self._decompressor.decompress(self._unconsumed) + except EOFError: + eof = self._compress_left + data = b'' + else: + eof = False + self._unconsumed = b'' + + self._update_crc(data, eof=eof) + self._readbuffer = self._readbuffer[self._offset:] + data + self._offset = 0 # Read from buffer. data = self._readbuffer[self._offset: self._offset + n] @@ -663,7 +687,8 @@ class ZipFile: file: Either the path to the file, or a file-like object. If it is a path, the file will be opened and closed by ZipFile. mode: The mode can be either read "r", write "w" or append "a". 
- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). + compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib) + or ZIP_BZIP2 (requires bz2). allowZip64: if True ZipFile will create files with ZIP64 extensions when needed, otherwise it will raise an exception when this would be necessary. @@ -683,6 +708,10 @@ def __init__(self, file, mode="r", c if not zlib: raise RuntimeError( "Compression requires the (missing) zlib module") + elif compression == ZIP_BZIP2: + if not bz2: + raise RuntimeError( + "Compression requires the (missing) bz2 module") else: raise RuntimeError("That compression method is not supported") @@ -1051,7 +1080,10 @@ def _writecheck(self, zinfo): if zinfo.compress_type == ZIP_DEFLATED and not zlib: raise RuntimeError( "Compression requires the (missing) zlib module") - if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): + if zinfo.compress_type == ZIP_BZIP2 and not bz2: + raise RuntimeError( + "Compression requires the (missing) bz2 module") + if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): raise RuntimeError("That compression method is not supported") if zinfo.file_size > ZIP64_LIMIT: if not self._allowZip64: @@ -1112,6 +1144,8 @@ def write(self, filename, arcname=No if zinfo.compress_type == ZIP_DEFLATED: cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15) + elif zinfo.compress_type == ZIP_BZIP2: + cmpr = bz2.BZ2Compressor() else: cmpr = None while 1: @@ -1175,6 +1209,10 @@ def writestr(self, zinfo_or_arcname, zlib.DEFLATED, -15) data = co.compress(data) + co.flush() zinfo.compress_size = len(data) # Compressed size + elif zinfo.compress_type == ZIP_BZIP2: + co = bz2.BZ2Compressor() + data = co.compress(data) + co.flush() + zinfo.compress_size = len(data) # Compressed size else: zinfo.compress_size = zinfo.file_size zinfo.header_offset = self.fp.tell() # Start of header data