view zipfiles/zipfile272.diff @ 246:1bc89faac941 2.04
Fixed: match='re' could produce duplicate test identifiers
files.Files.regexp(pattern) now makes sure to return only one
metafile for each matching virtual path, namely, the one that would
be returned for that virtual path by files.Files.from_virtual_path.
author    Oleg Oshmyan <chortos@inbox.lv>
date      Thu, 03 Oct 2013 01:19:09 +0300
parents   b993d9257400
children  (none)
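The deduplication described in the commit message can be illustrated with a short sketch. This is purely hypothetical Python: upreckon's real files.Files API is not reproduced here, and the helpers virtual_path and canonical_metafile stand in for whatever from_virtual_path-based lookup the module actually performs. The idea is that a regular-expression match may hit several metafiles that map to the same virtual path, and only the canonical one is returned.

import re

def regexp_matches(metafiles, pattern, virtual_path, canonical_metafile):
    # Hypothetical helpers (not upreckon's real API):
    #   virtual_path(metafile)    -> virtual path exposed by a metafile
    #   canonical_metafile(vpath) -> metafile that from_virtual_path would return
    compiled = re.compile(pattern)
    seen = set()
    for metafile in metafiles:
        vpath = virtual_path(metafile)
        if compiled.match(vpath) and vpath not in seen:
            seen.add(vpath)
            # Yield the canonical metafile for this virtual path rather than
            # whichever matching metafile was encountered first, so each
            # test identifier appears only once.
            yield canonical_metafile(vpath)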
line source
--- /usr/local/lib/python2.7/zipfile.py	2011-06-15 13:20:07.000000000 +0100
+++ zipfile27.py	2011-06-03 20:20:40.000000000 +0100
@@ -1,6 +1,7 @@
 """
 Read and write ZIP files.
 """
+# Improved by Chortos-2 in 2010 (added bzip2 support)
 import struct, os, time, sys, shutil
 import binascii, cStringIO, stat
 import io
@@ -13,8 +14,13 @@
     zlib = None
     crc32 = binascii.crc32
 
+try:
+    import bz2 # We may need its compression method
+except ImportError:
+    bz2 = None
+
 __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
-           "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
+           "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ]
 
 class BadZipfile(Exception):
     pass
@@ -35,6 +41,7 @@ class LargeZipFile(Exception):
 # constants for Zip file compression methods
 ZIP_STORED = 0
 ZIP_DEFLATED = 8
+ZIP_BZIP2 = 12
 # Other ZIP compression methods not supported
 
 # Below are some formats and associated data for reading/writing headers using
@@ -483,6 +490,9 @@ def __init__(self, fileobj, mode, zi
 
         if self._compress_type == ZIP_DEFLATED:
             self._decompressor = zlib.decompressobj(-15)
+        elif self._compress_type == ZIP_BZIP2:
+            self._decompressor = bz2.BZ2Decompressor()
+            self.MIN_READ_SIZE = 900000
         self._unconsumed = ''
 
         self._readbuffer = ''
@@ -641,6 +651,13 @@ def read1(self, n):
             self._update_crc(data, eof=eof)
             self._readbuffer = self._readbuffer[self._offset:] + data
             self._offset = 0
+        elif (len(self._unconsumed) > 0 and n > len_readbuffer and
+            self._compress_type == ZIP_BZIP2):
+            data = self._decompressor.decompress(self._unconsumed)
+
+            self._unconsumed = ''
+            self._readbuffer = self._readbuffer[self._offset:] + data
+            self._offset = 0
 
         # Read from buffer.
         data = self._readbuffer[self._offset: self._offset + n]
@@ -657,7 +674,8 @@ class ZipFile:
     file: Either the path to the file, or a file-like object.
           If it is a path, the file will be opened and closed by ZipFile.
     mode: The mode can be either read "r", write "w" or append "a".
-    compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
+    compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
+                 or ZIP_BZIP2 (requires bz2).
     allowZip64: if True ZipFile will create files with ZIP64 extensions when
                 needed, otherwise it will raise an exception when this would
                 be necessary.
@@ -677,6 +695,10 @@ def __init__(self, file, mode="r", c
             if not zlib:
                 raise RuntimeError,\
                       "Compression requires the (missing) zlib module"
+        elif compression == ZIP_BZIP2:
+            if not bz2:
+                raise RuntimeError,\
+                      "Compression requires the (missing) bz2 module"
         else:
             raise RuntimeError, "That compression method is not supported"
 
@@ -1011,7 +1033,10 @@ def _writecheck(self, zinfo):
         if zinfo.compress_type == ZIP_DEFLATED and not zlib:
             raise RuntimeError, \
                   "Compression requires the (missing) zlib module"
-        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
+        if zinfo.compress_type == ZIP_BZIP2 and not bz2:
+            raise RuntimeError, \
+                  "Compression requires the (missing) bz2 module"
+        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2):
             raise RuntimeError, \
                   "That compression method is not supported"
         if zinfo.file_size > ZIP64_LIMIT:
@@ -1072,6 +1097,8 @@ def write(self, filename, arcname=No
             if zinfo.compress_type == ZIP_DEFLATED:
                 cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                      zlib.DEFLATED, -15)
+            elif zinfo.compress_type == ZIP_BZIP2:
+                cmpr = bz2.BZ2Compressor()
             else:
                 cmpr = None
             while 1:
@@ -1132,6 +1159,10 @@ def writestr(self, zinfo_or_arcname,
                  zlib.DEFLATED, -15)
             bytes = co.compress(bytes) + co.flush()
             zinfo.compress_size = len(bytes)    # Compressed size
+        elif zinfo.compress_type == ZIP_BZIP2:
+            co = bz2.BZ2Compressor()
+            bytes = co.compress(bytes) + co.flush()
+            zinfo.compress_size = len(bytes)    # Compressed size
         else:
             zinfo.compress_size = zinfo.file_size
         zinfo.header_offset = self.fp.tell()    # Start of header bytes
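For reference, a minimal usage sketch of the patched module. It assumes the patched file is importable as zipfile27 (the target name in the diff header above); stock Python 2.7 rejects ZIP_BZIP2, while Python 3.3+ supports it natively without this patch.

import zipfile27 as zipfile  # assumed module name for the patched zipfile.py

# Write an archive whose members use bzip2 compression (method 12).
zf = zipfile.ZipFile('example.zip', 'w', zipfile.ZIP_BZIP2)
zf.writestr('hello.txt', 'hello, bzip2-compressed world\n' * 100)
zf.close()

# Read it back; ZipExtFile selects bz2.BZ2Decompressor() from compress_type.
zf = zipfile.ZipFile('example.zip', 'r')
print zf.read('hello.txt')[:30]
zf.close()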