# HG changeset patch # User Oleg Oshmyan # Date 1292793804 -7200 # Node ID 245150080c4821dbde22116b6b221287fb2a3cd9 # Parent 06f1683c8db9d466774476ec0333e2d96af23af7 Converted 1.20 into a branch diff -r 06f1683c8db9 -r 245150080c48 1.20/test.py --- a/1.20/test.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,899 +0,0 @@ -#! /usr/bin/python -# Copyright (c) 2009, 2010 Chortos-2 - -import os, sys, shutil, time, subprocess, filecmp, optparse, signal, tempfile, tarfile, zipfile - -parser = optparse.OptionParser(version='test.py 1.20.3', usage='usage: %prog [options] [problem names] [[path/to/]solution-app] [test case numbers]\n\nTest case numbers can be specified in plain text or as a Python expression\nif there is only one positional argument.\n\nOnly problem names listed in testconf.py are recognized.') -parser.add_option('-e', '--exclude', dest='exclude', action='append', help='test case number(s) to exclude, as a Python expression; multiple -e options can be supplied') -parser.add_option('-c', '--cleanup', dest='clean', action='store_true', default=False, help='delete the copies of input/output files and exit') -parser.add_option('-s', '--save-io', dest='erase', action='store_false', default=True, help='do not delete the copies of input/output files after the last test case; create copies of input files and store output in files even if the solution uses standard I/O; delete the stored input/output files if the solution uses standard I/O and the -c/--cleanup option is specified') -parser.add_option('-m', '--copy-io', dest='copyonly', action='store_true', default=False, help='only create a copy of the input/output files of the last test case for manual testing; to delete them, use options -cs') -parser.add_option('-x', '--auto-exit', dest='pause', action='store_false', default=True, help='do not wait for a key to be pressed when finished testing') -parser.add_option('-p', '--python', action='store_true', default=False, help='always parse all positional arguments as a single Python expression (including the first argument even if it names an executable file)') -parser.add_option('-t', '--detect-time', dest='autotime', action='store_true', default=False, help='spend a second detecting the most precise time measurement function') - -options, args = parser.parse_args() -parser.destroy() -del parser - -globals1 = set(globals()) - -# Initialize some configuration variables with default values -tasknames = ('.',) -maxtime = 0 -tests = () -dummies = () -testsexcluded = () -padwithzeroestolength = 0 -taskweight = 100 -pointmap = {} -stdio = False -dummyinname = '' -dummyoutname = '' -tester = '' - -def exectestconf_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - exec f.open(name, 'rU').read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - exec f.extractfile(name).read() in globals() - f.close() - 
return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - return False - -try: - execfile('testconf.py') -except IOError, error: - exc_info = sys.exc_info()[2] - try: - execfile('tests/testconf.py') - except IOError: - if not exectestconf_helper('testconf.py'): - raise IOError, (error.errno, 'The configuration file is missing', error.filename), exc_info - del exc_info - -globals2 = set(globals()) -globals2.remove('globals1') -globals2 -= globals1 -del globals1 - -shared = {} -g = globals() -for k in globals2: - shared[k] = g[k] - -newtasknames = [] -while len(args) and args[0] in tasknames: - newtasknames.append(args[0]) - del args[0] -if len(newtasknames): - tasknames = newtasknames - -scoresumoveralltasks = 0 -scoremaxoveralltasks = 0 -ntasks = 0 -nfulltasks = 0 -cwd = '' # At any time this is either '' or taskname + '/' - -if options.autotime: - c = time.clock() - time.sleep(1) - c = time.clock() - c - if int(c + .99999) == 1: - clock = time.clock - else: - clock = time.time -elif os.name == 'nt': - clock = time.clock -else: - clock = time.time - -if options.copyonly: - options.erase = False - -def existstestcase_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - f.getinfo(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - return False - -def existstestcase(name): - if os.path.isfile('tests/' + taskname + '/' + name) or os.path.isfile('tests/' + name): - return True - if cwd and (os.path.isfile(oldcwd + '/tests/' + cwd + name) or os.path.isfile(oldcwd + '/tests/' + name)): - return True - if existstestcase_helper(taskname + '/' + name) or existstestcase_helper(name): - return True - if cwd: - os.chdir(oldcwd) - if existstestcase_helper(cwd + name) or existstestcase_helper(name): - os.chdir(cwd) - return True - os.chdir(cwd) - return False - -def opentestcase_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - c = f.open(name, 'rU') - f.close() - return c - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if 
os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - return None - -def opentestcase(name): - if os.path.isfile('tests/' + taskname + '/' + name): - return open('tests/' + taskname + '/' + name, 'rU') - elif os.path.isfile('tests/' + name): - return open('tests/' + name, 'rU') - f = opentestcase_helper(taskname + '/' + name) - if not f: - f = opentestcase_helper(name) - if f: - return f - if cwd: - if os.path.isfile(oldcwd + '/tests/' + cwd + name): - return open(oldcwd + '/tests/' + cwd + name, 'rU') - elif os.path.isfile(oldcwd + '/tests/' + name): - return open(oldcwd + '/tests/' + name, 'rU') - os.chdir(oldcwd) - f = opentestcase_helper(cwd + name) - if not f: - f = opentestcase_helper(name) - os.chdir(cwd) - if f: - return f - raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' - -def copytestcase_helper(name, target): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - if not target.startswith('/'): - f = zipfile.ZipFile('tests.zip') - try: - m = f.getinfo(name) - m.filename = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - else: - oldcwd = os.getcwdu() - os.chdir('/') - f = zipfile.ZipFile(oldcwd + '/tests.zip') - try: - m = f.getinfo(name) - m.filename = os.path.relpath(target) - f.extract(m) - f.close() - os.chdir(oldcwd) - return True - except KeyError: - f.close() - os.chdir(oldcwd) - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - return False - -def copytestcase(name, target): - if os.path.isfile('tests/' + taskname + '/' + name): - shutil.copyfile('tests/' + taskname + '/' + name, target) - return - elif os.path.isfile('tests/' + name): - shutil.copyfile('tests/' + name, target) - return - if copytestcase_helper(taskname + '/' + name, target) or copytestcase_helper(name, target): - return - if cwd: - if os.path.isfile(oldcwd + '/tests/' + cwd + name): - shutil.copyfile(oldcwd + '/tests/' + cwd + name, target) - return - elif os.path.isfile(oldcwd + '/tests/' + name): - shutil.copyfile(oldcwd + '/tests/' + name, target) - return - os.chdir(oldcwd) - if copytestcase_helper(cwd + name, target) or copytestcase_helper(name, target): - os.chdir(cwd) - return - os.chdir(cwd) - raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' - -# Always chdir if the directory exists but use any existing config -def chdir_and_exec_testconf(): - global cwd - cwd = '' - if os.path.isdir(taskname): - os.chdir(taskname) - if taskname != '.': - cwd = taskname + '/' - try: - execfile('testconf.py', globals()) - return - except IOError: - pass - if not cwd: 
- if os.path.isfile('tests/' + taskname + '/testconf.py'): - execfile('tests/' + taskname + '/testconf.py', globals()) - return - if os.path.isfile('tests/testconf.py'): - execfile('tests/testconf.py', globals()) - return - if exectestconf_helper(taskname + '/testconf.py') or exectestconf_helper('testconf.py'): - return - if cwd: - os.chdir(oldcwd) - if os.path.isfile('tests/' + cwd + 'testconf.py'): - execfile('tests/' + cwd + 'testconf.py', globals()) - os.chdir(cwd) - return - if os.path.isfile('tests/testconf.py'): - execfile('tests/testconf.py', globals()) - os.chdir(cwd) - return - if exectestconf_helper(cwd + 'testconf.py') or exectestconf_helper('testconf.py'): - os.chdir(cwd) - return - if os.path.isfile('testconf.py'): - execfile('testconf.py', globals()) - os.chdir(cwd) - return - os.chdir(cwd) - elif os.path.isfile('testconf.py'): - execfile('testconf.py', globals()) - return - raise KeyError, 'The configuration file for task ' + taskname + ' is missing' - -try: - name - namedefined = True -except Exception: - namedefined = False - -for taskname in tasknames: - if ntasks: - print - - try: - if len(tasknames) > 1: - print taskname - except Exception: - if taskname != '.' or ntasks: - print taskname - - try: del inname - except NameError: pass - try: del outname - except NameError: pass - try: del ansname - except NameError: pass - - if not namedefined and taskname != '.': - name = './' + taskname - for k in shared: - g[k] = shared[k] - - oldcwd = os.getcwdu() - chdir_and_exec_testconf() - - if options.clean: - try: - if not stdio or tester: - if not tester: - inname - outname - if tester: - ansname - except NameError, error: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - if not options.erase: - try: - inname = inname.replace('%', taskname) - except NameError: - inname = taskname + '.in' - try: - outname = outname.replace('%', taskname) - except NameError: - outname = taskname + '.out' - try: - ansname = ansname.replace('%', taskname) - except NameError: - ansname = taskname + '.ans' - else: - inname = inname.replace('%', taskname) - outname = outname.replace('%', taskname) - if tester: - ansname = ansname.replace('%', taskname) - if not stdio or tester or not options.erase: - if os.path.exists(inname): os.remove(inname) - if os.path.exists(outname): os.remove(outname) - if (tester or not options.erase) and ansname: - if os.path.exists(ansname): os.remove(ansname) - continue - - try: - name - except NameError, error: - if str(error).count('name') == 1: - raise NameError, 'configuration ' + str(error), sys.exc_info()[2] - else: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - - try: - if not stdio: - inname - outname - testcaseinname - if tester: - outname - if ansname: - testcaseoutname - else: - testcaseoutname - except NameError, error: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - - if not options.erase: - try: - inname - except NameError: - inname = taskname + '.in' - try: - outname - except NameError: - outname = taskname + '.out' - try: - ansname - except NameError: - ansname = taskname + '.ans' - - if options.pause: - try: - pause - except NameError, error: - if os.name == 'posix': - pause = 'read -s -n 1' - print 'Configuration ' + str(error).replace('name ', 'variable ') + '; it was devised automatically but the choice might be incorrect, so test.py might exit immediately after the testing is complete.' 
- elif os.name == 'nt': - pause = 'pause' - else: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ') + ' and cannot be devised automatically', sys.exc_info()[2] - - if not dummyinname: - dummyinname = testcaseinname - if not dummyoutname and (not tester or ansname): - dummyoutname = testcaseoutname - - dummyinname = dummyinname.replace('%', taskname) - dummyoutname = dummyoutname.replace('%', taskname) - testcaseinname = testcaseinname.replace('%', taskname) - if not stdio or not options.erase: - inname = inname.replace('%', taskname) - outname = outname.replace('%', taskname) - try: - ansname = ansname.replace('%', taskname) - except NameError: - pass - if tester: - try: inname = inname.replace('%', taskname) - except NameError: pass - outname = outname.replace('%', taskname) - if ansname: - ansname = ansname.replace('%', taskname) - testcaseoutname = testcaseoutname.replace('%', taskname) - else: - testcaseoutname = testcaseoutname.replace('%', taskname) - - if isinstance(padwithzeroestolength, tuple): - padwithzeroestolength, paddummieswithzeroestolength = padwithzeroestolength - else: - paddummieswithzeroestolength = padwithzeroestolength - - if options.python: - dummies = () - s = ' '.join(args) - tests = eval(s) - try: - tests.__iter__ - except AttributeError: - tests = (tests,) - elif len(args): - if os.path.exists(args[0]): - name = args[0] - del args[0] - if len(args) > 1: - dummies = () - tests = args - elif len(args): - dummies = () - s = args[0] - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - if existstestcase(testcaseinname.replace('$', s)): - tests = (s,) - else: - try: - tests = eval(args[0]) - try: - tests.__iter__ - except AttributeError: - tests = (tests,) - except Exception: - tests = (s,) - - if options.exclude: - testsexcluded = [] - for i in options.exclude: - v = eval(i) - try: - testsexcluded.extend(v) - except TypeError: - testsexcluded.append(v) - - # Windows doesn't like paths beginning with .\ and not ending with an extension - name = os.path.normcase(name) - if name.startswith('.\\'): - name = name[2:] - - newpointmap = {} - - for i in pointmap: - try: - for j in i: - newpointmap[j] = pointmap[i] - except TypeError: - newpointmap[i] = pointmap[i] - - pointmap = newpointmap - - if maxtime > 0: - strmaxtime = '/%.3f' % maxtime - else: - strmaxtime = '' - - padoutputtolength = 0 - ntests = [] - - for j in dummies: - try: - j.__iter__ - except AttributeError: - j = (j,) - ntests.append((j, True)) - for i in j: - s = str(i) - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - s = 'sample ' + s - if padoutputtolength < len(s): - padoutputtolength = len(s) - - for j in tests: - try: - j.__iter__ - except AttributeError: - j = (j,) - ntests.append((j, False)) - for i in j: - s = str(i) - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - if padoutputtolength < len(s): - padoutputtolength = len(s) - - tests = ntests - score = maxpoints = ncorrect = ntotal = ncorrectvalued = nvalued = 0 - - if options.copyonly: - j, isdummy = tests[-1] - if isdummy: - realinname = dummyinname - realoutname = dummyoutname - else: - realinname = testcaseinname - realoutname = testcaseoutname - for i in j: - if i in testsexcluded and not isdummy: - continue - s = str(i) - if isdummy: - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - else: - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - 
copytestcase(realinname.replace('$', s), inname) - if ansname: - copytestcase(realoutname.replace('$', s), ansname) - continue - - for j, isdummy in tests: - ncorrectgrp = 0 - ntotalgrp = 0 - scoregrp = 0 - maxpointsgrp = 0 - if isdummy: - realinname = dummyinname - realoutname = dummyoutname - else: - realinname = testcaseinname - realoutname = testcaseoutname - for i in j: - if i in testsexcluded and not isdummy: - continue - ntotalgrp += 1 - s = str(i) - if isdummy: - npoints = 0 - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - spref = 'sample ' - else: - npoints = pointmap.get(None, 1) - npoints = pointmap.get(i, npoints) - maxpointsgrp += npoints - if npoints: - nvalued += 1 - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - spref = '' - print ' ' * (padoutputtolength - len(spref + s)) + spref + s + ':', - sys.stdout.flush() - outputdata = open(os.devnull, 'w') - if stdio: - f = tempfile.NamedTemporaryFile(delete=False) - inputdatafname = f.name - f.close() - copytestcase(realinname.replace('$', s), inputdatafname) - inputdata = open(inputdatafname, 'rU') - if options.erase: - tempoutput = tempfile.TemporaryFile('w+') - else: - tempoutput = open(outname, 'w+') - try: - proc = subprocess.Popen(name, stdin=inputdata, stdout=tempoutput, stderr=outputdata, universal_newlines=True) - except OSError, error: - raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] - else: - if os.path.exists(outname): - os.remove(outname) - copytestcase(realinname.replace('$', s), inname) - try: - proc = subprocess.Popen(name, stdin=outputdata, stdout=outputdata, stderr=outputdata, universal_newlines=True) - except OSError, error: - raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] - cl = clock() - if maxtime > 0: - while 1: - proc.poll() - elapsed = clock() - cl - if proc.returncode == None: - if elapsed >= maxtime: - print '%.3f%s s, 0/%d, time limit exceeded' % (elapsed, strmaxtime, npoints) - sys.stdout.flush() - while proc.returncode == None: - try: - proc.terminate() - except OSError: - pass - except AttributeError: - try: - os.kill(proc.pid, signal.SIGTERM) - except Exception: - pass - proc.poll() - outputdata.close() - if stdio: - tempoutput.close() - break - else: - print '%.3f%s s,' % (elapsed, strmaxtime), - sys.stdout.flush() - elapsed = 0 - if stdio: - tempoutput.seek(0) - lines = tempoutput.readlines() - tempoutput.close() - break - if elapsed >= maxtime: - continue - else: - data = proc.communicate() - elapsed = clock() - cl - print '%.3f%s s,' % (elapsed, strmaxtime), - sys.stdout.flush() - if stdio: - tempoutput.seek(0) - lines = tempoutput.readlines() - tempoutput.close() - outputdata.close() - if stdio: - inputdata.close() - try: - os.unlink(inputdatafname) - except Exception: - pass - if proc.returncode > 0: - print '0/%d, non-zero return code %d' % (npoints, proc.returncode) - sys.stdout.flush() - elif proc.returncode < 0: - print '0/%d, terminated by signal %d' % (npoints, -proc.returncode) - sys.stdout.flush() - else: - if not tester: - if stdio: - outputdata = opentestcase(realoutname.replace('$', s)) - r = 0 - data = outputdata.read().splitlines(True) - if len(lines) != len(data): - r = 1 - else: - for i in zip(lines, data): - if i[0] != i[1]: - r = 1 - break - outputdata.close() - else: - try: - inputdata = open(outname, 'rU') - except IOError: - print '0/%g, output file not created or not readable' % npoints - sys.stdout.flush() - r 
= None - else: - outputdata = opentestcase(realoutname.replace('$', s)) - r = 0 - lines = inputdata.readlines() - data = outputdata.read().splitlines(True) - if len(lines) != len(data): - r = 1 - else: - for i in zip(lines, data): - if i[0] != i[1]: - r = 1 - break - inputdata.close() - outputdata.close() - else: - if ansname: - copytestcase(realoutname.replace('$', s), ansname) - if stdio: - try: copytestcase(realinname.replace('$', s), inname) - except NameError: pass - outputdata = open(outname, 'w') - outputdata.writelines(lines) - outputdata.close() - try: - proc = subprocess.Popen(tester, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) - except OSError, error: - raise OSError, 'The tester application cannot be launched: ' + str(error), sys.exc_info()[2] - data = proc.communicate() - r = proc.returncode - if tester and data[0]: - data = ''.join((' (', data[0].strip(), ')')) - else: - data = '' - if r: - print '0/%g, wrong answer%s' % (npoints, data) - sys.stdout.flush() - elif r == 0: - print '%g/%g, OK%s' % (npoints, npoints, data) - sys.stdout.flush() - scoregrp += npoints - ncorrectgrp += 1 - if npoints: - ncorrectvalued += 1 - if ntotalgrp: - if scoregrp < maxpointsgrp: - scoregrp = 0 - if ntotalgrp > 1: - print 'Group total: %d/%d tests; %g/%g points' % (ncorrectgrp, ntotalgrp, scoregrp, maxpointsgrp) - sys.stdout.flush() - ncorrect += ncorrectgrp - ntotal += ntotalgrp - score += scoregrp - maxpoints += maxpointsgrp - - if options.erase: - if not stdio or tester: - if os.path.exists(inname): os.remove(inname) - if os.path.exists(outname): os.remove(outname) - if tester and ansname: - if os.path.exists(ansname): os.remove(ansname) - elif stdio: - copytestcase(realinname.replace('$', s), inname) - copytestcase(realoutname.replace('$', s), ansname) - if nvalued != ntotal: - print 'Grand total: %d/%d tests (%d/%d valued); %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, ncorrectvalued, nvalued, score, maxpoints, (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0, taskweight) - else: - print 'Grand total: %d/%d tests; %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, score, maxpoints, (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0, taskweight) - - scoresumoveralltasks += (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0 - scoremaxoveralltasks += taskweight - ntasks += 1 - nfulltasks += int((score == maxpoints) if maxpoints else (taskweight == 0)) - - os.chdir(oldcwd) - -if options.clean or options.copyonly: - sys.exit() - -if ntasks != 1: - print - print 'Grand grand total: %g/%g weighted points; %d/%d problems solved fully' % (scoresumoveralltasks, scoremaxoveralltasks, nfulltasks, ntasks) - -if options.pause: - print 'Press any key to exit... ', - sys.stdout.flush() - os.system(pause + ' >' + os.devnull) diff -r 06f1683c8db9 -r 245150080c48 2.00/compat.py --- a/2.00/compat.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,218 +0,0 @@ -#! /usr/bin/env python -# Copyright (c) 2010 Chortos-2 - -# A compatibility layer for Python 2.5+. This is what lets test.py -# run on all versions of Python starting with 2.5, including Python 3. 
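For instance, a module written against this layer can be expected to look roughly like the following minimal sketch (the settings dictionary and do_work function are hypothetical, not part of the patch):

import sys
from compat import *    # say, basestring, range, zip_longest, items, ... per __all__ below

def do_work(settings):
    try:
        value = settings['some_setting']
    except KeyError:
        # Target-less except plus sys.exc_info(), as the notes below explain,
        # keeps one source text valid on Python 2.5 through Python 3.
        say('missing setting:', sys.exc_info()[1])
        return
    say('some_setting =', value)
    for name, val in items(settings):   # items() maps to dict.iteritems on Python 2
        say(name, val, sep=': ')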
- -# A few notes regarding some compatibility-driven peculiarities -# in the use of the language that can be seen in all modules: -# -# * Except statements never specify target; instead, when needed, -# the exception is taken from sys.exc_info(). Blame the incompatible -# syntaxes of the except clause in Python 2.5 and Python 3 and the lack -# of preprocessor macros in Python of any version ;P. -# -# * Keyword-only parameters are never used, even for parameters -# that should never be given in as arguments. The reason is -# the laziness of some Python developers who have failed to finish -# implementing them in Python 2 even though they had several years -# of time and multiple version releases to sneak them in. -# -# * Abstract classes are only implemented for Python 2.6 and 2.7. -# ABC's require the abc module and the specification of metaclasses, -# but in Python 2.5, the abc module does not exist, while in Python 3, -# metaclasses are specified using a syntax totally incompatible -# with Python 2 and not usable conditionally via exec() and such -# because it is a detail of the syntax of the class statement itself. - -try: - import builtins -except ImportError: - import __builtin__ as builtins - -__all__ = ('say', 'basestring', 'range', 'map', 'zip', 'filter', 'items', - 'keys', 'values', 'zip_longest', 'callable', - 'ABCMeta', 'abstractmethod', 'CompatBuiltins') - -try: - # Python 3 - exec('say = print') -except SyntaxError: - try: - # Python 2.6/2.7 - # An alternative is exec('from __future__ import print_function; say = print'); - # if problems arise with the current line, one should try replacing it - # with this one with the future import before abandoning the idea altogether - say = getattr(builtins, 'print') - except Exception: - # Python 2.5 - import sys - # This should fully emulate the print function of Python 2.6 in Python 2.3+ - # The error messages are taken from Python 2.6 - # The name bindings at the bottom of this file are in effect - def saytypeerror(value, name): - return TypeError(' '.join((name, 'must be None, str or unicode, not', type(value).__name__))) - def say(*values, **kwargs): - sep = kwargs.pop('sep' , None) - end = kwargs.pop('end' , None) - file = kwargs.pop('file', None) - if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % kwargs.popitem()[0]) - if sep is None: sep = ' ' - if end is None: end = '\n' - if file is None: file = sys.stdout - if not isinstance(sep, basestring): raise saytypeerror(sep, 'sep') - if not isinstance(end, basestring): raise saytypeerror(end, 'end') - file.write(sep.join(map(str, values)) + end) - -try: - from os.path import relpath -except ImportError: - # Python 2.5 - import os.path as _path - - # Adapted from Python 2.7.1 - - if hasattr(_path, 'splitunc'): - def _abspath_split(path): - abs = _path.abspath(_path.normpath(path)) - prefix, rest = _path.splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = _path.splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(_path.sep) if x] - else: - def _abspath_split(path): - prefix, rest = _path.splitdrive(_path.abspath(_path.normpath(path))) - return False, prefix, [x for x in rest.split(_path.sep) if x] - - def relpath(path, start=_path.curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise 
ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. - i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [_path.pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return _path.curdir - return _path.join(*rel_list) - - _path.relpath = relpath - -def import_urllib(): - try: - # Python 3 - import urllib.request - return urllib.request, lambda url: urllib.request.urlopen(url).read().decode() - except ImportError: - # Python 2 - import urllib - return urllib, lambda url: urllib.urlopen(url).read() - -try: - from abc import ABCMeta, abstractmethod -except ImportError: - ABCMeta, abstractmethod = None, lambda x: x - -# In all of the following, the try clause is for Python 2 and the except -# clause is for Python 3. More checks are performed than needed -# for standard builds of Python to ensure as much as possible works -# on custom builds. -try: - basestring = basestring -except NameError: - basestring = str - -try: - range = xrange -except NameError: - range = range - -try: - callable = callable -except NameError: - callable = lambda obj: hasattr(obj, '__call__') - -try: - next = next -except NameError: - next = lambda obj: obj.next() - -try: - from itertools import imap as map -except ImportError: - map = map - -try: - from itertools import izip as zip -except ImportError: - zip = zip - -try: - from itertools import ifilter as filter -except ImportError: - filter = filter - -items = dict.iteritems if hasattr(dict, 'iteritems') else dict.items -keys = dict.iterkeys if hasattr(dict, 'iterkeys') else dict.keys -values = dict.itervalues if hasattr(dict, 'itervalues') else dict.values - -try: - # Python 3 - from itertools import zip_longest -except ImportError: - try: - # Python 2.6/2.7 - from itertools import izip_longest as zip_longest - except ImportError: - # Python 2.5 - from itertools import chain, repeat - # Adapted from the documentation of itertools.izip_longest - def zip_longest(*args, **kwargs): - fillvalue = kwargs.get('fillvalue') - def sentinel(counter=([fillvalue]*(len(args)-1)).pop): - yield counter() - fillers = repeat(fillvalue) - iters = [chain(it, sentinel(), fillers) for it in args] - try: - for tup in zip(*iters): - yield tup - except IndexError: - pass - -# Automatically import * from this module into testconf.py's -class CompatBuiltins(object): - __slots__ = 'originals' - def __init__(self): - self.originals = {} - def __enter__(self): - g = globals() - for name in __all__: - if hasattr(builtins, name): - self.originals[name] = getattr(builtins, name) - setattr(builtins, name, g[name]) - def __exit__(self, exc_type, exc_val, exc_tb): - for name in self.originals: - setattr(builtins, name, self.originals[name]) - -# Support simple testconf.py's written for test.py 1.x -builtins.xrange = range \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/config.py --- a/2.00/config.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,182 +0,0 @@ -#! 
/usr/bin/env python -# Copyright (c) 2010 Chortos-2 - -from __future__ import division, with_statement - -try: - from compat import * - import files -except ImportError: - import __main__ - __main__.import_error(sys.exc_info()[1]) -else: - from __main__ import options - -if files.ZipArchive: - try: - import zipimport - except ImportError: - zipimport = None -else: - zipimport = None - -import imp, os, sys, tempfile - -__all__ = 'load_problem', 'load_global', 'globalconf' - -defaults_problem = {'usegroups': False, - 'maxtime': None, - 'maxmemory': None, - 'dummies': {}, - 'testsexcluded': (), - 'padtests': 0, - 'paddummies': 0, - 'taskweight': 100, - 'pointmap': {}, - 'stdio': False, - 'dummyinname': '', - 'dummyoutname': '', - 'tester': None, - 'maxexitcode': 0, - 'inname': '', - 'ansname': ''} -defaults_global = {'tasknames': None, - 'force_zero_exitcode': True} -defaults_noerase = {'inname': '%.in', - 'outname': '%.out', - 'ansname': '%.ans'} -patterns = ('inname', 'outname', 'ansname', 'testcaseinname', - 'testcaseoutname', 'dummyinname', 'dummyoutname') - -class Config(object): - __slots__ = 'modules', '__dict__' - - def __init__(self, *modules): - self.modules = modules - - def __getattr__(self, name): - for module in self.modules: - try: - return getattr(module, name) - except AttributeError: - pass - # TODO: provide a message - raise AttributeError(name) - -# A helper context manager -class ReadDeleting(object): - __slots__ = 'name', 'file' - - def __init__(self, name): - self.name = name - - def __enter__(self): - try: - self.file = open(self.name, 'rU') - return self.file - except: - try: - self.__exit__(None, None, None) - except: - pass - raise - - def __exit__(self, exc_type, exc_val, exc_tb): - self.file.close() - os.remove(self.name) - -def load_problem(problem_name): - dwb = sys.dont_write_bytecode - sys.dont_write_bytecode = True - metafile = files.File('/'.join((problem_name, 'testconf.py')), True, 'configuration') - module = None - with CompatBuiltins(): - if zipimport and isinstance(metafile.archive, files.ZipArchive): - try: - module = zipimport.zipimporter(os.path.dirname(metafile.full_real_path)).load_module('testconf') - except zipimport.ZipImportError: - pass - else: - del sys.modules['testconf'] - if not module: - try: - with metafile.open() as f: - module = imp.load_module('testconf', f, metafile.full_real_path, ('.py', 'r', imp.PY_SOURCE)) - # Handle the case when f is not a true file object but imp requires one - except ValueError: - # FIXME: 2.5 lacks the delete parameter - with tempfile.NamedTemporaryFile(delete=False) as f: - inputdatafname = f.name - metafile.copy(inputdatafname) - with ReadDeleting(inputdatafname) as f: - module = imp.load_module('testconf', f, metafile.full_real_path, ('.py', 'r', imp.PY_SOURCE)) - del sys.modules['testconf'] - if hasattr(module, 'padwithzeroestolength'): - if not hasattr(module, 'padtests'): - try: - module.padtests = module.padwithzeroestolength[0] - except TypeError: - module.padtests = module.padwithzeroestolength - if not hasattr(module, 'paddummies'): - try: - module.paddummies = module.padwithzeroestolength[1] - except TypeError: - module.paddummies = module.padwithzeroestolength - for name in defaults_problem: - if not hasattr(globalconf, name): - setattr(module, name, getattr(module, name, defaults_problem[name])) - module = Config(module, globalconf) - if not module.dummyinname: - module.dummyinname = getattr(module, 'testcaseinname', module.dummyinname) - if not module.dummyoutname: - module.dummyoutname = 
getattr(module, 'testcaseoutname', module.dummyoutname) - if not hasattr(module, 'path'): - if hasattr(module, 'name'): - module.path = module.name - elif sys.platform != 'win32': - module.path = os.path.join(os.path.curdir, problem_name) - else: - module.path = problem_name - if options.no_maxtime: - module.maxtime = 0 - sys.dont_write_bytecode = dwb - for name in patterns: - if hasattr(module, name): - setattr(module, name, getattr(module, name).replace('%', problem_name)) - return module - -def load_global(): - dwb = sys.dont_write_bytecode - sys.dont_write_bytecode = True - metafile = files.File('testconf.py', True, 'configuration') - module = None - with CompatBuiltins(): - if zipimport and isinstance(metafile.archive, files.ZipArchive): - try: - module = zipimport.zipimporter(os.path.dirname(metafile.full_real_path)).load_module('testconf') - except zipimport.ZipImportError: - pass - else: - del sys.modules['testconf'] - if not module: - try: - with metafile.open() as f: - module = imp.load_module('testconf', f, metafile.full_real_path, ('.py', 'r', imp.PY_SOURCE)) - # Handle the case when f is not a true file object but imp requires one - except ValueError: - # FIXME: 2.5 lacks the delete parameter - with tempfile.NamedTemporaryFile(delete=False) as f: - inputdatafname = f.name - metafile.copy(inputdatafname) - with ReadDeleting(inputdatafname) as f: - module = imp.load_module('testconf', f, metafile.full_real_path, ('.py', 'r', imp.PY_SOURCE)) - del sys.modules['testconf'] - for name in defaults_global: - setattr(module, name, getattr(module, name, defaults_global[name])) - if not options.erase: - for name in defaults_noerase: - setattr(module, name, getattr(module, name, defaults_noerase[name])) - global globalconf - globalconf = module - sys.dont_write_bytecode = dwb - return module \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/files.py --- a/2.00/files.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,250 +0,0 @@ -#! /usr/bin/env python -# Copyright (c) 2010 Chortos-2 - -"""File access routines and classes with support for archives.""" - -from __future__ import division, with_statement - -try: - from compat import * -except ImportError: - import __main__ - __main__.import_error(sys.exc_info()[1]) - -import contextlib, os, shutil, sys - -# You don't need to know about anything else. -__all__ = 'File', - -# In these two variables, use full stops no matter what os.extsep is; -# all full stops will be converted to os.extsep on the fly -archives = 'tests.tar', 'tests.zip', 'tests.tgz', 'tests.tar.gz', 'tests.tbz2', 'tests.tar.bz2' -formats = {} - -class Archive(object): - __slots__ = 'file' - - if ABCMeta: - __metaclass__ = ABCMeta - - def __new__(cls, path): - """ - Create a new instance of the archive class corresponding - to the file name in the given path. 
- """ - if cls is not Archive: - return object.__new__(cls) - else: - # Do this by hand rather than through os.path.splitext - # because we support multi-dotted file name extensions - ext = path.partition(os.path.extsep)[2] - while ext: - if ext in formats: - return formats[ext](path) - ext = ext.partition(os.path.extsep)[2] - raise LookupError("unsupported archive file name extension in file name '%s'" % filename) - - @abstractmethod - def __init__(self, path): raise NotImplementedError - - @abstractmethod - def extract(self, name, target): raise NotImplementedError - - def __del__(self): - del self.file - -try: - import tarfile -except ImportError: - TarArchive = None -else: - class TarArchive(Archive): - __slots__ = '__namelist' - - def __init__(self, path): - self.file = tarfile.open(path) - - def extract(self, name, target): - member = self.file.getmember(name) - member.name = target - self.file.extract(member) - - # TODO: somehow automagically emulate universal line break support - def open(self, name): - return self.file.extractfile(name) - - def exists(self, queried_name): - if not hasattr(self, '__namelist'): - names = set() - for name in self.file.getnames(): - cutname = name - while cutname: - names.add(cutname) - cutname = cutname.rpartition('/')[0] - self.__namelist = frozenset(names) - return queried_name in self.__namelist - - def __enter__(self): - if hasattr(self.file, '__enter__'): - self.file.__enter__() - return self - - def __exit__(self, exc_type, exc_value, traceback): - if hasattr(self.file, '__exit__'): - return self.file.__exit__(exc_type, exc_value, traceback) - elif exc_type is None: - self.file.close() - else: - # This code was shamelessly copied from tarfile.py of Python 2.7 - if not self.file._extfileobj: - self.file.fileobj.close() - self.file.closed = True - - formats['tar'] = formats['tgz'] = formats['tar.gz'] = formats['tbz2'] = formats['tar.bz2'] = TarArchive - -try: - import zipfile -except ImportError: - ZipArchive = None -else: - class ZipArchive(Archive): - __slots__ = '__namelist' - - def __init__(self, path): - self.file = zipfile.ZipFile(path) - - def extract(self, name, target): - if os.path.isabs(target): - # To my knowledge, this is as portable as it gets - path = os.path.join(os.path.splitdrive(target)[0], os.path.sep) - else: - path = None - - member = self.file.getinfo(name) - member.filename = os.path.relpath(target, path) - # FIXME: 2.5 lacks ZipFile.extract - self.file.extract(member, path) - - def open(self, name): - return self.file.open(name, 'rU') - - def exists(self, queried_name): - if not hasattr(self, '__namelist'): - names = set() - for name in self.file.namelist(): - cutname = name - while cutname: - names.add(cutname) - cutname = cutname.rpartition('/')[0] - self.__namelist = frozenset(names) - return queried_name in self.__namelist - - def __enter__(self): - if hasattr(self.file, '__enter__'): - self.file.__enter__() - return self - - def __exit__(self, exc_type, exc_value, traceback): - if hasattr(self.file, '__exit__'): - return self.file.__exit__(exc_type, exc_value, traceback) - else: - return self.file.close() - - formats['zip'] = ZipArchive - -# Remove unsupported archive formats and replace full stops -# with the platform-dependent file name extension separator -def issupported(filename, formats=formats): - ext = filename.partition('.')[2] - while ext: - if ext in formats: return True - ext = ext.partition('.')[2] - return False -archives = [filename.replace('.', os.path.extsep) for filename in filter(issupported, 
archives)] -formats = dict((item[0].replace('.', os.path.extsep), item[1]) for item in items(formats)) - -open_archives = {} - -def open_archive(path): - if path in open_archives: - return open_archives[path] - else: - open_archives[path] = archive = Archive(path) - return archive - -class File(object): - __slots__ = 'virtual_path', 'real_path', 'full_real_path', 'archive' - - def __init__(self, virtpath, allow_root=False, msg='test data'): - self.virtual_path = virtpath - self.archive = None - if not self.realize_path('', tuple(comp.replace('.', os.path.extsep) for comp in virtpath.split('/')), allow_root): - raise IOError("%s file '%s' could not be found" % (msg, virtpath)) - - def realize_path(self, root, virtpath, allow_root=False, hastests=False): - if root and not os.path.exists(root): - return False - if len(virtpath) > 1: - if self.realize_path(os.path.join(root, virtpath[0]), virtpath[1:], allow_root, hastests): - return True - elif not hastests: - if self.realize_path(os.path.join(root, 'tests'), virtpath, allow_root, True): - return True - for archive in archives: - path = os.path.join(root, archive) - if os.path.exists(path): - if self.realize_path_archive(open_archive(path), '', virtpath, path): - return True - if self.realize_path(root, virtpath[1:], allow_root, hastests): - return True - else: - if not hastests: - path = os.path.join(root, 'tests', virtpath[0]) - if os.path.exists(path): - self.full_real_path = self.real_path = path - return True - for archive in archives: - path = os.path.join(root, archive) - if os.path.exists(path): - if self.realize_path_archive(open_archive(path), '', virtpath, path): - return True - if hastests or allow_root: - path = os.path.join(root, virtpath[0]) - if os.path.exists(path): - self.full_real_path = self.real_path = path - return True - return False - - def realize_path_archive(self, archive, root, virtpath, archpath): - if root and not archive.exists(root): - return False - if root: path = ''.join((root, '/', virtpath[0])) - else: path = virtpath[0] - if len(virtpath) > 1: - if self.realize_path_archive(archive, path, virtpath[1:], archpath): - return True - elif self.realize_path_archive(archive, root, virtpath[1:], archpath): - return True - else: - if archive.exists(path): - self.archive = archive - self.real_path = path - self.full_real_path = os.path.join(archpath, *path.split('/')) - return True - return False - - def open(self): - if self.archive: - file = self.archive.open(self.real_path) - if hasattr(file, '__exit__'): - return file - else: - return contextlib.closing(file) - else: - return open(self.real_path) - - def copy(self, target): - if self.archive: - self.archive.extract(self.real_path, target) - else: - shutil.copy(self.real_path, target) \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/problem.py --- a/2.00/problem.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,202 +0,0 @@ -#! /usr/bin/env python -# Copyright (c) 2010 Chortos-2 - -from __future__ import division, with_statement - -try: - from compat import * - import config, testcases -except ImportError: - import __main__ - __main__.import_error(sys.exc_info()[1]) -else: - from __main__ import clock, options - -import os, re, sys - -try: - import signal -except ImportError: - signalnames = () -else: - # Construct a cache of all signal names available on the current - # platform. Prefer names from the UNIX standards over other versions. 
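The cache built just below maps signal numbers back to their names so that a crashed testee can be reported by name as well as by number; a minimal sketch of the eventual lookup (the -15 return code is a made-up example):

# Hypothetical example: a child terminated by SIGTERM has Popen.returncode == -15.
exitcode = -15
signum = -exitcode
if signum in signalnames:
    verdict = 'terminated by signal %d (%s)' % (signum, signalnames[signum])
else:
    verdict = 'terminated by signal %d' % signum
# On a typical POSIX platform this yields: terminated by signal 15 (SIGTERM)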
- unixnames = frozenset(('HUP', 'INT', 'QUIT', 'ILL', 'ABRT', 'FPE', 'KILL', 'SEGV', 'PIPE', 'ALRM', 'TERM', 'USR1', 'USR2', 'CHLD', 'CONT', 'STOP', 'TSTP', 'TTIN', 'TTOU', 'BUS', 'POLL', 'PROF', 'SYS', 'TRAP', 'URG', 'VTALRM', 'XCPU', 'XFSZ')) - signalnames = {} - for name in dir(signal): - if re.match('SIG[A-Z]+$', name): - value = signal.__dict__[name] - if isinstance(value, int) and (value not in signalnames or name[3:] in unixnames): - signalnames[value] = name - del unixnames - -__all__ = 'Problem', 'TestContext', 'test_context_end', 'TestGroup' - -def strerror(e): - s = getattr(e, 'strerror') - if not s: s = str(e) - return ' (%s%s)' % (s[0].lower(), s[1:]) if s else '' - -class Cache(object): - def __init__(self, mydict): - self.__dict__ = mydict - -class TestContext(object): - pass - -test_context_end = object() - -class TestGroup(TestContext): - __slots__ = 'case', 'log', 'correct', 'allcorrect', 'real', 'max', 'ntotal', 'nvalued', 'ncorrect', 'ncorrectvalued' - - def __init__(self): - self.real = self.max = self.ntotal = self.nvalued = self.ncorrect = self.ncorrectvalued = 0 - self.allcorrect = True - self.log = [] - - def case_start(self, case): - self.case = case - self.correct = False - self.ntotal += 1 - self.max += case.points - if case.points: - self.nvalued += 1 - - def case_correct(self): - self.correct = True - self.ncorrect += 1 - if self.case.points: - self.ncorrectvalued += 1 - - def case_end(self, granted): - self.log.append((self.case, self.correct, granted)) - self.real += granted - del self.case - if not self.correct: - self.allcorrect = False - - def end(self): - say('Group total: %d/%d tests; %d/%d points' % (self.ncorrect, self.ntotal, self.real if self.allcorrect else 0, self.max)) - # No real need to flush stdout, as it will anyway be flushed in a moment, - # when either the problem total or the next test case's ID is printed - if self.allcorrect: - return self.log - else: - return ((case, correct, 0) for case, correct, granted in self.log) - -class Problem(object): - __slots__ = 'name', 'config', 'cache', 'testcases' - - def __init__(prob, name): - if not isinstance(name, basestring): - # This shouldn't happen, of course - raise TypeError('Problem() argument 1 must be string, not ' + type(name).__name__) - prob.name = name - prob.config = config.load_problem(name) - if not getattr(prob.config, 'kind', None): prob.config.kind = 'batch' - prob.cache = Cache({'padoutput': 0}) - prob.testcases = testcases.load_problem(prob) - - # TODO - def build(prob): - raise NotImplementedError - - def test(prob): - case = None - try: - contexts = [TestGroup()] - for case in prob.testcases: - if case is test_context_end: - for case, correct, granted in contexts.pop().end(): - contexts[-1].case_start(case) - if correct: - contexts[-1].case_correct() - contexts[-1].case_end(granted) - continue - elif isinstance(case, TestContext): - contexts.append(case) - continue - contexts[-1].case_start(case) - granted = 0 - id = str(case.id) - if case.isdummy: - id = 'sample ' + id - say('%*s: ' % (prob.cache.padoutput, id), end='') - sys.stdout.flush() - try: - granted = case(lambda: (say('%7.3f%s s, ' % (case.time_stopped - case.time_started, case.time_limit_string), end=''), sys.stdout.flush())) - except testcases.CanceledByUser: - verdict = 'canceled by the user' - except testcases.TimeLimitExceeded: - verdict = 'time limit exceeded' - except testcases.WrongAnswer: - e = sys.exc_info()[1] - if e.comment: - verdict = 'wrong answer (%s)' % e.comment - else: - verdict = 'wrong answer' - 
except testcases.NonZeroExitCode: - e = sys.exc_info()[1] - if e.exitcode < 0: - if sys.platform == 'win32': - verdict = 'terminated with error 0x%X' % (e.exitcode + 0x100000000) - elif -e.exitcode in signalnames: - verdict = 'terminated by signal %d (%s)' % (-e.exitcode, signalnames[-e.exitcode]) - else: - verdict = 'terminated by signal %d' % -e.exitcode - else: - verdict = 'non-zero return code %d' % e.exitcode - except testcases.CannotStartTestee: - verdict = 'cannot launch the program to test%s' % strerror(sys.exc_info()[1].upstream) - except testcases.CannotStartValidator: - verdict = 'cannot launch the validator%s' % strerror(sys.exc_info()[1].upstream) - except testcases.CannotReadOutputFile: - verdict = 'cannot read the output file%s' % strerror(sys.exc_info()[1].upstream) - except testcases.CannotReadInputFile: - verdict = 'cannot read the input file%s' % strerror(sys.exc_info()[1].upstream) - except testcases.CannotReadAnswerFile: - verdict = 'cannot read the reference output file%s' % strerror(sys.exc_info()[1].upstream) - except testcases.TestCaseNotPassed: - verdict = 'unspecified reason [this may be a bug in test.py]%s' % strerror(sys.exc_info()[1]) - #except Exception: - # verdict = 'unknown error [this may be a bug in test.py]%s' % strerror(sys.exc_info()[1]) - else: - try: - granted, comment = granted - except TypeError: - comment = '' - else: - if comment: - comment = ' (%s)' % comment - if granted >= 1: - contexts[-1].case_correct() - verdict = 'OK' + comment - elif not granted: - verdict = 'wrong answer' + comment - else: - verdict = 'partly correct' + comment - granted *= case.points - say('%g/%g, %s' % (granted, case.points, verdict)) - contexts[-1].case_end(granted) - weighted = contexts[0].real * prob.config.taskweight / contexts[0].max if contexts[0].max else 0 - if contexts[0].nvalued != contexts[0].ntotal: - say('Problem total: %d/%d tests (%d/%d valued); %g/%g points; weighted score: %g/%g' % (contexts[0].ncorrect, contexts[0].ntotal, contexts[0].ncorrectvalued, contexts[0].nvalued, contexts[0].real, contexts[0].max, weighted, prob.config.taskweight)) - else: - say('Problem total: %d/%d tests; %g/%g points; weighted score: %g/%g' % (contexts[0].ncorrect, contexts[0].ntotal, contexts[0].real, contexts[0].max, weighted, prob.config.taskweight)) - sys.stdout.flush() - return weighted, prob.config.taskweight - finally: - if options.erase and (not prob.config.stdio or case and case.validator): - for var in 'in', 'out': - name = getattr(prob.config, var + 'name') - if name: - try: - os.remove(name) - except Exception: - pass - if case.validator and not callable(case.validator): - if prob.config.ansname: - try: - os.remove(prob.config.ansname) - except Exception: - pass \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/publish.sh --- a/2.00/publish.sh Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -#! /bin/sh - -VERSION=`hg summary | grep '^parent' | sed -En 's/^parent: [0-9]*:([0-9a-f]+) .*$/\1/p'` -if [ -z "$VERSION" ] -then - echo The current Mercurial changeset could not be determined. >&2 - exit 1 -fi - -sed 's/$$REV$\$/hg '"$VERSION/" upreckon-vcs >upreckon -chmod +x upreckon \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/testcases.py --- a/2.00/testcases.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,549 +0,0 @@ -#! 
/usr/bin/env python -# Copyright (c) 2010 Chortos-2 - -# TODO: copy the ansfile if not options.erase even if no validator is used - -from __future__ import division, with_statement - -try: - from compat import * - import files, problem, config -except ImportError: - import __main__ - __main__.import_error(sys.exc_info()[1]) -else: - from __main__ import clock, options - -import glob, re, sys, tempfile, time -from subprocess import Popen, PIPE, STDOUT - -import os -devnull = open(os.path.devnull, 'w+') - -try: - from signal import SIGTERM, SIGKILL -except ImportError: - SIGTERM = 15 - SIGKILL = 9 - -try: - from _subprocess import TerminateProcess -except ImportError: - # CPython 2.5 does define _subprocess.TerminateProcess even though it is - # not used in the subprocess module, but maybe something else does not - try: - import ctypes - TerminateProcess = ctypes.windll.kernel32.TerminateProcess - except (ImportError, AttributeError): - TerminateProcess = None - - -# Do the hacky-wacky dark magic needed to catch presses of the Escape button. -# If only Python supported forcible termination of threads... -if not sys.stdin.isatty(): - canceled = init_canceled = lambda: False - pause = None -else: - try: - # Windows has select() too, but it is not the select() we want - import msvcrt - except ImportError: - try: - import select, termios, tty, atexit - except ImportError: - # It cannot be helped! - # Silently disable support for killing the program being tested - canceled = init_canceled = lambda: False - pause = None - else: - def cleanup(old=termios.tcgetattr(sys.stdin.fileno())): - termios.tcsetattr(sys.stdin.fileno(), termios.TCSAFLUSH, old) - atexit.register(cleanup) - del cleanup - tty.setcbreak(sys.stdin.fileno()) - def canceled(select=select.select, stdin=sys.stdin, read=sys.stdin.read): - while select((stdin,), (), (), 0)[0]: - if read(1) == '\33': - return True - return False - def init_canceled(): - while select.select((sys.stdin,), (), (), 0)[0]: - sys.stdin.read(1) - def pause(): - sys.stdin.read(1) - else: - def canceled(kbhit=msvcrt.kbhit, getch=msvcrt.getch): - while kbhit(): - c = getch() - if c == '\33': - return True - elif c == '\0': - # Let's hope no-one is fiddling with this - getch() - return False - def init_canceled(): - while msvcrt.kbhit(): - msvcrt.getch() - def pause(): - msvcrt.getch() - - -__all__ = ('TestCase', 'load_problem', 'TestCaseNotPassed', - 'TimeLimitExceeded', 'CanceledByUser', 'WrongAnswer', - 'NonZeroExitCode', 'CannotStartTestee', - 'CannotStartValidator', 'CannotReadOutputFile', - 'CannotReadInputFile', 'CannotReadAnswerFile') - - - -# Exceptions - -class TestCaseNotPassed(Exception): __slots__ = () -class TimeLimitExceeded(TestCaseNotPassed): __slots__ = () -class CanceledByUser(TestCaseNotPassed): __slots__ = () - -class WrongAnswer(TestCaseNotPassed): - __slots__ = 'comment' - def __init__(self, comment=''): - self.comment = comment - -class NonZeroExitCode(TestCaseNotPassed): - __slots__ = 'exitcode' - def __init__(self, exitcode): - self.exitcode = exitcode - -class ExceptionWrapper(TestCaseNotPassed): - __slots__ = 'upstream' - def __init__(self, upstream): - self.upstream = upstream - -class CannotStartTestee(ExceptionWrapper): __slots__ = () -class CannotStartValidator(ExceptionWrapper): __slots__ = () -class CannotReadOutputFile(ExceptionWrapper): __slots__ = () -class CannotReadInputFile(ExceptionWrapper): __slots__ = () -class CannotReadAnswerFile(ExceptionWrapper): __slots__ = () - - - -# Helper context managers - -class 
CopyDeleting(object): - __slots__ = 'case', 'file', 'name' - - def __init__(self, case, file, name): - self.case = case - self.file = file - self.name = name - - def __enter__(self): - if self.name: - try: - self.file.copy(self.name) - except: - try: - self.__exit__(None, None, None) - except: - pass - raise - - def __exit__(self, exc_type, exc_val, exc_tb): - if self.name: - self.case.files_to_delete.append(self.name) - - -class Copying(object): - __slots__ = 'file', 'name' - - def __init__(self, file, name): - self.file = file - self.name = name - - def __enter__(self): - if self.name: - self.file.copy(self.name) - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - - -# Test case types - -class TestCase(object): - __slots__ = ('problem', 'id', 'isdummy', 'infile', 'outfile', 'points', - 'process', 'time_started', 'time_stopped', 'time_limit_string', - 'realinname', 'realoutname', 'maxtime', 'maxmemory', - 'has_called_back', 'files_to_delete') - - if ABCMeta: - __metaclass__ = ABCMeta - - def __init__(case, prob, id, isdummy, points): - case.problem = prob - case.id = id - case.isdummy = isdummy - case.points = points - case.maxtime = case.problem.config.maxtime - case.maxmemory = case.problem.config.maxmemory - if case.maxtime: - case.time_limit_string = '/%.3f' % case.maxtime - else: - case.time_limit_string = '' - if not isdummy: - case.realinname = case.problem.config.testcaseinname - case.realoutname = case.problem.config.testcaseoutname - else: - case.realinname = case.problem.config.dummyinname - case.realoutname = case.problem.config.dummyoutname - - @abstractmethod - def test(case): raise NotImplementedError - - def __call__(case, callback): - case.has_called_back = False - case.files_to_delete = [] - try: - return case.test(callback) - finally: - now = clock() - if not getattr(case, 'time_started', None): - case.time_started = case.time_stopped = now - elif not getattr(case, 'time_stopped', None): - case.time_stopped = now - if not case.has_called_back: - callback() - case.cleanup() - - def cleanup(case): - #if getattr(case, 'infile', None): - # case.infile.close() - #if getattr(case, 'outfile', None): - # case.outfile.close() - if getattr(case, 'process', None): - # Try killing after three unsuccessful TERM attempts in a row - # (except on Windows, where TERMing is killing) - for i in range(3): - try: - try: - case.process.terminate() - except AttributeError: - # Python 2.5 - if TerminateProcess and hasattr(proc, '_handle'): - # Windows API - TerminateProcess(proc._handle, 1) - else: - # POSIX - os.kill(proc.pid, SIGTERM) - except Exception: - time.sleep(0) - case.process.poll() - else: - case.process.wait() - break - else: - # If killing the process is unsuccessful three times in a row, - # just silently stop trying - for i in range(3): - try: - try: - case.process.kill() - except AttributeError: - # Python 2.5 - if TerminateProcess and hasattr(proc, '_handle'): - # Windows API - TerminateProcess(proc._handle, 1) - else: - # POSIX - os.kill(proc.pid, SIGKILL) - except Exception: - time.sleep(0) - case.process.poll() - else: - case.process.wait() - break - if case.files_to_delete: - for name in case.files_to_delete: - try: - os.remove(name) - except Exception: - # It can't be helped - pass - - def open_infile(case): - try: - case.infile = files.File('/'.join((case.problem.name, case.realinname.replace('$', case.id)))) - except IOError: - e = sys.exc_info()[1] - raise CannotReadInputFile(e) - - def open_outfile(case): - try: - case.outfile = 
files.File('/'.join((case.problem.name, case.realoutname.replace('$', case.id)))) - except IOError: - e = sys.exc_info()[1] - raise CannotReadAnswerFile(e) - - -class ValidatedTestCase(TestCase): - __slots__ = 'validator' - - def __init__(case, *args): - TestCase.__init__(case, *args) - if not case.problem.config.tester: - case.validator = None - else: - case.validator = case.problem.config.tester - - def validate(case, output): - if not case.validator: - # Compare the output with the reference output - case.open_outfile() - with case.outfile.open() as refoutput: - for line, refline in zip_longest(output, refoutput): - if refline is not None and not isinstance(refline, basestring): - line = bytes(line, sys.getdefaultencoding()) - if line != refline: - raise WrongAnswer - return 1 - elif callable(case.validator): - return case.validator(output) - else: - # Call the validator program - output.close() - if case.problem.config.ansname: - case.open_outfile() - case.outfile.copy(case.problem.config.ansname) - try: - case.process = Popen(case.validator, stdin=devnull, stdout=PIPE, stderr=STDOUT, universal_newlines=True, bufsize=-1) - except OSError: - raise CannotStartValidator(sys.exc_info()[1]) - comment = case.process.communicate()[0].strip() - match = re.match(r'(?i)(ok|(?:correct|wrong)(?:(?:\s|_)*answer)?)(?:$|\s+|[.,!:]+\s*)', comment) - if match: - comment = comment[match.end():] - if not case.problem.config.maxexitcode: - if case.process.returncode: - raise WrongAnswer(comment) - else: - return 1, comment - else: - return case.process.returncode / case.problem.config.maxexitcode, comment - - -class BatchTestCase(ValidatedTestCase): - __slots__ = () - - def test(case, callback): - init_canceled() - if sys.platform == 'win32' or not case.maxmemory: - preexec_fn = None - else: - def preexec_fn(): - try: - import resource - maxmemory = int(case.maxmemory * 1048576) - resource.setrlimit(resource.RLIMIT_AS, (maxmemory, maxmemory)) - # I would also set a CPU time limit but I do not want the time - # that passes between the calls to fork and exec to be counted in - except MemoryError: - # We do not have enough memory for ourselves; - # let the parent know about this - raise - except Exception: - # Well, at least we tried - pass - case.open_infile() - case.time_started = None - if case.problem.config.stdio: - if options.erase and not case.validator and case.problem.config.inname: - # TODO: re-use the same file name if possible - # FIXME: 2.5 lacks the delete parameter - with tempfile.NamedTemporaryFile(delete=False) as f: - inputdatafname = f.name - contextmgr = CopyDeleting(case, case.infile, inputdatafname) - else: - inputdatafname = case.problem.config.inname - contextmgr = Copying(case.infile, inputdatafname) - with contextmgr: - # FIXME: this U doesn't do anything good for the child process, does it? 
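- # (The input copy is reopened read-only for the child process below; its - # standard output goes to an anonymous temporary file when the output need - # not be kept and no external validator is configured, and to the configured - # outname file otherwise.)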
- with open(inputdatafname, 'rU') as infile: - with tempfile.TemporaryFile('w+') if options.erase and not case.validator else open(case.problem.config.outname, 'w+') as outfile: - try: - try: - case.process = Popen(case.problem.config.path, stdin=infile, stdout=outfile, stderr=devnull, universal_newlines=True, bufsize=-1, preexec_fn=preexec_fn) - except MemoryError: - # If there is not enough memory for the forked test.py, - # opt for silent dropping of the limit - # TODO: show a warning somewhere - case.process = Popen(case.problem.config.path, stdin=infile, stdout=outfile, stderr=devnull, universal_newlines=True, bufsize=-1) - except OSError: - raise CannotStartTestee(sys.exc_info()[1]) - case.time_started = clock() - time_next_check = case.time_started + .15 - if not case.maxtime: - while True: - exitcode, now = case.process.poll(), clock() - if exitcode is not None: - case.time_stopped = now - break - # For some reason (probably Microsoft's fault), - # msvcrt.kbhit() is slow as hell - else: - if now >= time_next_check: - if canceled(): - raise CanceledByUser - else: - time_next_check = now + .15 - time.sleep(0) - else: - time_end = case.time_started + case.maxtime - while True: - exitcode, now = case.process.poll(), clock() - if exitcode is not None: - case.time_stopped = now - break - elif now >= time_end: - raise TimeLimitExceeded - else: - if now >= time_next_check: - if canceled(): - raise CanceledByUser - else: - time_next_check = now + .15 - time.sleep(0) - if config.globalconf.force_zero_exitcode and case.process.returncode: - raise NonZeroExitCode(case.process.returncode) - callback() - case.has_called_back = True - outfile.seek(0) - return case.validate(outfile) - else: - case.infile.copy(case.problem.config.inname) - try: - try: - case.process = Popen(case.problem.config.path, stdin=devnull, stdout=devnull, stderr=STDOUT, preexec_fn=preexec_fn) - except MemoryError: - # If there is not enough memory for the forked test.py, - # opt for silent dropping of the limit - # TODO: show a warning somewhere - case.process = Popen(case.problem.config.path, stdin=devnull, stdout=devnull, stderr=STDOUT) - except OSError: - raise CannotStartTestee(sys.exc_info()[1]) - case.time_started = clock() - time_next_check = case.time_started + .15 - if not case.maxtime: - while True: - exitcode, now = case.process.poll(), clock() - if exitcode is not None: - case.time_stopped = now - break - else: - if now >= time_next_check: - if canceled(): - raise CanceledByUser - else: - time_next_check = now + .15 - time.sleep(0) - else: - time_end = case.time_started + case.maxtime - while True: - exitcode, now = case.process.poll(), clock() - if exitcode is not None: - case.time_stopped = now - break - elif now >= time_end: - raise TimeLimitExceeded - else: - if now >= time_next_check: - if canceled(): - raise CanceledByUser - else: - time_next_check = now + .15 - time.sleep(0) - if config.globalconf.force_zero_exitcode and case.process.returncode: - raise NonZeroExitCode(case.process.returncode) - callback() - case.has_called_back = True - with open(case.problem.config.outname, 'rU') as output: - return case.validate(output) - - -# This is the only test case type not executing any programs to be tested -class OutputOnlyTestCase(ValidatedTestCase): - __slots__ = () - def cleanup(case): pass - -class BestOutputTestCase(ValidatedTestCase): - __slots__ = () - -# This is the only test case type executing two programs simultaneously -class ReactiveTestCase(TestCase): - __slots__ = () - # The basic idea is to 
launch the program to be tested and the grader - # and to pipe their standard I/O from and to each other, - # and then to capture the grader's exit code and use it - # like the exit code of an output validator is used. - - -def load_problem(prob, _types={'batch' : BatchTestCase, - 'outonly' : OutputOnlyTestCase, - 'bestout' : BestOutputTestCase, - 'reactive': ReactiveTestCase}): - # We will need to iterate over these configuration variables twice - try: - len(prob.config.dummies) - except Exception: - prob.config.dummies = tuple(prob.config.dummies) - try: - len(prob.config.tests) - except Exception: - prob.config.tests = tuple(prob.config.tests) - - if options.legacy: - prob.config.usegroups = False - prob.config.tests = list(prob.config.tests) - for i, name in enumerate(prob.config.tests): - # Same here; we'll need to iterate over them twice - try: - l = len(name) - except Exception: - try: - name = tuple(name) - except TypeError: - name = (name,) - l = len(name) - if len(name) > 1: - prob.config.usegroups = True - break - elif not len(name): - prob.config.tests[i] = (name,) - - # First get prob.cache.padoutput right, - # then yield the actual test cases - for i in prob.config.dummies: - s = 'sample ' + str(i).zfill(prob.config.paddummies) - prob.cache.padoutput = max(prob.cache.padoutput, len(s)) - if prob.config.usegroups: - for group in prob.config.tests: - for i in group: - s = str(i).zfill(prob.config.padtests) - prob.cache.padoutput = max(prob.cache.padoutput, len(s)) - for i in prob.config.dummies: - s = str(i).zfill(prob.config.paddummies) - yield _types[prob.config.kind](prob, s, True, 0) - for group in prob.config.tests: - yield problem.TestGroup() - for i in group: - s = str(i).zfill(prob.config.padtests) - yield _types[prob.config.kind](prob, s, False, prob.config.pointmap.get(i, prob.config.pointmap.get(None, prob.config.maxexitcode if prob.config.maxexitcode else 1))) - yield problem.test_context_end - else: - for i in prob.config.tests: - s = str(i).zfill(prob.config.padtests) - prob.cache.padoutput = max(prob.cache.padoutput, len(s)) - for i in prob.config.dummies: - s = str(i).zfill(prob.config.paddummies) - yield _types[prob.config.kind](prob, s, True, 0) - for i in prob.config.tests: - s = str(i).zfill(prob.config.padtests) - yield _types[prob.config.kind](prob, s, False, prob.config.pointmap.get(i, prob.config.pointmap.get(None, prob.config.maxexitcode if prob.config.maxexitcode else 1))) \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/upreckon-vcs --- a/2.00/upreckon-vcs Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,156 +0,0 @@ -#! 
/usr/bin/env python -# Copyright (c) 2009-2010 Chortos-2 - -from __future__ import division, with_statement -import optparse, sys, compat - -def import_error(e): - say('Error: your installation of Upreckon is incomplete;', str(e).lower() + '.', file=sys.stderr) - sys.exit(3) - -from compat import * - -version = '2.00.0 ($$REV$$)' -parser = optparse.OptionParser(version='Upreckon '+version, epilog='Python 2.5 or newer is required.') -parser.add_option('-1', dest='legacy', action='store_true', default=False, help='handle configuration files in a way more compatible with test.py 1.x') -parser.add_option('-u', '--update', dest='update', action='store_true', default=False, help='update the installed Upreckon to the latest publicly available version') -parser.add_option('-p', '--problem', dest='problems', metavar='PROBLEM', action='append', help='test only the PROBLEM (this option can be specified more than once with different problem names, all of which will be tested)') -parser.add_option('-m', '--copy-io', dest='copyonly', action='store_true', default=False, help='create a copy of the input/output files of the last test case for manual testing and exit') -parser.add_option('-x', '--auto-exit', dest='pause', action='store_false', default=True, help='do not wait for a key to be pressed after finishing testing') -parser.add_option('-s', '--save-io', dest='erase', action='store_false', default=True, help='do not delete the copies of input/output files after the last test case; create copies of input files and store output in files even if the solution uses standard I/O; delete the stored input/output files if the solution uses standard I/O and the -c/--cleanup option is specified') -parser.add_option('-t', '--detect-time', dest='autotime', action='store_true', default=False, help='spend a second detecting the most precise time measurement function') -parser.add_option('--no-time-limits', dest='no_maxtime', action='store_true', default=False, help='disable all time limits') - -options, args = parser.parse_args() -parser.destroy() -del parser - -if options.update: - try: - urllib, urlread = compat.import_urllib() - except ImportError: - sys.exit('Error: the urllib Python module is missing. Without it, an automatic update is impossible.') - - latesttext = urlread('http://chortos.selfip.net/~astiob/test.py/version.txt') - latest = latesttext.split('.') - installed = version.split('.') - update = None - - if latest[0] > installed[0]: - update = 'major' - elif latest[0] == installed[0]: - if latest[1] > installed[1]: - update = 'feature' - elif latest[1] == installed[1]: - if latest[2] > installed[2]: - update = 'bug-fixing' - elif latest[2] == installed[2]: - say('You are using the latest publicly available version of Upreckon.') - sys.exit() - - if not update: - say('Your copy of Upreckon is newer than the publicly available version.') - sys.exit() - - say('A ' + update + ' update to Upreckon is available. Downloading...') - sys.stdout.flush() - # FIXME: need to update all files! - urllib.urlretrieve('http://chortos.selfip.net/~astiob/test.py/test.py', sys.argv[0]) - say('Downloaded and installed. Now you are using Upreckon ' + latesttext + '.') - sys.exit() - -import config, itertools, os, subprocess, sys, time - -if options.autotime: - # This is really a dirty hack that assumes that sleep() does not spend - # the CPU time of the current process and that if clock() measures - # wall-clock time, then it is more precise than time() is. 
Both these - # assumptions are true on all platforms I have tested this on so far, - # but I am not aware of any guarantee that they will both be true - # on every other platform. - c = time.clock() - time.sleep(1) - c = time.clock() - c - if int(c + .5) == 1: - clock = time.clock - else: - clock = time.time -elif sys.platform == 'win32': - clock = time.clock -else: - clock = time.time - -try: - from testcases import pause -except ImportError: - pause = None - -try: - globalconf = config.load_global() - - # Do this check here so that if we have to warn them, we do it as early as possible - if options.pause and not pause and not hasattr(globalconf, 'pause'): - if os.name == 'posix': - globalconf.pause = 'read -s -n 1' - say('Warning: configuration variable pause is not defined; it was devised automatically but the choice might be incorrect, so Upreckon might exit immediately after the testing is completed.', file=sys.stderr) - sys.stderr.flush() - elif os.name == 'nt': - globalconf.pause = 'pause' - else: - sys.exit('Error: configuration variable pause is not defined and cannot be devised automatically.') - - try: - from problem import * - except ImportError: - import_error(sys.exc_info()[1]) - - # Support single-problem configurations - if globalconf.tasknames is None: - shouldprintnames = False - globalconf.multiproblem = False - globalconf.tasknames = os.path.curdir, - else: - globalconf.multiproblem = True - shouldprintnames = True - - ntasks = 0 - nfulltasks = 0 - maxscore = 0 - realscore = 0 - - for taskname in (globalconf.tasknames if not options.problems else options.problems): - problem = Problem(taskname) - - if ntasks and not options.copyonly: say() - if shouldprintnames: say(taskname) - - if options.copyonly: - problem.copytestdata() - else: - real, max = problem.test() - - ntasks += 1 - nfulltasks += real == max - realscore += real - maxscore += max - - if options.copyonly: - sys.exit() - - if ntasks != 1: - say() - say('Grand grand total: %g/%g weighted points; %d/%d problems solved fully' % (realscore, maxscore, nfulltasks, ntasks)) -except KeyboardInterrupt: - sys.exit('Exiting due to a keyboard interrupt.') - -if options.pause: - say('Press any key to exit...') - sys.stdout.flush() - - if pause: - pause() - elif callable(globalconf.pause): - globalconf.pause() - else: - with open(os.devnull, 'w') as devnull: - subprocess.call(globalconf.pause, stdout=devnull, stderr=subprocess.STDOUT) \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/upreckon.sublime-project --- a/2.00/upreckon.sublime-project Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ - - - - - - - - diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile.py --- a/2.00/zipfile.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -import sys -if sys.version_info[0] >= 3: - from zipfile31 import * -elif sys.version_info[1] >= 7: - from zipfile27 import * -else: - from zipfile26 import * \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile26.py --- a/2.00/zipfile26.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1436 +0,0 @@ -""" -Read and write ZIP files. 
-""" -# Improved by Chortos-2 in 2009 and 2010 (added bzip2 support) -import struct, os, time, sys, shutil -import binascii, cStringIO, stat - -try: - import zlib # We may need its compression method - crc32 = zlib.crc32 -except ImportError: - zlib = None - crc32 = binascii.crc32 - -try: - import bz2 # We may need its compression method -except ImportError: - bz2 = None - -__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", - "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - -class BadZipfile(Exception): - pass - - -class LargeZipFile(Exception): - """ - Raised when writing a zipfile, the zipfile requires ZIP64 extensions - and those extensions are disabled. - """ - -error = BadZipfile # The exception raised by this module - -ZIP64_LIMIT = (1 << 31) - 1 -ZIP_FILECOUNT_LIMIT = 1 << 16 -ZIP_MAX_COMMENT = (1 << 16) - 1 - -# constants for Zip file compression methods -ZIP_STORED = 0 -ZIP_DEFLATED = 8 -ZIP_BZIP2 = 12 -# Other ZIP compression methods not supported - -# Below are some formats and associated data for reading/writing headers using -# the struct module. The names and structures of headers/records are those used -# in the PKWARE description of the ZIP file format: -# http://www.pkware.com/documents/casestudies/APPNOTE.TXT -# (URL valid as of January 2008) - -# The "end of central directory" structure, magic number, size, and indices -# (section V.I in the format document) -structEndArchive = "<4s4H2LH" -stringEndArchive = "PK\005\006" -sizeEndCentDir = struct.calcsize(structEndArchive) - -_ECD_SIGNATURE = 0 -_ECD_DISK_NUMBER = 1 -_ECD_DISK_START = 2 -_ECD_ENTRIES_THIS_DISK = 3 -_ECD_ENTRIES_TOTAL = 4 -_ECD_SIZE = 5 -_ECD_OFFSET = 6 -_ECD_COMMENT_SIZE = 7 -# These last two indices are not part of the structure as defined in the -# spec, but they are used internally by this module as a convenience -_ECD_COMMENT = 8 -_ECD_LOCATION = 9 - -# The "central directory" structure, magic number, size, and indices -# of entries in the structure (section V.F in the format document) -structCentralDir = "<4s4B4HL2L5H2L" -stringCentralDir = "PK\001\002" -sizeCentralDir = struct.calcsize(structCentralDir) - -# indexes of entries in the central directory structure -_CD_SIGNATURE = 0 -_CD_CREATE_VERSION = 1 -_CD_CREATE_SYSTEM = 2 -_CD_EXTRACT_VERSION = 3 -_CD_EXTRACT_SYSTEM = 4 -_CD_FLAG_BITS = 5 -_CD_COMPRESS_TYPE = 6 -_CD_TIME = 7 -_CD_DATE = 8 -_CD_CRC = 9 -_CD_COMPRESSED_SIZE = 10 -_CD_UNCOMPRESSED_SIZE = 11 -_CD_FILENAME_LENGTH = 12 -_CD_EXTRA_FIELD_LENGTH = 13 -_CD_COMMENT_LENGTH = 14 -_CD_DISK_NUMBER_START = 15 -_CD_INTERNAL_FILE_ATTRIBUTES = 16 -_CD_EXTERNAL_FILE_ATTRIBUTES = 17 -_CD_LOCAL_HEADER_OFFSET = 18 - -# The "local file header" structure, magic number, size, and indices -# (section V.A in the format document) -structFileHeader = "<4s2B4HL2L2H" -stringFileHeader = "PK\003\004" -sizeFileHeader = struct.calcsize(structFileHeader) - -_FH_SIGNATURE = 0 -_FH_EXTRACT_VERSION = 1 -_FH_EXTRACT_SYSTEM = 2 -_FH_GENERAL_PURPOSE_FLAG_BITS = 3 -_FH_COMPRESSION_METHOD = 4 -_FH_LAST_MOD_TIME = 5 -_FH_LAST_MOD_DATE = 6 -_FH_CRC = 7 -_FH_COMPRESSED_SIZE = 8 -_FH_UNCOMPRESSED_SIZE = 9 -_FH_FILENAME_LENGTH = 10 -_FH_EXTRA_FIELD_LENGTH = 11 - -# The "Zip64 end of central directory locator" structure, magic number, and size -structEndArchive64Locator = "<4sLQL" -stringEndArchive64Locator = "PK\x06\x07" -sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) - -# The "Zip64 end of central directory" record, magic number, size, and indices -# (section 
V.G in the format document) -structEndArchive64 = "<4sQ2H2L4Q" -stringEndArchive64 = "PK\x06\x06" -sizeEndCentDir64 = struct.calcsize(structEndArchive64) - -_CD64_SIGNATURE = 0 -_CD64_DIRECTORY_RECSIZE = 1 -_CD64_CREATE_VERSION = 2 -_CD64_EXTRACT_VERSION = 3 -_CD64_DISK_NUMBER = 4 -_CD64_DISK_NUMBER_START = 5 -_CD64_NUMBER_ENTRIES_THIS_DISK = 6 -_CD64_NUMBER_ENTRIES_TOTAL = 7 -_CD64_DIRECTORY_SIZE = 8 -_CD64_OFFSET_START_CENTDIR = 9 - -def is_zipfile(filename): - """Quickly see if file is a ZIP file by checking the magic number.""" - try: - fpin = open(filename, "rb") - endrec = _EndRecData(fpin) - fpin.close() - if endrec: - return True # file has correct magic number - except IOError: - pass - return False - -def _EndRecData64(fpin, offset, endrec): - """ - Read the ZIP64 end-of-archive records and use that to update endrec - """ - fpin.seek(offset - sizeEndCentDir64Locator, 2) - data = fpin.read(sizeEndCentDir64Locator) - sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) - if sig != stringEndArchive64Locator: - return endrec - - if diskno != 0 or disks != 1: - raise BadZipfile("zipfiles that span multiple disks are not supported") - - # Assume no 'zip64 extensible data' - fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) - data = fpin.read(sizeEndCentDir64) - sig, sz, create_version, read_version, disk_num, disk_dir, \ - dircount, dircount2, dirsize, diroffset = \ - struct.unpack(structEndArchive64, data) - if sig != stringEndArchive64: - return endrec - - # Update the original endrec using data from the ZIP64 record - endrec[_ECD_SIGNATURE] = sig - endrec[_ECD_DISK_NUMBER] = disk_num - endrec[_ECD_DISK_START] = disk_dir - endrec[_ECD_ENTRIES_THIS_DISK] = dircount - endrec[_ECD_ENTRIES_TOTAL] = dircount2 - endrec[_ECD_SIZE] = dirsize - endrec[_ECD_OFFSET] = diroffset - return endrec - - -def _EndRecData(fpin): - """Return data from the "End of Central Directory" record, or None. - - The data is a list of the nine items in the ZIP "End of central dir" - record followed by a tenth item, the file seek offset of this record.""" - - # Determine file size - fpin.seek(0, 2) - filesize = fpin.tell() - - # Check to see if this is ZIP file with no archive comment (the - # "end of central directory" structure should be the last item in the - # file if this is the case). - try: - fpin.seek(-sizeEndCentDir, 2) - except IOError: - return None - data = fpin.read() - if data[0:4] == stringEndArchive and data[-2:] == "\000\000": - # the signature is correct and there's no comment, unpack structure - endrec = struct.unpack(structEndArchive, data) - endrec=list(endrec) - - # Append a blank comment and record start offset - endrec.append("") - endrec.append(filesize - sizeEndCentDir) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, -sizeEndCentDir, endrec) - - # Either this is not a ZIP file, or it is a ZIP file with an archive - # comment. Search the end of the file for the "end of central directory" - # record signature. The comment is the last item in the ZIP file and may be - # up to 64K long. It is assumed that the "end of central directory" magic - # number does not appear in the comment. 
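- # (The 64 KiB bound follows from the EOCD record's 2-byte comment-length - # field: a comment is at most ZIP_MAX_COMMENT == (1 << 16) - 1 bytes long, - # so the record always begins within the last 64 KiB plus its own size.)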
- maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) - fpin.seek(maxCommentStart, 0) - data = fpin.read() - start = data.rfind(stringEndArchive) - if start >= 0: - # found the magic number; attempt to unpack and interpret - recData = data[start:start+sizeEndCentDir] - endrec = list(struct.unpack(structEndArchive, recData)) - comment = data[start+sizeEndCentDir:] - # check that comment length is correct - if endrec[_ECD_COMMENT_SIZE] == len(comment): - # Append the archive comment and start offset - endrec.append(comment) - endrec.append(maxCommentStart + start) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, maxCommentStart + start - filesize, - endrec) - - # Unable to find a valid end of central directory structure - return - - -class ZipInfo (object): - """Class with attributes describing each file in the ZIP archive.""" - - __slots__ = ( - 'orig_filename', - 'filename', - 'date_time', - 'compress_type', - 'comment', - 'extra', - 'create_system', - 'create_version', - 'extract_version', - 'reserved', - 'flag_bits', - 'volume', - 'internal_attr', - 'external_attr', - 'header_offset', - 'CRC', - 'compress_size', - 'file_size', - '_raw_time', - ) - - def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): - self.orig_filename = filename # Original file name in archive - - # Terminate the file name at the first null byte. Null bytes in file - # names are used as tricks by viruses in archives. - null_byte = filename.find(chr(0)) - if null_byte >= 0: - filename = filename[0:null_byte] - # This is used to ensure paths in generated ZIP files always use - # forward slashes as the directory separator, as required by the - # ZIP format specification. - if os.sep != "/" and os.sep in filename: - filename = filename.replace(os.sep, "/") - - self.filename = filename # Normalized file name - self.date_time = date_time # year, month, day, hour, min, sec - # Standard values: - self.compress_type = ZIP_STORED # Type of compression for the file - self.comment = "" # Comment for each file - self.extra = "" # ZIP extra data - if sys.platform == 'win32': - self.create_system = 0 # System which created ZIP archive - else: - # Assume everything else is unix-y - self.create_system = 3 # System which created ZIP archive - self.create_version = 20 # Version which created ZIP archive - self.extract_version = 20 # Version needed to extract archive - self.reserved = 0 # Must be zero - self.flag_bits = 0 # ZIP flag bits - self.volume = 0 # Volume number of file header - self.internal_attr = 0 # Internal attributes - self.external_attr = 0 # External file attributes - # Other attributes are set by class ZipFile: - # header_offset Byte offset to the file header - # CRC CRC-32 of the uncompressed file - # compress_size Size of the compressed file - # file_size Size of the uncompressed file - - def FileHeader(self): - """Return the per-file header as a string.""" - dt = self.date_time - dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] - dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) - if self.flag_bits & 0x08: - # Set these to zero because we write them after the file data - CRC = compress_size = file_size = 0 - else: - CRC = self.CRC - compress_size = self.compress_size - file_size = self.file_size - - extra = self.extra - - if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: - # File is larger than what fits into a 4 byte integer, - # fall back to the ZIP64 extension - fmt = '= 24: - counts = unpack('> 1) & 0x7FFFFFFF) ^ poly - else: - crc 
= ((crc >> 1) & 0x7FFFFFFF) - table[i] = crc - return table - crctable = _GenerateCRCTable() - - def _crc32(self, ch, crc): - """Compute the CRC32 primitive on one byte.""" - return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff] - - def __init__(self, pwd): - self.key0 = 305419896 - self.key1 = 591751049 - self.key2 = 878082192 - for p in pwd: - self._UpdateKeys(p) - - def _UpdateKeys(self, c): - self.key0 = self._crc32(c, self.key0) - self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295 - self.key1 = (self.key1 * 134775813 + 1) & 4294967295 - self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2) - - def __call__(self, c): - """Decrypt a single character.""" - c = ord(c) - k = self.key2 | 2 - c = c ^ (((k * (k^1)) >> 8) & 255) - c = chr(c) - self._UpdateKeys(c) - return c - -class ZipExtFile: - """File-like object for reading an archive member. - Is returned by ZipFile.open(). - """ - - def __init__(self, fileobj, zipinfo, decrypt=None): - self.fileobj = fileobj - self.decrypter = decrypt - self.bytes_read = 0L - self.rawbuffer = '' - self.readbuffer = '' - self.linebuffer = '' - self.eof = False - self.univ_newlines = False - self.nlSeps = ("\n", ) - self.lastdiscard = '' - - self.compress_type = zipinfo.compress_type - self.compress_size = zipinfo.compress_size - - self.closed = False - self.mode = "r" - self.name = zipinfo.filename - - # read from compressed files in 64k blocks - self.compreadsize = 64*1024 - if self.compress_type == ZIP_DEFLATED: - self.dc = zlib.decompressobj(-15) - elif self.compress_type == ZIP_BZIP2: - self.dc = bz2.BZ2Decompressor() - self.compreadsize = 900000 - - def set_univ_newlines(self, univ_newlines): - self.univ_newlines = univ_newlines - - # pick line separator char(s) based on universal newlines flag - self.nlSeps = ("\n", ) - if self.univ_newlines: - self.nlSeps = ("\r\n", "\r", "\n") - - def __iter__(self): - return self - - def next(self): - nextline = self.readline() - if not nextline: - raise StopIteration() - - return nextline - - def close(self): - self.closed = True - - def _checkfornewline(self): - nl, nllen = -1, -1 - if self.linebuffer: - # ugly check for cases where half of an \r\n pair was - # read on the last pass, and the \r was discarded. In this - # case we just throw away the \n at the start of the buffer. - if (self.lastdiscard, self.linebuffer[0]) == ('\r','\n'): - self.linebuffer = self.linebuffer[1:] - - for sep in self.nlSeps: - nl = self.linebuffer.find(sep) - if nl >= 0: - nllen = len(sep) - return nl, nllen - - return nl, nllen - - def readline(self, size = -1): - """Read a line with approx. size. If size is negative, - read a whole line. 
- """ - if size < 0: - size = sys.maxint - elif size == 0: - return '' - - # check for a newline already in buffer - nl, nllen = self._checkfornewline() - - if nl >= 0: - # the next line was already in the buffer - nl = min(nl, size) - else: - # no line break in buffer - try to read more - size -= len(self.linebuffer) - while nl < 0 and size > 0: - buf = self.read(min(size, 100)) - if not buf: - break - self.linebuffer += buf - size -= len(buf) - - # check for a newline in buffer - nl, nllen = self._checkfornewline() - - # we either ran out of bytes in the file, or - # met the specified size limit without finding a newline, - # so return current buffer - if nl < 0: - s = self.linebuffer - self.linebuffer = '' - return s - - buf = self.linebuffer[:nl] - self.lastdiscard = self.linebuffer[nl:nl + nllen] - self.linebuffer = self.linebuffer[nl + nllen:] - - # line is always returned with \n as newline char (except possibly - # for a final incomplete line in the file, which is handled above). - return buf + "\n" - - def readlines(self, sizehint = -1): - """Return a list with all (following) lines. The sizehint parameter - is ignored in this implementation. - """ - result = [] - while True: - line = self.readline() - if not line: break - result.append(line) - return result - - def read(self, size = None): - # act like file() obj and return empty string if size is 0 - if size == 0: - return '' - - # determine read size - bytesToRead = self.compress_size - self.bytes_read - - # adjust read size for encrypted files since the first 12 bytes - # are for the encryption/password information - if self.decrypter is not None: - bytesToRead -= 12 - - if size is not None and size >= 0: - if self.compress_type == ZIP_STORED: - lr = len(self.readbuffer) - bytesToRead = min(bytesToRead, size - lr) - else: - if len(self.readbuffer) > size: - # the user has requested fewer bytes than we've already - # pulled through the decompressor; don't read any more - bytesToRead = 0 - else: - # user will use up the buffer, so read some more - lr = len(self.rawbuffer) - bytesToRead = min(bytesToRead, self.compreadsize - lr) - - # avoid reading past end of file contents - if bytesToRead + self.bytes_read > self.compress_size: - bytesToRead = self.compress_size - self.bytes_read - - # try to read from file (if necessary) - if bytesToRead > 0: - bytes = self.fileobj.read(bytesToRead) - self.bytes_read += len(bytes) - self.rawbuffer += bytes - - # handle contents of raw buffer - if self.rawbuffer: - newdata = self.rawbuffer - self.rawbuffer = '' - - # decrypt new data if we were given an object to handle that - if newdata and self.decrypter is not None: - newdata = ''.join(map(self.decrypter, newdata)) - - # decompress newly read data if necessary - if newdata and self.compress_type != ZIP_STORED: - newdata = self.dc.decompress(newdata) - self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' - if self.eof and len(self.rawbuffer) == 0: - # we're out of raw bytes (both from the file and - # the local buffer); flush just to make sure the - # decompressor is done - try: - newdata += self.dc.flush() - except AttributeError: - pass - # prevent decompressor from being used again - self.dc = None - - self.readbuffer += newdata - - - # return what the user asked for - if size is None or len(self.readbuffer) <= size: - bytes = self.readbuffer - self.readbuffer = '' - else: - bytes = self.readbuffer[:size] - self.readbuffer = self.readbuffer[size:] - - return bytes - - -class ZipFile: - """ Class with 
methods to open, read, write, close, list zip files. - - z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) - - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read "r", write "w" or append "a". - compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), - or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. - - """ - - fp = None # Set here since __del__ checks it - - def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): - """Open the ZIP file with mode read "r", write "w" or append "a".""" - if mode not in ("r", "w", "a"): - raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') - - if compression == ZIP_STORED: - pass - elif compression == ZIP_DEFLATED: - if not zlib: - raise RuntimeError,\ - "Compression requires the (missing) zlib module" - elif compression == ZIP_BZIP2: - if not bz2: - raise RuntimeError,\ - "Compression requires the (missing) bz2 module" - else: - raise RuntimeError, "That compression method is not supported" - - self._allowZip64 = allowZip64 - self._didModify = False - self.debug = 0 # Level of printing: 0 through 3 - self.NameToInfo = {} # Find file info given name - self.filelist = [] # List of ZipInfo instances for archive - self.compression = compression # Method of compression - self.mode = key = mode.replace('b', '')[0] - self.pwd = None - self.comment = '' - - # Check if we were passed a file-like object - if isinstance(file, basestring): - self._filePassed = 0 - self.filename = file - modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} - try: - self.fp = open(file, modeDict[mode]) - except IOError: - if mode == 'a': - mode = key = 'w' - self.fp = open(file, modeDict[mode]) - else: - raise - else: - self._filePassed = 1 - self.fp = file - self.filename = getattr(file, 'name', None) - - if key == 'r': - self._GetContents() - elif key == 'w': - pass - elif key == 'a': - try: # See if file is a zip file - self._RealGetContents() - # seek to start of directory and overwrite - self.fp.seek(self.start_dir, 0) - except BadZipfile: # file is not a zip file, just append - self.fp.seek(0, 2) - else: - if not self._filePassed: - self.fp.close() - self.fp = None - raise RuntimeError, 'Mode must be "r", "w" or "a"' - - def _GetContents(self): - """Read the directory, making sure we close the file if the format - is bad.""" - try: - self._RealGetContents() - except BadZipfile: - if not self._filePassed: - self.fp.close() - self.fp = None - raise - - def _RealGetContents(self): - """Read in the table of contents for the ZIP file.""" - fp = self.fp - endrec = _EndRecData(fp) - if not endrec: - raise BadZipfile, "File is not a zip file" - if self.debug > 1: - print endrec - size_cd = endrec[_ECD_SIZE] # bytes in central directory - offset_cd = endrec[_ECD_OFFSET] # offset of central directory - self.comment = endrec[_ECD_COMMENT] # archive comment - - # "concat" is zero, unless zip was concatenated to another file - concat = endrec[_ECD_LOCATION] - size_cd - offset_cd - if endrec[_ECD_SIGNATURE] == stringEndArchive64: - # If Zip64 extension structures are present, account for them - concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) - - if self.debug > 2: - inferred = concat + offset_cd - print "given, inferred, offset", offset_cd, inferred, concat - # self.start_dir: Position of 
start of central directory - self.start_dir = offset_cd + concat - fp.seek(self.start_dir, 0) - data = fp.read(size_cd) - fp = cStringIO.StringIO(data) - total = 0 - while total < size_cd: - centdir = fp.read(sizeCentralDir) - if centdir[0:4] != stringCentralDir: - raise BadZipfile, "Bad magic number for central directory" - centdir = struct.unpack(structCentralDir, centdir) - if self.debug > 2: - print centdir - filename = fp.read(centdir[_CD_FILENAME_LENGTH]) - # Create ZipInfo instance to store file information - x = ZipInfo(filename) - x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) - x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) - x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] - (x.create_version, x.create_system, x.extract_version, x.reserved, - x.flag_bits, x.compress_type, t, d, - x.CRC, x.compress_size, x.file_size) = centdir[1:12] - x.volume, x.internal_attr, x.external_attr = centdir[15:18] - # Convert date/time code to (year, month, day, hour, min, sec) - x._raw_time = t - x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, - t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) - - x._decodeExtra() - x.header_offset = x.header_offset + concat - x.filename = x._decodeFilename() - self.filelist.append(x) - self.NameToInfo[x.filename] = x - - # update total bytes read from central directory - total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] - + centdir[_CD_EXTRA_FIELD_LENGTH] - + centdir[_CD_COMMENT_LENGTH]) - - if self.debug > 2: - print "total", total - - - def namelist(self): - """Return a list of file names in the archive.""" - l = [] - for data in self.filelist: - l.append(data.filename) - return l - - def infolist(self): - """Return a list of class ZipInfo instances for files in the - archive.""" - return self.filelist - - def printdir(self): - """Print a table of contents for the zip file.""" - print "%-46s %19s %12s" % ("File Name", "Modified ", "Size") - for zinfo in self.filelist: - date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] - print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size) - - def testzip(self): - """Read all the files and check the CRC.""" - chunk_size = 2 ** 20 - for zinfo in self.filelist: - try: - # Read by chunks, to avoid an OverflowError or a - # MemoryError with very large embedded files. 
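- # (Each read() call pulls at most chunk_size == 2**20 bytes through the - # decompressor, so even very large members are walked to EOF without ever - # being held in memory in full.)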
- f = self.open(zinfo.filename, "r") - while f.read(chunk_size): # Check CRC-32 - pass - except BadZipfile: - return zinfo.filename - - def getinfo(self, name): - """Return the instance of ZipInfo given 'name'.""" - info = self.NameToInfo.get(name) - if info is None: - raise KeyError( - 'There is no item named %r in the archive' % name) - - return info - - def setpassword(self, pwd): - """Set default password for encrypted files.""" - self.pwd = pwd - - def read(self, name, pwd=None): - """Return file bytes (as a string) for name.""" - return self.open(name, "r", pwd).read() - - def open(self, name, mode="r", pwd=None): - """Return file-like object for 'name'.""" - if mode not in ("r", "U", "rU"): - raise RuntimeError, 'open() requires mode "r", "U", or "rU"' - if not self.fp: - raise RuntimeError, \ - "Attempt to read ZIP archive that was already closed" - - # Only open a new file for instances where we were not - # given a file object in the constructor - if self._filePassed: - zef_file = self.fp - else: - zef_file = open(self.filename, 'rb') - - # Make sure we have an info object - if isinstance(name, ZipInfo): - # 'name' is already an info object - zinfo = name - else: - # Get info object for name - zinfo = self.getinfo(name) - - zef_file.seek(zinfo.header_offset, 0) - - # Skip the file header: - fheader = zef_file.read(sizeFileHeader) - if fheader[0:4] != stringFileHeader: - raise BadZipfile, "Bad magic number for file header" - - fheader = struct.unpack(structFileHeader, fheader) - fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) - if fheader[_FH_EXTRA_FIELD_LENGTH]: - zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) - - if fname != zinfo.orig_filename: - raise BadZipfile, \ - 'File name in directory "%s" and header "%s" differ.' % ( - zinfo.orig_filename, fname) - - # check for encrypted flag & handle password - is_encrypted = zinfo.flag_bits & 0x1 - zd = None - if is_encrypted: - if not pwd: - pwd = self.pwd - if not pwd: - raise RuntimeError, "File %s is encrypted, " \ - "password required for extraction" % name - - zd = _ZipDecrypter(pwd) - # The first 12 bytes in the cypher stream is an encryption header - # used to strengthen the algorithm. The first 11 bytes are - # completely random, while the 12th contains the MSB of the CRC, - # or the MSB of the file time depending on the header type - # and is used to check the correctness of the password. - bytes = zef_file.read(12) - h = map(zd, bytes[0:12]) - if zinfo.flag_bits & 0x8: - # compare against the file type from extended local headers - check_byte = (zinfo._raw_time >> 8) & 0xff - else: - # compare against the CRC otherwise - check_byte = (zinfo.CRC >> 24) & 0xff - if ord(h[11]) != check_byte: - raise RuntimeError("Bad password for file", name) - - # build and return a ZipExtFile - if zd is None: - zef = ZipExtFile(zef_file, zinfo) - else: - zef = ZipExtFile(zef_file, zinfo, zd) - - # set universal newlines on ZipExtFile if necessary - if "U" in mode: - zef.set_univ_newlines(True) - return zef - - def extract(self, member, path=None, pwd=None): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. `member' may be a filename or a ZipInfo object. You can - specify a different directory using `path'. 
- """ - if not isinstance(member, ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return self._extract_member(member, path, pwd) - - def extractall(self, path=None, members=None, pwd=None): - """Extract all members from the archive to the current working - directory. `path' specifies a different directory to extract to. - `members' is optional and must be a subset of the list returned - by namelist(). - """ - if members is None: - members = self.namelist() - - for zipinfo in members: - self.extract(zipinfo, path, pwd) - - def _extract_member(self, member, targetpath, pwd): - """Extract the ZipInfo object 'member' to a physical - file on the path targetpath. - """ - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. - upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - source = self.open(member, pwd=pwd) - target = file(targetpath, "wb") - shutil.copyfileobj(source, target) - source.close() - target.close() - - return targetpath - - def _writecheck(self, zinfo): - """Check for errors before writing a file to the archive.""" - if zinfo.filename in self.NameToInfo: - if self.debug: # Warning for duplicate names - print "Duplicate name:", zinfo.filename - if self.mode not in ("w", "a"): - raise RuntimeError, 'write() requires mode "w" or "a"' - if not self.fp: - raise RuntimeError, \ - "Attempt to write ZIP archive that was already closed" - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError, \ - "Compression requires the (missing) zlib module" - if zinfo.compress_type == ZIP_BZIP2 and not bz2: - raise RuntimeError, \ - "Compression requires the (missing) bz2 module" - if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError, \ - "That compression method is not supported" - if zinfo.file_size > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile("Filesize would require ZIP64 extensions") - if zinfo.header_offset > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile("Zipfile size would require ZIP64 extensions") - - def write(self, filename, arcname=None, compress_type=None): - """Put the bytes from filename into the archive under the name - arcname.""" - if not self.fp: - raise RuntimeError( - "Attempt to write to ZIP archive that was already closed") - - st = os.stat(filename) - isdir = stat.S_ISDIR(st.st_mode) - mtime = time.localtime(st.st_mtime) - date_time = mtime[0:6] - # Create ZipInfo instance to store file information - if arcname is None: - arcname = filename - arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) - while arcname[0] in (os.sep, os.altsep): - arcname = arcname[1:] - if isdir: - arcname += '/' - zinfo = ZipInfo(arcname, date_time) - zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes - if compress_type is None: - 
zinfo.compress_type = self.compression - else: - zinfo.compress_type = compress_type - - zinfo.file_size = st.st_size - zinfo.flag_bits = 0x00 - zinfo.header_offset = self.fp.tell() # Start of header bytes - - self._writecheck(zinfo) - self._didModify = True - - if isdir: - zinfo.file_size = 0 - zinfo.compress_size = 0 - zinfo.CRC = 0 - self.filelist.append(zinfo) - self.NameToInfo[zinfo.filename] = zinfo - self.fp.write(zinfo.FileHeader()) - return - - fp = open(filename, "rb") - # Must overwrite CRC and sizes with correct data later - zinfo.CRC = CRC = 0 - zinfo.compress_size = compress_size = 0 - zinfo.file_size = file_size = 0 - self.fp.write(zinfo.FileHeader()) - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) - elif zinfo.compress_type == ZIP_BZIP2: - cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: - buf = fp.read(1024 * 8) - if not buf: - break - file_size = file_size + len(buf) - CRC = crc32(buf, CRC) & 0xffffffff - if cmpr: - buf = cmpr.compress(buf) - compress_size = compress_size + len(buf) - self.fp.write(buf) - fp.close() - if cmpr: - buf = cmpr.flush() - compress_size = compress_size + len(buf) - self.fp.write(buf) - zinfo.compress_size = compress_size - else: - zinfo.compress_size = file_size - zinfo.CRC = CRC - zinfo.file_size = file_size - # Seek backwards and write CRC and file sizes - position = self.fp.tell() # Preserve current position in file - self.fp.seek(zinfo.header_offset + 14, 0) - self.fp.write(struct.pack(" ZIP64_LIMIT \ - or zinfo.compress_size > ZIP64_LIMIT: - extra.append(zinfo.file_size) - extra.append(zinfo.compress_size) - file_size = 0xffffffff - compress_size = 0xffffffff - else: - file_size = zinfo.file_size - compress_size = zinfo.compress_size - - if zinfo.header_offset > ZIP64_LIMIT: - extra.append(zinfo.header_offset) - header_offset = 0xffffffffL - else: - header_offset = zinfo.header_offset - - extra_data = zinfo.extra - if extra: - # Append a ZIP64 field to the extra's - extra_data = struct.pack( - '>sys.stderr, (structCentralDir, - stringCentralDir, create_version, - zinfo.create_system, extract_version, zinfo.reserved, - zinfo.flag_bits, zinfo.compress_type, dostime, dosdate, - zinfo.CRC, compress_size, file_size, - len(zinfo.filename), len(extra_data), len(zinfo.comment), - 0, zinfo.internal_attr, zinfo.external_attr, - header_offset) - raise - self.fp.write(centdir) - self.fp.write(filename) - self.fp.write(extra_data) - self.fp.write(zinfo.comment) - - pos2 = self.fp.tell() - # Write end-of-zip-archive record - centDirCount = count - centDirSize = pos2 - pos1 - centDirOffset = pos1 - if (centDirCount >= ZIP_FILECOUNT_LIMIT or - centDirOffset > ZIP64_LIMIT or - centDirSize > ZIP64_LIMIT): - # Need to write the ZIP64 end-of-archive records - zip64endrec = struct.pack( - structEndArchive64, stringEndArchive64, - 44, 45, 45, 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset) - self.fp.write(zip64endrec) - - zip64locrec = struct.pack( - structEndArchive64Locator, - stringEndArchive64Locator, 0, pos2, 1) - self.fp.write(zip64locrec) - centDirCount = min(centDirCount, 0xFFFF) - centDirSize = min(centDirSize, 0xFFFFFFFF) - centDirOffset = min(centDirOffset, 0xFFFFFFFF) - - # check for valid comment length - if len(self.comment) >= ZIP_MAX_COMMENT: - if self.debug > 0: - msg = 'Archive comment is too long; truncating to %d bytes' \ - % ZIP_MAX_COMMENT - self.comment = self.comment[:ZIP_MAX_COMMENT] - - endrec = struct.pack(structEndArchive, 
stringEndArchive, - 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset, len(self.comment)) - self.fp.write(endrec) - self.fp.write(self.comment) - self.fp.flush() - - if not self._filePassed: - self.fp.close() - self.fp = None - - -class PyZipFile(ZipFile): - """Class to create ZIP archives with Python library files and packages.""" - - def writepy(self, pathname, basename = ""): - """Add all files from "pathname" to the ZIP archive. - - If pathname is a package directory, search the directory and - all package subdirectories recursively for all *.py and enter - the modules into the archive. If pathname is a plain - directory, listdir *.py and enter all modules. Else, pathname - must be a Python *.py file and the module will be put into the - archive. Added modules are always module.pyo or module.pyc. - This method will compile the module.py into module.pyc if - necessary. - """ - dir, name = os.path.split(pathname) - if os.path.isdir(pathname): - initname = os.path.join(pathname, "__init__.py") - if os.path.isfile(initname): - # This is a package directory, add it - if basename: - basename = "%s/%s" % (basename, name) - else: - basename = name - if self.debug: - print "Adding package in", pathname, "as", basename - fname, arcname = self._get_codename(initname[0:-3], basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - dirlist = os.listdir(pathname) - dirlist.remove("__init__.py") - # Add all *.py files and package subdirectories - for filename in dirlist: - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if os.path.isdir(path): - if os.path.isfile(os.path.join(path, "__init__.py")): - # This is a package directory, add it - self.writepy(path, basename) # Recursive call - elif ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - else: - # This is NOT a package directory, add its files at top level - if self.debug: - print "Adding files from directory", pathname - for filename in os.listdir(pathname): - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - else: - if pathname[-3:] != ".py": - raise RuntimeError, \ - 'Files added with writepy() must end with ".py"' - fname, arcname = self._get_codename(pathname[0:-3], basename) - if self.debug: - print "Adding file", arcname - self.write(fname, arcname) - - def _get_codename(self, pathname, basename): - """Return (filename, archivename) for the path. - - Given a module name path, return the correct file path and - archive name, compiling if necessary. For example, given - /python/lib/string, return (/python/lib/string.pyc, string). 
- """ - file_py = pathname + ".py" - file_pyc = pathname + ".pyc" - file_pyo = pathname + ".pyo" - if os.path.isfile(file_pyo) and \ - os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: - fname = file_pyo # Use .pyo file - elif not os.path.isfile(file_pyc) or \ - os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: - import py_compile - if self.debug: - print "Compiling", file_py - try: - py_compile.compile(file_py, file_pyc, None, True) - except py_compile.PyCompileError,err: - print err.msg - fname = file_pyc - else: - fname = file_pyc - archivename = os.path.split(fname)[1] - if basename: - archivename = "%s/%s" % (basename, archivename) - return (fname, archivename) - - -def main(args = None): - import textwrap - USAGE=textwrap.dedent("""\ - Usage: - zipfile.py -l zipfile.zip # Show listing of a zipfile - zipfile.py -t zipfile.zip # Test if a zipfile is valid - zipfile.py -e zipfile.zip target # Extract zipfile into target dir - zipfile.py -c zipfile.zip src ... # Create zipfile from sources - """) - if args is None: - args = sys.argv[1:] - - if not args or args[0] not in ('-l', '-c', '-e', '-t'): - print USAGE - sys.exit(1) - - if args[0] == '-l': - if len(args) != 2: - print USAGE - sys.exit(1) - zf = ZipFile(args[1], 'r') - zf.printdir() - zf.close() - - elif args[0] == '-t': - if len(args) != 2: - print USAGE - sys.exit(1) - zf = ZipFile(args[1], 'r') - zf.testzip() - print "Done testing" - - elif args[0] == '-e': - if len(args) != 3: - print USAGE - sys.exit(1) - - zf = ZipFile(args[1], 'r') - out = args[2] - for path in zf.namelist(): - if path.startswith('./'): - tgt = os.path.join(out, path[2:]) - else: - tgt = os.path.join(out, path) - - tgtdir = os.path.dirname(tgt) - if not os.path.exists(tgtdir): - os.makedirs(tgtdir) - fp = open(tgt, 'wb') - fp.write(zf.read(path)) - fp.close() - zf.close() - - elif args[0] == '-c': - if len(args) < 3: - print USAGE - sys.exit(1) - - def addToZip(zf, path, zippath): - if os.path.isfile(path): - zf.write(path, zippath, ZIP_DEFLATED) - elif os.path.isdir(path): - for nm in os.listdir(path): - addToZip(zf, - os.path.join(path, nm), os.path.join(zippath, nm)) - # else: ignore - - zf = ZipFile(args[1], 'w', allowZip64=True) - for src in args[2:]: - addToZip(zf, src, os.path.basename(src)) - - zf.close() - -if __name__ == "__main__": - main() diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile266.diff --- a/2.00/zipfile266.diff Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,126 +0,0 @@ ---- /usr/lib/python2.6/zipfile.py 2010-07-05 14:48:38.000000000 +0300 -+++ zipfile.py 2010-11-25 01:39:22.749743303 +0200 -@@ -1,6 +1,7 @@ - """ - Read and write ZIP files. 
- """ -+# Improved by Chortos-2 in 2009 and 2010 (added bzip2 support) - import struct, os, time, sys, shutil - import binascii, cStringIO, stat - -@@ -11,8 +12,13 @@ - zlib = None - crc32 = binascii.crc32 - -+try: -+ import bz2 # We may need its compression method -+except ImportError: -+ bz2 = None -+ - __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", -- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] -+ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - - class BadZipfile(Exception): - pass -@@ -33,6 +39,7 @@ - # constants for Zip file compression methods - ZIP_STORED = 0 - ZIP_DEFLATED = 8 -+ZIP_BZIP2 = 12 - # Other ZIP compression methods not supported - - # Below are some formats and associated data for reading/writing headers using -@@ -467,6 +474,9 @@ - self.compreadsize = 64*1024 - if self.compress_type == ZIP_DEFLATED: - self.dc = zlib.decompressobj(-15) -+ elif self.compress_type == ZIP_BZIP2: -+ self.dc = bz2.BZ2Decompressor() -+ self.compreadsize = 900000 - - def set_univ_newlines(self, univ_newlines): - self.univ_newlines = univ_newlines -@@ -578,7 +588,7 @@ - if self.compress_type == ZIP_STORED: - lr = len(self.readbuffer) - bytesToRead = min(bytesToRead, size - lr) -- elif self.compress_type == ZIP_DEFLATED: -+ else: - if len(self.readbuffer) > size: - # the user has requested fewer bytes than we've already - # pulled through the decompressor; don't read any more -@@ -608,14 +618,17 @@ - newdata = ''.join(map(self.decrypter, newdata)) - - # decompress newly read data if necessary -- if newdata and self.compress_type == ZIP_DEFLATED: -+ if newdata and self.compress_type != ZIP_STORED: - newdata = self.dc.decompress(newdata) -- self.rawbuffer = self.dc.unconsumed_tail -+ self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' - if self.eof and len(self.rawbuffer) == 0: - # we're out of raw bytes (both from the file and - # the local buffer); flush just to make sure the - # decompressor is done -- newdata += self.dc.flush() -+ try: -+ newdata += self.dc.flush() -+ except AttributeError: -+ pass - # prevent decompressor from being used again - self.dc = None - -@@ -641,7 +654,8 @@ - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read "r", write "w" or append "a". -- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). -+ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), -+ or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. 
-@@ -661,6 +675,10 @@ - if not zlib: - raise RuntimeError,\ - "Compression requires the (missing) zlib module" -+ elif compression == ZIP_BZIP2: -+ if not bz2: -+ raise RuntimeError,\ -+ "Compression requires the (missing) bz2 module" - else: - raise RuntimeError, "That compression method is not supported" - -@@ -987,7 +1005,10 @@ - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError, \ - "Compression requires the (missing) zlib module" -- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): -+ if zinfo.compress_type == ZIP_BZIP2 and not bz2: -+ raise RuntimeError, \ -+ "Compression requires the (missing) bz2 module" -+ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError, \ - "That compression method is not supported" - if zinfo.file_size > ZIP64_LIMIT: -@@ -1048,6 +1069,8 @@ - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) -+ elif zinfo.compress_type == ZIP_BZIP2: -+ cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: -@@ -1105,6 +1128,10 @@ - zlib.DEFLATED, -15) - bytes = co.compress(bytes) + co.flush() - zinfo.compress_size = len(bytes) # Compressed size -+ elif zinfo.compress_type == ZIP_BZIP2: -+ co = bz2.BZ2Compressor() -+ bytes = co.compress(bytes) + co.flush() -+ zinfo.compress_size = len(bytes) # Compressed size - else: - zinfo.compress_size = zinfo.file_size - zinfo.header_offset = self.fp.tell() # Start of header bytes diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile27.py --- a/2.00/zipfile27.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1468 +0,0 @@ -""" -Read and write ZIP files. -""" -# Improved by Chortos-2 in 2010 (added bzip2 support) -import struct, os, time, sys, shutil -import binascii, cStringIO, stat -import io -import re - -try: - import zlib # We may need its compression method - crc32 = zlib.crc32 -except ImportError: - zlib = None - crc32 = binascii.crc32 - -try: - import bz2 # We may need its compression method -except ImportError: - bz2 = None - -__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", - "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - -class BadZipfile(Exception): - pass - - -class LargeZipFile(Exception): - """ - Raised when writing a zipfile, the zipfile requires ZIP64 extensions - and those extensions are disabled. - """ - -error = BadZipfile # The exception raised by this module - -ZIP64_LIMIT = (1 << 31) - 1 -ZIP_FILECOUNT_LIMIT = 1 << 16 -ZIP_MAX_COMMENT = (1 << 16) - 1 - -# constants for Zip file compression methods -ZIP_STORED = 0 -ZIP_DEFLATED = 8 -ZIP_BZIP2 = 12 -# Other ZIP compression methods not supported - -# Below are some formats and associated data for reading/writing headers using -# the struct module. 
The names and structures of headers/records are those used -# in the PKWARE description of the ZIP file format: -# http://www.pkware.com/documents/casestudies/APPNOTE.TXT -# (URL valid as of January 2008) - -# The "end of central directory" structure, magic number, size, and indices -# (section V.I in the format document) -structEndArchive = "<4s4H2LH" -stringEndArchive = "PK\005\006" -sizeEndCentDir = struct.calcsize(structEndArchive) - -_ECD_SIGNATURE = 0 -_ECD_DISK_NUMBER = 1 -_ECD_DISK_START = 2 -_ECD_ENTRIES_THIS_DISK = 3 -_ECD_ENTRIES_TOTAL = 4 -_ECD_SIZE = 5 -_ECD_OFFSET = 6 -_ECD_COMMENT_SIZE = 7 -# These last two indices are not part of the structure as defined in the -# spec, but they are used internally by this module as a convenience -_ECD_COMMENT = 8 -_ECD_LOCATION = 9 - -# The "central directory" structure, magic number, size, and indices -# of entries in the structure (section V.F in the format document) -structCentralDir = "<4s4B4HL2L5H2L" -stringCentralDir = "PK\001\002" -sizeCentralDir = struct.calcsize(structCentralDir) - -# indexes of entries in the central directory structure -_CD_SIGNATURE = 0 -_CD_CREATE_VERSION = 1 -_CD_CREATE_SYSTEM = 2 -_CD_EXTRACT_VERSION = 3 -_CD_EXTRACT_SYSTEM = 4 -_CD_FLAG_BITS = 5 -_CD_COMPRESS_TYPE = 6 -_CD_TIME = 7 -_CD_DATE = 8 -_CD_CRC = 9 -_CD_COMPRESSED_SIZE = 10 -_CD_UNCOMPRESSED_SIZE = 11 -_CD_FILENAME_LENGTH = 12 -_CD_EXTRA_FIELD_LENGTH = 13 -_CD_COMMENT_LENGTH = 14 -_CD_DISK_NUMBER_START = 15 -_CD_INTERNAL_FILE_ATTRIBUTES = 16 -_CD_EXTERNAL_FILE_ATTRIBUTES = 17 -_CD_LOCAL_HEADER_OFFSET = 18 - -# The "local file header" structure, magic number, size, and indices -# (section V.A in the format document) -structFileHeader = "<4s2B4HL2L2H" -stringFileHeader = "PK\003\004" -sizeFileHeader = struct.calcsize(structFileHeader) - -_FH_SIGNATURE = 0 -_FH_EXTRACT_VERSION = 1 -_FH_EXTRACT_SYSTEM = 2 -_FH_GENERAL_PURPOSE_FLAG_BITS = 3 -_FH_COMPRESSION_METHOD = 4 -_FH_LAST_MOD_TIME = 5 -_FH_LAST_MOD_DATE = 6 -_FH_CRC = 7 -_FH_COMPRESSED_SIZE = 8 -_FH_UNCOMPRESSED_SIZE = 9 -_FH_FILENAME_LENGTH = 10 -_FH_EXTRA_FIELD_LENGTH = 11 - -# The "Zip64 end of central directory locator" structure, magic number, and size -structEndArchive64Locator = "<4sLQL" -stringEndArchive64Locator = "PK\x06\x07" -sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) - -# The "Zip64 end of central directory" record, magic number, size, and indices -# (section V.G in the format document) -structEndArchive64 = "<4sQ2H2L4Q" -stringEndArchive64 = "PK\x06\x06" -sizeEndCentDir64 = struct.calcsize(structEndArchive64) - -_CD64_SIGNATURE = 0 -_CD64_DIRECTORY_RECSIZE = 1 -_CD64_CREATE_VERSION = 2 -_CD64_EXTRACT_VERSION = 3 -_CD64_DISK_NUMBER = 4 -_CD64_DISK_NUMBER_START = 5 -_CD64_NUMBER_ENTRIES_THIS_DISK = 6 -_CD64_NUMBER_ENTRIES_TOTAL = 7 -_CD64_DIRECTORY_SIZE = 8 -_CD64_OFFSET_START_CENTDIR = 9 - -def _check_zipfile(fp): - try: - if _EndRecData(fp): - return True # file has correct magic number - except IOError: - pass - return False - -def is_zipfile(filename): - """Quickly see if a file is a ZIP file by checking the magic number. - - The filename argument may be a file or file-like object too. 
- """ - result = False - try: - if hasattr(filename, "read"): - result = _check_zipfile(fp=filename) - else: - with open(filename, "rb") as fp: - result = _check_zipfile(fp) - except IOError: - pass - return result - -def _EndRecData64(fpin, offset, endrec): - """ - Read the ZIP64 end-of-archive records and use that to update endrec - """ - try: - fpin.seek(offset - sizeEndCentDir64Locator, 2) - except IOError: - # If the seek fails, the file is not large enough to contain a ZIP64 - # end-of-archive record, so just return the end record we were given. - return endrec - - data = fpin.read(sizeEndCentDir64Locator) - sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) - if sig != stringEndArchive64Locator: - return endrec - - if diskno != 0 or disks != 1: - raise BadZipfile("zipfiles that span multiple disks are not supported") - - # Assume no 'zip64 extensible data' - fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) - data = fpin.read(sizeEndCentDir64) - sig, sz, create_version, read_version, disk_num, disk_dir, \ - dircount, dircount2, dirsize, diroffset = \ - struct.unpack(structEndArchive64, data) - if sig != stringEndArchive64: - return endrec - - # Update the original endrec using data from the ZIP64 record - endrec[_ECD_SIGNATURE] = sig - endrec[_ECD_DISK_NUMBER] = disk_num - endrec[_ECD_DISK_START] = disk_dir - endrec[_ECD_ENTRIES_THIS_DISK] = dircount - endrec[_ECD_ENTRIES_TOTAL] = dircount2 - endrec[_ECD_SIZE] = dirsize - endrec[_ECD_OFFSET] = diroffset - return endrec - - -def _EndRecData(fpin): - """Return data from the "End of Central Directory" record, or None. - - The data is a list of the nine items in the ZIP "End of central dir" - record followed by a tenth item, the file seek offset of this record.""" - - # Determine file size - fpin.seek(0, 2) - filesize = fpin.tell() - - # Check to see if this is ZIP file with no archive comment (the - # "end of central directory" structure should be the last item in the - # file if this is the case). - try: - fpin.seek(-sizeEndCentDir, 2) - except IOError: - return None - data = fpin.read() - if data[0:4] == stringEndArchive and data[-2:] == "\000\000": - # the signature is correct and there's no comment, unpack structure - endrec = struct.unpack(structEndArchive, data) - endrec=list(endrec) - - # Append a blank comment and record start offset - endrec.append("") - endrec.append(filesize - sizeEndCentDir) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, -sizeEndCentDir, endrec) - - # Either this is not a ZIP file, or it is a ZIP file with an archive - # comment. Search the end of the file for the "end of central directory" - # record signature. The comment is the last item in the ZIP file and may be - # up to 64K long. It is assumed that the "end of central directory" magic - # number does not appear in the comment. 
- maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) - fpin.seek(maxCommentStart, 0) - data = fpin.read() - start = data.rfind(stringEndArchive) - if start >= 0: - # found the magic number; attempt to unpack and interpret - recData = data[start:start+sizeEndCentDir] - endrec = list(struct.unpack(structEndArchive, recData)) - comment = data[start+sizeEndCentDir:] - # check that comment length is correct - if endrec[_ECD_COMMENT_SIZE] == len(comment): - # Append the archive comment and start offset - endrec.append(comment) - endrec.append(maxCommentStart + start) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, maxCommentStart + start - filesize, - endrec) - - # Unable to find a valid end of central directory structure - return - - -class ZipInfo (object): - """Class with attributes describing each file in the ZIP archive.""" - - __slots__ = ( - 'orig_filename', - 'filename', - 'date_time', - 'compress_type', - 'comment', - 'extra', - 'create_system', - 'create_version', - 'extract_version', - 'reserved', - 'flag_bits', - 'volume', - 'internal_attr', - 'external_attr', - 'header_offset', - 'CRC', - 'compress_size', - 'file_size', - '_raw_time', - ) - - def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): - self.orig_filename = filename # Original file name in archive - - # Terminate the file name at the first null byte. Null bytes in file - # names are used as tricks by viruses in archives. - null_byte = filename.find(chr(0)) - if null_byte >= 0: - filename = filename[0:null_byte] - # This is used to ensure paths in generated ZIP files always use - # forward slashes as the directory separator, as required by the - # ZIP format specification. - if os.sep != "/" and os.sep in filename: - filename = filename.replace(os.sep, "/") - - self.filename = filename # Normalized file name - self.date_time = date_time # year, month, day, hour, min, sec - # Standard values: - self.compress_type = ZIP_STORED # Type of compression for the file - self.comment = "" # Comment for each file - self.extra = "" # ZIP extra data - if sys.platform == 'win32': - self.create_system = 0 # System which created ZIP archive - else: - # Assume everything else is unix-y - self.create_system = 3 # System which created ZIP archive - self.create_version = 20 # Version which created ZIP archive - self.extract_version = 20 # Version needed to extract archive - self.reserved = 0 # Must be zero - self.flag_bits = 0 # ZIP flag bits - self.volume = 0 # Volume number of file header - self.internal_attr = 0 # Internal attributes - self.external_attr = 0 # External file attributes - # Other attributes are set by class ZipFile: - # header_offset Byte offset to the file header - # CRC CRC-32 of the uncompressed file - # compress_size Size of the compressed file - # file_size Size of the uncompressed file - - def FileHeader(self): - """Return the per-file header as a string.""" - dt = self.date_time - dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] - dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) - if self.flag_bits & 0x08: - # Set these to zero because we write them after the file data - CRC = compress_size = file_size = 0 - else: - CRC = self.CRC - compress_size = self.compress_size - file_size = self.file_size - - extra = self.extra - - if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: - # File is larger than what fits into a 4 byte integer, - # fall back to the ZIP64 extension - fmt = '= 24: - counts = unpack('> 1) & 0x7FFFFFFF) ^ poly - else: - crc 
= ((crc >> 1) & 0x7FFFFFFF) - table[i] = crc - return table - crctable = _GenerateCRCTable() - - def _crc32(self, ch, crc): - """Compute the CRC32 primitive on one byte.""" - return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff] - - def __init__(self, pwd): - self.key0 = 305419896 - self.key1 = 591751049 - self.key2 = 878082192 - for p in pwd: - self._UpdateKeys(p) - - def _UpdateKeys(self, c): - self.key0 = self._crc32(c, self.key0) - self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295 - self.key1 = (self.key1 * 134775813 + 1) & 4294967295 - self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2) - - def __call__(self, c): - """Decrypt a single character.""" - c = ord(c) - k = self.key2 | 2 - c = c ^ (((k * (k^1)) >> 8) & 255) - c = chr(c) - self._UpdateKeys(c) - return c - -class ZipExtFile(io.BufferedIOBase): - """File-like object for reading an archive member. - Is returned by ZipFile.open(). - """ - - # Max size supported by decompressor. - MAX_N = 1 << 31 - 1 - - # Read from compressed files in 4k blocks. - MIN_READ_SIZE = 4096 - - # Search for universal newlines or line chunks. - PATTERN = re.compile(r'^(?P[^\r\n]+)|(?P\n|\r\n?)') - - def __init__(self, fileobj, mode, zipinfo, decrypter=None): - self._fileobj = fileobj - self._decrypter = decrypter - - self._compress_type = zipinfo.compress_type - self._compress_size = zipinfo.compress_size - self._compress_left = zipinfo.compress_size - - if self._compress_type == ZIP_DEFLATED: - self._decompressor = zlib.decompressobj(-15) - elif self._compress_type == ZIP_BZIP2: - self._decompressor = bz2.BZ2Decompressor() - self.MIN_READ_SIZE = 900000 - self._unconsumed = '' - - self._readbuffer = '' - self._offset = 0 - - self._universal = 'U' in mode - self.newlines = None - - # Adjust read size for encrypted files since the first 12 bytes - # are for the encryption/password information. - if self._decrypter is not None: - self._compress_left -= 12 - - self.mode = mode - self.name = zipinfo.filename - - if hasattr(zipinfo, 'CRC'): - self._expected_crc = zipinfo.CRC - self._running_crc = crc32(b'') & 0xffffffff - else: - self._expected_crc = None - - def readline(self, limit=-1): - """Read and return a line from the stream. - - If limit is specified, at most limit bytes will be read. - """ - - if not self._universal and limit < 0: - # Shortcut common case - newline found in buffer. - i = self._readbuffer.find('\n', self._offset) + 1 - if i > 0: - line = self._readbuffer[self._offset: i] - self._offset = i - return line - - if not self._universal: - return io.BufferedIOBase.readline(self, limit) - - line = '' - while limit < 0 or len(line) < limit: - readahead = self.peek(2) - if readahead == '': - return line - - # - # Search for universal newlines or line chunks. - # - # The pattern returns either a line chunk or a newline, but not - # both. Combined with peek(2), we are assured that the sequence - # '\r\n' is always retrieved completely and never split into - # separate newlines - '\r', '\n' due to coincidental readaheads. 
- # - match = self.PATTERN.search(readahead) - newline = match.group('newline') - if newline is not None: - if self.newlines is None: - self.newlines = [] - if newline not in self.newlines: - self.newlines.append(newline) - self._offset += len(newline) - return line + '\n' - - chunk = match.group('chunk') - if limit >= 0: - chunk = chunk[: limit - len(line)] - - self._offset += len(chunk) - line += chunk - - return line - - def peek(self, n=1): - """Returns buffered bytes without advancing the position.""" - if n > len(self._readbuffer) - self._offset: - chunk = self.read(n) - self._offset -= len(chunk) - - # Return up to 512 bytes to reduce allocation overhead for tight loops. - return self._readbuffer[self._offset: self._offset + 512] - - def readable(self): - return True - - def read(self, n=-1): - """Read and return up to n bytes. - If the argument is omitted, None, or negative, data is read and returned until EOF is reached.. - """ - buf = '' - if n is None: - n = -1 - while True: - if n < 0: - data = self.read1(n) - elif n > len(buf): - data = self.read1(n - len(buf)) - else: - return buf - if len(data) == 0: - return buf - buf += data - - def _update_crc(self, newdata, eof): - # Update the CRC using the given data. - if self._expected_crc is None: - # No need to compute the CRC if we don't have a reference value - return - self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff - # Check the CRC if we're at the end of the file - if eof and self._running_crc != self._expected_crc: - raise BadZipfile("Bad CRC-32 for file %r" % self.name) - - def read1(self, n): - """Read up to n bytes with at most one read() system call.""" - - # Simplify algorithm (branching) by transforming negative n to large n. - if n < 0 or n is None: - n = self.MAX_N - - # Bytes available in read buffer. - len_readbuffer = len(self._readbuffer) - self._offset - - # Read from file. - if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed): - nbytes = n - len_readbuffer - len(self._unconsumed) - nbytes = max(nbytes, self.MIN_READ_SIZE) - nbytes = min(nbytes, self._compress_left) - - data = self._fileobj.read(nbytes) - self._compress_left -= len(data) - - if data and self._decrypter is not None: - data = ''.join(map(self._decrypter, data)) - - if self._compress_type == ZIP_STORED: - self._update_crc(data, eof=(self._compress_left==0)) - self._readbuffer = self._readbuffer[self._offset:] + data - self._offset = 0 - else: - # Prepare deflated bytes for decompression. - self._unconsumed += data - - # Handle unconsumed data. - if (len(self._unconsumed) > 0 and n > len_readbuffer and - self._compress_type == ZIP_DEFLATED): - data = self._decompressor.decompress( - self._unconsumed, - max(n - len_readbuffer, self.MIN_READ_SIZE) - ) - - self._unconsumed = self._decompressor.unconsumed_tail - eof = len(self._unconsumed) == 0 and self._compress_left == 0 - if eof: - data += self._decompressor.flush() - - self._update_crc(data, eof=eof) - self._readbuffer = self._readbuffer[self._offset:] + data - self._offset = 0 - elif (len(self._unconsumed) > 0 and n > len_readbuffer and - self._compress_type == ZIP_BZIP2): - data = self._decompressor.decompress(self._unconsumed) - - self._unconsumed = '' - self._readbuffer = self._readbuffer[self._offset:] + data - self._offset = 0 - - # Read from buffer. - data = self._readbuffer[self._offset: self._offset + n] - self._offset += len(data) - return data - - - -class ZipFile: - """ Class with methods to open, read, write, close, list zip files. 
- - z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) - - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read "r", write "w" or append "a". - compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), - or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. - - """ - - fp = None # Set here since __del__ checks it - - def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): - """Open the ZIP file with mode read "r", write "w" or append "a".""" - if mode not in ("r", "w", "a"): - raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') - - if compression == ZIP_STORED: - pass - elif compression == ZIP_DEFLATED: - if not zlib: - raise RuntimeError,\ - "Compression requires the (missing) zlib module" - elif compression == ZIP_BZIP2: - if not bz2: - raise RuntimeError,\ - "Compression requires the (missing) bz2 module" - else: - raise RuntimeError, "That compression method is not supported" - - self._allowZip64 = allowZip64 - self._didModify = False - self.debug = 0 # Level of printing: 0 through 3 - self.NameToInfo = {} # Find file info given name - self.filelist = [] # List of ZipInfo instances for archive - self.compression = compression # Method of compression - self.mode = key = mode.replace('b', '')[0] - self.pwd = None - self.comment = '' - - # Check if we were passed a file-like object - if isinstance(file, basestring): - self._filePassed = 0 - self.filename = file - modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} - try: - self.fp = open(file, modeDict[mode]) - except IOError: - if mode == 'a': - mode = key = 'w' - self.fp = open(file, modeDict[mode]) - else: - raise - else: - self._filePassed = 1 - self.fp = file - self.filename = getattr(file, 'name', None) - - if key == 'r': - self._GetContents() - elif key == 'w': - # set the modified flag so central directory gets written - # even if no files are added to the archive - self._didModify = True - elif key == 'a': - try: - # See if file is a zip file - self._RealGetContents() - # seek to start of directory and overwrite - self.fp.seek(self.start_dir, 0) - except BadZipfile: - # file is not a zip file, just append - self.fp.seek(0, 2) - - # set the modified flag so central directory gets written - # even if no files are added to the archive - self._didModify = True - else: - if not self._filePassed: - self.fp.close() - self.fp = None - raise RuntimeError, 'Mode must be "r", "w" or "a"' - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - def _GetContents(self): - """Read the directory, making sure we close the file if the format - is bad.""" - try: - self._RealGetContents() - except BadZipfile: - if not self._filePassed: - self.fp.close() - self.fp = None - raise - - def _RealGetContents(self): - """Read in the table of contents for the ZIP file.""" - fp = self.fp - try: - endrec = _EndRecData(fp) - except IOError: - raise BadZipfile("File is not a zip file") - if not endrec: - raise BadZipfile, "File is not a zip file" - if self.debug > 1: - print endrec - size_cd = endrec[_ECD_SIZE] # bytes in central directory - offset_cd = endrec[_ECD_OFFSET] # offset of central directory - self.comment = endrec[_ECD_COMMENT] # archive comment - - # "concat" is zero, unless zip was concatenated to another 
file - concat = endrec[_ECD_LOCATION] - size_cd - offset_cd - if endrec[_ECD_SIGNATURE] == stringEndArchive64: - # If Zip64 extension structures are present, account for them - concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) - - if self.debug > 2: - inferred = concat + offset_cd - print "given, inferred, offset", offset_cd, inferred, concat - # self.start_dir: Position of start of central directory - self.start_dir = offset_cd + concat - fp.seek(self.start_dir, 0) - data = fp.read(size_cd) - fp = cStringIO.StringIO(data) - total = 0 - while total < size_cd: - centdir = fp.read(sizeCentralDir) - if centdir[0:4] != stringCentralDir: - raise BadZipfile, "Bad magic number for central directory" - centdir = struct.unpack(structCentralDir, centdir) - if self.debug > 2: - print centdir - filename = fp.read(centdir[_CD_FILENAME_LENGTH]) - # Create ZipInfo instance to store file information - x = ZipInfo(filename) - x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) - x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) - x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] - (x.create_version, x.create_system, x.extract_version, x.reserved, - x.flag_bits, x.compress_type, t, d, - x.CRC, x.compress_size, x.file_size) = centdir[1:12] - x.volume, x.internal_attr, x.external_attr = centdir[15:18] - # Convert date/time code to (year, month, day, hour, min, sec) - x._raw_time = t - x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, - t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) - - x._decodeExtra() - x.header_offset = x.header_offset + concat - x.filename = x._decodeFilename() - self.filelist.append(x) - self.NameToInfo[x.filename] = x - - # update total bytes read from central directory - total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] - + centdir[_CD_EXTRA_FIELD_LENGTH] - + centdir[_CD_COMMENT_LENGTH]) - - if self.debug > 2: - print "total", total - - - def namelist(self): - """Return a list of file names in the archive.""" - l = [] - for data in self.filelist: - l.append(data.filename) - return l - - def infolist(self): - """Return a list of class ZipInfo instances for files in the - archive.""" - return self.filelist - - def printdir(self): - """Print a table of contents for the zip file.""" - print "%-46s %19s %12s" % ("File Name", "Modified ", "Size") - for zinfo in self.filelist: - date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] - print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size) - - def testzip(self): - """Read all the files and check the CRC.""" - chunk_size = 2 ** 20 - for zinfo in self.filelist: - try: - # Read by chunks, to avoid an OverflowError or a - # MemoryError with very large embedded files. 
- f = self.open(zinfo.filename, "r") - while f.read(chunk_size): # Check CRC-32 - pass - except BadZipfile: - return zinfo.filename - - def getinfo(self, name): - """Return the instance of ZipInfo given 'name'.""" - info = self.NameToInfo.get(name) - if info is None: - raise KeyError( - 'There is no item named %r in the archive' % name) - - return info - - def setpassword(self, pwd): - """Set default password for encrypted files.""" - self.pwd = pwd - - def read(self, name, pwd=None): - """Return file bytes (as a string) for name.""" - return self.open(name, "r", pwd).read() - - def open(self, name, mode="r", pwd=None): - """Return file-like object for 'name'.""" - if mode not in ("r", "U", "rU"): - raise RuntimeError, 'open() requires mode "r", "U", or "rU"' - if not self.fp: - raise RuntimeError, \ - "Attempt to read ZIP archive that was already closed" - - # Only open a new file for instances where we were not - # given a file object in the constructor - if self._filePassed: - zef_file = self.fp - else: - zef_file = open(self.filename, 'rb') - - # Make sure we have an info object - if isinstance(name, ZipInfo): - # 'name' is already an info object - zinfo = name - else: - # Get info object for name - zinfo = self.getinfo(name) - - zef_file.seek(zinfo.header_offset, 0) - - # Skip the file header: - fheader = zef_file.read(sizeFileHeader) - if fheader[0:4] != stringFileHeader: - raise BadZipfile, "Bad magic number for file header" - - fheader = struct.unpack(structFileHeader, fheader) - fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) - if fheader[_FH_EXTRA_FIELD_LENGTH]: - zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) - - if fname != zinfo.orig_filename: - raise BadZipfile, \ - 'File name in directory "%s" and header "%s" differ.' % ( - zinfo.orig_filename, fname) - - # check for encrypted flag & handle password - is_encrypted = zinfo.flag_bits & 0x1 - zd = None - if is_encrypted: - if not pwd: - pwd = self.pwd - if not pwd: - raise RuntimeError, "File %s is encrypted, " \ - "password required for extraction" % name - - zd = _ZipDecrypter(pwd) - # The first 12 bytes in the cypher stream is an encryption header - # used to strengthen the algorithm. The first 11 bytes are - # completely random, while the 12th contains the MSB of the CRC, - # or the MSB of the file time depending on the header type - # and is used to check the correctness of the password. - bytes = zef_file.read(12) - h = map(zd, bytes[0:12]) - if zinfo.flag_bits & 0x8: - # compare against the file type from extended local headers - check_byte = (zinfo._raw_time >> 8) & 0xff - else: - # compare against the CRC otherwise - check_byte = (zinfo.CRC >> 24) & 0xff - if ord(h[11]) != check_byte: - raise RuntimeError("Bad password for file", name) - - return ZipExtFile(zef_file, mode, zinfo, zd) - - def extract(self, member, path=None, pwd=None): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. `member' may be a filename or a ZipInfo object. You can - specify a different directory using `path'. - """ - if not isinstance(member, ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return self._extract_member(member, path, pwd) - - def extractall(self, path=None, members=None, pwd=None): - """Extract all members from the archive to the current working - directory. `path' specifies a different directory to extract to. 
- `members' is optional and must be a subset of the list returned - by namelist(). - """ - if members is None: - members = self.namelist() - - for zipinfo in members: - self.extract(zipinfo, path, pwd) - - def _extract_member(self, member, targetpath, pwd): - """Extract the ZipInfo object 'member' to a physical - file on the path targetpath. - """ - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. - upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - source = self.open(member, pwd=pwd) - target = file(targetpath, "wb") - shutil.copyfileobj(source, target) - source.close() - target.close() - - return targetpath - - def _writecheck(self, zinfo): - """Check for errors before writing a file to the archive.""" - if zinfo.filename in self.NameToInfo: - if self.debug: # Warning for duplicate names - print "Duplicate name:", zinfo.filename - if self.mode not in ("w", "a"): - raise RuntimeError, 'write() requires mode "w" or "a"' - if not self.fp: - raise RuntimeError, \ - "Attempt to write ZIP archive that was already closed" - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError, \ - "Compression requires the (missing) zlib module" - if zinfo.compress_type == ZIP_BZIP2 and not bz2: - raise RuntimeError, \ - "Compression requires the (missing) bz2 module" - if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError, \ - "That compression method is not supported" - if zinfo.file_size > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile("Filesize would require ZIP64 extensions") - if zinfo.header_offset > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile("Zipfile size would require ZIP64 extensions") - - def write(self, filename, arcname=None, compress_type=None): - """Put the bytes from filename into the archive under the name - arcname.""" - if not self.fp: - raise RuntimeError( - "Attempt to write to ZIP archive that was already closed") - - st = os.stat(filename) - isdir = stat.S_ISDIR(st.st_mode) - mtime = time.localtime(st.st_mtime) - date_time = mtime[0:6] - # Create ZipInfo instance to store file information - if arcname is None: - arcname = filename - arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) - while arcname[0] in (os.sep, os.altsep): - arcname = arcname[1:] - if isdir: - arcname += '/' - zinfo = ZipInfo(arcname, date_time) - zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes - if compress_type is None: - zinfo.compress_type = self.compression - else: - zinfo.compress_type = compress_type - - zinfo.file_size = st.st_size - zinfo.flag_bits = 0x00 - zinfo.header_offset = self.fp.tell() # Start of header bytes - - self._writecheck(zinfo) - self._didModify = True - - if isdir: - zinfo.file_size = 0 - zinfo.compress_size = 0 - zinfo.CRC = 0 - 
self.filelist.append(zinfo) - self.NameToInfo[zinfo.filename] = zinfo - self.fp.write(zinfo.FileHeader()) - return - - with open(filename, "rb") as fp: - # Must overwrite CRC and sizes with correct data later - zinfo.CRC = CRC = 0 - zinfo.compress_size = compress_size = 0 - zinfo.file_size = file_size = 0 - self.fp.write(zinfo.FileHeader()) - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) - elif zinfo.compress_type == ZIP_BZIP2: - cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: - buf = fp.read(1024 * 8) - if not buf: - break - file_size = file_size + len(buf) - CRC = crc32(buf, CRC) & 0xffffffff - if cmpr: - buf = cmpr.compress(buf) - compress_size = compress_size + len(buf) - self.fp.write(buf) - if cmpr: - buf = cmpr.flush() - compress_size = compress_size + len(buf) - self.fp.write(buf) - zinfo.compress_size = compress_size - else: - zinfo.compress_size = file_size - zinfo.CRC = CRC - zinfo.file_size = file_size - # Seek backwards and write CRC and file sizes - position = self.fp.tell() # Preserve current position in file - self.fp.seek(zinfo.header_offset + 14, 0) - self.fp.write(struct.pack(" ZIP64_LIMIT \ - or zinfo.compress_size > ZIP64_LIMIT: - extra.append(zinfo.file_size) - extra.append(zinfo.compress_size) - file_size = 0xffffffff - compress_size = 0xffffffff - else: - file_size = zinfo.file_size - compress_size = zinfo.compress_size - - if zinfo.header_offset > ZIP64_LIMIT: - extra.append(zinfo.header_offset) - header_offset = 0xffffffffL - else: - header_offset = zinfo.header_offset - - extra_data = zinfo.extra - if extra: - # Append a ZIP64 field to the extra's - extra_data = struct.pack( - '>sys.stderr, (structCentralDir, - stringCentralDir, create_version, - zinfo.create_system, extract_version, zinfo.reserved, - zinfo.flag_bits, zinfo.compress_type, dostime, dosdate, - zinfo.CRC, compress_size, file_size, - len(zinfo.filename), len(extra_data), len(zinfo.comment), - 0, zinfo.internal_attr, zinfo.external_attr, - header_offset) - raise - self.fp.write(centdir) - self.fp.write(filename) - self.fp.write(extra_data) - self.fp.write(zinfo.comment) - - pos2 = self.fp.tell() - # Write end-of-zip-archive record - centDirCount = count - centDirSize = pos2 - pos1 - centDirOffset = pos1 - if (centDirCount >= ZIP_FILECOUNT_LIMIT or - centDirOffset > ZIP64_LIMIT or - centDirSize > ZIP64_LIMIT): - # Need to write the ZIP64 end-of-archive records - zip64endrec = struct.pack( - structEndArchive64, stringEndArchive64, - 44, 45, 45, 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset) - self.fp.write(zip64endrec) - - zip64locrec = struct.pack( - structEndArchive64Locator, - stringEndArchive64Locator, 0, pos2, 1) - self.fp.write(zip64locrec) - centDirCount = min(centDirCount, 0xFFFF) - centDirSize = min(centDirSize, 0xFFFFFFFF) - centDirOffset = min(centDirOffset, 0xFFFFFFFF) - - # check for valid comment length - if len(self.comment) >= ZIP_MAX_COMMENT: - if self.debug > 0: - msg = 'Archive comment is too long; truncating to %d bytes' \ - % ZIP_MAX_COMMENT - self.comment = self.comment[:ZIP_MAX_COMMENT] - - endrec = struct.pack(structEndArchive, stringEndArchive, - 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset, len(self.comment)) - self.fp.write(endrec) - self.fp.write(self.comment) - self.fp.flush() - - if not self._filePassed: - self.fp.close() - self.fp = None - - -class PyZipFile(ZipFile): - """Class to create ZIP archives with Python library files and packages.""" - - def 
writepy(self, pathname, basename = ""): - """Add all files from "pathname" to the ZIP archive. - - If pathname is a package directory, search the directory and - all package subdirectories recursively for all *.py and enter - the modules into the archive. If pathname is a plain - directory, listdir *.py and enter all modules. Else, pathname - must be a Python *.py file and the module will be put into the - archive. Added modules are always module.pyo or module.pyc. - This method will compile the module.py into module.pyc if - necessary. - """ - dir, name = os.path.split(pathname) - if os.path.isdir(pathname): - initname = os.path.join(pathname, "__init__.py") - if os.path.isfile(initname): - # This is a package directory, add it - if basename: - basename = "%s/%s" % (basename, name) - else: - basename = name - if self.debug: - print "Adding package in", pathname, "as", basename - fname, arcname = self._get_codename(initname[0:-3], basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - dirlist = os.listdir(pathname) - dirlist.remove("__init__.py") - # Add all *.py files and package subdirectories - for filename in dirlist: - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if os.path.isdir(path): - if os.path.isfile(os.path.join(path, "__init__.py")): - # This is a package directory, add it - self.writepy(path, basename) # Recursive call - elif ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - else: - # This is NOT a package directory, add its files at top level - if self.debug: - print "Adding files from directory", pathname - for filename in os.listdir(pathname): - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print "Adding", arcname - self.write(fname, arcname) - else: - if pathname[-3:] != ".py": - raise RuntimeError, \ - 'Files added with writepy() must end with ".py"' - fname, arcname = self._get_codename(pathname[0:-3], basename) - if self.debug: - print "Adding file", arcname - self.write(fname, arcname) - - def _get_codename(self, pathname, basename): - """Return (filename, archivename) for the path. - - Given a module name path, return the correct file path and - archive name, compiling if necessary. For example, given - /python/lib/string, return (/python/lib/string.pyc, string). - """ - file_py = pathname + ".py" - file_pyc = pathname + ".pyc" - file_pyo = pathname + ".pyo" - if os.path.isfile(file_pyo) and \ - os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: - fname = file_pyo # Use .pyo file - elif not os.path.isfile(file_pyc) or \ - os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: - import py_compile - if self.debug: - print "Compiling", file_py - try: - py_compile.compile(file_py, file_pyc, None, True) - except py_compile.PyCompileError,err: - print err.msg - fname = file_pyc - else: - fname = file_pyc - archivename = os.path.split(fname)[1] - if basename: - archivename = "%s/%s" % (basename, archivename) - return (fname, archivename) - - -def main(args = None): - import textwrap - USAGE=textwrap.dedent("""\ - Usage: - zipfile.py -l zipfile.zip # Show listing of a zipfile - zipfile.py -t zipfile.zip # Test if a zipfile is valid - zipfile.py -e zipfile.zip target # Extract zipfile into target dir - zipfile.py -c zipfile.zip src ... 
# Create zipfile from sources - """) - if args is None: - args = sys.argv[1:] - - if not args or args[0] not in ('-l', '-c', '-e', '-t'): - print USAGE - sys.exit(1) - - if args[0] == '-l': - if len(args) != 2: - print USAGE - sys.exit(1) - zf = ZipFile(args[1], 'r') - zf.printdir() - zf.close() - - elif args[0] == '-t': - if len(args) != 2: - print USAGE - sys.exit(1) - zf = ZipFile(args[1], 'r') - badfile = zf.testzip() - if badfile: - print("The following enclosed file is corrupted: {!r}".format(badfile)) - print "Done testing" - - elif args[0] == '-e': - if len(args) != 3: - print USAGE - sys.exit(1) - - zf = ZipFile(args[1], 'r') - out = args[2] - for path in zf.namelist(): - if path.startswith('./'): - tgt = os.path.join(out, path[2:]) - else: - tgt = os.path.join(out, path) - - tgtdir = os.path.dirname(tgt) - if not os.path.exists(tgtdir): - os.makedirs(tgtdir) - with open(tgt, 'wb') as fp: - fp.write(zf.read(path)) - zf.close() - - elif args[0] == '-c': - if len(args) < 3: - print USAGE - sys.exit(1) - - def addToZip(zf, path, zippath): - if os.path.isfile(path): - zf.write(path, zippath, ZIP_DEFLATED) - elif os.path.isdir(path): - for nm in os.listdir(path): - addToZip(zf, - os.path.join(path, nm), os.path.join(zippath, nm)) - # else: ignore - - zf = ZipFile(args[1], 'w', allowZip64=True) - for src in args[2:]: - addToZip(zf, src, os.path.basename(src)) - - zf.close() - -if __name__ == "__main__": - main() diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile271.diff --- a/2.00/zipfile271.diff Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,110 +0,0 @@ ---- /usr/local/lib/python2.7/zipfile.py 2010-11-29 00:56:38.000000000 +0000 -+++ zipfile271.py 2010-11-29 01:20:17.000000000 +0000 -@@ -1,6 +1,7 @@ - """ - Read and write ZIP files. - """ -+# Improved by Chortos-2 in 2010 (added bzip2 support) - import struct, os, time, sys, shutil - import binascii, cStringIO, stat - import io -@@ -13,8 +14,13 @@ - zlib = None - crc32 = binascii.crc32 - -+try: -+ import bz2 # We may need its compression method -+except ImportError: -+ bz2 = None -+ - __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", -- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] -+ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - - class BadZipfile(Exception): - pass -@@ -35,6 +41,7 @@ - # constants for Zip file compression methods - ZIP_STORED = 0 - ZIP_DEFLATED = 8 -+ZIP_BZIP2 = 12 - # Other ZIP compression methods not supported - - # Below are some formats and associated data for reading/writing headers using -@@ -483,6 +490,9 @@ - - if self._compress_type == ZIP_DEFLATED: - self._decompressor = zlib.decompressobj(-15) -+ elif self._compress_type == ZIP_BZIP2: -+ self._decompressor = bz2.BZ2Decompressor() -+ self.MIN_READ_SIZE = 900000 - self._unconsumed = '' - - self._readbuffer = '' -@@ -641,6 +651,13 @@ - self._update_crc(data, eof=eof) - self._readbuffer = self._readbuffer[self._offset:] + data - self._offset = 0 -+ elif (len(self._unconsumed) > 0 and n > len_readbuffer and -+ self._compress_type == ZIP_BZIP2): -+ data = self._decompressor.decompress(self._unconsumed) -+ -+ self._unconsumed = '' -+ self._readbuffer = self._readbuffer[self._offset:] + data -+ self._offset = 0 - - # Read from buffer. - data = self._readbuffer[self._offset: self._offset + n] -@@ -657,7 +674,8 @@ - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. 
- mode: The mode can be either read "r", write "w" or append "a". -- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). -+ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), -+ or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. -@@ -677,6 +695,10 @@ - if not zlib: - raise RuntimeError,\ - "Compression requires the (missing) zlib module" -+ elif compression == ZIP_BZIP2: -+ if not bz2: -+ raise RuntimeError,\ -+ "Compression requires the (missing) bz2 module" - else: - raise RuntimeError, "That compression method is not supported" - -@@ -1011,7 +1033,10 @@ - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError, \ - "Compression requires the (missing) zlib module" -- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): -+ if zinfo.compress_type == ZIP_BZIP2 and not bz2: -+ raise RuntimeError, \ -+ "Compression requires the (missing) bz2 module" -+ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError, \ - "That compression method is not supported" - if zinfo.file_size > ZIP64_LIMIT: -@@ -1072,6 +1097,8 @@ - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) -+ elif zinfo.compress_type == ZIP_BZIP2: -+ cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: -@@ -1132,6 +1159,10 @@ - zlib.DEFLATED, -15) - bytes = co.compress(bytes) + co.flush() - zinfo.compress_size = len(bytes) # Compressed size -+ elif zinfo.compress_type == ZIP_BZIP2: -+ co = bz2.BZ2Compressor() -+ bytes = co.compress(bytes) + co.flush() -+ zinfo.compress_size = len(bytes) # Compressed size - else: - zinfo.compress_size = zinfo.file_size - zinfo.header_offset = self.fp.tell() # Start of header bytes diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile31.py --- a/2.00/zipfile31.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1493 +0,0 @@ -""" -Read and write ZIP files. - -XXX references to utf-8 need further investigation. -""" -# Improved by Chortos-2 in 2010 (added bzip2 support) -import struct, os, time, sys, shutil -import binascii, io, stat - -try: - import zlib # We may need its compression method - crc32 = zlib.crc32 -except ImportError: - zlib = None - crc32 = binascii.crc32 - -try: - import bz2 # We may need its compression method -except ImportError: - bz2 = None - -__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", - "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - -class BadZipfile(Exception): - pass - - -class LargeZipFile(Exception): - """ - Raised when writing a zipfile, the zipfile requires ZIP64 extensions - and those extensions are disabled. - """ - -error = BadZipfile # The exception raised by this module - -ZIP64_LIMIT = (1 << 31) - 1 -ZIP_FILECOUNT_LIMIT = 1 << 16 -ZIP_MAX_COMMENT = (1 << 16) - 1 - -# constants for Zip file compression methods -ZIP_STORED = 0 -ZIP_DEFLATED = 8 -ZIP_BZIP2 = 12 -# Other ZIP compression methods not supported - -# Below are some formats and associated data for reading/writing headers using -# the struct module. 
The names and structures of headers/records are those used -# in the PKWARE description of the ZIP file format: -# http://www.pkware.com/documents/casestudies/APPNOTE.TXT -# (URL valid as of January 2008) - -# The "end of central directory" structure, magic number, size, and indices -# (section V.I in the format document) -structEndArchive = b"<4s4H2LH" -stringEndArchive = b"PK\005\006" -sizeEndCentDir = struct.calcsize(structEndArchive) - -_ECD_SIGNATURE = 0 -_ECD_DISK_NUMBER = 1 -_ECD_DISK_START = 2 -_ECD_ENTRIES_THIS_DISK = 3 -_ECD_ENTRIES_TOTAL = 4 -_ECD_SIZE = 5 -_ECD_OFFSET = 6 -_ECD_COMMENT_SIZE = 7 -# These last two indices are not part of the structure as defined in the -# spec, but they are used internally by this module as a convenience -_ECD_COMMENT = 8 -_ECD_LOCATION = 9 - -# The "central directory" structure, magic number, size, and indices -# of entries in the structure (section V.F in the format document) -structCentralDir = "<4s4B4HL2L5H2L" -stringCentralDir = b"PK\001\002" -sizeCentralDir = struct.calcsize(structCentralDir) - -# indexes of entries in the central directory structure -_CD_SIGNATURE = 0 -_CD_CREATE_VERSION = 1 -_CD_CREATE_SYSTEM = 2 -_CD_EXTRACT_VERSION = 3 -_CD_EXTRACT_SYSTEM = 4 -_CD_FLAG_BITS = 5 -_CD_COMPRESS_TYPE = 6 -_CD_TIME = 7 -_CD_DATE = 8 -_CD_CRC = 9 -_CD_COMPRESSED_SIZE = 10 -_CD_UNCOMPRESSED_SIZE = 11 -_CD_FILENAME_LENGTH = 12 -_CD_EXTRA_FIELD_LENGTH = 13 -_CD_COMMENT_LENGTH = 14 -_CD_DISK_NUMBER_START = 15 -_CD_INTERNAL_FILE_ATTRIBUTES = 16 -_CD_EXTERNAL_FILE_ATTRIBUTES = 17 -_CD_LOCAL_HEADER_OFFSET = 18 - -# The "local file header" structure, magic number, size, and indices -# (section V.A in the format document) -structFileHeader = "<4s2B4HL2L2H" -stringFileHeader = b"PK\003\004" -sizeFileHeader = struct.calcsize(structFileHeader) - -_FH_SIGNATURE = 0 -_FH_EXTRACT_VERSION = 1 -_FH_EXTRACT_SYSTEM = 2 -_FH_GENERAL_PURPOSE_FLAG_BITS = 3 -_FH_COMPRESSION_METHOD = 4 -_FH_LAST_MOD_TIME = 5 -_FH_LAST_MOD_DATE = 6 -_FH_CRC = 7 -_FH_COMPRESSED_SIZE = 8 -_FH_UNCOMPRESSED_SIZE = 9 -_FH_FILENAME_LENGTH = 10 -_FH_EXTRA_FIELD_LENGTH = 11 - -# The "Zip64 end of central directory locator" structure, magic number, and size -structEndArchive64Locator = "<4sLQL" -stringEndArchive64Locator = b"PK\x06\x07" -sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) - -# The "Zip64 end of central directory" record, magic number, size, and indices -# (section V.G in the format document) -structEndArchive64 = "<4sQ2H2L4Q" -stringEndArchive64 = b"PK\x06\x06" -sizeEndCentDir64 = struct.calcsize(structEndArchive64) - -_CD64_SIGNATURE = 0 -_CD64_DIRECTORY_RECSIZE = 1 -_CD64_CREATE_VERSION = 2 -_CD64_EXTRACT_VERSION = 3 -_CD64_DISK_NUMBER = 4 -_CD64_DISK_NUMBER_START = 5 -_CD64_NUMBER_ENTRIES_THIS_DISK = 6 -_CD64_NUMBER_ENTRIES_TOTAL = 7 -_CD64_DIRECTORY_SIZE = 8 -_CD64_OFFSET_START_CENTDIR = 9 - -def _check_zipfile(fp): - try: - if _EndRecData(fp): - return True # file has correct magic number - except IOError: - pass - return False - -def is_zipfile(filename): - """Quickly see if a file is a ZIP file by checking the magic number. - - The filename argument may be a file or file-like object too. 
- """ - result = False - try: - if hasattr(filename, "read"): - result = _check_zipfile(fp=filename) - else: - with open(filename, "rb") as fp: - result = _check_zipfile(fp) - except IOError: - pass - return result - -def _EndRecData64(fpin, offset, endrec): - """ - Read the ZIP64 end-of-archive records and use that to update endrec - """ - try: - fpin.seek(offset - sizeEndCentDir64Locator, 2) - except IOError: - # If the seek fails, the file is not large enough to contain a ZIP64 - # end-of-archive record, so just return the end record we were given. - return endrec - - data = fpin.read(sizeEndCentDir64Locator) - sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) - if sig != stringEndArchive64Locator: - return endrec - - if diskno != 0 or disks != 1: - raise BadZipfile("zipfiles that span multiple disks are not supported") - - # Assume no 'zip64 extensible data' - fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) - data = fpin.read(sizeEndCentDir64) - sig, sz, create_version, read_version, disk_num, disk_dir, \ - dircount, dircount2, dirsize, diroffset = \ - struct.unpack(structEndArchive64, data) - if sig != stringEndArchive64: - return endrec - - # Update the original endrec using data from the ZIP64 record - endrec[_ECD_SIGNATURE] = sig - endrec[_ECD_DISK_NUMBER] = disk_num - endrec[_ECD_DISK_START] = disk_dir - endrec[_ECD_ENTRIES_THIS_DISK] = dircount - endrec[_ECD_ENTRIES_TOTAL] = dircount2 - endrec[_ECD_SIZE] = dirsize - endrec[_ECD_OFFSET] = diroffset - return endrec - - -def _EndRecData(fpin): - """Return data from the "End of Central Directory" record, or None. - - The data is a list of the nine items in the ZIP "End of central dir" - record followed by a tenth item, the file seek offset of this record.""" - - # Determine file size - fpin.seek(0, 2) - filesize = fpin.tell() - - # Check to see if this is ZIP file with no archive comment (the - # "end of central directory" structure should be the last item in the - # file if this is the case). - try: - fpin.seek(-sizeEndCentDir, 2) - except IOError: - return None - data = fpin.read() - if data[0:4] == stringEndArchive and data[-2:] == b"\000\000": - # the signature is correct and there's no comment, unpack structure - endrec = struct.unpack(structEndArchive, data) - endrec=list(endrec) - - # Append a blank comment and record start offset - endrec.append(b"") - endrec.append(filesize - sizeEndCentDir) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, -sizeEndCentDir, endrec) - - # Either this is not a ZIP file, or it is a ZIP file with an archive - # comment. Search the end of the file for the "end of central directory" - # record signature. The comment is the last item in the ZIP file and may be - # up to 64K long. It is assumed that the "end of central directory" magic - # number does not appear in the comment. 
- maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) - fpin.seek(maxCommentStart, 0) - data = fpin.read() - start = data.rfind(stringEndArchive) - if start >= 0: - # found the magic number; attempt to unpack and interpret - recData = data[start:start+sizeEndCentDir] - endrec = list(struct.unpack(structEndArchive, recData)) - comment = data[start+sizeEndCentDir:] - # check that comment length is correct - if endrec[_ECD_COMMENT_SIZE] == len(comment): - # Append the archive comment and start offset - endrec.append(comment) - endrec.append(maxCommentStart + start) - - # Try to read the "Zip64 end of central directory" structure - return _EndRecData64(fpin, maxCommentStart + start - filesize, - endrec) - - # Unable to find a valid end of central directory structure - return - - -class ZipInfo (object): - """Class with attributes describing each file in the ZIP archive.""" - - __slots__ = ( - 'orig_filename', - 'filename', - 'date_time', - 'compress_type', - 'comment', - 'extra', - 'create_system', - 'create_version', - 'extract_version', - 'reserved', - 'flag_bits', - 'volume', - 'internal_attr', - 'external_attr', - 'header_offset', - 'CRC', - 'compress_size', - 'file_size', - '_raw_time', - ) - - def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): - self.orig_filename = filename # Original file name in archive - - # Terminate the file name at the first null byte. Null bytes in file - # names are used as tricks by viruses in archives. - null_byte = filename.find(chr(0)) - if null_byte >= 0: - filename = filename[0:null_byte] - # This is used to ensure paths in generated ZIP files always use - # forward slashes as the directory separator, as required by the - # ZIP format specification. - if os.sep != "/" and os.sep in filename: - filename = filename.replace(os.sep, "/") - - self.filename = filename # Normalized file name - self.date_time = date_time # year, month, day, hour, min, sec - # Standard values: - self.compress_type = ZIP_STORED # Type of compression for the file - self.comment = b"" # Comment for each file - self.extra = b"" # ZIP extra data - if sys.platform == 'win32': - self.create_system = 0 # System which created ZIP archive - else: - # Assume everything else is unix-y - self.create_system = 3 # System which created ZIP archive - self.create_version = 20 # Version which created ZIP archive - self.extract_version = 20 # Version needed to extract archive - self.reserved = 0 # Must be zero - self.flag_bits = 0 # ZIP flag bits - self.volume = 0 # Volume number of file header - self.internal_attr = 0 # Internal attributes - self.external_attr = 0 # External file attributes - # Other attributes are set by class ZipFile: - # header_offset Byte offset to the file header - # CRC CRC-32 of the uncompressed file - # compress_size Size of the compressed file - # file_size Size of the uncompressed file - - def FileHeader(self): - """Return the per-file header as a string.""" - dt = self.date_time - dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] - dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) - if self.flag_bits & 0x08: - # Set these to zero because we write them after the file data - CRC = compress_size = file_size = 0 - else: - CRC = self.CRC - compress_size = self.compress_size - file_size = self.file_size - - extra = self.extra - - if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: - # File is larger than what fits into a 4 byte integer, - # fall back to the ZIP64 extension - fmt = '= 24: - counts = unpack('> 1) & 0x7FFFFFFF) ^ poly - else: - 
crc = ((crc >> 1) & 0x7FFFFFFF) - table[i] = crc - return table - crctable = _GenerateCRCTable() - - def _crc32(self, ch, crc): - """Compute the CRC32 primitive on one byte.""" - return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ch) & 0xff] - - def __init__(self, pwd): - self.key0 = 305419896 - self.key1 = 591751049 - self.key2 = 878082192 - for p in pwd: - self._UpdateKeys(p) - - def _UpdateKeys(self, c): - self.key0 = self._crc32(c, self.key0) - self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295 - self.key1 = (self.key1 * 134775813 + 1) & 4294967295 - self.key2 = self._crc32((self.key1 >> 24) & 255, self.key2) - - def __call__(self, c): - """Decrypt a single character.""" - assert isinstance(c, int) - k = self.key2 | 2 - c = c ^ (((k * (k^1)) >> 8) & 255) - self._UpdateKeys(c) - return c - -class ZipExtFile: - """File-like object for reading an archive member. - Is returned by ZipFile.open(). - """ - - def __init__(self, fileobj, zipinfo, decrypt=None): - self.fileobj = fileobj - self.decrypter = decrypt - self.bytes_read = 0 - self.rawbuffer = b'' - self.readbuffer = b'' - self.linebuffer = b'' - self.eof = False - self.univ_newlines = False - self.nlSeps = (b"\n", ) - self.lastdiscard = b'' - - self.compress_type = zipinfo.compress_type - self.compress_size = zipinfo.compress_size - - self.closed = False - self.mode = "r" - self.name = zipinfo.filename - - # read from compressed files in 64k blocks - self.compreadsize = 64*1024 - if self.compress_type == ZIP_DEFLATED: - self.dc = zlib.decompressobj(-15) - elif self.compress_type == ZIP_BZIP2: - self.dc = bz2.BZ2Decompressor() - self.compreadsize = 900000 - - if hasattr(zipinfo, 'CRC'): - self._expected_crc = zipinfo.CRC - self._running_crc = crc32(b'') & 0xffffffff - else: - self._expected_crc = None - - def set_univ_newlines(self, univ_newlines): - self.univ_newlines = univ_newlines - - # pick line separator char(s) based on universal newlines flag - self.nlSeps = (b"\n", ) - if self.univ_newlines: - self.nlSeps = (b"\r\n", b"\r", b"\n") - - def __iter__(self): - return self - - def __next__(self): - nextline = self.readline() - if not nextline: - raise StopIteration() - - return nextline - - def close(self): - self.closed = True - - def _checkfornewline(self): - nl, nllen = -1, -1 - if self.linebuffer: - # ugly check for cases where half of an \r\n pair was - # read on the last pass, and the \r was discarded. In this - # case we just throw away the \n at the start of the buffer. - if (self.lastdiscard, self.linebuffer[:1]) == (b'\r', b'\n'): - self.linebuffer = self.linebuffer[1:] - - for sep in self.nlSeps: - nl = self.linebuffer.find(sep) - if nl >= 0: - nllen = len(sep) - return nl, nllen - - return nl, nllen - - def readline(self, size = -1): - """Read a line with approx. size. If size is negative, - read a whole line. 
- """ - if size < 0: - size = sys.maxsize - elif size == 0: - return b'' - - # check for a newline already in buffer - nl, nllen = self._checkfornewline() - - if nl >= 0: - # the next line was already in the buffer - nl = min(nl, size) - else: - # no line break in buffer - try to read more - size -= len(self.linebuffer) - while nl < 0 and size > 0: - buf = self.read(min(size, 100)) - if not buf: - break - self.linebuffer += buf - size -= len(buf) - - # check for a newline in buffer - nl, nllen = self._checkfornewline() - - # we either ran out of bytes in the file, or - # met the specified size limit without finding a newline, - # so return current buffer - if nl < 0: - s = self.linebuffer - self.linebuffer = b'' - return s - - buf = self.linebuffer[:nl] - self.lastdiscard = self.linebuffer[nl:nl + nllen] - self.linebuffer = self.linebuffer[nl + nllen:] - - # line is always returned with \n as newline char (except possibly - # for a final incomplete line in the file, which is handled above). - return buf + b"\n" - - def readlines(self, sizehint = -1): - """Return a list with all (following) lines. The sizehint parameter - is ignored in this implementation. - """ - result = [] - while True: - line = self.readline() - if not line: break - result.append(line) - return result - - def _update_crc(self, newdata, eof): - # Update the CRC using the given data. - if self._expected_crc is None: - # No need to compute the CRC if we don't have a reference value - return - self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff - # Check the CRC if we're at the end of the file - if eof and self._running_crc != self._expected_crc: - raise BadZipfile("Bad CRC-32 for file %r" % self.name) - - def read(self, size = None): - # act like file obj and return empty string if size is 0 - if size == 0: - return b'' - - # determine read size - bytesToRead = self.compress_size - self.bytes_read - - # adjust read size for encrypted files since the first 12 bytes - # are for the encryption/password information - if self.decrypter is not None: - bytesToRead -= 12 - - if size is not None and size >= 0: - if self.compress_type == ZIP_STORED: - lr = len(self.readbuffer) - bytesToRead = min(bytesToRead, size - lr) - else: - if len(self.readbuffer) > size: - # the user has requested fewer bytes than we've already - # pulled through the decompressor; don't read any more - bytesToRead = 0 - else: - # user will use up the buffer, so read some more - lr = len(self.rawbuffer) - bytesToRead = min(bytesToRead, self.compreadsize - lr) - - # avoid reading past end of file contents - if bytesToRead + self.bytes_read > self.compress_size: - bytesToRead = self.compress_size - self.bytes_read - - # try to read from file (if necessary) - if bytesToRead > 0: - data = self.fileobj.read(bytesToRead) - self.bytes_read += len(data) - try: - self.rawbuffer += data - except: - print(repr(self.fileobj), repr(self.rawbuffer), - repr(data)) - raise - - # handle contents of raw buffer - if self.rawbuffer: - newdata = self.rawbuffer - self.rawbuffer = b'' - - # decrypt new data if we were given an object to handle that - if newdata and self.decrypter is not None: - newdata = bytes(map(self.decrypter, newdata)) - - # decompress newly read data if necessary - if newdata and self.compress_type != ZIP_STORED: - newdata = self.dc.decompress(newdata) - self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' - if self.eof and len(self.rawbuffer) == 0: - # we're out of raw bytes (both from the file and - # the local 
buffer); flush just to make sure the - # decompressor is done - try: - newdata += self.dc.flush() - except AttributeError: - pass - # prevent decompressor from being used again - self.dc = None - - self._update_crc(newdata, eof=( - self.compress_size == self.bytes_read and - len(self.rawbuffer) == 0)) - self.readbuffer += newdata - - # return what the user asked for - if size is None or len(self.readbuffer) <= size: - data = self.readbuffer - self.readbuffer = b'' - else: - data = self.readbuffer[:size] - self.readbuffer = self.readbuffer[size:] - - return data - - -class ZipFile: - """ Class with methods to open, read, write, close, list zip files. - - z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) - - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read "r", write "w" or append "a". - compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), - or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. - - """ - - fp = None # Set here since __del__ checks it - - def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): - """Open the ZIP file with mode read "r", write "w" or append "a".""" - if mode not in ("r", "w", "a"): - raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') - - if compression == ZIP_STORED: - pass - elif compression == ZIP_DEFLATED: - if not zlib: - raise RuntimeError( - "Compression requires the (missing) zlib module") - elif compression == ZIP_BZIP2: - if not bz2: - raise RuntimeError( - "Compression requires the (missing) bz2 module") - else: - raise RuntimeError("That compression method is not supported") - - self._allowZip64 = allowZip64 - self._didModify = False - self.debug = 0 # Level of printing: 0 through 3 - self.NameToInfo = {} # Find file info given name - self.filelist = [] # List of ZipInfo instances for archive - self.compression = compression # Method of compression - self.mode = key = mode.replace('b', '')[0] - self.pwd = None - self.comment = b'' - - # Check if we were passed a file-like object - if isinstance(file, str): - # No, it's a filename - self._filePassed = 0 - self.filename = file - modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} - try: - self.fp = io.open(file, modeDict[mode]) - except IOError: - if mode == 'a': - mode = key = 'w' - self.fp = io.open(file, modeDict[mode]) - else: - raise - else: - self._filePassed = 1 - self.fp = file - self.filename = getattr(file, 'name', None) - - if key == 'r': - self._GetContents() - elif key == 'w': - # set the modified flag so central directory gets written - # even if no files are added to the archive - self._didModify = True - elif key == 'a': - try: - # See if file is a zip file - self._RealGetContents() - # seek to start of directory and overwrite - self.fp.seek(self.start_dir, 0) - except BadZipfile: - # file is not a zip file, just append - self.fp.seek(0, 2) - - # set the modified flag so central directory gets written - # even if no files are added to the archive - self._didModify = True - else: - if not self._filePassed: - self.fp.close() - self.fp = None - raise RuntimeError('Mode must be "r", "w" or "a"') - - def _GetContents(self): - """Read the directory, making sure we close the file if the format - is bad.""" - try: - self._RealGetContents() - except BadZipfile: - if not self._filePassed: - 
self.fp.close() - self.fp = None - raise - - def _RealGetContents(self): - """Read in the table of contents for the ZIP file.""" - fp = self.fp - try: - endrec = _EndRecData(fp) - except IOError: - raise BadZipfile("File is not a zip file") - if not endrec: - raise BadZipfile("File is not a zip file") - if self.debug > 1: - print(endrec) - size_cd = endrec[_ECD_SIZE] # bytes in central directory - offset_cd = endrec[_ECD_OFFSET] # offset of central directory - self.comment = endrec[_ECD_COMMENT] # archive comment - - # "concat" is zero, unless zip was concatenated to another file - concat = endrec[_ECD_LOCATION] - size_cd - offset_cd - if endrec[_ECD_SIGNATURE] == stringEndArchive64: - # If Zip64 extension structures are present, account for them - concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) - - if self.debug > 2: - inferred = concat + offset_cd - print("given, inferred, offset", offset_cd, inferred, concat) - # self.start_dir: Position of start of central directory - self.start_dir = offset_cd + concat - fp.seek(self.start_dir, 0) - data = fp.read(size_cd) - fp = io.BytesIO(data) - total = 0 - while total < size_cd: - centdir = fp.read(sizeCentralDir) - if centdir[0:4] != stringCentralDir: - raise BadZipfile("Bad magic number for central directory") - centdir = struct.unpack(structCentralDir, centdir) - if self.debug > 2: - print(centdir) - filename = fp.read(centdir[_CD_FILENAME_LENGTH]) - flags = centdir[5] - if flags & 0x800: - # UTF-8 file names extension - filename = filename.decode('utf-8') - else: - # Historical ZIP filename encoding - filename = filename.decode('cp437') - # Create ZipInfo instance to store file information - x = ZipInfo(filename) - x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) - x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) - x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] - (x.create_version, x.create_system, x.extract_version, x.reserved, - x.flag_bits, x.compress_type, t, d, - x.CRC, x.compress_size, x.file_size) = centdir[1:12] - x.volume, x.internal_attr, x.external_attr = centdir[15:18] - # Convert date/time code to (year, month, day, hour, min, sec) - x._raw_time = t - x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, - t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) - - x._decodeExtra() - x.header_offset = x.header_offset + concat - self.filelist.append(x) - self.NameToInfo[x.filename] = x - - # update total bytes read from central directory - total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] - + centdir[_CD_EXTRA_FIELD_LENGTH] - + centdir[_CD_COMMENT_LENGTH]) - - if self.debug > 2: - print("total", total) - - - def namelist(self): - """Return a list of file names in the archive.""" - l = [] - for data in self.filelist: - l.append(data.filename) - return l - - def infolist(self): - """Return a list of class ZipInfo instances for files in the - archive.""" - return self.filelist - - def printdir(self, file=None): - """Print a table of contents for the zip file.""" - print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"), - file=file) - for zinfo in self.filelist: - date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] - print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size), - file=file) - - def testzip(self): - """Read all the files and check the CRC.""" - chunk_size = 2 ** 20 - for zinfo in self.filelist: - try: - # Read by chunks, to avoid an OverflowError or a - # MemoryError with very large embedded files. 
- f = self.open(zinfo.filename, "r") - while f.read(chunk_size): # Check CRC-32 - pass - except BadZipfile: - return zinfo.filename - - def getinfo(self, name): - """Return the instance of ZipInfo given 'name'.""" - info = self.NameToInfo.get(name) - if info is None: - raise KeyError( - 'There is no item named %r in the archive' % name) - - return info - - def setpassword(self, pwd): - """Set default password for encrypted files.""" - assert isinstance(pwd, bytes) - self.pwd = pwd - - def read(self, name, pwd=None): - """Return file bytes (as a string) for name.""" - return self.open(name, "r", pwd).read() - - def open(self, name, mode="r", pwd=None): - """Return file-like object for 'name'.""" - if mode not in ("r", "U", "rU"): - raise RuntimeError('open() requires mode "r", "U", or "rU"') - if not self.fp: - raise RuntimeError( - "Attempt to read ZIP archive that was already closed") - - # Only open a new file for instances where we were not - # given a file object in the constructor - if self._filePassed: - zef_file = self.fp - else: - zef_file = io.open(self.filename, 'rb') - - # Make sure we have an info object - if isinstance(name, ZipInfo): - # 'name' is already an info object - zinfo = name - else: - # Get info object for name - zinfo = self.getinfo(name) - - zef_file.seek(zinfo.header_offset, 0) - - # Skip the file header: - fheader = zef_file.read(sizeFileHeader) - if fheader[0:4] != stringFileHeader: - raise BadZipfile("Bad magic number for file header") - - fheader = struct.unpack(structFileHeader, fheader) - fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) - if fheader[_FH_EXTRA_FIELD_LENGTH]: - zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) - - if fname != zinfo.orig_filename.encode("utf-8"): - raise BadZipfile( - 'File name in directory %r and header %r differ.' - % (zinfo.orig_filename, fname)) - - # check for encrypted flag & handle password - is_encrypted = zinfo.flag_bits & 0x1 - zd = None - if is_encrypted: - if not pwd: - pwd = self.pwd - if not pwd: - raise RuntimeError("File %s is encrypted, " - "password required for extraction" % name) - - zd = _ZipDecrypter(pwd) - # The first 12 bytes in the cypher stream is an encryption header - # used to strengthen the algorithm. The first 11 bytes are - # completely random, while the 12th contains the MSB of the CRC, - # or the MSB of the file time depending on the header type - # and is used to check the correctness of the password. - bytes = zef_file.read(12) - h = list(map(zd, bytes[0:12])) - if zinfo.flag_bits & 0x8: - # compare against the file type from extended local headers - check_byte = (zinfo._raw_time >> 8) & 0xff - else: - # compare against the CRC otherwise - check_byte = (zinfo.CRC >> 24) & 0xff - if h[11] != check_byte: - raise RuntimeError("Bad password for file", name) - - # build and return a ZipExtFile - if zd is None: - zef = ZipExtFile(zef_file, zinfo) - else: - zef = ZipExtFile(zef_file, zinfo, zd) - - # set universal newlines on ZipExtFile if necessary - if "U" in mode: - zef.set_univ_newlines(True) - return zef - - def extract(self, member, path=None, pwd=None): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. `member' may be a filename or a ZipInfo object. You can - specify a different directory using `path'. 
- """ - if not isinstance(member, ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return self._extract_member(member, path, pwd) - - def extractall(self, path=None, members=None, pwd=None): - """Extract all members from the archive to the current working - directory. `path' specifies a different directory to extract to. - `members' is optional and must be a subset of the list returned - by namelist(). - """ - if members is None: - members = self.namelist() - - for zipinfo in members: - self.extract(zipinfo, path, pwd) - - def _extract_member(self, member, targetpath, pwd): - """Extract the ZipInfo object 'member' to a physical - file on the path targetpath. - """ - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. - upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - source = self.open(member, pwd=pwd) - target = open(targetpath, "wb") - shutil.copyfileobj(source, target) - source.close() - target.close() - - return targetpath - - def _writecheck(self, zinfo): - """Check for errors before writing a file to the archive.""" - if zinfo.filename in self.NameToInfo: - if self.debug: # Warning for duplicate names - print("Duplicate name:", zinfo.filename) - if self.mode not in ("w", "a"): - raise RuntimeError('write() requires mode "w" or "a"') - if not self.fp: - raise RuntimeError( - "Attempt to write ZIP archive that was already closed") - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError( - "Compression requires the (missing) zlib module") - if zinfo.compress_type == ZIP_BZIP2 and not bz2: - raise RuntimeError( - "Compression requires the (missing) bz2 module") - if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError("That compression method is not supported") - if zinfo.file_size > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile("Filesize would require ZIP64 extensions") - if zinfo.header_offset > ZIP64_LIMIT: - if not self._allowZip64: - raise LargeZipFile( - "Zipfile size would require ZIP64 extensions") - - def write(self, filename, arcname=None, compress_type=None): - """Put the bytes from filename into the archive under the name - arcname.""" - if not self.fp: - raise RuntimeError( - "Attempt to write to ZIP archive that was already closed") - - st = os.stat(filename) - isdir = stat.S_ISDIR(st.st_mode) - mtime = time.localtime(st.st_mtime) - date_time = mtime[0:6] - # Create ZipInfo instance to store file information - if arcname is None: - arcname = filename - arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) - while arcname[0] in (os.sep, os.altsep): - arcname = arcname[1:] - if isdir: - arcname += '/' - zinfo = ZipInfo(arcname, date_time) - zinfo.external_attr = (st[0] & 0xFFFF) << 16 # Unix attributes - if compress_type is None: - 
zinfo.compress_type = self.compression - else: - zinfo.compress_type = compress_type - - zinfo.file_size = st.st_size - zinfo.flag_bits = 0x00 - zinfo.header_offset = self.fp.tell() # Start of header bytes - - self._writecheck(zinfo) - self._didModify = True - - if isdir: - zinfo.file_size = 0 - zinfo.compress_size = 0 - zinfo.CRC = 0 - self.filelist.append(zinfo) - self.NameToInfo[zinfo.filename] = zinfo - self.fp.write(zinfo.FileHeader()) - return - - with open(filename, "rb") as fp: - # Must overwrite CRC and sizes with correct data later - zinfo.CRC = CRC = 0 - zinfo.compress_size = compress_size = 0 - zinfo.file_size = file_size = 0 - self.fp.write(zinfo.FileHeader()) - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) - elif zinfo.compress_type == ZIP_BZIP2: - cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: - buf = fp.read(1024 * 8) - if not buf: - break - file_size = file_size + len(buf) - CRC = crc32(buf, CRC) & 0xffffffff - if cmpr: - buf = cmpr.compress(buf) - compress_size = compress_size + len(buf) - self.fp.write(buf) - if cmpr: - buf = cmpr.flush() - compress_size = compress_size + len(buf) - self.fp.write(buf) - zinfo.compress_size = compress_size - else: - zinfo.compress_size = file_size - zinfo.CRC = CRC - zinfo.file_size = file_size - # Seek backwards and write CRC and file sizes - position = self.fp.tell() # Preserve current position in file - self.fp.seek(zinfo.header_offset + 14, 0) - self.fp.write(struct.pack(" ZIP64_LIMIT \ - or zinfo.compress_size > ZIP64_LIMIT: - extra.append(zinfo.file_size) - extra.append(zinfo.compress_size) - file_size = 0xffffffff - compress_size = 0xffffffff - else: - file_size = zinfo.file_size - compress_size = zinfo.compress_size - - if zinfo.header_offset > ZIP64_LIMIT: - extra.append(zinfo.header_offset) - header_offset = 0xffffffff - else: - header_offset = zinfo.header_offset - - extra_data = zinfo.extra - if extra: - # Append a ZIP64 field to the extra's - extra_data = struct.pack( - '= ZIP_FILECOUNT_LIMIT or - centDirOffset > ZIP64_LIMIT or - centDirSize > ZIP64_LIMIT): - # Need to write the ZIP64 end-of-archive records - zip64endrec = struct.pack( - structEndArchive64, stringEndArchive64, - 44, 45, 45, 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset) - self.fp.write(zip64endrec) - - zip64locrec = struct.pack( - structEndArchive64Locator, - stringEndArchive64Locator, 0, pos2, 1) - self.fp.write(zip64locrec) - centDirCount = min(centDirCount, 0xFFFF) - centDirSize = min(centDirSize, 0xFFFFFFFF) - centDirOffset = min(centDirOffset, 0xFFFFFFFF) - - # check for valid comment length - if len(self.comment) >= ZIP_MAX_COMMENT: - if self.debug > 0: - msg = 'Archive comment is too long; truncating to %d bytes' \ - % ZIP_MAX_COMMENT - self.comment = self.comment[:ZIP_MAX_COMMENT] - - endrec = struct.pack(structEndArchive, stringEndArchive, - 0, 0, centDirCount, centDirCount, - centDirSize, centDirOffset, len(self.comment)) - self.fp.write(endrec) - self.fp.write(self.comment) - self.fp.flush() - - if not self._filePassed: - self.fp.close() - self.fp = None - - -class PyZipFile(ZipFile): - """Class to create ZIP archives with Python library files and packages.""" - - def writepy(self, pathname, basename=""): - """Add all files from "pathname" to the ZIP archive. - - If pathname is a package directory, search the directory and - all package subdirectories recursively for all *.py and enter - the modules into the archive. 
If pathname is a plain - directory, listdir *.py and enter all modules. Else, pathname - must be a Python *.py file and the module will be put into the - archive. Added modules are always module.pyo or module.pyc. - This method will compile the module.py into module.pyc if - necessary. - """ - dir, name = os.path.split(pathname) - if os.path.isdir(pathname): - initname = os.path.join(pathname, "__init__.py") - if os.path.isfile(initname): - # This is a package directory, add it - if basename: - basename = "%s/%s" % (basename, name) - else: - basename = name - if self.debug: - print("Adding package in", pathname, "as", basename) - fname, arcname = self._get_codename(initname[0:-3], basename) - if self.debug: - print("Adding", arcname) - self.write(fname, arcname) - dirlist = os.listdir(pathname) - dirlist.remove("__init__.py") - # Add all *.py files and package subdirectories - for filename in dirlist: - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if os.path.isdir(path): - if os.path.isfile(os.path.join(path, "__init__.py")): - # This is a package directory, add it - self.writepy(path, basename) # Recursive call - elif ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print("Adding", arcname) - self.write(fname, arcname) - else: - # This is NOT a package directory, add its files at top level - if self.debug: - print("Adding files from directory", pathname) - for filename in os.listdir(pathname): - path = os.path.join(pathname, filename) - root, ext = os.path.splitext(filename) - if ext == ".py": - fname, arcname = self._get_codename(path[0:-3], - basename) - if self.debug: - print("Adding", arcname) - self.write(fname, arcname) - else: - if pathname[-3:] != ".py": - raise RuntimeError( - 'Files added with writepy() must end with ".py"') - fname, arcname = self._get_codename(pathname[0:-3], basename) - if self.debug: - print("Adding file", arcname) - self.write(fname, arcname) - - def _get_codename(self, pathname, basename): - """Return (filename, archivename) for the path. - - Given a module name path, return the correct file path and - archive name, compiling if necessary. For example, given - /python/lib/string, return (/python/lib/string.pyc, string). - """ - file_py = pathname + ".py" - file_pyc = pathname + ".pyc" - file_pyo = pathname + ".pyo" - if os.path.isfile(file_pyo) and \ - os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: - fname = file_pyo # Use .pyo file - elif not os.path.isfile(file_pyc) or \ - os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: - import py_compile - if self.debug: - print("Compiling", file_py) - try: - py_compile.compile(file_py, file_pyc, None, True) - except py_compile.PyCompileError as err: - print(err.msg) - fname = file_pyc - else: - fname = file_pyc - archivename = os.path.split(fname)[1] - if basename: - archivename = "%s/%s" % (basename, archivename) - return (fname, archivename) - - -def main(args = None): - import textwrap - USAGE=textwrap.dedent("""\ - Usage: - zipfile.py -l zipfile.zip # Show listing of a zipfile - zipfile.py -t zipfile.zip # Test if a zipfile is valid - zipfile.py -e zipfile.zip target # Extract zipfile into target dir - zipfile.py -c zipfile.zip src ... 
# Create zipfile from sources - """) - if args is None: - args = sys.argv[1:] - - if not args or args[0] not in ('-l', '-c', '-e', '-t'): - print(USAGE) - sys.exit(1) - - if args[0] == '-l': - if len(args) != 2: - print(USAGE) - sys.exit(1) - zf = ZipFile(args[1], 'r') - zf.printdir() - zf.close() - - elif args[0] == '-t': - if len(args) != 2: - print(USAGE) - sys.exit(1) - zf = ZipFile(args[1], 'r') - badfile = zf.testzip() - if badfile: - print("The following enclosed file is corrupted: {!r}".format(badfile)) - print("Done testing") - - elif args[0] == '-e': - if len(args) != 3: - print(USAGE) - sys.exit(1) - - zf = ZipFile(args[1], 'r') - out = args[2] - for path in zf.namelist(): - if path.startswith('./'): - tgt = os.path.join(out, path[2:]) - else: - tgt = os.path.join(out, path) - - tgtdir = os.path.dirname(tgt) - if not os.path.exists(tgtdir): - os.makedirs(tgtdir) - with open(tgt, 'wb') as fp: - fp.write(zf.read(path)) - zf.close() - - elif args[0] == '-c': - if len(args) < 3: - print(USAGE) - sys.exit(1) - - def addToZip(zf, path, zippath): - if os.path.isfile(path): - zf.write(path, zippath, ZIP_DEFLATED) - elif os.path.isdir(path): - for nm in os.listdir(path): - addToZip(zf, - os.path.join(path, nm), os.path.join(zippath, nm)) - # else: ignore - - zf = ZipFile(args[1], 'w', allowZip64=True) - for src in args[2:]: - addToZip(zf, src, os.path.basename(src)) - - zf.close() - -if __name__ == "__main__": - main() diff -r 06f1683c8db9 -r 245150080c48 2.00/zipfile313.diff --- a/2.00/zipfile313.diff Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,126 +0,0 @@ ---- /usr/local/lib/python3.1/zipfile.py 2010-11-29 00:59:28.000000000 +0000 -+++ zipfile313.py 2010-11-29 01:22:19.000000000 +0000 -@@ -3,6 +3,7 @@ - - XXX references to utf-8 need further investigation. 
- """ -+# Improved by Chortos-2 in 2010 (added bzip2 support) - import struct, os, time, sys, shutil - import binascii, io, stat - -@@ -13,8 +14,13 @@ - zlib = None - crc32 = binascii.crc32 - -+try: -+ import bz2 # We may need its compression method -+except ImportError: -+ bz2 = None -+ - __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", -- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] -+ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] - - class BadZipfile(Exception): - pass -@@ -35,6 +41,7 @@ - # constants for Zip file compression methods - ZIP_STORED = 0 - ZIP_DEFLATED = 8 -+ZIP_BZIP2 = 12 - # Other ZIP compression methods not supported - - # Below are some formats and associated data for reading/writing headers using -@@ -477,6 +484,9 @@ - self.compreadsize = 64*1024 - if self.compress_type == ZIP_DEFLATED: - self.dc = zlib.decompressobj(-15) -+ elif self.compress_type == ZIP_BZIP2: -+ self.dc = bz2.BZ2Decompressor() -+ self.compreadsize = 900000 - - if hasattr(zipinfo, 'CRC'): - self._expected_crc = zipinfo.CRC -@@ -604,7 +614,7 @@ - if self.compress_type == ZIP_STORED: - lr = len(self.readbuffer) - bytesToRead = min(bytesToRead, size - lr) -- elif self.compress_type == ZIP_DEFLATED: -+ else: - if len(self.readbuffer) > size: - # the user has requested fewer bytes than we've already - # pulled through the decompressor; don't read any more -@@ -639,14 +649,17 @@ - newdata = bytes(map(self.decrypter, newdata)) - - # decompress newly read data if necessary -- if newdata and self.compress_type == ZIP_DEFLATED: -+ if newdata and self.compress_type != ZIP_STORED: - newdata = self.dc.decompress(newdata) -- self.rawbuffer = self.dc.unconsumed_tail -+ self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' - if self.eof and len(self.rawbuffer) == 0: - # we're out of raw bytes (both from the file and - # the local buffer); flush just to make sure the - # decompressor is done -- newdata += self.dc.flush() -+ try: -+ newdata += self.dc.flush() -+ except AttributeError: -+ pass - # prevent decompressor from being used again - self.dc = None - -@@ -674,7 +687,8 @@ - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read "r", write "w" or append "a". -- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). -+ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), -+ or ZIP_BZIP2 (requires bz2). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. 
-@@ -694,6 +708,10 @@ - if not zlib: - raise RuntimeError( - "Compression requires the (missing) zlib module") -+ elif compression == ZIP_BZIP2: -+ if not bz2: -+ raise RuntimeError( -+ "Compression requires the (missing) bz2 module") - else: - raise RuntimeError("That compression method is not supported") - -@@ -1041,7 +1059,10 @@ - if zinfo.compress_type == ZIP_DEFLATED and not zlib: - raise RuntimeError( - "Compression requires the (missing) zlib module") -- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): -+ if zinfo.compress_type == ZIP_BZIP2 and not bz2: -+ raise RuntimeError( -+ "Compression requires the (missing) bz2 module") -+ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): - raise RuntimeError("That compression method is not supported") - if zinfo.file_size > ZIP64_LIMIT: - if not self._allowZip64: -@@ -1102,6 +1123,8 @@ - if zinfo.compress_type == ZIP_DEFLATED: - cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, - zlib.DEFLATED, -15) -+ elif zinfo.compress_type == ZIP_BZIP2: -+ cmpr = bz2.BZ2Compressor() - else: - cmpr = None - while 1: -@@ -1162,6 +1185,10 @@ - zlib.DEFLATED, -15) - data = co.compress(data) + co.flush() - zinfo.compress_size = len(data) # Compressed size -+ elif zinfo.compress_type == ZIP_BZIP2: -+ co = bz2.BZ2Compressor() -+ data = co.compress(data) + co.flush() -+ zinfo.compress_size = len(data) # Compressed size - else: - zinfo.compress_size = zinfo.file_size - zinfo.header_offset = self.fp.tell() # Start of header data diff -r 06f1683c8db9 -r 245150080c48 publish.sh --- a/publish.sh Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -#! /bin/sh - -VERSION=`hg summary | grep '^parent' | sed -En 's/^parent: [0-9]*:([0-9a-f]+) .*$/\1/p'` -if [ -z "$VERSION" ] -then - echo The current Mercurial changeset could not be determined. >&2 - exit 1 -fi - -sed 's/$$REV$\$/hg '"$VERSION/" test-vcs.py >test.py -chmod +x test.py \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 test-vcs.py --- a/test-vcs.py Sun Dec 19 23:12:11 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,969 +0,0 @@ -#! 
/usr/bin/python -# Copyright (c) 2009-2010 Chortos-2 - -import os, sys, shutil, time, subprocess, filecmp, optparse, signal, tempfile, tarfile, zipfile - -version = '1.21.0 ($$REV$$)' -parser = optparse.OptionParser(version='test.py '+version, usage='usage: %prog [options] [problem names] [[path' + os.path.sep + 'to' + os.path.sep + ']solution-app] [test case numbers]\n\nTest case numbers can be specified in plain text or as a Python expression\nif there is only one positional argument.\n\nOnly problem names listed in testconf.py are recognized.') -parser.add_option('-u', '--update', dest='update', action='store_true', default=False, help='check for an updated version of test.py') -parser.add_option('-e', '--exclude', dest='exclude', action='append', help='test case number(s) to exclude, as a Python expression; multiple -e options can be supplied') -parser.add_option('-c', '--cleanup', dest='clean', action='store_true', default=False, help='delete the copies of input/output files and exit') -parser.add_option('-s', '--save-io', dest='erase', action='store_false', default=True, help='do not delete the copies of input/output files after the last test case; create copies of input files and store output in files even if the solution uses standard I/O; delete the stored input/output files if the solution uses standard I/O and the -c/--cleanup option is specified') -parser.add_option('-m', '--copy-io', dest='copyonly', action='store_true', default=False, help='only create a copy of the input/output files of the last test case for manual testing; to delete them, use options -cs or -cm') -parser.add_option('-x', '--auto-exit', dest='pause', action='store_false', default=True, help='do not wait for a key to be pressed when finished testing') -parser.add_option('-p', '--python', action='store_true', default=False, help='always parse all positional arguments as a single Python expression (including the first argument even if it names an executable file)') -parser.add_option('-t', '--detect-time', dest='autotime', action='store_true', default=False, help='spend a second detecting the most precise time measurement function') -parser.add_option('-b', dest='builtin', action='store_true', default=False) - -options, args = parser.parse_args() -parser.destroy() -del parser - -if options.builtin: - try: - if args[0] == 'run': - import resource - maxmemory = int(args[1]) - resource.setrlimit(resource.RLIMIT_AS, (maxmemory*1024**2, maxmemory*1024**2)) - os.execv(args[2], args[2:]) - else: - sys.exit(2) - except: - sys.exit(2) - -def update(): - import urllib - latesttext = urllib.urlopen('http://chortos.selfip.net/~astiob/test.py/version.txt').read() - latest = latesttext.split('.') - installed = version.split('.') - update = '' - if latest[0] > installed[0]: - update = 'major' - elif latest[0] == installed[0]: - if latest[1] > installed[1]: - update = 'feature' - elif latest[1] == installed[1]: - if latest[2] > installed[2]: - update = 'bug-fixing' - elif latest[2] == installed[2]: - print 'You are using the latest publicly available version of test.py.' - return - if update == '': - print 'Your copy of test.py is newer than the publicly available version.' - return - print 'A ' + update + ' update to test.py is available. Downloading...' - sys.stdout.flush() - urllib.urlretrieve('http://chortos.selfip.net/~astiob/test.py/test.py', 'test.py') - print 'Downloaded and installed. Now you are using test.py ' + latesttext + '.' 
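The update() helper above compares the dot-separated version components as strings, so the ordering is lexicographic rather than numeric; once a component reaches two digits, string comparison can invert the result ('10' > '9' is False for strings). A minimal sketch of a numeric comparison, using a hypothetical helper name that is not part of test.py:

    def version_tuple(s):
        # '1.21.0 ($$REV$$)' -> (1, 21, 0); drops anything after a space in a component
        return tuple(int(part.split(' ')[0]) for part in s.split('.'))

    assert version_tuple('1.10.0') > version_tuple('1.9.9')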
- -if options.update: - update() - sys.exit() - -try: - import resource - memlimit = True - def call(name): - pid = os.fork() - if not pid: - resource.setrlimit(resource.RLIMIT_AS, (maxmemory*1024**2, maxmemory*1024**2)) - os.execl(name) - else: - return pid -except ImportError: - memlimit = False - -globals1 = set(globals()) - -# Initialize some configuration variables with default values -tasknames = (os.path.curdir,) -maxtime = 0 -tests = () -dummies = () -testsexcluded = () -padwithzeroestolength = 0 -taskweight = 100 -pointmap = {} -stdio = False -dummyinname = '' -dummyoutname = '' -tester = '' -maxexitcode = 0 - -def exectestconf_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - exec f.open(name, 'rU').read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - exec f.extractfile(name).read() in globals() - f.close() - return True - except KeyError: - f.close() - return False - -try: - execfile('testconf.py') -except IOError, error: - exc_info = sys.exc_info()[2] - try: - execfile(os.path.join('tests', 'testconf.py')) - except IOError: - if not exectestconf_helper('testconf.py'): - raise IOError, (error.errno, 'The configuration file is missing', error.filename), exc_info - del exc_info - -globals2 = set(globals()) -globals2.remove('globals1') -globals2 -= globals1 -del globals1 - -shared = {} -g = globals() -for k in globals2: - shared[k] = g[k] - -newtasknames = [] -while len(args) and args[0] in tasknames: - newtasknames.append(args[0]) - del args[0] -if len(newtasknames): - tasknames = newtasknames - -scoresumoveralltasks = 0 -scoremaxoveralltasks = 0 -ntasks = 0 -nfulltasks = 0 -cwd = '' # At any time this is either '' or taskname - -if options.autotime: - c = time.clock() - time.sleep(1) - c = time.clock() - c - if int(c + .99999) == 1: - clock = time.clock - else: - clock = time.time -elif os.name == 'nt': - clock = time.clock -else: - clock = time.time - -if options.copyonly: - options.erase = False - -def existstestcase_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - f.getinfo(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - f.getmember(name) - 
f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - f.getmember(name) - f.close() - return True - except KeyError: - f.close() - return False - -def existstestcase(name): - if os.path.isfile(os.path.join('tests', taskname, name)) or os.path.isfile(os.path.join('tests', name)): - return True - if cwd and (os.path.isfile(os.path.join(oldcwd, 'tests', cwd, name)) or os.path.isfile(os.path.join(oldcwd, 'tests', name))): - return True - if existstestcase_helper(os.path.join(taskname, name)) or existstestcase_helper(name): - return True - if cwd: - os.chdir(oldcwd) - if existstestcase_helper(os.path.join(cwd, name)) or existstestcase_helper(name): - os.chdir(cwd) - return True - os.chdir(cwd) - return False - -def opentestcase_helper(name): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - f = zipfile.ZipFile('tests.zip') - try: - c = f.open(name, 'rU') - f.close() - return c - except KeyError: - f.close() - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - c = f.extractfile(name) - return c - except KeyError: - f.close() - return None - -def opentestcase(name): - if os.path.isfile(os.path.join('tests', taskname, name)): - return open(os.path.join('tests', taskname, name), 'rU') - elif os.path.isfile(os.path.join('tests', name)): - return open(os.path.join('tests', name), 'rU') - f = opentestcase_helper(os.path.join(taskname, name)) - if not f: - f = opentestcase_helper(name) - if f: - return f - if cwd: - if os.path.isfile(os.path.join(oldcwd, 'tests', cwd, name)): - return open(os.path.join(oldcwd, 'tests', cwd, name), 'rU') - elif os.path.isfile(os.path.join(oldcwd, 'tests', name)): - return open(os.path.join(oldcwd, 'tests', name), 'rU') - os.chdir(oldcwd) - f = opentestcase_helper(os.path.join(cwd, name)) - if not f: - f = opentestcase_helper(name) - os.chdir(cwd) - if f: - return f - raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' - -def copytestcase_helper(name, target): - if os.path.isfile('tests.tar'): - f = tarfile.open('tests.tar') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.zip'): - if not os.path.isabs(target): - f = zipfile.ZipFile('tests.zip') - try: - m = f.getinfo(name) - m.filename = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - else: - oldcwd = os.getcwdu() - os.chdir('/') # FIXME: portability? 
- f = zipfile.ZipFile(os.path.join(oldcwd, 'tests.zip')) - try: - m = f.getinfo(name) - m.filename = os.path.relpath(target) - f.extract(m) - f.close() - os.chdir(oldcwd) - return True - except KeyError: - f.close() - os.chdir(oldcwd) - if os.path.isfile('tests.tgz'): - f = tarfile.open('tests.tgz') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.gz'): - f = tarfile.open('tests.tar.gz') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tbz2'): - f = tarfile.open('tests.tbz2') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - if os.path.isfile('tests.tar.bz2'): - f = tarfile.open('tests.tar.bz2') - try: - m = f.getmember(name) - m.name = target - f.extract(m) - f.close() - return True - except KeyError: - f.close() - return False - -def copytestcase(name, target): - if os.path.isfile(os.path.join('tests', taskname, name)): - shutil.copyfile(os.path.join('tests', taskname, name), target) - return - elif os.path.isfile(os.path.join('tests', name)): - shutil.copyfile(os.path.join('tests', name), target) - return - if copytestcase_helper(os.path.join(taskname, name), target) or copytestcase_helper(name, target): - return - if cwd: - if os.path.isfile(os.path.join(oldcwd, 'tests', cwd, name)): - shutil.copyfile(os.path.join(oldcwd, 'tests', cwd, name), target) - return - elif os.path.isfile(os.path.join(oldcwd, 'tests', name)): - shutil.copyfile(os.path.join(oldcwd, 'tests', name), target) - return - os.chdir(oldcwd) - if copytestcase_helper(os.path.join(cwd, name), target) or copytestcase_helper(name, target): - os.chdir(cwd) - return - os.chdir(cwd) - raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' - -# Always chdir if the directory exists but use any existing config -def chdir_and_exec_testconf(): - global cwd - cwd = '' - if os.path.isdir(taskname): - os.chdir(taskname) - if taskname != os.path.curdir: - cwd = taskname - try: - execfile('testconf.py', globals()) - return - except IOError: - pass - if not cwd: - if os.path.isfile(os.path.join('tests', taskname, 'testconf.py')): - execfile(os.path.join('tests', taskname, 'testconf.py'), globals()) - return - if os.path.isfile(os.path.join('tests', 'testconf.py')): - execfile(os.path.join('tests', 'testconf.py'), globals()) - return - if exectestconf_helper(os.path.join(taskname, 'testconf.py')) or exectestconf_helper('testconf.py'): - return - if cwd: - os.chdir(oldcwd) - if os.path.isfile(os.path.join('tests', cwd, 'testconf.py')): - execfile(os.path.join('tests', cwd, 'testconf.py'), globals()) - os.chdir(cwd) - return - if os.path.isfile(os.path.join('tests', 'testconf.py')): - execfile(os.path.join('tests', 'testconf.py'), globals()) - os.chdir(cwd) - return - if exectestconf_helper(os.path.join(cwd, 'testconf.py')) or exectestconf_helper('testconf.py'): - os.chdir(cwd) - return - if os.path.isfile('testconf.py'): - execfile('testconf.py', globals()) - os.chdir(cwd) - return - os.chdir(cwd) - elif os.path.isfile('testconf.py'): - execfile('testconf.py', globals()) - return - raise KeyError, 'The configuration file for task ' + taskname + ' is missing' - -try: - name - namedefined = True -except Exception: - namedefined = False - -for taskname in tasknames: - if ntasks: - print - - try: - if len(tasknames) > 1: - print taskname - except 
Exception: - if taskname != os.path.curdir or ntasks: - print taskname - - try: del inname - except NameError: pass - try: del outname - except NameError: pass - try: del ansname - except NameError: pass - - if not namedefined and taskname != os.path.curdir: - name = os.path.join(os.path.curdir, taskname) - for k in shared: - g[k] = shared[k] - - oldcwd = os.getcwdu() - chdir_and_exec_testconf() - - if options.clean: - try: - if not stdio or tester: - if not tester: - inname - outname - if tester: - ansname - except NameError, error: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - if not options.erase: - try: - inname = inname.replace('%', taskname) - except NameError: - inname = taskname + '.in' - try: - outname = outname.replace('%', taskname) - except NameError: - outname = taskname + '.out' - try: - ansname = ansname.replace('%', taskname) - except NameError: - ansname = taskname + '.ans' - elif not stdio or tester or not options.erase: - inname = inname.replace('%', taskname) - outname = outname.replace('%', taskname) - if tester: - ansname = ansname.replace('%', taskname) - if not stdio or tester or not options.erase: - if os.path.exists(inname): os.remove(inname) - if os.path.exists(outname): os.remove(outname) - if (tester or not options.erase) and ansname: - if os.path.exists(ansname): os.remove(ansname) - continue - - try: - name - except NameError, error: - if str(error).count('name') == 1: - raise NameError, 'configuration ' + str(error), sys.exc_info()[2] - else: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - - try: - if not stdio: - inname - outname - testcaseinname - if tester: - outname - if ansname: - testcaseoutname - else: - testcaseoutname - except NameError, error: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] - - if not options.erase: - try: - inname - except NameError: - inname = taskname + '.in' - try: - outname - except NameError: - outname = taskname + '.out' - try: - ansname - except NameError: - ansname = taskname + '.ans' - - if options.pause: - try: - pause - except NameError, error: - if os.name == 'posix': - pause = 'read -s -n 1' - print 'Configuration ' + str(error).replace('name ', 'variable ') + '; it was devised automatically but the choice might be incorrect, so test.py might exit immediately after the testing is completed.' 
- elif os.name == 'nt': - pause = 'pause' - else: - raise NameError, 'configuration ' + str(error).replace('name ', 'variable ') + ' and cannot be devised automatically', sys.exc_info()[2] - - if not dummyinname: - dummyinname = testcaseinname - if not dummyoutname and (not tester or ansname): - dummyoutname = testcaseoutname - - dummyinname = dummyinname.replace('%', taskname) - dummyoutname = dummyoutname.replace('%', taskname) - testcaseinname = testcaseinname.replace('%', taskname) - if not stdio or not options.erase: - inname = inname.replace('%', taskname) - outname = outname.replace('%', taskname) - try: - ansname = ansname.replace('%', taskname) - except NameError: - pass - if tester: - try: inname = inname.replace('%', taskname) - except NameError: pass - outname = outname.replace('%', taskname) - if ansname: - ansname = ansname.replace('%', taskname) - testcaseoutname = testcaseoutname.replace('%', taskname) - else: - testcaseoutname = testcaseoutname.replace('%', taskname) - - if isinstance(padwithzeroestolength, tuple): - padwithzeroestolength, paddummieswithzeroestolength = padwithzeroestolength - else: - paddummieswithzeroestolength = padwithzeroestolength - - if options.python: - dummies = () - s = ' '.join(args) - tests = eval(s) - try: - tests.__iter__ - except AttributeError: - tests = (tests,) - elif len(args): - if os.path.exists(args[0]): - name = args[0] - del args[0] - if len(args) > 1: - dummies = () - tests = args - elif len(args): - dummies = () - s = args[0] - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - if existstestcase(testcaseinname.replace('$', s)): - tests = (s,) - else: - try: - tests = eval(args[0]) - try: - tests.__iter__ - except AttributeError: - tests = (tests,) - except Exception: - tests = (s,) - - if options.exclude: - testsexcluded = [] - for i in options.exclude: - v = eval(i) - try: - testsexcluded.extend(v) - except TypeError: - testsexcluded.append(v) - - # Windows doesn't like paths beginning with .\ and not ending with an extension - name = os.path.normcase(name) - if os.name == 'nt' and name.startswith('.\\'): - name = name[2:] - - newpointmap = {} - - for i in pointmap: - try: - for j in i: - newpointmap[j] = pointmap[i] - except TypeError: - newpointmap[i] = pointmap[i] - - pointmap = newpointmap - - if not tester: - maxexitcode = 0 - - if maxtime > 0: - strmaxtime = '/%.3f' % maxtime - else: - strmaxtime = '' - - padoutputtolength = 0 - ntests = [] - - for j in dummies: - try: - j.__iter__ - except AttributeError: - j = (j,) - ntests.append((j, True)) - for i in j: - s = str(i) - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - s = 'sample ' + s - if padoutputtolength < len(s): - padoutputtolength = len(s) - - for j in tests: - try: - j.__iter__ - except AttributeError: - j = (j,) - ntests.append((j, False)) - for i in j: - s = str(i) - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - if padoutputtolength < len(s): - padoutputtolength = len(s) - - tests = ntests - score = maxpoints = ncorrect = ntotal = ncorrectvalued = nvalued = 0 - - if options.copyonly: - j, isdummy = tests[-1] - if isdummy: - realinname = dummyinname - realoutname = dummyoutname - else: - realinname = testcaseinname - realoutname = testcaseoutname - for i in j: - if i in testsexcluded and not isdummy: - continue - s = str(i) - if isdummy: - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - else: - if len(s) < padwithzeroestolength: - s = 
s.zfill(padwithzeroestolength) - copytestcase(realinname.replace('$', s), inname) - if ansname: - copytestcase(realoutname.replace('$', s), ansname) - continue - - for j, isdummy in tests: - ncorrectgrp = 0 - ntotalgrp = 0 - scoregrp = 0 - maxpointsgrp = 0 - if isdummy: - realinname = dummyinname - realoutname = dummyoutname - else: - realinname = testcaseinname - realoutname = testcaseoutname - for i in j: - if i in testsexcluded and not isdummy: - continue - ntotalgrp += 1 - s = str(i) - if isdummy: - npoints = 0 - if len(s) < paddummieswithzeroestolength: - s = s.zfill(paddummieswithzeroestolength) - spref = 'sample ' - else: - npoints = pointmap.get(None, maxexitcode if maxexitcode and isinstance(maxexitcode, int) else 1) - npoints = pointmap.get(i, npoints) - maxpointsgrp += npoints - if npoints: - nvalued += 1 - if len(s) < padwithzeroestolength: - s = s.zfill(padwithzeroestolength) - spref = '' - print ' ' * (padoutputtolength - len(spref + s)) + spref + s + ':', - sys.stdout.flush() - outputdata = open(os.devnull, 'w') - if stdio: - f = tempfile.NamedTemporaryFile(delete=False) - inputdatafname = f.name - f.close() - copytestcase(realinname.replace('$', s), inputdatafname) - inputdata = open(inputdatafname, 'rU') - if options.erase: - tempoutput = tempfile.TemporaryFile('w+') - else: - tempoutput = open(outname, 'w+') - try: - proc = subprocess.Popen(name, stdin=inputdata, stdout=tempoutput, stderr=outputdata, universal_newlines=True) - except OSError, error: - raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] - else: - if os.path.exists(outname): - os.remove(outname) - copytestcase(realinname.replace('$', s), inname) - try: - proc = subprocess.Popen(name, stdin=outputdata, stdout=outputdata, stderr=outputdata, universal_newlines=True) - except OSError, error: - raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] - cl = clock() - if maxtime > 0: - while 1: - proc.poll() - elapsed = clock() - cl - if proc.returncode == None: - if elapsed >= maxtime: - print '%.3f%s s, 0/%d, time limit exceeded' % (elapsed, strmaxtime, npoints) - sys.stdout.flush() - while proc.returncode == None: - try: - proc.terminate() - except OSError: - pass - except AttributeError: - try: - os.kill(proc.pid, signal.SIGTERM) - except Exception: - pass - proc.poll() - outputdata.close() - if stdio: - tempoutput.close() - break - else: - print '%.3f%s s,' % (elapsed, strmaxtime), - sys.stdout.flush() - elapsed = 0 - if stdio: - tempoutput.seek(0) - lines = tempoutput.readlines() - tempoutput.close() - break - if elapsed >= maxtime: - continue - else: - data = proc.communicate() - elapsed = clock() - cl - print '%.3f%s s,' % (elapsed, strmaxtime), - sys.stdout.flush() - if stdio: - tempoutput.seek(0) - lines = tempoutput.readlines() - tempoutput.close() - outputdata.close() - if stdio: - inputdata.close() - try: - os.unlink(inputdatafname) - except Exception: - pass - if proc.returncode > 0: - print '0/%d, non-zero return code %d' % (npoints, proc.returncode) - sys.stdout.flush() - elif proc.returncode < 0: - print '0/%d, terminated by signal %d' % (npoints, -proc.returncode) - sys.stdout.flush() - else: - if not tester: - if stdio: - outputdata = opentestcase(realoutname.replace('$', s)) - r = 0 - data = outputdata.read().splitlines(True) - if len(lines) != len(data): - r = 1 - else: - for i in zip(lines, data): - if i[0] != i[1]: - r = 1 - break - outputdata.close() - else: - try: - inputdata = open(outname, 'rU') - except 
IOError: - print '0/%g, output file not created or not readable' % npoints - sys.stdout.flush() - r = None - else: - outputdata = opentestcase(realoutname.replace('$', s)) - r = 0 - lines = inputdata.readlines() - data = outputdata.read().splitlines(True) - if len(lines) != len(data): - r = 1 - else: - for i in zip(lines, data): - if i[0] != i[1]: - r = 1 - break - inputdata.close() - outputdata.close() - else: - if ansname: - copytestcase(realoutname.replace('$', s), ansname) - if stdio: - try: copytestcase(realinname.replace('$', s), inname) - except NameError: pass - outputdata = open(outname, 'w') - outputdata.writelines(lines) - outputdata.close() - try: - proc = subprocess.Popen(tester, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) - except OSError, error: - raise OSError, 'The tester application cannot be launched: ' + str(error), sys.exc_info()[2] - data = proc.communicate() - r = proc.returncode - if tester and data[0]: - data = ''.join((' (', data[0].strip(), ')')) - else: - data = '' - if not maxexitcode and r or maxexitcode and not r: - print '0/%g, wrong answer%s' % (npoints, data) - sys.stdout.flush() - elif not maxexitcode and r == 0 or maxexitcode and r >= maxexitcode: - print '%g/%g, OK%s' % (npoints, npoints, data) - sys.stdout.flush() - scoregrp += npoints - ncorrectgrp += 1 - if npoints: - ncorrectvalued += 1 - elif maxexitcode and r != None: - actualpoints = npoints*r/maxexitcode if not npoints*r%maxexitcode else float(npoints*r)/maxexitcode - print '%g/%g, partly OK%s' % (actualpoints, npoints, data) - sys.stdout.flush() - scoregrp += actualpoints - ncorrectgrp += 1 - if npoints: - ncorrectvalued += 1 - if ntotalgrp: - if ncorrectgrp < ntotalgrp: - scoregrp = 0 - if ntotalgrp > 1: - print 'Group total: %d/%d tests; %g/%g points' % (ncorrectgrp, ntotalgrp, scoregrp, maxpointsgrp) - sys.stdout.flush() - ncorrect += ncorrectgrp - ntotal += ntotalgrp - score += scoregrp - maxpoints += maxpointsgrp - - if options.erase: - if not stdio or tester: - if os.path.exists(inname): os.remove(inname) - if os.path.exists(outname): os.remove(outname) - if tester and ansname: - if os.path.exists(ansname): os.remove(ansname) - elif stdio: - copytestcase(realinname.replace('$', s), inname) - copytestcase(realoutname.replace('$', s), ansname) - actualpoints = (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0 - if nvalued != ntotal: - print 'Grand total: %d/%d tests (%d/%d valued); %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, ncorrectvalued, nvalued, score, maxpoints, actualpoints, taskweight) - else: - print 'Grand total: %d/%d tests; %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, score, maxpoints, actualpoints, taskweight) - - scoresumoveralltasks += actualpoints - scoremaxoveralltasks += taskweight - ntasks += 1 - nfulltasks += int((score == maxpoints) if maxpoints else (taskweight == 0)) - - os.chdir(oldcwd) - -if options.clean or options.copyonly: - sys.exit() - -if ntasks != 1: - print - print 'Grand grand total: %g/%g weighted points; %d/%d problems solved fully' % (scoresumoveralltasks, scoremaxoveralltasks, nfulltasks, ntasks) - -if options.pause: - print 'Press any key to exit... ', - sys.stdout.flush() - os.system(pause + ' >' + os.devnull) diff -r 06f1683c8db9 -r 245150080c48 test.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test.py Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,899 @@ +#! 
/usr/bin/python +# Copyright (c) 2009, 2010 Chortos-2 + +import os, sys, shutil, time, subprocess, filecmp, optparse, signal, tempfile, tarfile, zipfile + +parser = optparse.OptionParser(version='test.py 1.20.3', usage='usage: %prog [options] [problem names] [[path/to/]solution-app] [test case numbers]\n\nTest case numbers can be specified in plain text or as a Python expression\nif there is only one positional argument.\n\nOnly problem names listed in testconf.py are recognized.') +parser.add_option('-e', '--exclude', dest='exclude', action='append', help='test case number(s) to exclude, as a Python expression; multiple -e options can be supplied') +parser.add_option('-c', '--cleanup', dest='clean', action='store_true', default=False, help='delete the copies of input/output files and exit') +parser.add_option('-s', '--save-io', dest='erase', action='store_false', default=True, help='do not delete the copies of input/output files after the last test case; create copies of input files and store output in files even if the solution uses standard I/O; delete the stored input/output files if the solution uses standard I/O and the -c/--cleanup option is specified') +parser.add_option('-m', '--copy-io', dest='copyonly', action='store_true', default=False, help='only create a copy of the input/output files of the last test case for manual testing; to delete them, use options -cs') +parser.add_option('-x', '--auto-exit', dest='pause', action='store_false', default=True, help='do not wait for a key to be pressed when finished testing') +parser.add_option('-p', '--python', action='store_true', default=False, help='always parse all positional arguments as a single Python expression (including the first argument even if it names an executable file)') +parser.add_option('-t', '--detect-time', dest='autotime', action='store_true', default=False, help='spend a second detecting the most precise time measurement function') + +options, args = parser.parse_args() +parser.destroy() +del parser + +globals1 = set(globals()) + +# Initialize some configuration variables with default values +tasknames = ('.',) +maxtime = 0 +tests = () +dummies = () +testsexcluded = () +padwithzeroestolength = 0 +taskweight = 100 +pointmap = {} +stdio = False +dummyinname = '' +dummyoutname = '' +tester = '' + +def exectestconf_helper(name): + if os.path.isfile('tests.tar'): + f = tarfile.open('tests.tar') + try: + exec f.extractfile(name).read() in globals() + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.zip'): + f = zipfile.ZipFile('tests.zip') + try: + exec f.open(name, 'rU').read() in globals() + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tgz'): + f = tarfile.open('tests.tgz') + try: + exec f.extractfile(name).read() in globals() + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.gz'): + f = tarfile.open('tests.tar.gz') + try: + exec f.extractfile(name).read() in globals() + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tbz2'): + f = tarfile.open('tests.tbz2') + try: + exec f.extractfile(name).read() in globals() + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.bz2'): + f = tarfile.open('tests.tar.bz2') + try: + exec f.extractfile(name).read() in globals() + f.close() + return True + except KeyError: + f.close() + return False + +try: + execfile('testconf.py') +except IOError, error: + exc_info = sys.exc_info()[2] + try: + 
execfile('tests/testconf.py') + except IOError: + if not exectestconf_helper('testconf.py'): + raise IOError, (error.errno, 'The configuration file is missing', error.filename), exc_info + del exc_info + +globals2 = set(globals()) +globals2.remove('globals1') +globals2 -= globals1 +del globals1 + +shared = {} +g = globals() +for k in globals2: + shared[k] = g[k] + +newtasknames = [] +while len(args) and args[0] in tasknames: + newtasknames.append(args[0]) + del args[0] +if len(newtasknames): + tasknames = newtasknames + +scoresumoveralltasks = 0 +scoremaxoveralltasks = 0 +ntasks = 0 +nfulltasks = 0 +cwd = '' # At any time this is either '' or taskname + '/' + +if options.autotime: + c = time.clock() + time.sleep(1) + c = time.clock() - c + if int(c + .99999) == 1: + clock = time.clock + else: + clock = time.time +elif os.name == 'nt': + clock = time.clock +else: + clock = time.time + +if options.copyonly: + options.erase = False + +def existstestcase_helper(name): + if os.path.isfile('tests.tar'): + f = tarfile.open('tests.tar') + try: + f.getmember(name) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.zip'): + f = zipfile.ZipFile('tests.zip') + try: + f.getinfo(name) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tgz'): + f = tarfile.open('tests.tgz') + try: + f.getmember(name) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.gz'): + f = tarfile.open('tests.tar.gz') + try: + f.getmember(name) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tbz2'): + f = tarfile.open('tests.tbz2') + try: + f.getmember(name) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.bz2'): + f = tarfile.open('tests.tar.bz2') + try: + f.getmember(name) + f.close() + return True + except KeyError: + f.close() + return False + +def existstestcase(name): + if os.path.isfile('tests/' + taskname + '/' + name) or os.path.isfile('tests/' + name): + return True + if cwd and (os.path.isfile(oldcwd + '/tests/' + cwd + name) or os.path.isfile(oldcwd + '/tests/' + name)): + return True + if existstestcase_helper(taskname + '/' + name) or existstestcase_helper(name): + return True + if cwd: + os.chdir(oldcwd) + if existstestcase_helper(cwd + name) or existstestcase_helper(name): + os.chdir(cwd) + return True + os.chdir(cwd) + return False + +def opentestcase_helper(name): + if os.path.isfile('tests.tar'): + f = tarfile.open('tests.tar') + try: + c = f.extractfile(name) + return c + except KeyError: + f.close() + if os.path.isfile('tests.zip'): + f = zipfile.ZipFile('tests.zip') + try: + c = f.open(name, 'rU') + f.close() + return c + except KeyError: + f.close() + if os.path.isfile('tests.tgz'): + f = tarfile.open('tests.tgz') + try: + c = f.extractfile(name) + return c + except KeyError: + f.close() + if os.path.isfile('tests.tar.gz'): + f = tarfile.open('tests.tar.gz') + try: + c = f.extractfile(name) + return c + except KeyError: + f.close() + if os.path.isfile('tests.tbz2'): + f = tarfile.open('tests.tbz2') + try: + c = f.extractfile(name) + return c + except KeyError: + f.close() + if os.path.isfile('tests.tar.bz2'): + f = tarfile.open('tests.tar.bz2') + try: + c = f.extractfile(name) + return c + except KeyError: + f.close() + return None + +def opentestcase(name): + if os.path.isfile('tests/' + taskname + '/' + name): + return open('tests/' + taskname + '/' + name, 'rU') + elif os.path.isfile('tests/' + name): + return open('tests/' + name, 
'rU') + f = opentestcase_helper(taskname + '/' + name) + if not f: + f = opentestcase_helper(name) + if f: + return f + if cwd: + if os.path.isfile(oldcwd + '/tests/' + cwd + name): + return open(oldcwd + '/tests/' + cwd + name, 'rU') + elif os.path.isfile(oldcwd + '/tests/' + name): + return open(oldcwd + '/tests/' + name, 'rU') + os.chdir(oldcwd) + f = opentestcase_helper(cwd + name) + if not f: + f = opentestcase_helper(name) + os.chdir(cwd) + if f: + return f + raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' + +def copytestcase_helper(name, target): + if os.path.isfile('tests.tar'): + f = tarfile.open('tests.tar') + try: + m = f.getmember(name) + m.name = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.zip'): + if not target.startswith('/'): + f = zipfile.ZipFile('tests.zip') + try: + m = f.getinfo(name) + m.filename = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + else: + oldcwd = os.getcwdu() + os.chdir('/') + f = zipfile.ZipFile(oldcwd + '/tests.zip') + try: + m = f.getinfo(name) + m.filename = os.path.relpath(target) + f.extract(m) + f.close() + os.chdir(oldcwd) + return True + except KeyError: + f.close() + os.chdir(oldcwd) + if os.path.isfile('tests.tgz'): + f = tarfile.open('tests.tgz') + try: + m = f.getmember(name) + m.name = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.gz'): + f = tarfile.open('tests.tar.gz') + try: + m = f.getmember(name) + m.name = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tbz2'): + f = tarfile.open('tests.tbz2') + try: + m = f.getmember(name) + m.name = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + if os.path.isfile('tests.tar.bz2'): + f = tarfile.open('tests.tar.bz2') + try: + m = f.getmember(name) + m.name = target + f.extract(m) + f.close() + return True + except KeyError: + f.close() + return False + +def copytestcase(name, target): + if os.path.isfile('tests/' + taskname + '/' + name): + shutil.copyfile('tests/' + taskname + '/' + name, target) + return + elif os.path.isfile('tests/' + name): + shutil.copyfile('tests/' + name, target) + return + if copytestcase_helper(taskname + '/' + name, target) or copytestcase_helper(name, target): + return + if cwd: + if os.path.isfile(oldcwd + '/tests/' + cwd + name): + shutil.copyfile(oldcwd + '/tests/' + cwd + name, target) + return + elif os.path.isfile(oldcwd + '/tests/' + name): + shutil.copyfile(oldcwd + '/tests/' + name, target) + return + os.chdir(oldcwd) + if copytestcase_helper(cwd + name, target) or copytestcase_helper(name, target): + os.chdir(cwd) + return + os.chdir(cwd) + raise KeyError, 'The test-case-defining file \'' + name + '\' cannot be found' + +# Always chdir if the directory exists but use any existing config +def chdir_and_exec_testconf(): + global cwd + cwd = '' + if os.path.isdir(taskname): + os.chdir(taskname) + if taskname != '.': + cwd = taskname + '/' + try: + execfile('testconf.py', globals()) + return + except IOError: + pass + if not cwd: + if os.path.isfile('tests/' + taskname + '/testconf.py'): + execfile('tests/' + taskname + '/testconf.py', globals()) + return + if os.path.isfile('tests/testconf.py'): + execfile('tests/testconf.py', globals()) + return + if exectestconf_helper(taskname + '/testconf.py') or exectestconf_helper('testconf.py'): + return + if cwd: + os.chdir(oldcwd) + if 
os.path.isfile('tests/' + cwd + 'testconf.py'): + execfile('tests/' + cwd + 'testconf.py', globals()) + os.chdir(cwd) + return + if os.path.isfile('tests/testconf.py'): + execfile('tests/testconf.py', globals()) + os.chdir(cwd) + return + if exectestconf_helper(cwd + 'testconf.py') or exectestconf_helper('testconf.py'): + os.chdir(cwd) + return + if os.path.isfile('testconf.py'): + execfile('testconf.py', globals()) + os.chdir(cwd) + return + os.chdir(cwd) + elif os.path.isfile('testconf.py'): + execfile('testconf.py', globals()) + return + raise KeyError, 'The configuration file for task ' + taskname + ' is missing' + +try: + name + namedefined = True +except Exception: + namedefined = False + +for taskname in tasknames: + if ntasks: + print + + try: + if len(tasknames) > 1: + print taskname + except Exception: + if taskname != '.' or ntasks: + print taskname + + try: del inname + except NameError: pass + try: del outname + except NameError: pass + try: del ansname + except NameError: pass + + if not namedefined and taskname != '.': + name = './' + taskname + for k in shared: + g[k] = shared[k] + + oldcwd = os.getcwdu() + chdir_and_exec_testconf() + + if options.clean: + try: + if not stdio or tester: + if not tester: + inname + outname + if tester: + ansname + except NameError, error: + raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] + if not options.erase: + try: + inname = inname.replace('%', taskname) + except NameError: + inname = taskname + '.in' + try: + outname = outname.replace('%', taskname) + except NameError: + outname = taskname + '.out' + try: + ansname = ansname.replace('%', taskname) + except NameError: + ansname = taskname + '.ans' + else: + inname = inname.replace('%', taskname) + outname = outname.replace('%', taskname) + if tester: + ansname = ansname.replace('%', taskname) + if not stdio or tester or not options.erase: + if os.path.exists(inname): os.remove(inname) + if os.path.exists(outname): os.remove(outname) + if (tester or not options.erase) and ansname: + if os.path.exists(ansname): os.remove(ansname) + continue + + try: + name + except NameError, error: + if str(error).count('name') == 1: + raise NameError, 'configuration ' + str(error), sys.exc_info()[2] + else: + raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] + + try: + if not stdio: + inname + outname + testcaseinname + if tester: + outname + if ansname: + testcaseoutname + else: + testcaseoutname + except NameError, error: + raise NameError, 'configuration ' + str(error).replace('name ', 'variable ', 1), sys.exc_info()[2] + + if not options.erase: + try: + inname + except NameError: + inname = taskname + '.in' + try: + outname + except NameError: + outname = taskname + '.out' + try: + ansname + except NameError: + ansname = taskname + '.ans' + + if options.pause: + try: + pause + except NameError, error: + if os.name == 'posix': + pause = 'read -s -n 1' + print 'Configuration ' + str(error).replace('name ', 'variable ') + '; it was devised automatically but the choice might be incorrect, so test.py might exit immediately after the testing is complete.' 
+ elif os.name == 'nt': + pause = 'pause' + else: + raise NameError, 'configuration ' + str(error).replace('name ', 'variable ') + ' and cannot be devised automatically', sys.exc_info()[2] + + if not dummyinname: + dummyinname = testcaseinname + if not dummyoutname and (not tester or ansname): + dummyoutname = testcaseoutname + + dummyinname = dummyinname.replace('%', taskname) + dummyoutname = dummyoutname.replace('%', taskname) + testcaseinname = testcaseinname.replace('%', taskname) + if not stdio or not options.erase: + inname = inname.replace('%', taskname) + outname = outname.replace('%', taskname) + try: + ansname = ansname.replace('%', taskname) + except NameError: + pass + if tester: + try: inname = inname.replace('%', taskname) + except NameError: pass + outname = outname.replace('%', taskname) + if ansname: + ansname = ansname.replace('%', taskname) + testcaseoutname = testcaseoutname.replace('%', taskname) + else: + testcaseoutname = testcaseoutname.replace('%', taskname) + + if isinstance(padwithzeroestolength, tuple): + padwithzeroestolength, paddummieswithzeroestolength = padwithzeroestolength + else: + paddummieswithzeroestolength = padwithzeroestolength + + if options.python: + dummies = () + s = ' '.join(args) + tests = eval(s) + try: + tests.__iter__ + except AttributeError: + tests = (tests,) + elif len(args): + if os.path.exists(args[0]): + name = args[0] + del args[0] + if len(args) > 1: + dummies = () + tests = args + elif len(args): + dummies = () + s = args[0] + if len(s) < padwithzeroestolength: + s = s.zfill(padwithzeroestolength) + if existstestcase(testcaseinname.replace('$', s)): + tests = (s,) + else: + try: + tests = eval(args[0]) + try: + tests.__iter__ + except AttributeError: + tests = (tests,) + except Exception: + tests = (s,) + + if options.exclude: + testsexcluded = [] + for i in options.exclude: + v = eval(i) + try: + testsexcluded.extend(v) + except TypeError: + testsexcluded.append(v) + + # Windows doesn't like paths beginning with .\ and not ending with an extension + name = os.path.normcase(name) + if name.startswith('.\\'): + name = name[2:] + + newpointmap = {} + + for i in pointmap: + try: + for j in i: + newpointmap[j] = pointmap[i] + except TypeError: + newpointmap[i] = pointmap[i] + + pointmap = newpointmap + + if maxtime > 0: + strmaxtime = '/%.3f' % maxtime + else: + strmaxtime = '' + + padoutputtolength = 0 + ntests = [] + + for j in dummies: + try: + j.__iter__ + except AttributeError: + j = (j,) + ntests.append((j, True)) + for i in j: + s = str(i) + if len(s) < paddummieswithzeroestolength: + s = s.zfill(paddummieswithzeroestolength) + s = 'sample ' + s + if padoutputtolength < len(s): + padoutputtolength = len(s) + + for j in tests: + try: + j.__iter__ + except AttributeError: + j = (j,) + ntests.append((j, False)) + for i in j: + s = str(i) + if len(s) < padwithzeroestolength: + s = s.zfill(padwithzeroestolength) + if padoutputtolength < len(s): + padoutputtolength = len(s) + + tests = ntests + score = maxpoints = ncorrect = ntotal = ncorrectvalued = nvalued = 0 + + if options.copyonly: + j, isdummy = tests[-1] + if isdummy: + realinname = dummyinname + realoutname = dummyoutname + else: + realinname = testcaseinname + realoutname = testcaseoutname + for i in j: + if i in testsexcluded and not isdummy: + continue + s = str(i) + if isdummy: + if len(s) < paddummieswithzeroestolength: + s = s.zfill(paddummieswithzeroestolength) + else: + if len(s) < padwithzeroestolength: + s = s.zfill(padwithzeroestolength) + 
copytestcase(realinname.replace('$', s), inname) + if ansname: + copytestcase(realoutname.replace('$', s), ansname) + continue + + for j, isdummy in tests: + ncorrectgrp = 0 + ntotalgrp = 0 + scoregrp = 0 + maxpointsgrp = 0 + if isdummy: + realinname = dummyinname + realoutname = dummyoutname + else: + realinname = testcaseinname + realoutname = testcaseoutname + for i in j: + if i in testsexcluded and not isdummy: + continue + ntotalgrp += 1 + s = str(i) + if isdummy: + npoints = 0 + if len(s) < paddummieswithzeroestolength: + s = s.zfill(paddummieswithzeroestolength) + spref = 'sample ' + else: + npoints = pointmap.get(None, 1) + npoints = pointmap.get(i, npoints) + maxpointsgrp += npoints + if npoints: + nvalued += 1 + if len(s) < padwithzeroestolength: + s = s.zfill(padwithzeroestolength) + spref = '' + print ' ' * (padoutputtolength - len(spref + s)) + spref + s + ':', + sys.stdout.flush() + outputdata = open(os.devnull, 'w') + if stdio: + f = tempfile.NamedTemporaryFile(delete=False) + inputdatafname = f.name + f.close() + copytestcase(realinname.replace('$', s), inputdatafname) + inputdata = open(inputdatafname, 'rU') + if options.erase: + tempoutput = tempfile.TemporaryFile('w+') + else: + tempoutput = open(outname, 'w+') + try: + proc = subprocess.Popen(name, stdin=inputdata, stdout=tempoutput, stderr=outputdata, universal_newlines=True) + except OSError, error: + raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] + else: + if os.path.exists(outname): + os.remove(outname) + copytestcase(realinname.replace('$', s), inname) + try: + proc = subprocess.Popen(name, stdin=outputdata, stdout=outputdata, stderr=outputdata, universal_newlines=True) + except OSError, error: + raise OSError, 'The program to be tested cannot be launched: ' + str(error), sys.exc_info()[2] + cl = clock() + if maxtime > 0: + while 1: + proc.poll() + elapsed = clock() - cl + if proc.returncode == None: + if elapsed >= maxtime: + print '%.3f%s s, 0/%d, time limit exceeded' % (elapsed, strmaxtime, npoints) + sys.stdout.flush() + while proc.returncode == None: + try: + proc.terminate() + except OSError: + pass + except AttributeError: + try: + os.kill(proc.pid, signal.SIGTERM) + except Exception: + pass + proc.poll() + outputdata.close() + if stdio: + tempoutput.close() + break + else: + print '%.3f%s s,' % (elapsed, strmaxtime), + sys.stdout.flush() + elapsed = 0 + if stdio: + tempoutput.seek(0) + lines = tempoutput.readlines() + tempoutput.close() + break + if elapsed >= maxtime: + continue + else: + data = proc.communicate() + elapsed = clock() - cl + print '%.3f%s s,' % (elapsed, strmaxtime), + sys.stdout.flush() + if stdio: + tempoutput.seek(0) + lines = tempoutput.readlines() + tempoutput.close() + outputdata.close() + if stdio: + inputdata.close() + try: + os.unlink(inputdatafname) + except Exception: + pass + if proc.returncode > 0: + print '0/%d, non-zero return code %d' % (npoints, proc.returncode) + sys.stdout.flush() + elif proc.returncode < 0: + print '0/%d, terminated by signal %d' % (npoints, -proc.returncode) + sys.stdout.flush() + else: + if not tester: + if stdio: + outputdata = opentestcase(realoutname.replace('$', s)) + r = 0 + data = outputdata.read().splitlines(True) + if len(lines) != len(data): + r = 1 + else: + for i in zip(lines, data): + if i[0] != i[1]: + r = 1 + break + outputdata.close() + else: + try: + inputdata = open(outname, 'rU') + except IOError: + print '0/%g, output file not created or not readable' % npoints + sys.stdout.flush() + r 
= None + else: + outputdata = opentestcase(realoutname.replace('$', s)) + r = 0 + lines = inputdata.readlines() + data = outputdata.read().splitlines(True) + if len(lines) != len(data): + r = 1 + else: + for i in zip(lines, data): + if i[0] != i[1]: + r = 1 + break + inputdata.close() + outputdata.close() + else: + if ansname: + copytestcase(realoutname.replace('$', s), ansname) + if stdio: + try: copytestcase(realinname.replace('$', s), inname) + except NameError: pass + outputdata = open(outname, 'w') + outputdata.writelines(lines) + outputdata.close() + try: + proc = subprocess.Popen(tester, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) + except OSError, error: + raise OSError, 'The tester application cannot be launched: ' + str(error), sys.exc_info()[2] + data = proc.communicate() + r = proc.returncode + if tester and data[0]: + data = ''.join((' (', data[0].strip(), ')')) + else: + data = '' + if r: + print '0/%g, wrong answer%s' % (npoints, data) + sys.stdout.flush() + elif r == 0: + print '%g/%g, OK%s' % (npoints, npoints, data) + sys.stdout.flush() + scoregrp += npoints + ncorrectgrp += 1 + if npoints: + ncorrectvalued += 1 + if ntotalgrp: + if scoregrp < maxpointsgrp: + scoregrp = 0 + if ntotalgrp > 1: + print 'Group total: %d/%d tests; %g/%g points' % (ncorrectgrp, ntotalgrp, scoregrp, maxpointsgrp) + sys.stdout.flush() + ncorrect += ncorrectgrp + ntotal += ntotalgrp + score += scoregrp + maxpoints += maxpointsgrp + + if options.erase: + if not stdio or tester: + if os.path.exists(inname): os.remove(inname) + if os.path.exists(outname): os.remove(outname) + if tester and ansname: + if os.path.exists(ansname): os.remove(ansname) + elif stdio: + copytestcase(realinname.replace('$', s), inname) + copytestcase(realoutname.replace('$', s), ansname) + if nvalued != ntotal: + print 'Grand total: %d/%d tests (%d/%d valued); %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, ncorrectvalued, nvalued, score, maxpoints, (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0, taskweight) + else: + print 'Grand total: %d/%d tests; %g/%g points; weighted score: %g/%g' % (ncorrect, ntotal, score, maxpoints, (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0, taskweight) + + scoresumoveralltasks += (score*taskweight/maxpoints if not score*taskweight%maxpoints else float(score*taskweight)/maxpoints) if maxpoints else 0 + scoremaxoveralltasks += taskweight + ntasks += 1 + nfulltasks += int((score == maxpoints) if maxpoints else (taskweight == 0)) + + os.chdir(oldcwd) + +if options.clean or options.copyonly: + sys.exit() + +if ntasks != 1: + print + print 'Grand grand total: %g/%g weighted points; %d/%d problems solved fully' % (scoresumoveralltasks, scoremaxoveralltasks, nfulltasks, ntasks) + +if options.pause: + print 'Press any key to exit... 
', + sys.stdout.flush() + os.system(pause + ' >' + os.devnull) diff -r 06f1683c8db9 -r 245150080c48 zipfile.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile.py Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,7 @@ +import sys +if sys.version_info[0] >= 3: + from zipfile31 import * +elif sys.version_info[1] >= 7: + from zipfile27 import * +else: + from zipfile26 import * \ No newline at end of file diff -r 06f1683c8db9 -r 245150080c48 zipfile26.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile26.py Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,1436 @@ +""" +Read and write ZIP files. +""" +# Improved by Chortos-2 in 2009 and 2010 (added bzip2 support) +import struct, os, time, sys, shutil +import binascii, cStringIO, stat + +try: + import zlib # We may need its compression method + crc32 = zlib.crc32 +except ImportError: + zlib = None + crc32 = binascii.crc32 + +try: + import bz2 # We may need its compression method +except ImportError: + bz2 = None + +__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", + "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + +class BadZipfile(Exception): + pass + + +class LargeZipFile(Exception): + """ + Raised when writing a zipfile, the zipfile requires ZIP64 extensions + and those extensions are disabled. + """ + +error = BadZipfile # The exception raised by this module + +ZIP64_LIMIT = (1 << 31) - 1 +ZIP_FILECOUNT_LIMIT = 1 << 16 +ZIP_MAX_COMMENT = (1 << 16) - 1 + +# constants for Zip file compression methods +ZIP_STORED = 0 +ZIP_DEFLATED = 8 +ZIP_BZIP2 = 12 +# Other ZIP compression methods not supported + +# Below are some formats and associated data for reading/writing headers using +# the struct module. The names and structures of headers/records are those used +# in the PKWARE description of the ZIP file format: +# http://www.pkware.com/documents/casestudies/APPNOTE.TXT +# (URL valid as of January 2008) + +# The "end of central directory" structure, magic number, size, and indices +# (section V.I in the format document) +structEndArchive = "<4s4H2LH" +stringEndArchive = "PK\005\006" +sizeEndCentDir = struct.calcsize(structEndArchive) + +_ECD_SIGNATURE = 0 +_ECD_DISK_NUMBER = 1 +_ECD_DISK_START = 2 +_ECD_ENTRIES_THIS_DISK = 3 +_ECD_ENTRIES_TOTAL = 4 +_ECD_SIZE = 5 +_ECD_OFFSET = 6 +_ECD_COMMENT_SIZE = 7 +# These last two indices are not part of the structure as defined in the +# spec, but they are used internally by this module as a convenience +_ECD_COMMENT = 8 +_ECD_LOCATION = 9 + +# The "central directory" structure, magic number, size, and indices +# of entries in the structure (section V.F in the format document) +structCentralDir = "<4s4B4HL2L5H2L" +stringCentralDir = "PK\001\002" +sizeCentralDir = struct.calcsize(structCentralDir) + +# indexes of entries in the central directory structure +_CD_SIGNATURE = 0 +_CD_CREATE_VERSION = 1 +_CD_CREATE_SYSTEM = 2 +_CD_EXTRACT_VERSION = 3 +_CD_EXTRACT_SYSTEM = 4 +_CD_FLAG_BITS = 5 +_CD_COMPRESS_TYPE = 6 +_CD_TIME = 7 +_CD_DATE = 8 +_CD_CRC = 9 +_CD_COMPRESSED_SIZE = 10 +_CD_UNCOMPRESSED_SIZE = 11 +_CD_FILENAME_LENGTH = 12 +_CD_EXTRA_FIELD_LENGTH = 13 +_CD_COMMENT_LENGTH = 14 +_CD_DISK_NUMBER_START = 15 +_CD_INTERNAL_FILE_ATTRIBUTES = 16 +_CD_EXTERNAL_FILE_ATTRIBUTES = 17 +_CD_LOCAL_HEADER_OFFSET = 18 + +# The "local file header" structure, magic number, size, and indices +# (section V.A in the format document) +structFileHeader = "<4s2B4HL2L2H" +stringFileHeader = "PK\003\004" +sizeFileHeader = struct.calcsize(structFileHeader) + +_FH_SIGNATURE = 0 
+_FH_EXTRACT_VERSION = 1 +_FH_EXTRACT_SYSTEM = 2 +_FH_GENERAL_PURPOSE_FLAG_BITS = 3 +_FH_COMPRESSION_METHOD = 4 +_FH_LAST_MOD_TIME = 5 +_FH_LAST_MOD_DATE = 6 +_FH_CRC = 7 +_FH_COMPRESSED_SIZE = 8 +_FH_UNCOMPRESSED_SIZE = 9 +_FH_FILENAME_LENGTH = 10 +_FH_EXTRA_FIELD_LENGTH = 11 + +# The "Zip64 end of central directory locator" structure, magic number, and size +structEndArchive64Locator = "<4sLQL" +stringEndArchive64Locator = "PK\x06\x07" +sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) + +# The "Zip64 end of central directory" record, magic number, size, and indices +# (section V.G in the format document) +structEndArchive64 = "<4sQ2H2L4Q" +stringEndArchive64 = "PK\x06\x06" +sizeEndCentDir64 = struct.calcsize(structEndArchive64) + +_CD64_SIGNATURE = 0 +_CD64_DIRECTORY_RECSIZE = 1 +_CD64_CREATE_VERSION = 2 +_CD64_EXTRACT_VERSION = 3 +_CD64_DISK_NUMBER = 4 +_CD64_DISK_NUMBER_START = 5 +_CD64_NUMBER_ENTRIES_THIS_DISK = 6 +_CD64_NUMBER_ENTRIES_TOTAL = 7 +_CD64_DIRECTORY_SIZE = 8 +_CD64_OFFSET_START_CENTDIR = 9 + +def is_zipfile(filename): + """Quickly see if file is a ZIP file by checking the magic number.""" + try: + fpin = open(filename, "rb") + endrec = _EndRecData(fpin) + fpin.close() + if endrec: + return True # file has correct magic number + except IOError: + pass + return False + +def _EndRecData64(fpin, offset, endrec): + """ + Read the ZIP64 end-of-archive records and use that to update endrec + """ + fpin.seek(offset - sizeEndCentDir64Locator, 2) + data = fpin.read(sizeEndCentDir64Locator) + sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) + if sig != stringEndArchive64Locator: + return endrec + + if diskno != 0 or disks != 1: + raise BadZipfile("zipfiles that span multiple disks are not supported") + + # Assume no 'zip64 extensible data' + fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) + data = fpin.read(sizeEndCentDir64) + sig, sz, create_version, read_version, disk_num, disk_dir, \ + dircount, dircount2, dirsize, diroffset = \ + struct.unpack(structEndArchive64, data) + if sig != stringEndArchive64: + return endrec + + # Update the original endrec using data from the ZIP64 record + endrec[_ECD_SIGNATURE] = sig + endrec[_ECD_DISK_NUMBER] = disk_num + endrec[_ECD_DISK_START] = disk_dir + endrec[_ECD_ENTRIES_THIS_DISK] = dircount + endrec[_ECD_ENTRIES_TOTAL] = dircount2 + endrec[_ECD_SIZE] = dirsize + endrec[_ECD_OFFSET] = diroffset + return endrec + + +def _EndRecData(fpin): + """Return data from the "End of Central Directory" record, or None. + + The data is a list of the nine items in the ZIP "End of central dir" + record followed by a tenth item, the file seek offset of this record.""" + + # Determine file size + fpin.seek(0, 2) + filesize = fpin.tell() + + # Check to see if this is ZIP file with no archive comment (the + # "end of central directory" structure should be the last item in the + # file if this is the case). 
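+    # (Editor's illustration, not part of the original patch: a minimal sketch of
+    # driving the helpers above by hand, assuming an archive named 'tests.zip'
+    # exists in the current directory. is_zipfile() and _EndRecData() are defined
+    # in this module, and the _ECD_* constants index the record that
+    # _EndRecData() returns.)
+    #
+    #   if is_zipfile('tests.zip'):
+    #       fpin = open('tests.zip', 'rb')
+    #       endrec = _EndRecData(fpin)
+    #       fpin.close()
+    #       if endrec:
+    #           print endrec[_ECD_ENTRIES_TOTAL], 'members,', \
+    #                 endrec[_ECD_SIZE], 'bytes of central directory'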
+ try: + fpin.seek(-sizeEndCentDir, 2) + except IOError: + return None + data = fpin.read() + if data[0:4] == stringEndArchive and data[-2:] == "\000\000": + # the signature is correct and there's no comment, unpack structure + endrec = struct.unpack(structEndArchive, data) + endrec=list(endrec) + + # Append a blank comment and record start offset + endrec.append("") + endrec.append(filesize - sizeEndCentDir) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, -sizeEndCentDir, endrec) + + # Either this is not a ZIP file, or it is a ZIP file with an archive + # comment. Search the end of the file for the "end of central directory" + # record signature. The comment is the last item in the ZIP file and may be + # up to 64K long. It is assumed that the "end of central directory" magic + # number does not appear in the comment. + maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) + fpin.seek(maxCommentStart, 0) + data = fpin.read() + start = data.rfind(stringEndArchive) + if start >= 0: + # found the magic number; attempt to unpack and interpret + recData = data[start:start+sizeEndCentDir] + endrec = list(struct.unpack(structEndArchive, recData)) + comment = data[start+sizeEndCentDir:] + # check that comment length is correct + if endrec[_ECD_COMMENT_SIZE] == len(comment): + # Append the archive comment and start offset + endrec.append(comment) + endrec.append(maxCommentStart + start) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, maxCommentStart + start - filesize, + endrec) + + # Unable to find a valid end of central directory structure + return + + +class ZipInfo (object): + """Class with attributes describing each file in the ZIP archive.""" + + __slots__ = ( + 'orig_filename', + 'filename', + 'date_time', + 'compress_type', + 'comment', + 'extra', + 'create_system', + 'create_version', + 'extract_version', + 'reserved', + 'flag_bits', + 'volume', + 'internal_attr', + 'external_attr', + 'header_offset', + 'CRC', + 'compress_size', + 'file_size', + '_raw_time', + ) + + def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): + self.orig_filename = filename # Original file name in archive + + # Terminate the file name at the first null byte. Null bytes in file + # names are used as tricks by viruses in archives. + null_byte = filename.find(chr(0)) + if null_byte >= 0: + filename = filename[0:null_byte] + # This is used to ensure paths in generated ZIP files always use + # forward slashes as the directory separator, as required by the + # ZIP format specification. 
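+        # (Editor's note, not part of the original patch: a small usage sketch of
+        # the normalisation performed just below -- constructing a ZipInfo by hand
+        # on Windows turns 'dir\\file.txt' into 'dir/file.txt'; the date_time tuple
+        # here is an arbitrary example value.)
+        #
+        #   zi = ZipInfo('dir/file.txt', (2010, 12, 19, 23, 23, 24))
+        #   zi.compress_type = ZIP_DEFLATED
+        #   print zi.filename, zi.date_time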
+        if os.sep != "/" and os.sep in filename:
+            filename = filename.replace(os.sep, "/")
+
+        self.filename = filename        # Normalized file name
+        self.date_time = date_time      # year, month, day, hour, min, sec
+        # Standard values:
+        self.compress_type = ZIP_STORED # Type of compression for the file
+        self.comment = ""               # Comment for each file
+        self.extra = ""                 # ZIP extra data
+        if sys.platform == 'win32':
+            self.create_system = 0          # System which created ZIP archive
+        else:
+            # Assume everything else is unix-y
+            self.create_system = 3          # System which created ZIP archive
+        self.create_version = 20        # Version which created ZIP archive
+        self.extract_version = 20       # Version needed to extract archive
+        self.reserved = 0               # Must be zero
+        self.flag_bits = 0              # ZIP flag bits
+        self.volume = 0                 # Volume number of file header
+        self.internal_attr = 0          # Internal attributes
+        self.external_attr = 0          # External file attributes
+        # Other attributes are set by class ZipFile:
+        # header_offset         Byte offset to the file header
+        # CRC                   CRC-32 of the uncompressed file
+        # compress_size         Size of the compressed file
+        # file_size             Size of the uncompressed file
+
+    def FileHeader(self):
+        """Return the per-file header as a string."""
+        dt = self.date_time
+        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
+        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
+        if self.flag_bits & 0x08:
+            # Set these to zero because we write them after the file data
+            CRC = compress_size = file_size = 0
+        else:
+            CRC = self.CRC
+            compress_size = self.compress_size
+            file_size = self.file_size
+
+        extra = self.extra
+
+        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
+            # File is larger than what fits into a 4 byte integer,
+            # fall back to the ZIP64 extension
+            fmt = '<HHQQ'
+            extra = extra + struct.pack(fmt,
+                    1, struct.calcsize(fmt)-4, file_size, compress_size)
+            file_size = 0xffffffff
+            compress_size = 0xffffffff
+            self.extract_version = max(45, self.extract_version)
+            self.create_version = max(45, self.extract_version)
+
+        filename, flag_bits = self._encodeFilenameFlags()
+        header = struct.pack(structFileHeader, stringFileHeader,
+                 self.extract_version, self.reserved, flag_bits,
+                 self.compress_type, dostime, dosdate, CRC,
+                 compress_size, file_size,
+                 len(filename), len(extra))
+        return header + filename + extra
+
+    def _encodeFilenameFlags(self):
+        if isinstance(self.filename, unicode):
+            try:
+                return self.filename.encode('ascii'), self.flag_bits
+            except UnicodeEncodeError:
+                return self.filename.encode('utf-8'), self.flag_bits | 0x800
+        else:
+            return self.filename, self.flag_bits
+
+    def _decodeFilename(self):
+        if self.flag_bits & 0x800:
+            return self.filename.decode('utf-8')
+        else:
+            return self.filename
+
+    def _decodeExtra(self):
+        # Try to decode the extra field.
+        extra = self.extra
+        unpack = struct.unpack
+        while extra:
+            tp, ln = unpack('<HH', extra[:4])
+            if tp == 1:
+                if ln >= 24:
+                    counts = unpack('<QQQ', extra[4:28])
+                elif ln == 16:
+                    counts = unpack('<QQ', extra[4:20])
+                elif ln == 8:
+                    counts = unpack('<Q', extra[4:12])
+                elif ln == 0:
+                    counts = ()
+                else:
+                    raise RuntimeError, "Corrupt extra field %s"%(ln,)
+
+                idx = 0
+
+                # ZIP64 extension (large files and/or large archives)
+                if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
+                    self.file_size = counts[idx]
+                    idx += 1
+
+                if self.compress_size == 0xFFFFFFFFL:
+                    self.compress_size = counts[idx]
+                    idx += 1
+
+                if self.header_offset == 0xffffffffL:
+                    self.header_offset = counts[idx]
+                    idx += 1
+
+            extra = extra[ln+4:]
+
+
+class _ZipDecrypter:
+    """Class to handle decryption of files stored within a ZIP archive.
+
+    ZIP supports a password-based form of encryption. Even though known
+    plaintext attacks have been found against it, it is still useful
+    to be able to get data out of such a file.
+
+    Usage:
+        zd = _ZipDecrypter(mypwd)
+        plain_char = zd(cypher_char)
+        plain_text = map(zd, cypher_text)
+    """
+    def _GenerateCRCTable():
+        """Generate a CRC-32 table.
+
+        ZIP encryption uses the CRC32 one-byte primitive for scrambling some
+        internal keys. We noticed that a direct implementation is faster than
+        relying on binascii.crc32().
+        """
+        poly = 0xedb88320
+        table = [0] * 256
+        for i in range(256):
+            crc = i
+            for j in range(8):
+                if crc & 1:
+                    crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
+                else:
+                    crc = ((crc >> 1) & 0x7FFFFFFF)
+            table[i] = crc
+        return table
+    crctable = _GenerateCRCTable()
+
+    def _crc32(self, ch, crc):
+        """Compute the CRC32 primitive on one byte."""
+        return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
+
+    def __init__(self, pwd):
+        self.key0 = 305419896
+        self.key1 = 591751049
+        self.key2 = 878082192
+        for p in pwd:
+            self._UpdateKeys(p)
+
+    def _UpdateKeys(self, c):
+        self.key0 = self._crc32(c, self.key0)
+        self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
+        self.key1 = (self.key1 * 134775813 + 1) & 4294967295
+        self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
+
+    def __call__(self, c):
+        """Decrypt a single character."""
+        c = ord(c)
+        k = self.key2 | 2
+        c = c ^ (((k * (k^1)) >> 8) & 255)
+        c = chr(c)
+        self._UpdateKeys(c)
+        return c
+
+class ZipExtFile:
+    """File-like object for reading an archive member.
+       Is returned by ZipFile.open().
+ """ + + def __init__(self, fileobj, zipinfo, decrypt=None): + self.fileobj = fileobj + self.decrypter = decrypt + self.bytes_read = 0L + self.rawbuffer = '' + self.readbuffer = '' + self.linebuffer = '' + self.eof = False + self.univ_newlines = False + self.nlSeps = ("\n", ) + self.lastdiscard = '' + + self.compress_type = zipinfo.compress_type + self.compress_size = zipinfo.compress_size + + self.closed = False + self.mode = "r" + self.name = zipinfo.filename + + # read from compressed files in 64k blocks + self.compreadsize = 64*1024 + if self.compress_type == ZIP_DEFLATED: + self.dc = zlib.decompressobj(-15) + elif self.compress_type == ZIP_BZIP2: + self.dc = bz2.BZ2Decompressor() + self.compreadsize = 900000 + + def set_univ_newlines(self, univ_newlines): + self.univ_newlines = univ_newlines + + # pick line separator char(s) based on universal newlines flag + self.nlSeps = ("\n", ) + if self.univ_newlines: + self.nlSeps = ("\r\n", "\r", "\n") + + def __iter__(self): + return self + + def next(self): + nextline = self.readline() + if not nextline: + raise StopIteration() + + return nextline + + def close(self): + self.closed = True + + def _checkfornewline(self): + nl, nllen = -1, -1 + if self.linebuffer: + # ugly check for cases where half of an \r\n pair was + # read on the last pass, and the \r was discarded. In this + # case we just throw away the \n at the start of the buffer. + if (self.lastdiscard, self.linebuffer[0]) == ('\r','\n'): + self.linebuffer = self.linebuffer[1:] + + for sep in self.nlSeps: + nl = self.linebuffer.find(sep) + if nl >= 0: + nllen = len(sep) + return nl, nllen + + return nl, nllen + + def readline(self, size = -1): + """Read a line with approx. size. If size is negative, + read a whole line. + """ + if size < 0: + size = sys.maxint + elif size == 0: + return '' + + # check for a newline already in buffer + nl, nllen = self._checkfornewline() + + if nl >= 0: + # the next line was already in the buffer + nl = min(nl, size) + else: + # no line break in buffer - try to read more + size -= len(self.linebuffer) + while nl < 0 and size > 0: + buf = self.read(min(size, 100)) + if not buf: + break + self.linebuffer += buf + size -= len(buf) + + # check for a newline in buffer + nl, nllen = self._checkfornewline() + + # we either ran out of bytes in the file, or + # met the specified size limit without finding a newline, + # so return current buffer + if nl < 0: + s = self.linebuffer + self.linebuffer = '' + return s + + buf = self.linebuffer[:nl] + self.lastdiscard = self.linebuffer[nl:nl + nllen] + self.linebuffer = self.linebuffer[nl + nllen:] + + # line is always returned with \n as newline char (except possibly + # for a final incomplete line in the file, which is handled above). + return buf + "\n" + + def readlines(self, sizehint = -1): + """Return a list with all (following) lines. The sizehint parameter + is ignored in this implementation. 
+ """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def read(self, size = None): + # act like file() obj and return empty string if size is 0 + if size == 0: + return '' + + # determine read size + bytesToRead = self.compress_size - self.bytes_read + + # adjust read size for encrypted files since the first 12 bytes + # are for the encryption/password information + if self.decrypter is not None: + bytesToRead -= 12 + + if size is not None and size >= 0: + if self.compress_type == ZIP_STORED: + lr = len(self.readbuffer) + bytesToRead = min(bytesToRead, size - lr) + else: + if len(self.readbuffer) > size: + # the user has requested fewer bytes than we've already + # pulled through the decompressor; don't read any more + bytesToRead = 0 + else: + # user will use up the buffer, so read some more + lr = len(self.rawbuffer) + bytesToRead = min(bytesToRead, self.compreadsize - lr) + + # avoid reading past end of file contents + if bytesToRead + self.bytes_read > self.compress_size: + bytesToRead = self.compress_size - self.bytes_read + + # try to read from file (if necessary) + if bytesToRead > 0: + bytes = self.fileobj.read(bytesToRead) + self.bytes_read += len(bytes) + self.rawbuffer += bytes + + # handle contents of raw buffer + if self.rawbuffer: + newdata = self.rawbuffer + self.rawbuffer = '' + + # decrypt new data if we were given an object to handle that + if newdata and self.decrypter is not None: + newdata = ''.join(map(self.decrypter, newdata)) + + # decompress newly read data if necessary + if newdata and self.compress_type != ZIP_STORED: + newdata = self.dc.decompress(newdata) + self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' + if self.eof and len(self.rawbuffer) == 0: + # we're out of raw bytes (both from the file and + # the local buffer); flush just to make sure the + # decompressor is done + try: + newdata += self.dc.flush() + except AttributeError: + pass + # prevent decompressor from being used again + self.dc = None + + self.readbuffer += newdata + + + # return what the user asked for + if size is None or len(self.readbuffer) <= size: + bytes = self.readbuffer + self.readbuffer = '' + else: + bytes = self.readbuffer[:size] + self.readbuffer = self.readbuffer[size:] + + return bytes + + +class ZipFile: + """ Class with methods to open, read, write, close, list zip files. + + z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) + + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read "r", write "w" or append "a". + compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), + or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. 
+ + """ + + fp = None # Set here since __del__ checks it + + def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): + """Open the ZIP file with mode read "r", write "w" or append "a".""" + if mode not in ("r", "w", "a"): + raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') + + if compression == ZIP_STORED: + pass + elif compression == ZIP_DEFLATED: + if not zlib: + raise RuntimeError,\ + "Compression requires the (missing) zlib module" + elif compression == ZIP_BZIP2: + if not bz2: + raise RuntimeError,\ + "Compression requires the (missing) bz2 module" + else: + raise RuntimeError, "That compression method is not supported" + + self._allowZip64 = allowZip64 + self._didModify = False + self.debug = 0 # Level of printing: 0 through 3 + self.NameToInfo = {} # Find file info given name + self.filelist = [] # List of ZipInfo instances for archive + self.compression = compression # Method of compression + self.mode = key = mode.replace('b', '')[0] + self.pwd = None + self.comment = '' + + # Check if we were passed a file-like object + if isinstance(file, basestring): + self._filePassed = 0 + self.filename = file + modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} + try: + self.fp = open(file, modeDict[mode]) + except IOError: + if mode == 'a': + mode = key = 'w' + self.fp = open(file, modeDict[mode]) + else: + raise + else: + self._filePassed = 1 + self.fp = file + self.filename = getattr(file, 'name', None) + + if key == 'r': + self._GetContents() + elif key == 'w': + pass + elif key == 'a': + try: # See if file is a zip file + self._RealGetContents() + # seek to start of directory and overwrite + self.fp.seek(self.start_dir, 0) + except BadZipfile: # file is not a zip file, just append + self.fp.seek(0, 2) + else: + if not self._filePassed: + self.fp.close() + self.fp = None + raise RuntimeError, 'Mode must be "r", "w" or "a"' + + def _GetContents(self): + """Read the directory, making sure we close the file if the format + is bad.""" + try: + self._RealGetContents() + except BadZipfile: + if not self._filePassed: + self.fp.close() + self.fp = None + raise + + def _RealGetContents(self): + """Read in the table of contents for the ZIP file.""" + fp = self.fp + endrec = _EndRecData(fp) + if not endrec: + raise BadZipfile, "File is not a zip file" + if self.debug > 1: + print endrec + size_cd = endrec[_ECD_SIZE] # bytes in central directory + offset_cd = endrec[_ECD_OFFSET] # offset of central directory + self.comment = endrec[_ECD_COMMENT] # archive comment + + # "concat" is zero, unless zip was concatenated to another file + concat = endrec[_ECD_LOCATION] - size_cd - offset_cd + if endrec[_ECD_SIGNATURE] == stringEndArchive64: + # If Zip64 extension structures are present, account for them + concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) + + if self.debug > 2: + inferred = concat + offset_cd + print "given, inferred, offset", offset_cd, inferred, concat + # self.start_dir: Position of start of central directory + self.start_dir = offset_cd + concat + fp.seek(self.start_dir, 0) + data = fp.read(size_cd) + fp = cStringIO.StringIO(data) + total = 0 + while total < size_cd: + centdir = fp.read(sizeCentralDir) + if centdir[0:4] != stringCentralDir: + raise BadZipfile, "Bad magic number for central directory" + centdir = struct.unpack(structCentralDir, centdir) + if self.debug > 2: + print centdir + filename = fp.read(centdir[_CD_FILENAME_LENGTH]) + # Create ZipInfo instance to store file information + x = ZipInfo(filename) + x.extra = 
fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) + x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) + x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] + (x.create_version, x.create_system, x.extract_version, x.reserved, + x.flag_bits, x.compress_type, t, d, + x.CRC, x.compress_size, x.file_size) = centdir[1:12] + x.volume, x.internal_attr, x.external_attr = centdir[15:18] + # Convert date/time code to (year, month, day, hour, min, sec) + x._raw_time = t + x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, + t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) + + x._decodeExtra() + x.header_offset = x.header_offset + concat + x.filename = x._decodeFilename() + self.filelist.append(x) + self.NameToInfo[x.filename] = x + + # update total bytes read from central directory + total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] + + centdir[_CD_EXTRA_FIELD_LENGTH] + + centdir[_CD_COMMENT_LENGTH]) + + if self.debug > 2: + print "total", total + + + def namelist(self): + """Return a list of file names in the archive.""" + l = [] + for data in self.filelist: + l.append(data.filename) + return l + + def infolist(self): + """Return a list of class ZipInfo instances for files in the + archive.""" + return self.filelist + + def printdir(self): + """Print a table of contents for the zip file.""" + print "%-46s %19s %12s" % ("File Name", "Modified ", "Size") + for zinfo in self.filelist: + date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] + print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size) + + def testzip(self): + """Read all the files and check the CRC.""" + chunk_size = 2 ** 20 + for zinfo in self.filelist: + try: + # Read by chunks, to avoid an OverflowError or a + # MemoryError with very large embedded files. + f = self.open(zinfo.filename, "r") + while f.read(chunk_size): # Check CRC-32 + pass + except BadZipfile: + return zinfo.filename + + def getinfo(self, name): + """Return the instance of ZipInfo given 'name'.""" + info = self.NameToInfo.get(name) + if info is None: + raise KeyError( + 'There is no item named %r in the archive' % name) + + return info + + def setpassword(self, pwd): + """Set default password for encrypted files.""" + self.pwd = pwd + + def read(self, name, pwd=None): + """Return file bytes (as a string) for name.""" + return self.open(name, "r", pwd).read() + + def open(self, name, mode="r", pwd=None): + """Return file-like object for 'name'.""" + if mode not in ("r", "U", "rU"): + raise RuntimeError, 'open() requires mode "r", "U", or "rU"' + if not self.fp: + raise RuntimeError, \ + "Attempt to read ZIP archive that was already closed" + + # Only open a new file for instances where we were not + # given a file object in the constructor + if self._filePassed: + zef_file = self.fp + else: + zef_file = open(self.filename, 'rb') + + # Make sure we have an info object + if isinstance(name, ZipInfo): + # 'name' is already an info object + zinfo = name + else: + # Get info object for name + zinfo = self.getinfo(name) + + zef_file.seek(zinfo.header_offset, 0) + + # Skip the file header: + fheader = zef_file.read(sizeFileHeader) + if fheader[0:4] != stringFileHeader: + raise BadZipfile, "Bad magic number for file header" + + fheader = struct.unpack(structFileHeader, fheader) + fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) + if fheader[_FH_EXTRA_FIELD_LENGTH]: + zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) + + if fname != zinfo.orig_filename: + raise BadZipfile, \ + 'File name in directory "%s" and header "%s" differ.' 
% ( + zinfo.orig_filename, fname) + + # check for encrypted flag & handle password + is_encrypted = zinfo.flag_bits & 0x1 + zd = None + if is_encrypted: + if not pwd: + pwd = self.pwd + if not pwd: + raise RuntimeError, "File %s is encrypted, " \ + "password required for extraction" % name + + zd = _ZipDecrypter(pwd) + # The first 12 bytes in the cypher stream is an encryption header + # used to strengthen the algorithm. The first 11 bytes are + # completely random, while the 12th contains the MSB of the CRC, + # or the MSB of the file time depending on the header type + # and is used to check the correctness of the password. + bytes = zef_file.read(12) + h = map(zd, bytes[0:12]) + if zinfo.flag_bits & 0x8: + # compare against the file type from extended local headers + check_byte = (zinfo._raw_time >> 8) & 0xff + else: + # compare against the CRC otherwise + check_byte = (zinfo.CRC >> 24) & 0xff + if ord(h[11]) != check_byte: + raise RuntimeError("Bad password for file", name) + + # build and return a ZipExtFile + if zd is None: + zef = ZipExtFile(zef_file, zinfo) + else: + zef = ZipExtFile(zef_file, zinfo, zd) + + # set universal newlines on ZipExtFile if necessary + if "U" in mode: + zef.set_univ_newlines(True) + return zef + + def extract(self, member, path=None, pwd=None): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a ZipInfo object. You can + specify a different directory using `path'. + """ + if not isinstance(member, ZipInfo): + member = self.getinfo(member) + + if path is None: + path = os.getcwd() + + return self._extract_member(member, path, pwd) + + def extractall(self, path=None, members=None, pwd=None): + """Extract all members from the archive to the current working + directory. `path' specifies a different directory to extract to. + `members' is optional and must be a subset of the list returned + by namelist(). + """ + if members is None: + members = self.namelist() + + for zipinfo in members: + self.extract(zipinfo, path, pwd) + + def _extract_member(self, member, targetpath, pwd): + """Extract the ZipInfo object 'member' to a physical + file on the path targetpath. + """ + # build the destination pathname, replacing + # forward slashes to platform specific separators. + # Strip trailing path separator, unless it represents the root. + if (targetpath[-1:] in (os.path.sep, os.path.altsep) + and len(os.path.splitdrive(targetpath)[1]) > 1): + targetpath = targetpath[:-1] + + # don't include leading "/" from file name if present + if member.filename[0] == '/': + targetpath = os.path.join(targetpath, member.filename[1:]) + else: + targetpath = os.path.join(targetpath, member.filename) + + targetpath = os.path.normpath(targetpath) + + # Create all upper directories if necessary. 
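+        # (Editor's illustration, not part of the original patch: typical use of the
+        # extraction API defined above, assuming an archive named 'tests.zip' and a
+        # target directory 'tests/'. extractall() routes every member through
+        # _extract_member() below.)
+        #
+        #   zf = ZipFile('tests.zip', 'r')
+        #   zf.printdir()
+        #   zf.extractall('tests')
+        #   zf.close()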
+ upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + os.makedirs(upperdirs) + + if member.filename[-1] == '/': + if not os.path.isdir(targetpath): + os.mkdir(targetpath) + return targetpath + + source = self.open(member, pwd=pwd) + target = file(targetpath, "wb") + shutil.copyfileobj(source, target) + source.close() + target.close() + + return targetpath + + def _writecheck(self, zinfo): + """Check for errors before writing a file to the archive.""" + if zinfo.filename in self.NameToInfo: + if self.debug: # Warning for duplicate names + print "Duplicate name:", zinfo.filename + if self.mode not in ("w", "a"): + raise RuntimeError, 'write() requires mode "w" or "a"' + if not self.fp: + raise RuntimeError, \ + "Attempt to write ZIP archive that was already closed" + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError, \ + "Compression requires the (missing) zlib module" + if zinfo.compress_type == ZIP_BZIP2 and not bz2: + raise RuntimeError, \ + "Compression requires the (missing) bz2 module" + if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError, \ + "That compression method is not supported" + if zinfo.file_size > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile("Filesize would require ZIP64 extensions") + if zinfo.header_offset > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile("Zipfile size would require ZIP64 extensions") + + def write(self, filename, arcname=None, compress_type=None): + """Put the bytes from filename into the archive under the name + arcname.""" + if not self.fp: + raise RuntimeError( + "Attempt to write to ZIP archive that was already closed") + + st = os.stat(filename) + isdir = stat.S_ISDIR(st.st_mode) + mtime = time.localtime(st.st_mtime) + date_time = mtime[0:6] + # Create ZipInfo instance to store file information + if arcname is None: + arcname = filename + arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) + while arcname[0] in (os.sep, os.altsep): + arcname = arcname[1:] + if isdir: + arcname += '/' + zinfo = ZipInfo(arcname, date_time) + zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes + if compress_type is None: + zinfo.compress_type = self.compression + else: + zinfo.compress_type = compress_type + + zinfo.file_size = st.st_size + zinfo.flag_bits = 0x00 + zinfo.header_offset = self.fp.tell() # Start of header bytes + + self._writecheck(zinfo) + self._didModify = True + + if isdir: + zinfo.file_size = 0 + zinfo.compress_size = 0 + zinfo.CRC = 0 + self.filelist.append(zinfo) + self.NameToInfo[zinfo.filename] = zinfo + self.fp.write(zinfo.FileHeader()) + return + + fp = open(filename, "rb") + # Must overwrite CRC and sizes with correct data later + zinfo.CRC = CRC = 0 + zinfo.compress_size = compress_size = 0 + zinfo.file_size = file_size = 0 + self.fp.write(zinfo.FileHeader()) + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) + elif zinfo.compress_type == ZIP_BZIP2: + cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: + buf = fp.read(1024 * 8) + if not buf: + break + file_size = file_size + len(buf) + CRC = crc32(buf, CRC) & 0xffffffff + if cmpr: + buf = cmpr.compress(buf) + compress_size = compress_size + len(buf) + self.fp.write(buf) + fp.close() + if cmpr: + buf = cmpr.flush() + compress_size = compress_size + len(buf) + self.fp.write(buf) + zinfo.compress_size = compress_size + else: + zinfo.compress_size = file_size + zinfo.CRC = CRC 
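+        # (Editor's sketch, not part of the original patch: writing an archive with
+        # the bzip2 method this module adds. 'solution.py' is a hypothetical input
+        # file, and the bz2 module must be importable for ZIP_BZIP2 to work.)
+        #
+        #   zf = ZipFile('tests.zip', 'w')
+        #   zf.write('solution.py', compress_type=ZIP_BZIP2)
+        #   zf.close()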
+ zinfo.file_size = file_size + # Seek backwards and write CRC and file sizes + position = self.fp.tell() # Preserve current position in file + self.fp.seek(zinfo.header_offset + 14, 0) + self.fp.write(struct.pack(" ZIP64_LIMIT \ + or zinfo.compress_size > ZIP64_LIMIT: + extra.append(zinfo.file_size) + extra.append(zinfo.compress_size) + file_size = 0xffffffff + compress_size = 0xffffffff + else: + file_size = zinfo.file_size + compress_size = zinfo.compress_size + + if zinfo.header_offset > ZIP64_LIMIT: + extra.append(zinfo.header_offset) + header_offset = 0xffffffffL + else: + header_offset = zinfo.header_offset + + extra_data = zinfo.extra + if extra: + # Append a ZIP64 field to the extra's + extra_data = struct.pack( + '>sys.stderr, (structCentralDir, + stringCentralDir, create_version, + zinfo.create_system, extract_version, zinfo.reserved, + zinfo.flag_bits, zinfo.compress_type, dostime, dosdate, + zinfo.CRC, compress_size, file_size, + len(zinfo.filename), len(extra_data), len(zinfo.comment), + 0, zinfo.internal_attr, zinfo.external_attr, + header_offset) + raise + self.fp.write(centdir) + self.fp.write(filename) + self.fp.write(extra_data) + self.fp.write(zinfo.comment) + + pos2 = self.fp.tell() + # Write end-of-zip-archive record + centDirCount = count + centDirSize = pos2 - pos1 + centDirOffset = pos1 + if (centDirCount >= ZIP_FILECOUNT_LIMIT or + centDirOffset > ZIP64_LIMIT or + centDirSize > ZIP64_LIMIT): + # Need to write the ZIP64 end-of-archive records + zip64endrec = struct.pack( + structEndArchive64, stringEndArchive64, + 44, 45, 45, 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset) + self.fp.write(zip64endrec) + + zip64locrec = struct.pack( + structEndArchive64Locator, + stringEndArchive64Locator, 0, pos2, 1) + self.fp.write(zip64locrec) + centDirCount = min(centDirCount, 0xFFFF) + centDirSize = min(centDirSize, 0xFFFFFFFF) + centDirOffset = min(centDirOffset, 0xFFFFFFFF) + + # check for valid comment length + if len(self.comment) >= ZIP_MAX_COMMENT: + if self.debug > 0: + msg = 'Archive comment is too long; truncating to %d bytes' \ + % ZIP_MAX_COMMENT + self.comment = self.comment[:ZIP_MAX_COMMENT] + + endrec = struct.pack(structEndArchive, stringEndArchive, + 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset, len(self.comment)) + self.fp.write(endrec) + self.fp.write(self.comment) + self.fp.flush() + + if not self._filePassed: + self.fp.close() + self.fp = None + + +class PyZipFile(ZipFile): + """Class to create ZIP archives with Python library files and packages.""" + + def writepy(self, pathname, basename = ""): + """Add all files from "pathname" to the ZIP archive. + + If pathname is a package directory, search the directory and + all package subdirectories recursively for all *.py and enter + the modules into the archive. If pathname is a plain + directory, listdir *.py and enter all modules. Else, pathname + must be a Python *.py file and the module will be put into the + archive. Added modules are always module.pyo or module.pyc. + This method will compile the module.py into module.pyc if + necessary. 
+ """ + dir, name = os.path.split(pathname) + if os.path.isdir(pathname): + initname = os.path.join(pathname, "__init__.py") + if os.path.isfile(initname): + # This is a package directory, add it + if basename: + basename = "%s/%s" % (basename, name) + else: + basename = name + if self.debug: + print "Adding package in", pathname, "as", basename + fname, arcname = self._get_codename(initname[0:-3], basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + dirlist = os.listdir(pathname) + dirlist.remove("__init__.py") + # Add all *.py files and package subdirectories + for filename in dirlist: + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if os.path.isdir(path): + if os.path.isfile(os.path.join(path, "__init__.py")): + # This is a package directory, add it + self.writepy(path, basename) # Recursive call + elif ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + else: + # This is NOT a package directory, add its files at top level + if self.debug: + print "Adding files from directory", pathname + for filename in os.listdir(pathname): + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + else: + if pathname[-3:] != ".py": + raise RuntimeError, \ + 'Files added with writepy() must end with ".py"' + fname, arcname = self._get_codename(pathname[0:-3], basename) + if self.debug: + print "Adding file", arcname + self.write(fname, arcname) + + def _get_codename(self, pathname, basename): + """Return (filename, archivename) for the path. + + Given a module name path, return the correct file path and + archive name, compiling if necessary. For example, given + /python/lib/string, return (/python/lib/string.pyc, string). + """ + file_py = pathname + ".py" + file_pyc = pathname + ".pyc" + file_pyo = pathname + ".pyo" + if os.path.isfile(file_pyo) and \ + os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: + fname = file_pyo # Use .pyo file + elif not os.path.isfile(file_pyc) or \ + os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: + import py_compile + if self.debug: + print "Compiling", file_py + try: + py_compile.compile(file_py, file_pyc, None, True) + except py_compile.PyCompileError,err: + print err.msg + fname = file_pyc + else: + fname = file_pyc + archivename = os.path.split(fname)[1] + if basename: + archivename = "%s/%s" % (basename, archivename) + return (fname, archivename) + + +def main(args = None): + import textwrap + USAGE=textwrap.dedent("""\ + Usage: + zipfile.py -l zipfile.zip # Show listing of a zipfile + zipfile.py -t zipfile.zip # Test if a zipfile is valid + zipfile.py -e zipfile.zip target # Extract zipfile into target dir + zipfile.py -c zipfile.zip src ... 
# Create zipfile from sources + """) + if args is None: + args = sys.argv[1:] + + if not args or args[0] not in ('-l', '-c', '-e', '-t'): + print USAGE + sys.exit(1) + + if args[0] == '-l': + if len(args) != 2: + print USAGE + sys.exit(1) + zf = ZipFile(args[1], 'r') + zf.printdir() + zf.close() + + elif args[0] == '-t': + if len(args) != 2: + print USAGE + sys.exit(1) + zf = ZipFile(args[1], 'r') + zf.testzip() + print "Done testing" + + elif args[0] == '-e': + if len(args) != 3: + print USAGE + sys.exit(1) + + zf = ZipFile(args[1], 'r') + out = args[2] + for path in zf.namelist(): + if path.startswith('./'): + tgt = os.path.join(out, path[2:]) + else: + tgt = os.path.join(out, path) + + tgtdir = os.path.dirname(tgt) + if not os.path.exists(tgtdir): + os.makedirs(tgtdir) + fp = open(tgt, 'wb') + fp.write(zf.read(path)) + fp.close() + zf.close() + + elif args[0] == '-c': + if len(args) < 3: + print USAGE + sys.exit(1) + + def addToZip(zf, path, zippath): + if os.path.isfile(path): + zf.write(path, zippath, ZIP_DEFLATED) + elif os.path.isdir(path): + for nm in os.listdir(path): + addToZip(zf, + os.path.join(path, nm), os.path.join(zippath, nm)) + # else: ignore + + zf = ZipFile(args[1], 'w', allowZip64=True) + for src in args[2:]: + addToZip(zf, src, os.path.basename(src)) + + zf.close() + +if __name__ == "__main__": + main() diff -r 06f1683c8db9 -r 245150080c48 zipfile266.diff --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile266.diff Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,126 @@ +--- /usr/lib/python2.6/zipfile.py 2010-07-05 14:48:38.000000000 +0300 ++++ zipfile.py 2010-11-25 01:39:22.749743303 +0200 +@@ -1,6 +1,7 @@ + """ + Read and write ZIP files. + """ ++# Improved by Chortos-2 in 2009 and 2010 (added bzip2 support) + import struct, os, time, sys, shutil + import binascii, cStringIO, stat + +@@ -11,8 +12,13 @@ + zlib = None + crc32 = binascii.crc32 + ++try: ++ import bz2 # We may need its compression method ++except ImportError: ++ bz2 = None ++ + __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", +- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] ++ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + + class BadZipfile(Exception): + pass +@@ -33,6 +39,7 @@ + # constants for Zip file compression methods + ZIP_STORED = 0 + ZIP_DEFLATED = 8 ++ZIP_BZIP2 = 12 + # Other ZIP compression methods not supported + + # Below are some formats and associated data for reading/writing headers using +@@ -467,6 +474,9 @@ + self.compreadsize = 64*1024 + if self.compress_type == ZIP_DEFLATED: + self.dc = zlib.decompressobj(-15) ++ elif self.compress_type == ZIP_BZIP2: ++ self.dc = bz2.BZ2Decompressor() ++ self.compreadsize = 900000 + + def set_univ_newlines(self, univ_newlines): + self.univ_newlines = univ_newlines +@@ -578,7 +588,7 @@ + if self.compress_type == ZIP_STORED: + lr = len(self.readbuffer) + bytesToRead = min(bytesToRead, size - lr) +- elif self.compress_type == ZIP_DEFLATED: ++ else: + if len(self.readbuffer) > size: + # the user has requested fewer bytes than we've already + # pulled through the decompressor; don't read any more +@@ -608,14 +618,17 @@ + newdata = ''.join(map(self.decrypter, newdata)) + + # decompress newly read data if necessary +- if newdata and self.compress_type == ZIP_DEFLATED: ++ if newdata and self.compress_type != ZIP_STORED: + newdata = self.dc.decompress(newdata) +- self.rawbuffer = self.dc.unconsumed_tail ++ self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' + if self.eof 
and len(self.rawbuffer) == 0: + # we're out of raw bytes (both from the file and + # the local buffer); flush just to make sure the + # decompressor is done +- newdata += self.dc.flush() ++ try: ++ newdata += self.dc.flush() ++ except AttributeError: ++ pass + # prevent decompressor from being used again + self.dc = None + +@@ -641,7 +654,8 @@ + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read "r", write "w" or append "a". +- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). ++ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), ++ or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. +@@ -661,6 +675,10 @@ + if not zlib: + raise RuntimeError,\ + "Compression requires the (missing) zlib module" ++ elif compression == ZIP_BZIP2: ++ if not bz2: ++ raise RuntimeError,\ ++ "Compression requires the (missing) bz2 module" + else: + raise RuntimeError, "That compression method is not supported" + +@@ -987,7 +1005,10 @@ + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError, \ + "Compression requires the (missing) zlib module" +- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): ++ if zinfo.compress_type == ZIP_BZIP2 and not bz2: ++ raise RuntimeError, \ ++ "Compression requires the (missing) bz2 module" ++ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError, \ + "That compression method is not supported" + if zinfo.file_size > ZIP64_LIMIT: +@@ -1048,6 +1069,8 @@ + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) ++ elif zinfo.compress_type == ZIP_BZIP2: ++ cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: +@@ -1105,6 +1128,10 @@ + zlib.DEFLATED, -15) + bytes = co.compress(bytes) + co.flush() + zinfo.compress_size = len(bytes) # Compressed size ++ elif zinfo.compress_type == ZIP_BZIP2: ++ co = bz2.BZ2Compressor() ++ bytes = co.compress(bytes) + co.flush() ++ zinfo.compress_size = len(bytes) # Compressed size + else: + zinfo.compress_size = zinfo.file_size + zinfo.header_offset = self.fp.tell() # Start of header bytes diff -r 06f1683c8db9 -r 245150080c48 zipfile27.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile27.py Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,1468 @@ +""" +Read and write ZIP files. +""" +# Improved by Chortos-2 in 2010 (added bzip2 support) +import struct, os, time, sys, shutil +import binascii, cStringIO, stat +import io +import re + +try: + import zlib # We may need its compression method + crc32 = zlib.crc32 +except ImportError: + zlib = None + crc32 = binascii.crc32 + +try: + import bz2 # We may need its compression method +except ImportError: + bz2 = None + +__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", + "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + +class BadZipfile(Exception): + pass + + +class LargeZipFile(Exception): + """ + Raised when writing a zipfile, the zipfile requires ZIP64 extensions + and those extensions are disabled. 
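A minimal usage sketch of the bzip2 support this changeset adds, assuming the patched copy is importable as zipfile27 (its file name here); the archive and member names are hypothetical:

    import zipfile27 as zipfile

    # write one member with the new method; write() raises RuntimeError if bz2 is missing
    zf = zipfile.ZipFile('example.zip', 'w')
    zf.write('data.txt', compress_type=zipfile.ZIP_BZIP2)
    zf.close()

    # reading needs no special handling: the decompressor is chosen from the stored method
    zf = zipfile.ZipFile('example.zip', 'r')
    print zf.read('data.txt')
    zf.close()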
+ """ + +error = BadZipfile # The exception raised by this module + +ZIP64_LIMIT = (1 << 31) - 1 +ZIP_FILECOUNT_LIMIT = 1 << 16 +ZIP_MAX_COMMENT = (1 << 16) - 1 + +# constants for Zip file compression methods +ZIP_STORED = 0 +ZIP_DEFLATED = 8 +ZIP_BZIP2 = 12 +# Other ZIP compression methods not supported + +# Below are some formats and associated data for reading/writing headers using +# the struct module. The names and structures of headers/records are those used +# in the PKWARE description of the ZIP file format: +# http://www.pkware.com/documents/casestudies/APPNOTE.TXT +# (URL valid as of January 2008) + +# The "end of central directory" structure, magic number, size, and indices +# (section V.I in the format document) +structEndArchive = "<4s4H2LH" +stringEndArchive = "PK\005\006" +sizeEndCentDir = struct.calcsize(structEndArchive) + +_ECD_SIGNATURE = 0 +_ECD_DISK_NUMBER = 1 +_ECD_DISK_START = 2 +_ECD_ENTRIES_THIS_DISK = 3 +_ECD_ENTRIES_TOTAL = 4 +_ECD_SIZE = 5 +_ECD_OFFSET = 6 +_ECD_COMMENT_SIZE = 7 +# These last two indices are not part of the structure as defined in the +# spec, but they are used internally by this module as a convenience +_ECD_COMMENT = 8 +_ECD_LOCATION = 9 + +# The "central directory" structure, magic number, size, and indices +# of entries in the structure (section V.F in the format document) +structCentralDir = "<4s4B4HL2L5H2L" +stringCentralDir = "PK\001\002" +sizeCentralDir = struct.calcsize(structCentralDir) + +# indexes of entries in the central directory structure +_CD_SIGNATURE = 0 +_CD_CREATE_VERSION = 1 +_CD_CREATE_SYSTEM = 2 +_CD_EXTRACT_VERSION = 3 +_CD_EXTRACT_SYSTEM = 4 +_CD_FLAG_BITS = 5 +_CD_COMPRESS_TYPE = 6 +_CD_TIME = 7 +_CD_DATE = 8 +_CD_CRC = 9 +_CD_COMPRESSED_SIZE = 10 +_CD_UNCOMPRESSED_SIZE = 11 +_CD_FILENAME_LENGTH = 12 +_CD_EXTRA_FIELD_LENGTH = 13 +_CD_COMMENT_LENGTH = 14 +_CD_DISK_NUMBER_START = 15 +_CD_INTERNAL_FILE_ATTRIBUTES = 16 +_CD_EXTERNAL_FILE_ATTRIBUTES = 17 +_CD_LOCAL_HEADER_OFFSET = 18 + +# The "local file header" structure, magic number, size, and indices +# (section V.A in the format document) +structFileHeader = "<4s2B4HL2L2H" +stringFileHeader = "PK\003\004" +sizeFileHeader = struct.calcsize(structFileHeader) + +_FH_SIGNATURE = 0 +_FH_EXTRACT_VERSION = 1 +_FH_EXTRACT_SYSTEM = 2 +_FH_GENERAL_PURPOSE_FLAG_BITS = 3 +_FH_COMPRESSION_METHOD = 4 +_FH_LAST_MOD_TIME = 5 +_FH_LAST_MOD_DATE = 6 +_FH_CRC = 7 +_FH_COMPRESSED_SIZE = 8 +_FH_UNCOMPRESSED_SIZE = 9 +_FH_FILENAME_LENGTH = 10 +_FH_EXTRA_FIELD_LENGTH = 11 + +# The "Zip64 end of central directory locator" structure, magic number, and size +structEndArchive64Locator = "<4sLQL" +stringEndArchive64Locator = "PK\x06\x07" +sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) + +# The "Zip64 end of central directory" record, magic number, size, and indices +# (section V.G in the format document) +structEndArchive64 = "<4sQ2H2L4Q" +stringEndArchive64 = "PK\x06\x06" +sizeEndCentDir64 = struct.calcsize(structEndArchive64) + +_CD64_SIGNATURE = 0 +_CD64_DIRECTORY_RECSIZE = 1 +_CD64_CREATE_VERSION = 2 +_CD64_EXTRACT_VERSION = 3 +_CD64_DISK_NUMBER = 4 +_CD64_DISK_NUMBER_START = 5 +_CD64_NUMBER_ENTRIES_THIS_DISK = 6 +_CD64_NUMBER_ENTRIES_TOTAL = 7 +_CD64_DIRECTORY_SIZE = 8 +_CD64_OFFSET_START_CENTDIR = 9 + +def _check_zipfile(fp): + try: + if _EndRecData(fp): + return True # file has correct magic number + except IOError: + pass + return False + +def is_zipfile(filename): + """Quickly see if a file is a ZIP file by checking the magic number. 
+ + The filename argument may be a file or file-like object too. + """ + result = False + try: + if hasattr(filename, "read"): + result = _check_zipfile(fp=filename) + else: + with open(filename, "rb") as fp: + result = _check_zipfile(fp) + except IOError: + pass + return result + +def _EndRecData64(fpin, offset, endrec): + """ + Read the ZIP64 end-of-archive records and use that to update endrec + """ + try: + fpin.seek(offset - sizeEndCentDir64Locator, 2) + except IOError: + # If the seek fails, the file is not large enough to contain a ZIP64 + # end-of-archive record, so just return the end record we were given. + return endrec + + data = fpin.read(sizeEndCentDir64Locator) + sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) + if sig != stringEndArchive64Locator: + return endrec + + if diskno != 0 or disks != 1: + raise BadZipfile("zipfiles that span multiple disks are not supported") + + # Assume no 'zip64 extensible data' + fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) + data = fpin.read(sizeEndCentDir64) + sig, sz, create_version, read_version, disk_num, disk_dir, \ + dircount, dircount2, dirsize, diroffset = \ + struct.unpack(structEndArchive64, data) + if sig != stringEndArchive64: + return endrec + + # Update the original endrec using data from the ZIP64 record + endrec[_ECD_SIGNATURE] = sig + endrec[_ECD_DISK_NUMBER] = disk_num + endrec[_ECD_DISK_START] = disk_dir + endrec[_ECD_ENTRIES_THIS_DISK] = dircount + endrec[_ECD_ENTRIES_TOTAL] = dircount2 + endrec[_ECD_SIZE] = dirsize + endrec[_ECD_OFFSET] = diroffset + return endrec + + +def _EndRecData(fpin): + """Return data from the "End of Central Directory" record, or None. + + The data is a list of the nine items in the ZIP "End of central dir" + record followed by a tenth item, the file seek offset of this record.""" + + # Determine file size + fpin.seek(0, 2) + filesize = fpin.tell() + + # Check to see if this is ZIP file with no archive comment (the + # "end of central directory" structure should be the last item in the + # file if this is the case). + try: + fpin.seek(-sizeEndCentDir, 2) + except IOError: + return None + data = fpin.read() + if data[0:4] == stringEndArchive and data[-2:] == "\000\000": + # the signature is correct and there's no comment, unpack structure + endrec = struct.unpack(structEndArchive, data) + endrec=list(endrec) + + # Append a blank comment and record start offset + endrec.append("") + endrec.append(filesize - sizeEndCentDir) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, -sizeEndCentDir, endrec) + + # Either this is not a ZIP file, or it is a ZIP file with an archive + # comment. Search the end of the file for the "end of central directory" + # record signature. The comment is the last item in the ZIP file and may be + # up to 64K long. It is assumed that the "end of central directory" magic + # number does not appear in the comment. 
+ maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) + fpin.seek(maxCommentStart, 0) + data = fpin.read() + start = data.rfind(stringEndArchive) + if start >= 0: + # found the magic number; attempt to unpack and interpret + recData = data[start:start+sizeEndCentDir] + endrec = list(struct.unpack(structEndArchive, recData)) + comment = data[start+sizeEndCentDir:] + # check that comment length is correct + if endrec[_ECD_COMMENT_SIZE] == len(comment): + # Append the archive comment and start offset + endrec.append(comment) + endrec.append(maxCommentStart + start) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, maxCommentStart + start - filesize, + endrec) + + # Unable to find a valid end of central directory structure + return + + +class ZipInfo (object): + """Class with attributes describing each file in the ZIP archive.""" + + __slots__ = ( + 'orig_filename', + 'filename', + 'date_time', + 'compress_type', + 'comment', + 'extra', + 'create_system', + 'create_version', + 'extract_version', + 'reserved', + 'flag_bits', + 'volume', + 'internal_attr', + 'external_attr', + 'header_offset', + 'CRC', + 'compress_size', + 'file_size', + '_raw_time', + ) + + def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): + self.orig_filename = filename # Original file name in archive + + # Terminate the file name at the first null byte. Null bytes in file + # names are used as tricks by viruses in archives. + null_byte = filename.find(chr(0)) + if null_byte >= 0: + filename = filename[0:null_byte] + # This is used to ensure paths in generated ZIP files always use + # forward slashes as the directory separator, as required by the + # ZIP format specification. + if os.sep != "/" and os.sep in filename: + filename = filename.replace(os.sep, "/") + + self.filename = filename # Normalized file name + self.date_time = date_time # year, month, day, hour, min, sec + # Standard values: + self.compress_type = ZIP_STORED # Type of compression for the file + self.comment = "" # Comment for each file + self.extra = "" # ZIP extra data + if sys.platform == 'win32': + self.create_system = 0 # System which created ZIP archive + else: + # Assume everything else is unix-y + self.create_system = 3 # System which created ZIP archive + self.create_version = 20 # Version which created ZIP archive + self.extract_version = 20 # Version needed to extract archive + self.reserved = 0 # Must be zero + self.flag_bits = 0 # ZIP flag bits + self.volume = 0 # Volume number of file header + self.internal_attr = 0 # Internal attributes + self.external_attr = 0 # External file attributes + # Other attributes are set by class ZipFile: + # header_offset Byte offset to the file header + # CRC CRC-32 of the uncompressed file + # compress_size Size of the compressed file + # file_size Size of the uncompressed file + + def FileHeader(self): + """Return the per-file header as a string.""" + dt = self.date_time + dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] + dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) + if self.flag_bits & 0x08: + # Set these to zero because we write them after the file data + CRC = compress_size = file_size = 0 + else: + CRC = self.CRC + compress_size = self.compress_size + file_size = self.file_size + + extra = self.extra + + if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: + # File is larger than what fits into a 4 byte integer, + # fall back to the ZIP64 extension + fmt = '= 24: + counts = unpack('> 1) & 0x7FFFFFFF) ^ poly + else: + crc 
= ((crc >> 1) & 0x7FFFFFFF) + table[i] = crc + return table + crctable = _GenerateCRCTable() + + def _crc32(self, ch, crc): + """Compute the CRC32 primitive on one byte.""" + return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff] + + def __init__(self, pwd): + self.key0 = 305419896 + self.key1 = 591751049 + self.key2 = 878082192 + for p in pwd: + self._UpdateKeys(p) + + def _UpdateKeys(self, c): + self.key0 = self._crc32(c, self.key0) + self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295 + self.key1 = (self.key1 * 134775813 + 1) & 4294967295 + self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2) + + def __call__(self, c): + """Decrypt a single character.""" + c = ord(c) + k = self.key2 | 2 + c = c ^ (((k * (k^1)) >> 8) & 255) + c = chr(c) + self._UpdateKeys(c) + return c + +class ZipExtFile(io.BufferedIOBase): + """File-like object for reading an archive member. + Is returned by ZipFile.open(). + """ + + # Max size supported by decompressor. + MAX_N = 1 << 31 - 1 + + # Read from compressed files in 4k blocks. + MIN_READ_SIZE = 4096 + + # Search for universal newlines or line chunks. + PATTERN = re.compile(r'^(?P[^\r\n]+)|(?P\n|\r\n?)') + + def __init__(self, fileobj, mode, zipinfo, decrypter=None): + self._fileobj = fileobj + self._decrypter = decrypter + + self._compress_type = zipinfo.compress_type + self._compress_size = zipinfo.compress_size + self._compress_left = zipinfo.compress_size + + if self._compress_type == ZIP_DEFLATED: + self._decompressor = zlib.decompressobj(-15) + elif self._compress_type == ZIP_BZIP2: + self._decompressor = bz2.BZ2Decompressor() + self.MIN_READ_SIZE = 900000 + self._unconsumed = '' + + self._readbuffer = '' + self._offset = 0 + + self._universal = 'U' in mode + self.newlines = None + + # Adjust read size for encrypted files since the first 12 bytes + # are for the encryption/password information. + if self._decrypter is not None: + self._compress_left -= 12 + + self.mode = mode + self.name = zipinfo.filename + + if hasattr(zipinfo, 'CRC'): + self._expected_crc = zipinfo.CRC + self._running_crc = crc32(b'') & 0xffffffff + else: + self._expected_crc = None + + def readline(self, limit=-1): + """Read and return a line from the stream. + + If limit is specified, at most limit bytes will be read. + """ + + if not self._universal and limit < 0: + # Shortcut common case - newline found in buffer. + i = self._readbuffer.find('\n', self._offset) + 1 + if i > 0: + line = self._readbuffer[self._offset: i] + self._offset = i + return line + + if not self._universal: + return io.BufferedIOBase.readline(self, limit) + + line = '' + while limit < 0 or len(line) < limit: + readahead = self.peek(2) + if readahead == '': + return line + + # + # Search for universal newlines or line chunks. + # + # The pattern returns either a line chunk or a newline, but not + # both. Combined with peek(2), we are assured that the sequence + # '\r\n' is always retrieved completely and never split into + # separate newlines - '\r', '\n' due to coincidental readaheads. 
+ # + match = self.PATTERN.search(readahead) + newline = match.group('newline') + if newline is not None: + if self.newlines is None: + self.newlines = [] + if newline not in self.newlines: + self.newlines.append(newline) + self._offset += len(newline) + return line + '\n' + + chunk = match.group('chunk') + if limit >= 0: + chunk = chunk[: limit - len(line)] + + self._offset += len(chunk) + line += chunk + + return line + + def peek(self, n=1): + """Returns buffered bytes without advancing the position.""" + if n > len(self._readbuffer) - self._offset: + chunk = self.read(n) + self._offset -= len(chunk) + + # Return up to 512 bytes to reduce allocation overhead for tight loops. + return self._readbuffer[self._offset: self._offset + 512] + + def readable(self): + return True + + def read(self, n=-1): + """Read and return up to n bytes. + If the argument is omitted, None, or negative, data is read and returned until EOF is reached.. + """ + buf = '' + if n is None: + n = -1 + while True: + if n < 0: + data = self.read1(n) + elif n > len(buf): + data = self.read1(n - len(buf)) + else: + return buf + if len(data) == 0: + return buf + buf += data + + def _update_crc(self, newdata, eof): + # Update the CRC using the given data. + if self._expected_crc is None: + # No need to compute the CRC if we don't have a reference value + return + self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff + # Check the CRC if we're at the end of the file + if eof and self._running_crc != self._expected_crc: + raise BadZipfile("Bad CRC-32 for file %r" % self.name) + + def read1(self, n): + """Read up to n bytes with at most one read() system call.""" + + # Simplify algorithm (branching) by transforming negative n to large n. + if n < 0 or n is None: + n = self.MAX_N + + # Bytes available in read buffer. + len_readbuffer = len(self._readbuffer) - self._offset + + # Read from file. + if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed): + nbytes = n - len_readbuffer - len(self._unconsumed) + nbytes = max(nbytes, self.MIN_READ_SIZE) + nbytes = min(nbytes, self._compress_left) + + data = self._fileobj.read(nbytes) + self._compress_left -= len(data) + + if data and self._decrypter is not None: + data = ''.join(map(self._decrypter, data)) + + if self._compress_type == ZIP_STORED: + self._update_crc(data, eof=(self._compress_left==0)) + self._readbuffer = self._readbuffer[self._offset:] + data + self._offset = 0 + else: + # Prepare deflated bytes for decompression. + self._unconsumed += data + + # Handle unconsumed data. + if (len(self._unconsumed) > 0 and n > len_readbuffer and + self._compress_type == ZIP_DEFLATED): + data = self._decompressor.decompress( + self._unconsumed, + max(n - len_readbuffer, self.MIN_READ_SIZE) + ) + + self._unconsumed = self._decompressor.unconsumed_tail + eof = len(self._unconsumed) == 0 and self._compress_left == 0 + if eof: + data += self._decompressor.flush() + + self._update_crc(data, eof=eof) + self._readbuffer = self._readbuffer[self._offset:] + data + self._offset = 0 + elif (len(self._unconsumed) > 0 and n > len_readbuffer and + self._compress_type == ZIP_BZIP2): + data = self._decompressor.decompress(self._unconsumed) + + self._unconsumed = '' + self._readbuffer = self._readbuffer[self._offset:] + data + self._offset = 0 + + # Read from buffer. + data = self._readbuffer[self._offset: self._offset + n] + self._offset += len(data) + return data + + + +class ZipFile: + """ Class with methods to open, read, write, close, list zip files. 
+ + z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) + + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read "r", write "w" or append "a". + compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), + or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. + + """ + + fp = None # Set here since __del__ checks it + + def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): + """Open the ZIP file with mode read "r", write "w" or append "a".""" + if mode not in ("r", "w", "a"): + raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') + + if compression == ZIP_STORED: + pass + elif compression == ZIP_DEFLATED: + if not zlib: + raise RuntimeError,\ + "Compression requires the (missing) zlib module" + elif compression == ZIP_BZIP2: + if not bz2: + raise RuntimeError,\ + "Compression requires the (missing) bz2 module" + else: + raise RuntimeError, "That compression method is not supported" + + self._allowZip64 = allowZip64 + self._didModify = False + self.debug = 0 # Level of printing: 0 through 3 + self.NameToInfo = {} # Find file info given name + self.filelist = [] # List of ZipInfo instances for archive + self.compression = compression # Method of compression + self.mode = key = mode.replace('b', '')[0] + self.pwd = None + self.comment = '' + + # Check if we were passed a file-like object + if isinstance(file, basestring): + self._filePassed = 0 + self.filename = file + modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} + try: + self.fp = open(file, modeDict[mode]) + except IOError: + if mode == 'a': + mode = key = 'w' + self.fp = open(file, modeDict[mode]) + else: + raise + else: + self._filePassed = 1 + self.fp = file + self.filename = getattr(file, 'name', None) + + if key == 'r': + self._GetContents() + elif key == 'w': + # set the modified flag so central directory gets written + # even if no files are added to the archive + self._didModify = True + elif key == 'a': + try: + # See if file is a zip file + self._RealGetContents() + # seek to start of directory and overwrite + self.fp.seek(self.start_dir, 0) + except BadZipfile: + # file is not a zip file, just append + self.fp.seek(0, 2) + + # set the modified flag so central directory gets written + # even if no files are added to the archive + self._didModify = True + else: + if not self._filePassed: + self.fp.close() + self.fp = None + raise RuntimeError, 'Mode must be "r", "w" or "a"' + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def _GetContents(self): + """Read the directory, making sure we close the file if the format + is bad.""" + try: + self._RealGetContents() + except BadZipfile: + if not self._filePassed: + self.fp.close() + self.fp = None + raise + + def _RealGetContents(self): + """Read in the table of contents for the ZIP file.""" + fp = self.fp + try: + endrec = _EndRecData(fp) + except IOError: + raise BadZipfile("File is not a zip file") + if not endrec: + raise BadZipfile, "File is not a zip file" + if self.debug > 1: + print endrec + size_cd = endrec[_ECD_SIZE] # bytes in central directory + offset_cd = endrec[_ECD_OFFSET] # offset of central directory + self.comment = endrec[_ECD_COMMENT] # archive comment + + # "concat" is zero, unless zip was concatenated to another 
file + concat = endrec[_ECD_LOCATION] - size_cd - offset_cd + if endrec[_ECD_SIGNATURE] == stringEndArchive64: + # If Zip64 extension structures are present, account for them + concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) + + if self.debug > 2: + inferred = concat + offset_cd + print "given, inferred, offset", offset_cd, inferred, concat + # self.start_dir: Position of start of central directory + self.start_dir = offset_cd + concat + fp.seek(self.start_dir, 0) + data = fp.read(size_cd) + fp = cStringIO.StringIO(data) + total = 0 + while total < size_cd: + centdir = fp.read(sizeCentralDir) + if centdir[0:4] != stringCentralDir: + raise BadZipfile, "Bad magic number for central directory" + centdir = struct.unpack(structCentralDir, centdir) + if self.debug > 2: + print centdir + filename = fp.read(centdir[_CD_FILENAME_LENGTH]) + # Create ZipInfo instance to store file information + x = ZipInfo(filename) + x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) + x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) + x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] + (x.create_version, x.create_system, x.extract_version, x.reserved, + x.flag_bits, x.compress_type, t, d, + x.CRC, x.compress_size, x.file_size) = centdir[1:12] + x.volume, x.internal_attr, x.external_attr = centdir[15:18] + # Convert date/time code to (year, month, day, hour, min, sec) + x._raw_time = t + x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, + t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) + + x._decodeExtra() + x.header_offset = x.header_offset + concat + x.filename = x._decodeFilename() + self.filelist.append(x) + self.NameToInfo[x.filename] = x + + # update total bytes read from central directory + total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] + + centdir[_CD_EXTRA_FIELD_LENGTH] + + centdir[_CD_COMMENT_LENGTH]) + + if self.debug > 2: + print "total", total + + + def namelist(self): + """Return a list of file names in the archive.""" + l = [] + for data in self.filelist: + l.append(data.filename) + return l + + def infolist(self): + """Return a list of class ZipInfo instances for files in the + archive.""" + return self.filelist + + def printdir(self): + """Print a table of contents for the zip file.""" + print "%-46s %19s %12s" % ("File Name", "Modified ", "Size") + for zinfo in self.filelist: + date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] + print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size) + + def testzip(self): + """Read all the files and check the CRC.""" + chunk_size = 2 ** 20 + for zinfo in self.filelist: + try: + # Read by chunks, to avoid an OverflowError or a + # MemoryError with very large embedded files. 
+ f = self.open(zinfo.filename, "r") + while f.read(chunk_size): # Check CRC-32 + pass + except BadZipfile: + return zinfo.filename + + def getinfo(self, name): + """Return the instance of ZipInfo given 'name'.""" + info = self.NameToInfo.get(name) + if info is None: + raise KeyError( + 'There is no item named %r in the archive' % name) + + return info + + def setpassword(self, pwd): + """Set default password for encrypted files.""" + self.pwd = pwd + + def read(self, name, pwd=None): + """Return file bytes (as a string) for name.""" + return self.open(name, "r", pwd).read() + + def open(self, name, mode="r", pwd=None): + """Return file-like object for 'name'.""" + if mode not in ("r", "U", "rU"): + raise RuntimeError, 'open() requires mode "r", "U", or "rU"' + if not self.fp: + raise RuntimeError, \ + "Attempt to read ZIP archive that was already closed" + + # Only open a new file for instances where we were not + # given a file object in the constructor + if self._filePassed: + zef_file = self.fp + else: + zef_file = open(self.filename, 'rb') + + # Make sure we have an info object + if isinstance(name, ZipInfo): + # 'name' is already an info object + zinfo = name + else: + # Get info object for name + zinfo = self.getinfo(name) + + zef_file.seek(zinfo.header_offset, 0) + + # Skip the file header: + fheader = zef_file.read(sizeFileHeader) + if fheader[0:4] != stringFileHeader: + raise BadZipfile, "Bad magic number for file header" + + fheader = struct.unpack(structFileHeader, fheader) + fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) + if fheader[_FH_EXTRA_FIELD_LENGTH]: + zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) + + if fname != zinfo.orig_filename: + raise BadZipfile, \ + 'File name in directory "%s" and header "%s" differ.' % ( + zinfo.orig_filename, fname) + + # check for encrypted flag & handle password + is_encrypted = zinfo.flag_bits & 0x1 + zd = None + if is_encrypted: + if not pwd: + pwd = self.pwd + if not pwd: + raise RuntimeError, "File %s is encrypted, " \ + "password required for extraction" % name + + zd = _ZipDecrypter(pwd) + # The first 12 bytes in the cypher stream is an encryption header + # used to strengthen the algorithm. The first 11 bytes are + # completely random, while the 12th contains the MSB of the CRC, + # or the MSB of the file time depending on the header type + # and is used to check the correctness of the password. + bytes = zef_file.read(12) + h = map(zd, bytes[0:12]) + if zinfo.flag_bits & 0x8: + # compare against the file type from extended local headers + check_byte = (zinfo._raw_time >> 8) & 0xff + else: + # compare against the CRC otherwise + check_byte = (zinfo.CRC >> 24) & 0xff + if ord(h[11]) != check_byte: + raise RuntimeError("Bad password for file", name) + + return ZipExtFile(zef_file, mode, zinfo, zd) + + def extract(self, member, path=None, pwd=None): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a ZipInfo object. You can + specify a different directory using `path'. + """ + if not isinstance(member, ZipInfo): + member = self.getinfo(member) + + if path is None: + path = os.getcwd() + + return self._extract_member(member, path, pwd) + + def extractall(self, path=None, members=None, pwd=None): + """Extract all members from the archive to the current working + directory. `path' specifies a different directory to extract to. 
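A short sketch of the extraction API defined here (open/extract/extractall), with hypothetical archive, member and password values; the password is only consulted for encrypted members:

    import zipfile27 as zipfile   # hypothetical import name for the patched module

    zf = zipfile.ZipFile('example.zip', 'r')
    zf.setpassword('secret')                        # default password for encrypted members
    zf.extract('docs/readme.txt', path='/tmp/out')  # one member, full name preserved under path
    zf.extractall('/tmp/out')                       # every member
    data = zf.open('docs/readme.txt').read()        # file-like access without touching the disk
    zf.close()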
+ `members' is optional and must be a subset of the list returned + by namelist(). + """ + if members is None: + members = self.namelist() + + for zipinfo in members: + self.extract(zipinfo, path, pwd) + + def _extract_member(self, member, targetpath, pwd): + """Extract the ZipInfo object 'member' to a physical + file on the path targetpath. + """ + # build the destination pathname, replacing + # forward slashes to platform specific separators. + # Strip trailing path separator, unless it represents the root. + if (targetpath[-1:] in (os.path.sep, os.path.altsep) + and len(os.path.splitdrive(targetpath)[1]) > 1): + targetpath = targetpath[:-1] + + # don't include leading "/" from file name if present + if member.filename[0] == '/': + targetpath = os.path.join(targetpath, member.filename[1:]) + else: + targetpath = os.path.join(targetpath, member.filename) + + targetpath = os.path.normpath(targetpath) + + # Create all upper directories if necessary. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + os.makedirs(upperdirs) + + if member.filename[-1] == '/': + if not os.path.isdir(targetpath): + os.mkdir(targetpath) + return targetpath + + source = self.open(member, pwd=pwd) + target = file(targetpath, "wb") + shutil.copyfileobj(source, target) + source.close() + target.close() + + return targetpath + + def _writecheck(self, zinfo): + """Check for errors before writing a file to the archive.""" + if zinfo.filename in self.NameToInfo: + if self.debug: # Warning for duplicate names + print "Duplicate name:", zinfo.filename + if self.mode not in ("w", "a"): + raise RuntimeError, 'write() requires mode "w" or "a"' + if not self.fp: + raise RuntimeError, \ + "Attempt to write ZIP archive that was already closed" + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError, \ + "Compression requires the (missing) zlib module" + if zinfo.compress_type == ZIP_BZIP2 and not bz2: + raise RuntimeError, \ + "Compression requires the (missing) bz2 module" + if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError, \ + "That compression method is not supported" + if zinfo.file_size > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile("Filesize would require ZIP64 extensions") + if zinfo.header_offset > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile("Zipfile size would require ZIP64 extensions") + + def write(self, filename, arcname=None, compress_type=None): + """Put the bytes from filename into the archive under the name + arcname.""" + if not self.fp: + raise RuntimeError( + "Attempt to write to ZIP archive that was already closed") + + st = os.stat(filename) + isdir = stat.S_ISDIR(st.st_mode) + mtime = time.localtime(st.st_mtime) + date_time = mtime[0:6] + # Create ZipInfo instance to store file information + if arcname is None: + arcname = filename + arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) + while arcname[0] in (os.sep, os.altsep): + arcname = arcname[1:] + if isdir: + arcname += '/' + zinfo = ZipInfo(arcname, date_time) + zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes + if compress_type is None: + zinfo.compress_type = self.compression + else: + zinfo.compress_type = compress_type + + zinfo.file_size = st.st_size + zinfo.flag_bits = 0x00 + zinfo.header_offset = self.fp.tell() # Start of header bytes + + self._writecheck(zinfo) + self._didModify = True + + if isdir: + zinfo.file_size = 0 + zinfo.compress_size = 0 + zinfo.CRC = 0 + 
self.filelist.append(zinfo) + self.NameToInfo[zinfo.filename] = zinfo + self.fp.write(zinfo.FileHeader()) + return + + with open(filename, "rb") as fp: + # Must overwrite CRC and sizes with correct data later + zinfo.CRC = CRC = 0 + zinfo.compress_size = compress_size = 0 + zinfo.file_size = file_size = 0 + self.fp.write(zinfo.FileHeader()) + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) + elif zinfo.compress_type == ZIP_BZIP2: + cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: + buf = fp.read(1024 * 8) + if not buf: + break + file_size = file_size + len(buf) + CRC = crc32(buf, CRC) & 0xffffffff + if cmpr: + buf = cmpr.compress(buf) + compress_size = compress_size + len(buf) + self.fp.write(buf) + if cmpr: + buf = cmpr.flush() + compress_size = compress_size + len(buf) + self.fp.write(buf) + zinfo.compress_size = compress_size + else: + zinfo.compress_size = file_size + zinfo.CRC = CRC + zinfo.file_size = file_size + # Seek backwards and write CRC and file sizes + position = self.fp.tell() # Preserve current position in file + self.fp.seek(zinfo.header_offset + 14, 0) + self.fp.write(struct.pack(" ZIP64_LIMIT \ + or zinfo.compress_size > ZIP64_LIMIT: + extra.append(zinfo.file_size) + extra.append(zinfo.compress_size) + file_size = 0xffffffff + compress_size = 0xffffffff + else: + file_size = zinfo.file_size + compress_size = zinfo.compress_size + + if zinfo.header_offset > ZIP64_LIMIT: + extra.append(zinfo.header_offset) + header_offset = 0xffffffffL + else: + header_offset = zinfo.header_offset + + extra_data = zinfo.extra + if extra: + # Append a ZIP64 field to the extra's + extra_data = struct.pack( + '>sys.stderr, (structCentralDir, + stringCentralDir, create_version, + zinfo.create_system, extract_version, zinfo.reserved, + zinfo.flag_bits, zinfo.compress_type, dostime, dosdate, + zinfo.CRC, compress_size, file_size, + len(zinfo.filename), len(extra_data), len(zinfo.comment), + 0, zinfo.internal_attr, zinfo.external_attr, + header_offset) + raise + self.fp.write(centdir) + self.fp.write(filename) + self.fp.write(extra_data) + self.fp.write(zinfo.comment) + + pos2 = self.fp.tell() + # Write end-of-zip-archive record + centDirCount = count + centDirSize = pos2 - pos1 + centDirOffset = pos1 + if (centDirCount >= ZIP_FILECOUNT_LIMIT or + centDirOffset > ZIP64_LIMIT or + centDirSize > ZIP64_LIMIT): + # Need to write the ZIP64 end-of-archive records + zip64endrec = struct.pack( + structEndArchive64, stringEndArchive64, + 44, 45, 45, 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset) + self.fp.write(zip64endrec) + + zip64locrec = struct.pack( + structEndArchive64Locator, + stringEndArchive64Locator, 0, pos2, 1) + self.fp.write(zip64locrec) + centDirCount = min(centDirCount, 0xFFFF) + centDirSize = min(centDirSize, 0xFFFFFFFF) + centDirOffset = min(centDirOffset, 0xFFFFFFFF) + + # check for valid comment length + if len(self.comment) >= ZIP_MAX_COMMENT: + if self.debug > 0: + msg = 'Archive comment is too long; truncating to %d bytes' \ + % ZIP_MAX_COMMENT + self.comment = self.comment[:ZIP_MAX_COMMENT] + + endrec = struct.pack(structEndArchive, stringEndArchive, + 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset, len(self.comment)) + self.fp.write(endrec) + self.fp.write(self.comment) + self.fp.flush() + + if not self._filePassed: + self.fp.close() + self.fp = None + + +class PyZipFile(ZipFile): + """Class to create ZIP archives with Python library files and packages.""" + + def 
writepy(self, pathname, basename = ""): + """Add all files from "pathname" to the ZIP archive. + + If pathname is a package directory, search the directory and + all package subdirectories recursively for all *.py and enter + the modules into the archive. If pathname is a plain + directory, listdir *.py and enter all modules. Else, pathname + must be a Python *.py file and the module will be put into the + archive. Added modules are always module.pyo or module.pyc. + This method will compile the module.py into module.pyc if + necessary. + """ + dir, name = os.path.split(pathname) + if os.path.isdir(pathname): + initname = os.path.join(pathname, "__init__.py") + if os.path.isfile(initname): + # This is a package directory, add it + if basename: + basename = "%s/%s" % (basename, name) + else: + basename = name + if self.debug: + print "Adding package in", pathname, "as", basename + fname, arcname = self._get_codename(initname[0:-3], basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + dirlist = os.listdir(pathname) + dirlist.remove("__init__.py") + # Add all *.py files and package subdirectories + for filename in dirlist: + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if os.path.isdir(path): + if os.path.isfile(os.path.join(path, "__init__.py")): + # This is a package directory, add it + self.writepy(path, basename) # Recursive call + elif ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + else: + # This is NOT a package directory, add its files at top level + if self.debug: + print "Adding files from directory", pathname + for filename in os.listdir(pathname): + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print "Adding", arcname + self.write(fname, arcname) + else: + if pathname[-3:] != ".py": + raise RuntimeError, \ + 'Files added with writepy() must end with ".py"' + fname, arcname = self._get_codename(pathname[0:-3], basename) + if self.debug: + print "Adding file", arcname + self.write(fname, arcname) + + def _get_codename(self, pathname, basename): + """Return (filename, archivename) for the path. + + Given a module name path, return the correct file path and + archive name, compiling if necessary. For example, given + /python/lib/string, return (/python/lib/string.pyc, string). + """ + file_py = pathname + ".py" + file_pyc = pathname + ".pyc" + file_pyo = pathname + ".pyo" + if os.path.isfile(file_pyo) and \ + os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: + fname = file_pyo # Use .pyo file + elif not os.path.isfile(file_pyc) or \ + os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: + import py_compile + if self.debug: + print "Compiling", file_py + try: + py_compile.compile(file_py, file_pyc, None, True) + except py_compile.PyCompileError,err: + print err.msg + fname = file_pyc + else: + fname = file_pyc + archivename = os.path.split(fname)[1] + if basename: + archivename = "%s/%s" % (basename, archivename) + return (fname, archivename) + + +def main(args = None): + import textwrap + USAGE=textwrap.dedent("""\ + Usage: + zipfile.py -l zipfile.zip # Show listing of a zipfile + zipfile.py -t zipfile.zip # Test if a zipfile is valid + zipfile.py -e zipfile.zip target # Extract zipfile into target dir + zipfile.py -c zipfile.zip src ... 
# Create zipfile from sources + """) + if args is None: + args = sys.argv[1:] + + if not args or args[0] not in ('-l', '-c', '-e', '-t'): + print USAGE + sys.exit(1) + + if args[0] == '-l': + if len(args) != 2: + print USAGE + sys.exit(1) + zf = ZipFile(args[1], 'r') + zf.printdir() + zf.close() + + elif args[0] == '-t': + if len(args) != 2: + print USAGE + sys.exit(1) + zf = ZipFile(args[1], 'r') + badfile = zf.testzip() + if badfile: + print("The following enclosed file is corrupted: {!r}".format(badfile)) + print "Done testing" + + elif args[0] == '-e': + if len(args) != 3: + print USAGE + sys.exit(1) + + zf = ZipFile(args[1], 'r') + out = args[2] + for path in zf.namelist(): + if path.startswith('./'): + tgt = os.path.join(out, path[2:]) + else: + tgt = os.path.join(out, path) + + tgtdir = os.path.dirname(tgt) + if not os.path.exists(tgtdir): + os.makedirs(tgtdir) + with open(tgt, 'wb') as fp: + fp.write(zf.read(path)) + zf.close() + + elif args[0] == '-c': + if len(args) < 3: + print USAGE + sys.exit(1) + + def addToZip(zf, path, zippath): + if os.path.isfile(path): + zf.write(path, zippath, ZIP_DEFLATED) + elif os.path.isdir(path): + for nm in os.listdir(path): + addToZip(zf, + os.path.join(path, nm), os.path.join(zippath, nm)) + # else: ignore + + zf = ZipFile(args[1], 'w', allowZip64=True) + for src in args[2:]: + addToZip(zf, src, os.path.basename(src)) + + zf.close() + +if __name__ == "__main__": + main() diff -r 06f1683c8db9 -r 245150080c48 zipfile271.diff --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile271.diff Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,110 @@ +--- /usr/local/lib/python2.7/zipfile.py 2010-11-29 00:56:38.000000000 +0000 ++++ zipfile271.py 2010-11-29 01:20:17.000000000 +0000 +@@ -1,6 +1,7 @@ + """ + Read and write ZIP files. + """ ++# Improved by Chortos-2 in 2010 (added bzip2 support) + import struct, os, time, sys, shutil + import binascii, cStringIO, stat + import io +@@ -13,8 +14,13 @@ + zlib = None + crc32 = binascii.crc32 + ++try: ++ import bz2 # We may need its compression method ++except ImportError: ++ bz2 = None ++ + __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", +- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] ++ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + + class BadZipfile(Exception): + pass +@@ -35,6 +41,7 @@ + # constants for Zip file compression methods + ZIP_STORED = 0 + ZIP_DEFLATED = 8 ++ZIP_BZIP2 = 12 + # Other ZIP compression methods not supported + + # Below are some formats and associated data for reading/writing headers using +@@ -483,6 +490,9 @@ + + if self._compress_type == ZIP_DEFLATED: + self._decompressor = zlib.decompressobj(-15) ++ elif self._compress_type == ZIP_BZIP2: ++ self._decompressor = bz2.BZ2Decompressor() ++ self.MIN_READ_SIZE = 900000 + self._unconsumed = '' + + self._readbuffer = '' +@@ -641,6 +651,13 @@ + self._update_crc(data, eof=eof) + self._readbuffer = self._readbuffer[self._offset:] + data + self._offset = 0 ++ elif (len(self._unconsumed) > 0 and n > len_readbuffer and ++ self._compress_type == ZIP_BZIP2): ++ data = self._decompressor.decompress(self._unconsumed) ++ ++ self._unconsumed = '' ++ self._readbuffer = self._readbuffer[self._offset:] + data ++ self._offset = 0 + + # Read from buffer. + data = self._readbuffer[self._offset: self._offset + n] +@@ -657,7 +674,8 @@ + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. 
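The bzip2 branches added to the read path (in the 2.6 diff earlier and in the 2.7 hunk above) are deliberately asymmetric to the zlib ones, because bz2.BZ2Decompressor exposes neither unconsumed_tail nor flush(). A small self-contained illustration of that difference, independent of zipfile itself (the sample data is made up):

    import zlib, bz2

    raw = 'x' * 100000

    # deflate: compressobj/decompressobj with wbits -15, as zipfile uses for ZIP_DEFLATED
    c = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
    deflated = c.compress(raw) + c.flush()
    d = zlib.decompressobj(-15)
    out = d.decompress(deflated, 4096)        # capped output leaves unread input in d.unconsumed_tail
    out += d.decompress(d.unconsumed_tail)
    out += d.flush()                          # deflate wants an explicit flush at end of stream

    # bzip2: the decompressor consumes whatever it is fed, keeps no unconsumed_tail,
    # and has no flush() -- hence the cleared raw buffer and the try/except
    # AttributeError in the hunks above
    c2 = bz2.BZ2Compressor()
    bzipped = c2.compress(raw) + c2.flush()
    out2 = bz2.BZ2Decompressor().decompress(bzipped)

    assert out == raw and out2 == raw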
+ mode: The mode can be either read "r", write "w" or append "a". +- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). ++ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), ++ or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. +@@ -677,6 +695,10 @@ + if not zlib: + raise RuntimeError,\ + "Compression requires the (missing) zlib module" ++ elif compression == ZIP_BZIP2: ++ if not bz2: ++ raise RuntimeError,\ ++ "Compression requires the (missing) bz2 module" + else: + raise RuntimeError, "That compression method is not supported" + +@@ -1011,7 +1033,10 @@ + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError, \ + "Compression requires the (missing) zlib module" +- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): ++ if zinfo.compress_type == ZIP_BZIP2 and not bz2: ++ raise RuntimeError, \ ++ "Compression requires the (missing) bz2 module" ++ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError, \ + "That compression method is not supported" + if zinfo.file_size > ZIP64_LIMIT: +@@ -1072,6 +1097,8 @@ + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) ++ elif zinfo.compress_type == ZIP_BZIP2: ++ cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: +@@ -1132,6 +1159,10 @@ + zlib.DEFLATED, -15) + bytes = co.compress(bytes) + co.flush() + zinfo.compress_size = len(bytes) # Compressed size ++ elif zinfo.compress_type == ZIP_BZIP2: ++ co = bz2.BZ2Compressor() ++ bytes = co.compress(bytes) + co.flush() ++ zinfo.compress_size = len(bytes) # Compressed size + else: + zinfo.compress_size = zinfo.file_size + zinfo.header_offset = self.fp.tell() # Start of header bytes diff -r 06f1683c8db9 -r 245150080c48 zipfile31.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile31.py Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,1493 @@ +""" +Read and write ZIP files. + +XXX references to utf-8 need further investigation. +""" +# Improved by Chortos-2 in 2010 (added bzip2 support) +import struct, os, time, sys, shutil +import binascii, io, stat + +try: + import zlib # We may need its compression method + crc32 = zlib.crc32 +except ImportError: + zlib = None + crc32 = binascii.crc32 + +try: + import bz2 # We may need its compression method +except ImportError: + bz2 = None + +__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", + "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + +class BadZipfile(Exception): + pass + + +class LargeZipFile(Exception): + """ + Raised when writing a zipfile, the zipfile requires ZIP64 extensions + and those extensions are disabled. + """ + +error = BadZipfile # The exception raised by this module + +ZIP64_LIMIT = (1 << 31) - 1 +ZIP_FILECOUNT_LIMIT = 1 << 16 +ZIP_MAX_COMMENT = (1 << 16) - 1 + +# constants for Zip file compression methods +ZIP_STORED = 0 +ZIP_DEFLATED = 8 +ZIP_BZIP2 = 12 +# Other ZIP compression methods not supported + +# Below are some formats and associated data for reading/writing headers using +# the struct module. 
The names and structures of headers/records are those used +# in the PKWARE description of the ZIP file format: +# http://www.pkware.com/documents/casestudies/APPNOTE.TXT +# (URL valid as of January 2008) + +# The "end of central directory" structure, magic number, size, and indices +# (section V.I in the format document) +structEndArchive = b"<4s4H2LH" +stringEndArchive = b"PK\005\006" +sizeEndCentDir = struct.calcsize(structEndArchive) + +_ECD_SIGNATURE = 0 +_ECD_DISK_NUMBER = 1 +_ECD_DISK_START = 2 +_ECD_ENTRIES_THIS_DISK = 3 +_ECD_ENTRIES_TOTAL = 4 +_ECD_SIZE = 5 +_ECD_OFFSET = 6 +_ECD_COMMENT_SIZE = 7 +# These last two indices are not part of the structure as defined in the +# spec, but they are used internally by this module as a convenience +_ECD_COMMENT = 8 +_ECD_LOCATION = 9 + +# The "central directory" structure, magic number, size, and indices +# of entries in the structure (section V.F in the format document) +structCentralDir = "<4s4B4HL2L5H2L" +stringCentralDir = b"PK\001\002" +sizeCentralDir = struct.calcsize(structCentralDir) + +# indexes of entries in the central directory structure +_CD_SIGNATURE = 0 +_CD_CREATE_VERSION = 1 +_CD_CREATE_SYSTEM = 2 +_CD_EXTRACT_VERSION = 3 +_CD_EXTRACT_SYSTEM = 4 +_CD_FLAG_BITS = 5 +_CD_COMPRESS_TYPE = 6 +_CD_TIME = 7 +_CD_DATE = 8 +_CD_CRC = 9 +_CD_COMPRESSED_SIZE = 10 +_CD_UNCOMPRESSED_SIZE = 11 +_CD_FILENAME_LENGTH = 12 +_CD_EXTRA_FIELD_LENGTH = 13 +_CD_COMMENT_LENGTH = 14 +_CD_DISK_NUMBER_START = 15 +_CD_INTERNAL_FILE_ATTRIBUTES = 16 +_CD_EXTERNAL_FILE_ATTRIBUTES = 17 +_CD_LOCAL_HEADER_OFFSET = 18 + +# The "local file header" structure, magic number, size, and indices +# (section V.A in the format document) +structFileHeader = "<4s2B4HL2L2H" +stringFileHeader = b"PK\003\004" +sizeFileHeader = struct.calcsize(structFileHeader) + +_FH_SIGNATURE = 0 +_FH_EXTRACT_VERSION = 1 +_FH_EXTRACT_SYSTEM = 2 +_FH_GENERAL_PURPOSE_FLAG_BITS = 3 +_FH_COMPRESSION_METHOD = 4 +_FH_LAST_MOD_TIME = 5 +_FH_LAST_MOD_DATE = 6 +_FH_CRC = 7 +_FH_COMPRESSED_SIZE = 8 +_FH_UNCOMPRESSED_SIZE = 9 +_FH_FILENAME_LENGTH = 10 +_FH_EXTRA_FIELD_LENGTH = 11 + +# The "Zip64 end of central directory locator" structure, magic number, and size +structEndArchive64Locator = "<4sLQL" +stringEndArchive64Locator = b"PK\x06\x07" +sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator) + +# The "Zip64 end of central directory" record, magic number, size, and indices +# (section V.G in the format document) +structEndArchive64 = "<4sQ2H2L4Q" +stringEndArchive64 = b"PK\x06\x06" +sizeEndCentDir64 = struct.calcsize(structEndArchive64) + +_CD64_SIGNATURE = 0 +_CD64_DIRECTORY_RECSIZE = 1 +_CD64_CREATE_VERSION = 2 +_CD64_EXTRACT_VERSION = 3 +_CD64_DISK_NUMBER = 4 +_CD64_DISK_NUMBER_START = 5 +_CD64_NUMBER_ENTRIES_THIS_DISK = 6 +_CD64_NUMBER_ENTRIES_TOTAL = 7 +_CD64_DIRECTORY_SIZE = 8 +_CD64_OFFSET_START_CENTDIR = 9 + +def _check_zipfile(fp): + try: + if _EndRecData(fp): + return True # file has correct magic number + except IOError: + pass + return False + +def is_zipfile(filename): + """Quickly see if a file is a ZIP file by checking the magic number. + + The filename argument may be a file or file-like object too. 
+ """ + result = False + try: + if hasattr(filename, "read"): + result = _check_zipfile(fp=filename) + else: + with open(filename, "rb") as fp: + result = _check_zipfile(fp) + except IOError: + pass + return result + +def _EndRecData64(fpin, offset, endrec): + """ + Read the ZIP64 end-of-archive records and use that to update endrec + """ + try: + fpin.seek(offset - sizeEndCentDir64Locator, 2) + except IOError: + # If the seek fails, the file is not large enough to contain a ZIP64 + # end-of-archive record, so just return the end record we were given. + return endrec + + data = fpin.read(sizeEndCentDir64Locator) + sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data) + if sig != stringEndArchive64Locator: + return endrec + + if diskno != 0 or disks != 1: + raise BadZipfile("zipfiles that span multiple disks are not supported") + + # Assume no 'zip64 extensible data' + fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2) + data = fpin.read(sizeEndCentDir64) + sig, sz, create_version, read_version, disk_num, disk_dir, \ + dircount, dircount2, dirsize, diroffset = \ + struct.unpack(structEndArchive64, data) + if sig != stringEndArchive64: + return endrec + + # Update the original endrec using data from the ZIP64 record + endrec[_ECD_SIGNATURE] = sig + endrec[_ECD_DISK_NUMBER] = disk_num + endrec[_ECD_DISK_START] = disk_dir + endrec[_ECD_ENTRIES_THIS_DISK] = dircount + endrec[_ECD_ENTRIES_TOTAL] = dircount2 + endrec[_ECD_SIZE] = dirsize + endrec[_ECD_OFFSET] = diroffset + return endrec + + +def _EndRecData(fpin): + """Return data from the "End of Central Directory" record, or None. + + The data is a list of the nine items in the ZIP "End of central dir" + record followed by a tenth item, the file seek offset of this record.""" + + # Determine file size + fpin.seek(0, 2) + filesize = fpin.tell() + + # Check to see if this is ZIP file with no archive comment (the + # "end of central directory" structure should be the last item in the + # file if this is the case). + try: + fpin.seek(-sizeEndCentDir, 2) + except IOError: + return None + data = fpin.read() + if data[0:4] == stringEndArchive and data[-2:] == b"\000\000": + # the signature is correct and there's no comment, unpack structure + endrec = struct.unpack(structEndArchive, data) + endrec=list(endrec) + + # Append a blank comment and record start offset + endrec.append(b"") + endrec.append(filesize - sizeEndCentDir) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, -sizeEndCentDir, endrec) + + # Either this is not a ZIP file, or it is a ZIP file with an archive + # comment. Search the end of the file for the "end of central directory" + # record signature. The comment is the last item in the ZIP file and may be + # up to 64K long. It is assumed that the "end of central directory" magic + # number does not appear in the comment. 
+ maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0) + fpin.seek(maxCommentStart, 0) + data = fpin.read() + start = data.rfind(stringEndArchive) + if start >= 0: + # found the magic number; attempt to unpack and interpret + recData = data[start:start+sizeEndCentDir] + endrec = list(struct.unpack(structEndArchive, recData)) + comment = data[start+sizeEndCentDir:] + # check that comment length is correct + if endrec[_ECD_COMMENT_SIZE] == len(comment): + # Append the archive comment and start offset + endrec.append(comment) + endrec.append(maxCommentStart + start) + + # Try to read the "Zip64 end of central directory" structure + return _EndRecData64(fpin, maxCommentStart + start - filesize, + endrec) + + # Unable to find a valid end of central directory structure + return + + +class ZipInfo (object): + """Class with attributes describing each file in the ZIP archive.""" + + __slots__ = ( + 'orig_filename', + 'filename', + 'date_time', + 'compress_type', + 'comment', + 'extra', + 'create_system', + 'create_version', + 'extract_version', + 'reserved', + 'flag_bits', + 'volume', + 'internal_attr', + 'external_attr', + 'header_offset', + 'CRC', + 'compress_size', + 'file_size', + '_raw_time', + ) + + def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): + self.orig_filename = filename # Original file name in archive + + # Terminate the file name at the first null byte. Null bytes in file + # names are used as tricks by viruses in archives. + null_byte = filename.find(chr(0)) + if null_byte >= 0: + filename = filename[0:null_byte] + # This is used to ensure paths in generated ZIP files always use + # forward slashes as the directory separator, as required by the + # ZIP format specification. + if os.sep != "/" and os.sep in filename: + filename = filename.replace(os.sep, "/") + + self.filename = filename # Normalized file name + self.date_time = date_time # year, month, day, hour, min, sec + # Standard values: + self.compress_type = ZIP_STORED # Type of compression for the file + self.comment = b"" # Comment for each file + self.extra = b"" # ZIP extra data + if sys.platform == 'win32': + self.create_system = 0 # System which created ZIP archive + else: + # Assume everything else is unix-y + self.create_system = 3 # System which created ZIP archive + self.create_version = 20 # Version which created ZIP archive + self.extract_version = 20 # Version needed to extract archive + self.reserved = 0 # Must be zero + self.flag_bits = 0 # ZIP flag bits + self.volume = 0 # Volume number of file header + self.internal_attr = 0 # Internal attributes + self.external_attr = 0 # External file attributes + # Other attributes are set by class ZipFile: + # header_offset Byte offset to the file header + # CRC CRC-32 of the uncompressed file + # compress_size Size of the compressed file + # file_size Size of the uncompressed file + + def FileHeader(self): + """Return the per-file header as a string.""" + dt = self.date_time + dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2] + dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2) + if self.flag_bits & 0x08: + # Set these to zero because we write them after the file data + CRC = compress_size = file_size = 0 + else: + CRC = self.CRC + compress_size = self.compress_size + file_size = self.file_size + + extra = self.extra + + if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT: + # File is larger than what fits into a 4 byte integer, + # fall back to the ZIP64 extension + fmt = '= 24: + counts = unpack('> 1) & 0x7FFFFFFF) ^ poly + else: + 
crc = ((crc >> 1) & 0x7FFFFFFF) + table[i] = crc + return table + crctable = _GenerateCRCTable() + + def _crc32(self, ch, crc): + """Compute the CRC32 primitive on one byte.""" + return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ch) & 0xff] + + def __init__(self, pwd): + self.key0 = 305419896 + self.key1 = 591751049 + self.key2 = 878082192 + for p in pwd: + self._UpdateKeys(p) + + def _UpdateKeys(self, c): + self.key0 = self._crc32(c, self.key0) + self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295 + self.key1 = (self.key1 * 134775813 + 1) & 4294967295 + self.key2 = self._crc32((self.key1 >> 24) & 255, self.key2) + + def __call__(self, c): + """Decrypt a single character.""" + assert isinstance(c, int) + k = self.key2 | 2 + c = c ^ (((k * (k^1)) >> 8) & 255) + self._UpdateKeys(c) + return c + +class ZipExtFile: + """File-like object for reading an archive member. + Is returned by ZipFile.open(). + """ + + def __init__(self, fileobj, zipinfo, decrypt=None): + self.fileobj = fileobj + self.decrypter = decrypt + self.bytes_read = 0 + self.rawbuffer = b'' + self.readbuffer = b'' + self.linebuffer = b'' + self.eof = False + self.univ_newlines = False + self.nlSeps = (b"\n", ) + self.lastdiscard = b'' + + self.compress_type = zipinfo.compress_type + self.compress_size = zipinfo.compress_size + + self.closed = False + self.mode = "r" + self.name = zipinfo.filename + + # read from compressed files in 64k blocks + self.compreadsize = 64*1024 + if self.compress_type == ZIP_DEFLATED: + self.dc = zlib.decompressobj(-15) + elif self.compress_type == ZIP_BZIP2: + self.dc = bz2.BZ2Decompressor() + self.compreadsize = 900000 + + if hasattr(zipinfo, 'CRC'): + self._expected_crc = zipinfo.CRC + self._running_crc = crc32(b'') & 0xffffffff + else: + self._expected_crc = None + + def set_univ_newlines(self, univ_newlines): + self.univ_newlines = univ_newlines + + # pick line separator char(s) based on universal newlines flag + self.nlSeps = (b"\n", ) + if self.univ_newlines: + self.nlSeps = (b"\r\n", b"\r", b"\n") + + def __iter__(self): + return self + + def __next__(self): + nextline = self.readline() + if not nextline: + raise StopIteration() + + return nextline + + def close(self): + self.closed = True + + def _checkfornewline(self): + nl, nllen = -1, -1 + if self.linebuffer: + # ugly check for cases where half of an \r\n pair was + # read on the last pass, and the \r was discarded. In this + # case we just throw away the \n at the start of the buffer. + if (self.lastdiscard, self.linebuffer[:1]) == (b'\r', b'\n'): + self.linebuffer = self.linebuffer[1:] + + for sep in self.nlSeps: + nl = self.linebuffer.find(sep) + if nl >= 0: + nllen = len(sep) + return nl, nllen + + return nl, nllen + + def readline(self, size = -1): + """Read a line with approx. size. If size is negative, + read a whole line. 
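[Editor's sketch, not part of the patch] The byte-at-a-time primitive used by _ZipDecrypter above is the ordinary reflected CRC-32 step; driving it with the usual pre- and post-inversion reproduces binascii.crc32, which is a quick way to sanity-check the table generation. The helper names below are illustrative only.

import binascii

def _make_table(poly=0xEDB88320):
    # same construction as _GenerateCRCTable above
    table = []
    for i in range(256):
        crc = i
        for _ in range(8):
            crc = ((crc >> 1) ^ poly) if crc & 1 else (crc >> 1)
        table.append(crc)
    return table

_table = _make_table()

def _crc32_step(ch, crc):
    # same expression as _ZipDecrypter._crc32
    return ((crc >> 8) & 0xFFFFFF) ^ _table[(crc ^ ch) & 0xFF]

data = b"hello, zip"
crc = 0xFFFFFFFF
for byte in data:
    crc = _crc32_step(byte, crc)
assert crc ^ 0xFFFFFFFF == binascii.crc32(data) & 0xFFFFFFFF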
+ """ + if size < 0: + size = sys.maxsize + elif size == 0: + return b'' + + # check for a newline already in buffer + nl, nllen = self._checkfornewline() + + if nl >= 0: + # the next line was already in the buffer + nl = min(nl, size) + else: + # no line break in buffer - try to read more + size -= len(self.linebuffer) + while nl < 0 and size > 0: + buf = self.read(min(size, 100)) + if not buf: + break + self.linebuffer += buf + size -= len(buf) + + # check for a newline in buffer + nl, nllen = self._checkfornewline() + + # we either ran out of bytes in the file, or + # met the specified size limit without finding a newline, + # so return current buffer + if nl < 0: + s = self.linebuffer + self.linebuffer = b'' + return s + + buf = self.linebuffer[:nl] + self.lastdiscard = self.linebuffer[nl:nl + nllen] + self.linebuffer = self.linebuffer[nl + nllen:] + + # line is always returned with \n as newline char (except possibly + # for a final incomplete line in the file, which is handled above). + return buf + b"\n" + + def readlines(self, sizehint = -1): + """Return a list with all (following) lines. The sizehint parameter + is ignored in this implementation. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def _update_crc(self, newdata, eof): + # Update the CRC using the given data. + if self._expected_crc is None: + # No need to compute the CRC if we don't have a reference value + return + self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff + # Check the CRC if we're at the end of the file + if eof and self._running_crc != self._expected_crc: + raise BadZipfile("Bad CRC-32 for file %r" % self.name) + + def read(self, size = None): + # act like file obj and return empty string if size is 0 + if size == 0: + return b'' + + # determine read size + bytesToRead = self.compress_size - self.bytes_read + + # adjust read size for encrypted files since the first 12 bytes + # are for the encryption/password information + if self.decrypter is not None: + bytesToRead -= 12 + + if size is not None and size >= 0: + if self.compress_type == ZIP_STORED: + lr = len(self.readbuffer) + bytesToRead = min(bytesToRead, size - lr) + else: + if len(self.readbuffer) > size: + # the user has requested fewer bytes than we've already + # pulled through the decompressor; don't read any more + bytesToRead = 0 + else: + # user will use up the buffer, so read some more + lr = len(self.rawbuffer) + bytesToRead = min(bytesToRead, self.compreadsize - lr) + + # avoid reading past end of file contents + if bytesToRead + self.bytes_read > self.compress_size: + bytesToRead = self.compress_size - self.bytes_read + + # try to read from file (if necessary) + if bytesToRead > 0: + data = self.fileobj.read(bytesToRead) + self.bytes_read += len(data) + try: + self.rawbuffer += data + except: + print(repr(self.fileobj), repr(self.rawbuffer), + repr(data)) + raise + + # handle contents of raw buffer + if self.rawbuffer: + newdata = self.rawbuffer + self.rawbuffer = b'' + + # decrypt new data if we were given an object to handle that + if newdata and self.decrypter is not None: + newdata = bytes(map(self.decrypter, newdata)) + + # decompress newly read data if necessary + if newdata and self.compress_type != ZIP_STORED: + newdata = self.dc.decompress(newdata) + self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' + if self.eof and len(self.rawbuffer) == 0: + # we're out of raw bytes (both from the file and + # the local 
buffer); flush just to make sure the + # decompressor is done + try: + newdata += self.dc.flush() + except AttributeError: + pass + # prevent decompressor from being used again + self.dc = None + + self._update_crc(newdata, eof=( + self.compress_size == self.bytes_read and + len(self.rawbuffer) == 0)) + self.readbuffer += newdata + + # return what the user asked for + if size is None or len(self.readbuffer) <= size: + data = self.readbuffer + self.readbuffer = b'' + else: + data = self.readbuffer[:size] + self.readbuffer = self.readbuffer[size:] + + return data + + +class ZipFile: + """ Class with methods to open, read, write, close, list zip files. + + z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False) + + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read "r", write "w" or append "a". + compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), + or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. + + """ + + fp = None # Set here since __del__ checks it + + def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): + """Open the ZIP file with mode read "r", write "w" or append "a".""" + if mode not in ("r", "w", "a"): + raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') + + if compression == ZIP_STORED: + pass + elif compression == ZIP_DEFLATED: + if not zlib: + raise RuntimeError( + "Compression requires the (missing) zlib module") + elif compression == ZIP_BZIP2: + if not bz2: + raise RuntimeError( + "Compression requires the (missing) bz2 module") + else: + raise RuntimeError("That compression method is not supported") + + self._allowZip64 = allowZip64 + self._didModify = False + self.debug = 0 # Level of printing: 0 through 3 + self.NameToInfo = {} # Find file info given name + self.filelist = [] # List of ZipInfo instances for archive + self.compression = compression # Method of compression + self.mode = key = mode.replace('b', '')[0] + self.pwd = None + self.comment = b'' + + # Check if we were passed a file-like object + if isinstance(file, str): + # No, it's a filename + self._filePassed = 0 + self.filename = file + modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'} + try: + self.fp = io.open(file, modeDict[mode]) + except IOError: + if mode == 'a': + mode = key = 'w' + self.fp = io.open(file, modeDict[mode]) + else: + raise + else: + self._filePassed = 1 + self.fp = file + self.filename = getattr(file, 'name', None) + + if key == 'r': + self._GetContents() + elif key == 'w': + # set the modified flag so central directory gets written + # even if no files are added to the archive + self._didModify = True + elif key == 'a': + try: + # See if file is a zip file + self._RealGetContents() + # seek to start of directory and overwrite + self.fp.seek(self.start_dir, 0) + except BadZipfile: + # file is not a zip file, just append + self.fp.seek(0, 2) + + # set the modified flag so central directory gets written + # even if no files are added to the archive + self._didModify = True + else: + if not self._filePassed: + self.fp.close() + self.fp = None + raise RuntimeError('Mode must be "r", "w" or "a"') + + def _GetContents(self): + """Read the directory, making sure we close the file if the format + is bad.""" + try: + self._RealGetContents() + except BadZipfile: + if not self._filePassed: + 
self.fp.close() + self.fp = None + raise + + def _RealGetContents(self): + """Read in the table of contents for the ZIP file.""" + fp = self.fp + try: + endrec = _EndRecData(fp) + except IOError: + raise BadZipfile("File is not a zip file") + if not endrec: + raise BadZipfile("File is not a zip file") + if self.debug > 1: + print(endrec) + size_cd = endrec[_ECD_SIZE] # bytes in central directory + offset_cd = endrec[_ECD_OFFSET] # offset of central directory + self.comment = endrec[_ECD_COMMENT] # archive comment + + # "concat" is zero, unless zip was concatenated to another file + concat = endrec[_ECD_LOCATION] - size_cd - offset_cd + if endrec[_ECD_SIGNATURE] == stringEndArchive64: + # If Zip64 extension structures are present, account for them + concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator) + + if self.debug > 2: + inferred = concat + offset_cd + print("given, inferred, offset", offset_cd, inferred, concat) + # self.start_dir: Position of start of central directory + self.start_dir = offset_cd + concat + fp.seek(self.start_dir, 0) + data = fp.read(size_cd) + fp = io.BytesIO(data) + total = 0 + while total < size_cd: + centdir = fp.read(sizeCentralDir) + if centdir[0:4] != stringCentralDir: + raise BadZipfile("Bad magic number for central directory") + centdir = struct.unpack(structCentralDir, centdir) + if self.debug > 2: + print(centdir) + filename = fp.read(centdir[_CD_FILENAME_LENGTH]) + flags = centdir[5] + if flags & 0x800: + # UTF-8 file names extension + filename = filename.decode('utf-8') + else: + # Historical ZIP filename encoding + filename = filename.decode('cp437') + # Create ZipInfo instance to store file information + x = ZipInfo(filename) + x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH]) + x.comment = fp.read(centdir[_CD_COMMENT_LENGTH]) + x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] + (x.create_version, x.create_system, x.extract_version, x.reserved, + x.flag_bits, x.compress_type, t, d, + x.CRC, x.compress_size, x.file_size) = centdir[1:12] + x.volume, x.internal_attr, x.external_attr = centdir[15:18] + # Convert date/time code to (year, month, day, hour, min, sec) + x._raw_time = t + x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, + t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) + + x._decodeExtra() + x.header_offset = x.header_offset + concat + self.filelist.append(x) + self.NameToInfo[x.filename] = x + + # update total bytes read from central directory + total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH] + + centdir[_CD_EXTRA_FIELD_LENGTH] + + centdir[_CD_COMMENT_LENGTH]) + + if self.debug > 2: + print("total", total) + + + def namelist(self): + """Return a list of file names in the archive.""" + l = [] + for data in self.filelist: + l.append(data.filename) + return l + + def infolist(self): + """Return a list of class ZipInfo instances for files in the + archive.""" + return self.filelist + + def printdir(self, file=None): + """Print a table of contents for the zip file.""" + print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"), + file=file) + for zinfo in self.filelist: + date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6] + print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size), + file=file) + + def testzip(self): + """Read all the files and check the CRC.""" + chunk_size = 2 ** 20 + for zinfo in self.filelist: + try: + # Read by chunks, to avoid an OverflowError or a + # MemoryError with very large embedded files. 
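[Editor's sketch, not part of the patch] The date_time decoding in _RealGetContents above inverts the DOS packing used when headers are written, with the caveat that seconds are stored at 2-second resolution:

year, month, day, hour, minute, second = 2010, 12, 19, 23, 12, 11

dosdate = (year - 1980) << 9 | month << 5 | day
dostime = hour << 11 | minute << 5 | (second // 2)

decoded = ((dosdate >> 9) + 1980, (dosdate >> 5) & 0xF, dosdate & 0x1F,
           dostime >> 11, (dostime >> 5) & 0x3F, (dostime & 0x1F) * 2)
assert decoded == (2010, 12, 19, 23, 12, 10)   # 11 s rounds down to 10 s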
+ f = self.open(zinfo.filename, "r") + while f.read(chunk_size): # Check CRC-32 + pass + except BadZipfile: + return zinfo.filename + + def getinfo(self, name): + """Return the instance of ZipInfo given 'name'.""" + info = self.NameToInfo.get(name) + if info is None: + raise KeyError( + 'There is no item named %r in the archive' % name) + + return info + + def setpassword(self, pwd): + """Set default password for encrypted files.""" + assert isinstance(pwd, bytes) + self.pwd = pwd + + def read(self, name, pwd=None): + """Return file bytes (as a string) for name.""" + return self.open(name, "r", pwd).read() + + def open(self, name, mode="r", pwd=None): + """Return file-like object for 'name'.""" + if mode not in ("r", "U", "rU"): + raise RuntimeError('open() requires mode "r", "U", or "rU"') + if not self.fp: + raise RuntimeError( + "Attempt to read ZIP archive that was already closed") + + # Only open a new file for instances where we were not + # given a file object in the constructor + if self._filePassed: + zef_file = self.fp + else: + zef_file = io.open(self.filename, 'rb') + + # Make sure we have an info object + if isinstance(name, ZipInfo): + # 'name' is already an info object + zinfo = name + else: + # Get info object for name + zinfo = self.getinfo(name) + + zef_file.seek(zinfo.header_offset, 0) + + # Skip the file header: + fheader = zef_file.read(sizeFileHeader) + if fheader[0:4] != stringFileHeader: + raise BadZipfile("Bad magic number for file header") + + fheader = struct.unpack(structFileHeader, fheader) + fname = zef_file.read(fheader[_FH_FILENAME_LENGTH]) + if fheader[_FH_EXTRA_FIELD_LENGTH]: + zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH]) + + if fname != zinfo.orig_filename.encode("utf-8"): + raise BadZipfile( + 'File name in directory %r and header %r differ.' + % (zinfo.orig_filename, fname)) + + # check for encrypted flag & handle password + is_encrypted = zinfo.flag_bits & 0x1 + zd = None + if is_encrypted: + if not pwd: + pwd = self.pwd + if not pwd: + raise RuntimeError("File %s is encrypted, " + "password required for extraction" % name) + + zd = _ZipDecrypter(pwd) + # The first 12 bytes in the cypher stream is an encryption header + # used to strengthen the algorithm. The first 11 bytes are + # completely random, while the 12th contains the MSB of the CRC, + # or the MSB of the file time depending on the header type + # and is used to check the correctness of the password. + bytes = zef_file.read(12) + h = list(map(zd, bytes[0:12])) + if zinfo.flag_bits & 0x8: + # compare against the file type from extended local headers + check_byte = (zinfo._raw_time >> 8) & 0xff + else: + # compare against the CRC otherwise + check_byte = (zinfo.CRC >> 24) & 0xff + if h[11] != check_byte: + raise RuntimeError("Bad password for file", name) + + # build and return a ZipExtFile + if zd is None: + zef = ZipExtFile(zef_file, zinfo) + else: + zef = ZipExtFile(zef_file, zinfo, zd) + + # set universal newlines on ZipExtFile if necessary + if "U" in mode: + zef.set_univ_newlines(True) + return zef + + def extract(self, member, path=None, pwd=None): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a ZipInfo object. You can + specify a different directory using `path'. 
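[Editor's sketch, not part of the patch] Typical read-side use of the API defined above, assuming the bundled module copy is in scope; the archive name and target directory are illustrative. Note that this version of ZipFile is not a context manager, so close() is called explicitly.

zf = ZipFile("archive.zip", "r")          # assumes archive.zip exists
try:
    print(zf.namelist())
    data = zf.read(zf.namelist()[0])      # whole member as bytes
    member = zf.open(zf.namelist()[0])    # or stream it via ZipExtFile
    first_kb = member.read(1024)
    zf.extractall("extracted")            # honours the path rules below
finally:
    zf.close()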
+ """ + if not isinstance(member, ZipInfo): + member = self.getinfo(member) + + if path is None: + path = os.getcwd() + + return self._extract_member(member, path, pwd) + + def extractall(self, path=None, members=None, pwd=None): + """Extract all members from the archive to the current working + directory. `path' specifies a different directory to extract to. + `members' is optional and must be a subset of the list returned + by namelist(). + """ + if members is None: + members = self.namelist() + + for zipinfo in members: + self.extract(zipinfo, path, pwd) + + def _extract_member(self, member, targetpath, pwd): + """Extract the ZipInfo object 'member' to a physical + file on the path targetpath. + """ + # build the destination pathname, replacing + # forward slashes to platform specific separators. + # Strip trailing path separator, unless it represents the root. + if (targetpath[-1:] in (os.path.sep, os.path.altsep) + and len(os.path.splitdrive(targetpath)[1]) > 1): + targetpath = targetpath[:-1] + + # don't include leading "/" from file name if present + if member.filename[0] == '/': + targetpath = os.path.join(targetpath, member.filename[1:]) + else: + targetpath = os.path.join(targetpath, member.filename) + + targetpath = os.path.normpath(targetpath) + + # Create all upper directories if necessary. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + os.makedirs(upperdirs) + + if member.filename[-1] == '/': + if not os.path.isdir(targetpath): + os.mkdir(targetpath) + return targetpath + + source = self.open(member, pwd=pwd) + target = open(targetpath, "wb") + shutil.copyfileobj(source, target) + source.close() + target.close() + + return targetpath + + def _writecheck(self, zinfo): + """Check for errors before writing a file to the archive.""" + if zinfo.filename in self.NameToInfo: + if self.debug: # Warning for duplicate names + print("Duplicate name:", zinfo.filename) + if self.mode not in ("w", "a"): + raise RuntimeError('write() requires mode "w" or "a"') + if not self.fp: + raise RuntimeError( + "Attempt to write ZIP archive that was already closed") + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError( + "Compression requires the (missing) zlib module") + if zinfo.compress_type == ZIP_BZIP2 and not bz2: + raise RuntimeError( + "Compression requires the (missing) bz2 module") + if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError("That compression method is not supported") + if zinfo.file_size > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile("Filesize would require ZIP64 extensions") + if zinfo.header_offset > ZIP64_LIMIT: + if not self._allowZip64: + raise LargeZipFile( + "Zipfile size would require ZIP64 extensions") + + def write(self, filename, arcname=None, compress_type=None): + """Put the bytes from filename into the archive under the name + arcname.""" + if not self.fp: + raise RuntimeError( + "Attempt to write to ZIP archive that was already closed") + + st = os.stat(filename) + isdir = stat.S_ISDIR(st.st_mode) + mtime = time.localtime(st.st_mtime) + date_time = mtime[0:6] + # Create ZipInfo instance to store file information + if arcname is None: + arcname = filename + arcname = os.path.normpath(os.path.splitdrive(arcname)[1]) + while arcname[0] in (os.sep, os.altsep): + arcname = arcname[1:] + if isdir: + arcname += '/' + zinfo = ZipInfo(arcname, date_time) + zinfo.external_attr = (st[0] & 0xFFFF) << 16 # Unix attributes + if compress_type is None: + 
zinfo.compress_type = self.compression + else: + zinfo.compress_type = compress_type + + zinfo.file_size = st.st_size + zinfo.flag_bits = 0x00 + zinfo.header_offset = self.fp.tell() # Start of header bytes + + self._writecheck(zinfo) + self._didModify = True + + if isdir: + zinfo.file_size = 0 + zinfo.compress_size = 0 + zinfo.CRC = 0 + self.filelist.append(zinfo) + self.NameToInfo[zinfo.filename] = zinfo + self.fp.write(zinfo.FileHeader()) + return + + with open(filename, "rb") as fp: + # Must overwrite CRC and sizes with correct data later + zinfo.CRC = CRC = 0 + zinfo.compress_size = compress_size = 0 + zinfo.file_size = file_size = 0 + self.fp.write(zinfo.FileHeader()) + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) + elif zinfo.compress_type == ZIP_BZIP2: + cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: + buf = fp.read(1024 * 8) + if not buf: + break + file_size = file_size + len(buf) + CRC = crc32(buf, CRC) & 0xffffffff + if cmpr: + buf = cmpr.compress(buf) + compress_size = compress_size + len(buf) + self.fp.write(buf) + if cmpr: + buf = cmpr.flush() + compress_size = compress_size + len(buf) + self.fp.write(buf) + zinfo.compress_size = compress_size + else: + zinfo.compress_size = file_size + zinfo.CRC = CRC + zinfo.file_size = file_size + # Seek backwards and write CRC and file sizes + position = self.fp.tell() # Preserve current position in file + self.fp.seek(zinfo.header_offset + 14, 0) + self.fp.write(struct.pack(" ZIP64_LIMIT \ + or zinfo.compress_size > ZIP64_LIMIT: + extra.append(zinfo.file_size) + extra.append(zinfo.compress_size) + file_size = 0xffffffff + compress_size = 0xffffffff + else: + file_size = zinfo.file_size + compress_size = zinfo.compress_size + + if zinfo.header_offset > ZIP64_LIMIT: + extra.append(zinfo.header_offset) + header_offset = 0xffffffff + else: + header_offset = zinfo.header_offset + + extra_data = zinfo.extra + if extra: + # Append a ZIP64 field to the extra's + extra_data = struct.pack( + '= ZIP_FILECOUNT_LIMIT or + centDirOffset > ZIP64_LIMIT or + centDirSize > ZIP64_LIMIT): + # Need to write the ZIP64 end-of-archive records + zip64endrec = struct.pack( + structEndArchive64, stringEndArchive64, + 44, 45, 45, 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset) + self.fp.write(zip64endrec) + + zip64locrec = struct.pack( + structEndArchive64Locator, + stringEndArchive64Locator, 0, pos2, 1) + self.fp.write(zip64locrec) + centDirCount = min(centDirCount, 0xFFFF) + centDirSize = min(centDirSize, 0xFFFFFFFF) + centDirOffset = min(centDirOffset, 0xFFFFFFFF) + + # check for valid comment length + if len(self.comment) >= ZIP_MAX_COMMENT: + if self.debug > 0: + msg = 'Archive comment is too long; truncating to %d bytes' \ + % ZIP_MAX_COMMENT + self.comment = self.comment[:ZIP_MAX_COMMENT] + + endrec = struct.pack(structEndArchive, stringEndArchive, + 0, 0, centDirCount, centDirCount, + centDirSize, centDirOffset, len(self.comment)) + self.fp.write(endrec) + self.fp.write(self.comment) + self.fp.flush() + + if not self._filePassed: + self.fp.close() + self.fp = None + + +class PyZipFile(ZipFile): + """Class to create ZIP archives with Python library files and packages.""" + + def writepy(self, pathname, basename=""): + """Add all files from "pathname" to the ZIP archive. + + If pathname is a package directory, search the directory and + all package subdirectories recursively for all *.py and enter + the modules into the archive. 
If pathname is a plain + directory, listdir *.py and enter all modules. Else, pathname + must be a Python *.py file and the module will be put into the + archive. Added modules are always module.pyo or module.pyc. + This method will compile the module.py into module.pyc if + necessary. + """ + dir, name = os.path.split(pathname) + if os.path.isdir(pathname): + initname = os.path.join(pathname, "__init__.py") + if os.path.isfile(initname): + # This is a package directory, add it + if basename: + basename = "%s/%s" % (basename, name) + else: + basename = name + if self.debug: + print("Adding package in", pathname, "as", basename) + fname, arcname = self._get_codename(initname[0:-3], basename) + if self.debug: + print("Adding", arcname) + self.write(fname, arcname) + dirlist = os.listdir(pathname) + dirlist.remove("__init__.py") + # Add all *.py files and package subdirectories + for filename in dirlist: + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if os.path.isdir(path): + if os.path.isfile(os.path.join(path, "__init__.py")): + # This is a package directory, add it + self.writepy(path, basename) # Recursive call + elif ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print("Adding", arcname) + self.write(fname, arcname) + else: + # This is NOT a package directory, add its files at top level + if self.debug: + print("Adding files from directory", pathname) + for filename in os.listdir(pathname): + path = os.path.join(pathname, filename) + root, ext = os.path.splitext(filename) + if ext == ".py": + fname, arcname = self._get_codename(path[0:-3], + basename) + if self.debug: + print("Adding", arcname) + self.write(fname, arcname) + else: + if pathname[-3:] != ".py": + raise RuntimeError( + 'Files added with writepy() must end with ".py"') + fname, arcname = self._get_codename(pathname[0:-3], basename) + if self.debug: + print("Adding file", arcname) + self.write(fname, arcname) + + def _get_codename(self, pathname, basename): + """Return (filename, archivename) for the path. + + Given a module name path, return the correct file path and + archive name, compiling if necessary. For example, given + /python/lib/string, return (/python/lib/string.pyc, string). + """ + file_py = pathname + ".py" + file_pyc = pathname + ".pyc" + file_pyo = pathname + ".pyo" + if os.path.isfile(file_pyo) and \ + os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime: + fname = file_pyo # Use .pyo file + elif not os.path.isfile(file_pyc) or \ + os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime: + import py_compile + if self.debug: + print("Compiling", file_py) + try: + py_compile.compile(file_py, file_pyc, None, True) + except py_compile.PyCompileError as err: + print(err.msg) + fname = file_pyc + else: + fname = file_pyc + archivename = os.path.split(fname)[1] + if basename: + archivename = "%s/%s" % (basename, archivename) + return (fname, archivename) + + +def main(args = None): + import textwrap + USAGE=textwrap.dedent("""\ + Usage: + zipfile.py -l zipfile.zip # Show listing of a zipfile + zipfile.py -t zipfile.zip # Test if a zipfile is valid + zipfile.py -e zipfile.zip target # Extract zipfile into target dir + zipfile.py -c zipfile.zip src ... 
# Create zipfile from sources + """) + if args is None: + args = sys.argv[1:] + + if not args or args[0] not in ('-l', '-c', '-e', '-t'): + print(USAGE) + sys.exit(1) + + if args[0] == '-l': + if len(args) != 2: + print(USAGE) + sys.exit(1) + zf = ZipFile(args[1], 'r') + zf.printdir() + zf.close() + + elif args[0] == '-t': + if len(args) != 2: + print(USAGE) + sys.exit(1) + zf = ZipFile(args[1], 'r') + badfile = zf.testzip() + if badfile: + print("The following enclosed file is corrupted: {!r}".format(badfile)) + print("Done testing") + + elif args[0] == '-e': + if len(args) != 3: + print(USAGE) + sys.exit(1) + + zf = ZipFile(args[1], 'r') + out = args[2] + for path in zf.namelist(): + if path.startswith('./'): + tgt = os.path.join(out, path[2:]) + else: + tgt = os.path.join(out, path) + + tgtdir = os.path.dirname(tgt) + if not os.path.exists(tgtdir): + os.makedirs(tgtdir) + with open(tgt, 'wb') as fp: + fp.write(zf.read(path)) + zf.close() + + elif args[0] == '-c': + if len(args) < 3: + print(USAGE) + sys.exit(1) + + def addToZip(zf, path, zippath): + if os.path.isfile(path): + zf.write(path, zippath, ZIP_DEFLATED) + elif os.path.isdir(path): + for nm in os.listdir(path): + addToZip(zf, + os.path.join(path, nm), os.path.join(zippath, nm)) + # else: ignore + + zf = ZipFile(args[1], 'w', allowZip64=True) + for src in args[2:]: + addToZip(zf, src, os.path.basename(src)) + + zf.close() + +if __name__ == "__main__": + main() diff -r 06f1683c8db9 -r 245150080c48 zipfile313.diff --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/zipfile313.diff Sun Dec 19 23:23:24 2010 +0200 @@ -0,0 +1,126 @@ +--- /usr/local/lib/python3.1/zipfile.py 2010-11-29 00:59:28.000000000 +0000 ++++ zipfile313.py 2010-11-29 01:22:19.000000000 +0000 +@@ -3,6 +3,7 @@ + + XXX references to utf-8 need further investigation. 
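[Editor's sketch, not part of the patch] The write-side counterparts of the API shown earlier, assuming the bundled module copy above is in scope; the file and package names are illustrative only.

zf = ZipFile("out.zip", "w", allowZip64=True)
zf.write("report.txt")                               # stored under its own name
zf.write("report.txt", arcname="docs/report.txt",
         compress_type=ZIP_DEFLATED)                 # renamed and deflated
zf.writestr("generated.txt", b"made on the fly")
zf.close()

pz = PyZipFile("modules.zip", "w")
pz.writepy("somepackage")        # compiles and adds the package's *.pyc/*.pyo
pz.close()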
+ """ ++# Improved by Chortos-2 in 2010 (added bzip2 support) + import struct, os, time, sys, shutil + import binascii, io, stat + +@@ -13,8 +14,13 @@ + zlib = None + crc32 = binascii.crc32 + ++try: ++ import bz2 # We may need its compression method ++except ImportError: ++ bz2 = None ++ + __all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile", +- "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ] ++ "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ] + + class BadZipfile(Exception): + pass +@@ -35,6 +41,7 @@ + # constants for Zip file compression methods + ZIP_STORED = 0 + ZIP_DEFLATED = 8 ++ZIP_BZIP2 = 12 + # Other ZIP compression methods not supported + + # Below are some formats and associated data for reading/writing headers using +@@ -477,6 +484,9 @@ + self.compreadsize = 64*1024 + if self.compress_type == ZIP_DEFLATED: + self.dc = zlib.decompressobj(-15) ++ elif self.compress_type == ZIP_BZIP2: ++ self.dc = bz2.BZ2Decompressor() ++ self.compreadsize = 900000 + + if hasattr(zipinfo, 'CRC'): + self._expected_crc = zipinfo.CRC +@@ -604,7 +614,7 @@ + if self.compress_type == ZIP_STORED: + lr = len(self.readbuffer) + bytesToRead = min(bytesToRead, size - lr) +- elif self.compress_type == ZIP_DEFLATED: ++ else: + if len(self.readbuffer) > size: + # the user has requested fewer bytes than we've already + # pulled through the decompressor; don't read any more +@@ -639,14 +649,17 @@ + newdata = bytes(map(self.decrypter, newdata)) + + # decompress newly read data if necessary +- if newdata and self.compress_type == ZIP_DEFLATED: ++ if newdata and self.compress_type != ZIP_STORED: + newdata = self.dc.decompress(newdata) +- self.rawbuffer = self.dc.unconsumed_tail ++ self.rawbuffer = self.dc.unconsumed_tail if self.compress_type == ZIP_DEFLATED else '' + if self.eof and len(self.rawbuffer) == 0: + # we're out of raw bytes (both from the file and + # the local buffer); flush just to make sure the + # decompressor is done +- newdata += self.dc.flush() ++ try: ++ newdata += self.dc.flush() ++ except AttributeError: ++ pass + # prevent decompressor from being used again + self.dc = None + +@@ -674,7 +687,8 @@ + file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read "r", write "w" or append "a". +- compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib). ++ compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), ++ or ZIP_BZIP2 (requires bz2). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. 
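[Editor's sketch, not part of the patch] A minimal round trip through the new compression method the diff introduces. The module name zipfile313 (the file the diff targets) is an assumption, as is the presence of the bz2 module.

import io
import zipfile313

buf = io.BytesIO()
zf = zipfile313.ZipFile(buf, "w", compression=zipfile313.ZIP_BZIP2)
zf.writestr("packed.txt", b"hello bzip2" * 500)
zf.close()

zf = zipfile313.ZipFile(buf, "r")
info = zf.getinfo("packed.txt")
assert info.compress_type == zipfile313.ZIP_BZIP2 == 12
assert zf.read("packed.txt") == b"hello bzip2" * 500
zf.close()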
+@@ -694,6 +708,10 @@ + if not zlib: + raise RuntimeError( + "Compression requires the (missing) zlib module") ++ elif compression == ZIP_BZIP2: ++ if not bz2: ++ raise RuntimeError( ++ "Compression requires the (missing) bz2 module") + else: + raise RuntimeError("That compression method is not supported") + +@@ -1041,7 +1059,10 @@ + if zinfo.compress_type == ZIP_DEFLATED and not zlib: + raise RuntimeError( + "Compression requires the (missing) zlib module") +- if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED): ++ if zinfo.compress_type == ZIP_BZIP2 and not bz2: ++ raise RuntimeError( ++ "Compression requires the (missing) bz2 module") ++ if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2): + raise RuntimeError("That compression method is not supported") + if zinfo.file_size > ZIP64_LIMIT: + if not self._allowZip64: +@@ -1102,6 +1123,8 @@ + if zinfo.compress_type == ZIP_DEFLATED: + cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, + zlib.DEFLATED, -15) ++ elif zinfo.compress_type == ZIP_BZIP2: ++ cmpr = bz2.BZ2Compressor() + else: + cmpr = None + while 1: +@@ -1162,6 +1185,10 @@ + zlib.DEFLATED, -15) + data = co.compress(data) + co.flush() + zinfo.compress_size = len(data) # Compressed size ++ elif zinfo.compress_type == ZIP_BZIP2: ++ co = bz2.BZ2Compressor() ++ data = co.compress(data) + co.flush() ++ zinfo.compress_size = len(data) # Compressed size + else: + zinfo.compress_size = zinfo.file_size + zinfo.header_offset = self.fp.tell() # Start of header data
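[Editor's sketch, not part of the patch] A closing comparison of the three methods the patched module ends up supporting. As above, the module name zipfile313 and the availability of both zlib and bz2 are assumptions.

import io
import zipfile313

payload = b"the quick brown fox jumps over the lazy dog\n" * 200

for method in (zipfile313.ZIP_STORED, zipfile313.ZIP_DEFLATED,
               zipfile313.ZIP_BZIP2):
    buf = io.BytesIO()
    zf = zipfile313.ZipFile(buf, "w", compression=method)
    zf.writestr("fox.txt", payload)
    zf.close()

    reader = zipfile313.ZipFile(buf, "r")
    assert reader.read("fox.txt") == payload
    print("method", method, "->",
          reader.getinfo("fox.txt").compress_size, "bytes stored")
    reader.close()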