Example #1
def create_zipinfo(filename, mtime=None, dir=False, executable=False, symlink=False,
                   comment=None):
    """Create a instance of `ZipInfo`.

    :param filename: file name of the entry
    :param mtime: modified time of the entry
    :param dir: if `True`, the entry is a directory
    :param executable: if `True`, the entry is an executable file
    :param symlink: if `True`, the entry is a symbolic link
    :param comment: comment of the entry
    """
    from zipfile import ZipInfo, ZIP_DEFLATED, ZIP_STORED
    zipinfo = ZipInfo()

    # The general purpose bit flag 11 is used to denote
    # UTF-8 encoding for path and comment. Only set it for
    # non-ascii files for increased portability.
    # See http://www.pkware.com/documents/casestudies/APPNOTE.TXT
    if any(ord(c) >= 128 for c in filename):
        zipinfo.flag_bits |= 0x0800
    zipinfo.filename = filename.encode('utf-8')

    if mtime is not None:
        mtime = to_datetime(mtime, utc)
        zipinfo.date_time = mtime.utctimetuple()[:6]
        # The "extended-timestamp" extra field is used for the
        # modified time of the entry in unix time. It avoids
        # extracting wrong modified time if non-GMT timezone.
        # See http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip
        #     /proginfo/extra.fld
        zipinfo.extra += struct.pack(
            '<hhBl',
            0x5455,                 # extended-timestamp extra block type
            1 + 4,                  # size of this block
            1,                      # modification time is present
            to_timestamp(mtime))    # time of last modification

    # external_attr is 4 bytes in size. The high order two
    # bytes represent UNIX permission and file type bits,
    # while the low order two contain MS-DOS FAT file
    # attributes, most notably bit 4 marking directories.
    if dir:
        if not zipinfo.filename.endswith('/'):
            zipinfo.filename += '/'
        zipinfo.compress_type = ZIP_STORED
        zipinfo.external_attr = 040755 << 16L       # permissions drwxr-xr-x
        zipinfo.external_attr |= 0x10               # MS-DOS directory flag
    else:
        zipinfo.compress_type = ZIP_DEFLATED
        zipinfo.external_attr = 0644 << 16L         # permissions -rw-r--r--
        if executable:
            zipinfo.external_attr |= 0755 << 16L    # -rwxr-xr-x
        if symlink:
            zipinfo.compress_type = ZIP_STORED
            zipinfo.external_attr |= 0120000 << 16L # symlink file type

    if comment:
        zipinfo.comment = comment.encode('utf-8')

    return zipinfo
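The external_attr layout described in the comments above can be verified directly. A minimal sketch (Python 3, standard library only; names and modes are illustrative) that stores a Unix mode in the high 16 bits and reads it back:

import stat
import zipfile
from io import BytesIO

buf = BytesIO()
with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as zf:
    info = zipfile.ZipInfo('data/report.txt', date_time=(2020, 1, 1, 0, 0, 0))
    info.external_attr = 0o100644 << 16      # regular file, rw-r--r--, in the high two bytes
    zf.writestr(info, b'payload')

with zipfile.ZipFile(buf) as zf:
    mode = zf.infolist()[0].external_attr >> 16
    print(oct(stat.S_IMODE(mode)))            # 0o644  (permission bits)
    print(stat.S_ISREG(mode))                 # True   (file type bits)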
Example #2
 def _add_path(self, path, version, myzip):
     mtime = os.path.getmtime(path)
     info = ZipInfo("versions/%d/%s"%(version, path), Archive.unixtime_to_utcziptime(mtime))
     info.create_system = 3
     info.extra += struct.pack('<HHBl', 0x5455, 5, 1, mtime)
     # http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute
     # make mode without file type, which may be system-specific
     clean_mode = os.stat(path).st_mode & 0007777
     if (os.path.islink(path)):
         # set zip file type to link
         info.external_attr = (Archive.ZIP_EXT_ATTR_LINK | clean_mode) << 16L
         myzip.writestr(info, os.readlink(path))
     elif (os.path.isdir(path)):
         # set zip file type to dir 
         info.external_attr = (Archive.ZIP_EXT_ATTR_DIR | clean_mode) << 16L
         # dos directory flag
         info.external_attr |= 0x10
         # it seems we should have a trailing slash for dirs
         if not(info.filename.endswith('/')): info.filename = "%s/"%(info.filename)
         myzip.writestr(info, '')
         for name in os.listdir(path):
             self._add_path(os.path.join(path, name), version, myzip)
     elif (os.path.isfile(path)):
         info.external_attr = (Archive.ZIP_EXT_ATTR_FILE | clean_mode) << 16L
         myzip.writestr(info, open(path).read())
     else:
         raise Exception()
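Archive.ZIP_EXT_ATTR_LINK/DIR/FILE above are presumably the usual Unix file-type bits (stat.S_IFLNK, S_IFDIR, S_IFREG). A short sketch of the same idea with the stdlib constants, including how a reader can recover the link target afterwards:

import stat
import zipfile
from io import BytesIO

buf = BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    link = zipfile.ZipInfo('link-to-readme')
    link.external_attr = (stat.S_IFLNK | 0o777) << 16   # file type plus permissions
    zf.writestr(link, 'README.txt')                      # the member data is the link target

with zipfile.ZipFile(buf) as zf:
    info = zf.getinfo('link-to-readme')
    if stat.S_ISLNK(info.external_attr >> 16):
        print('symlink ->', zf.read(info).decode())      # symlink -> README.txt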
Example #3
 def write_blob(self, path, blob, compression=ZIP_DEFLATED, mode=0644):
     """Add something to the zip without adding to manifest"""
     zinfo = ZipInfo(path)
     zinfo.external_attr = mode << 16L # set permissions
     zinfo.compress_type = compression
     zinfo.date_time = self.now
     self.zipfile.writestr(zinfo, blob)
Example #4
    def _render_zip(self, req, filename, repos, data):
        """ZIP archive with all the added and/or modified files."""
        new_rev = data['new_rev']
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition',
                        content_disposition('inline', filename + '.zip'))

        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        zipfile = ZipFile(buf, 'w', ZIP_DEFLATED)
        for old_node, new_node, kind, change in repos.get_changes(
            new_path=data['new_path'], new_rev=data['new_rev'],
            old_path=data['old_path'], old_rev=data['old_rev']):
            if kind == Node.FILE and change != Changeset.DELETE:
                assert new_node
                zipinfo = ZipInfo()
                zipinfo.filename = new_node.path.strip('/').encode('utf-8')
                # Note: unicode filenames are not supported by zipfile.
                # UTF-8 is not supported by all Zip tools either,
                # but as some do, I think UTF-8 is the best option here.
                zipinfo.date_time = new_node.last_modified.utctimetuple()[:6]
                zipinfo.external_attr = 0644 << 16L # needed since Python 2.5
                zipinfo.compress_type = ZIP_DEFLATED
                zipfile.writestr(zipinfo, new_node.get_content().read())
        zipfile.close()

        zip_str = buf.getvalue()
        req.send_header("Content-Length", len(zip_str))
        req.end_headers()
        req.write(zip_str)
        raise RequestDone
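The comment above about unicode filenames reflects the old Python 2 zipfile; on Python 3 the module encodes non-ASCII names as UTF-8 and sets flag bit 11 itself, so no manual .encode('utf-8') is needed. A quick check:

import zipfile
from io import BytesIO

buf = BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('docs/café.txt', b'data')        # non-ASCII name, no manual encoding

with zipfile.ZipFile(buf) as zf:
    info = zf.infolist()[0]
    print(info.filename)                         # docs/café.txt
    print(bool(info.flag_bits & 0x800))          # True: UTF-8 flag set automatically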
Example #5
File: vba2zip.py Project: loota/vimplugins
 def add(self, member):
   if (member.isdir):
     return # FIXME Should be able to add empty directories
   info = ZipInfo(member.name)
   info.date_time = member.mtime
   info.external_attr = member.perm << 16L
   self.archive.writestr(info, member.data)
Example #6
File: mathjax.py Project: NiLuJe/calibre
 def add_file(self, zf, path, name):
     with open(path, 'rb') as f:
         raw = f.read()
     self.h.update(raw)
     zi = ZipInfo(name)
     zi.external_attr = 0o444 << 16
     zf.writestr(zi, raw)
Example #7
    def handle(self, *args, **options):
        options['platform'] = options['platform'].lower() # normalize

        if options['platform'] not in ["all", "linux", "macos", "darwin", "windows"]:
            raise CommandError("Unrecognized platform: %s; will include ALL files." % options['platform'])

        # Step 1: recursively add all static files
        kalite_base = os.path.realpath(settings.PROJECT_PATH + "/../")
        files_dict = recursively_add_files(dirpath=kalite_base, **options)

        # Step 2: Add a local_settings.py file.
        #   For distributed servers, this is a copy of the local local_settings.py,
        #   with a few properties (specified as command-line options) overridden
        ls_file = create_local_settings_file(location=os.path.realpath(kalite_base+"/kalite/local_settings.py"), server_type=options['server_type'], locale=options['locale'])
        files_dict[ls_file] = { "dest_path": "kalite/local_settings.py" }

        # Step 3: select output file.
        if options['file']=="__default__":
            options['file'] = create_default_archive_filename(options)

        # Step 4: package into a zip file
        with ZipFile(options['file'], "w", ZIP_DEFLATED if options['compress'] else ZIP_STORED) as zfile:
            for srcpath,fdict in files_dict.items():
                if options['verbosity'] >= 1:
                    print "Adding to zip: %s" % srcpath
                # Add without setting exec perms
                if os.path.splitext(fdict["dest_path"])[1] != ".sh":
                    zfile.write(srcpath, arcname=fdict["dest_path"])
                # Add with exec perms
                else:
                    info = ZipInfo(fdict["dest_path"])
                    info.external_attr = 0755 << 16L # give full access to included file
                    with open(srcpath, "r") as fh:
                        zfile.writestr(info, fh.read())
Example #8
File: utils.py Project: chevah/brink
    def createZipArchive(self, source, destination, exclude=None):
        """
        Create a zip file at `destination` based on files from `source`.
        """
        """
        Create a zip file at `destination` based on files from `source`.
        """
        if exclude is None:
            exclude = []

        source_path = self.fs.join(source)
        parent_path = os.path.dirname(source_path)
        archivename = self.fs.join(destination)
        with closing(ZipFile(archivename, 'w', ZIP_DEFLATED)) as z:
            for root, dirs, files in os.walk(source_path):
                # Write all files.
                for fn in files:
                    if fn in exclude:
                        continue
                    absolute_filename = os.path.join(root, fn)
                    zip_filename = absolute_filename[len(parent_path):]
                    # See http://bugs.python.org/issue1734346
                    # for adding unicode support.
                    z.write(str(absolute_filename), str(zip_filename))

                # For empty folders, we need to create a special ZipInfo
                # entry.
                # 16 works, but some places suggest using 48.
                if not files and not dirs:
                    foldername = root[len(parent_path):] + '/'
                    zip_info = ZipInfo(foldername)
                    zip_info.external_attr = 16
                    z.writestr(zip_info, "")
Example #9
    def compileToZip(self):
        """ 
            Compile the exam as a .zip file
        """
        def cleanpath(path):
            if path=='': 
                return ''
            dirname, basename = os.path.split(path)
            dirname=cleanpath(dirname)
            if basename!='.':
                dirname = os.path.join(dirname,basename)
            return dirname

        f = ZipFile(self.options.output,'w')

        for (dst,src) in self.files.items():
            dst = ZipInfo(cleanpath(dst))
            dst.external_attr = 0o644<<16
            dst.date_time = datetime.datetime.today().timetuple()
            if isinstance(src,basestring):
                f.writestr(dst,open(src,'rb').read())
            else:
                f.writestr(dst,src.read())

        print("Exam created in %s" % os.path.relpath(self.options.output))

        f.close()
Example #10
    def dump(self, fp):
        """Dump the plugin as package into the filepointer or file."""
        from zipfile import ZipFile, ZipInfo
        f = ZipFile(fp, 'w')

        # write all files into a "pdata/" folder
        offset = len(self.path) + 1
        for dirpath, dirnames, filenames in walk(self.path):
            # don't recurse into hidden dirs
            for i in range(len(dirnames)-1, -1, -1):
                if dirnames[i].startswith('.'):
                    del dirnames[i]
            for filename in filenames:
                if filename.endswith('.pyc') or \
                   filename.endswith('.pyo'):
                    continue
                f.write(path.join(dirpath, filename),
                        path.join('pdata', dirpath[offset:], filename))

        # add the package information files
        for name, data in [('ZINE_PLUGIN', self.name),
                           ('ZINE_PACKAGE', PACKAGE_VERSION)]:
            zinfo = ZipInfo(name, localtime(time()))
            zinfo.compress_type = f.compression
            zinfo.external_attr = (33188 & 0xFFFF) << 16L
            f.writestr(zinfo, str(data))

        f.close()
Example #11
def system_specific_zipping(files_dict, zip_file=None, compression=ZIP_DEFLATED, callback=_default_callback_zip):
    """
    Zip up files, adding permissions when appropriate.
    """

    if not zip_file:
        zip_file = tempfile.mkstemp()[1]

    zfile = None
    try:
        zfile = ZipFile(zip_file, 'w', compression)
        for fi, (dest_path, src_path) in enumerate(files_dict.iteritems()):
            if callback:
                callback(src_path, fi, len(files_dict))
            # All platforms besides windows need permissions set.
            ext = os.path.splitext(dest_path)[1]
            if ext not in not_system_specific_scripts(system="windows"):
                zfile.write(src_path, arcname=dest_path)
            # Add with exec perms
            else:
                info = ZipInfo(dest_path)
                info.external_attr = 0775 << ((1 - is_osx()) * 16L) # give full access to included file
                with open(src_path, "r") as fh:
                    zfile.writestr(info, fh.read())
        zfile.close()
    finally:
        if zfile:
            zfile.close()
Example #12
 def add_file(file_path, arcname):
     if not path.islink(file_path):
         archive.write(file_path, arcname, ZIP_DEFLATED)
     else:
         i = ZipInfo(arcname)
         i.create_system = 3
         i.external_attr = 0xA1ED0000
         archive.writestr(i, readlink(file_path))
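The hard-coded 0xA1ED0000 is the same symlink pattern written as one constant: the high 16 bits, 0xA1ED, equal octal 0o120755, i.e. S_IFLNK plus rwxr-xr-x permissions. A quick check:

import stat

mode = 0xA1ED0000 >> 16
print(oct(mode))                  # 0o120755
print(stat.S_ISLNK(mode))         # True
print(oct(stat.S_IMODE(mode)))    # 0o755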
Example #13
    def write(self, filename, arcname=None, compress_type=None):
        with open(filename, 'rb') as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
        zinfo.external_attr = st.st_mode << 16
        zinfo.compress_type = ZIP_DEFLATED
        self.writestr(zinfo, data, compress_type)
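The explicit ZipInfo matters because writestr() called with a bare name gives the entry default attributes (0o600 on current CPython), whereas the pattern above carries the real st_mode through. A small comparison sketch:

import zipfile
from io import BytesIO

buf = BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('plain.txt', b'x')                 # bare name: default attributes
    info = zipfile.ZipInfo('script.sh')
    info.external_attr = 0o100755 << 16            # explicit ZipInfo: executable bits kept
    zf.writestr(info, b'#!/bin/sh\n')

with zipfile.ZipFile(buf) as zf:
    for entry in zf.infolist():
        print(entry.filename, oct(entry.external_attr >> 16))
    # plain.txt 0o600
    # script.sh 0o100755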
Example #14
File: __init__.py Project: sercom/sercom
 def enviar_zip(self, entregas, nombre, extras = None, ignoreFileNames = []):
     buffer = StringIO()
     zip = ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED)
     for e in entregas:
         szip = ZipFile(StringIO(e.archivos), 'r')
         for file in szip.namelist():
             if file not in ignoreFileNames:
                 zipinfo = ZipInfo('%s_%u/%s' % (e.entregador.alumno.padron.encode('ascii'), e.instancia.numero, file))
                 zipinfo.external_attr = 0664 << 16L
                 zip.writestr(zipinfo, szip.read(file))
         szip.close()
     if extras is not None:
         for exk in extras.keys():
             zipinfo = ZipInfo('%s' % exk)
             zipinfo.external_attr = 0774 << 16L
             zip.writestr(zipinfo, extras[exk])
     zip.close()
     download = Downloader(cherrypy.response)
     return download.download_zip(buffer.getvalue(), nombre)
Example #15
 def zip(cls):
     now = time.localtime(time.time())
     zipio = StringIO()
     zipfile = ZipFile(zipio, 'w', ZIP_DEFLATED)
     for file in cls.files:
         zipinfo = ZipInfo(file.filename, date_time=now)
         zipinfo.external_attr = 0644 << 16L
         zipfile.writestr(zipinfo, file.getvalue())
     zipfile.close()
     return zipio
Example #16
 def write_file(filename, data, mode):
     if zip:
         info = ZipInfo(filename)
         info.external_attr = 0o755 << 16
         zip_file.writestr(info, data)
     else:
         f = open(os.path.join(target, filename), mode)
         try:
             f.write(data)
         finally:
             f.close()
Example #17
    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == 'w' and self._file_hashes:
            content = '\n'.join('{},{}={},{}'.format(fname, algorithm, hash_,
                                                     self._file_sizes[fname])
                                for fname, (algorithm, hash_) in self._file_hashes.items())
            content += '\n{},,\n'.format(self.record_path)
            zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
            zinfo.compress_type = ZIP_DEFLATED
            zinfo.external_attr = 0o664 << 16
            self.writestr(zinfo, as_bytes(content))

        super(WheelFile, self).close()
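Each RECORD line built above has the form path,algorithm=digest,size, with an empty digest/size pair for the RECORD file itself. A small sketch of producing one such line; the unpadded URL-safe base64 digest follows the wheel convention, which the surrounding code presumably applies when filling self._file_hashes:

import base64
import hashlib

data = b'print("hi")\n'
digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=').decode()
line = '{},{}={},{}'.format('pkg/__init__.py', 'sha256', digest, len(data))
print(line)    # pkg/__init__.py,sha256=...,12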
Example #18
	def write_file (self, data, filename, description = "") :
		"""Write a file into the archive
		
		:Parameters:
		 - `data` (str) - data to write
		 - `filename` (str) - name of the file in which to store data
		 - `description` (str) - textual description of the data
		"""
		info = ZipInfo(filename)
		info.comment = description
		info.date_time = localtime()[:6]
		info.external_attr = 0644 << 16L
		info.compress_type = ZIP_DEFLATED
		self._elms[filename] = (info,data)
Example #19
def make_dir_entry(name=None, date_time=None, mode=MODE_DIRECTORY):
    tt = date_time.timetuple()
    dir = ZipInfo()

    dir.filename        = name+('/' if name[-1] != '/' else '')
    dir.orig_filename   = dir.filename
    dir.date_time        = date_time.isocalendar() + (tt.tm_hour,
                                                tt.tm_min, tt.tm_sec)
    dir.compress_type   = 0
    dir.create_system   = 0
    dir.create_version  = 20
    dir.extract_version = 10
    dir.external_attr   = mode

    return dir
Example #20
def make_file_entry(name=None, date_time=None, mode=MODE_FILE | MODE_ARCHIVE):
    tt = date_time.timetuple()
    file = ZipInfo()

    file.filename        = name
    file.orig_filename   = file.filename
    file.date_time        = date_time.isocalendar() + (tt.tm_hour,
                                                tt.tm_min, tt.tm_sec)
    file.compress_type   = 8
    file.create_system   = 0
    file.create_version  = 20
    file.extract_version = 20
    file.flag_bits       = 2
    file.external_attr   = mode

    return file
Example #21
def _zip_info(environment, name):
    """
    @type environment : C{str}
    @param environment : The environment name

    @type name : C{str}
    @param name : The name of the file

    @rtype: C{ZipInfo}
    @return: The Zip Info 
    """
    filename = "%s-%s" % (environment, name)
    info = ZipInfo(filename)
    info.date_time = time.localtime(time.time())[:6] #now
    info.external_attr = 0666 << 16L # read-write access to everyone
    info.compress_type = ZIP_DEFLATED
    return info    
def _inject_settings_python(zip_file, settings):

    first = True
    content = 'settings = {'
    for k, v in settings.iteritems():
        if not first:
            content += ','
        content += '\n    \'{}\': \'{}\''.format(k, v)
        first = False
    content += '\n}'

    print 'inserting settings', content

    info = ZipInfo('CloudCanvas/settings.py')
    info.external_attr = 0777 << 16L # give full access to included file

    zip_file.writestr(info, content)
def _inject_settings_nodejs(zip_file, settings):

    first = True
    content = 'module.exports = {'
    for k, v in settings.iteritems():
        if not first:
            content += ','
        content += '\n    "{}": "{}"'.format(k, v)
        first = False
    content += '\n};'

    print 'inserting settings', content

    info = ZipInfo('CloudCanvas/settings.js')
    info.external_attr = 0777 << 16L # give full access to included file

    zip_file.writestr(info, content)
Example #24
File: __init__.py Project: sligodave/akoe
def write_strings(path, items, pwd=None, compression=ZIP_DEFLATED):
    """
    Write a dictionary of strings to a zip file.
    If it already exists, only append the new
    strings or overwrite what is already there.
    """
    old_items = {}
    if exists(path):
        old_items = read_strings(path)
        unlink(path)
    old_items.update(items)
    with ZipFile(path, mode='w', compression=compression) as zf:
        if pwd:
            zf.setpassword(pwd)
        for name, bytes in items.items():
            zi = ZipInfo(name)
            zi.external_attr = 0777 << 16L
            zf.writestr(zi, bytes)   # writestr, not write: the data is in memory, not a file on disk
Example #25
File: deploy.py Project: 314l5926/gimel
def prepare_zip():
    from pkg_resources import resource_filename as resource
    from config import config
    from json import dumps
    logger.info('creating/updating gimel.zip')
    with ZipFile('gimel.zip', 'w', ZIP_DEFLATED) as zipf:
        info = ZipInfo('config.json')
        info.external_attr = 0o664 << 16
        zipf.writestr(info, dumps(config))
        zipf.write(resource('gimel', 'config.py'), 'config.py')
        zipf.write(resource('gimel', 'gimel.py'), 'gimel.py')
        zipf.write(resource('gimel', 'logger.py'), 'logger.py')
        for root, dirs, files in os.walk(resource('gimel', 'vendor')):
            for file in files:
                real_file = os.path.join(root, file)
                relative_file = os.path.relpath(real_file,
                                                resource('gimel', ''))
                zipf.write(real_file, relative_file)
Example #26
File: deploy.py Project: Alephbet/gimel
def prepare_zip():
    from pkg_resources import resource_filename as resource
    from config import config
    from json import dumps

    logger.info("creating/updating gimel.zip")
    with ZipFile("gimel.zip", "w", ZIP_DEFLATED) as zipf:
        info = ZipInfo("config.json")
        info.external_attr = 0o664 << 16
        zipf.writestr(info, dumps(config))
        zipf.write(resource("gimel", "config.py"), "config.py")
        zipf.write(resource("gimel", "gimel.py"), "gimel.py")
        zipf.write(resource("gimel", "logger.py"), "logger.py")
        for root, dirs, files in os.walk(resource("gimel", "vendor")):
            for file in files:
                real_file = os.path.join(root, file)
                relative_file = os.path.relpath(real_file, resource("gimel", ""))
                zipf.write(real_file, relative_file)
Example #27
    def add_object_to_zip(self, obj, parent_path):
        target = self.sm.queryAdapter(obj, IZipExportObject)
        if target is None:
            return

        if target.skip:
            return

        target_path = parent_path + target.filename

        if isinstance(target_path, unicode):
            # we could convert target_path to utf-8, but that would just mask
            # a bug elsewhere; better to raise an exception so we fix the
            # original cause.
            raise ValueError("All paths must be byte strings!")

        data = target.data
        if isinstance(data, unicode):
            data = data.encode('utf-8')

        t = target.timestamp
        if t is None:
            zipinfo = ZipInfo(target_path)

        else:
            date_time = (t.year(), t.month(), t.day(),
                         t.hour(), t.minute(), int(t.second()))
            zipinfo = ZipInfo(target_path, date_time)

        if target.export_as_folder:
            # Set external_attr to 16, otherwise empty folders will not be
            # preserved
            zipinfo.external_attr = 16

        self.zip_file.writestr(zipinfo, data)

        index_row = (target.title.encode('utf-8'),
                     target.meta_label, target_path)
        self.index_txt.write('\t'.join(index_row) + '\n')

        if target.export_as_folder:
            self.recurse(obj, target_path)
Example #28
def create_zip_archive( zipfilename, path, inarchpath ):
    with ZipFile( zipfilename, "w", ZIP_DEFLATED ) as zf:
        for root, dirs, files in os.walk(path):
            archpath = os.path.join( inarchpath, os.path.relpath( root, path ) )
            zf.write( root, archpath )
            for f in files:
                filename = os.path.join( root, f )
                if not os.path.isfile(filename):
                    continue
                archname = os.path.join( archpath, f )
                # this hack is needed to avoid leading ./ in the archive
                while archname.startswith ('./'):
                    archname = archname[2:]
                zi = ZipInfo( archname)
                stat = os.stat( path + '/' + archname )
                zi.external_attr = stat.st_mode << 16L
                # this hack is needed to use the external attributes
                # there is no way to set a zipinfo object directly to an archive
                with open (filename, 'rb') as f:
                    zf.writestr( zi, f.read () )
Example #29
File: pack.py Project: tomoyosiki/taisei
def pack(args):
    with ZipFile(str(args.output), 'w', ZIP_DEFLATED) as zf:
        for path in sorted(args.directory.glob('**/*')):
            if path.name == 'meson.build':
                continue

            relpath = path.relative_to(args.directory)

            if path.is_dir():
                zi = ZipInfo(str(relpath) + "/", datetime.fromtimestamp(path.stat().st_mtime).timetuple())
                zi.compress_type = ZIP_STORED
                zi.external_attr = 0o40755 << 16  # drwxr-xr-x
                zf.writestr(zi, '')
            else:
                zf.write(str(path), str(relpath))

        if args.depfile is not None:
            write_depfile(args.depfile, args.output,
                [args.directory.resolve() / x for x in zf.namelist()] + [str(Path(__file__).resolve())]
            )
Example #30
def zip_them(str, data_content):
    dimap_file     = str + 'dim'
    dimap_data_dir = str + 'data'
    zip_file_name  = 'zipped/' + str + 'zip'
    if not exists(zip_file_name):
        dest_zip_file = ZipFile(zip_file_name, 'w')
        print(("\nZip file created: ", zip_file_name, "."))
        data_content = glob(dimap_data_dir + '/*')   # find content inside the data dir
        zip_info = ZipInfo(dimap_data_dir + '/')     # create the directory inside
        zip_info.external_attr = 0o775 << 16         # the zipfile, set permissions for the directory
        zip_info.create_system = 3                   # tell the zip file that we are on UNIX
        dest_zip_file.writestr(zip_info, '')          # write the info to it
        print(("Writing to zip: ", dimap_file, "..."))
        dest_zip_file.write(dimap_file, dimap_file, ZIP_DEFLATED)  # write the dimap file into
        for item in data_content:
            print(("Writing to zip: ", item, "..."))
            dest_zip_file.write(item, item, ZIP_DEFLATED)          # write all dimap data to the zip file
        dest_zip_file.close()
    else:
        print(("Zip file ", zip_file_name, " exists already; skipping."))
Example #31
File: zipfile.py Project: hsoft/pdfmasher
def zip_add_dir(zipfile, path, prefix=''):
    '''
    Add a directory recursively to the zip file with an optional prefix.
    '''
    if prefix:
        zi = ZipInfo(prefix+'/')
        zi.external_attr = 16
        zipfile.writestr(zi, '')
    cwd = os.path.abspath(os.getcwd())
    try:
        os.chdir(path)
        fp = (prefix + ('/' if prefix else '')).replace('//', '/')
        for f in os.listdir('.'):
            arcname = fp + f
            if os.path.isdir(f):
                zipfile.add_dir(f, prefix=arcname)
            else:
                zipfile.write(f, arcname)
    finally:
        os.chdir(cwd)
Example #32
def create_zip_archive(zipfilename, path, inarchpath):
    with ZipFile(zipfilename, "w", ZIP_DEFLATED) as zf:
        for root, dirs, files in os.walk(path):
            archpath = os.path.join(inarchpath, os.path.relpath(root, path))
            zf.write(root, archpath)
            for f in files:
                filename = os.path.join(root, f)
                if not os.path.isfile(filename):
                    continue
                archname = os.path.join(archpath, f)
                # this hack is needed to avoid leading ./ in the archive
                while archname.startswith('./'):
                    archname = archname[2:]
                zi = ZipInfo(archname)
                stat = os.stat(path + '/' + archname)
                zi.external_attr = stat.st_mode << 16L
                # this hack is needed to use the external attributes
                # there is no way to set a zipinfo object directly to an archive
                with open(filename, 'rb') as f:
                    zf.writestr(zi, f.read())
Example #33
    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == 'w' and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
            writer.writerows((
                (
                    fname,
                    algorithm + "=" + hash_,
                    self._file_sizes[fname]
                )
                for fname, (algorithm, hash_) in self._file_hashes.items()
            ))
            writer.writerow((format(self.record_path), "", ""))
            zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
            zinfo.compress_type = self.compression
            zinfo.external_attr = 0o664 << 16
            self.writestr(zinfo, as_bytes(data.getvalue()))

        ZipFile.close(self)
Example #34
    def compress_string(self, arcname, data):
        if isinstance(data, str):
            data = data.encode("utf-8")
        zinfo = ZipInfo(filename=arcname,
                        date_time=time.localtime(time.time())[:6])
        zinfo.compress_type = ZIP_DEFLATED
        zinfo.external_attr = 0o600 << 16
        zinfo.file_size = len(data)
        zinfo.header_offset = self._data_p
        zinfo.CRC = zlib.crc32(data) & 0xffffffff
        cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
        if cmpr:
            data = cmpr.compress(data) + cmpr.flush()
            zinfo.compress_size = len(data)
        else:
            zinfo.compress_size = zinfo.file_size
        zip64 = zinfo.file_size > ZIP64_LIMIT or zinfo.compress_size > ZIP64_LIMIT

        yield self.__get_data(zinfo.FileHeader(zip64))
        yield self.__get_data(data)
        self._filelist.append(zinfo)
Example #35
File: numbas.py Project: numbas/Numbas
    def compileToZip(self):
        """ 
            Compile the exam as a .zip file
        """
        Path(self.options.output).parent.mkdir(exist_ok=True, parents=True)
        
        f = ZipFile(self.options.output, 'w')

        for (dst, src) in self.files.items():
            dst = ZipInfo(str(Path(dst).relative_to('.')))
            dst.compress_type = zipfile.ZIP_DEFLATED
            dst.external_attr = 0o644<<16
            dst.date_time = datetime.datetime.today().timetuple()
            if isinstance(src, Path):
                f.writestr(dst, src.read_bytes())
            else:
                f.writestr(dst, src.read())

        print("Exam created in %s" % os.path.relpath(self.options.output))

        f.close()
Example #36
    def add_str(self, str_to_add, name, dt=datetime.now()):
        # type: (str,str,datetime) -> None
        """
        Add a string to the archive as zip entry named 'name'
        :param str_to_add: string to add
        :param name: name of the zip.entry
        :param dt: datetime, optional if not specified, current date time is assumed
        :return: None
        """

        # always use forward slash regardless of platform, this allows the calling
        # code to use os.path.join for names
        if os.sep in name:
            name = name.replace(os.sep, "/")

        info = ZipInfo()
        info.filename = self.uuid + "/" + name
        info.external_attr = 0o644 << 16
        info.compress_type = ZIP_DEFLATED
        info.date_time = (dt.year, dt.month, dt.day, dt.hour, dt.minute,
                          dt.second)
        self.zip.writestr(info, str_to_add)
Example #37
    def compress_file(self, filename, arcname):
        st = os.stat(filename)
        zinfo = ZipInfo(arcname, time.localtime(time.time())[:6])
        zinfo.external_attr = (st[0] & 0xFFFF) << 16
        zinfo.compress_type = ZIP_DEFLATED
        zinfo.flag_bits = 0x08
        zinfo.header_offset = self._data_p

        cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
        with open(filename, "rb") as fp:
            zinfo.CRC = crc = 0
            zinfo.compress_size = 0
            zinfo.file_size = 0
            yield self.__get_data(zinfo.FileHeader())

            while 1:
                buf = fp.read(CHUNK_SIZE)
                if not buf:
                    break
                zinfo.file_size += len(buf)
                crc = zlib.crc32(buf, crc) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    zinfo.compress_size += len(buf)
                yield self.__get_data(buf)
        if cmpr:
            buf = cmpr.flush()
            zinfo.compress_size += len(buf)
            yield self.__get_data(buf)
        else:
            zinfo.compress_size = zinfo.file_size
        zinfo.CRC = crc

        zip64 = zinfo.file_size > ZIP64_LIMIT or zinfo.compress_size > ZIP64_LIMIT
        fmt = '<4sLQQ' if zip64 else '<4sLLL'
        data_descriptor = struct.pack(fmt, stringDataDescriptor, zinfo.CRC,
                                      zinfo.compress_size, zinfo.file_size)
        yield self.__get_data(data_descriptor)
        self._filelist.append(zinfo)
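flag_bits = 0x08 defers CRC and sizes to a trailing data descriptor written after the file data; stringDataDescriptor above presumably holds the conventional b'PK\x07\x08' signature. A minimal sketch of that 16-byte record:

import struct
import zlib

data = b'example payload'
crc = zlib.crc32(data) & 0xffffffff
compressed = zlib.compress(data)[2:-4]      # raw deflate stream, no zlib header/trailer

# signature, CRC-32, compressed size, uncompressed size (32-bit form; ZIP64 uses Q for the sizes)
descriptor = struct.pack('<4sLLL', b'PK\x07\x08', crc, len(compressed), len(data))
print(len(descriptor), descriptor[:4])      # 16 b'PK\x07\x08'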
Example #38
def create_archive(tiles, root, max_zoom, meta, ext):
    # expand bounds
    roots = generate_tiles(root, root.z, meta.get("metatile", 1))

    min_x, min_y, max_x, max_y = mercantile.bounds(root)

    for r in roots:
        l_min_x, l_min_y, l_max_x, l_max_y = mercantile.bounds(r)
        min_x = min(min_x, l_min_x)
        max_x = max(max_x, l_max_x)
        min_y = min(min_y, l_min_y)
        max_y = max(max_y, l_max_y)

    meta["minzoom"] = root.z
    meta["maxzoom"] = max_zoom
    meta["bounds"] = [min_x, min_y, max_x, max_y]
    meta["root"] = "{}/{}/{}".format(root.z, root.x, root.y)

    date_time = gmtime()[0:6]
    out = BytesIO()

    with ZipFile(out, "w", ZIP_DEFLATED, allowZip64=True) as archive:
        archive.comment = json.dumps(meta).encode("utf-8")

        for tile, (_, data) in tiles:
            if data is None:
                logger.warn("Not writing tile %d/%d/%d because it has no data",
                            tile.z, tile.x, tile.y)
                continue

            logger.debug("Writing tile %d/%d/%d", tile.z, tile.x, tile.y)

            info = ZipInfo(
                "{}/{}/{}@2x.{}".format(tile.z, tile.x, tile.y, ext),
                date_time)
            info.external_attr = 0o755 << 16
            archive.writestr(info, data, ZIP_DEFLATED)

    return out.getvalue()
Example #39
    def compress_stream(self, arcname, datagen):
        zinfo = ZipInfo(arcname, time.localtime(time.time())[:6])
        zinfo.external_attr = 0o600 << 16
        zinfo.compress_type = ZIP_DEFLATED
        zinfo.flag_bits = 0x08
        zinfo.header_offset = self._data_p

        cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
        zinfo.CRC = crc = 0
        zinfo.compress_size = 0
        zinfo.file_size = 0

        yield self.__get_data(zinfo.FileHeader())

        for buf in datagen:
            if not buf:
                continue
            zinfo.file_size += len(buf)
            crc = zlib.crc32(buf, crc) & 0xffffffff
            if cmpr:
                buf = cmpr.compress(buf)
                zinfo.compress_size += len(buf)
            yield self.__get_data(buf)

        if cmpr:
            buf = cmpr.flush()
            zinfo.compress_size += len(buf)
            yield self.__get_data(buf)
        else:
            zinfo.compress_size = zinfo.file_size
        zinfo.CRC = crc

        zip64 = zinfo.file_size > ZIP64_LIMIT
        fmt = '<4sLQQ' if zip64 else '<4sLLL'
        data_descriptor = struct.pack(fmt, stringDataDescriptor, zinfo.CRC,
                                      zinfo.compress_size, zinfo.file_size)
        yield self.__get_data(data_descriptor)
        self._filelist.append(zinfo)
Example #40
def zip_them(str, data_content):
    dimap_file = str + 'dim'
    dimap_data_dir = str + 'data'
    zip_file_name = 'zipped/' + str + 'zip'
    if not exists(zip_file_name):
        dest_zip_file = ZipFile(zip_file_name, 'w')
        print(("\nZip file created: ", zip_file_name, "."))
        data_content = glob(dimap_data_dir +
                            '/*')  # find content inside the data dir
        zip_info = ZipInfo(dimap_data_dir + '/')  # create the directory inside
        zip_info.external_attr = 0o775 << 16  # the zipfile, set permissions for the directory
        zip_info.create_system = 3  # tell the zip file that we are on UNIX
        dest_zip_file.writestr(zip_info, '')  # write the info to it
        print(("Writing to zip: ", dimap_file, "..."))
        dest_zip_file.write(dimap_file, dimap_file,
                            ZIP_DEFLATED)  # write the dimap file into
        for item in data_content:
            print(("Writing to zip: ", item, "..."))
            dest_zip_file.write(
                item, item,
                ZIP_DEFLATED)  # write all dimap data to the zip file
        dest_zip_file.close()
    else:
        print(("Zip file ", zip_file_name, " exists already; skipping."))
Example #41
def compileToZip(exam, files, options):
    def cleanpath(path):
        if path == '':
            return ''
        dirname, basename = os.path.split(path)
        dirname = cleanpath(dirname)
        if basename != '.':
            dirname = os.path.join(dirname, basename)
        return dirname

    f = ZipFile(options.output, 'w')

    for (dst, src) in files.items():
        dst = ZipInfo(cleanpath(dst))
        dst.external_attr = 0o644 << 16
        dst.date_time = datetime.datetime.today().timetuple()
        if isinstance(src, basestring):
            f.writestr(dst, open(src, 'rb').read())
        else:
            f.writestr(dst, src.read())

    print("Exam created in %s" % os.path.relpath(options.output))

    f.close()
Example #42
    def upload(self, name, expansions=None):
        # Load and customize script:
        script = self._load(name)
        if expansions:
            for key, value in expansions.items():
                script = script.replace(key, value)
        encoded_script = script.encode('utf-8')

        # Hash script
        script_hash = self._hash(encoded_script)

        # Package into zipfile:
        zip_buffer = BytesIO()
        with ZipFile(zip_buffer, 'w') as zip_file:
            zip_info = ZipInfo('index.js')
            zip_info.compress_type = ZIP_DEFLATED
            zip_info.external_attr = 0o0755 << 16
            zip_file.writestr(zip_info, encoded_script)
        zip_buffer.seek(0)

        # Upload to S3:
        zip_path = '%s/%s.zip' % (name, script_hash)
        self._upload(zip_path, zip_buffer)
        return self._bucket, zip_path
Example #43
    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname."""

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if self.compression == ZIP_AUTO:
            ext = os.path.splitext(filename)[1].lower()
            compression = ZIP_STORED if ext and ext[1:] in STORED_FORMATS \
                    else ZIP_DEFLATED
        else:
            compression = self.compression
        if compress_type is None:
            zinfo.compress_type = compression
        else:
            zinfo.compress_type = compress_type

        zinfo.file_size = st.st_size
        zinfo.flag_bits |= 0x08
        zinfo.header_offset = self.tell    # Start of header bytes

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            header = zinfo.FileHeader()
            yield header
            self.tell += len(header)
            return

        fp = open(filename, "rb")
        # Must overwrite CRC and sizes with correct data later
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        zinfo.file_size = file_size = 0
        header = zinfo.FileHeader()
        yield header
        self.tell += len(header)
        if zinfo.compress_type == ZIP_DEFLATED:
            cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
        else:
            cmpr = None
        while 1:
            buf = fp.read(1024 * 8)
            if not buf:
                break
            file_size = file_size + len(buf)
            CRC = crc32(buf, CRC) & 0xffffffff
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size = compress_size + len(buf)
            yield buf
        fp.close()
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            yield buf
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        self.tell += zinfo.compress_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # write the data descriptor
        data_descriptor =  struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
              zinfo.file_size)
        yield data_descriptor
        self.tell += len(data_descriptor)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
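ZIP_AUTO and STORED_FORMATS come from the surrounding project; the idea is simply to skip deflate when the payload is already compressed. A hedged sketch of that selection with an illustrative extension list:

import os
from zipfile import ZIP_DEFLATED, ZIP_STORED

# illustrative set; the real STORED_FORMATS list lives in the surrounding project
ALREADY_COMPRESSED = {'gz', 'bz2', 'xz', 'zip', 'png', 'jpg', 'jpeg', 'mp3', 'mp4'}

def pick_compression(filename):
    ext = os.path.splitext(filename)[1].lower().lstrip('.')
    return ZIP_STORED if ext in ALREADY_COMPRESSED else ZIP_DEFLATED

print(pick_compression('photo.JPG'))    # 0 (ZIP_STORED)
print(pick_compression('notes.txt'))    # 8 (ZIP_DEFLATED)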
Example #44
File: send_zip.py Project: mat2uken/myojin
    def write_iter(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        from zipfile import stat, time, ZipInfo, ZIP_DEFLATED, crc32, struct
        if not self.fp:
            raise RuntimeError(
                "Attempt to write to ZIP archive that was already closed")

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L  # Unix attributes
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type

        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()  # Start of header bytes

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            yield zinfo.FileHeader()
            #self.fp.write(zinfo.FileHeader())
            return

        with open(filename, "rb") as fp:
            # Must overwrite CRC and sizes with correct data later
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            zinfo.file_size = file_size = 0
            cmpr = None
            while 1:
                buf = fp.read(1024 * 8)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
            zinfo.compress_size = file_size
            zinfo.file_size = file_size
            zinfo.CRC = CRC
            #self.fp.write(zinfo.FileHeader())
            yield zinfo.FileHeader()
            fp.seek(0)
            while 1:
                buf = fp.read(1024 * 8)
                if not buf:
                    break
                #self.fp.write(buf)
                yield buf
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
Example #45
File: pack.py Project: retronx-team/taisei
def pack(args):
    nocompress_file = args.directory / '.nocompress'

    if 1:
        comp_type = ZIP_ZSTANDARD
        comp_level = 20
    else:
        comp_type = ZIP_DEFLATED
        comp_level = 9

    try:
        nocompress = list(
            map(re.compile,
                filter(None,
                       nocompress_file.read_text().strip().split('\n'))))
    except FileNotFoundError:
        nocompress = []
        nocompress_file = None

    zkwargs = {}
    if (sys.version_info.major, sys.version_info.minor) >= (3, 7):
        zkwargs['compresslevel'] = comp_level

    dependencies = []

    with ZipFile(str(args.output), 'w', comp_type, **zkwargs) as zf:
        for path in sorted(args.directory.glob('**/*')):
            if path.name[0] == '.' or any(path.match(x) for x in args.exclude):
                continue

            relpath = path.relative_to(args.directory)

            if path.is_dir():
                zi = ZipInfo(
                    str(relpath) + "/",
                    datetime.fromtimestamp(path.stat().st_mtime).timetuple())
                zi.compress_type = ZIP_STORED
                zi.external_attr = 0o40755 << 16  # drwxr-xr-x
                log_file(path, zi.filename)
                zf.writestr(zi, '')
            else:
                dependencies.append(path)

                if path.suffix == '.zst':
                    write_zst_file(zf, path, str(relpath.with_suffix('')))
                else:
                    ctype = comp_type

                    for pattern in nocompress:
                        if pattern.match(str(relpath)):
                            ctype = ZIP_STORED
                            break

                    log_file(path, relpath, ctype)
                    zf.write(str(path), str(relpath), compress_type=ctype)

    if args.depfile is not None:
        if nocompress_file is not None:
            dependencies.append(nocompress_file)
        dependencies.append(Path(__file__).resolve())
        write_depfile(args.depfile, args.output, dependencies)
def test_subnet_connectivity(region,
                             stackid,
                             logical_resource_id,
                             physical_resource_id,
                             endpoints=[['www.amazon.com', "80"]]):
    logger.debug({"test_subnet_connectivity": "starting"})
    error_msg = []
    if region not in clients.get_available_regions('lambda'):
        msg = "Test for %s %s skipped, %s not supprted by lambda" % (
            stackid, logical_resource_id, region)
        logger.warning(msg)
        return {
            "success": True,
            "logical_resource_id": logical_resource_id,
            "physical_resource_id": physical_resource_id,
            "warning":
            "Test skipped, region %s not supprted by lambda" % region,
            "region": region,
            "stackid": stackid
        }
    try:
        function_name = 'test_subnet_%s_%s' % (physical_resource_id,
                                               random_string(8))
        iam_name = function_name.replace('_', '-')
        lambda_client = clients.get("lambda", region=region)
        ec2_client = clients.get("ec2", region=region)
        iam_role_arn = get_iam_role()
        response = ec2_client.describe_subnets(
            SubnetIds=[physical_resource_id])
        logger.debug({"test_subnet_connectivity:describe_subnets": response})
        vpc_id = response['Subnets'][0]['VpcId']
        logger.debug({"test_subnet_connectivity:vpc_id": vpc_id})
        security_group_id = ec2_client.create_security_group(
            GroupName=iam_name, Description=iam_name, VpcId=vpc_id)['GroupId']
        logger.debug(
            {"test_subnet_connectivity:security_group_id": security_group_id})
        now = datetime.now()
        zi_timestamp = (now.year, now.month, now.day, now.hour, now.minute,
                        now.second)
        zinfo = ZipInfo('lambda_function.py', zi_timestamp)
        zinfo.external_attr = 0x0744 << 16
        f = StringIO()
        z = ZipFile(f, 'w', ZIP_DEFLATED)
        z.writestr(zinfo, function_code)
        z.close()
        zip_bytes = f.getvalue()
        logger.debug({
            "test_subnet_connectivity:create_function_input": {
                "FunctionName": function_name,
                "Role": iam_role_arn,
                "Code": {
                    'ZipFile': zip_bytes
                },
                "VpcConfig": {
                    'SubnetIds': [physical_resource_id],
                    'SecurityGroupIds': [security_group_id]
                }
            }
        })
        lambda_function = False
        retries = 0
        max_retries = 4
        while not lambda_function:
            try:
                lambda_function = lambda_client.create_function(
                    FunctionName=function_name,
                    Runtime='python2.7',
                    Role=iam_role_arn,
                    Handler='lambda_function.lambda_handler',
                    Code={'ZipFile': zip_bytes},
                    Timeout=120,
                    MemorySize=128,
                    VpcConfig={
                        'SubnetIds': [physical_resource_id],
                        'SecurityGroupIds': [security_group_id]
                    })
            except botocore.exceptions.ClientError as e:
                codes = [
                    'InvalidParameterValueException', 'AccessDeniedException'
                ]
                logger.debug("boto exception: ", exc_info=1)
                logger.debug(e.response)
                if "The provided subnets contain availability zone Lambda doesn't support." in e.response[
                        'Error']['Message']:
                    raise
                if e.response['Error'][
                        'Code'] in codes and retries < max_retries:
                    logger.debug(
                        {"test_subnet_connectivity:create_function": str(e)},
                        exc_info=1)
                    msg = "role not propagated yet, sleeping a bit and then retrying"
                    logger.debug({
                        "test_subnet_connectivity:create_function_retry":
                        msg
                    })
                    retries += 1
                    sleep(10 * (retries**2))
                else:
                    raise
        for endpoint in endpoints:
            f = StringIO()
            f.write(json.dumps({"address": endpoint[0], "port": endpoint[1]}))
            payload = f.getvalue()
            f.close()
            response = lambda_client.invoke(FunctionName=function_name,
                                            InvocationType='RequestResponse',
                                            Payload=payload)
            response['Payload'] = response['Payload'].read()
            try:
                response['Payload'] = json.loads(response['Payload'])
            except Exception:
                pass
            logger.debug({"test_subnet_connectivity:response": response})

            if response[
                    'StatusCode'] != 200 or 'FunctionError' in response.keys():
                results = {
                    "success": False,
                    "logical_resource_id": logical_resource_id,
                    "physical_resource_id": physical_resource_id,
                    "region": region,
                    "stackid": stackid
                }
                error_msg.append({
                    "endpoint": endpoint,
                    "response": response['Payload']
                })
            elif response['StatusCode'] == 200 and len(error_msg) == 0:
                results = {
                    "success": True,
                    "logical_resource_id": logical_resource_id,
                    "physical_resource_id": physical_resource_id,
                    "region": region,
                    "stackid": stackid
                }
    except Exception as e:
        logger.error({"test_subnet_connectivity": str(e)}, exc_info=1)
        if "subnets contain availability zone Lambda doesn't support" in str(
                e):
            results = {
                "success": True,
                "logical_resource_id": logical_resource_id,
                "physical_resource_id": physical_resource_id,
                "region": region,
                "stackid": stackid
            }
            logger.warning(
                "test skipped as lambda is not supported in the subnet's az. %s"
                % str(results))
        else:
            results = {
                "success": False,
                "logical_resource_id": logical_resource_id,
                "physical_resource_id": physical_resource_id,
                "region": region,
                "stackid": stackid
            }
        error_msg.append({"exception": str(e)})
    finally:
        try:
            lambda_client.delete_function(FunctionName=function_name)
        except Exception:
            logger.warning("Failed to cleanup lambda function", exc_info=1)
        try:
            logger.debug({
                "test_subnet_connectivity:security_group_id":
                security_group_id
            })
            enis = ec2_client.describe_network_interfaces(
                Filters=[{
                    'Name': 'group-id',
                    'Values': [security_group_id]
                }])
            for eni in enis['NetworkInterfaces']:
                if 'Attachment' in eni.keys():
                    logger.debug("Detaching ENI...")
                    ec2_client.detach_network_interface(
                        AttachmentId=eni['Attachment']['AttachmentId'])
                    while 'Attachment' in ec2_client.describe_network_interfaces(
                            NetworkInterfaceIds=[eni['NetworkInterfaceId']]
                    )['NetworkInterfaces'][0].keys():
                        logger.debug(
                            "eni still attached, waiting 5 seconds...")
                        sleep(5)
                logger.debug("Deleting ENI %s" % eni['NetworkInterfaceId'])
                ec2_client.delete_network_interface(
                    NetworkInterfaceId=eni['NetworkInterfaceId'])
            sg = False
            retries = 0
            max_retries = 3
            while not sg:
                try:
                    sg = ec2_client.delete_security_group(
                        GroupId=security_group_id)
                except botocore.exceptions.ClientError as e:
                    msg = "has a dependent object"
                    dep_violation = e.response['Error'][
                        'Code'] == 'DependencyViolation'
                    logger.debug("boto exception: ", exc_info=1)
                    if dep_violation and msg in str(
                            e) and retries < max_retries:
                        msg = "eni deletion not propagated yet, sleeping a bit and then retrying"
                        logger.debug({
                            "test_subnet_connectivity:delete_sg_retry":
                            security_group_id
                        })
                        retries += 1
                        sleep(5 * (retries**2))
                    else:
                        raise
            logger.debug({
                "test_subnet_connectivity:security_group_id_response":
                response
            })
        except Exception:
            logger.warning("Failed to cleanup security group", exc_info=1)
        if len(error_msg) > 0:
            results["error_msg"] = error_msg
        return results
Example #47
def decryptBook(userkey, inpath, outpath):
    if AES is None:
        raise ADEPTError(u"PyCrypto or OpenSSL must be installed.")
    rsa = RSA(userkey)
    with closing(ZipFile(open(inpath, 'rb'))) as inf:
        namelist = set(inf.namelist())
        if 'META-INF/rights.xml' not in namelist or \
           'META-INF/encryption.xml' not in namelist:
            print u"{0:s} is DRM-free.".format(os.path.basename(inpath))
            return 1
        for name in META_NAMES:
            namelist.remove(name)
        try:
            rights = etree.fromstring(inf.read('META-INF/rights.xml'))
            adept = lambda tag: '{%s}%s' % (NSMAP['adept'], tag)
            expr = './/%s' % (adept('encryptedKey'),)
            bookkey = ''.join(rights.findtext(expr))
            if len(bookkey) != 172:
                print u"{0:s} is not a secure Adobe Adept ePub.".format(os.path.basename(inpath))
                return 1
            bookkey = rsa.decrypt(bookkey.decode('base64'))
            # Padded as per RSAES-PKCS1-v1_5
            if bookkey[-17] != '\x00':
                print u"Could not decrypt {0:s}. Wrong key".format(os.path.basename(inpath))
                return 2
            encryption = inf.read('META-INF/encryption.xml')
            decryptor = Decryptor(bookkey[-16:], encryption)
            kwds = dict(compression=ZIP_DEFLATED, allowZip64=False)
            with closing(ZipFile(open(outpath, 'wb'), 'w', **kwds)) as outf:
                zi = ZipInfo('mimetype')
                zi.compress_type=ZIP_STORED
                try:
                    # if the mimetype is present, get its info, including time-stamp
                    oldzi = inf.getinfo('mimetype')
                    # copy across fields to be preserved
                    zi.date_time = oldzi.date_time
                    zi.comment = oldzi.comment
                    zi.extra = oldzi.extra
                    zi.internal_attr = oldzi.internal_attr
                    # external attributes are dependent on the create system, so copy both.
                    zi.external_attr = oldzi.external_attr
                    zi.create_system = oldzi.create_system
                except:
                    pass
                outf.writestr(zi, inf.read('mimetype'))
                for path in namelist:
                    data = inf.read(path)
                    zi = ZipInfo(path)
                    zi.compress_type=ZIP_DEFLATED
                    try:
                        # get the file info, including time-stamp
                        oldzi = inf.getinfo(path)
                        # copy across useful fields
                        zi.date_time = oldzi.date_time
                        zi.comment = oldzi.comment
                        zi.extra = oldzi.extra
                        zi.internal_attr = oldzi.internal_attr
                        # external attributes are dependent on the create system, so copy both.
                        zi.external_attr = oldzi.external_attr
                        zi.create_system = oldzi.create_system
                    except:
                        pass
                    outf.writestr(zi, decryptor.decrypt(path, data))
        except:
            print u"Could not decrypt {0:s} because of an exception:\n{1:s}".format(os.path.basename(inpath), traceback.format_exc())
            return 2
    return 0
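The bookkey[-17] != '\x00' check above works because RSAES-PKCS1-v1_5 wraps the 16-byte AES book key as 0x00 0x02 <nonzero padding> 0x00 <key>, so the zero separator sits 17 bytes from the end of the decrypted block. A small Python 2 sketch of that manual unpadding (the helper name is illustrative, not from the snippet):

def unpad_pkcs1_v1_5(block, payload_len=16):
    # RSAES-PKCS1-v1_5 layout: 0x00 0x02 <random nonzero padding> 0x00 <payload>.
    # For a 16-byte payload the 0x00 separator is the 17th byte from the end.
    if block[-(payload_len + 1)] != '\x00':
        raise ValueError("incorrect RSA padding")
    return block[-payload_len:]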
Example #48
0
    def write(self, filename, arcname=None, compress_type=None):
        """
        Fixed version of write supporting bitflag 0x08 to write crc and size
        at end of file.
        """
        if not self.fp:
            raise RuntimeError(
                "Attempt to write to ZIP archive that was already closed")

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16  # Unix attributes
        if isdir:
            zinfo.compress_type = ZIP_STORED
        elif compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type

        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()  # Start of header bytes

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader())
            self.start_dir = self.fp.tell()
            return

        zinfo.flag_bits |= 0x08
        with open(filename, "rb") as fp:
            # Must overwrite CRC and sizes with correct data later
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            try:
                # Python > 2.7.3
                # Compressed size can be larger than uncompressed size
                zip64 = self._allowZip64 and \
                    zinfo.file_size * 1.05 > ZIP64_LIMIT
                self.fp.write(zinfo.FileHeader(zip64))
            except TypeError:
                # Python <= 2.7.3
                zip64 = zinfo.file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
                self.fp.write(zinfo.FileHeader())
            if zinfo.compress_type == ZIP_DEFLATED:
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                        zlib.DEFLATED, -15)
            else:
                cmpr = None
            file_size = 0
            while 1:
                buf = fp.read(CHUNK_SIZE)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                self.fp.write(buf)
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        if not zip64 and self._allowZip64:
            if file_size > ZIP64_LIMIT:
                raise RuntimeError('File size has increased during compressing')
            if compress_size > ZIP64_LIMIT:
                raise RuntimeError('Compressed size larger than uncompressed size')
        # Write CRC and file sizes after the file data
        fmt = b'<LQQ' if zip64 else b'<LLL'
        self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
                                  zinfo.file_size))
        self.start_dir = self.fp.tell()
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
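Because the patched write() sets bit flag 0x08, the local file header is written with zeroed CRC and sizes and the real values follow the data in a trailing descriptor. A usage sketch, assuming the method above is defined inside a zipfile.ZipFile subclass (the enclosing class is not shown in the snippet; the name below is hypothetical):

import zipfile

class StreamingZipFile(zipfile.ZipFile):
    # The patched write() above would live here, overriding ZipFile.write;
    # shown as a stub to keep the sketch short.
    pass

with StreamingZipFile('streamed.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
    zf.write('payload.bin')  # CRC and sizes land in the trailing data descriptor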
Example #49
0
File: bookizip.py Project: iSC-Labs/Booki
 def write_blob(self, filename, blob, compression=ZIP_DEFLATED, mode=0644):
     """Add something to the zip without adding to manifest"""
     zinfo = ZipInfo(filename)
     zinfo.external_attr = mode << 16L  # set permissions
     zinfo.compress_type = compression
     self.zipfile.writestr(zinfo, blob)
Example #50
0
def write_folder(z, path):
    info = ZipInfo(path)
    info.external_attr = 16
    z.writestr(info, b'')
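Setting external_attr = 16 stores only the MS-DOS directory bit (0x10); nothing is recorded in the high-order Unix mode bits. A variant sketch that also stores a conventional drwxr-xr-x mode, assuming the consuming tool honors Unix attributes:

from zipfile import ZipInfo

def write_folder_unix(z, path):
    # Set both the Unix directory mode (high 16 bits, 0o40755 = drwxr-xr-x)
    # and the MS-DOS directory flag (bit 4 of the low byte).
    if not path.endswith('/'):
        path += '/'
    info = ZipInfo(path)
    info.external_attr = (0o40755 << 16) | 0x10
    z.writestr(info, b'')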
Example #51
0
def decryptFontsBook(inpath, outpath):

    with closing(ZipFile(open(inpath, 'rb'))) as inf:
        namelist = inf.namelist()
        if 'META-INF/encryption.xml' not in namelist:
            return 1

        # Font key handling:

        font_master_key = None
        adobe_master_encryption_key = None

        contNS = lambda tag: '{%s}%s' % (
            'urn:oasis:names:tc:opendocument:xmlns:container', tag)
        path = None

        try:
            container = etree.fromstring(inf.read("META-INF/container.xml"))
            rootfiles = container.find(contNS("rootfiles")).findall(
                contNS("rootfile"))
            for rootfile in rootfiles:
                path = rootfile.get("full-path", None)
                if (path is not None):
                    break
        except:
            pass

        # If path is None, we didn't find an OPF, so we probably don't have a font key.
        # If path is set, it's the path to the main content OPF file.

        if (path is None):
            print("FontDecrypt: No OPF for font obfuscation found")
            return 1
        else:
            packageNS = lambda tag: '{%s}%s' % ('http://www.idpf.org/2007/opf',
                                                tag)
            metadataDCNS = lambda tag: '{%s}%s' % (
                'http://purl.org/dc/elements/1.1/', tag)

            try:
                container = etree.fromstring(inf.read(path))
            except:
                container = []

            ## IETF font key algorithm:
            print(
                "FontDecrypt: Checking {0} for IETF font obfuscation keys ... "
                .format(path),
                end='')
            secret_key_name = None
            try:
                secret_key_name = container.get("unique-identifier")
            except:
                pass

            try:
                identify_element = container.find(packageNS("metadata")).find(
                    metadataDCNS("identifier"))
                if (secret_key_name is None
                        or secret_key_name == identify_element.get("id")):
                    font_master_key = identify_element.text
            except:
                pass

            if (font_master_key is not None):
                if (secret_key_name is None):
                    print("found '%s'" % (font_master_key))
                else:
                    print("found '%s' (%s)" %
                          (font_master_key, secret_key_name))

                # Trim / remove forbidden characters from the key, then hash it:
                font_master_key = font_master_key.replace(' ', '')
                font_master_key = font_master_key.replace('\t', '')
                font_master_key = font_master_key.replace('\r', '')
                font_master_key = font_master_key.replace('\n', '')
                font_master_key = font_master_key.encode('utf-8')
                font_master_key = hashlib.sha1(font_master_key).digest()
            else:
                print("not found")

            ## Adobe font key algorithm
            print(
                "FontDecrypt: Checking {0} for Adobe font obfuscation keys ... "
                .format(path),
                end='')

            try:
                metadata = container.find(packageNS("metadata"))
                identifiers = metadata.findall(metadataDCNS("identifier"))

                uid = None
                uidMalformed = False

                for identifier in identifiers:
                    if identifier.get(packageNS("scheme")) == "UUID":
                        if identifier.text[:9] == "urn:uuid:":
                            uid = identifier.text[9:]
                        else:
                            uid = identifier.text
                        break
                    if identifier.text[:9] == "urn:uuid:":
                        uid = identifier.text[9:]
                        break

                if uid is not None:
                    uid = uid.replace(chr(0x20), '').replace(chr(0x09), '')
                    uid = uid.replace(chr(0x0D),
                                      '').replace(chr(0x0A),
                                                  '').replace('-', '')

                    if len(uid) < 16:
                        uidMalformed = True
                    if not all(c in "0123456789abcdefABCDEF" for c in uid):
                        uidMalformed = True

                    if not uidMalformed:
                        print("found '{0}'".format(uid))
                        uid = uid + uid
                        adobe_master_encryption_key = binascii.unhexlify(
                            uid[:32])

                if adobe_master_encryption_key is None:
                    print("not found")

            except:
                print("exception")
                pass

        # Begin decrypting.

        try:
            encryption = inf.read('META-INF/encryption.xml')
            decryptor = Decryptor(font_master_key, adobe_master_encryption_key,
                                  encryption)
            kwds = dict(compression=ZIP_DEFLATED, allowZip64=False)
            with closing(ZipFile(open(outpath, 'wb'), 'w', **kwds)) as outf:

                # Mimetype needs to be the first entry, so remove it from the list
                # wherever it is, then add it at the beginning.
                namelist.remove("mimetype")

                for path in (["mimetype"] + namelist):
                    data = inf.read(path)
                    zi = ZipInfo(path)
                    zi.compress_type = ZIP_DEFLATED

                    if path == "mimetype":
                        # mimetype must not be compressed
                        zi.compress_type = ZIP_STORED

                    elif path == "META-INF/encryption.xml":
                        # Check whether there are still entries not related to fonts
                        if (decryptor.check_if_remaining()):
                            data = decryptor.get_xml()
                            print(
                                "FontDecrypt: There's remaining entries in encryption.xml, adding file ..."
                            )
                        else:
                            # No remaining entries, no need for that file.
                            continue

                    try:
                        # get the file info, including time-stamp
                        oldzi = inf.getinfo(path)
                        # copy across useful fields
                        zi.date_time = oldzi.date_time
                        zi.comment = oldzi.comment
                        zi.extra = oldzi.extra
                        zi.internal_attr = oldzi.internal_attr
                        # external attributes are dependent on the create system, so copy both.
                        zi.external_attr = oldzi.external_attr
                        zi.create_system = oldzi.create_system
                        if any(ord(c) >= 128 for c in path) or any(
                                ord(c) >= 128 for c in zi.comment):
                            # If the file name or the comment contains any non-ASCII char, set the UTF8-flag
                            zi.flag_bits |= 0x800
                    except:
                        pass

                    if path == "mimetype":
                        outf.writestr(zi, inf.read('mimetype'))
                    elif path == "META-INF/encryption.xml":
                        outf.writestr(zi, data)
                    else:
                        outf.writestr(zi, decryptor.decrypt(path, data))
        except:
            print(
                "FontDecrypt: Could not decrypt fonts in {0:s} because of an exception:\n{1:s}"
                .format(os.path.basename(inpath), traceback.format_exc()))
            traceback.print_exc()
            return 2
    return 0
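Decryptor above receives the SHA-1 IDPF key and the 16-byte Adobe key derived from the OPF; in the EPUB OCF font-obfuscation schemes, applying such a key means XOR-ing a fixed-length prefix of each font file with the key repeated cyclically (1040 bytes with the 20-byte IDPF key, 1024 bytes with the 16-byte Adobe key). A Python 3 sketch of that step, offered only as an assumption about what the (not shown) Decryptor does:

def deobfuscate_font(data, key, prefix_len):
    # XOR the first prefix_len bytes of the font with the key, cycling the
    # key as needed; bytes beyond the prefix are stored unobfuscated.
    head = bytearray(data[:prefix_len])
    for i in range(len(head)):
        head[i] ^= key[i % len(key)]
    return bytes(head) + data[prefix_len:]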
Example #52
0
 def add_to_zip(file_obj, z_file):
     """Add the file to the zip and sets the attributes."""
     zinfo = ZipInfo(file_obj['name'], file_obj['time'])
     zinfo.compress_type = z_file.compression
     zinfo.external_attr = 0o644 << 16
     z_file.writestr(zinfo, file_obj['data'])
Example #53
0
def create_zipinfo(filename,
                   mtime=None,
                   dir=False,
                   executable=False,
                   symlink=False,
                   comment=None):
    """Create a instance of `ZipInfo`.

    :param filename: file name of the entry
    :param mtime: modified time of the entry
    :param dir: if `True`, the entry is a directory
    :param executable: if `True`, the entry is an executable file
    :param symlink: if `True`, the entry is a symbolic link
    :param comment: comment of the entry
    """
    from zipfile import ZipInfo, ZIP_DEFLATED, ZIP_STORED
    zipinfo = ZipInfo()

    # The general purpose bit flag 11 is used to denote
    # UTF-8 encoding for path and comment. Only set it for
    # non-ascii files for increased portability.
    # See http://www.pkware.com/documents/casestudies/APPNOTE.TXT
    if any(ord(c) >= 128 for c in filename):
        zipinfo.flag_bits |= 0x0800
    zipinfo.filename = filename.encode('utf-8')

    if mtime is not None:
        mtime = to_datetime(mtime, utc)
        zipinfo.date_time = mtime.utctimetuple()[:6]
        # The "extended-timestamp" extra field is used for the
        # modified time of the entry in unix time. It avoids
        # extracting wrong modified time if non-GMT timezone.
        # See http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip
        #     /proginfo/extra.fld
        zipinfo.extra += struct.pack(
            '<hhBl',
            0x5455,  # extended-timestamp extra block type
            1 + 4,  # size of this block
            1,  # modification time is present
            to_timestamp(mtime))  # time of last modification

    # external_attr is 4 bytes in size. The high order two
    # bytes represent UNIX permission and file type bits,
    # while the low order two contain MS-DOS FAT file
    # attributes, most notably bit 4 marking directories.
    if dir:
        if not zipinfo.filename.endswith('/'):
            zipinfo.filename += '/'
        zipinfo.compress_type = ZIP_STORED
        zipinfo.external_attr = 040755 << 16L  # permissions drwxr-xr-x
        zipinfo.external_attr |= 0x10  # MS-DOS directory flag
    else:
        zipinfo.compress_type = ZIP_DEFLATED
        zipinfo.external_attr = 0644 << 16L  # permissions -rw-r--r--
        if executable:
            zipinfo.external_attr |= 0755 << 16L  # -rwxr-xr-x
        if symlink:
            zipinfo.compress_type = ZIP_STORED
            zipinfo.external_attr |= 0120000 << 16L  # symlink file type

    if comment:
        zipinfo.comment = comment.encode('utf-8')

    return zipinfo
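The extended-timestamp block packed above can be read back from ZipInfo.extra when inspecting an archive; a short sketch that scans the extra field for tag 0x5455 and recovers the Unix modification time (the helper name is illustrative):

import struct

def read_unix_mtime(zipinfo):
    # Each extra block is <little-endian u16 tag, u16 size> followed by
    # `size` bytes of data; for 0x5455 the first data byte is a flags byte
    # and bit 0 means a 4-byte modification time follows.
    extra, pos = zipinfo.extra, 0
    while pos + 4 <= len(extra):
        tag, size = struct.unpack('<HH', extra[pos:pos + 4])
        if tag == 0x5455 and size >= 5:
            flags, mtime = struct.unpack('<Bl', extra[pos + 4:pos + 9])
            if flags & 1:
                return mtime
        pos += 4 + size
    return None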
Example #54
0
 def mkdir(self, filename, arcname):
     st = os.stat(filename)
     zinfo = ZipInfo(arcname + '/', date_time=get_zipinfo_datetime(st.st_mtime))
     zinfo.external_attr = st.st_mode << 16
     zinfo.compress_type = ZIP_DEFLATED
     self.writestr(zinfo, b'')
Example #55
0
 def _add_entry(self, name: str, external_attr: int, mtime: int,
                data: Union[bytes, str]) -> None:
     info = ZipInfo(name, self._prepare_zipinfo_date_time(mtime))
     info.external_attr = external_attr
     self._set_zipinfo_compression(info)
     self._inner.writestr(info, data)
Example #56
0
             encoding='UTF-8'
     ) as outfile:  # Storing the initial config in the 'Input' folder.
         outfile.write(python_code)
 shutil.copy(
     path_prefix + '/Backup/PROD/Lambda/Code/' + df['Lambda'][index] +
     ".py", config['Python_file_name']
 )  # Moves and renames the python code file as 'lambda_function.py'
 file = open(config['Python_file_name'], 'rb')
 zip_code = file.read()
 zip_file_name = path_prefix + config['Zip_Code_prefix'] + df['Lambda'][
     index] + '.zip'  # Creates the zip file and adds the permissions.
 zip_file = ZipFile(zip_file_name, 'w', compression=ZIP_DEFLATED)
 zip_info = ZipInfo(config['Python_file_name'])
 zip_info.compress_type = ZIP_DEFLATED
 zip_info.create_system = 3
 zip_info.external_attr = 0o777 << 16
 zip_file.writestr(zip_info, zip_code)  # Adds the code to the zip file.
 os.remove(config['Python_file_name'])
 if 'Environment' in func_config:
     env = func_config['Environment']
     variables = env['Variables']
     if 'LAMBDA_ENV' in variables:
         variables['LAMBDA_ENV'] = 'NPROD'
     if 'envprefix' in variables:
         variables['envprefix'] = 'us-nprod-odp'
     if 'BUCKET_NAME' in variables:
         variables['BUCKET_NAME'] = 'odp-us-nprod-servicesuite'
     env['Variables'] = variables
     func_config['Environment'] = env
 with open(path_prefix + config['Config_prefix'] + df['Lambda'][index] +
           '.json',
Example #57
0
    def setUpClass(cls):
        """ get_some_resource() is slow, to avoid calling it for each test use setUpClass()
            and store the result as class variable
        """

        # Suppress ResourceWarning messages about unclosed connections.
        warnings.simplefilter('ignore')

        cls.setUpParams(cls)

        lambda_client = boto3.client('lambda', region_name=get_region())
        cls.test_lambda = 'IntTest-{}'.format(cls.domain).replace('.', '-')
        try:
            lambda_client.delete_function(FunctionName=cls.test_lambda)
        except botocore.exceptions.ClientError:
            pass

        resp = lambda_client.get_function(
            FunctionName=cls.object_store_config['page_out_lambda_function'])
        lambda_cfg = resp['Configuration']
        vpc_cfg = lambda_cfg['VpcConfig']
        # VpcId is not a valid field when creating a lambda fcn.
        del vpc_cfg['VpcId']

        temp_file = tempfile.NamedTemporaryFile()
        temp_name = temp_file.name + '.zip'
        temp_file.close()
        with ZipFile(temp_name, mode='w') as zip:
            t = time.localtime()
            lambda_file = ZipInfo('lambda_function.py',
                                  date_time=(t.tm_year, t.tm_mon, t.tm_mday,
                                             t.tm_hour, t.tm_min, t.tm_sec))
            # Set file permissions.
            lambda_file.external_attr = 0o777 << 16
            code = 'def handler(event, context):\n    return\n'
            zip.writestr(lambda_file, code)

        with open(temp_name, 'rb') as zip2:
            lambda_bytes = zip2.read()

        lambda_client.create_function(FunctionName=cls.test_lambda,
                                      VpcConfig=vpc_cfg,
                                      Role=lambda_cfg['Role'],
                                      Runtime=lambda_cfg['Runtime'],
                                      Handler='lambda_function.handler',
                                      MemorySize=128,
                                      Code={'ZipFile': lambda_bytes})

        # Set page out function to the test lambda.
        cls.object_store_config['page_out_lambda_function'] = cls.test_lambda

        print('standby for queue creation (slow ~30s)')
        try:
            cls.object_store_config[
                "s3_flush_queue"] = cls.setup_helper.create_flush_queue(
                    cls.s3_flush_queue_name)
        except ClientError:
            try:
                cls.setup_helper.delete_flush_queue(
                    cls.object_store_config["s3_flush_queue"])
            except:
                pass
            time.sleep(61)
            cls.object_store_config[
                "s3_flush_queue"] = cls.setup_helper.create_flush_queue(
                    cls.s3_flush_queue_name)

        print('done')
Example #58
0
def decryptBook(userkey, inpath, outpath):
    with closing(ZipFile(open(inpath, 'rb'))) as inf:
        namelist = inf.namelist()
        if 'META-INF/rights.xml' not in namelist or \
           'META-INF/encryption.xml' not in namelist:
            print("{0:s} is DRM-free.".format(os.path.basename(inpath)))
            return 1
        for name in META_NAMES:
            namelist.remove(name)
        try:
            rights = etree.fromstring(inf.read('META-INF/rights.xml'))
            adept = lambda tag: '{%s}%s' % (NSMAP['adept'], tag)
            expr = './/%s' % (adept('encryptedKey'), )
            bookkeyelem = rights.find(expr)
            bookkey = bookkeyelem.text
            keytype = bookkeyelem.attrib.get('keyType', '0')
            if len(bookkey) >= 172 and int(keytype, 10) > 2:
                print("{0:s} is a secure Adobe Adept ePub with hardening.".
                      format(os.path.basename(inpath)))
            elif len(bookkey) == 172:
                print("{0:s} is a secure Adobe Adept ePub.".format(
                    os.path.basename(inpath)))
            elif len(bookkey) == 64:
                print("{0:s} is a secure Adobe PassHash (B&N) ePub.".format(
                    os.path.basename(inpath)))
            else:
                print("{0:s} is not an Adobe-protected ePub!".format(
                    os.path.basename(inpath)))
                return 1

            if len(bookkey) != 64:
                # Normal or "hardened" Adobe ADEPT
                rsakey = RSA.import_key(userkey)  # parses the ASN1 structure
                bookkey = base64.b64decode(bookkey)
                if int(keytype, 10) > 2:
                    bookkey = removeHardening(rights, keytype, bookkey)
                try:
                    bookkey = PKCS1_v1_5.new(rsakey).decrypt(
                        bookkey, None)  # automatically unpads
                except ValueError:
                    bookkey = None

                if bookkey is None:
                    print("Could not decrypt {0:s}. Wrong key".format(
                        os.path.basename(inpath)))
                    return 2
            else:
                # Adobe PassHash / B&N
                key = base64.b64decode(userkey)[:16]
                bookkey = base64.b64decode(bookkey)
                bookkey = unpad(
                    AES.new(key, AES.MODE_CBC, b'\x00' * 16).decrypt(bookkey),
                    16)  # PKCS#7

                if len(bookkey) > 16:
                    bookkey = bookkey[-16:]

            encryption = inf.read('META-INF/encryption.xml')
            decryptor = Decryptor(bookkey, encryption)
            kwds = dict(compression=ZIP_DEFLATED, allowZip64=False)
            with closing(ZipFile(open(outpath, 'wb'), 'w', **kwds)) as outf:

                for path in (["mimetype"] + namelist):
                    data = inf.read(path)
                    zi = ZipInfo(path)
                    zi.compress_type = ZIP_DEFLATED

                    if path == "mimetype":
                        zi.compress_type = ZIP_STORED

                    elif path == "META-INF/encryption.xml":
                        # Check if there's still something in there
                        if (decryptor.check_if_remaining()):
                            data = decryptor.get_xml()
                            print(
                                "Adding encryption.xml for the remaining embedded files."
                            )
                            # We removed DRM, but there's still stuff like obfuscated fonts.
                        else:
                            continue

                    try:
                        # get the file info, including time-stamp
                        oldzi = inf.getinfo(path)
                        # copy across useful fields
                        zi.date_time = oldzi.date_time
                        zi.comment = oldzi.comment
                        zi.extra = oldzi.extra
                        zi.internal_attr = oldzi.internal_attr
                        # external attributes are dependent on the create system, so copy both.
                        zi.external_attr = oldzi.external_attr
                        zi.create_system = oldzi.create_system
                        if any(ord(c) >= 128 for c in path) or any(
                                ord(c) >= 128 for c in zi.comment):
                            # If the file name or the comment contains any non-ASCII char, set the UTF8-flag
                            zi.flag_bits |= 0x800
                    except:
                        pass
                    if path == "META-INF/encryption.xml":
                        outf.writestr(zi, data)
                    else:
                        outf.writestr(zi, decryptor.decrypt(path, data))
        except:
            print("Could not decrypt {0:s} because of an exception:\n{1:s}".
                  format(os.path.basename(inpath), traceback.format_exc()))
            return 2
    return 0
Example #59
0
from hashlib import sha256
from io import BytesIO, StringIO
from re import compile as re_compile
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

import boto3

BUCKET_NAME = "dist-gov"

CFN_PREFIX = "cfn-templates/"
ZIP_PREFIX = ""

# Create a ZIP archive for Lambda.
archive_bytes_io = BytesIO()
with ZipFile(archive_bytes_io, mode="w", compression=ZIP_DEFLATED) as zf:
    zi = ZipInfo("ebs_snapshot_manager.py")
    zi.compress_type = ZIP_DEFLATED
    zi.create_system = 3  # Unix
    zi.external_attr = 0o775 << 16  # rwxrwxr-x
    with open("ebs_snapshot_manager.py", mode="rb") as src_file:
        zf.writestr(zi, src_file.read())

# Compute the SHA-256 digest of the archive; it becomes part of the object name.
archive_bytes = archive_bytes_io.getvalue()
digest = sha256(archive_bytes).hexdigest()
assert isinstance(digest, str)
zip_obj_name = ZIP_PREFIX + "ebs_snapshot_manager.zip.%s" % digest

# Upload the archive to our CFN endpoint.
s3 = boto3.resource("s3")
s3_obj = s3.Object(BUCKET_NAME, zip_obj_name)
s3_obj.put(ACL="public-read",
           Body=archive_bytes,
           ContentType="application/zip")
Example #60
0
#!/usr/bin/env python2
import os, sys, mmap
from zipfile import ZipFile, ZipInfo

with ZipFile(sys.argv[1], "w") as z:
    for filename in sys.argv[2:]:
        with open(filename, "r+b") as f:
            mm = mmap.mmap(f.fileno(), 0)
            try:
                info = ZipInfo(os.path.basename(filename), (1980, 1, 1, 0, 0, 0))
                info.create_system = 3 # Unix
                info.external_attr = 0644 << 16L # rw-r--r--
                z.writestr(info, mm)
            finally:
                mm.close()