Code Example #1
File: pluginsystem.py Project: rockyburt/Rezine
    def dump(self, fp):
        """Dump the plugin as package into the filepointer or file."""
        from zipfile import ZipFile, ZipInfo
        f = ZipFile(fp, 'w')

        # write all files into a "pdata/" folder
        offset = len(self.path) + 1
        for dirpath, dirnames, filenames in walk(self.path):
            # don't recurse into hidden dirs
            for i in range(len(dirnames)-1, -1, -1):
                if dirnames[i].startswith('.'):
                    del dirnames[i]
            for filename in filenames:
                if filename.endswith('.pyc') or \
                   filename.endswith('.pyo'):
                    continue
                f.write(path.join(dirpath, filename),
                        path.join('pdata', dirpath[offset:], filename))

        # add the package information files
        for name, data in [('ZINE_PLUGIN', self.name),
                           ('ZINE_PACKAGE', PACKAGE_VERSION)]:
            zinfo = ZipInfo(name, localtime(time()))
            zinfo.compress_type = f.compression
        zinfo.external_attr = (33188 & 0xFFFF) << 16L  # 33188 == 0o100644: regular file, rw-r--r--
            f.writestr(zinfo, str(data))

        f.close()
Code Example #2
    def doTest(self, expected_ext, files, *modules, **kw):
        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                z.writestr(zinfo, data)
            z.close()

            stuff = kw.get("stuff", None)
            if stuff is not None:
                # Prepend 'stuff' to the start of the zipfile
                f = open(TEMP_ZIP, "rb")
                data = f.read()
                f.close()

                f = open(TEMP_ZIP, "wb")
                f.write(stuff)
                f.write(data)
                f.close()

            sys.path.insert(0, TEMP_ZIP)

            mod = __import__(".".join(modules), globals(), locals(),
                             ["__dummy__"])
            if expected_ext:
                file = mod.get_file()
                self.assertEquals(file, os.path.join(TEMP_ZIP,
                                  *modules) + expected_ext)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
Code Example #3
File: changeset.py Project: gdgkyoto/kyoto-gtug
    def _render_zip(self, req, filename, repos, data):
        """ZIP archive with all the added and/or modified files."""
        new_rev = data['new_rev']
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition',
                        content_disposition('inline', filename + '.zip'))

        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        zipfile = ZipFile(buf, 'w', ZIP_DEFLATED)
        for old_node, new_node, kind, change in repos.get_changes(
            new_path=data['new_path'], new_rev=data['new_rev'],
            old_path=data['old_path'], old_rev=data['old_rev']):
            if kind == Node.FILE and change != Changeset.DELETE:
                assert new_node
                zipinfo = ZipInfo()
                zipinfo.filename = new_node.path.strip('/').encode('utf-8')
                # Note: unicode filenames are not supported by zipfile.
                # UTF-8 is not supported by all Zip tools either,
                # but as some do, I think UTF-8 is the best option here.
                zipinfo.date_time = new_node.last_modified.utctimetuple()[:6]
                zipinfo.external_attr = 0644 << 16L # needed since Python 2.5
                zipinfo.compress_type = ZIP_DEFLATED
                zipfile.writestr(zipinfo, new_node.get_content().read())
        zipfile.close()

        zip_str = buf.getvalue()
        req.send_header("Content-Length", len(zip_str))
        req.end_headers()
        req.write(zip_str)
        raise RequestDone
Code Example #4
File: vba2zip.py Project: loota/vimplugins
 def add(self, member):
   if (member.isdir):
     return # FIXME Should be able to add empty directories
   info = ZipInfo(member.name)
   info.date_time = member.mtime
   info.external_attr = member.perm << 16L
   self.archive.writestr(info, member.data)
Code Example #5
File: epub_utils.py Project: jalal70/Objavi
 def write_blob(self, path, blob, compression=ZIP_DEFLATED, mode=0644):
     """Add something to the zip without adding to manifest"""
     zinfo = ZipInfo(path)
     zinfo.external_attr = mode << 16L # set permissions
     zinfo.compress_type = compression
     zinfo.date_time = self.now
     self.zipfile.writestr(zinfo, blob)
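
The "mode << 16" assignment above stores Unix permission bits in the high 16 bits of external_attr. A minimal round-trip sketch (entry name and payload are hypothetical) showing that the mode can be read back from the archive:

import stat
from io import BytesIO
from zipfile import ZipFile, ZipInfo

buf = BytesIO()
with ZipFile(buf, 'w') as zf:
    zi = ZipInfo('example.txt')         # hypothetical entry name
    zi.external_attr = 0o644 << 16      # permissions live in the high word
    zf.writestr(zi, b'hello')

buf.seek(0)
with ZipFile(buf) as zf:
    mode = zf.getinfo('example.txt').external_attr >> 16
    print(oct(mode), stat.filemode(mode))  # 0o644 -rw-r--r--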
Code Example #6
File: changeset.py Project: yeoupooh/tow
    def _render_zip(self, req, filename, repos, diff):
        """ZIP archive with all the added and/or modified files."""
        new_rev = diff.new_rev
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition', 'attachment;'
                        'filename=%s.zip' % filename)

        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        zipfile = ZipFile(buf, 'w', ZIP_DEFLATED)
        for old_node, new_node, kind, change in repos.get_changes(**diff):
            if kind == Node.FILE and change != Changeset.DELETE:
                assert new_node
                zipinfo = ZipInfo()
                zipinfo.filename = new_node.path.encode('utf-8')
                # Note: unicode filenames are not supported by zipfile.
                # UTF-8 is not supported by all Zip tools either,
                # but as some do, I think UTF-8 is the best option here.
                zipinfo.date_time = time.gmtime(new_node.last_modified)[:6]
                zipinfo.compress_type = ZIP_DEFLATED
                zipfile.writestr(zipinfo, new_node.get_content().read())
        zipfile.close()

        buf.seek(0, 2) # be sure to be at the end
        req.send_header("Content-Length", buf.tell())
        req.end_headers()

        req.write(buf.getvalue())
Code Example #7
File: setchangeset.py Project: nyuhuhuu/trachacks
    def _render_zip(self, req, repos, chgset):
        """ZIP archive with all the added and/or modified files."""
        req.send_response(200)
        req.send_header('Content-Type', 'application/zip')
        req.send_header('Content-Disposition', 'attachment;'
                        'filename=Changeset%s.zip' % chgset.rev)
        req.end_headers()

        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

        buf = StringIO()
        zipfile = ZipFile(buf, 'w', ZIP_DEFLATED)
        for path, kind, change, base_path, base_rev in chgset.get_changes():
            if kind == Node.FILE and change != Changeset.DELETE:
                node = repos.get_node(path, chgset.rev)
                zipinfo = ZipInfo()
                zipinfo.filename = node.path
                zipinfo.date_time = time.gmtime(node.last_modified)[:6]
                zipinfo.compress_type = ZIP_DEFLATED
                zipfile.writestr(zipinfo, node.get_content().read())
        zipfile.close()
        req.write(buf.getvalue())
Code Example #8
File: main.py Project: suentiem/Dream-Stream
def export():
    name = request.form.get('name')
    settings = loads(open(PATH_SETTINGS + name + '.json', 'rb').read())

    # Build list of needed resources
    resources = []
    for listener in settings.get('listeners', []):
        for effect in listener.get('effects', []):
            resource = effect.get('resource', {}).get('source')
            if type(resource) is list:
                resources = resources + resource
            else:
                resources.append(resource)

    # Create ZIP with all files
    memory_file = BytesIO()
    with ZipFile(memory_file, 'w') as zf:
        # Resources
        for resource in resources:
            path = PATH_FILES + resource
            data = ZipInfo('files/' + resource)
            data.compress_type = ZIP_DEFLATED
            zf.writestr(data, open(path, 'rb').read())

        # Config
        data = ZipInfo('settings/' + name + '.json')
        data.compress_type = ZIP_DEFLATED
        zf.writestr(data, open(PATH_SETTINGS + name + '.json', 'rb').read())
    memory_file.seek(0)
    
    return send_file(memory_file, attachment_filename=name + '.zip', as_attachment=True)
Code Example #9
File: zip_kalite.py Project: NiallEgan/ka-lite
    def handle(self, *args, **options):
        options['platform'] = options['platform'].lower() # normalize

        if options['platform'] not in ["all", "linux", "macos", "darwin", "windows"]:
            raise CommandError("Unrecognized platform: %s; will include ALL files." % options['platform'])

        # Step 1: recursively add all static files
        kalite_base = os.path.realpath(settings.PROJECT_PATH + "/../")
        files_dict = recursively_add_files(dirpath=kalite_base, **options)

        # Step 2: Add a local_settings.py file.
        #   For distributed servers, this is a copy of the local local_settings.py,
        #   with a few properties (specified as command-line options) overridden
        ls_file = create_local_settings_file(location=os.path.realpath(kalite_base+"/kalite/local_settings.py"), server_type=options['server_type'], locale=options['locale'])
        files_dict[ls_file] = { "dest_path": "kalite/local_settings.py" }

        # Step 3: select output file.
        if options['file']=="__default__":
            options['file'] = create_default_archive_filename(options)

        # Step 4: package into a zip file
        with ZipFile(options['file'], "w", ZIP_DEFLATED if options['compress'] else ZIP_STORED) as zfile:
            for srcpath,fdict in files_dict.items():
                if options['verbosity'] >= 1:
                    print "Adding to zip: %s" % srcpath
                # Add without setting exec perms
                if os.path.splitext(fdict["dest_path"])[1] != ".sh":
                    zfile.write(srcpath, arcname=fdict["dest_path"])
                # Add with exec perms
                else:
                    info = ZipInfo(fdict["dest_path"])
                    info.external_attr = 0755 << 16L # give full access to included file
                    with open(srcpath, "r") as fh:
                        zfile.writestr(info, fh.read())
Code Example #10
File: ziputils.py Project: obriencj/python-javatools
def _collect_infos(dirname):

    """ Utility function used by ExplodedZipFile to generate ZipInfo
    entries for all of the files and directories under dirname """

    for r, _ds, fs in walk(dirname):
        if not islink(r) and r != dirname:
            i = ZipInfo()
            i.filename = join(relpath(r, dirname), "")
            i.file_size = 0
            i.compress_size = 0
            i.CRC = 0
            yield i.filename, i

        for f in fs:
            df = join(r, f)
            relfn = relpath(join(r, f), dirname)

            if islink(df):
                pass

            elif isfile(df):
                i = ZipInfo()
                i.filename = relfn
                i.file_size = getsize(df)
                i.compress_size = i.file_size
                i.CRC = file_crc32(df)
                yield i.filename, i

            else:
                # TODO: is there any more special treatment?
                pass
Code Example #11
File: utils.py Project: chevah/brink
    def createZipArchive(self, source, destination, exclude=None):
        """
        Create a zip file at `destination` based on files from `source`.
        """
        """
        Create a zip file at `destination` based on files from `source`.
        """
        if exclude is None:
            exclude = []

        source_path = self.fs.join(source)
        parent_path = os.path.dirname(source_path)
        archivename = self.fs.join(destination)
        with closing(ZipFile(archivename, 'w', ZIP_DEFLATED)) as z:
            for root, dirs, files in os.walk(source_path):
                # Write all files.
                for fn in files:
                    if fn in exclude:
                        continue
                    absolute_filename = os.path.join(root, fn)
                    zip_filename = absolute_filename[len(parent_path):]
                    # See http://bugs.python.org/issue1734346
                    # for adding unicode support.
                    z.write(str(absolute_filename), str(zip_filename))

                # For empty folders, we need to create a special ZipInfo
                # entry.
                # 16 works, but some places suggest using 48.
                if not files and not dirs:
                    foldername = root[len(parent_path):] + '/'
                    zip_info = ZipInfo(foldername)
                    zip_info.external_attr = 16
                    z.writestr(zip_info, "")
Code Example #12
File: mathjax.py Project: NiLuJe/calibre
 def add_file(self, zf, path, name):
     with open(path, 'rb') as f:
         raw = f.read()
     self.h.update(raw)
     zi = ZipInfo(name)
     zi.external_attr = 0o444 << 16
     zf.writestr(zi, raw)
Code Example #13
File: serializers.py Project: TraMZzz/test
    def validate(self, data):
        url = data["file_url"]
        order_hash = data["order_hash"]
        if url and order_hash:
            try:
                # get file name and save
                file_name = os.path.basename(os.path.realpath(url))
                urllib.urlretrieve(url, file_name)

                # get data from file
                epub_old = ZipFile(file_name, "r")
                txt = epub_old.read("META-INF/container.xml")
                epub_old.close()

                # rewrite file and add comment
                epub_new = ZipFile(file_name, "w")
                epub_new.writestr("mimetype", "application/epub+zip")
                info = ZipInfo("META-INF/container.xml", date_time=time.localtime(time.time()))
                info.comment = "%s at %s" % (order_hash, time.strftime("%d/%m/%Y"))
                epub_new.writestr(info, txt)
                epub_new.close()
            except:
                raise serializers.ValidationError("Some error with file or not correct url")
            return file_name
        return data
Code Example #14
File: platforms.py Project: AbhiUnni/ka-lite
def system_specific_zipping(files_dict, zip_file=None, compression=ZIP_DEFLATED, callback=_default_callback_zip):
    """
    Zip up files, adding permissions when appropriate.
    """

    if not zip_file:
        zip_file = tempfile.mkstemp()[1]

    zfile = None
    try:
        zfile = ZipFile(zip_file, 'w', compression)
        for fi, (dest_path, src_path) in enumerate(files_dict.iteritems()):
            if callback:
                callback(src_path, fi, len(files_dict))
            # All platforms besides windows need permissions set.
            ext = os.path.splitext(dest_path)[1]
            if ext not in not_system_specific_scripts(system="windows"):
                zfile.write(src_path, arcname=dest_path)
            # Add with exec perms
            else:
                info = ZipInfo(dest_path)
                info.external_attr = 0775 << ((1 - is_osx()) * 16L) # give full access to included file
                with open(src_path, "r") as fh:
                    zfile.writestr(info, fh.read())
        zfile.close()
    finally:
        if zfile:
            zfile.close()
Code Example #15
File: OOoPy.py Project: gebi/ooopy
 def _write (self, zname, str) :
     now  = datetime.utcnow ().timetuple ()
     info = ZipInfo (zname, date_time = now)
     info.create_system = 0 # pretend to be fat
     info.compress_type = ZIP_DEFLATED
     self.ozip.writestr (info, str)
     self.written [zname] = 1
Code Example #16
    def testZipImporterMethodsInSubDirectory(self):
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)}

        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                z.writestr(zinfo, data)
            z.close()

            zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir)
            self.assertEquals(zi.archive, TEMP_ZIP)
            self.assertEquals(zi.prefix, packdir)
            self.assertEquals(zi.is_package(TESTPACK2), True)
            zi.load_module(TESTPACK2)

            self.assertEquals(zi.is_package(TESTPACK2 + os.sep + '__init__'), False)
            self.assertEquals(zi.is_package(TESTPACK2 + os.sep + TESTMOD), False)

            mod_name = TESTPACK2 + os.sep + TESTMOD
            mod = __import__(module_path_to_dotted_name(mod_name))
            self.assertEquals(zi.get_source(TESTPACK2), None)
            self.assertEquals(zi.get_source(mod_name), None)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
Code Example #17
    def compileToZip(self):
        """ 
            Compile the exam as a .zip file
        """
        def cleanpath(path):
            if path=='': 
                return ''
            dirname, basename = os.path.split(path)
            dirname=cleanpath(dirname)
            if basename!='.':
                dirname = os.path.join(dirname,basename)
            return dirname

        f = ZipFile(self.options.output,'w')

        for (dst,src) in self.files.items():
            dst = ZipInfo(cleanpath(dst))
            dst.external_attr = 0o644<<16
            dst.date_time = datetime.datetime.today().timetuple()
            if isinstance(src,basestring):
                f.writestr(dst,open(src,'rb').read())
            else:
                f.writestr(dst,src.read())

        print("Exam created in %s" % os.path.relpath(self.options.output))

        f.close()
Code Example #18
File: test_zipimport.py Project: 3lnc/cpython
    def testZipImporterMethods(self):
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc),
                 "spam" + pyc_ext: (NOW, test_pyc)}

        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                zinfo.comment = b"spam"
                z.writestr(zinfo, data)
            z.close()

            zi = zipimport.zipimporter(TEMP_ZIP)
            self.assertEqual(zi.archive, TEMP_ZIP)
            self.assertEqual(zi.is_package(TESTPACK), True)

            find_mod = zi.find_module('spam')
            self.assertIsNotNone(find_mod)
            self.assertIsInstance(find_mod, zipimport.zipimporter)
            self.assertFalse(find_mod.is_package('spam'))
            load_mod = find_mod.load_module('spam')
            self.assertEqual(find_mod.get_filename('spam'), load_mod.__file__)

            mod = zi.load_module(TESTPACK)
            self.assertEqual(zi.get_filename(TESTPACK), mod.__file__)

            existing_pack_path = importlib.import_module(TESTPACK).__path__[0]
            expected_path_path = os.path.join(TEMP_ZIP, TESTPACK)
            self.assertEqual(existing_pack_path, expected_path_path)

            self.assertEqual(zi.is_package(packdir + '__init__'), False)
            self.assertEqual(zi.is_package(packdir + TESTPACK2), True)
            self.assertEqual(zi.is_package(packdir2 + TESTMOD), False)

            mod_path = packdir2 + TESTMOD
            mod_name = module_path_to_dotted_name(mod_path)
            mod = importlib.import_module(mod_name)
            self.assertTrue(mod_name in sys.modules)
            self.assertEqual(zi.get_source(TESTPACK), None)
            self.assertEqual(zi.get_source(mod_path), None)
            self.assertEqual(zi.get_filename(mod_path), mod.__file__)
            # To pass in the module name instead of the path, we must use the
            # right importer
            loader = mod.__loader__
            self.assertEqual(loader.get_source(mod_name), None)
            self.assertEqual(loader.get_filename(mod_name), mod.__file__)

            # test prefix and archivepath members
            zi2 = zipimport.zipimporter(TEMP_ZIP + os.sep + TESTPACK)
            self.assertEqual(zi2.archive, TEMP_ZIP)
            self.assertEqual(zi2.prefix, TESTPACK + os.sep)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
Code Example #19
File: test_zipimport.py Project: Eyepea/cpython
 def testUnencodable(self):
     filename = support.TESTFN_UNENCODABLE + ".zip"
     self.addCleanup(support.unlink, filename)
     with ZipFile(filename, "w") as z:
         zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW))
         zinfo.compress_type = self.compression
         z.writestr(zinfo, test_src)
     zipimport.zipimporter(filename).load_module(TESTMOD)
Code Example #20
File: gitarchive.py Project: grzr/deployment
 def add_file(file_path, arcname):
     if not path.islink(file_path):
         archive.write(file_path, arcname, ZIP_DEFLATED)
     else:
         i = ZipInfo(arcname)
         i.create_system = 3
         i.external_attr = 0xA1ED0000
         archive.writestr(i, readlink(file_path))
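
The constant 0xA1ED0000 above packs the symlink file type plus rwxr-xr-x permissions into the high word of external_attr. A short sketch (arcname hypothetical) deriving the same value from stat constants:

import stat
from zipfile import ZipInfo

info = ZipInfo('link-name')   # hypothetical arcname
info.create_system = 3        # Unix, so the attributes are honoured on extraction
info.external_attr = (stat.S_IFLNK | 0o755) << 16
assert info.external_attr == 0xA1ED0000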
Code Example #21
File: wheelfile.py Project: IJDykeman/wangTiles
    def write(self, filename, arcname=None, compress_type=None):
        with open(filename, 'rb') as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
        zinfo.external_attr = st.st_mode << 16
        zinfo.compress_type = ZIP_DEFLATED
        self.writestr(zinfo, data, compress_type)
Code Example #22
 def zip(cls):
     now = time.localtime(time.time())
     zipio = StringIO()
     zipfile = ZipFile(zipio, 'w', ZIP_DEFLATED)
     for file in cls.files:
         zipinfo = ZipInfo(file.filename, date_time=now)
         zipinfo.external_attr = 0644 << 16L
         zipfile.writestr(zipinfo, file.getvalue())
     zipfile.close()
     return zipio
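
The example above is Python 2, where a StringIO object can hold the binary archive. On Python 3, ZipFile needs a bytes buffer, so an equivalent sketch (entry name and contents invented) uses io.BytesIO:

import time
from io import BytesIO
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

now = time.localtime(time.time())[:6]
zipio = BytesIO()
with ZipFile(zipio, 'w', ZIP_DEFLATED) as zf:
    zipinfo = ZipInfo('report.txt', date_time=now)  # hypothetical entry
    zipinfo.external_attr = 0o644 << 16
    zf.writestr(zipinfo, b'contents')
zipio.seek(0)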
Code Example #23
File: __init__.py Project: dafrito/trac-mirror
def create_zipinfo(filename, mtime=None, dir=False, executable=False, symlink=False,
                   comment=None):
    """Create a instance of `ZipInfo`.

    :param filename: file name of the entry
    :param mtime: modified time of the entry
    :param dir: if `True`, the entry is a directory
    :param executable: if `True`, the entry is a executable file
    :param symlink: if `True`, the entry is a symbolic link
    :param comment: comment of the entry
    """
    from zipfile import ZipInfo, ZIP_DEFLATED, ZIP_STORED
    zipinfo = ZipInfo()

    # The general purpose bit flag 11 is used to denote
    # UTF-8 encoding for path and comment. Only set it for
    # non-ascii files for increased portability.
    # See http://www.pkware.com/documents/casestudies/APPNOTE.TXT
    if any(ord(c) >= 128 for c in filename):
        zipinfo.flag_bits |= 0x0800
    zipinfo.filename = filename.encode('utf-8')

    if mtime is not None:
        mtime = to_datetime(mtime, utc)
        zipinfo.date_time = mtime.utctimetuple()[:6]
        # The "extended-timestamp" extra field is used for the
        # modified time of the entry in unix time. It avoids
        # extracting wrong modified time if non-GMT timezone.
        # See http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip
        #     /proginfo/extra.fld
        zipinfo.extra += struct.pack(
            '<hhBl',
            0x5455,                 # extended-timestamp extra block type
            1 + 4,                  # size of this block
            1,                      # modification time is present
            to_timestamp(mtime))    # time of last modification

    # external_attr is 4 bytes in size. The high order two
    # bytes represent UNIX permission and file type bits,
    # while the low order two contain MS-DOS FAT file
    # attributes, most notably bit 4 marking directories.
    if dir:
        if not zipinfo.filename.endswith('/'):
            zipinfo.filename += '/'
        zipinfo.compress_type = ZIP_STORED
        zipinfo.external_attr = 040755 << 16L       # permissions drwxr-xr-x
        zipinfo.external_attr |= 0x10               # MS-DOS directory flag
    else:
        zipinfo.compress_type = ZIP_DEFLATED
        zipinfo.external_attr = 0644 << 16L         # permissions -rw-r--r--
        if executable:
            zipinfo.external_attr |= 0755 << 16L    # -rwxr-xr-x
        if symlink:
            zipinfo.compress_type = ZIP_STORED
            zipinfo.external_attr |= 0120000 << 16L # symlink file type

    if comment:
        zipinfo.comment = comment.encode('utf-8')

    return zipinfo
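
A hypothetical usage sketch for the helper above (archive and entry names invented), writing a single executable entry; leaving mtime unset skips the extended-timestamp extra field:

from zipfile import ZipFile

with ZipFile('out.zip', 'w') as zf:
    zipinfo = create_zipinfo('bin/run.sh', executable=True)
    zf.writestr(zipinfo, '#!/bin/sh\necho hello\n')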
Code Example #24
File: environment.py Project: AJH693/jinja2
 def write_file(filename, data, mode):
     if zip:
         info = ZipInfo(filename)
         info.external_attr = 0o755 << 16
         zip_file.writestr(info, data)
     else:
         f = open(os.path.join(target, filename), mode)
         try:
             f.write(data)
         finally:
             f.close()
Code Example #25
 def testUnencodable(self):
     filename = support.TESTFN_UNENCODABLE + ".zip"
     z = ZipFile(filename, "w")
     zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW))
     zinfo.compress_type = self.compression
     z.writestr(zinfo, test_src)
     z.close()
     try:
         zipimport.zipimporter(filename)
     finally:
         os.remove(filename)
Code Example #26
    def testBytesPath(self):
        filename = support.TESTFN + ".zip"
        self.addCleanup(support.unlink, filename)
        with ZipFile(filename, "w") as z:
            zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW))
            zinfo.compress_type = self.compression
            z.writestr(zinfo, test_src)

        zipimport.zipimporter(filename)
        zipimport.zipimporter(os.fsencode(filename))
        zipimport.zipimporter(bytearray(os.fsencode(filename)))
        zipimport.zipimporter(memoryview(os.fsencode(filename)))
Code Example #27
File: wheelfile.py Project: IJDykeman/wangTiles
    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == 'w' and self._file_hashes:
            content = '\n'.join('{},{}={},{}'.format(fname, algorithm, hash_,
                                                     self._file_sizes[fname])
                                for fname, (algorithm, hash_) in self._file_hashes.items())
            content += '\n{},,\n'.format(self.record_path)
            zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
            zinfo.compress_type = ZIP_DEFLATED
            self.writestr(zinfo, as_bytes(content))

        super(WheelFile, self).close()
Code Example #28
File: test_zipimport.py Project: 3lnc/cpython
    def testZipImporterMethodsInSubDirectory(self):
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)}

        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                zinfo.comment = b"eggs"
                z.writestr(zinfo, data)
            z.close()

            zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir)
            self.assertEqual(zi.archive, TEMP_ZIP)
            self.assertEqual(zi.prefix, packdir)
            self.assertEqual(zi.is_package(TESTPACK2), True)
            mod = zi.load_module(TESTPACK2)
            self.assertEqual(zi.get_filename(TESTPACK2), mod.__file__)

            self.assertEqual(
                zi.is_package(TESTPACK2 + os.sep + '__init__'), False)
            self.assertEqual(
                zi.is_package(TESTPACK2 + os.sep + TESTMOD), False)

            pkg_path = TEMP_ZIP + os.sep + packdir + TESTPACK2
            zi2 = zipimport.zipimporter(pkg_path)
            find_mod_dotted = zi2.find_module(TESTMOD)
            self.assertIsNotNone(find_mod_dotted)
            self.assertIsInstance(find_mod_dotted, zipimport.zipimporter)
            self.assertFalse(zi2.is_package(TESTMOD))
            load_mod = find_mod_dotted.load_module(TESTMOD)
            self.assertEqual(
                find_mod_dotted.get_filename(TESTMOD), load_mod.__file__)

            mod_path = TESTPACK2 + os.sep + TESTMOD
            mod_name = module_path_to_dotted_name(mod_path)
            mod = importlib.import_module(mod_name)
            self.assertTrue(mod_name in sys.modules)
            self.assertEqual(zi.get_source(TESTPACK2), None)
            self.assertEqual(zi.get_source(mod_path), None)
            self.assertEqual(zi.get_filename(mod_path), mod.__file__)
            # To pass in the module name instead of the path, we must use the
            # right importer.
            loader = mod.__loader__
            self.assertEqual(loader.get_source(mod_name), None)
            self.assertEqual(loader.get_filename(mod_name), mod.__file__)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
Code Example #29
 def process_one(self, infile, filename):
     """ process a non-zipped file. Is this needed? """
     self.init_report()
     resultfile = Tools.make_return_file(filename)
     
     stat = os.stat(infile.fileno())
     #Create a fake zipinfo
     zi = ZipInfo(filename=filename)
     zi.file_size = stat.st_size
     data = self.process_file(infile)
     resultfile.write(data)
     self.finalize_report(resultfile)
     return report        
Code Example #30
	def write_file (self, data, filename, description = "") :
		"""Write a file into the archive
		
		:Parameters:
		 - `data` (str) - data to write
		 - `filename` (str) - name of the file in which to store data
		 - `description` (str) - textual description of the data
		"""
		info = ZipInfo(filename)
		info.comment = description
		info.date_time = localtime()[:6]
		info.external_attr = 0644 << 16L
		info.compress_type = ZIP_DEFLATED
		self._elms[filename] = (info,data)
Code Example #31
 def setUp(self):
     for filename in ["test.exe", "test.zip", "test.jar", "test.apk"]:
         zippath = os.path.join(self.tempdir, filename)
         with ZipFile(zippath, "w") as zipfile:
             zipfile.writestr(ZipInfo("test.txt"), "feedface")
Code Example #32
#!/usr/bin/env python2
import os, sys, mmap
from zipfile import ZipFile, ZipInfo

with ZipFile(sys.argv[1], "w") as z:
    for filename in sys.argv[2:]:
        with open(filename, "r+b") as f:
            mm = mmap.mmap(f.fileno(), 0)
            try:
                info = ZipInfo(os.path.basename(filename), (1980, 1, 1, 0, 0, 0))
                info.create_system = 3 # Unix
                info.external_attr = 0644 << 16L # rw-r--r--
                z.writestr(info, mm)
            finally:
                mm.close()
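
Pinning date_time to (1980, 1, 1, 0, 0, 0) as above (the earliest timestamp the ZIP format can represent) helps make the archive reproducible: identical inputs should produce byte-identical output. A small self-contained sketch (names invented) checking that property:

import hashlib
from io import BytesIO
from zipfile import ZipFile, ZipInfo

def build(payload):
    # Fixed name and timestamp, so identical payloads give identical archives.
    buf = BytesIO()
    with ZipFile(buf, "w") as z:
        info = ZipInfo("data.bin", (1980, 1, 1, 0, 0, 0))  # hypothetical entry name
        info.create_system = 3
        info.external_attr = 0o644 << 16
        z.writestr(info, payload)
    return hashlib.sha256(buf.getvalue()).hexdigest()

assert build(b"same bytes") == build(b"same bytes")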
Code Example #33
 def _add_entry(self, name: str, external_attr: int, mtime: int,
                data: Union[bytes, str]) -> None:
     info = ZipInfo(name, self._prepare_zipinfo_date_time(mtime))
     info.external_attr = external_attr
     self._set_zipinfo_compression(info)
     self._inner.writestr(info, data)
Code Example #34
File: DeltaJen.py Project: schlumbeliene/DeltaJen
 def add_to_zip(file_obj, z_file):
     """Add the file to the zip and sets the attributes."""
     zinfo = ZipInfo(file_obj['name'], file_obj['time'])
     zinfo.compress_type = z_file.compression
     zinfo.external_attr = 0o644 << 16
     z_file.writestr(zinfo, file_obj['data'])
Code Example #35
def decryptBook(userkey, inpath, outpath):
    with closing(ZipFile(open(inpath, 'rb'))) as inf:
        namelist = inf.namelist()
        if 'META-INF/rights.xml' not in namelist or \
           'META-INF/encryption.xml' not in namelist:
            print("{0:s} is DRM-free.".format(os.path.basename(inpath)))
            return 1
        for name in META_NAMES:
            namelist.remove(name)
        try:
            rights = etree.fromstring(inf.read('META-INF/rights.xml'))
            adept = lambda tag: '{%s}%s' % (NSMAP['adept'], tag)
            expr = './/%s' % (adept('encryptedKey'), )
            bookkeyelem = rights.find(expr)
            bookkey = bookkeyelem.text
            keytype = bookkeyelem.attrib.get('keyType', '0')
            if len(bookkey) >= 172 and int(keytype, 10) > 2:
                print("{0:s} is a secure Adobe Adept ePub with hardening.".
                      format(os.path.basename(inpath)))
            elif len(bookkey) == 172:
                print("{0:s} is a secure Adobe Adept ePub.".format(
                    os.path.basename(inpath)))
            elif len(bookkey) == 64:
                print("{0:s} is a secure Adobe PassHash (B&N) ePub.".format(
                    os.path.basename(inpath)))
            else:
                print("{0:s} is not an Adobe-protected ePub!".format(
                    os.path.basename(inpath)))
                return 1

            if len(bookkey) != 64:
                # Normal or "hardened" Adobe ADEPT
                rsakey = RSA.import_key(userkey)  # parses the ASN1 structure
                bookkey = base64.b64decode(bookkey)
                if int(keytype, 10) > 2:
                    bookkey = removeHardening(rights, keytype, bookkey)
                try:
                    bookkey = PKCS1_v1_5.new(rsakey).decrypt(
                        bookkey, None)  # automatically unpads
                except ValueError:
                    bookkey = None

                if bookkey is None:
                    print("Could not decrypt {0:s}. Wrong key".format(
                        os.path.basename(inpath)))
                    return 2
            else:
                # Adobe PassHash / B&N
                key = base64.b64decode(userkey)[:16]
                bookkey = base64.b64decode(bookkey)
                bookkey = unpad(
                    AES.new(key, AES.MODE_CBC, b'\x00' * 16).decrypt(bookkey),
                    16)  # PKCS#7

                if len(bookkey) > 16:
                    bookkey = bookkey[-16:]

            encryption = inf.read('META-INF/encryption.xml')
            decryptor = Decryptor(bookkey, encryption)
            kwds = dict(compression=ZIP_DEFLATED, allowZip64=False)
            with closing(ZipFile(open(outpath, 'wb'), 'w', **kwds)) as outf:

                for path in (["mimetype"] + namelist):
                    data = inf.read(path)
                    zi = ZipInfo(path)
                    zi.compress_type = ZIP_DEFLATED

                    if path == "mimetype":
                        zi.compress_type = ZIP_STORED

                    elif path == "META-INF/encryption.xml":
                        # Check if there's still something in there
                        if (decryptor.check_if_remaining()):
                            data = decryptor.get_xml()
                            print(
                                "Adding encryption.xml for the remaining embedded files."
                            )
                            # We removed DRM, but there's still stuff like obfuscated fonts.
                        else:
                            continue

                    try:
                        # get the file info, including time-stamp
                        oldzi = inf.getinfo(path)
                        # copy across useful fields
                        zi.date_time = oldzi.date_time
                        zi.comment = oldzi.comment
                        zi.extra = oldzi.extra
                        zi.internal_attr = oldzi.internal_attr
                        # external attributes are dependent on the create system, so copy both.
                        zi.external_attr = oldzi.external_attr
                        zi.create_system = oldzi.create_system
                        if any(ord(c) >= 128 for c in path) or any(
                                ord(c) >= 128 for c in zi.comment):
                            # If the file name or the comment contains any non-ASCII char, set the UTF8-flag
                            zi.flag_bits |= 0x800
                    except:
                        pass
                    if path == "META-INF/encryption.xml":
                        outf.writestr(zi, data)
                    else:
                        outf.writestr(zi, decryptor.decrypt(path, data))
        except:
            print("Could not decrypt {0:s} because of an exception:\n{1:s}".
                  format(os.path.basename(inpath), traceback.format_exc()))
            return 2
    return 0
Code Example #36
    def testZipImporterMethods(self):
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {
            packdir + "__init__" + pyc_ext: (NOW, test_pyc),
            packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
            packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc),
            "spam" + pyc_ext: (NOW, test_pyc)
        }

        self.addCleanup(os_helper.unlink, TEMP_ZIP)
        with ZipFile(TEMP_ZIP, "w") as z:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                zinfo.comment = b"spam"
                z.writestr(zinfo, data)

        zi = zipimport.zipimporter(TEMP_ZIP)
        self.assertEqual(zi.archive, TEMP_ZIP)
        self.assertTrue(zi.is_package(TESTPACK))

        # PEP 302
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)

            mod = zi.load_module(TESTPACK)
            self.assertEqual(zi.get_filename(TESTPACK), mod.__file__)

        # PEP 451
        spec = zi.find_spec('spam')
        self.assertIsNotNone(spec)
        self.assertIsInstance(spec.loader, zipimport.zipimporter)
        self.assertFalse(spec.loader.is_package('spam'))
        exec_mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(exec_mod)
        self.assertEqual(spec.loader.get_filename('spam'), exec_mod.__file__)

        spec = zi.find_spec(TESTPACK)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        self.assertEqual(zi.get_filename(TESTPACK), mod.__file__)

        existing_pack_path = importlib.import_module(TESTPACK).__path__[0]
        expected_path_path = os.path.join(TEMP_ZIP, TESTPACK)
        self.assertEqual(existing_pack_path, expected_path_path)

        self.assertFalse(zi.is_package(packdir + '__init__'))
        self.assertTrue(zi.is_package(packdir + TESTPACK2))
        self.assertFalse(zi.is_package(packdir2 + TESTMOD))

        mod_path = packdir2 + TESTMOD
        mod_name = module_path_to_dotted_name(mod_path)
        mod = importlib.import_module(mod_name)
        self.assertTrue(mod_name in sys.modules)
        self.assertIsNone(zi.get_source(TESTPACK))
        self.assertIsNone(zi.get_source(mod_path))
        self.assertEqual(zi.get_filename(mod_path), mod.__file__)
        # To pass in the module name instead of the path, we must use the
        # right importer
        loader = mod.__spec__.loader
        self.assertIsNone(loader.get_source(mod_name))
        self.assertEqual(loader.get_filename(mod_name), mod.__file__)

        # test prefix and archivepath members
        zi2 = zipimport.zipimporter(TEMP_ZIP + os.sep + TESTPACK)
        self.assertEqual(zi2.archive, TEMP_ZIP)
        self.assertEqual(zi2.prefix, TESTPACK + os.sep)
Code Example #37
    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname."""

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if self.compression == ZIP_AUTO:
            ext = os.path.splitext(filename)[1].lower()
            compression = ZIP_STORED if ext and ext[1:] in STORED_FORMATS \
                    else ZIP_DEFLATED
        else:
            compression = self.compression
        if compress_type is None:
            zinfo.compress_type = compression
        else:
            zinfo.compress_type = compress_type

        zinfo.file_size = st.st_size
        zinfo.flag_bits |= 0x08    # bit 3: CRC and sizes follow the data in a data descriptor
        zinfo.header_offset = self.tell    # Start of header bytes

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            header = zinfo.FileHeader()
            yield header
            self.tell += len(header)
            return

        fp = open(filename, "rb")
        # Must overwrite CRC and sizes with correct data later
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        zinfo.file_size = file_size = 0
        header = zinfo.FileHeader()
        yield header
        self.tell += len(header)
        if zinfo.compress_type == ZIP_DEFLATED:
            cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
        else:
            cmpr = None
        while 1:
            buf = fp.read(1024 * 8)
            if not buf:
                break
            file_size = file_size + len(buf)
            CRC = crc32(buf, CRC) & 0xffffffff
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size = compress_size + len(buf)
            yield buf
        fp.close()
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            yield buf
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        self.tell += zinfo.compress_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # write the data descriptor
        data_descriptor =  struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
              zinfo.file_size)
        yield data_descriptor
        self.tell += len(data_descriptor)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
Code Example #38
    def handle(self, *args, **options):

        if not options["filename"]:
            raise CommandError("-f filename.zip is required")

        if options["course"]:
            courses = Course.objects.filter(slug__in=options["course"])
            if not courses:
                raise CommandError("Course slug not found")
            else:
                course = courses[0]
        else:
            raise CommandError("Course slug not defined")

        if options["filename"].endswith(".zip"):
            self.filename = options["filename"]
        else:
            self.filename = "%s.zip" % options["filename"]

        h = HTMLParser()

        zip = ZipFile(self.filename, mode="w")

        self.message("Calculating course stats ... file %s.csv" % course.slug)

        course_file = StringIO.StringIO()

        course_csv = csv.writer(course_file, quoting=csv.QUOTE_ALL)

        units = Unit.objects.filter(course=course)
        kq_headers = [
            "unit_title", "kq_title", "kq_viewed", "kq_answered",
            "kq_submited", "kq_reviewed"
        ]
        course_csv.writerow(kq_headers)
        db = get_db()
        answers = db.get_collection('answers')
        activities = db.get_collection('activity')
        peer_review_submissions = db.get_collection('peer_review_submissions')
        peer_review_reviews = db.get_collection('peer_review_reviews')
        for unit in units:
            unit_title = h.unescape(unit.title.encode("ascii", "ignore"))
            kqs = KnowledgeQuantum.objects.filter(unit=unit)
            for kq in kqs:
                kq_title = h.unescape(kq.title.encode("ascii", "ignore"))
                kq_type = kq.kq_type()
                kq_answered = ''
                kq_submited = ''
                kq_reviewed = ''
                kq_viewed = activities.find({'kq_id': kq.id}).count()
                if kq_type == "Question":
                    answered = 0
                    questions = Question.objects.filter(kq=kq)
                    for question in questions:
                        answered += answers.find({
                            "question_id": question.id
                        }).count()
                    kq_answered = answered
                elif kq_type == "PeerReviewAssignment":
                    kq_submited = peer_review_submissions.find({
                        'kq': kq.id
                    }).count()
                    kq_reviewed = peer_review_reviews.find({
                        'kq': kq.id
                    }).count()
                elif kq_type == "Video":
                    pass
                else:
                    pass
                row = []
                row.append(unit_title)
                row.append(kq_title)
                row.append(kq_viewed)
                row.append(kq_answered)
                row.append(kq_submited)
                row.append(kq_reviewed)
                course_csv.writerow(row)
        course_fileinfo = ZipInfo("%s.csv" % course.slug)

        course_file.seek(0)

        zip.writestr(course_fileinfo, course_file.read())

        course_file.close()

        zip.close()

        self.message("Created %s file" % self.filename)
Code Example #39
def zipwrite(archive, filename, arcname=None):
    with open(filename, 'rb') as f:
        zi = ZipInfo(arcname or filename)
        zi.compress_type = ZIP_DEFLATED
        archive.writestr(zi, f.read())
Code Example #40
def test_subnet_connectivity(region,
                             stackid,
                             logical_resource_id,
                             physical_resource_id,
                             endpoints=[['www.amazon.com', "80"]]):
    logger.debug({"test_subnet_connectivity": "starting"})
    error_msg = []
    if region not in clients.get_available_regions('lambda'):
        msg = "Test for %s %s skipped, %s not supprted by lambda" % (
            stackid, logical_resource_id, region)
        logger.warning(msg)
        return {
            "success": True,
            "logical_resource_id": logical_resource_id,
            "physical_resource_id": physical_resource_id,
            "warning":
            "Test skipped, region %s not supprted by lambda" % region,
            "region": region,
            "stackid": stackid
        }
    try:
        function_name = 'test_subnet_%s_%s' % (physical_resource_id,
                                               random_string(8))
        iam_name = function_name.replace('_', '-')
        lambda_client = clients.get("lambda", region=region)
        ec2_client = clients.get("ec2", region=region)
        iam_role_arn = get_iam_role()
        response = ec2_client.describe_subnets(
            SubnetIds=[physical_resource_id])
        logger.debug({"test_subnet_connectivity:describe_subnets": response})
        vpc_id = response['Subnets'][0]['VpcId']
        logger.debug({"test_subnet_connectivity:vpc_id": vpc_id})
        security_group_id = ec2_client.create_security_group(
            GroupName=iam_name, Description=iam_name, VpcId=vpc_id)['GroupId']
        logger.debug(
            {"test_subnet_connectivity:security_group_id": security_group_id})
        now = datetime.now()
        zi_timestamp = (now.year, now.month, now.day, now.hour, now.minute,
                        now.second)
        zinfo = ZipInfo('lambda_function.py', zi_timestamp)
        zinfo.external_attr = 0o744 << 16  # rwxr--r--
        f = StringIO()
        z = ZipFile(f, 'w', ZIP_DEFLATED)
        z.writestr(zinfo, function_code)
        z.close()
        zip_bytes = f.getvalue()
        logger.debug({
            "test_subnet_connectivity:create_function_input": {
                "FunctionName": function_name,
                "Role": iam_role_arn,
                "Code": {
                    'ZipFile': zip_bytes
                },
                "VpcConfig": {
                    'SubnetIds': [physical_resource_id],
                    'SecurityGroupIds': [security_group_id]
                }
            }
        })
        lambda_function = False
        retries = 0
        max_retries = 4
        while not lambda_function:
            try:
                lambda_function = lambda_client.create_function(
                    FunctionName=function_name,
                    Runtime='python2.7',
                    Role=iam_role_arn,
                    Handler='lambda_function.lambda_handler',
                    Code={'ZipFile': zip_bytes},
                    Timeout=120,
                    MemorySize=128,
                    VpcConfig={
                        'SubnetIds': [physical_resource_id],
                        'SecurityGroupIds': [security_group_id]
                    })
            except botocore.exceptions.ClientError as e:
                codes = [
                    'InvalidParameterValueException', 'AccessDeniedException'
                ]
                logger.debug("boto exception: ", exc_info=1)
                logger.debug(e.response)
                if "The provided subnets contain availability zone Lambda doesn't support." in e.response[
                        'Error']['Message']:
                    raise
                if e.response['Error'][
                        'Code'] in codes and retries < max_retries:
                    logger.debug(
                        {"test_subnet_connectivity:create_function": str(e)},
                        exc_info=1)
                    msg = "role not propagated yet, sleeping a bit and then retrying"
                    logger.debug({
                        "test_subnet_connectivity:create_function_retry":
                        msg
                    })
                    retries += 1
                    sleep(10 * (retries**2))
                else:
                    raise
        for endpoint in endpoints:
            f = StringIO()
            f.write(json.dumps({"address": endpoint[0], "port": endpoint[1]}))
            payload = f.getvalue()
            f.close()
            response = lambda_client.invoke(FunctionName=function_name,
                                            InvocationType='RequestResponse',
                                            Payload=payload)
            response['Payload'] = response['Payload'].read()
            try:
                response['Payload'] = json.loads(response['Payload'])
            except Exception:
                pass
            logger.debug({"test_subnet_connectivity:response": response})

            if response[
                    'StatusCode'] != 200 or 'FunctionError' in response.keys():
                results = {
                    "success": False,
                    "logical_resource_id": logical_resource_id,
                    "physical_resource_id": physical_resource_id,
                    "region": region,
                    "stackid": stackid
                }
                error_msg.append({
                    "endpoint": endpoint,
                    "response": response['Payload']
                })
            elif response['StatusCode'] == 200 and len(error_msg) == 0:
                results = {
                    "success": True,
                    "logical_resource_id": logical_resource_id,
                    "physical_resource_id": physical_resource_id,
                    "region": region,
                    "stackid": stackid
                }
    except Exception as e:
        logger.error({"test_subnet_connectivity": str(e)}, exc_info=1)
        if "subnets contain availability zone Lambda doesn't support" in str(
                e):
            results = {
                "success": True,
                "logical_resource_id": logical_resource_id,
                "physical_resource_id": physical_resource_id,
                "region": region,
                "stackid": stackid
            }
            logger.warning(
                "test skipped as lambda is not supported in the subnet's az. %s"
                % str(results))
        else:
            results = {
                "success": False,
                "logical_resource_id": logical_resource_id,
                "physical_resource_id": physical_resource_id,
                "region": region,
                "stackid": stackid
            }
        error_msg.append({"exception": str(e)})
    finally:
        try:
            lambda_client.delete_function(FunctionName=function_name)
        except Exception:
            logger.warning("Failed to cleanup lambda function", exc_info=1)
        try:
            logger.debug({
                "test_subnet_connectivity:security_group_id":
                security_group_id
            })
            enis = ec2_client.describe_network_interfaces(
                Filters=[{
                    'Name': 'group-id',
                    'Values': [security_group_id]
                }])
            for eni in enis['NetworkInterfaces']:
                if 'Attachment' in eni.keys():
                    logger.debug("Detaching ENI...")
                    ec2_client.detach_network_interface(
                        AttachmentId=eni['Attachment']['AttachmentId'])
                    while 'Attachment' in ec2_client.describe_network_interfaces(
                            NetworkInterfaceIds=[eni['NetworkInterfaceId']]
                    )['NetworkInterfaces'][0].keys():
                        logger.debug(
                            "eni still attached, waiting 5 seconds...")
                        sleep(5)
                logger.debug("Deleting ENI %s" % eni['NetworkInterfaceId'])
                ec2_client.delete_network_interface(
                    NetworkInterfaceId=eni['NetworkInterfaceId'])
            sg = False
            retries = 0
            max_retries = 3
            while not sg:
                try:
                    sg = ec2_client.delete_security_group(
                        GroupId=security_group_id)
                except botocore.exceptions.ClientError as e:
                    msg = "has a dependent object"
                    dep_violation = e.response['Error'][
                        'Code'] == 'DependencyViolation'
                    logger.debug("boto exception: ", exc_info=1)
                    if dep_violation and msg in str(
                            e) and retries < max_retries:
                        msg = "eni deletion not propagated yet, sleeping a bit and then retrying"
                        logger.debug({
                            "test_subnet_connectivity:delete_sg_retry":
                            security_group_id
                        })
                        retries += 1
                        sleep(5 * (retries**2))
                    else:
                        raise
            logger.debug({
                "test_subnet_connectivity:security_group_id_response":
                response
            })
        except Exception:
            logger.warning("Failed to cleanup security group", exc_info=1)
        if len(error_msg) > 0:
            results["error_msg"] = error_msg
        return results
Code Example #41
    def copy(self, source: PathLike, target: PathLike) -> bool:
        """Copy file within ZipFile.

        Args:
            source: String or :term:`path-like object`, that points to a file in
                the directory structure of the ZipFile. If the file does not
                exist, a FileNotFoundError is raised. If the filepath points to
                a directory, an IsADirectoryError is raised.
            target: String or :term:`path-like object`, that points to a new
                filename or an existing directory in the directory structure of
                the ZipFile. If the target is a directory the target file
                consists of the directory and the basename of the source file.
                If the target file already exists a FileExistsError is raised.

        Returns:
            Boolean value which is True if the file was copied.

        """
        # Check if source file exists and is not a directory
        src_file = PurePath(source).as_posix()
        src_infos = self._locate(source)
        if not src_infos:
            raise FileNotFoundError(f"file '{src_file}' does not exist")
        src_info = src_infos[-1]
        if getattr(src_info, 'is_dir')():
            raise IsADirectoryError(f"'{src_file}/' is a directory not a file")

        # If target is a directory get name of target file from
        # source filename
        tgt_file = PurePath(target).as_posix()
        if tgt_file == '.':
            tgt_file = Path(src_file).name
        else:
            tgt_infos = self._locate(target)
            if tgt_infos:
                if getattr(tgt_infos[-1], 'is_dir')():
                    tgt_path = PurePath(tgt_file, Path(src_file).name)
                    tgt_file = tgt_path.as_posix()

        # Check if target file already exists
        if self._locate(tgt_file):
            raise FileExistsError(f"file '{tgt_file}' already exist.")

        # Read binary data from source file
        data = self._file.read(src_info, pwd=self._pwd)

        # Create ZipInfo for target file from source file info
        tgt_time = getattr(src_info, 'date_time')
        tgt_info = ZipInfo(filename=tgt_file,
                           date_time=tgt_time)  # type: ignore

        # Write binary data to target file
        # TODO ([email protected]): The zipfile standard module currently
        # does not support encryption in write mode. See:
        # https://docs.python.org/3/library/zipfile.html
        # When support is provided, the below line shall be replaced by:
        # self._file.writestr(tgt_info, data, pwd=self._pwd)
        self._file.writestr(tgt_info, data)
        self._changed = True

        # Check if new file exists
        return bool(self._locate(tgt_file))
Code Example #42
import boto3
from base64 import b64encode
from hashlib import sha256
from io import BytesIO, StringIO
from re import compile as re_compile
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

BUCKET_NAME = "dist-gov"

CFN_PREFIX = "cfn-templates/"
ZIP_PREFIX = ""

# Create a ZIP archive for Lambda.
archive_bytes_io = BytesIO()
with ZipFile(archive_bytes_io, mode="w", compression=ZIP_DEFLATED) as zf:
    zi = ZipInfo("ebs_snapshot_manager.py")
    zi.compress_type = ZIP_DEFLATED
    zi.create_system = 3  # Unix
    zi.external_attr = 0o775 << 16  # rwxrwxr-x
    with open("ebs_snapshot_manager.py", mode="rb") as src_file:
        zf.writestr(zi, src_file.read())

# Compute the SHA 256 value of the file we'll use.
archive_bytes = archive_bytes_io.getvalue()
digest = sha256(archive_bytes).hexdigest()
assert isinstance(digest, str)
zip_obj_name = ZIP_PREFIX + "ebs_snapshot_manager.zip.%s" % digest

# Upload the archive to our CFN endpoint.
s3 = boto3.resource("s3")
s3_obj = s3.Object(BUCKET_NAME, zip_obj_name)
Code Example #43
File: renderers.py Project: JonnoFTW/webcan
    def __call__(self, data, system):
        self._set_ct(system)
        writer = shapefile.Writer(shapeType=shapefile.POINT)
        writer.autoBalance = 1
        # iterate through the entire dataset and extract the field names
        headers = {}

        pattern = re.compile(r'[\W_]+')
        old_headers = {}

        def fix_field(k: str):
            remove = [
                'PID_TESLA',
                'PID_',
                'BUSTECH_',
                'OUTLANDER_',
                'FMS_',
            ]
            pieces = k.split(' ')
            first = pieces[0]
            if len(pieces) == 1:
                return k
            for r in remove:
                k = k.replace(r, '')
            k = pattern.sub('', k)
            if len(k) > 15:
                # compact underscores
                first = ''.join(f[0] for f in first.split('_'))
                k = first + pieces[1]

            return pattern.sub('', k)

        for row in data:
            row['date'] = row['timestamp'].date()
            row['time'] = str(row['timestamp'].time())
            del row['timestamp']
            headers.update({
                fix_field(k): type(v)
                for k, v in row.items() if v is not None
            })
            old_headers.update(
                {k: fix_field(k)
                 for k in row.keys() if row[k] is not None})

        del headers['pos']
        del old_headers['pos']
        # print("HEADERS: {}".format(headers))
        writer.field('latitude', 'N', '32', 15)
        writer.field('longitude', 'N', '32', 15)
        header_map = {
            int: ('N', ),
            float: ('N', 7, 3),
            str: ('C', 30),
            date: ('D', ),
            time: ('T', ),
            int64.Int64: ('N', )
        }
        for h, t in headers.items():
            writer.field(h, *header_map[t])

        def check_field(f, r):
            if f[0] in r:
                return r[f[0]]
            else:
                return ""

        for row in data:
            row = {fix_field(k): v for k, v in row.items()}
            if row.get('pos') is None:
                continue
            if 'pos' in row and row['pos'] is not None:
                row['latitude'] = row['pos']['coordinates'][1]
                row['longitude'] = row['pos']['coordinates'][0]
                del row['pos']

            writer.point(row['longitude'], row['latitude'])
            writer.record(*[check_field(f, row) for f in writer.fields])
        dbfout = BytesIO()
        shpout = BytesIO()
        shxout = BytesIO()
        README = StringIO()
        README.write("Field names are:\n" +
                     ("\n".join(f"{_h} -> {_d}"
                                for _h, _d in old_headers.items())))
        writer.save(shp=shpout, dbf=dbfout, shx=shxout)
        zipout = BytesIO()
        now = datetime.now()
        date_str = 'webcan_export_{}'.format(now.strftime('%Y%m%d_%H%M%S'))
        with ZipFile(zipout, 'w', ZIP_DEFLATED) as myzip:
            for k, v in [('shp', shpout), ('shx', shxout), ('dbf', dbfout)]:
                myzip.writestr(
                    ZipInfo(f'{date_str}.{k}', date_time=now.timetuple()),
                    v.getvalue())
            myzip.writestr(ZipInfo('README.txt', date_time=now.timetuple()),
                           README.getvalue())
        return zipout.getvalue()