Example #1
    def _validate_downloaded_file(self, filepath, md5sum):
        """ Validate the md5 checksum of a file and check downloaded zip
        file CRC

        :param str filepath: the absolute path of the file
        :param str md5sum: the md5 checksum it should have
        :raise InvalidCheckSumException: if the calculated md5 checksum
            does not match the one in the arguments
        :raise zipfile.BadZipfile: if the zip file CRC is not valid
        """
        # First check md5sum
        if md5sum:
            with open(filepath, 'rb') as fp:
                md5digest = hashlib.md5(fp.read()).hexdigest()

            self._log_debug('Verifying md5 checksum for %s. Expecting %s - found %s',
                            (filepath, md5sum, md5digest))

            if md5sum != md5digest:
                raise InvalidCheckSumException(
                    'File {} md5 checksum does not match {}'.format(filepath, md5sum))

        # Then check zip file
        with zipfile.ZipFile(filepath) as zip_fd:
            if zip_fd.testzip():
                raise zipfile.BadZipfile('Bad CRC on zipfile {}'.format(filepath))
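
A standalone sketch of the same two checks, hashing the download in chunks so a large file is not read into memory at once; the function name, exception type and chunk size here are illustrative rather than part of the original class:

import hashlib
import zipfile


def verify_download(filepath, md5sum=None, chunk_size=1 << 20):
    """Verify an optional md5 checksum, then the zip CRCs, as in the method above."""
    if md5sum:
        digest = hashlib.md5()
        with open(filepath, 'rb') as fp:
            # Feed the file to the hash in chunks instead of a single read().
            for chunk in iter(lambda: fp.read(chunk_size), b''):
                digest.update(chunk)
        if digest.hexdigest() != md5sum:
            raise ValueError('File {} md5 checksum does not match {}'.format(filepath, md5sum))
    with zipfile.ZipFile(filepath) as zip_fd:
        # testzip() returns the first bad member name, or None if every CRC passes.
        if zip_fd.testzip() is not None:
            raise zipfile.BadZipfile('Bad CRC on zipfile {}'.format(filepath))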
Example #2
def validate_zip(path_to_zip, raise_on_fail=True):
    """
    validates a zip file
    :param path_to_zip:
    :param raise_on_fail:
    :return:
    """
    log.debug('validate_zip: zip={}'.format(path_to_zip))
    zip_ref = zipfile.ZipFile(path_to_zip, "r", zipfile.ZIP_DEFLATED)
    try:
        ret = zip_ref.testzip()
    except Exception as exc:
        log.error('Exception validating zip file: zip_file={} exc={}'.format(
            path_to_zip, exc))
        if raise_on_fail:
            raise
        return False
    else:
        if ret is not None:
            log.error('zip_file validation failed: zip_file={} ret={}'.format(
                path_to_zip, ret))
            if raise_on_fail:
                raise zipfile.BadZipfile(
                    'zip validation failed for zip file {} on {}'.format(
                        path_to_zip, ret))
            return False
    finally:
        zip_ref.close()
    return True
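
A minimal usage sketch for validate_zip above, assuming a module-level log created with the standard logging package; the archive path is illustrative:

import logging
import zipfile

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)

# Non-raising mode: a corrupt archive yields False instead of an exception.
if not validate_zip('downloads/archive.zip', raise_on_fail=False):
    log.warning('archive.zip failed validation')

# Default mode: a failed CRC check raises zipfile.BadZipfile.
try:
    validate_zip('downloads/archive.zip')
except zipfile.BadZipfile as exc:
    log.error('bad archive: %s', exc)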
Example #3
def open_zip(path_or_file: Union[str, Any], *args,
             **kwargs) -> Iterator[zipfile.ZipFile]:
    """A with-context for zip files.

    Passes through *args and **kwargs to zipfile.ZipFile.

    :API: public

    :param path_or_file: Full path to zip file.
    :param args: Any extra args accepted by `zipfile.ZipFile`.
    :param kwargs: Any extra keyword args accepted by `zipfile.ZipFile`.
    :raises: `InvalidZipPath` if path_or_file is invalid.
    :raises: `zipfile.BadZipfile` if zipfile.ZipFile cannot open a zip at path_or_file.
    """
    if not path_or_file:
        raise InvalidZipPath(f"Invalid zip location: {path_or_file}")
    if "allowZip64" not in kwargs:
        kwargs["allowZip64"] = True
    try:
        zf = zipfile.ZipFile(path_or_file, *args, **kwargs)
    except zipfile.BadZipfile as bze:
        # Use the realpath in order to follow symlinks back to the problem source file.
        raise zipfile.BadZipfile(
            f"Bad Zipfile {os.path.realpath(path_or_file)}: {bze}")
    try:
        yield zf
    finally:
        zf.close()
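
As written, open_zip is a generator, so the original module presumably applies contextlib.contextmanager to it (the decorator is not visible in this snippet). A usage sketch under that assumption, with an illustrative archive path:

import contextlib

# Hypothetical wrapping; in the real project the decorator would already be applied.
open_zip_cm = contextlib.contextmanager(open_zip)

with open_zip_cm('build/artifact.zip', 'r') as zf:
    print(zf.namelist())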
Example #4
    def test_should_raise_if_zipfile_raised_BadZipfile(self, zipfile_mock,
                                                       tmpfile):
        zipfile_mock.side_effect = zipfile.BadZipfile()
        dp = datapackage.DataPackage({}, {})

        with pytest.raises(datapackage.exceptions.DataPackageException):
            dp.save(tmpfile)
Example #5
    def copiaDemoDB(self):
        path = AutomagicallyUpdater._getPathToDb()
        # copy the test database to the directory chosen by the user
        dbpath = QFileDialog.getSaveFileName(
            self, u"Salva il DB dimostrativo", path if path != None else "",
            "Sqlite DB (*.sqlite *.db3);;Tutti i file (*)")
        if dbpath.isEmpty():
            return

        import os.path, zipfile
        demoZip = os.path.join(os.path.dirname(__file__), "docs", "demo.zip")

        try:
            zf = zipfile.ZipFile(unicode(demoZip))
            if len(zf.namelist()) <= 0:
                raise zipfile.BadZipfile("no files in the archive")

            outfile = open(unicode(dbpath), 'wb')
            try:
                outfile.write(zf.read(zf.namelist()[0]))
            finally:
                outfile.close()

        except (IOError, zipfile.BadZipfile), e:
            QMessageBox.critical(
                self, u"Errore",
                u"Impossibile estrarre dall'archivio contenente il DB dimostrativo.\n\nError message: %s"
                % unicode(str(e), 'utf8'))
            return self.reject()
Example #6
 def clean_source(self):
     source = self.cleaned_data.get('source')
     if source:
         try:
             if source.name.endswith('.zip'):
                 zip_file = SafeZip(source)
                 # testzip() returns None if there are no broken CRCs.
                 if zip_file.zip_file.testzip() is not None:
                     raise zipfile.BadZipfile()
             elif source.name.endswith(('.tar.gz', '.tar.bz2')):
                 # For tar files we need to do a little more work.
                 # Fortunately tarfile.open() already handles compression
                 # formats for us automatically.
                 with tarfile.open(fileobj=source) as archive:
                     archive_members = archive.getmembers()
                     for member in archive_members:
                         archive_member_validator(archive, member)
             else:
                 valid_extensions_string = u'(%s)' % u', '.join(
                     VALID_SOURCE_EXTENSIONS)
                 raise forms.ValidationError(
                     ugettext(
                         'Unsupported file type, please upload an archive '
                         'file {extensions}.'.format(
                             extensions=valid_extensions_string)))
         except (zipfile.BadZipfile, tarfile.ReadError, IOError):
             raise forms.ValidationError(
                 ugettext('Invalid or broken archive.'))
     return source
Example #7
def open_zip(path_or_file, *args, **kwargs):
  """A with-context for zip files.

  Passes through *args and **kwargs to zipfile.ZipFile.

  :API: public

  :param path_or_file: Full path to zip file.
  :param args: Any extra args accepted by `zipfile.ZipFile`.
  :param kwargs: Any extra keyword args accepted by `zipfile.ZipFile`.
  :raises: `InvalidZipPath` if path_or_file is invalid.
  :raises: `zipfile.BadZipfile` if zipfile.ZipFile cannot open a zip at path_or_file.
  :returns: `class 'contextlib.GeneratorContextManager`.
  """
  if not path_or_file:
    raise InvalidZipPath('Invalid zip location: {}'.format(path_or_file))
  allowZip64 = kwargs.pop('allowZip64', True)
  try:
    zf = zipfile.ZipFile(path_or_file, *args, allowZip64=allowZip64, **kwargs)
  except zipfile.BadZipfile as bze:
    # Use the realpath in order to follow symlinks back to the problem source file.
    raise zipfile.BadZipfile("Bad Zipfile {0}: {1}".format(os.path.realpath(path_or_file), bze))
  try:
    yield zf
  finally:
    zf.close()
Example #8
def unpack_worksheet(archive, filename):

    temp_file = tempfile.TemporaryFile(mode='r+', prefix='openpyxl.', suffix='.unpack.temp')

    zinfo = archive.getinfo(filename)

    if zinfo.compress_type == zipfile.ZIP_STORED:
        decoder = None
    elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
        decoder = zlib.decompressobj(-zlib.MAX_WBITS)
    else:
        raise zipfile.BadZipfile("Unrecognized compression method")

    archive.fp.seek(_get_file_offset(archive, zinfo))
    bytes_to_read = zinfo.compress_size

    while True:
        buff = archive.fp.read(min(bytes_to_read, 102400))
        if not buff:
            break
        bytes_to_read -= len(buff)
        if decoder:
            buff = decoder.decompress(buff)
        temp_file.write(buff)

    if decoder:
        temp_file.write(decoder.decompress('Z'))

    return temp_file
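
The helper above reaches into zipfile internals (archive.fp and the raw deflate stream) and was written for Python 2 byte strings. A sketch of an equivalent that streams one member to a temporary file through the public ZipFile.open() API, which decompresses ZIP_STORED and ZIP_DEFLATED members transparently; the helper name and chunk size are illustrative:

import tempfile
import zipfile


def unpack_member(archive, filename, chunk_size=102400):
    # archive is an already-open zipfile.ZipFile; ZipFile.open() handles decompression.
    temp_file = tempfile.TemporaryFile(mode='w+b', prefix='openpyxl.', suffix='.unpack.temp')
    with archive.open(filename) as source:
        while True:
            buff = source.read(chunk_size)
            if not buff:
                break
            temp_file.write(buff)
    temp_file.seek(0)
    return temp_file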
Example #9
 def __init__(self,
              package_fname,
              ignore_dep=None,
              ignore_file_conflicts=None):
     if not ctx.filesdb:
         ctx.filesdb = inary.db.filesdb.FilesDB()
     "initialize from a file name"
     super(Install, self).__init__(ignore_dep)
     if not ignore_file_conflicts:
         ignore_file_conflicts = ctx.config.get_option(
             'ignore_file_conflicts')
     self.ignore_file_conflicts = ignore_file_conflicts
     self.package_fname = package_fname
     try:
         self.package = inary.package.Package(package_fname)
         self.package.read()
     except zipfile.BadZipfile:
         raise zipfile.BadZipfile(self.package_fname)
     self.metadata = self.package.metadata
     self.files = self.package.files
     self.pkginfo = self.metadata.package
     self.installedSize = self.metadata.package.installedSize
     self.installdb = inary.db.installdb.InstallDB()
     self.operation = INSTALL
     self.store_old_paths = None
     self.old_path = None
     self.trigger = inary.trigger.Trigger()
     self.ask_reinstall = False
Example #10
def _read_central_directory_offsets(zfile):
  """Read in the table of contents for the ZIP file."""

  fp = zfile.fp
  try:
    endrec = zipfile._EndRecData(fp)
  except IOError:
    raise zipfile.BadZipfile("File is not a zip file")
  if not endrec:
    raise zipfile.BadZipfile("File is not a zip file")
  size_cd = endrec[_ECD_SIZE]             # bytes in central directory
  offset_cd = endrec[_ECD_OFFSET]         # offset of central directory

  # "concat" is zero, unless zip was concatenated to another file
  concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
  if endrec[_ECD_SIGNATURE] == stringEndArchive64:
    # If Zip64 extension structures are present, account for them
    concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

  # Go to start of central directory
  fp.seek(offset_cd + concat, 0)
  data = fp.read(size_cd)
  fp = cStringIO.StringIO(data)
  total = 0
  offsets = []
  while total < size_cd:
    # Tell gives us the offset inside the CD. We want the offset relative
    # to the beginning of file.
    offsets.append(fp.tell() + offset_cd + concat)
    centdir = fp.read(sizeCentralDir)
    if len(centdir) != sizeCentralDir:
      raise zipfile.BadZipfile("Truncated central directory")
    centdir = struct.unpack(structCentralDir, centdir)
    if centdir[_CD_SIGNATURE] != stringCentralDir:
      raise zipfile.BadZipfile("Bad magic number for central directory")

    # Skip everything else
    fp.seek(centdir[_CD_FILENAME_LENGTH]
            + centdir[_CD_EXTRA_FIELD_LENGTH]
            + centdir[_CD_COMMENT_LENGTH], 1)

    # update total bytes read from central directory
    total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
             + centdir[_CD_EXTRA_FIELD_LENGTH]
             + centdir[_CD_COMMENT_LENGTH])

  return offsets
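
The function above relies on zipfile's private helpers and Python 2's cStringIO to compute the file offsets of the central-directory records by parsing them manually. On Python 3 a related set of offsets, the local file header positions, is available through the public API without touching zipfile internals; a minimal sketch:

import zipfile


def read_member_header_offsets(path):
    # ZipFile parses the central directory on open; each ZipInfo records the
    # position of its member's local file header in ZipInfo.header_offset.
    with zipfile.ZipFile(path) as zf:
        return [info.header_offset for info in zf.infolist()]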
Example #11
def unzip_external(request, directory, django_file, overwrite):
    # Make a temporary directory to hold a zip file and its unzipped contents
    from pootle_misc import ptempfile as tempfile
    tempdir = tempfile.mkdtemp(prefix='pootle')
    # Make a temporary file to hold the zip file
    tempzipfd, tempzipname = tempfile.mkstemp(prefix='pootle', suffix='.zip')
    try:
        # Dump the uploaded file to the temporary file
        try:
            os.write(tempzipfd, django_file.read())
        finally:
            os.close(tempzipfd)
        # Unzip the temporary zip file
        import subprocess
        if subprocess.call(["unzip", tempzipname, "-d", tempdir]):
            import zipfile
            raise zipfile.BadZipfile(_("Error while extracting archive"))
        # Enumerate the temporary directory...
        maybe_skip = True
        prefix = tempdir
        for basedir, dirs, files in os.walk(tempdir):
            if maybe_skip and not files and len(dirs) == 1:
                try:
                    directory.child_dirs.get(name=dirs[0])
                    maybe_skip = False
                except Directory.DoesNotExist:
                    prefix = os.path.join(basedir, dirs[0])
                    continue
            else:
                maybe_skip = False

            for fname in files:
                # Read the contents of a file...
                fcontents = open(os.path.join(basedir, fname), 'rb').read()
                newfile = StringIO.StringIO(fcontents)
                newfile.name = os.path.basename(fname)
                # Get the filesystem path relative to the temporary directory
                subdir = host_to_unix_path(basedir[len(prefix) + len(os.sep):])
                if subdir:
                    target_dir = directory.get_or_make_subdir(subdir)
                else:
                    target_dir = directory
                # Construct a full UNIX path relative to the current
                # translation project URL by attaching a UNIXified
                # 'relative_host_dir' to the root relative path
                # (i.e. the path from which the user is uploading the
                # ZIP file).
                try:
                    upload_file(request, target_dir, newfile, overwrite)
                except ValueError, e:
                    logging.error(u"Error adding %s\t%s", fname, e)
    finally:
        # Clean up temporary file and directory used in try-block
        import shutil
        os.unlink(tempzipname)
        shutil.rmtree(tempdir)
Example #12
    def readfile(self, name):
        """
        Return file-like object for name.
        """
        if self.mode not in ("r", "a"):
            raise RuntimeError('read() requires mode "r" or "a"')
        if not self.fp:
            raise RuntimeError(
                "Attempt to read ZIP archive that was already closed")
        zinfo = self.getinfo(name)

        self.fp.seek(zinfo.header_offset, 0)

        fheader = self.fp.read(_fileHeaderSize)
        if fheader[0:4] != zipfile.stringFileHeader:
            raise zipfile.BadZipfile("Bad magic number for file header")

        fheader = struct.unpack(zipfile.structFileHeader, fheader)
        fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])

        if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
            self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])


        if zinfo.flag_bits & 0x800:
            # UTF-8 filename
            fname_str = fname.decode("utf-8")
        else:
            fname_str = fname.decode("cp437")

        if fname_str != zinfo.orig_filename:
            raise zipfile.BadZipfile(
                'File name in directory "%s" and header "%s" differ.' % (
                    zinfo.orig_filename, fname_str))

        if zinfo.compress_type == zipfile.ZIP_STORED:
            return ZipFileEntry(self, zinfo.compress_size)
        elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
            return DeflatedZipFileEntry(self, zinfo.compress_size)
        else:
            raise zipfile.BadZipfile(
                "Unsupported compression method %d for file %s" %
                    (zinfo.compress_type, name))
Example #13
    def test_close_on_exception(self):
        """Check that the zipfile is closed if an exception is raised in the
        'with' block."""
        with zipfile.ZipFile(TESTFN2, "w") as zipfp:
            for fpath, fdata in SMALL_TEST_DATA:
                zipfp.writestr(fpath, fdata)

        try:
            with zipfile.ZipFile(TESTFN2, "r") as zipfp2:
                raise zipfile.BadZipfile()
        except zipfile.BadZipfile:
            self.assertTrue(zipfp2.fp is None, 'zipfp is not closed')
Example #14
    def test_refresh_archive_badzip_error(self, mock_io):
        mock_response = mock.Mock(headers={'content-type': 'application/zip'})
        mock_io.side_effect = zipfile.BadZipfile('Something wrong')
        self.conn.get.return_value = mock_response

        self.assertRaises(
            exceptions.SushyError,
            BaseResource,
            connector=self.conn,
            path='/Foo',
            redfish_version='1.0.2',
            reader=resource_base.JsonArchiveReader('Test.2.0.json'))
Example #15
def open_zip(path_or_file, *args, **kwargs):
  """
    A with-context for zip files.  Passes through positional and kwargs to zipfile.ZipFile.
  """
  try:
    zf = zipfile.ZipFile(path_or_file, *args, **kwargs)
  except zipfile.BadZipfile as bze:
    raise zipfile.BadZipfile("Bad Zipfile %s: %s" % (path_or_file, bze))
  try:
    yield zf
  finally:
    zf.close()
Example #16
def open_zip(path_or_file, *args, **kwargs):
  """
    A with-context for zip files.  Passes through positional and kwargs to zipfile.ZipFile.
  """
  try:
    allowZip64 = kwargs.pop('allowZip64', True)
    zf = zipfile.ZipFile(path_or_file, *args, allowZip64=allowZip64, **kwargs)
  except zipfile.BadZipfile as bze:
    raise zipfile.BadZipfile("Bad Zipfile {0}: {1}".format(path_or_file, bze))
  try:
    yield zf
  finally:
    zf.close()
Example #17
 def __init__(self, package_fname, ignore_dep = None, ignore_file_conflicts = None):
     "initialize from a file name"
     super(Install, self).__init__(ignore_dep)
     if not ignore_file_conflicts:
         ignore_file_conflicts = ctx.get_option('ignore_file_conflicts')
     self.ignore_file_conflicts = ignore_file_conflicts
     self.package_fname = package_fname
     try:
         self.package = pisi.package.Package(package_fname)
         self.package.read()
     except zipfile.BadZipfile:
         raise zipfile.BadZipfile(self.package_fname)
     self.metadata = self.package.metadata
     self.files = self.package.files
     self.pkginfo = self.metadata.package
     self.filesdb = pisi.db.filesdb.FilesDB()
     self.installdb = pisi.db.installdb.InstallDB()
     self.operation = INSTALL
     self.automatic = False
Example #18
    def runScriptsAndFill(self, pathToScripts, conn):
        try:
            # the scripts are inside a zip archive
            zf = zipfile.ZipFile(unicode(pathToScripts))
            if len(zf.namelist()) <= 0:
                raise zipfile.BadZipfile("no files in the archive")

            count = len(zf.namelist()) + 2
            index = 0
            for name in sorted(zf.namelist()):
                f = zf.open(zf.getinfo(name), 'r')
                if not f:
                    raise zipfile.BadZipfile(
                        u"unable to extract %s, the file may be broken" % name)

                # run the sql script file
                index += 1
                self.emit(
                    SIGNAL("resetProgress"), None,
                    "(%d/%d) Esecuzione dello script file %s..." %
                    (index, count, name))

                query = conn.getQuery(False)  # disable autocommit
                try:
                    if not query.executeScript(unicode(f.read(), 'utf8')):
                        raise SqlException(
                            u"Errore durante l'esecuzione del file %s: \n%s" %
                            (name, query.lastError().text()))
                    conn.commit()
                except:
                    conn.rollback()
                    raise
                finally:
                    f.close()

                # add geometry columns after the table creation
                if QString(name).startsWith("01"):
                    index += 1
                    self.emit(
                        SIGNAL("resetProgress"), len(self.geomTables),
                        "(%d/%d) Creazione colonne geometriche..." %
                        (index, count))

                    for tbl, geom in self.geomTables.iteritems():
                        geomcol, srid, geomtype, dim = geom[:4]
                        query = conn.getQuery(False)
                        sql = u"SELECT AddGeometryColumn('%s', '%s', %d, '%s', '%s')" % (
                            tbl, geomcol, srid, geomtype, dim)
                        if not query.exec_(sql):
                            raise SqlException(
                                u"Impossibile aggiungere una colonna geometrica alla tabella %s:\n%s"
                                % (tbl, query.lastError().text()))
                        self.emit(SIGNAL("updateProgress"))

                # populate geometry tables before create triggers
                if QString(name).startsWith("03"):
                    index += 1
                    self.emit(
                        SIGNAL("resetProgress"), None,
                        "(%d/%d) Riempimento tabelle geometriche..." %
                        (index, count))

                    if not self.populateGeometryTables(conn):
                        return False

            conn.commit()

            # init ZZ_COMUNI and ZZ_DISCLAIMER with data related to selected Municipio
            if not self.initZZ_COMUNI(conn):
                return False

            # set ZZ_DISCLAIMER values
            if not self.initZZ_DISCLAIMER(conn):
                return False

            conn.commit()

        except (IOError, KeyError, zipfile.BadZipfile), e:
            self.emit(
                SIGNAL("messageSent"), 2,
                u"Impossibile estrarre dall'archivio contenente gli scripts.\n\nError message: %s"
                % e.message)
            return False
Example #19
def add(priority, name, code=None):
    """
    Adds a dependency to the loader

    :param priority:
        A two-digit string. If a dep has no dependencies, this can be
        something like '01'. If it does, use something like '10' leaving
        space for other entries

    :param name:
        The name of the dependency as a unicode string

    :param code:
        Any special loader code, otherwise the default will be used
    """

    if not code:
        code = _default_loader(name)

    loader_filename = '%s-%s.py' % (priority, name)

    just_created_loader = False

    loader_metadata = {
        "version": "1.0.0",
        "sublime_text": "*",
        # Tie the loader to the platform so we can detect
        # people syncing packages incorrectly.
        "platforms": [sublime.platform()],
        "url": "https://github.com/wbond/package_control/issues",
        "description": "Package Control dependency loader"
    }
    loader_metadata_enc = json.dumps(loader_metadata).encode('utf-8')

    if sys.version_info < (3, ):
        if not path.exists(loader_package_path):
            just_created_loader = True
            os.mkdir(loader_package_path, 0o755)
            with open(
                    path.join(loader_package_path, 'dependency-metadata.json'),
                    'wb') as f:
                f.write(loader_metadata_enc)

        loader_path = path.join(loader_package_path, loader_filename)
        with open(loader_path, 'wb') as f:
            f.write(code.encode('utf-8'))

    else:
        # Make sure Python doesn't use the old file listing for the loader
        # when trying to import modules
        if loader_package_path in zipimport._zip_directory_cache:
            del zipimport._zip_directory_cache[loader_package_path]

        try:
            loader_lock.acquire()

            # If a swap of the loader .sublime-package was queued because of a
            # file being removed, we need to add the new loader code to the
            # .sublime-package that will be swapped into place shortly.
            if swap_event.in_process() and os.path.exists(
                    new_loader_package_path):
                package_to_update = new_loader_package_path
            else:
                package_to_update = loader_package_path

            mode = 'w'
            just_created_loader = True

            # Only append if the file exists and is a valid zip file
            if os.path.exists(package_to_update):
                # Even if the loader was invalid, it still won't show up as a
                # "new" file via filesystem notifications, so we have to
                # manually load the code.
                just_created_loader = False
                try:
                    with zipfile.ZipFile(package_to_update, 'r') as rz:
                        # Make sure the zip file can be read
                        res = rz.testzip()
                        if res is not None:
                            raise zipfile.BadZipfile('zip test failed')
                        mode = 'a'
                except (zipfile.BadZipfile, OSError):
                    os.unlink(package_to_update)

            with zipfile.ZipFile(package_to_update, mode) as z:
                if mode == 'w':
                    z.writestr('dependency-metadata.json', loader_metadata_enc)
                z.writestr(loader_filename, code.encode('utf-8'))
                __update_loaders(z)

        finally:
            loader_lock.release()

        if not just_created_loader and not swap_event.in_process():
            # Manually execute the loader code because Sublime Text does not
            # detect changes to the zip archive, only if the file is new.
            importer = zipimport.zipimporter(loader_package_path)
            importer.load_module(loader_filename[0:-3])

    # Clean things up for people who were tracking the master branch
    if just_created_loader:
        old_loader_sp = path.join(installed_packages_dir,
                                  '0-package_control_loader.sublime-package')
        old_loader_dir = path.join(packages_dir, '0-package_control_loader')

        removed_old_loader = False

        if path.exists(old_loader_sp):
            removed_old_loader = True
            os.remove(old_loader_sp)

        if path.exists(old_loader_dir):
            removed_old_loader = True
            try:
                shutil.rmtree(old_loader_dir)
            except (OSError):
                open(os.path.join(old_loader_dir, 'package-control.cleanup'),
                     'w').close()

        if removed_old_loader:
            console_write(u'''
                Cleaning up remnants of old loaders
                ''')

            pc_settings = sublime.load_settings(pc_settings_filename())
            orig_installed_packages = load_list_setting(
                pc_settings, 'installed_packages')
            installed_packages = list(orig_installed_packages)

            if '0-package_control_loader' in installed_packages:
                installed_packages.remove('0-package_control_loader')

            for name in ['bz2', 'ssl-linux', 'ssl-windows']:
                dep_dir = path.join(packages_dir, name)
                if path.exists(dep_dir):
                    try:
                        shutil.rmtree(dep_dir)
                    except (OSError):
                        open(os.path.join(dep_dir, 'package-control.cleanup'),
                             'w').close()
                if name in installed_packages:
                    installed_packages.remove(name)

            save_list_setting(pc_settings, pc_settings_filename(),
                              'installed_packages', installed_packages,
                              orig_installed_packages)
Example #20
def mainRoutine():
    """
    Main routine
    """
    cmdargs = getCmdargs()
    zipfilelist = []
    if cmdargs.zipfile is not None:
        zipfilelist.extend(cmdargs.zipfile)
    if cmdargs.zipfilelist is not None:
        zipfilelist.extend(
            [line.strip() for line in open(cmdargs.zipfilelist)])

    numZipfiles = len(zipfilelist)
    if cmdargs.md5esa is not None and numZipfiles != 1:
        print(
            "Can only use --md5esa with single zipfiles, but {} zipfiles were supplied"
            .format(numZipfiles),
            file=sys.stderr)
        sys.exit(1)

    filesWithErrors = []

    # Process each zipfile in the list
    for zipfilename in zipfilelist:
        (ok, msg) = checkZipfileName(zipfilename)

        if cmdargs.exitonziperror:
            try:
                zf = zipfile.ZipFile(zipfilename)
                zipcheck = zf.testzip()
                if zipcheck is not None:
                    raise zipfile.BadZipfile(
                        "Zipfile {} failed internal checks".format(
                            zipfilename))
            except zipfile.BadZipfile as e:
                raise zipfile.BadZipfile(
                    "Zipfile {} failed internal checks; {}".format(
                        zipfilename, e))

        sentinelNumber = int(os.path.basename(zipfilename)[1])
        if sentinelNumber not in (1, 2, 3, 5):
            msg = "Unknown Sentinel number '{}': {}".format(
                sentinelNumber, zipfilename)
            ok = False

        if ok:
            try:
                if sentinelNumber == 1:
                    metainfo = sen1meta.Sen1ZipfileMeta(
                        zipfilename=zipfilename)
                elif sentinelNumber == 2:
                    metainfo = sen2meta.Sen2ZipfileMeta(
                        zipfilename=zipfilename)
                elif sentinelNumber == 3:
                    metainfo = sen3meta.Sen3ZipfileMeta(
                        zipfilename=zipfilename)
                elif sentinelNumber == 5:
                    metainfo = sen5meta.Sen5Meta(ncfile=zipfilename)
            except Exception as e:
                msg = "Exception '{}' raised reading: {}".format(
                    str(e), zipfilename)
                ok = False

        if ok:
            relativeOutputDir = dirstruct.makeRelativeOutputDir(
                metainfo,
                dirstruct.stdGridCellSize[sentinelNumber],
                productDirGiven=cmdargs.productdirgiven)
            finalOutputDir = os.path.join(cmdargs.storagetopdir,
                                          relativeOutputDir)
            dirstruct.checkFinalDir(finalOutputDir, cmdargs.dummy,
                                    cmdargs.verbose)

            if sentinelNumber == 1:
                finalXmlFile = dirstruct.createSentinel1Xml(
                    zipfilename, finalOutputDir, metainfo, cmdargs.dummy,
                    cmdargs.verbose, cmdargs.nooverwrite, cmdargs.md5esa,
                    cmdargs.makereadonly)
            elif sentinelNumber == 2:
                finalXmlFile = dirstruct.createSentinel2Xml(
                    zipfilename, finalOutputDir, metainfo, cmdargs.dummy,
                    cmdargs.verbose, cmdargs.nooverwrite, cmdargs.md5esa,
                    cmdargs.makereadonly)
            elif sentinelNumber == 3:
                finalXmlFile = dirstruct.createSentinel3Xml(
                    zipfilename, finalOutputDir, metainfo, cmdargs.dummy,
                    cmdargs.verbose, cmdargs.nooverwrite, cmdargs.md5esa,
                    cmdargs.makereadonly)
            elif sentinelNumber == 5:
                finalXmlFile = dirstruct.createSentinel5Xml(
                    zipfilename, finalOutputDir, metainfo, cmdargs.dummy,
                    cmdargs.verbose, cmdargs.nooverwrite, cmdargs.md5esa,
                    cmdargs.makereadonly)

            if not cmdargs.xmlonly and not cmdargs.xmlandpreview:
                dirstruct.moveZipfile(zipfilename, finalOutputDir,
                                      cmdargs.dummy, cmdargs.verbose,
                                      cmdargs.copy, cmdargs.symlink,
                                      cmdargs.nooverwrite,
                                      cmdargs.moveandsymlink,
                                      cmdargs.makereadonly)

            if not cmdargs.xmlonly and not cmdargs.nopreview:
                if sentinelNumber != 3:
                    dirstruct.createPreviewImg(zipfilename, finalOutputDir,
                                               metainfo, cmdargs.dummy,
                                               cmdargs.verbose,
                                               cmdargs.nooverwrite,
                                               cmdargs.makereadonly)
                else:
                    sen3thumb(zipfilename,
                              finalOutputDir,
                              cmdargs.dummy,
                              cmdargs.verbose,
                              cmdargs.nooverwrite,
                              mountpath=cmdargs.mountpath)
            # Post to SARA if there's an XML file and user credentials are provided
            if ':' in cmdargs.sarauser and finalXmlFile:
                username, password = cmdargs.sarauser.split(':')
                if username and password:
                    saraurl = urljoin(cmdargs.saraurl,
                                      'S{}'.format(sentinelNumber))
                    if cmdargs.dummy:
                        print("Would post to SARA at {}".format(saraurl))
                    else:
                        postToSara(finalXmlFile,
                                   saraurl,
                                   username,
                                   password,
                                   verbose=cmdargs.verbose,
                                   update=cmdargs.updatesara)

        if not ok:
            filesWithErrors.append(msg)

    # Report files which had errors
    if len(filesWithErrors) > 0:
        if cmdargs.errorlog is not None:
            f = open(cmdargs.errorlog, 'w')
        else:
            f = sys.stderr
        for msg in filesWithErrors:
            f.write(msg + '\n')
Example #21
def extract_render(
    path: str,
    version: str,
    input_title: str,
    doc_template_path: str,
    page_template_path: str,
    boot_template_path: str,
    asset_paths: Iterable[str],
    img_types: Iterable[str],
    dark: bool,
    horizontal: bool,
    outpath: Path = Path(tempfile.gettempdir()) / 'html-mangareader',
) -> Path:
    """Main controller procedure. Handles opening of archive, image, or directory and renders the images
    appropriately for each, then opens the document in the user's default browser.

    Parameters:
    * `path`: path to image, directory, or archive.
    * `version`: version of the app to display to user.
    * `doc_template_path`: path to HTML template for the main document.
    * `page_template_path`: path to HTML template for individual comic page elements.
    * `boot_template_path`: path to HTML template for bootstrap document.
    * `asset_paths`: paths of static assets to copy.
    * `img_types`: list of recognized image file extensions.
    * `outpath`: directory to write temporary files in. Defaults to OS temp directory.

    Returns: Path to the bootstrap document, which can be opened in a web browser.

    Throws:
    * `BadZipFile`: opened file was a zip file, but could not be read.
    * `BadRarFile`: opened file was a rar file, but could not be read.
    * `Bad7zFile`: opened file was a 7z file, but could not be read.
    * `ImagesNotFound`: if no images could be found in an opened directory or archive.
    """
    start = 0
    pPath = Path(path).resolve()
    rmtree(os.path.dirname(os.path.join(tempfile.gettempdir(), 'html-mangareader', 'render.html')), ignore_errors=True)
    doc_template, page_template, boot_template = (
        resolve_template(p) for p in (doc_template_path, page_template_path, boot_template_path)
    )
    try:
        if pPath.is_file():
            if pPath.suffix.lower()[1:] in img_types:
                imgpaths = scan_directory(pPath.parent, img_types)
                start = imgpaths.index(pPath)
                title = pPath.parent.name
            else:
                try:
                    imgpaths = extract_zip(pPath, img_types, str(outpath))
                    title = pPath.name
                except zipfile.BadZipFile as e:
                    raise zipfile.BadZipfile(
                        f'"{path}" does not appear to be a valid zip/cbz file.'
                    ).with_traceback(e.__traceback__)
                except rarfile.BadRarFile as e:
                    raise rarfile.BadRarFile(
                        f'"{path}" does not appear to be a valid rar/cbr file.'
                    ).with_traceback(e.__traceback__)
                except py7zr.Bad7zFile as e:
                    raise py7zr.Bad7zFile(
                        f'"{path}" does not appear to be a valid 7z/cb7 file.'
                    ).with_traceback(e.__traceback__)
        else:
            imgpaths = scan_directory(path, img_types)
            title = pPath.name
            
        title = input_title
        
        create_out_path(outpath)
        render_copy(asset_paths, outpath)
        renderfile = render_from_template(
            paths=imgpaths,
            version=version,
            title=title,
            doc_template=doc_template,
            page_template=page_template,
            outfile=str(outpath / 'index.html'),
        )
        bootfile = render_bootstrap(
            outfile=str(outpath / 'boot.html'),
            render=Path(renderfile).as_uri(),
            index=start,
            boot_template=boot_template,
        )
        return Path(bootfile)

    except ImagesNotFound:
        raise
    return
Example #22
 def raise_zip_error(url, filename, md5sum=None):
     raise zipfile.BadZipfile('bad crc')
Example #23
    def unzip(self, params):
        if len(params) != 2:
            return None, error_embed(
                author=self.plugin.get_printer_name(),
                title='Wrong number of arguments',
                description='try "%sunzip [filename]"' %
                self.plugin.get_settings().get(["prefix"]))

        file_name = params[1]

        flat_filelist = self.get_flat_file_list()

        unzippable = None
        is_multivolume = False
        mv_paths = []

        if file_name.endswith('.zip'):
            for file in flat_filelist:
                if file_name.upper() in file.get('path').upper():
                    unzippable = self.plugin.get_file_manager().path_on_disk(
                        file.get('location'), file_name)
                    break

        elif file_name[:-4].endswith('.zip'):
            files = []
            is_multivolume = True
            truncated = file_name[:-3]
            current = 1
            found = True
            while found:
                found = False
                fn = truncated + str(current).zfill(3)
                for file in flat_filelist:
                    if fn.upper() in file.get('path').upper():
                        files.append(fn)
                        current += 1
                        found = True
                        break
            upload_file_path = self.plugin.get_file_manager().path_on_disk(
                'local', truncated[:-1])
            if self.plugin.get_file_manager().file_exists(
                    'local',
                    upload_file_path.rpartition('/')[2]):
                self.plugin.get_file_manager().remove_file(
                    'local',
                    upload_file_path.rpartition('/')[2])
            with open(upload_file_path, 'ab') as combined:
                for f in files:
                    path = self.plugin.get_file_manager().path_on_disk(
                        'local', f)
                    with open(path, 'rb') as temp:
                        combined.write(temp.read())
                    mv_paths.append(f.rpartition('/')[2])

            unzippable = upload_file_path

        else:
            return None, error_embed(
                author=self.plugin.get_printer_name(),
                title="Not a valid Zip file.",
                description=
                'try "%sunzip [filename].zip or %sunzip [filename].zip.001 for multi-volume files."'
                % (self.plugin.get_settings().get(
                    ["prefix"]), self.plugin.get_settings().get(["prefix"])))

        if unzippable is None:
            return None, error_embed(author=self.plugin.get_printer_name(),
                                     title="File %s not found." % file_name)

        try:
            with zipfile.ZipFile(unzippable) as zip:

                fileOK = zip.testzip()

                if fileOK is not None:
                    raise zipfile.BadZipfile()
                elif is_multivolume:
                    for f in mv_paths:
                        self.plugin.get_file_manager().remove_file('local', f)

                availablefiles = zip.namelist()
                filestounpack = []
                for f in availablefiles:
                    if f.endswith('.gcode'):
                        filestounpack.append(f)

                path = unzippable.rpartition('/')[0] + '/'

                for f in filestounpack:
                    with open(path + f, 'wb') as file:
                        with zip.open(f) as source:
                            file.write(source.read())

                self.plugin.get_file_manager().remove_file('local', unzippable)

        except:
            self.plugin.get_file_manager().remove_file('local', unzippable)
            return None, error_embed(
                author=self.plugin.get_printer_name(),
                title="Bad zip file.",
                description=
                'In case of multi-volume files, one could be missing.')

        filelist_string = ''
        for f in filestounpack:
            filelist_string += (f + '\n')

        return None, success_embed(author=self.plugin.get_printer_name(),
                                   title='File(s) unzipped. ',
                                   description=filelist_string)
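
A condensed sketch of the core unzip step in the command above, separated from the plugin plumbing: it validates the archive with testzip() and extracts only the .gcode members into the directory that holds the zip file (the function name is illustrative, and subdirectories inside the archive are preserved here rather than flattened):

import os
import zipfile


def extract_gcode(zip_path):
    out_dir = os.path.dirname(zip_path)
    with zipfile.ZipFile(zip_path) as zf:
        # testzip() returns the first corrupt member name, or None if the archive is sound.
        if zf.testzip() is not None:
            raise zipfile.BadZipfile('zip test failed for {}'.format(zip_path))
        gcode_members = [name for name in zf.namelist() if name.endswith('.gcode')]
        for name in gcode_members:
            zf.extract(name, out_dir)
    return gcode_members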
Example #24
def install_pkg_files(package_URIs, reinstall = False):
    """install a number of pisi package files"""

    installdb = pisi.db.installdb.InstallDB()
    ctx.ui.debug('A = %s' % str(package_URIs))

    for x in package_URIs:
        if not x.endswith(ctx.const.package_suffix):
            raise Exception(_('Mixing file names and package names not supported yet.'))

    # filter packages that are already installed
    tobe_installed, already_installed = [], set()
    if not reinstall:
        for x in package_URIs:
            if not x.endswith(ctx.const.delta_package_suffix) and x.endswith(ctx.const.package_suffix):
                pkg_name, pkg_version = pisi.util.parse_package_name(os.path.basename(x))
                if installdb.has_package(pkg_name):
                    already_installed.add(pkg_name)
                else:
                    tobe_installed.append(x)
        if already_installed:
            ctx.ui.warning(_("The following package(s) are already installed "
                             "and are not going to be installed again:"))
            ctx.ui.info(util.format_by_columns(sorted(already_installed)))
        package_URIs = tobe_installed

    if ctx.config.get_option('ignore_dependency'):
        # simple code path then
        for x in package_URIs:
            atomicoperations.install_single_file(x, reinstall)
        return True

    # read the package information into memory first
    # regardless of which distribution they come from
    d_t = {}
    dfn = {}
    for x in package_URIs:
        try:
            package = pisi.package.Package(x)
            package.read()
        except zipfile.BadZipfile:
            # YALI needs to know which file is broken
            raise zipfile.BadZipfile(x)
        name = str(package.metadata.package.name)
        d_t[name] = package.metadata.package
        dfn[name] = x

    # check packages' DistributionReleases and Architecture
    if not ctx.get_option('ignore_check'):
        for x in d_t.keys():
            pkg = d_t[x]
            if pkg.distributionRelease != ctx.config.values.general.distribution_release:
                raise Exception(_('Package %s is not compatible with your distribution release %s %s.') \
                        % (x, ctx.config.values.general.distribution, \
                        ctx.config.values.general.distribution_release))
            if pkg.architecture != ctx.config.values.general.architecture:
                raise Exception(_('Package %s (%s) is not compatible with your %s architecture.') \
                        % (x, pkg.architecture, ctx.config.values.general.architecture))

    def satisfiesDep(dep):
        # is dependency satisfied among available packages
        # or packages to be installed?
        return dep.satisfied_by_installed() or dep.satisfied_by_dict_repo(d_t)

    # for this case, we have to determine the dependencies
    # that aren't already satisfied and try to install them
    # from the repository
    dep_unsatis = []
    for name in d_t.keys():
        pkg = d_t[name]
        deps = pkg.runtimeDependencies()
        for dep in deps:
            if not satisfiesDep(dep) and dep.package not in [x.package for x in dep_unsatis]:
                dep_unsatis.append(dep)

    # now determine if these unsatisfied dependencies could
    # be satisfied by installing packages from the repo
    for dep in dep_unsatis:
        if not dep.satisfied_by_repo():
            raise Exception(_('External dependencies not satisfied: %s') % dep)

    # if so, then invoke install_pkg_names
    extra_packages = [x.package for x in dep_unsatis]
    if extra_packages:
        ctx.ui.warning(_("The following packages will be installed "
                         "in order to satisfy dependencies:"))
        ctx.ui.info(util.format_by_columns(sorted(extra_packages)))
        if not ctx.ui.confirm(_('Do you want to continue?')):
            raise Exception(_('External dependencies not satisfied'))
        install_pkg_names(extra_packages, reinstall=True, extra=True)

    class PackageDB:
        def get_package(self, key, repo = None):
            return d_t[str(key)]

    packagedb = PackageDB()

    A = d_t.keys()

    if len(A)==0:
        ctx.ui.info(_('No packages to install.'))
        return

    # try to construct a pisi graph of packages to
    # install / reinstall

    G_f = pgraph.PGraph(packagedb)               # construct G_f

    # find the "install closure" graph of G_f by package
    # set A using packagedb
    for x in A:
        G_f.add_package(x)
    B = A
    while len(B) > 0:
        Bp = set()
        for x in B:
            pkg = packagedb.get_package(x)
            for dep in pkg.runtimeDependencies():
                if dep.satisfied_by_dict_repo(d_t):
                    if not dep.package in G_f.vertices():
                        Bp.add(str(dep.package))
                    G_f.add_dep(x, dep)
        B = Bp
    if ctx.config.get_option('debug'):
        G_f.write_graphviz(sys.stdout)
    order = G_f.topological_sort()
    if not ctx.get_option('ignore_package_conflicts'):
        conflicts = operations.helper.check_conflicts(order, packagedb)
        if conflicts:
            operations.remove.remove_conflicting_packages(conflicts)
    order.reverse()
    ctx.ui.info(_('Installation order: ') + util.strlist(order) )

    if ctx.get_option('dry_run'):
        return True

    ctx.ui.notify(ui.packagestogo, order = order)

    for x in order:
        atomicoperations.install_single_file(dfn[x], reinstall)

    return True
Example #25
def test_should_raise_if_zipfile_raised_BadZipfile(zipfile_mock, tmpfile):
    zipfile_mock.side_effect = zipfile.BadZipfile()
    package = Package({}, {})
    with pytest.raises(exceptions.DataPackageException):
        package.save(tmpfile)
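
For context, the zipfile_mock argument used by the save() tests in this listing is presumably a patched zipfile.ZipFile; a hypothetical pytest fixture that would provide such a mock, assuming unittest.mock:

from unittest import mock

import pytest


@pytest.fixture
def zipfile_mock():
    # Patch zipfile.ZipFile so a test can force it to raise zipfile.BadZipfile.
    with mock.patch('zipfile.ZipFile') as patched:
        yield patched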
Example #26
    def getText(self):
        fileToOpen = None
        for file in self.zipList:
            if file.type == 'text/plain' and file.charset not in bannedEncodings:
                fileToOpen = file
                break
        if fileToOpen is None:
            logger.warn(
                "Returning empty string on book %s - no suitable file format",
                self.path)
            self.contentValid = False
            return ""

        try:
            with zipfile.ZipFile(fileToOpen.path) as zfile:
                if len(zfile.namelist()) != 1:
                    txtCnt = 0
                    for name in zfile.namelist():
                        filename, file_extension = os.path.splitext(name)
                        if file_extension.lower() == '.txt':
                            txtCnt += 1
                    if txtCnt != 1:
                        logger.warn("Unexpected zip contents %s",
                                    fileToOpen.path)
                        logger.warn("Contents %s", str(zfile.namelist()))
                        raise zipfile.BadZipfile("Invalid file contents")

                for name in zfile.namelist():
                    filename, file_extension = os.path.splitext(name)
                    if file_extension.lower() == '.txt':
                        with zfile.open(name) as readfile:
                            if fileToOpen.charset == 'None':
                                logger.debug(
                                    "Opening file %s with None encoding",
                                    fileToOpen.path)
                                self.contentValid = True
                                strTmp = readfile.read()
                                #go for unicode format
                                if isinstance(strTmp, str):
                                    strTmp = unicode(strTmp, errors='ignore')
                                return strTmp
                            else:
                                logger.debug("Using encoding %s",
                                             fileToOpen.charset)
                                logger.debug(
                                    "Opening file %s with %s encoding",
                                    fileToOpen.path, fileToOpen.charset)
                                try:
                                    self.contentValid = True
                                    return io.TextIOWrapper(
                                        readfile,
                                        encoding=fileToOpen.charset,
                                        errors='replace').read()

                                except UnicodeDecodeError as e:
                                    logger.debug(
                                        "Could not decode file %s in %s encoding",
                                        fileToOpen.path, fileToOpen.charset)
                                    logger.debug("Exception: %s", str(e))
                                    if fileToOpen.charset == 'us-ascii':
                                        logger.debug(
                                            "Retrying with no encoding for us-ascii"
                                        )
                                        self.contentValid = True
                                        return readfile.read()
                                    else:
                                        logger.warn(
                                            "Giving up on file, cannot decode %s",
                                            fileToOpen.path)

        except NotImplementedError as e:

            logger.warn("Cannot open %s", fileToOpen.path)
            logger.warn("%s", str(e))
            logger.warn("%s", file_extension)

        except zipfile.BadZipfile as e:
            filename, file_extension = os.path.splitext(fileToOpen.path)
            if file_extension.lower() != '.txt':
                logger.warn("Cannot open %s", fileToOpen.path)
                logger.warn("%s", str(e))
            else:
                logger.debug("Recovering - found text file %s instead of zip",
                             fileToOpen.path)
                if fileToOpen.charset == 'None':
                    with io.open(fileToOpen.path) as readfile:
                        logger.debug("Opening file %s with None encoding",
                                     fileToOpen.path)
                        self.contentValid = True
                        strTmp = readfile.read()
                        if isinstance(strTmp, str):
                            strTmp = unicode(strTmp, errors='ignore')
                        return strTmp
                else:
                    logger.debug("Using encoding %s", fileToOpen.charset)
                    logger.debug("Opening file %s with %s encoding",
                                 fileToOpen.path, fileToOpen.charset)
                    with io.open(fileToOpen.path,
                                 "r",
                                 encoding=fileToOpen.charset,
                                 errors='replace') as readfile:
                        self.contentValid = True
                        return readfile.read()

        self.contentValid = False
        return ""
Example #27
    def init(self, pk3_filepath, settings, temporary_file_storage):

        self.arena_file = ""

        self.filename = ""
        self.filename_full = ""

        self.mapcount = 0

        self.longname_list = []

        self.levelcode_list = []
        self.levelshot_int_list = []
        self.levelshot_ext_list = []
        self.arenashot_list = []

        self.is_mappack = False
        self.is_multiarena = False

        self.filename_full = os.path.basename(pk3_filepath)

        self.filename = self.filename_full[:self.filename.find('.') -
                                           3]  #minus 3 ????
        #removes fileformat from name

        convert_image_format = "png"

        zippath = pk3_filepath
        #construct a list of all member inside the 'zip' file.
        if zipfile.is_zipfile(zippath):
            zipper = zipfile.ZipFile(zippath, 'r')
        else:
            raise zipfile.BadZipfile()

        zip_content = zipper.namelist()
        if "maplist.txt" in zip_content:
            zipper.getinfo("maplist.txt")
            self.is_mappack = True
        elif "maps.txt" in zip_content:
            zipper.getinfo("maps.txt")
            self.is_mappack = True

        for cont in zip_content:
            # find levelshots: is_image decides which image files are acceptable
            if cont.startswith("levelshots") and is_image(cont):
                self.levelshot_int_list.append(cont)

            #find levelcode from the .bsp file
            elif cont.endswith(".bsp"):
                self.levelcode_list.append(
                    cont.replace("maps/", '').replace(".bsp", ''))
                self.mapcount += 1

            #find .arena file with map metadata
            elif cont.endswith(".arena"):
                self.arena_file = cont
            elif cont.endswith("arenas.txt"):
                self.arena_file = cont

            elif cont.startswith("arenashots") and is_image(cont):
                self.arenashot_list.append(cont)
                self.is_multiarena = True
                self.is_mappack = True

        #if there are several levelcodes in the map it's probably a mappack
        if len(self.levelcode_list) >= 2:
            self.is_mappack = True

        if settings['verbose'] and self.is_mappack:
            print(self.filename, "is a mappack and contains ",
                  str(self.mapcount), " maps")

        #at this point all variables should be initialized, parsed from the pk3 file.
        if self.arena_file == "":
            if settings['verbose']:
                print("warning no arena file found for " + self.filename)
            if settings['verbose']:
                print("longname/s copied from levelcode/s\n")

            self.longname_list = self.levelcode_list[:]

        #extract the .arena file with meta data to a temporary storage to read data from it.
        arena_file_map_code_list = []
        if self.arena_file != "":
            zipper.extract(self.arena_file, temporary_file_storage)
            with open(os.path.join(temporary_file_storage,
                                   self.arena_file)) as f:
                arena_data = f.readlines()
            for line in (arena_data):
                if "longname" in line:
                    longname = line[line.find('"'):]
                    longname = re.sub('[^0-9a-zA-Z ]+', '', longname)
                    self.longname_list.append(longname)  #

                if "map" in line:  #alternative metho of getting the levelcode. which is better?
                    mapcode = line[line.find('"'):]

                    mapcode = re.sub(
                        '[^0-9a-zA-Z]+', '',
                        mapcode)  #need to remove needless characters.
                    arena_file_map_code_list.append(mapcode)

        #new algorithm to get the correct longname for a levelcode in a mappack

        if self.is_mappack:
            if len(self.levelcode_list) > len(self.longname_list):
                for i in range(
                        len(self.levelcode_list) - len(self.longname_list)):
                    self.longname_list.append("")

            longname_list_copy = self.longname_list[:]
            for i, levelcode in enumerate(self.levelcode_list):
                for k in range(len(arena_file_map_code_list)):
                    level_c = re.sub('[^0-9a-zA-Z]+', '',
                                     levelcode.lower())  #strip chars and lower
                    a_level_c = re.sub('[^0-9a-zA-Z]+', '',
                                       arena_file_map_code_list[k].lower(
                                       ))  #strip chars and lower
                    if level_c == a_level_c:
                        self.longname_list[i] = longname_list_copy[k]

        #if the file is a multiarena, extract all the arenashots
        #first extracted levelshot is the parent levelshot for the arena map
        #the following images are arenashots
        if self.is_multiarena:
            for arenashot in self.arenashot_list:
                arenacode = get_code_from_shot(arenashot)
                self.longname_list.append(arenacode)
                self.levelcode_list.append(arenacode)
                self.levelshot_int_list.append(arenashot)
                self.mapcount += 1

        #extract levelshots and save the extracted file paths in list levelshot_ext_list
        for levelshot in self.levelshot_int_list:
            if levelshot != "":
                zipper.extract(levelshot, settings['levelshot_extract_path'])
                levelshot_file_path = os.path.join(
                    settings['levelshot_extract_path'], levelshot)

                if is_non_html_image(levelshot_file_path):
                    levelshot_file_path = convert_image(
                        levelshot_file_path, convert_image_format)

                #here append
                self.levelshot_ext_list.append(levelshot_file_path)

        #if need be reorder levelshot_ext_list to correspond to levelshot_list's order
        levelshot_list_copy = self.levelshot_ext_list[:]

        if len(self.levelcode_list) > len(self.levelshot_ext_list):
            for i in range(
                    len(self.levelcode_list) - len(self.levelshot_ext_list)):
                self.levelshot_ext_list.append("")

        for i, levelshot in enumerate(self.levelshot_int_list):
            levelshot_code = get_code_from_shot(levelshot)
            for k in range(len(self.levelcode_list)):
                levelshot_c = re.sub(
                    '[^0-9a-zA-Z]+', '',
                    levelshot_code.lower())  #strip chars and lower
                self_levelc = re.sub('[^0-9a-zA-Z]+', '',
                                     self.levelcode_list[k].lower())
                if levelshot_c == self_levelc:
                    self.levelshot_ext_list[k] = levelshot_list_copy[i]

        if self.is_mappack:
            #make sure that all lists are the same length or throw a warning
            if len(self.levelshot_int_list) != self.mapcount:
                if settings['verbose']:
                    print(
                        "warning the number of internal levelshots do not match the number of levels"
                    )
            if len(self.levelshot_ext_list) != self.mapcount:
                if settings['verbose']:
                    print(
                        "warning the number of external levelsohts do not match the number of levels"
                    )
            if len(self.longname_list) != self.mapcount:
                if settings['verbose']:
                    print(
                        "warning the number of longnames do not match the number of levels"
                    )
            if len(self.levelcode_list) != self.mapcount:
                if settings['verbose']:
                    print(
                        "warning the number of levelcodes do not match the number of levels"
                    )

        if len(self.longname_list) <= len(self.levelcode_list):
            for i in range(len(self.longname_list)):
                if self.longname_list[i] == "":
                    self.longname_list[i] = self.levelcode_list[i]