Example #1
def extract_zip(infile):
    """
    Extract the required parts of the QuinCe export ZIP
    """

    manifest = None
    content = None

    if not zipfile.is_zipfile(infile):
        raise zipfile.BadZipFile('Supplied file is not a ZIP file')

    basename = os.path.splitext(os.path.basename(infile))[0]

    with zipfile.ZipFile(infile) as in_zip:
        manifest_path = zipfile.Path(in_zip, f'{basename}/manifest.json')
        if not manifest_path.exists():
            raise KeyError('ZIP file is missing manifest.json')

        manifest = json.loads(manifest_path.read_text())
        dataset_name = manifest['manifest']['metadata']['name']

        dataset_filename = f'{basename}/dataset/SOCAT/{dataset_name}.tsv'
        dataset_path = zipfile.Path(in_zip, dataset_filename)
        if not dataset_path.exists():
            raise KeyError('ZIP file does not contain SOCAT export')
        content = in_zip.read(dataset_filename)

    return manifest, content
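
For reference, a minimal self-contained sketch (not from the QuinCe code above; the archive layout is assumed) of the two zipfile.Path calls extract_zip relies on, exists() and read_text(), exercised against an in-memory archive:

import io
import json
import zipfile

# Build a tiny archive in memory with the layout extract_zip expects.
buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('export/manifest.json',
                json.dumps({'manifest': {'metadata': {'name': 'demo'}}}))

# Probe it the same way extract_zip does.
with zipfile.ZipFile(buf) as zf:
    manifest_path = zipfile.Path(zf, 'export/manifest.json')
    assert manifest_path.exists()
    print(json.loads(manifest_path.read_text())['manifest']['metadata']['name'])  # demo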
Example #2
def create(args):
    """Create a CMake project according to the provided information
    """
    path = args.path
    root_exists = os.path.exists(path)
    if root_exists and os.path.isdir(path) and len(os.listdir(path)) != 0:
        print(
            f"Error - directory exists and is not empty:\n{path}",
            file=sys.stderr,
        )
        exit(1)
    if args.flags_used:
        with contextlib.redirect_stdout(io.StringIO()):
            d = get_substitutes(args, os.path.basename(path))
    else:
        d = get_substitutes(args, os.path.basename(path))
    mkdir(path)
    mapping = {"s": "shared/", "e": "executable/", "h": "header/"}
    write_dir(path, d, zipfile.Path(zip, "templates/" + mapping[d["type_id"]]))
    write_dir(path, d, zipfile.Path(zip, "templates/common/"))
    git_init(path)
    print_tips(d)
    print("""\
Now make sure you have at least CMake 3.19 installed for local development, to
make use of all the nice Quality-of-Life improvements in newer releases:
https://cmake.org/download/

For more tips, like integration with package managers, please see the Wiki:
http://github.com/friendlyanon/cmake-init/wiki

You are all set. Have fun programming and create something awesome!""")
Example #3
def test_1(act: Action, fdb_112_file: Path, fbk_file: Path):
    zipped_fdb_file = zipfile.Path(act.files_dir /
                                   'core_6023-ods-11_2-fdb.zip',
                                   at='core_6023-ods-11_2.fdb')
    fdb_112_file.write_bytes(zipped_fdb_file.read_bytes())
    # Change permissions
    fdb_112_file.chmod(16895)
    # Ensure that we really are dealing with a .fdb file that has the old ODS.
    act.expected_stderr = expected_stderr
    act.gstat(switches=['-h', str(fdb_112_file)], connect_db=False)
    assert act.clean_stderr == act.clean_expected_stderr
    # Backup work database and restore over extracted db
    act.reset()
    act.gbak(switches=['-b', act.db.dsn, str(fbk_file)])
    act.reset()
    act.gbak(switches=['-rep', str(fbk_file), act.get_dsn(fdb_112_file)])
    #
    act.reset()
    act.expected_stdout = expected_stdout
    act.isql(
        switches=['-q', act.get_dsn(fdb_112_file)],
        connect_db=False,
        input=
        'set list on; select sign(current_connection) as restore_with_replace_result from rdb$database;'
    )
    assert act.clean_stdout == act.clean_expected_stdout
Example #4
    def zip_children(self):
        zip_path = zipfile.Path(self.root)
        names = zip_path.root.namelist()
        self.joinpath = zip_path.joinpath

        return dict.fromkeys(
            child.split(posixpath.sep, 1)[0] for child in names)
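
A standalone sketch of the same idea against a throwaway in-memory archive: the first component of each namelist() entry names a top-level child, and dict.fromkeys de-duplicates while keeping order:

import io
import posixpath
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('pkg/a.txt', 'a')
    zf.writestr('pkg/sub/b.txt', 'b')
    zf.writestr('README', 'hi')

with zipfile.ZipFile(buf) as zf:
    children = dict.fromkeys(
        name.split(posixpath.sep, 1)[0] for name in zf.namelist())
    print(list(children))  # ['pkg', 'README']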
Example #5
def test_1(act: Action, blob_src: Path):
    zipped_blob_file = zipfile.Path(act.files_dir / 'core_5618.zip',
                                    at='core_5618.bin')
    blob_src.write_bytes(zipped_blob_file.read_bytes())
    #
    with act.db.connect() as con:
        c = con.cursor()
        with open(blob_src, mode='rb') as blob_handle:
            c.execute('insert into test (b) values (?)', [blob_handle])
        c.close()
        con.execute_immediate('drop table test')
        con.commit()
    #
    log_before = act.get_firebird_log()
    # Run full validation (this is what 'gfix -v -full' does)
    with act.connect_server() as srv:
        srv.database.repair(database=act.db.db_path,
                            flags=SrvRepairFlag.FULL
                            | SrvRepairFlag.VALIDATE_DB)
        assert srv.readlines() == []
    #
    log_after = act.get_firebird_log()
    log_diff = [
        line.strip().upper() for line in unified_diff(log_before, log_after)
        if line.startswith('+') and 'WARNING' in line.upper()
    ]
    assert log_diff == [
        '+\tVALIDATION FINISHED: 0 ERRORS, 0 WARNINGS, 0 FIXED'
    ]
Example #6
def maybe_extract_from_zipfile(zip_file):
    """
    Extract files needed for Promtimer to run if necessary. Files needed by Promtimer are:
    * everything under the stats_snapshot directory; nothing is extracted if the
      stats_snapshot directory is already present
    * couchbase.log: extracted if not present
    """
    root = zipfile.Path(zip_file)
    for p in root.iterdir():
        if is_cbcollect_dir(p):
            stats_snapshot_exists = snapshot_dir_exists(pathlib.Path(p.name))
            logging.debug("{}/stats_snapshot exists: {}".format(
                p.name, stats_snapshot_exists))
            extracting = False
            for item in zip_file.infolist():
                item_path = path.join(*item.filename.split('/'))
                should_extract = False
                if is_stats_snapshot_file(item.filename):
                    should_extract = not stats_snapshot_exists
                elif item.filename.endswith(COUCHBASE_LOG):
                    should_extract = not path.exists(item_path)
                if should_extract:
                    logging.debug("zipfile item:{}, exists:{}".format(
                        item_path, path.exists(item_path)))
                    if not extracting:
                        extracting = True
                        logging.info(
                            'extracting stats, couchbase.log from cbcollect zip:{}'
                            .format(zip_file.filename))
                    zip_file.extract(item)
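
A condensed, self-contained sketch of the pattern above (the archive name and its layout are hypothetical): zipfile.Path(...).iterdir() walks the top-level entries, while ZipFile.infolist() and extract() handle members that are missing on disk:

import os
import zipfile

with zipfile.ZipFile('cbcollect.zip') as zf:  # hypothetical collection archive
    for entry in zipfile.Path(zf).iterdir():
        print('top-level entry:', entry.name)
    for item in zf.infolist():
        target = os.path.join(*item.filename.split('/'))
        if not os.path.exists(target):  # only extract what is not already present
            zf.extract(item)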
Example #7
def open(ifc_path: str) -> ifcopenshell.file:
    ext = os.path.splitext(ifc_path)[1].lower()
    if ext == ".ifc":
        return ifcopenshell.open(ifc_path)
    if ext == ".ifcxml":
        # TODO: How to do this, as ifcopenshell.ifcopenshell_wrapper has no parse_ifcxml?
        raise NotImplementedError("No support for .ifcXML yet")
    if ext in (".ifczip", ".zip"):
        try:  # python 3.8+
            zip_path = zipfile.Path(ifc_path)
            for member in zip_path.iterdir():
                zipped_ext = os.path.splitext(member.name)[1].lower()
                if zipped_ext == ".ifc":
                    return ifcopenshell.file.from_string(
                        member.read_text())
                if zipped_ext == ".ifcxml":
                    raise NotImplementedError("No support for .ifcXML yet")
        except AttributeError as python36_zip_error:  # python 3.6
            with zipfile.ZipFile(ifc_path) as zip_file:
                for member in zip_file.namelist():
                    zipped_ext = os.path.splitext(member)[1].lower()
                    if zipped_ext == ".ifc":
                        with zip_file.open(member) as ifc_file:
                            return ifcopenshell.file.from_string(
                                ifc_file.read().decode())
                    if zipped_ext == ".ifcxml":
                        raise NotImplementedError(
                            "No support for .ifcXML yet"
                        ) from python36_zip_error
    raise NotImplementedError("""Supported files:
- unzipped: *.ifc | *.ifcXML
- zipped: *.ifczip | *.zip containing an unzipped type""")
Example #8
    def set_zip(self, path):
        if Path(path).is_dir():
            self.zip = ZipLikeFolder(path)
            spath = Path(path)
            self._is_folder = True
        else:
            self.zip = zipfile.ZipFile(path)
            # Find the root folder:
            spath = zipfile.Path(self.zip, "/")
            self._is_folder = False

        root = None
        for entry in spath.iterdir():
            if root is None:
                if entry.is_dir():
                    root = entry.name
                else:
                    # the first entry we found is not a dir
                    root = None
                    break
            else:
                # We found more than one entry in the root dir
                root = None
                break

        if root is None or self._is_folder:
            self.root = ""
        else:
            self.root = root + "/"

        if not self._is_folder:
            # Workaround for a weird bug where zipfile object considers itself closed
            # after the above zipfile.Path call
            self.zip = zipfile.ZipFile(path)
Example #9
def _get_zpath(pudl_table, pudl_settings):
    """Given a table and pudl_settings, return a Path to the requested file."""
    return zipfile.Path(
        pathlib.Path(pudl_settings["data_dir"],
                     "local/ferc714/ferc714.zip"),
        TABLE_FNAME[pudl_table]
    )
Example #10
def vcpkg(d):
    """Generate a vcpkg port in the ports folder, to make writing custom ports
    easier for dependencies on GitHub that are not present among the vcpkg
    provided ports
    """
    path = os.path.join("ports", d["name"])
    if os.path.isdir(path):
        print(f"""'{d["name"]}' already exists""", file=sys.stderr)
        exit(1)
    mkdir(path)
    write_dir(path, d, False, zipfile.Path(zip, "templates/vcpkg/"))
    vcpkg_root = "%VCPKG_ROOT:\=/%" if is_windows else "$VCPKG_ROOT"
    pwd = "%cd:\=/%" if is_windows else "$PWD"
    print(f"""\
The port has been generated in:

    {os.path.realpath(path)}

Please navigate there and edit the placeholders that are surrounded with
<angle brackets> and follow the instructions in the comments. After that's
done, make sure you add the port to your vcpkg manifest for easy integration.
When configuring your project, you must pass some extra variables:

    cmake --preset=dev
    -D "CMAKE_TOOLCHAIN_FILE={vcpkg_root}/scripts/buildsystems/vcpkg.cmake"
    -D "VCPKG_OVERLAY_PORTS={pwd}/ports"

Make sure you always keep up-to-date with vcpkg documentation:
https://github.com/microsoft/vcpkg

See the following example for how integration with vcpkg should look like:
https://github.com/friendlyanon/cmake-init-vcpkg-example""")
Example #11
def test_1(act: Action, fbk_file: Path, db_tmp: Database, capsys):
    zipped_fbk_file = zipfile.Path(act.files_dir / 'core_5965.zip',
                                   at='core_5965.fbk')
    fbk_file.write_bytes(zipped_fbk_file.read_bytes())
    #
    with act.connect_server() as srv:
        srv.database.restore(backup=fbk_file, database=db_tmp.db_path)
        srv.wait()
    # Test
    with db_tmp.connect() as con:
        c1 = con.cursor()
        c2 = con.cursor()
        c1.execute(
            "select 1 from opt_test where clid = 23 and cust_type = 1 and cust_id = 73 order by order_no desc"
        )
        print(c1.statement.plan)
        #
        c2.execute(
            "select 2 from opt_test where sysid = 1 and clid = 23 and cust_type = 1 and cust_id = 73 order by order_no desc"
        )
        print(c2.statement.plan)
    # Check
    act.expected_stdout = expected_stdout
    act.stdout = capsys.readouterr().out
    assert act.clean_stdout == act.clean_expected_stdout
Example #12
def _read_metadata_resources(self):
    if self.dst_path.endswith(".charm"):
        metadata_path = zipfile.Path(self.dst_path) / "metadata.yaml"
    else:
        metadata_path = Path(self.dst_path) / "metadata.yaml"
    metadata = yaml.safe_load(metadata_path.read_text())
    return metadata.get("resources", {})
Example #13
def download(self, fname):
    """Fetch a single file from the associated store/charm/channel."""
    if not self.full_entity:
        return None
    elif self.store == "cs":
        # Drop the "cs:" prefix; lstrip("cs:") would strip matching characters, not the prefix.
        entity_p = self.full_entity[len("cs:"):]
        url = f"https://api.jujucharms.com/charmstore/v5/{entity_p}/archive/{fname}"
        self.echo(f"Downloading {fname} from {url}")
        resp = requests.get(url)
        if resp.ok:
            return yaml.safe_load(resp.content.decode())
        self.echo(
            f"Failed to read {fname} due to {resp.status_code} - {resp.content}"
        )
    elif self.store == "ch":
        name, channel = self.full_entity.rsplit(":")
        info = _CharmHub.info(
            name,
            channel=channel,
            fields="default-release.revision.download.url")
        try:
            url = info["default-release"]["revision"]["download"]["url"]
        except (KeyError, TypeError):
            self.echo(f"Failed to find in charmhub.io \n{info}")
            return None
        self.echo(f"Downloading {fname} from {url}")
        resp = requests.get(url, stream=True)
        if resp.ok:
            yaml_file = zipfile.Path(BytesIO(resp.content)) / fname
            return yaml.safe_load(yaml_file.read_text())
        self.echo(
            f"Failed to read {fname} due to {resp.status_code} - {resp.content}"
        )
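
The in-memory trick used in the charmhub branch above, reduced to a sketch (the helper name is mine, not from the project): zipfile.Path accepts anything ZipFile accepts, including a BytesIO wrapping a downloaded archive, so a single member can be read without writing the ZIP to disk:

import io
import zipfile

def read_member_from_bytes(zip_bytes: bytes, member: str) -> str:
    # Wrap the raw bytes so zipfile can seek in them, then read one member as text.
    return (zipfile.Path(io.BytesIO(zip_bytes)) / member).read_text()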
Example #14
async def get_translation(file_q,res_q):
    while True:
        lang_name, file = await file_q.get()

        with zipfile.ZipFile(file) as zip_file:
                # path = browser/chrome/{language abbreviation}/locale/browser/browser.properties
                path = zipfile.Path(zip_file, "browser/chrome/")
                lang_code = list(path.iterdir())[0].name  # middle folder
                
                new_path = f"browser/chrome/{lang_code}/locale/browser/browser.properties"

                with zip_file.open(new_path) as browser_properties:
                    translations = re.findall(
                        r"""
                        # userContextPersonal.label = Pa ngat moni
                        userContext
                        (Personal|Work|Banking|Shopping|None)
                        \.
                        label
                        \ = \ 
                        (.*?)
                        \n
                        """,
                        browser_properties.read().decode("utf-8"),
                        flags=re.X
                    )

        logging.info(f"Got translations for {lang_name}.")

        await res_q.put((lang_name, lang_code, translations))
        
        file_q.task_done()
Example #15
def __parse_canopus(self):
    self.filename = None
    filename = None
    if self.maxZodiac:
        filename = self.maxZodiac
    elif self.maxSirius:
        filename = self.maxSirius
    self.topFormula = filename[3]
    self.topAdduct = filename[1]
    if self.maxZodiac is not None:
        self.zodiacScore = self.maxZodiac[2]
    else:
        self.zodiacScore = None
    canopusPath = Path(self.dirname, "canopus",
                       filename[0] + "_" + filename[1] + ".fpt")
    canopusZipPath = Path(self.dirname, "canopus")
    if canopusPath.exists():
        self.canopusfp = np.loadtxt(canopusPath)
        self.filename = canopusPath.name
    elif canopusZipPath.exists():
        with zipfile.ZipFile(canopusZipPath) as myzip:
            path = zipfile.Path(myzip,
                                at=filename[0] + "_" + filename[1] + ".fpt")
            if path.exists():
                with path.open("r") as myfile:
                    self.canopusfp = np.loadtxt(myfile)
                    self.filename = canopusPath.name
            else:
                self.canopusfp = None
    else:
        self.canopusfp = None
Example #16
def test_1(act: Action, sec_fbk: Path, sec_fdb: Path):
    zipped_fbk_file = zipfile.Path(act.files_dir / 'core_5637.zip',
                                   at='core5637-security3.fbk')
    sec_fbk.write_bytes(zipped_fbk_file.read_bytes())
    #
    log_before = act.get_firebird_log()
    # Restore security database
    with act.connect_server() as srv:
        srv.database.restore(database=sec_fdb,
                             backup=sec_fbk,
                             flags=SrvRestoreFlag.REPLACE)
        restore_log = srv.readlines()
        #
        log_after = act.get_firebird_log()
        #
        srv.database.validate(database=sec_fdb)
        validation_log = srv.readlines()
        srv.database.shutdown(database=sec_fdb,
                              mode=ShutdownMode.FULL,
                              method=ShutdownMethod.FORCED,
                              timeout=0)
    #
    #
    assert [line for line in restore_log if 'ERROR' in line.upper()] == []
    assert [line for line in validation_log if 'ERROR' in line.upper()] == []
    assert list(unified_diff(log_before, log_after)) == []
Example #17
def _get_zpath(ferc714_table, pudl_settings):
    """
    Given a table name and pudl_settings, return a Path to the corresponding file.

    Args:
        ferc714_table: Name of the FERC 714 table whose file path should be returned.
    """
    return zipfile.Path(get_ferc714(pudl_settings), TABLE_FNAME[ferc714_table])
Example #18
def list_files_in_zip_file_from_a_directory(
        zip_file_path: str,
        path_inside_of_zipfile: str = '',
        filenames_to_ignore: Optional[List[str]] = None) -> Set[Path]:
    """
    Return a list of files from a directory within a zipfile.

    The list of files returned is for a single directory within the zipfile.
    An empty string for path_inside_of_zipfile represents the root of the zip file.  Sub directories are access by
    passing a string of the path inside the the zip file.  'sub_folder' will return files in a directory called
    sub_folder that is in the root of the zip file.  'sub_folder/sub_sub_folder' would return the files with in the
    sub_sub_folder directory of the zip file.  If the directory does not exist an empty set is returned.
    The list of files paths returned can be further filtered by passing a list strings of names to ignore.  Only exact
    matches are ignored.  Passing ['file1.txt, 'file2.txt'] will exclude those files form the returned list if
    they exist.

    Parameters
    ----------
    zip_file_path : str
        Path to a zip file.
    path_inside_of_zipfile : str
        default is empty string which returns files in the root of the zip file.  if a non-empty string representing
        a path in the zip file is provided files in a directory matching that string will be returned.  String must be
        posix format.
    filenames_to_ignore : Optional list[str]
        Optional list of stings of file names that can be ignored.

    Returns
    -------
    list[Path]
        list containing paths to files in the zip file
    """

    if not filenames_to_ignore:
        filenames_to_ignore = []

    files_in_zip = set()

    try:
        zip_root_path = zipfile.Path(zip_file_path)
        folder_to_search = zip_root_path

        if path_inside_of_zipfile:
            folder_to_search = zip_root_path.joinpath(path_inside_of_zipfile)
            if not folder_to_search.exists():
                logger.warning(
                    f'Directory "{folder_to_search}" does not exist in zip file "{zip_file_path}"'
                )
                return set()

        for item in folder_to_search.iterdir():
            if item.is_file() and item.name not in filenames_to_ignore:
                files_in_zip.add(item)

        return files_in_zip

    except Exception as e:
        _error_handling(e, '', zip_file_path, '')
Example #19
def test_package_zip(tmpdir):
    path = Path(tmpdir.mkdir("package"))
    (path / "foo.txt").write_text("foo")
    (path / "bar").mkdir()
    (path / "bar" / "baz.txt").write_text("baz")
    (path / ".qux").touch()

    pack_file = package_zip(path / "foo.txt")
    with zipfile.ZipFile(pack_file.name, "r") as zip_file:
        assert zipfile.Path(zip_file, "foo.txt").is_file()

    pack_dir = package_zip(path)
    h1 = gethash(Path(pack_dir.name))

    with zipfile.ZipFile(pack_dir.name, "r") as zip_file:
        assert zipfile.Path(zip_file, "foo.txt").is_file()
        assert zipfile.Path(zip_file, "bar/").is_dir()
        assert zipfile.Path(zip_file, "bar/baz.txt").is_file()
        assert not zipfile.Path(zip_file, ".qux").exists()

    time.sleep(1)
    (path / "foo.txt").touch()

    pack_dir = package_zip(path)
    h2 = gethash(Path(pack_dir.name))

    assert h1 == h2

    pack_dir = package_zip(path, [path / "bar" / "baz.txt"])
    with zipfile.ZipFile(pack_dir.name, "r") as zip_file:
        assert not zipfile.Path(zip_file, str(path / "foo.txt")).exists()
        assert zipfile.Path(zip_file, str(path / "bar" / "baz.txt")).is_file()
Example #20
def fallback_resources(spec):
    package_directory = pathlib.Path(spec.origin).parent
    try:
        archive_path = spec.loader.archive
        rel_path = package_directory.relative_to(archive_path)
        return zipfile.Path(archive_path, str(rel_path) + '/')
    except Exception:
        pass
    return package_directory
Example #21
def write_gui_documents(gui_document_by_path: Dict[str, bytes]) -> None:
    for path, gui_document_contents in gui_document_by_path.items():
        gui_document_path = zipfile.Path(path, at='GuiDocument.xml')
        if gui_document_path.exists():
            continue
        with ZipFile(path, 'a', ZIP_DEFLATED) as fcstd:
            member = ZipInfo('GuiDocument.xml', time.localtime()[:6])
            member.compress_type = ZIP_DEFLATED
            fcstd.writestr(member, gui_document_contents)
Example #22
def _local(cls, root='.'):
    from pep517 import build, meta
    system = build.compat_system(root)
    builder = functools.partial(
        meta.build,
        source_dir=root,
        system=system,
    )
    return PathDistribution(zipfile.Path(meta.build_as_zip(builder)))
Example #23
    def zip_children(self):
        zip_path = zipfile.Path(self.root)
        names = zip_path.root.namelist()
        self.joinpath = zip_path.joinpath

        return (
            posixpath.split(child)[0]
            for child in names
            )
Example #24
    def html_zip(self, tmp_path, html_path):
        if not hasattr(zipfile, 'Path'):
            pytest.skip("Needs zipfile.Path")

        zip_path = tmp_path / 'qutebrowser.zip'
        with zipfile.ZipFile(zip_path, 'w') as zf:
            for path in html_path.iterdir():
                zf.write(path, path.relative_to(tmp_path))

        yield zipfile.Path(zip_path) / 'qutebrowser'
Example #25
def test_importlib_resource_load_zip_path() -> None:
    config_source = ImportlibResourcesConfigSource(provider="foo",
                                                   path="pkg://bar")
    conf = config_source._read_config(
        zipfile.Path(  # type: ignore
            "hydra/test_utils/configs/conf.zip",
            "config.yaml",
        ))
    assert conf.config == {"foo": "bar"}
    assert conf.header == {"package": None}
Example #26
def test_1(act: Action, fbk_file: Path, fdb_file: Path):
    zipped_fbk_file = zipfile.Path(act.files_dir / 'core_5078.zip',
                                   at='tmp_core_5078.fbk')
    fbk_file.write_bytes(zipped_fbk_file.read_bytes())
    with act.connect_server() as srv:
        srv.database.restore(database=fdb_file, backup=fbk_file)
        srv.wait()
    # This should execute without errors
    act.isql(switches=[str(fdb_file)],
             input='set list on; select * from do_changeTxStatus;',
             connect_db=False)
Example #27
def describe_all_csvs_in_zips_0(fs):
    for zip_name in get_zips(fs):
        my_zip = zipfile.ZipFile("/tmp/dl.zip")
        print(zip_name)
        for zip_info in my_zip.infolist():
            if not zip_info.filename.endswith(".csv"):
                continue
            print(zip_info.filename)
            my_zip_open = zipfile.ZipFile("/tmp/dl.zip")
            df = pd.read_csv(
                zipfile.Path(my_zip_open, zip_info.filename).open())
            print(df.describe())
Example #28
def __parse_npc(self):
    self.canopusnpc = None
    if self.filename:
        npcPath = Path(self.dirname, "canopus_npc", self.filename)
        npcZipPath = Path(self.dirname, "canopus_npc")
        if npcPath.exists():
            self.canopusnpc = np.loadtxt(npcPath)
        elif npcZipPath.exists():
            with zipfile.ZipFile(npcZipPath) as myzip:
                path = zipfile.Path(myzip, at=self.filename)
                if path.exists():
                    with path.open("r") as myfile:
                        self.canopusnpc = np.loadtxt(myfile)
Example #29
def files(package):
    spec = import_module(package).__spec__
    if spec.submodule_search_locations is None:
        raise TypeError("{!r} is not a package".format(package))

    package_directory = pathlib.Path(spec.origin).parent
    try:
        archive_path = spec.loader.archive
        rel_path = package_directory.relative_to(archive_path)
        return zipfile.Path(archive_path, str(rel_path) + "/")
    except Exception:
        pass
    return package_directory
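
A hedged usage note (the package and resource names are hypothetical): whether files() falls back to a plain directory or returns a zipfile.Path into the importer's archive, the result supports the same traversal interface:

resource = files('mypkg') / 'data.txt'  # hypothetical package and resource
print(resource.read_text())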
Example #30
def _read(self, dirs, filename, binary=False):
    '''
    Return text/binary contents of a file, or None if the file does not exist.
    '''
    # Build the "dir1/dir2/" prefix (empty at the archive root); the filename is appended once below.
    dirpath = '/'.join(dirs) + '/' if dirs else ''
    path = zipfile.Path(self._zipfile, dirpath + filename)
    if path.exists():
        if binary:
            return path.read_bytes()
        else:
            with path.open() as f:
                return '\n'.join(f.readlines())
    else:
        return None
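
Finally, a standalone sketch of the accessors this helper builds on (archive and member names are placeholders): a zipfile.Path member exposes exists(), read_text(), read_bytes() and open():

import zipfile

def read_member(archive: str, member: str, binary: bool = False):
    # Return the member's contents, or None if it is absent from the archive.
    path = zipfile.Path(archive, member)
    if not path.exists():
        return None
    return path.read_bytes() if binary else path.read_text()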