Example #1
import os
import shutil
import tempfile
from typing import Iterable, Optional, Union

# 'find_matches' and 'ensure_folder' are helpers from the bring project's
# own utility modules; they are not shown here.


def copy_filtered_files(
    orig: str,
    include: Union[str, Iterable[str]],
    target: Optional[str] = None,
    move_files: bool = False,
    flatten: bool = False,
) -> str:
    """Copy (or move) files under 'orig' matching 'include' into 'target'.

    If 'target' is None, a fresh temporary directory is created. Returns
    the target directory path.
    """
    matches = list(find_matches(path=orig, include_patterns=include))

    if target is None:
        target = tempfile.mkdtemp(prefix="file_filter_")
    else:
        ensure_folder(target)

    if not matches:
        return target

    for m in matches:
        source_file = os.path.join(orig, m)
        if flatten:
            target_file = os.path.join(target, os.path.basename(m))
        else:
            target_file = os.path.join(target, m)
        parent = os.path.dirname(target_file)
        ensure_folder(parent)
        if move_files:
            shutil.move(source_file, target_file)
        else:
            shutil.copy2(source_file, target_file)

    return target
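A minimal usage sketch (assuming 'copy_filtered_files' and its helpers are importable from the bring project); the path and pattern are illustrative:

docs_dir = copy_filtered_files("/tmp/src", include="*.md", flatten=True)
print(f"Filtered copies written to: {docs_dir}")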
Example #2
    async def mogrify(self, *value_names: str, **requirements) -> Mapping[str, Any]:

        download_url = requirements["url"]
        target_file_name = requirements["target_file_name"]

        cache_path = calculate_cache_path(
            base_path=BRING_DOWNLOAD_CACHE, url=download_url
        )

        target_path = os.path.join(cache_path, target_file_name)

        if os.path.exists(target_path):
            return {"file_path": target_path}

        ensure_folder(cache_path)

        log.debug(f"Downloading url: {download_url}")
        # Using the client as an async context manager guarantees it is
        # closed, even if opening the target file fails. (The original
        # try/finally raised a NameError when 'httpx.AsyncClient()' itself
        # failed, because 'client' was never bound.)
        async with httpx.AsyncClient() as client:
            async with await aopen(target_path, "wb") as f:
                async with client.stream("GET", download_url) as response:
                    async for chunk in response.aiter_bytes():
                        await f.write(chunk)

        return {"file_path": target_path}
Example #3
File: git.py Project: makkus/bring
async def ensure_repo_cloned(url: str, update: bool = False) -> str:

    path = calculate_cache_path(base_path=BRING_GIT_CHECKOUT_CACHE, url=url)
    parent_folder = os.path.dirname(path)

    exists = os.path.exists(path)

    if exists and not update:
        return path

    ensure_folder(parent_folder)

    if not exists:
        git = GitProcess(
            "clone", url, path, working_dir=parent_folder, GIT_TERMINAL_PROMPT="0"
        )
    else:
        git = GitProcess("fetch", working_dir=path)

    await git.run(wait=True)

    return path
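A hedged usage sketch: 'ensure_repo_cloned' is a coroutine, so it needs an event loop (the repository URL is illustrative):

import asyncio

repo_path = asyncio.run(ensure_repo_cloned("https://github.com/makkus/bring.git", update=True))
print(f"Checkout cached at: {repo_path}")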
Example #4
    async def mogrify(self, *value_names: str,
                      **requirements) -> Mapping[str, Any]:

        path: str = requirements["folder_path"]
        subfolder: str = requirements["subfolder"]
        flatten: bool = requirements.get("flatten", False)

        target_path = self.create_temp_dir(prefix="subfolder_")
        subfolder_path = os.path.join(target_path, subfolder)

        if not flatten:
            # Move the whole source tree so it becomes the new subfolder.
            shutil.move(path, subfolder_path)
        else:
            # Move each matched file directly into the subfolder, dropping
            # any intermediate directories.
            all_files = find_matches(path, output_absolute_paths=True)
            ensure_folder(subfolder_path)
            for f in all_files:
                target = os.path.join(subfolder_path, os.path.basename(f))
                if os.path.exists(target):
                    log.info(
                        f"Duplicate file '{os.path.basename(target)}', ignoring..."
                    )
                    continue
                shutil.move(f, target)

        return {"folder_path": target_path}
Example #5
    async def mogrify(self, *value_names: str,
                      **requirements) -> Mapping[str, Any]:

        artefact_path = requirements["file_path"]
        remove_root = requirements.get("remove_root", None)

        base_target = self.create_temp_dir("extract_")
        target_folder = os.path.join(base_target, "extracted")

        extract_folder = os.path.join(base_target, "extract")

        if artefact_path.endswith(".gz") and not artefact_path.endswith(".tar.gz"):
            # Plain gzip file (not a tarball): decompress into a single file.
            new_file_name = os.path.basename(artefact_path)[0:-3]
            ensure_folder(extract_folder)
            new_path = os.path.join(extract_folder, new_file_name)
            with gzip.open(artefact_path, "rb") as f_in:
                with open(new_path, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
        else:
            shutil.unpack_archive(artefact_path, extract_folder)

        if remove_root is None:
            # Auto-detect: strip the root level if the archive contains
            # exactly one top-level folder.
            children = os.listdir(extract_folder)
            remove_root = len(children) == 1 and os.path.isdir(
                os.path.join(extract_folder, children[0])
            )

        if remove_root:
            children = os.listdir(extract_folder)
            if len(children) == 0:
                raise FrklException(
                    msg="Can't remove archive subfolder.",
                    reason=f"No root file/folder for extracted archive: {artefact_path}",
                )
            elif len(children) > 1:
                raise FrklException(
                    msg="Can't remove archive subfolder.",
                    reason=f"More than one root file/folder: {', '.join(children)}",
                )

            root = os.path.join(extract_folder, children[0])
            if not os.path.isdir(root):
                raise FrklException(
                    msg="Can't remove archive root.",
                    reason=f"Not a folder: {children[0]}",
                )
            shutil.move(root, target_folder)
            shutil.rmtree(extract_folder)
            shutil.move(root, target_folder)
            shutil.rmtree(extract_folder)
        else:
            shutil.move(extract_folder, target_folder)

        return {"folder_path": target_folder}
Example #6
    def __init__(self, **config: Any):

        self._cache_dir = os.path.join(
            BRING_PKG_CACHE, "resolvers", from_camel_case(self.__class__.__name__)
        )
        ensure_folder(self._cache_dir, mode=0o700)

        self._config: Mapping[str, Any] = get_seeded_dict(
            PKG_RESOLVER_DEFAULTS, config
        )
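'get_seeded_dict' is a frkl/bring helper; a plausible stand-in, assuming later mappings override earlier ones (the name 'seeded_dict' is hypothetical):

from typing import Any, Mapping, Optional

def seeded_dict(*dicts: Optional[Mapping[str, Any]]) -> Mapping[str, Any]:
    # Merge left to right; keys in later dicts win, mirroring how the
    # resolver defaults above are overridden by per-instance config.
    merged: dict = {}
    for d in dicts:
        if d:
            merged.update(d)
    return merged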
Example #7
    async def _merge_item(
        self,
        item_id: str,
        item: Any,
        item_metadata: Mapping[str, Any],
        merge_config: Mapping[str, Any],
    ) -> Optional[MutableMapping[str, Any]]:

        item_details = self.pkg_spec.get_item_details(item_id)

        if not item_details:
            log.debug(f"Ignoring file item: {item_id}")
            return None

        target_id = item_details[PATH_KEY]

        if self.pkg_spec.flatten:
            target_path = os.path.join(self.path, os.path.basename(target_id))
        else:
            target_path = os.path.join(self.path, target_id)

        if self.pkg_spec.single_file:
            children = os.listdir(self.path)
            if children:
                raise FrklException(
                    msg=f"Can't merge item '{item_id}'.",
                    reason=f"Package is marked as single file, and target path '{self.path}' already contains a child.",
                )

        ensure_folder(os.path.dirname(target_path))

        move_method = merge_config.get("move_method", "copy")
        if move_method == "move":
            shutil.move(item, target_path)
        elif move_method == "copy":
            shutil.copy2(item, target_path)
        else:
            raise ValueError(f"Invalid 'move_method' value: {move_method}")

        if "mode" in item_details.keys():
            mode_value = item_details["mode"]
            if not isinstance(mode_value, str):
                mode_value = str(mode_value)

            mode = int(mode_value, base=8)
            os.chmod(target_path, mode)

        self._merged_items[target_path] = MetadataFileItem(
            id=target_path, parent=self, metadata=item_metadata
        )

        return {"msg": "installed"}
Example #8
    async def mogrify(self, *value_names: str,
                      **requirements) -> Mapping[str, Any]:

        repl_dict = requirements["repl_dict"]
        folder_path = requirements["folder_path"]
        include = requirements.get("include", None)
        flatten = requirements.get("flatten", False)
        template_type = requirements.get("template_type", "jinja")

        if isinstance(template_type, str):
            # A bare string is shorthand for a jinja delimiter profile.
            template_type = {"type": "jinja", "delimiter_profile": template_type}

        if template_type.get("type", "jinja") != "jinja":
            raise NotImplementedError("Only jinja templating supported so far.")

        delimiter_profile = template_type.get("delimiter_profile", "default")

        jinja_env = get_global_jinja_env(delimiter_profile=delimiter_profile)

        matches = list(find_matches(path=folder_path, include_patterns=include))

        target = self.create_temp_dir("template_")
        for m in matches:
            file_path = os.path.join(folder_path, m)

            if flatten:
                target_file = os.path.join(target, os.path.basename(m))
            else:
                target_file = os.path.join(target, m)
                ensure_folder(os.path.dirname(target_file))

            await self.process_template(
                source=file_path,
                target=target_file,
                repl_dict=repl_dict,
                jinja_env=jinja_env,
            )

        return {"folder_path": target}
Example #9
    def __init__(
        self,
        t_id: str,
        tingistry: "Tingistry",
        working_dir: Optional[str] = None,
        is_root_transmogrifier: bool = True,
        **kwargs,
    ):

        self._id = t_id

        self._is_root_transmogrifier = is_root_transmogrifier

        if working_dir is None:
            working_dir = os.path.join(BRING_WORKSPACE_FOLDER, "pipelines", self._id)

        self._working_dir = working_dir
        ensure_folder(self._working_dir)

        def delete_workspace():
            shutil.rmtree(self._working_dir, ignore_errors=True)

        # Keep the workspace around for inspection when DEBUG=true.
        debug = os.environ.get("DEBUG", "false")
        if debug.lower() != "true":
            atexit.register(delete_workspace)
        self._tingistry = tingistry

        self._target_folder = os.path.join(BRING_WORKSPACE_FOLDER, "results", self._id)
        ensure_folder(os.path.dirname(self._target_folder))

        super().__init__(**kwargs)

        self._current: Optional[Mogrifier] = None
        self._last_item: Optional[Mogrifier] = None
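The cleanup-on-exit pattern in isolation, as a minimal sketch:

import atexit
import os
import shutil
import tempfile

workdir = tempfile.mkdtemp(prefix="pipeline_")
# Remove the temp workspace at interpreter exit, unless debugging.
if os.environ.get("DEBUG", "false").lower() != "true":
    atexit.register(shutil.rmtree, workdir, ignore_errors=True)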
Example #10
    def __init__(
        self,
        meta: TingMeta,
        name: Optional[str] = None,
        bring_config: Optional[BringConfig] = None,
    ):

        prototings: Iterable[Mapping] = BRINGISTRY_INIT["prototings"]  # type: ignore
        tings: Iterable[Mapping] = BRINGISTRY_INIT["tings"]  # type: ignore
        modules: Iterable[str] = BRINGISTRY_INIT["modules"]  # type: ignore
        classes: Iterable[Union[Type, str]] = BRINGISTRY_INIT[  # type: ignore
            "classes"
        ]

        if name is None:
            name = "bring"

        ensure_folder(BRING_WORKSPACE_FOLDER)

        if meta is None:
            raise Exception(
                "Can't create 'bring' object: 'meta' argument not provided, this is a bug"
            )

        self._tingistry_obj: Tingistry = meta.tingistry

        self._defaults: Optional[Mapping[str, Any]] = None

        self._tingistry_obj.add_module_paths(*modules)
        self._tingistry_obj.add_classes(*classes)

        if prototings:
            for pt in prototings:
                pt_name = pt["prototing_name"]
                existing = self._tingistry_obj.get_ting(pt_name)
                if existing is None:
                    self._tingistry_obj.register_prototing(**pt)

        if tings:
            for t in tings:
                self._tingistry_obj.create_ting(**t)

        super().__init__(name=name, meta=meta)

        # Collect 'BRING_'-prefixed environment variables into the plugin
        # config, stripping the prefix (len("bring_") == 6).
        env_conf: MutableMapping[str, Any] = {}
        for k, v in os.environ.items():
            k = k.lower()
            if not k.startswith("bring_"):
                continue
            env_conf[k[6:]] = v

        env_conf["bringistry"] = self
        self.typistry.get_plugin_manager("pkg_type", plugin_config=env_conf)

        # self._transmogritory = Transmogritory(self._tingistry_obj)
        self._transmogritory = self._tingistry_obj.get_ting(
            "bring.transmogritory", raise_exception=False
        )
        if self._transmogritory is None:
            self._transmogritory = self._tingistry_obj.create_singleting(
                "bring.transmogritory", Transmogritory
            )

        self._index_lock: Optional[Lock] = None

        self._bring_config: Optional[BringConfig] = bring_config
        self._freckles: Optional[Freckles] = None

        self._index_factory = IndexFactory(
            tingistry=self._tingistry_obj, bring_config=self._bring_config
        )

        if self._bring_config is not None:
            self._bring_config.set_bring(self)
            self._freckles = self._bring_config.freckles
            register_bring_frecklet_types(bring=self, freckles=self._freckles)

        self._indexes: Dict[str, Optional[BringIndexTing]] = {}
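The environment-scanning loop above, condensed into a self-contained sketch:

import os

# e.g. BRING_OUTPUT=json becomes {"output": "json"}
env_conf = {
    k.lower()[len("bring_"):]: v
    for k, v in os.environ.items()
    if k.lower().startswith("bring_")
}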
Example #11
    async def export_index(ctx, self, output_file, index: str, force: bool,
                           check: bool):

        click.echo()

        _index = os.path.abspath(os.path.expanduser(index))
        if not os.path.isdir(os.path.realpath(_index)):
            click.echo(
                f"Can't export index '{index}': path does not exist or is not a folder"
            )
            sys.exit(1)

        if output_file is None:
            _path = os.path.join(
                _index, BRING_METADATA_FOLDER_NAME, DEFAULT_FOLDER_INDEX_NAME
            )
        elif os.path.isdir(os.path.realpath(output_file)):
            click.echo(
                f"Can't write index file, specified output file is a folder: {output_file}"
            )
            # Without exiting here, '_path' would be unbound below.
            sys.exit(1)
        else:
            _path = os.path.abspath(os.path.expanduser(output_file))

        index_obj = await self._bring.get_index(_index)
        exported_index = await index_obj.export_index()

        # The index counts as empty if it only contains internal
        # ("_"-prefixed) keys.
        empty: bool = all(k.startswith("_") for k in exported_index.keys())

        if empty:
            click.echo("Index does not contain any packages, doing nothing...")
            sys.exit(1)

        inconsistent: Iterable[str] = []
        if os.path.exists(_path):

            old_index = await self._bring.get_index(_path)
            diff = IndexDiff(old_index, index_obj)

            inconsistent = await diff.get_inconsistent_package_names()

            if inconsistent:

                if not force:
                    console.print(
                        f"[red bold]Can't update index, inconsistencies exist for package(s): {', '.join(sorted(inconsistent))}[/red bold]"
                    )
                else:
                    console.print(
                        f"Force-updating old index, even though there are inconsistencies for packages: {', '.join(sorted(inconsistent))}"
                    )
            else:
                console.print("Older index file exists, no inconsistencies.")

            console.line()
            console.print("Details:")
            console.line()
            console.print(diff)
        else:
            console.print("No previous index file exists, writing new one...")
            ensure_folder(os.path.dirname(_path))

        if inconsistent and not force:
            sys.exit(1)

        if not check:
            console.line()
            console.print(f"Exporting index to file: {_path}")

            json_data = json.dumps(exported_index, indent=2) + "\n"
            json_bytes = json_data.encode("utf-8")

            with gzip.GzipFile(_path, "w") as f:
                f.write(json_bytes)
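The exported index is gzip-compressed JSON, so it can be read back with the stdlib (the path is illustrative):

import gzip
import json

with gzip.open("path/to/exported_index", "rt", encoding="utf-8") as f:
    index_data = json.load(f)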