Example #1
    def __init__(self, directory: str, required: bool = False, **kwargs: Any) -> None:
        super().__init__(**kwargs)

        # Store configuration
        self.directory = validate_input_path(
            directory, file_ok=False, directory_ok=True
        )
        self.required = required
        self.infiles = {
            get_name(f): f
            for f in [
                validate_input_path(f, default_directory=self.directory)
                for f in listdir(self.directory)
                if f != ".DS_Store"
            ]
            if isfile(f)
        }

        # Prepare description
        desc = f"{self.name} {self.__class__.__name__} ({self.directory})"
        if self.downstream_stages is not None:
            if len(self.downstream_stages) >= 2:
                for stage in self.downstream_stages[:-1]:
                    desc += f"\n ├─ {stage}"
            desc += f"\n └─ {self.downstream_stages[-1]}"
        self.desc = desc
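
Every example on this page funnels paths through validate_input_path (and its counterpart validate_output_path), whose implementation is not shown here. The following is a minimal sketch of the assumed signature, inferred from the call sites on this page rather than taken from the actual library:

from os.path import isabs, isdir, isfile, join
from typing import Optional


def validate_input_path(
    path: str,
    file_ok: bool = True,
    directory_ok: bool = False,
    default_directory: Optional[str] = None,
) -> str:
    """Validate that an input path exists and is of the permitted kind.

    Sketch only: signature and defaults are inferred from the call sites
    on this page, not copied from the actual library.
    """
    # Resolve bare names against default_directory (see Examples #1 and #4)
    if not isabs(path) and default_directory is not None:
        path = join(default_directory, path)
    if not isfile(path) and not isdir(path):
        # Example #12 relies on this raising FileNotFoundError
        raise FileNotFoundError(f"'{path}' does not exist")
    if isfile(path) and not file_ok:
        raise ValueError(f"'{path}' is a file, not a directory")
    if isdir(path) and not directory_ok:
        raise ValueError(f"'{path}' is a directory, not a file")
    return path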
Example #2
    @classmethod
    def process_file_from_cl(cls, infile: str, outfile: str,
                             **kwargs: Any) -> None:
        infile = validate_input_path(infile)
        outfile = validate_output_path(outfile)
        # Pop processor-specific arguments so they are not forwarded twice
        model_infile = validate_input_path(kwargs.pop("model_infile"))
        device = kwargs.pop("device")
        upscaler = ESRGANProcessor.RRDBNetUpscaler(model_infile,
                                                   torch.device(device))
        cls.process_file(infile, outfile, upscaler=upscaler, **kwargs)
Example #3
    @classmethod
    def process_file_from_cl(cls, infile: str, outfile: str,
                             **kwargs: Any) -> None:
        infile = validate_input_path(infile)
        outfile = validate_output_path(outfile)
        workflow = validate_input_path(kwargs.pop("workflow"),
                                       file_ok=False,
                                       directory_ok=True)

        cls.process_file(infile, outfile, workflow=workflow, **kwargs)
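
These process_file_from_cl classmethods are evidently the command-line entry points. A hedged sketch of how one might be wired to argparse (the argument names mirror Example #2's kwargs; the actual CLI code is not shown on this page):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("infile")
parser.add_argument("outfile")
parser.add_argument("--model-infile", dest="model_infile", required=True)
parser.add_argument("--device", default="cpu")
args = parser.parse_args()

# ESRGANProcessor is the class from Example #2
ESRGANProcessor.process_file_from_cl(**vars(args))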
Example #4
    def __init__(
        self,
        pipeline: Pipeline,
        directory: str,
        downstream_stages: Optional[Union[List[str], str]] = None,
        **kwargs: Any,
    ) -> None:

        # Prepare attributes
        self.pipeline = pipeline
        self.directory = validate_output_path(directory,
                                              file_ok=False,
                                              directory_ok=True)
        if isinstance(downstream_stages, str):
            downstream_stages = [downstream_stages]
        self.downstream_stages = downstream_stages
        self.infiles = [
            validate_input_path(f, default_directory=self.directory)
            for f in listdir(self.directory) if f != ".DS_Store"
        ]
        self.infiles.sort(key=self.sort)

        # Prepare name and description
        self.name = self.__class__.__name__.lower()
        desc = f"source {self.__class__.__name__} ({self.directory})"
        if self.downstream_stages is not None:
            if len(self.downstream_stages) >= 2:
                for stage in self.downstream_stages[:-1]:
                    desc += f"\n ├─ {stage}"
            desc += f"\n └─ {self.downstream_stages[-1]}"
        self.desc = desc
Example #5
    def __init__(
        self,
        wip_directory: str,
        source: Dict[str, Dict[str, Any]],
        stages: Dict[str, Dict[str, Any]],
        verbosity: int = 1,
    ) -> None:

        # Store configuration
        self.wip_directory = validate_output_path(wip_directory,
                                                  file_ok=False,
                                                  directory_ok=True)
        self.verbosity = validate_int(verbosity, min_value=0)

        # Load configuration
        sources_module = import_module("pipescaler.sources")
        stage_modules = [
            import_module(f"pipescaler.{package}")
            for package in ["mergers", "processors", "sorters", "splitters"]
        ]

        # Configure source
        source_cls_name = list(source.keys())[0]
        source_args = list(source.values())[0]
        source_cls = getattr(sources_module, source_cls_name)
        self.source = source_cls(pipeline=self, **source_args)
        print(repr(self.source))

        # Configure stages
        self.stages: Dict[str, Stage] = {}
        for stage_name, stage_conf in stages.items():
            if "module" in stage_conf:
                module_path = validate_input_path(stage_conf.pop("module"))
            else:
                module_path = None
            stage_cls_name = list(stage_conf.keys())[0]
            stage_args = list(stage_conf.values())[0]
            if stage_args is None:
                stage_args = {}
            if module_path is not None:
                spec = spec_from_file_location(
                    splitext(basename(module_path))[0], module_path)
                module = module_from_spec(spec)
                spec.loader.exec_module(module)
                stage_cls = getattr(module, stage_cls_name)
            else:
                stage_cls = None
                for module in stage_modules:
                    try:
                        stage_cls = getattr(module, stage_cls_name)
                        break
                    except AttributeError:
                        continue
                if stage_cls is None:
                    raise AttributeError(f"Class {stage_cls_name} not found")
            stage = stage_cls(pipeline=self, name=stage_name, **stage_args)
            print(repr(stage))
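            # Assumption: stage() returns the stage's coroutine, and next()
            # advances it to its first yield so it is ready to receive images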
            self.stages[stage_name] = stage()
            next(self.stages[stage_name])
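
The constructor above implies a particular configuration shape: source and each entry in stages are single-key mappings from a class name to its keyword arguments, with an optional "module" key pointing at a file that defines the class. A hedged illustration as Python dicts (class names, paths, and arguments are invented for this sketch):

conf = {
    "wip_directory": "~/wip",
    "source": {
        # Hypothetical source class; Example #4 shows a likely signature
        "DirectorySource": {"directory": "~/input"},
    },
    "stages": {
        "upscale": {
            # ESRGANProcessor appears in Example #2; its arguments here are assumed
            "ESRGANProcessor": {"model_infile": "model.pth"},
        },
        "custom": {
            # "module" is popped first, then the remaining key names the class
            "module": "my_stages.py",
            "MyStage": None,  # None is replaced with {} by the constructor
        },
    },
}
# pipeline = Pipeline(**conf)  # assuming Pipeline is importable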
Example #6
    def process_file_in_pipeline(self, image: PipeImage) -> None:
        infile = validate_input_path(image.last)
        outfile = validate_output_path(
            self.pipeline.get_outfile(image, self.suffix))
        if not isfile(outfile):
            self.process_file(infile,
                              outfile,
                              verbosity=self.pipeline.verbosity)
        image.log(self.name, outfile)
Example #7
    def process_file_in_pipeline(self, image: PipeImage) -> None:
        infile = validate_input_path(image.last)
        outfile = validate_output_path(
            f"{self.output_directory}/{image.name}.{get_ext(image.last)}")
        if not isfile(outfile):
            self.process_file(infile,
                              outfile,
                              verbosity=self.pipeline.verbosity)
        image.log(self.name, outfile)
Example #8
    def __init__(self, infile: str) -> None:
        self.infile = validate_input_path(infile)
        self.name = get_name(self.infile)
        self.ext = get_ext(self.infile)

        with Image.open(self.infile) as image:
            self.mode: str = image.mode
            self.shape: Tuple[int, int] = image.size

        self.history = []
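
Examples #6 and #7 call image.last and image.log(...), which are not part of this excerpt. Given that __init__ creates self.history = [], they might look something like this (an assumption, not the actual implementation):

    def log(self, stage_name: str, outfile: str) -> None:
        # Record which stage produced which file
        self.history.append((stage_name, outfile))

    @property
    def last(self) -> str:
        # Most recent output file, falling back to the original infile
        if self.history:
            return self.history[-1][1]
        return self.infile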
Example #9
    def __init__(self, conf_file: str, **kwargs: Any) -> None:
        """
        Initializes.

        Args:
            conf_file (str): file from which to load configuration
        """
        super().__init__(**kwargs)

        # Input
        with open(validate_input_path(conf_file), "r") as f:
            conf = yaml.load(f, Loader=yaml.SafeLoader)
        self.pipeline = Pipeline(verbosity=self.verbosity, **conf)
Example #10
    def __init__(self, workflow: str, **kwargs: Any) -> None:
        super().__init__(**kwargs)

        # Store configuration
        self.workflow = validate_input_path(
            workflow
            if workflow.endswith(".workflow") else f"{workflow}.workflow",
            file_ok=False,
            directory_ok=True,
            default_directory=join(*split(package_root), "data", "workflows"),
        )

        # Prepare description
        desc = f"{self.name} {self.__class__.__name__} ({self.workflow})"
        if self.downstream_stages is not None:
            if len(self.downstream_stages) >= 2:
                for stage in self.downstream_stages[:-1]:
                    desc += f"\n ├─ {stage}"
            desc += f"\n └─ {self.downstream_stages[-1]}"
        self.desc = desc
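
The ternary above makes the ".workflow" extension optional when naming a workflow, with bare names then resolved against the package's data/workflows directory. A quick standalone illustration of the normalization (the workflow names are invented):

for workflow in ("resize", "resize.workflow"):
    name = workflow if workflow.endswith(".workflow") else f"{workflow}.workflow"
    print(name)  # prints "resize.workflow" both times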
Example #11
    @classmethod
    def process_file_from_cl(cls, infile: str, outfile: str,
                             **kwargs: Any) -> None:
        infile = validate_input_path(infile)
        outfile = validate_output_path(outfile)

        cls.process_file(infile, outfile, **kwargs)
Example #12
    def __init__(self, forks: Dict[str, Dict[str, Any]], **kwargs: Any) -> None:
        super().__init__(**kwargs)

        # Store configuration
        desc = f"{self.name} {self.__class__.__name__}"

        # Organize downstream forks
        forks_by_filename = {}
        default_fork_name = None
        default_downstream_stages = None
        for fork_name, fork_conf in forks.items():
            if fork_conf is None:
                fork_conf = {}

            # Parse filenames for this fork
            input_filenames = fork_conf.get("filenames")

            if input_filenames is None:
                if default_fork_name is not None:
                    raise ValueError(
                        "At most one configuration may omit 'filenames' and will be "
                        "used as the default fork; two or more have been provided."
                    )
                default_fork_name = fork_name
                downstream_stages = fork_conf.get("downstream_stages")
                if isinstance(downstream_stages, str):
                    downstream_stages = [downstream_stages]
                default_downstream_stages = downstream_stages
                continue

            if isinstance(input_filenames, str):
                input_filenames = [input_filenames]
            filenames = set()
            for input_filename in input_filenames:
                try:
                    input_filename = validate_input_path(
                        input_filename, file_ok=True, directory_ok=True
                    )
                    if isdir(input_filename):
                        filenames |= {get_name(f) for f in listdir(input_filename)}
                    else:
                        with open(input_filename, "r") as f:
                            filenames |= {
                                get_name(f)
                                for f in yaml.load(f, Loader=yaml.SafeLoader)
                            }
                except FileNotFoundError:
                    filenames |= {get_name(input_filename)}
                filenames.discard(".DS_Store")
            desc += f"\n ├─ {fork_name} ({len(filenames)} filenames)"

            # Parse downstream stages for this fork
            downstream_stages = fork_conf.get("downstream_stages")
            if isinstance(downstream_stages, str):
                downstream_stages = [downstream_stages]
            for filename in filenames:
                forks_by_filename[filename] = downstream_stages
            if downstream_stages is not None:
                if len(downstream_stages) >= 2:
                    for stage in downstream_stages[:-1]:
                        desc += f"\n │   ├─ {stage}"
                desc += f"\n │   └─ {downstream_stages[-1]}"
            else:
                desc += f"\n │   └─"

        # Add description for default fork
        if default_fork_name is None:
            default_fork_name = "default"
        desc += f"\n └─ {default_fork_name}"
        if default_downstream_stages is not None:
            if len(default_downstream_stages) >= 2:
                for stage in default_downstream_stages[:-1]:
                    desc += f"\n     ├─ {stage}:"
            desc += f"\n     └─ {default_downstream_stages[-1]}"
        else:
            desc += f"\n     └─"

        # Store results
        self.desc = desc
        self.forks_by_filename = forks_by_filename
        self.default_downstream_stages = default_downstream_stages
        print(self.forks_by_filename)
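
Working backwards from the parsing above, a forks configuration maps each fork name to an optional "filenames" source (a directory, a YAML file containing a list, or literal names) and optional "downstream_stages"; at most one fork may omit "filenames" and becomes the default. A hedged illustration (paths and stage names are invented):

forks = {
    "characters": {
        # May be a directory, a YAML list file, or literal filenames
        "filenames": "conf/characters.yaml",
        "downstream_stages": ["upscale", "denoise"],
    },
    "skip": {
        # Names that do not exist on disk are treated as literal filenames
        "filenames": ["title_screen", "credits"],
        # No downstream_stages: these images go no further
    },
    "everything_else": {
        # No "filenames": this becomes the default fork
        "downstream_stages": "copy",
    },
}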
Example #13
    def model_infile(self, value: str) -> None:
        self._model_infile = validate_input_path(value)
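
This is presumably the setter half of a property; a sketch of the full pair (the getter is assumed, not shown in the excerpt):

    @property
    def model_infile(self) -> str:
        """Path to the model input file."""
        return self._model_infile

    @model_infile.setter
    def model_infile(self, value: str) -> None:
        self._model_infile = validate_input_path(value)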