Example 1
    def _load(self):
        """Load the pickled store from disk; ignore files that fail to unpickle."""
        filename = self._filename
        if local.path(filename).exists():
            try:
                d = pickle_load(filename)
                # Drop the saved path so it can't clobber this instance's _filename
                del d["_filename"]
                self.update(d)
            except Exception:
                info(f"Store {filename} was unloadable")
Example 2
    def _write_runs(self, job_folder, run_descs, props=()):
        """
        Convert the Munch run_descs into per-run folders under job_folder,
        applying any "key=value=type" overrides given in props.
        """

        if not job_folder.exists():
            job_folder.mkdir()

        found_run_names = set()

        for i, run in enumerate(run_descs):
            # FIND or OVERRIDE run_name
            run_name = run.get("run_name")

            assert run_name not in found_run_names, f"Duplicate run_name '{run_name}'"
            found_run_names.add(run_name)

            # SETUP _erisyon block
            if "_erisyon" not in run:
                run._erisyon = Munch()
            run._erisyon.run_i = i
            run._erisyon.run_i_of = len(run_descs)
            run._erisyon.run_name = run_name

            # OVERRIDE with props
            for prop in props:
                k, v, t = prop.split("=")
                if t == "bool":
                    v = (v == "true")
                elif t == "int":
                    v = int(v)
                elif t == "float":
                    v = float(v)
                elif t == "int_list":
                    v = [int(i) for i in v.split(",")]
                elif t == "int_dict":
                    v = v.split(",")
                    v = {v[i]: int(v[i + 1]) for i in range(0, len(v), 2)}
                else:
                    raise TypeError(f"Unknown type in prop conversion '{t}'")
                utils.block_update(run, k, v)

            # Pop run_name so it isn't written into the run's task folder
            run.pop("run_name", None)
            folder = job_folder / run_name
            folder.mkdir()
            RunExecutor(folder, tasks=run).save()

            info(f"Wrote run to {folder}")
Example 3
def _do_work_orders_process_mode(zap):
    with ProcessPoolExecutor(max_workers=zap.max_workers) as executor:
        try:
            return _do_zap_with_executor(executor, zap)
        except KeyboardInterrupt:
            # If I do not os.kill the processes then it seems that the
            # executor will gracefully send kill signals and wait for
            # the children. I typically want to abandon whatever the
            # processes are doing and have them end instantly. Thus, I
            # reach in to get the child pids and kill -9 them.
            debug()
            for k, v in executor._processes.items():
                try:
                    debug(k, v.pid)
                    os.kill(v.pid, signal.SIGKILL)
                except ProcessLookupError:
                    info(f"{v.pid} had already died")
            raise
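The same hard-abort pattern as a self-contained sketch. Note that executor._processes is a private attribute of CPython's ProcessPoolExecutor and signal.SIGKILL is POSIX-only, so this is implementation- and platform-dependent:

import os
import signal
import time
from concurrent.futures import ProcessPoolExecutor

def _work(i):  # stand-in for a real work order
    time.sleep(60)
    return i

def run_with_hard_abort():
    with ProcessPoolExecutor(max_workers=2) as executor:
        try:
            return list(executor.map(_work, range(4)))
        except KeyboardInterrupt:
            # _processes maps internal ids to multiprocessing.Process objects
            for proc in executor._processes.values():
                try:
                    os.kill(proc.pid, signal.SIGKILL)
                except ProcessLookupError:
                    pass  # child already exited
            raise

if __name__ == "__main__":
    run_with_hard_abort()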
Example 4
    def progress(self, n_complete, n_total, retry):
        if retry:
            info(f"\nRetry {n_complete}")
        if self.progress_fn is not None:
            self.progress_fn(
                self.task_name, self.start_time, n_complete, n_total, self.phase
            )
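From the call above, a progress_fn must accept (task_name, start_time, n_complete, n_total, phase); a hypothetical callback, assuming start_time is a time.time() timestamp:

import time

def print_progress(task_name, start_time, n_complete, n_total, phase):
    # Treating start_time as a time.time() stamp is an assumption
    elapsed = time.time() - start_time
    print(f"{task_name} [phase {phase}]: {n_complete}/{n_total} after {elapsed:.1f}s")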
Example 5
def _info(msg):
    """mock point"""
    info(msg)
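The point of the wrapper is testability: callers log through _info, so a test can intercept it with a single patch. A self-contained sketch:

import sys
from unittest.mock import patch

def info(msg):
    print(msg)

def _info(msg):
    """mock point"""
    info(msg)

def do_work():
    _info("working")

# Patch the wrapper on this module; do_work resolves _info at call time
with patch.object(sys.modules[__name__], "_info") as mock_info:
    do_work()
    mock_info.assert_called_once_with("working")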
Example 6
def _show_work_order_exception(e):
    """Mock-point"""
    error("\nAn exception was thrown by a work_order ------")
    info("".join(e.exception_lines))
    error("----------------------------------------------")
Example 7
def counter(iterable):
    list_ = list(iterable)
    count = len(list_)
    for i, val in enumerate(list_):
        info(f"{i+1} of {count}")
        yield val
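Usage is simple; note that counter materializes the whole iterable up front to learn its length, so avoid handing it an unbounded generator:

results = [name.upper() for name in counter(["alpha", "beta", "gamma"])]
# info logs "1 of 3", "2 of 3", "3 of 3"; results == ["ALPHA", "BETA", "GAMMA"]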
Example 8
    def main(self):
        if self.construct_fail:
            return

        with local.cwd("/erisyon"):
            assert local.path("erisyon_root").exists()
            job_folder = self.validate_job_name_and_folder()

            schema = self.generator_klass.schema
            defaults = self.generator_klass.defaults

            requirements = schema.requirements()
            # APPLY defaults and then ask the user for any required args not already supplied
            generator_args = {}
            switches = self._switches_by_name

            if self.protein_random is not None:
                info(
                    f"Sampling {self.protein_random} random proteins from imported set"
                )
                n = len(self.derived_vals.protein)
                assert n >= self.protein_random
                self.derived_vals.protein = data.subsample(
                    self.derived_vals.protein, self.protein_random
                )
                assert len(self.derived_vals.protein) == self.protein_random

            for arg_name, arg_type, arg_help, arg_userdata in requirements:
                if (
                    arg_name in self.derived_vals
                    and self.derived_vals.get(arg_name) is not None
                ):
                    # Load from a derived switch (eg: protein)
                    generator_args[arg_name] = self.derived_vals[arg_name]
                elif arg_name in switches and switches.get(arg_name) is not None:
                    # Load from a switch
                    generator_args[arg_name] = getattr(self, arg_name)
                else:
                    # If the schema allows the user to enter the value manually
                    if arg_userdata.get("allowed_to_be_entered_manually"):
                        generator_args[arg_name] = self._request_field_from_user(
                            arg_name, arg_type, default=defaults.get(arg_name)
                        )

            # Intentionally run the generator before the job folder is written
            # so that a failed generate doesn't leave behind a partial job.
            try:
                generator_args["force_run_name"] = self.run_name
                generator = self.generator_klass(**generator_args)
                run_descs = generator.generate()
            except (SchemaValidationFailed, ValidationError) as e:
                # Emit clean failure and exit 1
                error(str(e))
                return 1

            # WRITE the job & copy any file sources
            self._write_runs(job_folder, run_descs, props=self.prop)
            (job_folder / "_gen_sources").delete()
            self.local_sources_tmp_folder.move(job_folder / "_gen_sources")

            if not self.skip_report:
                report = generator.report_assemble()
                utils.json_save(job_folder / "report.ipynb", report)

            utils.yaml_write(
                job_folder / "job_manifest.yaml",
                uuid=self.job_uuid,
                localtime=time.strftime("%Y-%m-%d, %H:%M:%S", time.localtime()),
                # Note: it seems localtime inside our container is UTC
                who=local.env.get("RUN_USER", "Unknown"),
                cmdline_args=sys.argv,
            )
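The requirements loop above consumes 4-tuples from schema.requirements(); a hypothetical entry showing the expected shape (names and help text are invented):

# (arg_name, arg_type, arg_help, arg_userdata)
requirements = [
    ("protein", str, "Protein(s) to simulate",
     dict(allowed_to_be_entered_manually=True)),
    ("n_samples", int, "Number of Monte Carlo samples",
     dict(allowed_to_be_entered_manually=False)),
]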