Example #1
    def assert_env(self):
        must_exist = ("ERISYON_ROOT", "ERISYON_HEADLESS")
        found = 0
        for e in must_exist:
            if e in local.env:
                found += 1
            else:
                error(f'Environment variable "{e}" not found.')

        if found != len(must_exist):
            raise CommandError
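
A minimal usage sketch of the check above, assuming assert_env lives on a plumbum cli.Application subclass and CommandError is the exception type defined elsewhere in this codebase; the helper name and variable values are illustrative only:

    from plumbum import local

    def check_startup_env(app):
        # Hypothetical helper: assert_env passes silently when both required
        # variables are present in the environment.
        with local.env(ERISYON_ROOT="/erisyon", ERISYON_HEADLESS="1"):
            app.assert_env()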
Example #2
    def run(cls, argv=None, exit=True):
        """
        ZBS: Plumbum subcommand startup sequence is complicated.
        But, during the default run() it instantiates this class and passes
        only the next argument which prevents me from jamming dynamic switches
        into the class. So here I duplicate the argument I need argv[1]
        into the head of the list.  And then later I have to overload _parse_args()
        in order to pop those arguments back off.

        Also, if you pass in "--help" that would normally be handled by
        plumbum correctly, but these hacks prevent that so I have
        to keep track of the construct_fail and let it proceed so that
        an instance it correctly allocated because the "help" commands
        only work on a functional instance (ie you can not raise the Help
        exception during construction).
        """
        cls.construct_fail = False
        if not argv or len(argv) < 2 or argv[1].startswith("--"):
            if argv is not None and len(argv) > 1 and argv[1] == "--readme":
                # This is a crazy work-around to get the app instance
                # to construct so I can print the readme.
                cls.construct_fail = True
                inst = super(GenApp, cls).run(
                    argv=["", "calib", "--job=foo"], exit=False
                )
                inst[0].readme()
                return 0

            cls.construct_fail = True
            error(
                "You must specify a generator as the first argument after 'gen'.\n"
                f"Options are {', '.join(GenApp.generator_klass_by_name.keys())}"
            )
            argv = ["gen", "--help"]

        if argv is not None:
            return super(GenApp, cls).run(
                argv=[utils.safe_list_get(argv, 1)] + argv, exit=exit
            )
        else:
            return super(GenApp, cls).run(argv=argv, exit=exit)
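
Because the readme work-around above relies on exit=False returning an (instance, retcode) tuple from plumbum's Application.run, a hedged invocation sketch using the same generator and switch names that appear in that work-around:

    # The generator name from argv[1] is duplicated onto the head of the list,
    # as the docstring above describes; exit=False returns (instance, retcode)
    # instead of calling sys.exit.
    inst, retcode = GenApp.run(argv=["gen", "calib", "--job=foo"], exit=False)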
Example #3
def _print_error(message):
    """mock-point"""
    log.error(message)
Example #4
def http_method(url, method="GET", body="", headers=None, n_retries=0, **kwargs):
    """
    Simple url caller, avoids the requests library.

    Rules:
        Raises HTTPNonSuccessStatus on anything but 2XX
        Retries (with reasonable exponential backoff) up to n_retries times
        Passes kwargs to the HTTP Connection class
        Uses Content-Length if provided
        Decodes from UTF-8 unless application/octet-stream
        Returns a dict from json.loads if application/json
        Returns str in all other cases
    """
    # Avoid the mutable-default-argument pitfall for headers
    if headers is None:
        headers = {}

    urlp = urlsplit(url)

    if urlp.scheme == "http":
        conn = http.client.HTTPConnection(urlp.netloc, **kwargs)
    elif urlp.scheme == "https":
        conn = http.client.HTTPSConnection(urlp.netloc, **kwargs)
    else:
        raise TypeError("Unknown protocol")

    def without_retry():
        conn.request(method,
                     urlp.path + "?" + urlp.query,
                     body=body,
                     headers=headers)
        response = conn.getresponse()
        if str(response.status)[0] != "2":
            raise HTTPNonSuccessStatus(response.status, url)
        return response

    @_retry(
        retry_on_exception=lambda e: isinstance(e, HTTPNonSuccessStatus) and
        str(e.code)[0] != "3",
        wait_exponential_multiplier=100,
        wait_exponential_max=500,
        stop_max_attempt_number=n_retries,
    )
    def with_retry():
        return without_retry()

    try:
        if n_retries > 0:
            response = with_retry()
        else:
            response = without_retry()
    except Exception as e:
        error(f"\nFailure during http request:\n"
              f"  domain={urlp.scheme}://{urlp.netloc}\n"
              f"  method={method}\n"
              f"  urlp.path={urlp.path}\n"
              f"  urlp.query={urlp.query}\n"
              f"  body={body}\n"
              f"  headers={headers}\n")
        raise e

    if response.getheader("Content-Length") is not None:
        length = int(response.getheader("Content-Length"))
        result = response.read(length)
    else:
        result = response.read()

    if response.getheader("Content-Type") != "application/octet-stream":
        result = result.decode("utf-8")

    if response.getheader("Content-Type") == "application/json":
        result = json.loads(result)

    return result
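
A hedged usage sketch for the helper above; the URLs are hypothetical, and the dict return assumes the server sets Content-Type to application/json:

    # GET a JSON document with up to 3 retries; the application/json branch
    # above returns it as a dict.
    status = http_method("https://api.example.com/v1/status?verbose=1", n_retries=3)

    # POST a small JSON body; extra kwargs such as timeout are forwarded to
    # http.client.HTTPSConnection.
    echo = http_method(
        "https://api.example.com/v1/echo",
        method="POST",
        body='{"ping": true}',
        headers={"Content-Type": "application/json"},
        timeout=10,
    )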
Example #5
def _error(msg):
    """Mock-point"""
    error(msg)
Example #6
def _show_work_order_exception(e):
    """Mock-point"""
    error("\nAn exception was thrown by a work_order ------")
    info("".join(e.exception_lines))
    error("----------------------------------------------")
Example #7
    def main(self):
        if self.construct_fail:
            return

        with local.cwd("/erisyon"):
            assert local.path("erisyon_root").exists()
            job_folder = self.validate_job_name_and_folder()

            schema = self.generator_klass.schema
            defaults = self.generator_klass.defaults

            requirements = schema.requirements()
            # APPLY defaults and then ask the user for any elements that were not supplied
            generator_args = {}
            switches = self._switches_by_name

            if self.protein_random is not None:
                info(
                    f"Sampling {self.protein_random} random proteins from imported set"
                )
                n = len(self.derived_vals.protein)
                assert n >= self.protein_random
                self.derived_vals.protein = data.subsample(
                    self.derived_vals.protein, self.protein_random
                )
                assert len(self.derived_vals.protein) == self.protein_random

            for arg_name, arg_type, arg_help, arg_userdata in requirements:
                if (
                    arg_name in self.derived_vals
                    and self.derived_vals.get(arg_name) is not None
                ):
                    # Load from a derived switch (eg: protein)
                    generator_args[arg_name] = self.derived_vals[arg_name]
                elif arg_name in switches and switches.get(arg_name) is not None:
                    # Load from a switch
                    generator_args[arg_name] = getattr(self, arg_name)
                else:
                    # If the schema allows the user to enter manually
                    if arg_userdata.get("allowed_to_be_entered_manually"):
                        generator_args[arg_name] = self._request_field_from_user(
                            arg_name, arg_type, default=defaults.get(arg_name)
                        )

            # Intentionally run the generate before the job folder is written
            # so that if generate fails it doesn't leave around a partial job.
            try:
                generator_args["force_run_name"] = self.run_name
                generator = self.generator_klass(**generator_args)
                run_descs = generator.generate()
            except (SchemaValidationFailed, ValidationError) as e:
                # Emit clean failure and exit 1
                error(str(e))
                return 1

            # WRITE the job & copy any file sources
            self._write_runs(job_folder, run_descs, props=self.prop)
            (job_folder / "_gen_sources").delete()
            self.local_sources_tmp_folder.move(job_folder / "_gen_sources")

            if not self.skip_report:
                report = generator.report_assemble()
                utils.json_save(job_folder / "report.ipynb", report)

            utils.yaml_write(
                job_folder / "job_manifest.yaml",
                uuid=self.job_uuid,
                localtime=time.strftime("%Y-%m-%d, %H:%M:%S", time.localtime()),
                # Note: it seems localtime inside our container is UTC
                who=local.env.get("RUN_USER", "Unknown"),
                cmdline_args=sys.argv,
            )
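
The argument-resolution loop in main() applies a simple precedence: a derived value wins, then a CLI switch, then a manual prompt when the schema allows it. A standalone sketch of that precedence with plain dicts, independent of the schema classes (all names here are illustrative):

    def resolve_arg(arg_name, derived_vals, switches, allow_manual, prompt, default=None):
        # Same precedence as the loop above: derived value, then switch,
        # then an interactive prompt when manual entry is permitted.
        if derived_vals.get(arg_name) is not None:
            return derived_vals[arg_name]
        if switches.get(arg_name) is not None:
            return switches[arg_name]
        if allow_manual:
            return prompt(arg_name, default)
        return None  # the loop above simply leaves the argument unset in this case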
Example #8
    def main(self, job_folder=None):
        switches = utils.plumbum_switches(self)

        if job_folder is None:
            error(f"No job_folder was specified")
            return 1

        important(
            f"Plaster run {job_folder} limit={self.limit} started at {arrow.utcnow().format()}"
        )

        job_folder = assets.validate_job_folder_return_path(
            job_folder, allow_run_folders=True)
        if not job_folder.exists():
            error(f"Unable to find the path {job_folder}")
            return 1

        # Find all the plaster_run.yaml files. They might be in run subfolders
        found = list(
            job_folder.walk(filter=lambda p: p.name == "plaster_run.yaml"))
        run_dirs = [p.dirname for p in found]

        if len(run_dirs) == 0:
            error(
                "Plaster: Nothing to do because no run_dirs have plaster_run.yaml files"
            )
            return 1

        # A normal run where all happens in this process
        failure_count = 0
        for run_dir_i, run_dir in enumerate(sorted(run_dirs)):

            metrics(
                _type="plaster_start",
                run_dir=run_dir,
                run_dir_i=run_dir_i,
                run_dir_n=len(run_dirs),
                **switches,
            )
            important(
                f"Starting run subdirectory {run_dir}. {run_dir_i + 1} of {len(run_dirs)}"
            )

            try:
                with zap.Context(cpu_limit=self.cpu_limit,
                                 debug_mode=self.debug_mode):
                    run = RunExecutor(run_dir).load()
                    if "_erisyon" in run.config:
                        metrics(_type="erisyon_block", **run.config._erisyon)

                    failure_count += run.execute(
                        force=self.force,
                        limit=self.limit.split(",") if self.limit else None,
                        clean=self.clean,
                        n_fields_limit=self.n_fields_limit,
                        skip_s3=self.skip_s3,
                    )
            except Exception as e:
                failure_count += 1
                if not self.continue_on_error:
                    raise e

        if (failure_count == 0 and self.limit is None and not self.clean
                and not self.skip_reports):
            # RUN reports
            report_src_path = job_folder / "report.ipynb"
            report_dst_path = job_folder / "report.html"
            if self.force or (
                report_src_path.exists()
                and utils.out_of_date(report_src_path, report_dst_path)
            ):
                self.run_ipynb(report_src_path)
            return 0

        return failure_count
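
The run discovery above leans on plumbum's LocalPath.walk with a name filter; a minimal standalone sketch of the same pattern (the job path is hypothetical):

    from plumbum import local

    job_folder = local.path("/erisyon/jobs_folder/my_job")  # illustrative path
    # Collect every directory that directly contains a plaster_run.yaml file.
    found = list(job_folder.walk(filter=lambda p: p.name == "plaster_run.yaml"))
    run_dirs = sorted(p.dirname for p in found)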