Code Example #1
def generate_image_previews(input_path, input_dir, filename_base):
    for suffix, size in OUTPUT_FORMATS:
        output_path = os.path.join(input_dir, f"{filename_base}{suffix}.jpg")
        try:
            generate_image_preview(input_path, output_path, size)
        except Exception:
            log.info("Preview file (imaging) not generated for", input_path, "with size", size)
Code Example #2
    def run(self, exit_code, cmd, *args):
        """Run the subprocess string `cmd`,  appending any extra values
        defined by `args`.

        Parameters
        ----------
        exit_code : int
            Exit status passed to `sysexit.exit_on_exception`; it becomes the
            program's exit code if the command fails.
        cmd : str
            A multi-word subprocess command string
        args : tuple of str
            Extra single-word parameters to append to `cmd`,  nominally
            filenames or switches.

        Returns
        -------
        None

        Notes
        -----
        Logs executed command tuple.

        Checks subprocess error status against class attribute `ignore_err_nums`
        to ignore instrument-specific error codes.

        Issues an ERROR message and exits the program for any other non-zero
        value.
        """
        cmd = tuple(cmd.split()) + args  # Handle stage values with switches.
        self.divider("Running:", cmd)
        with sysexit.exit_on_exception(exit_code, self.ipppssoot, "Command:",
                                       repr(cmd)):
            err = subprocess.call(cmd)
            if err in self.ignore_err_nums:
                log.info("Ignoring error status =", err)
            elif err:
                raise sysexit.SubprocessFailure(err)
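A hypothetical caller, for context: the stage method name, calibration command, and raw-file name below are illustrative assumptions, not taken from the excerpt; exit_codes.STAGE1_ERROR is the failure code used elsewhere in these examples.

    def stage1(self):
        # The command string may embed switches; extra single-word arguments
        # such as filenames are appended before the tuple reaches subprocess.call.
        self.run(exit_codes.STAGE1_ERROR, "calxxx.e -v", f"{self.ipppssoot}_raw.fits")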
Code Example #3
def main(ipppssoot, input_uri_prefix, output_uri_prefix):
    """Generates previews based on input and output directories
    according to specified args
    """
    output_path = messages.get_local_outpath(output_uri_prefix, ipppssoot)
    msg = messages.Messages(output_uri_prefix, output_path, ipppssoot)
    msg.preview_message()  # processing
    logger = log.CaldpLogger(enable_console=False, log_file="preview.txt")
    input_dir = file_ops.get_input_path(input_uri_prefix, ipppssoot)
    # append process.txt to trailer file
    # file_ops.append_trailer(input_dir, output_path, ipppssoot)
    input_paths = get_inputs(ipppssoot, input_dir)
    instr = process.get_instrument(ipppssoot)
    preview_inputs = get_preview_inputs(instr, input_paths)
    # create previews
    previews = create_previews(input_dir, preview_inputs)
    # upload/copy previews
    log.info("Saving previews...")
    if output_uri_prefix.startswith("s3"):
        preview_output = process.get_output_path("file:outputs",
                                                 ipppssoot) + "/previews"
        os.makedirs(preview_output, exist_ok=True)
        copy_previews(previews, preview_output)
        log.info("Preparing files for s3 upload...")
        file_ops.tar_outputs(ipppssoot, input_uri_prefix, output_uri_prefix)
    elif output_uri_prefix.startswith("file"):
        preview_output = process.get_output_path(output_uri_prefix,
                                                 ipppssoot) + "/previews"
        os.makedirs(preview_output, exist_ok=True)
        copy_previews(previews, preview_output)
    else:
        return
    del logger
Code Example #4
def main(ipppssoot, input_uri_prefix, output_uri_prefix):
    """Generates previews based on input and output directories
    according to specified args
    """
    # set appropriate path variables
    logger = log.CaldpLogger(enable_console=False, log_file="preview.txt")
    cwd = os.getcwd()
    if input_uri_prefix.startswith("file"):
        in_path = input_uri_prefix.split(":")[-1] or "."
    else:
        in_path = ipppssoot
    input_dir = os.path.join(cwd, in_path)
    input_paths = get_inputs(ipppssoot, input_dir)
    output_path = process.get_output_path(output_uri_prefix, ipppssoot) + "/previews"
    # create previews
    previews = create_previews(input_dir, input_paths)
    # upload/copy previews
    if len(previews) > 0:
        if output_uri_prefix.startswith("s3"):
            log.info("Uploading previews...")
            upload_previews(previews, output_path)
        elif output_uri_prefix.startswith("file"):
            log.info("Saving previews...")
            os.makedirs(output_path, exist_ok=True)
            copy_previews(previews, output_path)
        else:
            return
    else:
        log.error("Error - Previews not generated.")
    del logger
Code Example #5
File: process.py Project: raswaters/caldp
 def find_input_files(self):
     """Scrape the input_uri for the needed input_files.
     Called if input_uri starts with `file:`
     Returns
     -------
     filepaths : sorted list
         Local file system paths of files which were found for `ipppssoot`,
         some of which will be selected for calibration processing.
     """
     test_path = self.input_uri.split(":")[-1]
     if os.path.isdir(test_path):
         base_path = os.path.abspath(test_path)
     elif os.path.isdir(os.path.join(os.getcwd(), test_path)):
         base_path = os.path.join(os.getcwd(), test_path)
     else:
         raise ValueError(f"input path {test_path} does not exist")
     # check for tarred inputs
     cwd = os.getcwd()
     search_tar = f"{base_path}/{self.ipppssoot.lower()[0:5]}*.tar.gz"
     tar_files = glob.glob(search_tar)
     if len(tar_files) == 1:
         log.info("Extracting inputs from: ", tar_files)
         os.chdir(base_path)
         with tarfile.open(tar_files[0], "r:gz") as tar_ref:
             tar_ref.extractall()
     os.chdir(cwd)
     # get input files
     search_str = f"{base_path}/{self.ipppssoot.lower()[0:5]}*.fits"
     self.divider("Finding input data using:", repr(search_str))
     # find the base path to the files
     files = glob.glob(search_str)
     return list(sorted(files))
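For orientation, the layout this method expects, using illustrative values for input_uri and ipppssoot:

# With input_uri = "file:inputs" and ipppssoot = "odfa01030" (illustrative values),
# base_path resolves to the absolute path of inputs/, and the method looks for either
#     inputs/odfa0*.tar.gz   -- a single tarball, extracted in place, or
#     inputs/odfa0*.fits     -- already-extracted FITS files,
# returning the sorted list of matching FITS paths.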
Code Example #6
    def output_files(self):
        """Selects files from the current working directory and uploads them
        to the `output_uri`.   If `output_uri` is None or "none",  returns
        without copying files.

        Returns
        -------
        None
        """
        outputs = self.find_output_files()
        delete = [
            output for output in outputs
            if output.endswith(tuple(self.delete_endings))
        ]
        if delete:
            self.divider("Deleting files:", delete)
            for filename in delete:
                os.remove(filename)
            outputs = self.find_output_files()  # get again
        if self.output_uri is None or self.output_uri.startswith("none"):
            return
        output_path = get_output_path(self.output_uri, self.ipppssoot)
        self.divider(f"Saving {len(outputs)} outputs to:", output_path)
        for filepath in outputs:
            output_filename = f"{output_path}/{os.path.basename(filepath)}"
            log.info(f"\t{output_filename}")
            upload_filepath(filepath, output_filename)
        self.divider("Saving outputs complete.")
Code Example #7
def upload_tar(tar, output_path):
    client = boto3.client("s3")
    parts = output_path[5:].split("/")
    bucket, prefix = parts[0], "/".join(parts[1:])
    objectname = prefix + "/" + os.path.basename(tar)
    log.info(f"Uploading: s3://{bucket}/{objectname}")
    if output_path.startswith("s3"):
        with open(tar, "rb") as f:
            client.upload_fileobj(f, bucket, objectname, Callback=ProgressPercentage(tar))
Code Example #8
 def make_messages(self, stat):
     base = os.getcwd()
     msg_dir = os.path.join(base, "messages", stat)
     os.makedirs(msg_dir, exist_ok=True)
     msg_file = msg_dir + f"/{self.ipppssoot}"
     with open(msg_file, "w") as m:
         m.write(f"{stat} {self.ipppssoot}\n")
         log.info(f"Message file saved: {os.path.abspath(msg_file)}")
     return os.path.abspath(msg_file)
Code Example #9
def make_tar(file_list, ipppssoot):
    tar = ipppssoot + ".tar.gz"
    log.info("Creating tarfile: ", tar)
    if os.path.exists(tar):
        os.remove(tar)  # clean up from prev attempts
    with tarfile.open(tar, "x:gz") as t:
        for f in file_list:
            t.add(f)
    log.info("Tar successful: ", tar)
    tar_dest = os.path.join(ipppssoot, tar)
    shutil.copy(tar, ipppssoot)  # move tarfile to outputs/{ipst}
    os.remove(tar)
    return tar_dest
Code Example #10
def upload_tar(tar, output_path):
    with sysexit.exit_on_exception(exit_codes.S3_UPLOAD_ERROR,
                                   "S3 tar upload of", tar, "to", output_path,
                                   "FAILED."):
        client = boto3.client("s3")
        parts = output_path[5:].split("/")
        bucket, prefix = parts[0], "/".join(parts[1:])
        objectname = prefix + "/" + os.path.basename(tar)
        log.info(f"Uploading: s3://{bucket}/{objectname}")
        if output_path.startswith("s3"):
            with open(tar, "rb") as f:
                client.upload_fileobj(f,
                                      bucket,
                                      objectname,
                                      Callback=ProgressPercentage(tar))
Code Example #11
def upload_previews(previews, output_path):
    """Given `previews` list to upload, copy it to `output_uri_prefix`.
    previews : List of local preview filepaths to upload
       ['./odfa01030/previews/x1d_thumb.png','./odfa01030/previews/x1d.png' ]
    output_path : Full S3 destination path including the bucket and key prefix
        s3://hstdp-batch-outputs/data/stis/odfa01030/previews/
    """
    client = boto3.client("s3")
    splits = output_path[5:].split("/")
    bucket, path = splits[0], "/".join(splits[1:])
    for preview in previews:
        preview_file = os.path.basename(preview)
        objectname = path + "/" + preview_file
        log.info(f"\t{output_path}/{preview_file}")
        with open(preview, "rb") as f:
            client.upload_fileobj(f, bucket, objectname)
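Using the example values from the docstring, a call looks like the sketch below; output_path[5:] strips the leading "s3://" before the bucket and key prefix are split apart:

upload_previews(
    ["./odfa01030/previews/x1d_thumb.png", "./odfa01030/previews/x1d.png"],
    "s3://hstdp-batch-outputs/data/stis/odfa01030/previews",
)
# bucket     -> "hstdp-batch-outputs"
# objectname -> "data/stis/odfa01030/previews/x1d_thumb.png"  (one per preview file)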
Code Example #12
    def sync_dataset(self):
        if self.output_uri.startswith("file"):
            preview_output = os.path.join(self.output_path, "previews")
            files = glob.glob(f"{self.output_path}/{self.ipppssoot[0:5]}*")
            files.extend(glob.glob(f"{preview_output}/{self.ipppssoot[0:5]}*"))
            outputs = list(sorted(files))
            for line in outputs:
                with open(self.file, "a") as m:
                    m.write(f"{line}\n")
            log.info(f"Dataset synced: {outputs}")

        elif self.output_uri.startswith("s3"):
            s3_path = f"{self.output_path}/{self.ipppssoot}.tar.gz"
            with open(self.file, "w") as m:
                m.write(s3_path)
            log.info(f"Dataset synced: {s3_path}")
Code Example #13
 def set_env_vars(self):
     """looks for an ipppssoot_cal_env.txt file and sets the key=value
     pairs in the file in os.environ for the calibration code
     """
     env_file = f"{self.ipppssoot}_cal_env.txt"
     if os.path.isfile(env_file):
         self.divider(f"processing env file {env_file}")
         with open(env_file, "r") as f:
             for line in f.readlines():
                 try:
                     key, value = line.split("=")
                 except ValueError:
                     log.info(f"{line} is not a valid key=value pair")
                     continue
                 os.environ[key.strip()] = value.strip()
                 log.info(f"setting {key}={value} in processing env")
     return
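A sketch of the file this method consumes; the variable names and values are illustrative assumptions:

# Hypothetical contents of odfa01030_cal_env.txt, one key=value pair per line:
#
#     CRDS_SERVER_URL=https://hst-crds.stsci.edu
#     CRDS_PATH=/grp/crds/cache
#
# After set_env_vars() runs, each pair is visible to the calibration subprocesses:
#
#     os.environ["CRDS_SERVER_URL"]   # -> "https://hst-crds.stsci.edu"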
Code Example #14
    def sync_dataset(self, stat):
        preview_output = os.path.join(self.output_path, "previews")
        files = glob.glob(f"{self.output_path}/{self.ipppssoot[0:5]}*")
        files.extend(glob.glob(f"{preview_output}/{self.ipppssoot[0:5]}*"))
        outputs = list(sorted(files))

        if stat == "dataset-processed":
            base = os.getcwd()
            sync_dir = os.path.join(base, "messages", "dataset-synced")
            os.makedirs(sync_dir, exist_ok=True)
            sync_msg = sync_dir + "/" + self.ipppssoot
            for line in outputs:
                with open(sync_msg, "a") as m:
                    m.write(f"{line}\n")
            log.info(f"Dataset synced: {os.path.abspath(sync_msg)}")
        else:
            log.error("Error found - skipping data sync.")
Code Example #15
def tar_outputs(ipppssoot, input_uri, output_uri):
    working_dir = os.getcwd()
    output_path = process.get_output_path(output_uri, ipppssoot)
    output_dir = get_output_dir(output_uri)
    os.chdir(output_dir)  # create tarfile with ipst/*fits (ipst is parent dir)
    output_files = find_output_files(ipppssoot)
    if len(output_files) == 0:
        log.info("No output files found. Tarring inputs for debugging.")
        os.chdir(working_dir)
        input_dir = get_input_dir(input_uri)
        os.chdir(input_dir)
        file_list = find_input_files(ipppssoot)
    else:
        file_list = find_previews(ipppssoot, output_files)
    tar = make_tar(file_list, ipppssoot)
    upload_tar(tar, output_path)
    clean_up(file_list, ipppssoot, dirs=["previews", "env"])
    os.chdir(working_dir)
    if output_uri.startswith("file"):  # test cov only
        return tar, file_list  # , local_outpath
Code Example #16
File: process.py Project: jaytmiller/caldp
    def divider(self, *args, dash=">"):
        """Logs a standard divider made up of repeated `dash` characters as well as
        a message defined by the str() of each value in `args`.

        Parameters
        ----------
        args : list of values
            Values filtered through str() and joined with a single space into a message.
        dash : str
            Separator character repeated to create the divider string

        Returns
        -------
        None
        """
        assert len(dash) == 1
        msg = " ".join([str(a) for a in args])
        dashes = 100 - len(msg) - 2
        log.info(dash * 80)
        log.info(dash * 5, self.ipppssoot, msg, dash * (dashes - 6 - len(self.ipppssoot) - len(msg) - 1))
Code Example #17
    def run(self, cmd, *args):
        """Run the subprocess string `cmd`,  appending any extra values
        defined by `args`.

        Parameters
        ----------
        cmd : str
            A multi-word subprocess command string
        args : tuple of str
            Extra single-word parameters to append to `cmd`,  nominally
            filenames or switches.

        Returns
        -------
        None

        Notes
        -----
        Logs executed command tuple.

        Checks subprocess error status against class attribute `ignore_err_nums`
        to ignore instrument-specific error codes.

        Issues an ERROR message and exits the program for any other non-zero
        value.
        """
        cmd = tuple(cmd.split()) + args  # Handle stage values with switches.
        self.divider("Running:", cmd)
        err = subprocess.call(cmd)
        if err in self.ignore_err_nums:
            log.info("Ignoring error status =", err)
        elif err:
            log.error(self.ipppssoot, "Command:", repr(cmd),
                      "exited with error status:", err)
            sys.exit(1)  # should be 0-127, higher val's like 512 are set to 0 by shells
Code Example #18
 def upload_message(self):
     if self.output_uri.startswith("s3"):
         client = boto3.client("s3")
         bucket = self.output_uri[5:].split("/")[0]
         objectname = f"messages/{self.name}"
         log.info("Uploading message file...")
         with open(self.file, "rb") as f:
             client.upload_fileobj(f, bucket, objectname)
         log.info(f"\ts3://{bucket}/{objectname}")
         log.info("Message file uploaded.")
Code Example #19
File: messages.py Project: bhayden53/caldp
 def copy_logs(self):
     log_output = self.get_log_output(local=True)
     log_dict = self.findlogs(log_output)
     log.info("Saving log files...")
     for k, v in log_dict.items():
         try:
             shutil.copy(k, v)
             log.info(f"\t{v}.")
         except FileExistsError:
             pass
     log.info("Log files saved.")
Code Example #20
 def copy_logs(self):
     log_dict = self.findlogs()
     os.makedirs(self.log_output, exist_ok=True)
     log.info("Saving log files...")
     for k, v in log_dict.items():
         try:
             shutil.copy(k, v)
             log.info(f"\t{v}.")
         except FileExistsError:
             pass
     log.info("Log files saved.")
Code Example #21
 def upload_logs(self):
     log_dict = self.findlogs()
     client = boto3.client("s3")
     parts = self.log_output[5:].split("/")
     bucket, objectname = parts[0], "/".join(parts[1:])
     log.info("Uploading log files...")
     for k, v in log_dict.items():
         obj = objectname + "/" + os.path.basename(v)
         with open(k, "rb") as f:
             client.upload_fileobj(f, bucket, obj)
             log.info(f"\t{v}.")
     log.info("Log files uploaded.")
Code Example #22
 def upload_messages(self, stat, msg_file):
     client = boto3.client("s3")
     parts = self.output_uri[5:].split("/")
     bucket = parts[0]
     if len(parts) > 1:
         objectname = parts[1] + f"/messages/{stat}/{self.ipppssoot}"
     else:
         objectname = f"messages/{stat}/{self.ipppssoot}"
     log.info("Uploading message file...")
     with open(msg_file, "rb") as f:
         client.upload_fileobj(f, bucket, objectname)
     log.info(f"\ts3://{bucket}/{objectname}")
     log.info("Message file uploaded.")
Code Example #23
def create_previews(input_dir, input_paths):
    """Generates previews based on s3 downloads
    Returns a list of file paths to previews
    """
    log.info("Processing", len(input_paths), "FITS files from ", input_dir)
    # Generate previews to local preview folder inside ipppssoot folder
    for input_path in input_paths:
        log.info("Generating previews for", input_path)
        filename_base = os.path.basename(input_path).split(".")[0]
        generate_previews(input_path, input_dir, filename_base)
    # list of full paths to preview files
    previews = get_previews(input_dir)
    log.info("Generated", len(previews), "preview files")
    return previews
Code Example #24
def append_trailer(input_path, output_path, ipppssoot):  # pragma: no cover
    """Fetch process log and append to trailer file
    Note: copies trailer file from inputs directory
    and copies to outputs directory prior to appending log
    """
    try:
        tra1 = list(glob.glob(f"{output_path}/{ipppssoot.lower()}.tra"))
        tra2 = list(glob.glob(f"{output_path}/{ipppssoot.lower()[0:5]}*.tra"))
        if os.path.exists(tra1[0]):
            trailer = tra1[0]
        elif os.path.exists(tra2[0]):
            trailer = tra2[0]
        else:
            log.info("Trailer file not found - skipping.")
            return

        log.info(f"Updating {trailer} with process log:")
        proc_log = list(glob.glob(f"{os.getcwd()}/process.txt"))[0]
        with open(trailer, "a") as tra:
            with open(proc_log, "r") as proc:
                tra.write(proc.read())
        log.info("Trailer file updated: ", trailer)
    except IndexError:
        log.info("Trailer file not found - skipping.")
        return
Code Example #25
 def write_message(self):
     with open(self.file, "w") as m:
         m.write(f"{self.name}\n")
         log.info(f"Message file created: {os.path.abspath(self.file)}")
Code Example #26
@contextlib.contextmanager
def exit_on_exception(exit_code, *args):
    """exit_on_exception is a context manager which issues an error message
    based on *args and then does sys.exit(exit_code) if an exception is
    raised within the corresponding "with block".

    >>> with exit_on_exception(1, "As expected", "it did not fail."):
    ...    print("do it.")
    do it.

    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "As expected", "it failed."):
    ...        raise Exception("It failed!")
    ...        print("do it.")
    ... except SystemExit:
    ...    log.divider()
    ...    print("Trapping SystemExit normally caught by log.exit_reciever() at top level.")
    INFO - ----------------------------- Fatal Exception -----------------------------
    ERROR - As expected it failed.
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     yield
    ERROR -   File "<doctest ...sysexit.exit_on_exception[1]>", line ..., in <module>
    ERROR -     raise Exception("It failed!")
    ERROR - Exception: It failed!
    EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.
    INFO - ---------------------------------------------------------------------------
    Trapping SystemExit normally caught by log.exit_receiver() at top level.

    Never printed 'do it.'  SystemExit is caught for testing.

    If CALDP_SIMULATE_ERROR is set to one of exit_codes, it will cause the
    with exit_on_exception() block to act as if a failure has occurred:

    >>> os.environ["CALDP_SIMULATE_ERROR"] = "2"
    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "As expected a failure was simulated"):
    ...        print("should not see this")
    ... except SystemExit:
    ...    pass
    INFO - ----------------------------- Fatal Exception -----------------------------
    ERROR - As expected a failure was simulated
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     raise RuntimeError(f"Simulating error = {simulated_code}")
    ERROR - RuntimeError: Simulating error = 2
    EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.

    >>> os.environ["CALDP_SIMULATE_ERROR"] = str(exit_codes.CALDP_MEMORY_ERROR)
    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "Memory errors don't have to match"):
    ...        print("Oh unhappy day.")
    ... except SystemExit:
    ...    pass
    INFO - ----------------------------- Fatal Exception -----------------------------
    ERROR - Memory errors don't have to match
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     raise MemoryError("Simulated CALDP MemoryError.")
    ERROR - MemoryError: Simulated CALDP MemoryError.
    EXIT - CALDP_MEMORY_ERROR[32]: CALDP generated a Python MemoryError during processing or preview creation.


    >>> os.environ["CALDP_SIMULATE_ERROR"] = "999"
    >>> with exit_on_exception(3, "Only matching error codes are simulated."):
    ...    print("should print normally")
    should print normally

    >>> del os.environ["CALDP_SIMULATE_ERROR"]
    """
    simulated_code = int(os.environ.get("CALDP_SIMULATE_ERROR", "0"))
    try:
        if simulated_code == exit_codes.CALDP_MEMORY_ERROR:
            raise MemoryError("Simulated CALDP MemoryError.")
        elif simulated_code == exit_codes.SUBPROCESS_MEMORY_ERROR:
            print(
                "MemoryError", file=sys.stderr
            )  # Output to process log determines final program exit status
            raise RuntimeError(
                "Simulated subprocess memory error with subsequent generic program exception."
            )
        elif simulated_code == exit_codes.CONTAINER_MEMORY_ERROR:
            log.info("Simulating hard memory error by allocating memory")
            _ = bytearray(
                1024 *
                2**30)  # XXXX does not trigger container limit as intended
        elif exit_code == simulated_code:
            raise RuntimeError(f"Simulating error = {simulated_code}")
        yield
    # don't mask memory errors or nested exit_on_exception handlers
    except SystemExit:
        _report_exception(exit_code, *args)
        raise
    # Map MemoryError to SystemExit(CALDP_MEMORY_ERROR).
    except MemoryError as exc:
        _report_exception(exit_codes.CALDP_MEMORY_ERROR, *args)
        raise SystemExit(exit_codes.CALDP_MEMORY_ERROR) from exc
    # All other exceptions are remapped to the SystemExit(exit_code) declared by exit_on_exception().
    except Exception as exc:
        _report_exception(exit_code, *args)
        raise SystemExit(exit_code) from exc
Code Example #27
File: sysexit.py Project: bhayden53/caldp
@contextlib.contextmanager
def exit_on_exception(exit_code, *args):
    """exit_on_exception is a context manager which issues an error message
    based on *args and then does sys.exit(exit_code) if an exception is
    raised within the corresponding "with block".

    >>> with exit_on_exception(1, "As expected", "it did not fail."):
    ...    print("do it.")
    do it.

    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "As expected", "it failed."):
    ...        raise Exception("It failed!")
    ...        print("do it.")
    ... except SystemExit:
    ...    log.divider()
    ...    print("Trapping SystemExit normally caught by exit_reciever() at top level.")
    ERROR - ----------------------------- Fatal Exception -----------------------------
    ERROR - As expected it failed.
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     yield
    ERROR -   File "<doctest ...exit_on_exception[1]>", line ..., in <module>
    ERROR -     raise Exception("It failed!")
    ERROR - Exception: It failed!
    EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.
    INFO - ---------------------------------------------------------------------------
    Trapping SystemExit normally caught by exit_receiver() at top level.

    Never printed 'do it.'  SystemExit is caught for testing.

    If CALDP_SIMULATE_ERROR is set to one of exit_codes, it will cause the
    with exit_on_exception() block to act as if a failure has occurred:

    >>> os.environ["CALDP_SIMULATE_ERROR"] = "2"
    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "As expected a failure was simulated"):
    ...        print("should not see this")
    ... except SystemExit:
    ...    pass
    ERROR - ----------------------------- Fatal Exception -----------------------------
    ERROR - As expected a failure was simulated
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     raise RuntimeError(f"Simulating error = {simulated_code}")
    ERROR - RuntimeError: Simulating error = 2
    EXIT - CMDLINE_ERROR[2]: The program command line invocation was incorrect.

    >>> os.environ["CALDP_SIMULATE_ERROR"] = str(exit_codes.CALDP_MEMORY_ERROR)
    >>> try: #doctest: +ELLIPSIS
    ...    with exit_on_exception(2, "Memory errors don't have to match"):
    ...        print("Oh unhappy day.")
    ... except SystemExit:
    ...    pass
    ERROR - ----------------------------- Fatal Exception -----------------------------
    ERROR - Memory errors don't have to match
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     raise MemoryError("Simulated CALDP MemoryError.")
    ERROR - MemoryError: Simulated CALDP MemoryError.
    EXIT - CALDP_MEMORY_ERROR[32]: CALDP generated a Python MemoryError during processing or preview creation.


    >>> os.environ["CALDP_SIMULATE_ERROR"] = "999"
    >>> with exit_on_exception(3, "Only matching error codes are simulated."):
    ...    print("should print normally")
    should print normally

    >>> del os.environ["CALDP_SIMULATE_ERROR"]

    >>> saved, os._exit = os._exit, lambda x: print(f"os._exit({x})")
    >>> with exit_receiver():  #doctest: +ELLIPSIS
    ...     with exit_on_exception(exit_codes.STAGE1_ERROR, "Failure running processing stage1."):
    ...         raise SubprocessFailure(-8)
    ERROR - ----------------------------- Fatal Exception -----------------------------
    ERROR - Failure running processing stage1.
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../caldp/sysexit.py", line ..., in exit_on_exception
    ERROR -     yield
    ERROR -   File "<doctest caldp.sysexit.exit_on_exception[...]>", line ..., in <module>
    ERROR -     raise SubprocessFailure(-8)
    ERROR - caldp.sysexit.SubprocessFailure: -8
    EXIT - Killed by UNIX signal SIGFPE[8]: 'Floating-point exception (ANSI).'
    EXIT - STAGE1_ERROR[23]: An error occurred in this instrument's stage1 processing step. e.g. calxxx
    os._exit(23)
    >>> os._exit = saved
    """
    simulated_code = int(os.environ.get("CALDP_SIMULATE_ERROR", "0"))
    try:
        if simulated_code == exit_codes.CALDP_MEMORY_ERROR:
            raise MemoryError("Simulated CALDP MemoryError.")
        elif simulated_code == exit_codes.SUBPROCESS_MEMORY_ERROR:
            print(
                "MemoryError", file=sys.stderr
            )  # Output to process log determines final program exit status
            raise RuntimeError(
                "Simulated subprocess memory error with subsequent generic program exception."
            )
        elif simulated_code == exit_codes.CONTAINER_MEMORY_ERROR:
            log.info("Simulating hard memory error by allocating memory")
            _ = bytearray(
                1024 *
                2**30)  # XXXX does not trigger container limit as intended
        elif exit_code == simulated_code:
            raise RuntimeError(f"Simulating error = {simulated_code}")
        yield
    # don't mask memory errors or nested exit_on_exception handlers
    except MemoryError:
        _report_exception(exit_codes.CALDP_MEMORY_ERROR, args)
        raise CaldpExit(exit_codes.CALDP_MEMORY_ERROR)
    except CaldpExit:
        raise
    # As always, exit_code defines what CALDP's program exit status will be.
    # In contrast, exc.returncode is the exit status of the failed subprocess,
    # which may identify an OS signal that killed the process.
    except SubprocessFailure as exc:
        _report_exception(exit_code, args, exc.returncode)
        raise CaldpExit(exit_code)
    except Exception:
        _report_exception(exit_code, args)
        raise CaldpExit(exit_code)
Code Example #28
def copy_previews(previews, output_path):
    for filepath in previews:
        preview_file = os.path.join(output_path, os.path.basename(filepath))
        shutil.copy(filepath, preview_file)
        log.info(f"\t{preview_file}")
    os.listdir(output_path)
Code Example #29
@contextlib.contextmanager
def exit_receiver():
    """Use this contextmanager to bracket your top level code and land the sys.exit()
    exceptions thrown by _raise_exit_exception() and exit_on_exception().

    This program structure enables sys.exit() to fully unwind the stack doing
    cleanup, then calls the low level os._exit() function which does no cleanup
    as the "last thing".

    If SystemExit is not raised by the code nested in the "with" block then
    exit_receiver() essentially does nothing.

    The program is exited with the numerical code passed to sys.exit().

    >>> saved, os._exit = os._exit, lambda x: print(f"os._exit({x})")

    >>> with exit_receiver():  #doctest: +ELLIPSIS
    ...     print("Oh happy day.")
    INFO - Container memory limit is:  ...
    os._exit(0)

    Generic unhandled exceptions are mapped to GENERIC_ERROR (1):

    >>> with exit_receiver(): #doctest: +ELLIPSIS
    ...     raise RuntimeError("Unhandled exception.")
    INFO - Container memory limit is:  ...
    os._exit(1)

    MemoryError is remapped to CALDP_MEMORY_ERROR (32) inside exit_on_exception or not:

    >>> with exit_receiver(): #doctest: +ELLIPSIS
    ...     raise MemoryError("CALDP used up all memory directly.")
    INFO - Container memory limit is: ...
    os._exit(32)

    Inside exit_on_exception, exit status is remapped to the exit_code parameter
    of exit_on_exception():

    >>> with exit_receiver(): #doctest: +ELLIPSIS
    ...    with exit_on_exception(exit_codes.STAGE1_ERROR, "Stage1 processing failed for <ippssoot>"):
    ...        raise RuntimeError("Some obscure error")
    INFO - Container memory limit is:  ...
    INFO - ----------------------------- Fatal Exception -----------------------------
    ERROR - Stage1 processing failed for <ippssoot>
    ERROR - Traceback (most recent call last):
    ERROR -   File ".../sysexit.py", line ..., in exit_on_exception
    ERROR -     yield
    ERROR -   File "<doctest ...sysexit.exit_receiver[...]>", line ..., in <module>
    ERROR -     raise RuntimeError("Some obscure error")
    ERROR - RuntimeError: Some obscure error
    EXIT - STAGE1_ERROR[23]: An error occurred in this instrument's stage1 processing step. e.g. calxxx
    os._exit(23)

    >>> os._exit = saved

    """
    try:
        log.info("Container memory limit is: ", get_linux_memory_limit())
        yield  # go off and execute the block
        os._exit(exit_codes.SUCCESS)
    except SystemExit as exc:
        os._exit(exc.code)
    except MemoryError:
        os._exit(exit_codes.CALDP_MEMORY_ERROR)
    except Exception:
        os._exit(exit_codes.GENERIC_ERROR)