Example #1
0
def message_status_check(input_uri, output_uri, ipppssoot):
    """Walk a Messages object through its full lifecycle and verify that
    the status code and message-file naming advance as expected at each step.
    """
    local_outpath = messages.get_local_outpath(output_uri, ipppssoot)
    msg = messages.Messages(output_uri, local_outpath, ipppssoot)

    # Freshly constructed: message dir defaults to ./messages, status is 0.
    assert msg.msg_dir == os.path.join(os.getcwd(), "messages")
    assert msg.stat == 0

    # init() creates the message directory and bumps status to 1.
    msg.init()
    assert os.path.exists(msg.msg_dir)
    assert msg.stat == 1

    # process_message() writes a processing-<ipst> marker; status becomes 2.
    msg.process_message()
    assert msg.name == f"processing-{ipppssoot}"
    assert msg.file == f"{msg.msg_dir}/{msg.name}"
    assert msg.stat == 2

    # preview_message() leaves name/file/status unchanged at the
    # processing stage.
    msg.preview_message()
    assert msg.name == f"processing-{ipppssoot}"
    assert msg.file == f"{msg.msg_dir}/{msg.name}"
    assert msg.stat == 2

    # final_message() ends in either the error (-1) or processed (3) state.
    msg.final_message()
    if msg.stat == -1:
        assert msg.name == f"error-{ipppssoot}"
    elif msg.stat == 3:
        assert msg.name == f"processed-{ipppssoot}.trigger"
Example #2
0
def process(ipppssoot, input_uri, output_uri):
    """Given an `ipppssoot`, `input_uri`, and `output_uri` where products should be stored,
    perform all required processing steps for the `ipppssoot` and store all
    products to `output_uri`.

    Parameters
    ----------
    ipppssoot : str
        The HST dataset name to be processed.
    input_uri : str
        either a local directory (path in the container) or astroquery to download from MAST
    output_uri : str or None
        The base path to which outputs will be copied, nominally S3://bucket/subdir/.../subdir.
        If None, a value is derived via messages.path_finder.

    Returns
    -------
    None
    """
    # File-only logger for this processing run (console output disabled).
    process_log = log.CaldpLogger(enable_console=False, log_file="process.txt")

    # Derive a default output_uri when the caller did not supply one.
    if output_uri is None:
        output_uri, output_path = messages.path_finder(input_uri, output_uri,
                                                       ipppssoot)
    # NOTE(review): this unconditionally recomputes output_path, discarding
    # the value returned by path_finder above — confirm that is intentional.
    output_path = get_output_path(output_uri, ipppssoot)

    # Record the "processing" state before the pipeline starts.
    msg = messages.Messages(output_uri, output_path, ipppssoot)
    msg.init()
    msg.process_message()  # processing-ipst

    # Dispatch to the instrument-specific manager, which runs the pipeline.
    manager = get_instrument_manager(ipppssoot, input_uri, output_uri)
    manager.main()

    # Drop the logger reference so its file handle can be released.
    del process_log
Example #3
0
def main(ipppssoot, input_uri_prefix, output_uri_prefix):
    """Generate preview products for `ipppssoot` and copy them to the
    destination implied by `output_uri_prefix` ("s3..." or "file...");
    any other prefix is a no-op.
    """
    local_outpath = messages.get_local_outpath(output_uri_prefix, ipppssoot)
    msg = messages.Messages(output_uri_prefix, local_outpath, ipppssoot)
    msg.preview_message()  # processing
    logger = log.CaldpLogger(enable_console=False, log_file="preview.txt")

    # Locate the inputs and narrow them to the instrument's preview set.
    input_dir = file_ops.get_input_path(input_uri_prefix, ipppssoot)
    # append process.txt to trailer file
    # file_ops.append_trailer(input_dir, output_path, ipppssoot)
    candidate_paths = get_inputs(ipppssoot, input_dir)
    instrument = process.get_instrument(ipppssoot)
    preview_inputs = get_preview_inputs(instrument, candidate_paths)

    previews = create_previews(input_dir, preview_inputs)

    log.info("Saving previews...")
    to_s3 = output_uri_prefix.startswith("s3")
    if not (to_s3 or output_uri_prefix.startswith("file")):
        # Unrecognized destination scheme: nothing to save.
        return

    # For s3, previews are staged locally under file:outputs before upload;
    # for file, they go straight under the requested prefix.
    base_prefix = "file:outputs" if to_s3 else output_uri_prefix
    preview_output = process.get_output_path(base_prefix,
                                             ipppssoot) + "/previews"
    os.makedirs(preview_output, exist_ok=True)
    copy_previews(previews, preview_output)

    if to_s3:
        log.info("Preparing files for s3 upload...")
        file_ops.tar_outputs(ipppssoot, input_uri_prefix, output_uri_prefix)

    del logger