def check_pathfinder(ipppssoot):
    """Exercise messages.path_finder for a local 'file:' input prefix and for
    the 'astroquery:' prefix, asserting the resolved output URI and path."""
    local_prefix = "file:inputs"
    uri, path = messages.path_finder(local_prefix, None, ipppssoot)
    assert uri == local_prefix
    assert path == os.path.abspath("inputs")

    uri, path = messages.path_finder("astroquery:", None, ipppssoot)
    expected = os.path.join(os.getcwd(), "inputs", ipppssoot)
    assert uri == f"file:{expected}"
def process(ipppssoot, input_uri, output_uri):
    """Given an `ipppssoot`, `input_uri`, and `output_uri` where products should be
    stored, perform all required processing steps for the `ipppssoot` and store all
    products to `output_uri`.

    Parameters
    ----------
    ipppssoot : str
        The HST dataset name to be processed.
    input_uri : str
        either a local directory (path in the container) or astroquery to download from MAST
    output_uri : str
        The base path to which outputs will be copied, nominally S3://bucket/subdir/.../subdir

    Returns
    -------
    None
    """
    process_log = log.CaldpLogger(enable_console=False, log_file="process.txt")
    if output_uri is None:
        # Derive a default output URI from the input URI.  path_finder's second
        # return value was previously bound to output_path only to be overwritten
        # by get_output_path below, so the dead assignment is discarded here.
        output_uri, _ = messages.path_finder(input_uri, output_uri, ipppssoot)
    output_path = get_output_path(output_uri, ipppssoot)
    msg = messages.Messages(output_uri, output_path, ipppssoot)
    msg.init()
    msg.process_message()  # processing-ipst
    manager = get_instrument_manager(ipppssoot, input_uri, output_uri)
    manager.main()
    # Drop the logger reference so its file handle can be released/finalized.
    del process_log
def check_logs(input_uri, output_uri, ipppssoot):
    """For file-based output URIs, assert that the resolved log output path exists."""
    if not output_uri.startswith("file"):
        return
    uri, path = messages.path_finder(input_uri, output_uri, ipppssoot)
    log_path = messages.Logs(path, uri, ipppssoot).get_log_output()
    assert os.path.exists(log_path)
def check_logs(input_uri, output_uri, ipppssoot):
    """For file-based output URIs, assert the log output path exists, then attempt
    upload_logs as a best-effort step (S3 errors are expected when running without
    credentials and are only printed).

    NOTE(review): this redefines a `check_logs` appearing earlier in the file;
    confirm the duplicate definitions are intentional.
    """
    if output_uri.startswith("file"):
        output_uri, output_path = messages.path_finder(input_uri, output_uri, ipppssoot)
        logs = messages.Logs(output_path, output_uri, ipppssoot)
        log_path = logs.get_log_output()
        assert os.path.exists(log_path)
        try:
            logs.upload_logs()
        except Exception as e:
            # Best-effort: upload may legitimately fail without S3 access; the
            # original trailing `assert True` was a no-op and has been removed.
            print("s3 error check: ", e)
def check_logs(input_uri, output_uri, ipppssoot):
    """Assert exactly four local .txt logs exist, then check the resolved log
    output location: object count on S3 when S3 test outputs are enabled,
    otherwise existence on the local filesystem."""
    cwd = os.getcwd()
    local_logs = glob.glob(f"{cwd}/*.txt")
    assert len(local_logs) == 4
    uri, path = messages.path_finder(input_uri, output_uri, ipppssoot)
    # NOTE(review): unlike the other check_logs variants, Logs is constructed
    # here without ipppssoot and read via the .log_output attribute rather than
    # get_log_output() — confirm against messages.Logs' signature.
    log_path = messages.Logs(path, uri).log_output
    if CALDP_S3_TEST_OUTPUTS and uri.startswith("s3"):
        assert len(list_objects(log_path)) == 4
    else:
        assert os.path.exists(log_path)