Example #1
def run_NEMO_agrif(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_date = parsed_args.run_date
    ssh_key = Path(os.environ["HOME"], ".ssh",
                   config["run"]["enabled hosts"][host_name]["ssh key"])
    run_id = f'{run_date.format("DDMMMYY").lower()}nowcast-agrif'
    # open the clients before the try block so that the finally clause
    # never references unbound names if the connection attempt fails
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    try:
        prev_run_namelists_info = _get_prev_run_namelists_info(
            sftp_client, host_name, run_date.shift(days=-1), config)
        _edit_namelist_times(sftp_client, host_name, prev_run_namelists_info,
                             run_date, config)
        _edit_run_desc(sftp_client, host_name, prev_run_namelists_info, run_id,
                       run_date, config)
        run_dir, job_id = _launch_run(ssh_client, host_name, run_id, config)
    finally:
        sftp_client.close()
        ssh_client.close()
    checklist = {
        "nowcast-agrif": {
            "host": host_name,
            "run id": run_id,
            "run dir": run_dir,
            "job id": job_id,
            "run date": run_date.format("YYYY-MM-DD"),
        }
    }
    return checklist
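
All of these workers get their SSH and SFTP clients from ssh_sftp.sftp(host_name, ssh_key), whose implementation is not shown in these examples. A minimal stand-in, assuming the helper simply wraps paramiko, might look like the sketch below; it is an illustration, not the nemo_nowcast implementation.

import paramiko


def sftp(host_name, ssh_key):
    # Sketch of a helper with the same call signature as ssh_sftp.sftp();
    # assumes the host's key is already in the system/user known_hosts files
    ssh_client = paramiko.SSHClient()
    ssh_client.load_system_host_keys()
    ssh_client.connect(host_name, key_filename=str(ssh_key))
    sftp_client = ssh_client.open_sftp()
    return ssh_client, sftp_client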
Example #2
def make_forcing_links(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_type = parsed_args.run_type
    run_date = parsed_args.run_date
    shared_storage = parsed_args.shared_storage
    ssh_key = Path(
        os.environ["HOME"], ".ssh", config["run"]["enabled hosts"][host_name]["ssh key"]
    )
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    _make_NeahBay_ssh_links(sftp_client, run_date, config, host_name, shared_storage)
    if run_type == "ssh":
        sftp_client.close()
        ssh_client.close()
        checklist = {
            host_name: {
                "links": f"{parsed_args.run_type} "
                f'{parsed_args.run_date.format("YYYY-MM-DD")} ssh',
                "run date": parsed_args.run_date.format("YYYY-MM-DD"),
            }
        }
        return checklist
    _make_runoff_links(sftp_client, run_type, run_date, config, host_name)
    _make_weather_links(sftp_client, run_date, config, host_name, run_type)
    _make_live_ocean_links(sftp_client, run_date, config, host_name, shared_storage)
    sftp_client.close()
    ssh_client.close()
    checklist = {
        host_name: {
            "links": f"{parsed_args.run_type} "
            f'{parsed_args.run_date.format("YYYY-MM-DD")} '
            f"ssh rivers weather LiveOcean ",
            "run date": parsed_args.run_date.format("YYYY-MM-DD"),
        }
    }
    return checklist
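
make_forcing_links closes its clients on two separate return paths. contextlib.ExitStack, the pattern Example #6 below uses, guarantees cleanup on every path, including when one of the _make_*_links helpers raises. A sketch of that pattern applied here (names as in the example above; ssh_sftp and the helpers are assumed to be available) is:

import contextlib

ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
with contextlib.ExitStack() as stack:
    # paramiko clients are context managers, so ExitStack closes both
    # no matter which return path is taken or what exception is raised
    stack.enter_context(ssh_client)
    stack.enter_context(sftp_client)
    _make_NeahBay_ssh_links(sftp_client, run_date, config, host_name, shared_storage)
    # remaining _make_*_links calls and checklist construction go here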
Example #3
def upload_fvcom_atmos_forcing(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    model_config = parsed_args.model_config
    run_type = parsed_args.run_type
    run_date = parsed_args.run_date
    checklist = {
        host_name: {
            parsed_args.run_type: {
                "run date": f'{parsed_args.run_date.format("YYYY-MM-DD")}',
                "model config": model_config,
                "files": [],
            }
        }
    }
    logger.info(
        f"Uploading VHFR FVCOM atmospheric forcing files for "
        f'{run_date.format("YYYY-MM-DD")} {parsed_args.model_config} {parsed_args.run_type} '
        f"run to {host_name}"
    )
    fvcom_atmos_dir = Path(
        config["vhfr fvcom runs"]["atmospheric forcing"]["fvcom atmos dir"]
    )
    atmos_file_tmpl = config["vhfr fvcom runs"]["atmospheric forcing"][
        "atmos file template"
    ]
    atmos_file_date = run_date if run_type == "nowcast" else run_date.shift(days=+1)
    for atmos_field_type in config["vhfr fvcom runs"]["atmospheric forcing"][
        "field types"
    ]:
        atmos_file = atmos_file_tmpl.format(
            model_config=model_config,
            run_type=run_type,
            field_type=atmos_field_type,
            yyyymmdd=atmos_file_date.format("YYYYMMDD"),
        )
        fvcom_input_dir = Path(config["vhfr fvcom runs"]["input dir"][model_config])
        ssh_key = Path(os.environ["HOME"], ".ssh", config["vhfr fvcom runs"]["ssh key"])
        ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
        ssh_sftp.upload_file(
            sftp_client,
            host_name,
            fvcom_atmos_dir / atmos_file,
            fvcom_input_dir / atmos_file,
            logger,
        )
        sftp_client.close()
        ssh_client.close()
        logger.debug(
            f"Uploaded {fvcom_atmos_dir/atmos_file} to "
            f"{host_name}:{fvcom_input_dir/atmos_file}"
        )
        checklist[host_name][parsed_args.run_type]["files"].append(atmos_file)
    return checklist
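
The atmospheric forcing file name comes from straightforward str.format() keyword substitution. With a hypothetical template value (the real one lives under the "atmos file template" config key) the expansion looks like:

# hypothetical template and field type, for illustration only
atmos_file_tmpl = "atmos_{model_config}_{run_type}_{field_type}_{yyyymmdd}.nc"
atmos_file = atmos_file_tmpl.format(
    model_config="x2",
    run_type="nowcast",
    field_type="wnd",
    yyyymmdd="20190101",
)
# atmos_file == "atmos_x2_nowcast_wnd_20190101.nc"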
Example #4
def watch_NEMO_hindcast(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_id = parsed_args.run_id
    ssh_key = Path(
        os.environ["HOME"],
        ".ssh",
        config["run"]["hindcast hosts"][host_name]["ssh key"],
    )
    users = config["run"]["hindcast hosts"][host_name]["users"]
    scratch_dir = Path(
        config["run"]["hindcast hosts"][host_name]["scratch dir"])
    hpc_job_classes = {
        "qstat": _QstatHindcastJob,
        "squeue": _SqueueHindcastJob
    }
    queue_info_cmd = config["run"]["hindcast hosts"][host_name][
        "queue info cmd"].rsplit("/", 1)[-1]
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    try:
        job = hpc_job_classes[queue_info_cmd](ssh_client, sftp_client,
                                              host_name, users, scratch_dir,
                                              run_id)
        job.get_run_id()
        while job.is_queued():
            time.sleep(60 * 5)
        job.get_tmp_run_dir()
        job.get_run_info()
        while job.is_running():
            time.sleep(60 * 5)
        while True:
            completion_state = job.get_completion_state()
            if completion_state == "completed":
                break
            if completion_state in {"cancelled", "aborted"}:
                raise WorkerError
            time.sleep(60)
    finally:
        sftp_client.close()
        ssh_client.close()
    checklist = {
        "hindcast": {
            "host": job.host_name,
            "run id": job.run_id,
            "run date": arrow.get(job.run_id[:7],
                                  "DDMMMYY").format("YYYY-MM-DD"),
            "completed": completion_state == "completed",
        }
    }
    return checklist
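
The job-monitoring class is selected from the basename of the host's configured queue info command. For a hypothetical Slurm host the dispatch works out like this:

# hypothetical config value, for illustration only
queue_info_cmd = "/opt/software/slurm/bin/squeue".rsplit("/", 1)[-1]
# queue_info_cmd == "squeue", so hpc_job_classes[queue_info_cmd] is
# _SqueueHindcastJob; a PBS/Torque host whose command ends in "qstat"
# selects _QstatHindcastJob instead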
Example #5
def upload_forcing(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_type = parsed_args.run_type
    run_date = parsed_args.run_date
    ssh_key = Path(
        os.environ["HOME"], ".ssh", config["run"]["enabled hosts"][host_name]["ssh key"]
    )
    host_config = config["run"]["enabled hosts"][host_name]
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    checklist = {
        host_name: {
            run_type: {
                "run date": parsed_args.run_date.format("YYYY-MM-DD"),
                "file types": [],
            }
        }
    }
    # Neah Bay sea surface height
    _upload_ssh_files(sftp_client, run_type, run_date, config, host_name, host_config)
    if run_type == "ssh":
        sftp_client.close()
        ssh_client.close()
        checklist[host_name][run_type]["file types"] = ["ssh"]
        return checklist
    # Rivers turbidity and runoff
    if run_type == "turbidity":
        _upload_fraser_turbidity_file(
            sftp_client, run_date, config, host_name, host_config
        )
        sftp_client.close()
        ssh_client.close()
        checklist[host_name][run_type]["file types"] = ["turbidity"]
        return checklist
    _upload_river_runoff_files(sftp_client, run_date, config, host_name, host_config)
    # Weather
    _upload_weather(sftp_client, run_type, run_date, config, host_name, host_config)
    # Live Ocean Boundary Conditions
    _upload_live_ocean_files(
        sftp_client, run_type, run_date, config, host_name, host_config
    )
    sftp_client.close()
    ssh_client.close()
    checklist[host_name][run_type]["file types"] = [
        "ssh",
        "rivers",
        "weather",
        "boundary conditions",
    ]
    return checklist
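
For run types other than "ssh" and "turbidity" the returned checklist records all four uploaded file types. With hypothetical host and run type values it has this shape:

# hypothetical host name and run type, for illustration only
checklist = {
    "cloud-host": {
        "nowcast+": {
            "run date": "2019-01-01",
            "file types": ["ssh", "rivers", "weather", "boundary conditions"],
        }
    }
}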
Example #6
def _tidy_dest_host(run_type, dest_host, dest_path, results_dir, config):
    """Remove FVCOM boundary slab files from non-hindcast results directories on dest_host."""
    ssh_key = Path(os.environ["HOME"], ".ssh",
                   config["run"]["enabled hosts"][dest_host]["ssh key"])
    ssh_client, sftp_client = ssh_sftp.sftp(dest_host, ssh_key)
    with contextlib.ExitStack() as stack:
        for client in (ssh_client, sftp_client):
            stack.enter_context(client)
        results_archive_dir = dest_path / results_dir
        if run_type != "hindcast":
            # FVCOM boundary slab files from hindcast runs are kept so that
            # FVCOM hindcast runs can be done from them; delete them from the
            # results of all other run types
            fvcom_bdy_slabs = ("FVCOM_T.nc", "FVCOM_U.nc", "FVCOM_V.nc",
                               "FVCOM_W.nc")
            fvcom_bdy_files = [
                f for f in sftp_client.listdir(os.fspath(results_archive_dir))
                if f in fvcom_bdy_slabs
            ]
            for f in fvcom_bdy_files:
                # listdir() returns bare file names, so unlink by full path
                sftp_client.unlink(os.fspath(results_archive_dir / f))
Example #7
def watch_NEMO_agrif(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    job_id = parsed_args.job_id.split(".", 1)[0]
    ssh_key = Path(os.environ["HOME"], ".ssh",
                   config["run"]["enabled hosts"][host_name]["ssh key"])
    scratch_dir = Path(
        config["run"]["enabled hosts"][host_name]["scratch dir"])
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    try:
        run_id = _get_run_id(ssh_client, host_name, job_id)
        while _is_queued(ssh_client, host_name, job_id, run_id):
            time.sleep(60)
        tmp_run_dir = _get_tmp_run_dir(ssh_client, host_name, scratch_dir,
                                       run_id)
        run_info = _get_run_info(sftp_client, host_name, tmp_run_dir)
        while _is_running(ssh_client, host_name, job_id, run_id, tmp_run_dir,
                          run_info):
            time.sleep(60 * 5)
    finally:
        sftp_client.close()
        ssh_client.close()
    checklist = {
        "nowcast-agrif": {
            "host": host_name,
            "job id": job_id,
            "run date": arrow.get(run_id[:7], "DDMMMYY").format("YYYY-MM-DD"),
            "completed": True,
        }
    }
    return checklist
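
The run date in the checklist is recovered from the leading DDMMMYY portion of the run id. With a hypothetical run id in the format that run_NEMO_agrif (Example #1) produces:

import arrow

run_id = "01jan19nowcast-agrif"  # hypothetical run id
run_date = arrow.get(run_id[:7], "DDMMMYY").format("YYYY-MM-DD")
# run_date == "2019-01-01"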
Example #8
def upload_all_files(host_name, run_date, config):
    """Upload the forcing and restart files for run_date to host_name via SFTP."""
    host = config["run"][host_name]
    ssh_client, sftp_client = ssh_sftp.sftp(host_name,
                                            host["ssh key name"]["nowcast"])
    # Neah Bay sea surface height
    for day in range(-1, 2):
        filename = config["ssh"]["file template"].format(
            run_date.shift(days=day).date())
        dest_dir = "obs" if day == -1 else "fcst"
        localpath = os.path.join(config["ssh"]["ssh_dir"], dest_dir, filename)
        remotepath = os.path.join(host["ssh_dir"], dest_dir, filename)
        try:
            upload_file(sftp_client, host_name, localpath, remotepath)
        except OSError:
            if dest_dir != "obs":
                raise
            # obs file does not exist, so create a symlink to the
            # corresponding forecast file
            fcst = os.path.join(config["ssh"]["ssh_dir"], "fcst", filename)
            os.symlink(fcst, localpath)
            logger.warning(
                "ssh obs file not found; created symlink to {}".format(fcst))
            upload_file(sftp_client, host_name, localpath, remotepath)
    # Rivers runoff
    for tmpl in config["rivers"]["file templates"].values():
        filename = tmpl.format(run_date.shift(days=-1).date())
        localpath = os.path.join(config["rivers"]["rivers_dir"], filename)
        remotepath = os.path.join(host["rivers_dir"], filename)
        upload_file(sftp_client, host_name, localpath, remotepath)
    # Weather
    for day in range(-1, 2):
        filename = config["weather"]["file template"].format(
            run_date.shift(days=day).date())
        dest_dir = "" if day <= 0 else "fcst"
        localpath = os.path.join(config["weather"]["ops_dir"], dest_dir,
                                 filename)
        remotepath = os.path.join(host["weather_dir"], dest_dir, filename)
        upload_file(sftp_client, host_name, localpath, remotepath)
    # Live Ocean Boundary Conditions
    for day in range(-1, 2):
        filename = config["temperature salinity"]["file template"].format(
            run_date.shift(days=day).date())
        dest_dir = "" if day <= 0 else "fcst"
        localpath = os.path.join(config["temperature salinity"]["bc dir"],
                                 dest_dir, filename)
        remotepath = os.path.join(host["forcing"]["bc dir"], dest_dir,
                                  filename)
        upload_file(sftp_client, host_name, localpath, remotepath)

    # Restart File
    prev_run_id = run_date.shift(days=-1).date()
    prev_run_dir = prev_run_id.strftime("%d%b%y").lower()
    local_dir = os.path.join(config["run"]["results archive"]["nowcast"],
                             prev_run_dir)
    localpath = glob.glob(os.path.join(local_dir, "*restart.nc"))
    filename = os.path.basename(localpath[0])
    remote_dir = os.path.join(host["results"]["nowcast"], prev_run_dir)
    remotepath = os.path.join(remote_dir, filename)
    make_remote_directory(sftp_client, host_name, remote_dir)
    upload_file(sftp_client, host_name, localpath[0], remotepath)

    sftp_client.close()
    ssh_client.close()
    return {host_name: True}
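
The previous run's results directory name is the previous run date rendered as lower-case ddmonyy. For a hypothetical date (and an English locale, since %b is locale-dependent):

import datetime

prev_run_id = datetime.date(2019, 1, 1)  # hypothetical previous run date
prev_run_dir = prev_run_id.strftime("%d%b%y").lower()
# prev_run_dir == "01jan19"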
Example #9
def run_NEMO_hindcast(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    ssh_key = Path(
        os.environ["HOME"],
        ".ssh",
        config["run"]["hindcast hosts"][host_name]["ssh key"],
    )
    ssh_client, sftp_client = ssh_sftp.sftp(host_name, ssh_key)
    try:
        if parsed_args.prev_run_date is None:
            # Assume that there is at least one job already queued or running
            # and get its run date from the queue manager
            prev_run_date, prev_job_id = _get_prev_run_queue_info(
                ssh_client, host_name, config)
        else:
            prev_run_date = arrow.get(parsed_args.prev_run_date)
            prev_job_id = None
        if parsed_args.full_month:
            # Calculate run date and number of run days for a full month run
            run_date = prev_run_date.shift(months=+1)
            run_days = (run_date.shift(months=+1) - run_date).days
        else:
            # Calculate run date and number of run days for a 5 day run
            if prev_run_date.day != 26:
                run_date = prev_run_date.shift(days=+5)
            else:
                run_date = prev_run_date.shift(months=+1).replace(day=1)
            if run_date.day != 26:
                run_days = 5
            else:
                run_days = (run_date.shift(months=+1).replace(day=1) -
                            run_date).days
        if run_date.shift(
                days=+(run_days - 1)).naive > arrow.now().floor("day").naive:
            # Don't try to run into the future - this is a hindcast!!
            if parsed_args.full_month:
                logger.info(
                    f"not launching {run_date.format('YYYY-MM-DD')} run because it extends beyond today"
                )
                sftp_client.close()
                ssh_client.close()
                checklist = {"hindcast": {"host": host_name, "run id": "None"}}
                return checklist
            else:
                run_days = (arrow.now().floor("day").naive -
                            run_date.shift(days=-1).naive).days
                logger.info(
                    f"launching {run_date.format('YYYY-MM-DD')} run for {run_days} days to end of today"
                )
        prev_namelist_info = _get_prev_run_namelist_info(
            ssh_client, sftp_client, host_name, prev_run_date, config)
        _edit_namelist_time(sftp_client, host_name, prev_namelist_info,
                            run_date, run_days, config)
        walltime = (
            # NOTE: values >23:59:59 must be in seconds
            (parsed_args.walltime or 30 * 60 * 60)
            if parsed_args.full_month
            else (parsed_args.walltime or "10:00:00")
        )
        _edit_run_desc(
            sftp_client,
            host_name,
            prev_run_date,
            prev_namelist_info,
            run_date,
            walltime,
            config,
        )
        run_id = f'{run_date.format("DDMMMYY").lower()}hindcast'
        _launch_run(ssh_client, host_name, run_id, prev_job_id, config)
    finally:
        sftp_client.close()
        ssh_client.close()
    checklist = {"hindcast": {"host": host_name, "run id": run_id}}
    return checklist
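
When parsed_args.full_month is False, the hindcast is advanced in 5-day segments starting on the 1st, 6th, 11th, 16th, 21st, and 26th of each month, with the 26th segment running to month end. A worked example with a hypothetical previous run date:

import arrow

prev_run_date = arrow.get("2019-01-21")  # hypothetical previous segment start
run_date = prev_run_date.shift(days=+5)  # 2019-01-26
# the 26th segment runs to the end of the month
run_days = (run_date.shift(months=+1).replace(day=1) - run_date).days
# run_days == 6, i.e. 26, 27, 28, 29, 30, 31 Jan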