예제 #1
0
def _crop_to_watersheds(
    config,
    ymd,
    ist,
    ien,
    jst,
    jen,
    outgrib,
    outzeros,
):
    """Crop the grid to the sub-region of GEM 2.5km operational forecast
    grid that encloses the watersheds that are used to calculate river
    flows for runoff forcing files for the Salish Sea NEMO model.

    :arg config: Nowcast system configuration mapping.
    :arg ymd: Date string embedded in the cropped file names.
    :arg ist: Starting i-index of the sub-region.
    :arg ien: Ending i-index of the sub-region.
    :arg jst: Starting j-index of the sub-region.
    :arg jen: Ending j-index of the sub-region.
    :arg outgrib: Path of hourly GRIB file to crop; deleted after cropping.
    :arg outzeros: Path of zero-hour GRIB file to crop; deleted after cropping.

    :returns: Paths of the cropped hourly and zero-hour GRIB files.
    """
    OPERdir = config['weather']['ops dir']
    wgrib2 = config['weather']['wgrib2']
    newgrib = os.path.join(OPERdir,
                           'oper_allvar_small_{ymd}.grib'.format(ymd=ymd))
    newzeros = os.path.join(OPERdir,
                            'oper_000_small_{ymd}.grib'.format(ymd=ymd))
    # wgrib2 -ijsmall_grib takes "start:end" index range strings
    istr = '{ist}:{ien}'.format(ist=ist, ien=ien)
    jstr = '{jst}:{jen}'.format(jst=jst, jen=jen)
    cmd = [wgrib2, outgrib, '-ijsmall_grib', istr, jstr, newgrib]
    lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
    logger.debug(
        'cropped hourly file to watersheds sub-region: {}'.format(newgrib))
    cmd = [wgrib2, outzeros, '-ijsmall_grib', istr, jstr, newzeros]
    lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
    # Bug fix: this message previously logged the hourly file (newgrib)
    # instead of the zero-hour file that was actually created
    logger.debug(
        'cropped zero-hour file to watersheds sub-region: {}'.format(newzeros))
    os.remove(outgrib)
    os.remove(outzeros)
    return newgrib, newzeros
예제 #2
0
def _rotate_grib_wind(config, fcst_section_hrs):
    """Use wgrib2 to consolidate each hour's u and v wind components into a
    single file and then rotate the wind direction to geographical
    coordinates.

    :arg config: Nowcast system configuration mapping.
    :arg fcst_section_hrs: Mapping whose values are
                           (day_fcst, realstart, start_hr, end_hr) tuples
                           describing the forecast hours to process.

    :raises WorkerError: if a matched GRIB file is empty, or if no files
                         match the u/v component file name patterns.
    """
    GRIBdir = config['weather']['GRIB dir']
    wgrib2 = config['weather']['wgrib2']
    grid_defn = config['weather']['grid_defn.pl']
    # grid_defn.pl expects to find wgrib2 in the pwd,
    # create a symbolic link to keep it happy (if it's not already there)
    try:
        os.symlink(wgrib2, 'wgrib2')
    except OSError:
        # Narrowed from a bare except: the expected failure here is the
        # link already existing (FileExistsError is a subclass of OSError);
        # a bare except also hid real bugs like bad argument types
        pass
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            # Set up directories and files; per-PID suffix keeps concurrent
            # runs from clobbering each other's intermediate files
            sfhour = '{:03d}'.format(fhour)
            suff = '_pid_' + str(os.getpid())
            outuv = os.path.join(GRIBdir, day_fcst, sfhour, 'UV.grib' + suff)
            outuvrot = os.path.join(GRIBdir, day_fcst, sfhour,
                                    'UVrot.grib' + suff)
            # Delete residual instances of files that are created so that
            # function can be re-run cleanly
            try:
                os.remove(outuv)
            except OSError:
                pass
            try:
                os.remove(outuvrot)
            except OSError:
                pass
            # Consolidate u and v wind component values into one file
            for fpattern in ['*UGRD*', '*VGRD*']:
                pattern = os.path.join(GRIBdir, day_fcst, sfhour, fpattern)
                fn = glob.glob(pattern)
                try:
                    if os.stat(fn[0]).st_size == 0:
                        logger.critical('Problem: 0 size file {}'.format(
                            fn[0]))
                        raise WorkerError
                except IndexError:
                    # fn is empty: no downloaded file matched the pattern
                    logger.critical(
                        'No GRIB files match pattern; '
                        'a previous download may have failed: {}'.format(
                            pattern))
                    raise WorkerError
                cmd = [wgrib2, fn[0], '-append', '-grib', outuv]
                lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
            # Rotate winds to earth (geographical) coordinates on the grid
            # that grid_defn.pl reports for the consolidated file
            GRIDspec = subprocess.check_output([grid_defn, outuv])
            cmd = [wgrib2, outuv]
            cmd.extend('-new_grid_winds earth'.split())
            cmd.append('-new_grid')
            cmd.extend(GRIDspec.split())
            cmd.append(outuvrot)
            lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
            os.remove(outuv)
    logger.debug('consolidated and rotated wind components')
예제 #3
0
def _netCDF4_deflate(outnetcdf):
    """Run ncks in a subprocess to convert outnetcdf to netCDF4 format
    with its variables compressed with Lempel-Ziv deflation.

    :arg outnetcdf: Path of the netCDF file to deflate in-place.

    :raises WorkerError: if the ncks subprocess fails
                         (propagated from lib.run_in_subprocess).
    """
    # -4: netCDF4 output; -L4: deflation level 4; -O: overwrite in place
    cmd = ["ncks", "-4", "-L4", "-O", outnetcdf, outnetcdf]
    # The previous try/except WorkerError: raise wrapper was a no-op
    # (catch-and-re-raise); letting the exception propagate is equivalent
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    logger.debug(f"netCDF4 deflated {outnetcdf}")
예제 #4
0
def _netCDF4_deflate(outnetcdf):
    """Run ncks in a subprocess to convert outnetcdf to netCDF4 format
    with its variables compressed with Lempel-Ziv deflation.

    :arg outnetcdf: Path of the netCDF file to deflate in-place.

    :raises WorkerError: if the ncks subprocess fails
                         (propagated from lib.run_in_subprocess).
    """
    # -4: netCDF4 output; -L4: deflation level 4; -O: overwrite in place
    cmd = ['ncks', '-4', '-L4', '-O', outnetcdf, outnetcdf]
    # The previous try/except WorkerError: raise wrapper was a no-op
    # (catch-and-re-raise); letting the exception propagate is equivalent
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    logger.debug('netCDF4 deflated {}'.format(outnetcdf))
예제 #5
0
def _rotate_grib_wind(config, fcst_section_hrs):
    """Use wgrib2 to consolidate each hour's u and v wind components into a
    single file and then rotate the wind direction to geographical
    coordinates.

    :arg config: Nowcast system configuration mapping.
    :arg fcst_section_hrs: Mapping whose values are
                           (day_fcst, realstart, start_hr, end_hr) tuples
                           describing the forecast hours to process.

    :raises WorkerError: if a matched GRIB file is empty, or if no files
                         match the u/v component file name patterns.
    """
    GRIBdir = config["weather"]["download"]["2.5 km"]["GRIB dir"]
    wgrib2 = config["weather"]["wgrib2"]
    grid_defn = config["weather"]["grid_defn.pl"]
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            # Set up directories and files
            sfhour = f"{fhour:03d}"
            outuv = os.path.join(GRIBdir, day_fcst, sfhour, "UV.grib")
            outuvrot = os.path.join(GRIBdir, day_fcst, sfhour, "UVrot.grib")
            # Delete residual instances of files that are created so that
            # function can be re-run cleanly
            try:
                os.remove(outuv)
            except OSError:
                pass
            try:
                os.remove(outuvrot)
            except OSError:
                pass
            # Consolidate u and v wind component values into one file
            for fpattern in ["*UGRD*", "*VGRD*"]:
                pattern = os.path.join(GRIBdir, day_fcst, sfhour, fpattern)
                fn = glob.glob(pattern)
                try:
                    if os.stat(fn[0]).st_size == 0:
                        logger.critical(f"Problem: 0 size file {fn[0]}")
                        raise WorkerError
                except IndexError:
                    # fn is empty: no downloaded file matched the pattern.
                    # Bug fix: message previously read "a previous download;
                    # may have failed" with a stray semicolon
                    logger.critical(
                        f"No GRIB files match pattern; a previous download"
                        f" may have failed: {pattern}")
                    raise WorkerError
                cmd = [wgrib2, fn[0], "-append", "-grib", outuv]
                lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
            # Rotate winds to earth (geographical) coordinates on the grid
            # that grid_defn.pl reports; grid_defn.pl expects wgrib2 in pwd,
            # so run it in wgrib2's directory
            GRIDspec = subprocess.check_output([grid_defn, outuv],
                                               cwd=os.path.dirname(wgrib2))
            cmd = [wgrib2, outuv]
            cmd.extend("-new_grid_winds earth".split())
            cmd.append("-new_grid")
            cmd.extend(GRIDspec.split())
            cmd.append(outuvrot)
            lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
            os.remove(outuv)
    logger.debug("consolidated and rotated wind components")
예제 #6
0
def _collect_grib_scalars(config, fcst_section_hrs):
    """Use wgrib2 and grid_defn.pl to consolidate each hour's scalar
    variables into a single file and then re-grid them to match the
    u and v wind components.

    :arg config: Nowcast system configuration mapping.
    :arg fcst_section_hrs: Mapping whose values are
                           (day_fcst, realstart, start_hr, end_hr) tuples
                           describing the forecast hours to process.
    """
    GRIBdir = config['weather']['GRIB dir']
    wgrib2 = config['weather']['wgrib2']
    grid_defn = config['weather']['grid_defn.pl']
    # grid_defn.pl expects to find wgrib2 in the pwd,
    # create a symbolic link to keep it happy
    try:
        os.symlink(wgrib2, 'wgrib2')
    except OSError:
        # Narrowed from a bare except: the expected failure here is the
        # link already existing (FileExistsError is a subclass of OSError);
        # a bare except also hid real bugs like bad argument types
        pass
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            # Set up directories and files; per-PID suffix keeps concurrent
            # runs from clobbering each other's intermediate files
            sfhour = '{:03d}'.format(fhour)
            suff = '_pid_' + str(os.getpid())
            outscalar = os.path.join(GRIBdir, day_fcst, sfhour,
                                     'scalar.grib' + suff)
            outscalargrid = os.path.join(GRIBdir, day_fcst, sfhour,
                                         'gscalar.grib' + suff)
            # Delete residual instances of files that are created so that
            # function can be re-run cleanly
            try:
                os.remove(outscalar)
            except OSError:
                pass
            try:
                os.remove(outscalargrid)
            except OSError:
                pass
            # Consolidate scalar variables into one file; skip the wind
            # component (*GRD*) files and non-CMC files
            for fn in glob.glob(os.path.join(GRIBdir, day_fcst, sfhour, '*')):
                if not ('GRD' in fn) and ('CMC' in fn):
                    cmd = [wgrib2, fn, '-append', '-grib', outscalar]
                    lib.run_in_subprocess(cmd, wgrib2_logger.debug,
                                          logger.error)
            #  Re-grid to the grid that grid_defn.pl reports
            GRIDspec = subprocess.check_output([grid_defn, outscalar])
            cmd = [wgrib2, outscalar]
            cmd.append('-new_grid')
            cmd.extend(GRIDspec.split())
            cmd.append(outscalargrid)
            lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
            os.remove(outscalar)
    logger.debug('consolidated and re-gridded scalar variables')
예제 #7
0
def _make_netCDF_files(config, ymd, subdir, outgrib, outzeros):
    """Convert the hourly and zero-hour GRIB files to netCDF classic files.

    The source GRIB files are deleted after conversion, and the hourly
    netCDF file's permissions/group are fixed up.

    :returns: Paths of the hourly and zero-hour netCDF files.
    """
    ops_dir = config["weather"]["ops dir"]
    wgrib2 = config["weather"]["wgrib2"]
    outnetcdf = os.path.join(ops_dir, subdir, f"ops_{ymd}.nc")
    out0netcdf = os.path.join(ops_dir, subdir, f"oper_000_{ymd}.nc")
    lib.run_in_subprocess(
        [wgrib2, outgrib, "-netcdf", outnetcdf],
        wgrib2_logger.debug, logger.error)
    logger.debug(f"created hourly netCDF classic file: {outnetcdf}")
    lib.fix_perms(outnetcdf, grp_name=config["file group"])
    lib.run_in_subprocess(
        [wgrib2, outzeros, "-netcdf", out0netcdf],
        wgrib2_logger.debug, logger.error)
    logger.debug(f"created zero-hour netCDF classic file: {out0netcdf}")
    os.remove(outgrib)
    os.remove(outzeros)
    return outnetcdf, out0netcdf
예제 #8
0
def _make_netCDF_files(config, ymd, subdir, outgrib, outzeros):
    """Convert the hourly and zero-hour GRIB files to netCDF classic files.

    The source GRIB files are deleted after conversion, and the hourly
    netCDF file's permissions/group are fixed up.

    :returns: Paths of the hourly and zero-hour netCDF files.
    """
    ops_dir = config['weather']['ops dir']
    wgrib2 = config['weather']['wgrib2']
    outnetcdf = os.path.join(ops_dir, subdir, f'ops_{ymd}.nc')
    out0netcdf = os.path.join(ops_dir, subdir, f'oper_000_{ymd}.nc')
    lib.run_in_subprocess(
        [wgrib2, outgrib, '-netcdf', outnetcdf],
        wgrib2_logger.debug, logger.error)
    logger.debug(f'created hourly netCDF classic file: {outnetcdf}')
    lib.fix_perms(outnetcdf, grp_name=config['file group'])
    lib.run_in_subprocess(
        [wgrib2, outzeros, '-netcdf', out0netcdf],
        wgrib2_logger.debug, logger.error)
    logger.debug(f'created zero-hour netCDF classic file: {out0netcdf}')
    os.remove(outgrib)
    os.remove(outzeros)
    return outnetcdf, out0netcdf
예제 #9
0
def _crop_to_watersheds(config, ymd, ist, ien, jst, jen, outgrib, outzeros):
    """Crop the grid to the sub-region of GEM 2.5km operational forecast
    grid that encloses the watersheds that are used to calculate river
    flows for runoff forcing files for the Salish Sea NEMO model.

    :arg config: Nowcast system configuration mapping.
    :arg ymd: Date string embedded in the cropped file names.
    :arg ist: Starting i-index of the sub-region.
    :arg ien: Ending i-index of the sub-region.
    :arg jst: Starting j-index of the sub-region.
    :arg jen: Ending j-index of the sub-region.
    :arg outgrib: Path of hourly GRIB file to crop; deleted after cropping.
    :arg outzeros: Path of zero-hour GRIB file to crop; deleted after cropping.

    :returns: Paths of the cropped hourly and zero-hour GRIB files.
    """
    OPERdir = config["weather"]["ops dir"]
    wgrib2 = config["weather"]["wgrib2"]
    newgrib = os.path.join(OPERdir, f"oper_allvar_small_{ymd}.grib")
    newzeros = os.path.join(OPERdir, f"oper_000_small_{ymd}.grib")
    # wgrib2 -ijsmall_grib takes "start:end" index range strings
    istr = f"{ist}:{ien}"
    jstr = f"{jst}:{jen}"
    cmd = [wgrib2, outgrib, "-ijsmall_grib", istr, jstr, newgrib]
    lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
    logger.debug(f"cropped hourly file to watersheds sub-region: {newgrib}")
    cmd = [wgrib2, outzeros, "-ijsmall_grib", istr, jstr, newzeros]
    lib.run_in_subprocess(cmd, wgrib2_logger.debug, logger.error)
    # Bug fix: this message previously logged the hourly file (newgrib)
    # instead of the zero-hour file that was actually created
    logger.debug(f"cropped zero-hour file to watersheds sub-region: {newzeros}")
    os.remove(outgrib)
    os.remove(outzeros)
    return newgrib, newzeros
예제 #10
0
def _collect_grib_scalars(config, fcst_section_hrs):
    """Use wgrib2 and grid_defn.pl to consolidate each hour's scalar
    variables into a single file and then re-grid them to match the
    u and v wind components.

    :arg config: Nowcast system configuration mapping.
    :arg fcst_section_hrs: Mapping whose values are
                           (day_fcst, realstart, start_hr, end_hr) tuples
                           describing the forecast hours to process.
    """
    GRIBdir = config["weather"]["download"]["2.5 km"]["GRIB dir"]
    wgrib2 = config["weather"]["wgrib2"]
    grid_defn = config["weather"]["grid_defn.pl"]
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            hour_dir = os.path.join(GRIBdir, day_fcst, f"{fhour:03d}")
            outscalar = os.path.join(hour_dir, "scalar.grib")
            outscalargrid = os.path.join(hour_dir, "gscalar.grib")
            # Remove leftovers from any previous run so the function can
            # be re-run cleanly
            for leftover in (outscalar, outscalargrid):
                try:
                    os.remove(leftover)
                except OSError:
                    pass
            # Append every CMC file that is not a wind component (*GRD*)
            # into a single consolidated scalar file
            for fn in glob.glob(os.path.join(hour_dir, "*")):
                if "CMC" in fn and "GRD" not in fn:
                    lib.run_in_subprocess(
                        [wgrib2, fn, "-append", "-grib", outscalar],
                        wgrib2_logger.debug, logger.error)
            # Re-grid to the grid that grid_defn.pl reports; grid_defn.pl
            # expects wgrib2 in pwd, so run it in wgrib2's directory
            GRIDspec = subprocess.check_output(
                [grid_defn, outscalar], cwd=os.path.dirname(wgrib2))
            regrid_cmd = [wgrib2, outscalar, "-new_grid"]
            regrid_cmd.extend(GRIDspec.split())
            regrid_cmd.append(outscalargrid)
            lib.run_in_subprocess(
                regrid_cmd, wgrib2_logger.debug, logger.error)
            os.remove(outscalar)
    logger.debug("consolidated and re-gridded scalar variables")
예제 #11
0
def _concat_hourly_gribs(config, ymd, fcst_section_hrs):
    """Concatenate in hour order the wind velocity components
    and scalar variables from hourly files into a daily file.

    Also create the zero-hour file that is used to initialize the
    calculation of instantaneous values from the forecast accumulated
    values.
    """
    GRIBdir = config["weather"]["download"]["2.5 km"]["GRIB dir"]
    OPERdir = config["weather"]["ops dir"]
    wgrib2 = config["weather"]["wgrib2"]
    outgrib = os.path.join(OPERdir, f"oper_allvar_{ymd}.grib")
    outzeros = os.path.join(OPERdir, f"oper_000_{ymd}.grib")
    # Remove leftovers from any previous run so the function can be
    # re-run cleanly
    for leftover in (outgrib, outzeros):
        try:
            os.remove(leftover)
        except OSError:
            pass
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            hour_dir = os.path.join(GRIBdir, day_fcst, f"{fhour:03d}")
            outuvrot = os.path.join(hour_dir, "UVrot.grib")
            outscalargrid = os.path.join(hour_dir, "gscalar.grib")
            # The section's first hour with realstart == -1 seeds the
            # zero-hour file; every other hour appends to the daily file
            target = (
                outzeros
                if fhour == start_hr and realstart == -1
                else outgrib
            )
            for hourly_file in (outuvrot, outscalargrid):
                lib.run_in_subprocess(
                    [wgrib2, hourly_file, "-append", "-grib", target],
                    wgrib2_logger.debug, logger.error)
            os.remove(outuvrot)
            os.remove(outscalargrid)
    logger.debug(
        f"concatenated variables in hour order from hourly files to daily "
        f"file {outgrib}")
    logger.debug(
        f"created zero-hour file for initialization of accumulated -> "
        f"instantaneous values calculations: {outzeros}")
    return outgrib, outzeros
예제 #12
0
def _concat_hourly_gribs(config, ymd, fcst_section_hrs):
    """Concatenate in hour order the wind velocity components
    and scalar variables from hourly files into a daily file.

    Also create the zero-hour file that is used to initialize the
    calculation of instantaneous values from the forecast accumulated
    values.
    """
    GRIBdir = config['weather']['GRIB dir']
    OPERdir = config['weather']['ops dir']
    wgrib2 = config['weather']['wgrib2']
    outgrib = os.path.join(OPERdir, 'oper_allvar_{ymd}.grib'.format(ymd=ymd))
    outzeros = os.path.join(OPERdir, 'oper_000_{ymd}.grib'.format(ymd=ymd))
    # Remove leftovers from any previous run so the function can be
    # re-run cleanly
    for leftover in (outgrib, outzeros):
        try:
            os.remove(leftover)
        except OSError:
            pass
    for day_fcst, realstart, start_hr, end_hr in fcst_section_hrs.values():
        for fhour in range(start_hr, end_hr + 1):
            # Per-PID suffix keeps concurrent runs from clobbering each
            # other's intermediate files
            sfhour = '{:03d}'.format(fhour)
            suff = '_pid_' + str(os.getpid())
            hour_dir = os.path.join(GRIBdir, day_fcst, sfhour)
            outuvrot = os.path.join(hour_dir, 'UVrot.grib' + suff)
            outscalargrid = os.path.join(hour_dir, 'gscalar.grib' + suff)
            # The section's first hour with realstart == -1 seeds the
            # zero-hour file; every other hour appends to the daily file
            target = (
                outzeros
                if fhour == start_hr and realstart == -1
                else outgrib
            )
            for hourly_file in (outuvrot, outscalargrid):
                lib.run_in_subprocess(
                    [wgrib2, hourly_file, '-append', '-grib', target],
                    wgrib2_logger.debug, logger.error)
            os.remove(outuvrot)
            os.remove(outscalargrid)
    logger.debug('concatenated variables in hour order from hourly files '
                 'to daily file {}'.format(outgrib))
    logger.debug('created zero-hour file for initialization of accumulated -> '
                 'instantaneous values calculations: {}'.format(outzeros))
    return outgrib, outzeros
예제 #13
0
def download_results(parsed_args, config, *args):
    """Download a run's results files from the run host to the results
    archive, fix up their ownership/permissions, and build the checklist.

    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict

    :raises WorkerError: if host_name is not a recognized host.
    """
    host_name = parsed_args.host_name
    run_type = parsed_args.run_type
    dest_host = parsed_args.dest_host
    run_date = parsed_args.run_date
    try:
        try:
            # Hindcast special case 1st due to hindcast host in enabled hosts
            # with empty run types collection to enable forcing uploads
            host_config = config["run"]["hindcast hosts"][host_name]
        except KeyError:
            host_config = config["run"]["enabled hosts"][host_name]
    except KeyError:
        logger.critical(f"unrecognized host: {host_name}")
        raise WorkerError
    # run_date formatting implies an Arrow-like object (format("DDMMMYY"))
    results_dir = run_date.format("DDMMMYY").lower()
    run_type_results = Path(host_config["run types"][run_type]["results"])
    src_dir = run_type_results / results_dir
    src = f"{host_name}:{src_dir}"
    try:
        dest = Path(config["results archive"][run_type])
    except TypeError:
        # Path() raised TypeError, so the config value is not a plain path;
        # presumably a per-destination-host mapping — index it by dest_host
        dest_path = Path(config["results archive"][run_type][dest_host])
        dest = dest_path if dest_host == "localhost" else f"{dest_host}:{dest_path}"
    logger.info(f"downloading results from {src} to {dest}")
    # scp -p preserves times/modes; -r copies the results dir recursively
    cmd = shlex.split(f"scp -pr {src} {dest}")
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    checklist = {run_type: {"run date": run_date.format("YYYY-MM-DD")}}
    if dest_host == "localhost":
        # Local download: tidy in place and list the archived results files
        results_archive_dir = _tidy_localhost(run_type, dest, results_dir,
                                              config)
        for freq in "1h 1d".split():
            checklist[run_type][freq] = list(
                map(os.fspath,
                    results_archive_dir.glob(f"*SalishSea_{freq}_*.nc")))
    else:
        # Remote destination: tidy over ssh; no local files to enumerate
        _tidy_dest_host(run_type, dest_host, dest_path, results_dir, config)
        checklist[run_type]["destination"] = dest
    return checklist
예제 #14
0
def download_wwatch3_results(parsed_args, config, *args):
    """Download a WaveWatch III run's results files from the run host to
    the results archive and fix up their ownership/permissions.

    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_type = parsed_args.run_type
    run_date = parsed_args.run_date
    results_dir = run_date.format("DDMMMYY").lower()
    run_type_results = Path(config["wave forecasts"]["results"][run_type])
    src = f"{host_name}:{run_type_results / results_dir}"
    dest = Path(config["wave forecasts"]["results archive"][run_type])
    # scp -C compresses in transit; -p preserves times/modes; -r recursive
    cmd = shlex.split(f"scp -Cpr {src} {dest}")
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    results_archive_dir = dest / results_dir
    # Fix: user perms were redacted to "******" in this copy — "******" is
    # not a valid permission triplet and int(FilePerms(...)) would fail;
    # restore the conventional "rwx" user permissions
    lib.fix_perms(
        results_archive_dir,
        mode=int(lib.FilePerms(user="rwx", group="rwx", other="rx")),
        grp_name=config["file group"],
    )
    for filepath in results_archive_dir.glob("*"):
        lib.fix_perms(filepath, grp_name=config["file group"])
    # Dropped the pointless f-prefix on the placeholder-free glob pattern
    checklist = {
        run_type:
        list(map(os.fspath, results_archive_dir.glob("SoG_ww3_*.nc")))
    }
    return checklist
def download_results(parsed_args, config, *args):
    """Download a run's results files from the run host to the results
    archive, fix up their ownership/permissions, and build the checklist.

    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    run_date = parsed_args.run_date
    run_type = parsed_args.run_type
    host_run_config = config['run'][host_name]
    # strftime implies run_date is a datetime/date-like object here
    results_dir = run_date.strftime('%d%b%y').lower()
    run_type_results = Path(host_run_config['results'][run_type])
    src_dir = run_type_results / results_dir
    src = f'{host_name}:{src_dir}'
    dest = Path(config['results archive'][run_type])
    # -C: compress in transit; -p: preserve times/modes; -r: recursive
    cmd = ['scp', '-Cpr', src, str(dest)]
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    # NOTE(review): group name 'sallen' is hard-coded here rather than
    # read from config (cf. config['file group'] elsewhere) — confirm
    lib.fix_perms(str(dest / results_dir),
                  mode=lib.PERMS_RWX_RWX_R_X,
                  grp_name='sallen')
    results_archive_dir = dest / results_dir
    for filepath in results_archive_dir.glob('*'):
        lib.fix_perms(str(filepath), grp_name='sallen')
    # Checklist maps run_type -> {freq: [archived results file paths]}
    checklist = {run_type: {}}
    for freq in '1h 1d'.split():
        checklist[run_type][freq] = list(
            map(str, results_archive_dir.glob(f'SalishSea_{freq}_*.nc')))
    return checklist
예제 #16
0
def download_fvcom_results(parsed_args, config, *args):
    """Download a VHFR FVCOM run's results files from the run host to the
    results archive and fix up their ownership/permissions.

    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    host_name = parsed_args.host_name
    model_config = parsed_args.model_config
    run_type = parsed_args.run_type
    run_date = parsed_args.run_date
    results_dir = run_date.format("DDMMMYY").lower()
    run_type_results = Path(config["vhfr fvcom runs"]["run types"]
                            [f"{run_type} {model_config}"]["results"])
    src = f"{host_name}:{run_type_results / results_dir}"
    dest = Path(config["vhfr fvcom runs"]["results archive"]
                [f"{run_type} {model_config}"])
    # scp -C compresses in transit; -p preserves times/modes; -r recursive
    cmd = shlex.split(f"scp -Cpr {src} {dest}")
    lib.run_in_subprocess(cmd, logger.debug, logger.error)
    results_archive_dir = dest / results_dir
    # Fix: user perms were redacted to "******" in this copy — "******" is
    # not a valid permission triplet and int(FilePerms(...)) would fail;
    # restore the conventional "rwx" user permissions
    lib.fix_perms(
        results_archive_dir,
        mode=int(lib.FilePerms(user="rwx", group="rwx", other="rx")),
        grp_name=config["file group"],
    )
    for filepath in results_archive_dir.glob("*"):
        lib.fix_perms(filepath, grp_name=config["file group"])
    checklist = {
        run_type: {
            "host": host_name,
            "model config": model_config,
            "run date": run_date.format("YYYY-MM-DD"),
            "files": list(map(os.fspath, results_archive_dir.glob("vh*.nc"))),
        }
    }
    return checklist