def _get_water_data(ferry_platform, device_category, ymd, devices_config):
    sensors = ",".join(devices_config[device_category]["sensors"].values())
    logger.info(f"requesting ONC {ferry_platform} {device_category} data for {ymd}")
    try:
        onc_data = data_tools.get_onc_data(
            "scalardata",
            "getByStation",
            os.environ["ONC_USER_TOKEN"],
            station=ferry_platform,
            deviceCategory=device_category,
            sensors=sensors,
            dateFrom=data_tools.onc_datetime(f"{ymd} 00:00", "utc"),
        )
    except requests.HTTPError as e:
        if e.response.status_code == 504:
            return _empty_device_data(ferry_platform, device_category, ymd, sensors)
        else:
            logger.error(
                f"request for ONC {ferry_platform} {device_category} data "
                f"for {ymd} failed: {e}"
            )
            raise WorkerError
    try:
        device_data = data_tools.onc_json_to_dataset(onc_data)
    except TypeError:
        return _empty_device_data(ferry_platform, device_category, ymd, sensors)
    logger.debug(
        f"ONC {ferry_platform} {device_category} data for {ymd} received and parsed"
    )
    return device_data
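# _empty_device_data() is referenced above but defined elsewhere in this module.
# The sketch below is a hedged illustration, assuming the helper builds an empty
# xarray dataset with one variable per requested sensor so that downstream
# processing sees the expected variable names; the coordinate name and dtypes
# here are assumptions, not the module's actual implementation.
import numpy
import xarray


def _empty_device_data_sketch(ferry_platform, device_category, ymd, sensors):
    # One empty, time-indexed array per sensor in the comma-separated sensors string
    data_vars = {
        sensor: xarray.DataArray(
            data=numpy.array([], dtype=float),
            coords={"sampleTime": numpy.array([], dtype="datetime64[ns]")},
            dims="sampleTime",
        )
        for sensor in sensors.split(",")
    }
    return xarray.Dataset(data_vars)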
def _get_nav_data(ferry_platform, ymd, location_config):
    for station in location_config["stations"]:
        device_category = location_config["device category"]
        sensors = ",".join(location_config["sensors"])
        logger.info(f"requesting ONC {station} {device_category} data for {ymd}")
        try:
            onc_data = data_tools.get_onc_data(
                "scalardata",
                "getByStation",
                os.environ["ONC_USER_TOKEN"],
                station=station,
                deviceCategory=device_category,
                sensors=sensors,
                dateFrom=data_tools.onc_datetime(f"{ymd} 00:00", "utc"),
            )
        except requests.HTTPError as e:
            msg = (
                f"request for ONC {station} {device_category} data for {ymd} "
                f"failed: {e}"
            )
            logger.error(msg)
            raise WorkerError(msg)
        try:
            nav_data = data_tools.onc_json_to_dataset(onc_data)
            logger.debug(
                f"ONC {station} {device_category} data for {ymd} received and parsed"
            )
            return nav_data
        except TypeError:
            # Invalid data from NAV device, so try the next one
            continue
    msg = (
        f"no valid nav data found from ONC ferry nav station devices "
        f'{location_config["stations"]} for {ymd}'
    )
    logger.error(msg)
    raise WorkerError(msg)
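# A hedged usage sketch showing how the two helpers above might be combined for
# one ferry platform and date. The worker's real control flow and the
# "location"/"devices" config keys are assumptions inferred from the lookups in
# _get_nav_data() and _get_water_data(), not the project's actual config schema.
def _collect_ferry_data_sketch(ferry_platform, ymd, ferry_config):
    nav_data = _get_nav_data(ferry_platform, ymd, ferry_config["location"])
    water_data = {
        device_category: _get_water_data(
            ferry_platform, device_category, ymd, ferry_config["devices"]
        )
        for device_category in ferry_config["devices"]
    }
    return nav_data, water_data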
def get_onc_ctd(parsed_args, config, *args):
    ymd = parsed_args.data_date.format("YYYY-MM-DD")
    logger.info(f"requesting ONC {parsed_args.onc_station} CTD T&S data for {ymd}")
    TOKEN = os.environ["ONC_USER_TOKEN"]
    onc_data = data_tools.get_onc_data(
        "scalardata",
        "getByStation",
        TOKEN,
        station=parsed_args.onc_station,
        deviceCategory="CTD",
        sensors="salinity,temperature",
        dateFrom=data_tools.onc_datetime(f"{ymd} 00:00", "utc"),
    )
    try:
        ctd_data = data_tools.onc_json_to_dataset(onc_data)
    except TypeError:
        logger.error(f"No ONC {parsed_args.onc_station} CTD T&S data for {ymd}")
        raise WorkerError
    logger.debug(
        f"ONC {parsed_args.onc_station} CTD T&S data for {ymd} received and parsed"
    )
    logger.debug(
        f"filtering ONC {parsed_args.onc_station} temperature data for {ymd} "
        f"to exclude qaqcFlag!=1"
    )
    temperature = _qaqc_filter(ctd_data, "temperature")
    logger.debug(
        f"filtering ONC {parsed_args.onc_station} salinity data for {ymd} "
        f"to exclude qaqcFlag!=1"
    )
    salinity = _qaqc_filter(ctd_data, "salinity")
    logger.debug(f"creating ONC {parsed_args.onc_station} CTD T&S dataset for {ymd}")
    ds = _create_dataset(parsed_args.onc_station, temperature, salinity)
    dest_dir = Path(config["observations"]["ctd data"]["dest dir"])
    filepath_tmpl = config["observations"]["ctd data"]["filepath template"]
    nc_filepath = dest_dir / filepath_tmpl.format(
        station=parsed_args.onc_station,
        yyyymmdd=parsed_args.data_date.format("YYYYMMDD"),
    )
    logger.debug(
        f"storing ONC {parsed_args.onc_station} CTD T&S dataset "
        f"for {ymd} as {nc_filepath}"
    )
    encoding = {
        var: {"dtype": "int64", "_FillValue": 0}
        for var in ds.data_vars
        if var.endswith("sample_count")
    }
    encoding["time"] = {"units": "minutes since 1970-01-01 00:00"}
    ds.to_netcdf(os.fspath(nc_filepath), encoding=encoding, unlimited_dims=("time",))
    checklist = {parsed_args.onc_station: os.fspath(nc_filepath)}
    return checklist
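# _qaqc_filter() is defined elsewhere in this module. The sketch below is a
# hedged illustration of the filtering the log messages above describe: keep only
# samples whose ONC qaqcFlag is 1. The "<sensor>_qaqcFlag" variable naming is an
# assumption about the parsed dataset layout, not the module's actual code.
def _qaqc_filter_sketch(ctd_data, var):
    # Drop samples that did not pass ONC quality control (qaqcFlag != 1)
    return ctd_data[var].where(ctd_data[f"{var}_qaqcFlag"] == 1, drop=True)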
def _prep_plot_data(
        place, grid_T_hr, dev_grid_T_hr, timezone, mesh_mask, dev_mesh_mask):
    try:
        j, i = places.PLACES[place]['NEMO grid ji']
    except KeyError as e:
        raise KeyError(
            f'place name or info key not found in '
            f'salishsea_tools.places.PLACES: {e}')
    node_depth = places.PLACES[place]['depth']
    station_code = places.PLACES[place]['ONC stationCode']
    # Production model results
    model_time = nc_tools.timestamp(
        grid_T_hr, range(grid_T_hr.variables['time_counter'].size))
    try:
        # NEMO-3.4 mesh mask
        gdept = mesh_mask.variables['gdept']
    except KeyError:
        # NEMO-3.6 mesh mask
        gdept = mesh_mask.variables['gdept_0']
    tracer_depths = gdept[..., j, i][0]
    tracer_mask = mesh_mask.variables['tmask'][..., j, i][0]
    try:
        # NEMO-3.4 mesh mask
        gdepw = mesh_mask.variables['gdepw']
    except KeyError:
        # NEMO-3.6 mesh mask
        gdepw = mesh_mask.variables['gdepw_0']
    w_depths = gdepw[..., j, i][0]
    salinity_profiles = grid_T_hr.variables['vosaline'][..., j, i]
    temperature_profiles = grid_T_hr.variables['votemper'][..., j, i]
    model_salinity_ts = _calc_results_time_series(
        salinity_profiles, model_time, node_depth, timezone, tracer_depths,
        tracer_mask, w_depths)
    model_temperature_ts = _calc_results_time_series(
        temperature_profiles, model_time, node_depth, timezone, tracer_depths,
        tracer_mask, w_depths)
    # Development model results
    dev_model_time = nc_tools.timestamp(
        dev_grid_T_hr, range(dev_grid_T_hr.variables['time_counter'].size))
    tracer_depths = dev_mesh_mask.variables['gdept_0'][..., j, i][0]
    tracer_mask = dev_mesh_mask.variables['tmask'][..., j, i][0]
    w_depths = dev_mesh_mask.variables['gdepw_0'][..., j, i][0]
    salinity_profiles = dev_grid_T_hr.variables['vosaline'][..., j, i]
    temperature_profiles = dev_grid_T_hr.variables['votemper'][..., j, i]
    dev_model_salinity_ts = _calc_results_time_series(
        salinity_profiles, dev_model_time, node_depth, timezone, tracer_depths,
        tracer_mask, w_depths)
    dev_model_temperature_ts = _calc_results_time_series(
        temperature_profiles, dev_model_time, node_depth, timezone,
        tracer_depths, tracer_mask, w_depths)
    # Observations
    onc_data = data_tools.get_onc_data(
        'scalardata',
        'getByStation',
        os.environ['ONC_USER_TOKEN'],
        station=station_code,
        deviceCategory='CTD',
        sensors='salinity,temperature',
        dateFrom=data_tools.onc_datetime(model_time[0], 'utc'),
        dateTo=data_tools.onc_datetime(model_time[-1], 'utc'))
    plot_data = namedtuple(
        'PlotData', 'model_salinity_ts, model_temperature_ts, '
        'dev_model_salinity_ts, dev_model_temperature_ts, '
        'ctd_data')
    return plot_data(
        model_salinity_ts=model_salinity_ts,
        model_temperature_ts=model_temperature_ts,
        dev_model_salinity_ts=dev_model_salinity_ts,
        dev_model_temperature_ts=dev_model_temperature_ts,
        ctd_data=data_tools.onc_json_to_dataset(onc_data),
    )
def _prep_plot_data(
    place, grid_T_hr, dev_grid_T_hr, timezone, mesh_mask, dev_mesh_mask
):
    try:
        j, i = places.PLACES[place]["NEMO grid ji"]
    except KeyError as e:
        raise KeyError(
            f"place name or info key not found in salishsea_tools.places.PLACES: {e}"
        )
    node_depth = places.PLACES[place]["depth"]
    station_code = places.PLACES[place]["ONC stationCode"]
    # Production model results
    model_time = nc_tools.timestamp(
        grid_T_hr, range(grid_T_hr.variables["time_counter"].size)
    )
    try:
        # NEMO-3.4 mesh mask
        gdept = mesh_mask.variables["gdept"]
    except KeyError:
        # NEMO-3.6 mesh mask
        gdept = mesh_mask.variables["gdept_0"]
    tracer_depths = gdept[..., j, i][0]
    tracer_mask = mesh_mask.variables["tmask"][..., j, i][0]
    try:
        # NEMO-3.4 mesh mask
        gdepw = mesh_mask.variables["gdepw"]
    except KeyError:
        # NEMO-3.6 mesh mask
        gdepw = mesh_mask.variables["gdepw_0"]
    w_depths = gdepw[..., j, i][0]
    salinity_profiles = grid_T_hr.variables["vosaline"][..., j, i]
    temperature_profiles = grid_T_hr.variables["votemper"][..., j, i]
    model_salinity_ts = _calc_results_time_series(
        salinity_profiles,
        model_time,
        node_depth,
        timezone,
        tracer_depths,
        tracer_mask,
        w_depths,
    )
    model_temperature_ts = _calc_results_time_series(
        temperature_profiles,
        model_time,
        node_depth,
        timezone,
        tracer_depths,
        tracer_mask,
        w_depths,
    )
    # Development model results
    dev_model_time = nc_tools.timestamp(
        dev_grid_T_hr, range(dev_grid_T_hr.variables["time_counter"].size)
    )
    tracer_depths = dev_mesh_mask.variables["gdept_0"][..., j, i][0]
    tracer_mask = dev_mesh_mask.variables["tmask"][..., j, i][0]
    w_depths = dev_mesh_mask.variables["gdepw_0"][..., j, i][0]
    salinity_profiles = dev_grid_T_hr.variables["vosaline"][..., j, i]
    temperature_profiles = dev_grid_T_hr.variables["votemper"][..., j, i]
    dev_model_salinity_ts = _calc_results_time_series(
        salinity_profiles,
        dev_model_time,
        node_depth,
        timezone,
        tracer_depths,
        tracer_mask,
        w_depths,
    )
    dev_model_temperature_ts = _calc_results_time_series(
        temperature_profiles,
        dev_model_time,
        node_depth,
        timezone,
        tracer_depths,
        tracer_mask,
        w_depths,
    )
    # Observations
    onc_data = data_tools.get_onc_data(
        "scalardata",
        "getByStation",
        os.environ["ONC_USER_TOKEN"],
        station=station_code,
        deviceCategory="CTD",
        sensors="salinity,temperature",
        dateFrom=data_tools.onc_datetime(model_time[0], "utc"),
        dateTo=data_tools.onc_datetime(model_time[-1], "utc"),
    )
    plot_data = namedtuple(
        "PlotData",
        "model_salinity_ts, model_temperature_ts, "
        "dev_model_salinity_ts, dev_model_temperature_ts, "
        "ctd_data",
    )
    return plot_data(
        model_salinity_ts=model_salinity_ts,
        model_temperature_ts=model_temperature_ts,
        dev_model_salinity_ts=dev_model_salinity_ts,
        dev_model_temperature_ts=dev_model_temperature_ts,
        ctd_data=data_tools.onc_json_to_dataset(onc_data),
    )
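# A hedged usage sketch of the PlotData namedtuple returned above: overlay the
# production and development model salinity time series with the ONC CTD
# observations. It assumes _calc_results_time_series() returns (times, values)
# pairs and that the parsed ONC dataset has a "salinity" variable on a
# "sampleTime" coordinate; those names are assumptions, not this module's code.
import matplotlib.pyplot as plt


def _plot_salinity_comparison_sketch(plot_data):
    fig, ax = plt.subplots()
    ax.plot(*plot_data.model_salinity_ts, label="production model")
    ax.plot(*plot_data.dev_model_salinity_ts, label="development model")
    ax.plot(
        plot_data.ctd_data.sampleTime,
        plot_data.ctd_data.salinity,
        ".",
        label="ONC CTD observations",
    )
    ax.set_ylabel("Salinity")
    ax.legend()
    return fig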
def get_onc_ctd(parsed_args, config, *args):
    ymd = parsed_args.data_date.format('YYYY-MM-DD')
    logger.info(
        f'requesting ONC {parsed_args.onc_station} CTD T&S data for {ymd}',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    TOKEN = os.environ['ONC_USER_TOKEN']
    onc_data = data_tools.get_onc_data(
        'scalardata',
        'getByStation',
        TOKEN,
        station=parsed_args.onc_station,
        deviceCategory='CTD',
        sensors='salinity,temperature',
        dateFrom=data_tools.onc_datetime(f'{ymd} 00:00', 'utc'),
    )
    ctd_data = data_tools.onc_json_to_dataset(onc_data)
    logger.debug(
        f'ONC {parsed_args.onc_station} CTD T&S data for {ymd} received and '
        f'parsed',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    logger.debug(
        f'filtering ONC {parsed_args.onc_station} temperature data for {ymd} '
        f'to exclude qaqcFlag!=1',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    temperature = _qaqc_filter(ctd_data, 'temperature')
    logger.debug(
        f'filtering ONC {parsed_args.onc_station} salinity data for {ymd} '
        f'to exclude qaqcFlag!=1',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    salinity = _qaqc_filter(ctd_data, 'salinity')
    logger.debug(
        f'creating ONC {parsed_args.onc_station} CTD T&S dataset for {ymd}',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    ds = _create_dataset(parsed_args.onc_station, temperature, salinity)
    dest_dir = Path(config['observations']['ctd data']['dest dir'])
    filepath_tmpl = config['observations']['ctd data']['filepath template']
    nc_filepath = dest_dir / filepath_tmpl.format(
        station=parsed_args.onc_station,
        yyyymmdd=parsed_args.data_date.format('YYYYMMDD'))
    logger.debug(
        f'storing ONC {parsed_args.onc_station} CTD T&S dataset '
        f'for {ymd} as {nc_filepath}',
        extra={
            'data_date': ymd,
            'onc_station': parsed_args.onc_station
        })
    ds.to_netcdf(
        nc_filepath.as_posix(),
        encoding={'time': {
            'units': 'minutes since 1970-01-01 00:00'
        }})
    checklist = {parsed_args.onc_station: nc_filepath.as_posix()}
    return checklist
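# _create_dataset() is defined elsewhere in this module. The sketch below is a
# hedged illustration based on the "*sample_count" variables referenced in the
# netCDF encoding of the first get_onc_ctd() variant above: resample the
# QA/QC-filtered samples to aggregate statistics. The 15 minute interval, the
# variable names, and the "sampleTime" -> "time" rename are assumptions, not the
# module's actual implementation.
import xarray


def _create_dataset_sketch(station, temperature, salinity):
    def aggregate(var, name):
        resampled = var.resample(sampleTime="15min")
        return xarray.Dataset(
            {
                name: resampled.mean(),
                f"{name}_std_dev": resampled.std(),
                f"{name}_sample_count": resampled.count(),
            }
        )

    ds = xarray.merge(
        [aggregate(temperature, "temperature"), aggregate(salinity, "salinity")]
    )
    # Rename to the "time" coordinate that the netCDF encoding above expects
    ds = ds.rename({"sampleTime": "time"})
    ds.attrs["station"] = station
    return ds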