Example 1
0
 def get(self):
     """Respond with a JSON document describing this server instance."""
     # Millisecond-resolution ISO timestamps: when the configuration file
     # was last modified, and the current wall-clock time.
     config_mtime = datetime.datetime.fromtimestamp(self.service_context.config_mtime)
     config_time = timestamp_to_iso_string(config_mtime, freq="ms")
     server_time = timestamp_to_iso_string(datetime.datetime.now(), freq="ms")
     server_info = {
         'name': SERVER_NAME,
         'description': SERVER_DESCRIPTION,
         'version': version,
         'configTime': config_time,
         'serverTime': server_time,
         'serverPID': os.getpid(),
     }
     self.set_header('Content-Type', 'application/json')
     self.write(json.dumps(server_info, indent=2))
Example 2
0
 def test_it_with_default_res(self):
     """At the default resolution, output has whole-second precision
     (the last case shows sub-second input rounded up to :49)."""
     cases = [
         ("2018-09-05T00:00:00Z", np.datetime64("2018-09-05")),
         ("2018-09-05T10:35:42Z", np.datetime64("2018-09-05 10:35:42")),
         ("2018-09-05T10:35:42Z", np.datetime64("2018-09-05 10:35:42.164")),
         ("2019-10-04T10:13:49Z", pd.to_datetime("04-OCT-2019 10:13:48.538184")),
     ]
     for expected, timestamp in cases:
         self.assertEqual(expected, timestamp_to_iso_string(timestamp))
Example 3
0
def get_time_series_info(ctx: ServiceContext) -> Dict:
    """
    Get time-series meta-information for variables.

    :param ctx: Service context object
    :return: a dictionary with a single entry "layers" which is a list of entries that are
             dictionaries containing a variable's "name", "dates", and "bounds".
    """
    time_series_info = {'layers': []}
    for descriptor in ctx.get_dataset_descriptors():
        # Guard clauses: skip descriptors without an identifier or marked hidden.
        if 'Identifier' not in descriptor or descriptor.get('Hidden'):
            continue
        ds_id = descriptor['Identifier']
        dataset = ctx.get_dataset(ds_id)
        # A time coordinate is required to describe a time series at all.
        if 'time' not in dataset.variables:
            continue
        xmin, ymin, xmax, ymax = get_dataset_bounds(dataset)
        # Convert the time coordinates to ISO strings once per dataset;
        # the same list is shared by all of the dataset's variables.
        time_stamps = [timestamp_to_iso_string(time)
                       for time in dataset.variables['time'].data]
        for var_name in sorted(dataset.data_vars):
            time_series_info['layers'].append({
                'name': f'{ds_id}.{var_name}',
                'dates': time_stamps,
                'bounds': dict(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)
            })
    return time_series_info
Example 4
0
def _collect_ts_result(ts_ds: xr.Dataset,
                       var_name: str,
                       uncert_var_name: str = None,
                       count_var_name: str = None,
                       max_valids: int = None):
    """
    Collect a JSON-serializable time series from dataset *ts_ds*.

    :param ts_ds: time-series dataset; may carry a
        "max_number_of_observations" attribute (defaults to 1 below).
    :param var_name: name of the averaged variable; if not present,
        "<var_name>_mean" is looked up as a fallback.
    :param uncert_var_name: optional name of an uncertainty variable.
    :param count_var_name: optional name of a valid-observation-count variable.
    :param max_valids: None to keep every time step (invalid steps yield a
        null average with validCount 0); -1 to keep only valid steps;
        a positive number to keep at most that many of the newest valid steps.
    :return: dictionary {"results": [...]} where each entry has a "result"
        statistics dict and an ISO "date" string.
    :raise ValueError: if *max_valids* is neither None, -1, nor positive.
    """
    if not (max_valids is None or max_valids == -1 or max_valids > 0):
        raise ValueError('max_valids must be either None, -1 or positive')

    # NOTE(review): dict-style get() evaluates the "<var>_mean" fallback
    # lookup eagerly even when var_name exists — presumably harmless; confirm.
    average_var = ts_ds.get(var_name, ts_ds.get(var_name + '_mean'))
    uncert_var = ts_ds.get(uncert_var_name) if uncert_var_name else None
    count_var = ts_ds.get(count_var_name) if count_var_name else None

    total_count_value = ts_ds.attrs.get('max_number_of_observations', 1)

    num_times = average_var.time.size
    time_series = []

    # With a positive limit we scan from the newest time backwards so the
    # most recent valid entries are collected first.
    pos_max_valids = max_valids is not None and max_valids > 0
    if pos_max_valids:
        time_indexes = range(num_times - 1, -1, -1)
    else:
        time_indexes = range(num_times)

    # Pull the numpy arrays out of the DataArrays once, before the loop.
    average_values = average_var.values
    count_values = count_var.values if count_var is not None else None
    uncert_values = uncert_var.values if uncert_var is not None else None

    for time_index in time_indexes:
        # Stop once the requested number of entries has been collected.
        if len(time_series) == max_valids:
            break

        average_value = _get_float_value(average_values, time_index)
        if average_value is None:
            # Invalid step: skip it entirely when any limit (-1 or positive)
            # was requested; otherwise emit it with a zero valid count.
            if max_valids is not None:
                continue
            count_value = 0
        else:
            count_value = int(
                count_values[time_index]) if count_values is not None else 1

        statistics = {
            'average': average_value,
            'validCount': count_value,
            'totalCount': total_count_value
        }
        if uncert_values is not None:
            statistics['uncertainty'] = _get_float_value(
                uncert_values, time_index)

        time_series.append(
            dict(result=statistics,
                 date=timestamp_to_iso_string(
                     average_var.time[time_index].values)))

    # The backwards scan collected newest-first; restore chronological order.
    if pos_max_valids:
        return {'results': time_series[::-1]}
    else:
        return {'results': time_series}
Example 5
0
def _collect_ts_result(ts_ds: xr.Dataset,
                       var_name: str,
                       uncert_var_name: str = None,
                       count_var_name: str = None,
                       max_valids: int = None):
    """
    Collect a JSON-serializable time series for variable *var_name* of
    dataset *ts_ds*, optionally with uncertainty and valid-count variables.

    *max_valids* may be None (keep all steps, gaps become null averages),
    -1 (keep only valid steps), or positive (keep at most that many of the
    newest valid steps); any other value raises ValueError.
    """
    if not (max_valids is None or max_valids == -1 or max_valids > 0):
        raise ValueError('max_valids must be either None, -1 or positive')

    main_var = ts_ds[var_name]
    uncert_var = ts_ds[uncert_var_name] if uncert_var_name else None
    count_var = ts_ds[count_var_name] if count_var_name else None

    total_count = ts_ds.attrs.get('max_number_of_observations', 1)

    num_times = main_var.time.size
    collected = []

    # A positive limit means "newest entries win", so walk backwards then.
    limit_is_positive = max_valids is not None and max_valids > 0
    index_order = (range(num_times - 1, -1, -1) if limit_is_positive
                   else range(num_times))

    for t in index_order:
        if len(collected) == max_valids:
            break

        average = float(main_var[t])
        if np.isnan(average):
            # Gap: drop it when any limit was requested, otherwise report
            # a null average with no valid observations.
            if max_valids is not None:
                continue
            statistics = dict(average=None,
                              validCount=0,
                              totalCount=total_count)
        else:
            valid_count = int(count_var[t]) if count_var is not None else 1
            statistics = dict(average=average,
                              validCount=valid_count,
                              totalCount=total_count)

        if uncert_var is not None:
            uncertainty = float(uncert_var[t])
            # TODO (forman): agree with Dirk on how we call provided uncertainty
            statistics['uncertainty'] = (None if np.isnan(uncertainty)
                                         else float(uncertainty))

        collected.append(
            dict(result=statistics,
                 date=timestamp_to_iso_string(main_var.time[t].values)))

    # Restore chronological order after a backwards scan.
    return {'results': collected[::-1] if limit_is_positive else collected}
Example 6
0
def _collect_timeseries_result(time_series_ds: xr.Dataset,
                               key_to_var_names: Dict[str, str],
                               max_valids: int = None) -> TimeSeries:
    """
    Collect a JSON-serializable time series from *time_series_ds*.

    :param time_series_ds: dataset with a "time" coordinate; may carry a
        "max_number_of_observations" attribute (defaults to 1 below).
    :param key_to_var_names: maps each output key to a dataset variable name;
        a "count" key, if present, marks the valid-observation count.
    :param max_valids: None to keep every time step (non-finite values become
        null); -1 to keep only steps with observations; positive to keep at
        most that many of the newest observed steps.
    :return: list of per-time-step dicts, each with the mapped keys plus
        "time" (ISO string) and, when "count" is mapped, "count_tot".
    :raise ValueError: if *max_valids* is invalid, or a variable's dtype is
        not float, integer, or bool.
    """
    if not (max_valids is None or max_valids == -1 or max_valids > 0):
        raise ValueError('max_valids must be either None, -1 or positive')

    # Fix: don't shadow the builtin `vars`, and hoist the `.values` array
    # extraction out of the time loop — it was re-executed for every
    # variable at every time step.
    key_to_values = {
        key: time_series_ds[var_name].values
        for key, var_name in key_to_var_names.items()
    }
    time = time_series_ds.time
    max_number_of_observations = time_series_ds.attrs.get(
        'max_number_of_observations', 1)
    num_times = time.size
    time_series = []

    # A positive limit keeps the newest entries, so scan backwards then.
    max_valids_is_pos = max_valids is not None and max_valids > 0
    if max_valids_is_pos:
        time_indexes = range(num_times - 1, -1, -1)
    else:
        time_indexes = range(num_times)

    for time_index in time_indexes:

        if len(time_series) == max_valids:
            break

        time_series_value = dict()
        all_null = True
        for key, var_values in key_to_values.items():
            var_value = var_values[time_index]
            if np.isfinite(var_value):
                all_null = False
                # Convert numpy scalars into plain Python types for JSON.
                if np.issubdtype(var_value.dtype, np.floating):
                    var_value = float(var_value)
                elif np.issubdtype(var_value.dtype, np.integer):
                    var_value = int(var_value)
                elif np.issubdtype(var_value.dtype, np.dtype(bool)):
                    var_value = bool(var_value)
                else:
                    raise ValueError(
                        f'cannot convert {var_value.dtype} into JSON-convertible value'
                    )
            else:
                var_value = None
            time_series_value[key] = var_value

        # Steps without observations are dropped when any limit is requested.
        has_count = 'count' in time_series_value
        no_obs = all_null or (has_count and time_series_value['count'] == 0)
        if no_obs and max_valids is not None:
            continue

        time_series_value['time'] = timestamp_to_iso_string(
            time[time_index].values)
        if has_count:
            time_series_value['count_tot'] = max_number_of_observations

        time_series.append(time_series_value)

    if max_valids_is_pos:
        # Restore chronological order after the backwards scan.
        time_series = time_series[::-1]

    return time_series