Example 1
0
    def get_metadata(
            self, keys: Union[Sequence[str], Mapping[str,
                                                     str]]) -> Dict[str, Any]:
        """Fetch the stored metadata row for the given keys.

        If no row exists yet, the dataset path is looked up, its metadata is
        computed and inserted (lazy loading), and the query is retried.
        """
        keys = tuple(self._key_dict_to_sequence(keys))

        if len(keys) != len(self.key_names):
            raise exceptions.InvalidKeyError(
                f'Got wrong number of keys (available keys: {self.key_names})')

        conn = self._connection

        where_string = ' AND '.join(f'{key}=?' for key in self.key_names)
        query = f'SELECT * FROM metadata WHERE {where_string}'
        row = conn.execute(query, keys).fetchone()

        if not row:  # support lazy loading
            key_dict = dict(zip(self.key_names, keys))
            filepath = self.get_datasets(key_dict, page=0, limit=1)
            if not filepath:
                raise exceptions.DatasetNotFoundError(
                    f'No dataset found for given keys {keys}')

            # compute metadata and try again
            metadata = self.compute_metadata(
                filepath[keys], max_shape=self.LAZY_LOADING_MAX_SHAPE)
            self.insert(keys, filepath[keys], metadata=metadata)
            row = conn.execute(query, keys).fetchone()

        assert row

        data_columns, _ = zip(*self._METADATA_COLUMNS)
        encoded_data = {
            col: row[col]
            for col in self.key_names + data_columns
        }
        return self._decode_data(encoded_data)
Example 2
0
    def get_metadata(
            self, keys: Union[Sequence[str], Mapping[str,
                                                     str]]) -> Dict[str, Any]:
        """Return all stored metadata for the given keys.

        If the metadata row is missing, the dataset is located, its metadata
        is computed and inserted (lazy loading), and the query is retried.

        Raises:
            exceptions.InvalidKeyError: wrong number of keys supplied.
            exceptions.DatasetNotFoundError: no dataset matches the keys.
        """
        keys = tuple(self._key_dict_to_sequence(keys))

        if len(keys) != len(self.key_names):
            # include the available key names so callers can correct their
            # request; consistent with the other store implementations
            raise exceptions.InvalidKeyError(
                f'Got wrong number of keys (available keys: {self.key_names})')

        cursor = self._cursor

        where_string = ' AND '.join([f'{key}=%s' for key in self.key_names])
        cursor.execute(f'SELECT * FROM metadata WHERE {where_string}', keys)
        row = cursor.fetchone()

        if not row:  # support lazy loading
            filepath = self.get_datasets(dict(zip(self.key_names, keys)))
            if not filepath:
                raise exceptions.DatasetNotFoundError(
                    f'No dataset found for given keys {keys}')
            assert len(filepath) == 1

            # compute metadata and try again
            self.insert(keys, filepath[keys], skip_metadata=False)
            cursor.execute(f'SELECT * FROM metadata WHERE {where_string}',
                           keys)
            row = cursor.fetchone()

        assert row

        data_columns, _ = zip(*self._METADATA_COLUMNS)
        encoded_data = {col: row[col] for col in self.key_names + data_columns}
        return self._decode_data(encoded_data)
Example 3
0
    def get_metadata(self, keys: ExtendedKeysType) -> Dict[str, Any]:
        """Return all stored metadata for given keys.

        Arguments:

            keys: Keys of the requested dataset, either as a sequence of key
                values or as a mapping ``{key_name: key_value}``.

        Returns:

            A :class:`dict` containing

            - ``range``: global minimum and maximum value in dataset
            - ``bounds``: physical bounds covered by dataset in latitude-longitude projection
            - ``convex_hull``: GeoJSON shape specifying total data coverage in latitude-longitude
              projection
            - ``percentiles``: array of pre-computed percentiles from 1% through 99%
            - ``mean``: global mean
            - ``stdev``: global standard deviation
            - ``metadata``: any additional client-relevant metadata

        """
        keys = self._standardize_keys(keys)

        metadata = self.meta_store.get_metadata(keys)
        if metadata is not None:
            return metadata

        # metadata is not computed yet, trigger lazy loading
        dataset = self.get_datasets(keys)
        if not dataset:
            raise exceptions.DatasetNotFoundError('No dataset found')

        path = squeeze(dataset.values())
        metadata = self.compute_metadata(
            path, max_shape=self.LAZY_LOADING_MAX_SHAPE)

        try:
            self.insert(keys, path, metadata=metadata)
        except exceptions.DatabaseNotWritableError as exc:
            raise exceptions.DatabaseNotWritableError(
                "Lazy loading requires a writable database") from exc

        # re-read so output types and floating point precision are
        # consistent with the non-lazy path
        metadata = self.meta_store.get_metadata(keys)
        assert metadata is not None
        return metadata
Example 4
0
    def delete(self, keys: Union[Sequence[str], Mapping[str, str]]) -> None:
        """Delete the dataset identified by ``keys`` and its metadata."""
        cursor = self._cursor

        if len(keys) != len(self.key_names):
            raise exceptions.InvalidKeyError(
                f'Got wrong number of keys (available keys: {self.key_names})')

        keys = self._key_dict_to_sequence(keys)

        # verify the dataset exists before touching either table
        if not self.get_datasets(dict(zip(self.key_names, keys))):
            raise exceptions.DatasetNotFoundError(
                f'No dataset found with keys {keys}')

        where_string = ' AND '.join(f'{key}=%s' for key in self.key_names)
        cursor.execute(f'DELETE FROM datasets WHERE {where_string}', keys)
        cursor.execute(f'DELETE FROM metadata WHERE {where_string}', keys)
    def delete(self, keys: KeysType) -> None:
        """Delete the dataset matching ``keys`` from the datasets and metadata tables."""
        if not self.get_datasets(keys):
            raise exceptions.DatasetNotFoundError(f'No dataset found with keys {keys}')

        # reflect both tables from the live database schema
        tables = (
            sqla.Table('datasets', self.sqla_metadata, autoload_with=self.sqla_engine),
            sqla.Table('metadata', self.sqla_metadata, autoload_with=self.sqla_engine),
        )

        for table in tables:
            conditions = [
                table.c.get(column) == value for column, value in keys.items()
            ]
            self.connection.execute(table.delete().where(*conditions))