def _describe_dataset(self, input_config: InputConfig) \
        -> DatasetDescriptor:
    opener_id = input_config.opener_id
    store_params = input_config.store_params or {}
    if input_config.store_id:
        # Prefer a (possibly pooled) data store if one is configured ...
        store_instance = get_data_store_instance(
            input_config.store_id,
            store_params=store_params,
            store_pool=self._store_pool
        )
        opener = store_instance.store
    else:
        # ... otherwise fall back to a plain data opener.
        opener = new_data_opener(opener_id)
    try:
        descriptor = opener.describe_data(input_config.data_id,
                                          data_type=DATASET_TYPE)
    except DataStoreError as dse:
        # Translate store errors into client-facing generator errors.
        raise CubeGeneratorError(f'{dse}', status_code=400) from dse
    if not isinstance(descriptor, DatasetDescriptor):
        raise RuntimeError(f'internal error: data store '
                           f'"{input_config.store_id}": '
                           f'expected DatasetDescriptor but got '
                           f'a {type(descriptor)}')
    return descriptor
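# Usage sketch (illustrative, not part of the class above): the same
# describe step via xcube's public data store API. The 'file' store id,
# root path, and data id are hypothetical examples.
#
#     from xcube.core.store import new_data_store
#
#     store = new_data_store('file', root='/path/to/data')
#     descriptor = store.describe_data('demo.zarr')
#     print(descriptor.to_dict())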
def open_cube(self, input_config: InputConfig) -> TransformedCube:
    cube_config = self._cube_config
    cube_params = cube_config.to_dict()
    opener_id = input_config.opener_id
    store_params = input_config.store_params or {}
    open_params = input_config.open_params or {}
    with observe_progress('reading cube', 3) as observer:
        try:
            if input_config.store_id:
                store_instance = get_data_store_instance(
                    input_config.store_id,
                    store_params=store_params,
                    store_pool=self._store_pool)
                store = store_instance.store
                if opener_id is None:
                    opener_id = self._get_opener_id(input_config, store)
                opener = store
                open_params = dict(open_params)
                open_params['opener_id'] = opener_id
            else:
                opener = new_data_opener(opener_id)
                open_params = dict(open_params)
                open_params.update(store_params)
            # Pass on only those cube-config parameters that the
            # opener's schema actually declares.
            open_params_schema = opener.get_open_data_params_schema(
                input_config.data_id)
            dataset_open_params = {
                k: v for k, v in cube_params.items()
                if k in open_params_schema.properties
            }
            observer.worked(1)
            dataset = opener.open_data(input_config.data_id,
                                       **open_params,
                                       **dataset_open_params)
            observer.worked(1)
        except DataStoreError as dse:
            raise CubeGeneratorError(f'{dse}', status_code=400) from dse
        # Turn dataset into cube and grid_mapping
        try:
            cube, gm, _ = decode_cube(dataset, normalize=True)
        except DatasetIsNotACubeError as e:
            raise CubeGeneratorError(f'{e}') from e
        observer.worked(1)
    if dataset_open_params:
        # Parameters already honoured by the opener need not be
        # applied again in later cube transformations.
        drop_names = [
            k for k in dataset_open_params.keys()
            if k not in _STEADY_CUBE_CONFIG_NAMES
        ]
        cube_config = cube_config.drop_props(drop_names)
    return cube, gm, cube_config
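# Note on the decode step above: decode_cube() splits the opened dataset
# into a cube holding only the cube-compliant data variables, its
# GridMapping, and the remaining variables (assuming decode_cube() is
# importable as in this module; the import path may vary across xcube
# versions):
#
#     cube, gm, rest = decode_cube(dataset, normalize=True)
#
# open_cube() keeps the cube and the grid mapping and discards the rest.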
def open_data(self, data_id: str,
              opener_id: str = None,
              **open_params) -> xr.Dataset:
    self._assert_valid_data_id(data_id)
    if not opener_id:
        opener_id = self._get_opener_id(data_id)
    path = self._resolve_data_id_to_path(data_id)
    return new_data_opener(opener_id).open_data(path, **open_params)
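# Usage sketch: opening a dataset through the generic store API rather
# than calling this method directly. The 'file' store id and the data id
# are hypothetical examples.
#
#     from xcube.core.store import new_data_store
#
#     store = new_data_store('file', root='/path/to/data')
#     dataset = store.open_data('demo.zarr')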
def test_data_opener_params_schema(self):
    opener = new_data_opener(SH_DATA_OPENER_ID)
    schema = opener.get_open_data_params_schema('S2L2A')
    self.assertIsInstance(schema, JsonObjectSchema)
    self.assertEqual('object', schema.type)
    self.assertEqual({'time_range', 'spatial_res', 'bbox'},
                     schema.required)
    self.assertIn('time_range', schema.properties)
    self.assertIn('time_period', schema.properties)
    self.assertIn('spatial_res', schema.properties)
    self.assertIn('bbox', schema.properties)
    self.assertIn('crs', schema.properties)
def opener_info(opener_id: str):
    """
    Show data opener information.

    You can obtain valid OPENER names using command "xcube io opener list".
    """
    extension = get_extension_registry().get_extension(
        EXTENSION_POINT_DATA_OPENERS, opener_id)
    description = extension.metadata.get('description')
    if description:
        print(description)
    from xcube.core.store import new_data_opener
    opener_ = new_data_opener(opener_id)
    params_schema = opener_.get_open_data_params_schema()
    print(_format_params_schema(params_schema))
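# Example invocation (assuming this function backs an "xcube io opener
# info" CLI subcommand, as the docstring's sibling command suggests; the
# opener name shown is illustrative):
#
#     $ xcube io opener info dataset:zarr:file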
def get_open_data_params_schema(self,
                                data_id: str = None,
                                opener_id: str = None) -> JsonObjectSchema:
    if not opener_id and data_id:
        opener_id = self._get_opener_id(data_id)
    if not opener_id:
        extensions = find_data_opener_extensions(
            predicate=get_data_accessor_predicate(
                type_specifier='dataset',
                format_id=_DEFAULT_FORMAT_ID,
                storage_id=_STORAGE_ID))
        assert extensions
        opener_id = extensions[0].name
    return new_data_opener(opener_id).get_open_data_params_schema(
        data_id=data_id)
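# Usage sketch: introspecting which open parameters are accepted for a
# given dataset. JsonObjectSchema exposes them via its `properties`
# mapping (the data id is a hypothetical example):
#
#     schema = store.get_open_data_params_schema('demo.zarr')
#     print(sorted(schema.properties))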
def open_cubes(input_configs: Sequence[InputConfig],
               cube_config: CubeConfig,
               store_pool: DataStorePool = None):
    cubes = []
    all_cube_params = cube_config.to_dict()
    with observe_progress('Opening input(s)',
                          len(input_configs)) as progress:
        for input_config in input_configs:
            open_params = {}
            opener_id = input_config.opener_id
            if input_config.store_id:
                store_instance = get_data_store_instance(
                    input_config.store_id,
                    store_params=input_config.store_params,
                    store_pool=store_pool)
                store = store_instance.store
                if opener_id is None:
                    # Let the store pick a suitable cube opener.
                    opener_ids = store.get_data_opener_ids(
                        data_id=input_config.data_id,
                        type_specifier=TYPE_SPECIFIER_CUBE)
                    if not opener_ids:
                        raise DataStoreError(
                            f'Data store "{input_config.store_id}" '
                            f'does not support data cubes')
                    opener_id = opener_ids[0]
                opener = store
                open_params.update(opener_id=opener_id,
                                   **input_config.open_params)
            else:
                opener = new_data_opener(opener_id)
                open_params.update(**input_config.store_params,
                                   **input_config.open_params)
            # Forward only those cube-config parameters that the
            # opener's schema declares.
            open_params_schema = opener.get_open_data_params_schema(
                input_config.data_id)
            cube_params = {
                k: v for k, v in all_cube_params.items()
                if k in open_params_schema.properties
            }
            cube = opener.open_data(input_config.data_id,
                                    **open_params,
                                    **cube_params)
            cubes.append(cube)
            progress.worked(1)
    return cubes
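# A minimal usage sketch for open_cubes(), assuming the InputConfig and
# CubeConfig classes of xcube's cube generator configuration (import
# paths may vary across xcube versions); the store and data ids as well
# as all parameter values are hypothetical:
#
#     from xcube.core.gen2 import CubeConfig, InputConfig
#
#     input_configs = [InputConfig(store_id='@my-store',
#                                  data_id='S2L2A.zarr')]
#     cube_config = CubeConfig(time_range=('2020-01-01', '2020-02-01'),
#                              spatial_res=0.01,
#                              bbox=(10.0, 50.0, 11.0, 51.0))
#     cubes = open_cubes(input_configs, cube_config)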
def test_new_data_opener(self):
    opener = new_data_opener(SH_DATA_OPENER_ID)
    self.assertIsInstance(opener, SentinelHubDataOpener)
def _new_s3_opener(self, opener_id):
    self._assert_not_closed()
    return new_data_opener(opener_id, s3=self._s3)