def get_schema(cls) -> JsonObjectSchema:
    """Get the JSON object schema: non-negative counters, of which
    only 'required' must be present."""
    counter = JsonIntegerSchema(minimum=0)
    schema_props = {
        'required': counter,
        'available': JsonIntegerSchema(minimum=0),
        'limit': JsonIntegerSchema(minimum=0),
    }
    return JsonObjectSchema(
        properties=schema_props,
        required=['required'],
        additional_properties=False,
        factory=cls,
    )
def get_schema(cls) -> JsonObjectSchema:
    """Get the JSON object schema for a status-like record.

    Every declared property is nullable, and additional properties
    are permitted both on the record and on each condition entry.
    """
    condition_schema = JsonObjectSchema(additional_properties=True)
    return JsonObjectSchema(
        properties=dict(
            succeeded=JsonIntegerSchema(nullable=True),
            failed=JsonIntegerSchema(nullable=True),
            active=JsonIntegerSchema(nullable=True),
            start_time=JsonStringSchema(nullable=True),
            completion_time=JsonStringSchema(nullable=True),
            conditions=JsonArraySchema(items=condition_schema,
                                       nullable=True),
        ),
        additional_properties=True,
        factory=cls,
    )
def test_base_props_validated(self):
    """Constructor must reject zero or multiple combinator arguments."""
    expected_msg = 'exactly one of one_of, any_of, all_of must be given'
    # No combinator given at all.
    with self.assertRaises(ValueError) as cm:
        JsonComplexSchema()
    self.assertEqual(expected_msg, f'{cm.exception}')
    # Two combinators given at once.
    with self.assertRaises(ValueError) as cm:
        JsonComplexSchema(
            one_of=[JsonStringSchema(), JsonIntegerSchema()],
            all_of=[JsonStringSchema(), JsonIntegerSchema()],
        )
    self.assertEqual(expected_msg, f'{cm.exception}')
def test_from_json_object_object(self):
    # Nested object schema: a person schema embedded in an outer schema.
    person_schema = JsonObjectSchema(
        properties=dict(name=JsonStringSchema(),
                        age=JsonIntegerSchema(),
                        deleted=JsonBooleanSchema(default=False)))
    schema = JsonObjectSchema(properties=dict(person=person_schema))
    value = {'person': {'name': 'Bibo', 'age': 15}}
    # Without factories, the missing 'deleted' is filled from its default
    # and plain dicts are returned.
    self.assertEqual(
        {'person': {
            'name': 'Bibo',
            'age': 15,
            'deleted': False
        }},
        schema.from_instance(value))
    # With only the outer factory set, the outer object is converted,
    # while the nested person stays a plain dict.
    Assignment = namedtuple('Assignment', ['person'])
    schema.factory = Assignment
    self.assertEqual(
        Assignment(person={
            'name': 'Bibo',
            'age': 15,
            'deleted': False
        }),
        schema.from_instance(value))
    # With both factories set, the nested dict becomes a Person too.
    Person = namedtuple('Person', ['name', 'age', 'deleted'])
    person_schema.factory = Person
    self.assertEqual(
        Assignment(person=Person(name='Bibo', age=15, deleted=False)),
        schema.from_instance(value))
def test_to_json_object(self):
    """Instances convert to JSON objects, with and without a serializer."""
    person_schema = JsonObjectSchema(
        properties=dict(
            name=JsonStringSchema(),
            age=JsonIntegerSchema(),
            deleted=JsonBooleanSchema(default=False),
        )
    )
    value = {'name': 'Bibo', 'age': 12, 'deleted': True}
    self.assertEqual(value, person_schema.to_instance(value))

    # Extra properties pass through, because person_schema does not
    # explicitly say additional_properties=False.
    value_extra = {
        'name': 'Bibo',
        'age': 12,
        'deleted': True,
        'comment': 'Hello!'
    }
    self.assertEqual(value_extra, person_schema.to_instance(value_extra))

    Person = namedtuple('Person', ['name', 'age', 'deleted'])

    def person_to_dict(person: Person) -> Dict[str, Any]:
        return person._asdict()

    # With a serializer installed, non-dict instances are converted first.
    person_schema.serializer = person_to_dict
    person = Person(**value)
    self.assertEqual(value, person_schema.to_instance(person))
def get_data_store_params_schema(cls) -> JsonObjectSchema:
    """Describe the parameters accepted by this data store:
    a root path, an optional maximum depth, and a read-only flag."""
    store_params = {
        'root': JsonStringSchema(default=''),
        'max_depth': JsonIntegerSchema(nullable=True, default=1),
        'read_only': JsonBooleanSchema(default=False),
    }
    return JsonObjectSchema(properties=store_params,
                            additional_properties=False)
def test_from_instance_tuple(self):
    """A positional (tuple-style) items list validates element-wise."""
    item_schemas = [
        JsonBooleanSchema(),
        JsonIntegerSchema(),
        JsonStringSchema(),
    ]
    array_schema = JsonArraySchema(items=item_schemas)
    self.assertEqual([False, 2, 'U'],
                     array_schema.from_instance([False, 2, 'U']))
def test_to_dict(self):
    """Numeric schemas serialize their constraints with camel-case keys."""
    # Plain and explicitly non-nullable schemas both yield a single type.
    self.assertEqual({'type': 'number'}, JsonNumberSchema().to_dict())
    self.assertEqual({'type': 'number'},
                     JsonNumberSchema(nullable=False).to_dict())
    # Nullable schemas yield a two-element type list.
    self.assertEqual({'type': ['number', 'null']},
                     JsonNumberSchema(nullable=True).to_dict())
    number_schema = JsonNumberSchema(exclusive_minimum=0,
                                     maximum=100,
                                     multiple_of=10)
    self.assertEqual(
        {
            'type': 'number',
            'exclusiveMinimum': 0,
            'maximum': 100,
            'multipleOf': 10
        },
        number_schema.to_dict())
    integer_schema = JsonIntegerSchema(minimum=100,
                                       exclusive_maximum=200,
                                       multiple_of=20)
    self.assertEqual(
        {
            'type': 'integer',
            'minimum': 100,
            'exclusiveMaximum': 200,
            'multipleOf': 20
        },
        integer_schema.to_dict())
def get_schema(cls) -> JsonObjectSchema:
    """Extend the parent schema with a mandatory num_levels property."""
    schema = super().get_schema()
    # num_levels must be a positive integer and is always required.
    schema.properties.update(num_levels=JsonIntegerSchema(minimum=1))
    schema.required.append('num_levels')
    schema.additional_properties = False
    schema.factory = cls
    return schema
def test_from_json_object_additional_properties_is_schema(self):
    """additional_properties may itself be a schema applied to every value."""
    Person = namedtuple('Person', ['name', 'age', 'deleted'])
    person_schema = JsonObjectSchema(
        properties=dict(name=JsonStringSchema(),
                        age=JsonIntegerSchema(),
                        deleted=JsonBooleanSchema(default=False)),
        factory=Person,
    )
    schema = JsonObjectSchema(additional_properties=person_schema)
    value = {
        'p1': {'name': 'Bibo', 'age': 15, 'deleted': True},
        'p2': {'name': 'Ernie', 'age': 12, 'deleted': False},
    }
    # Each additional property's value is converted via the factory.
    expected = {
        'p1': Person(name='Bibo', age=15, deleted=True),
        'p2': Person(name='Ernie', age=12, deleted=False),
    }
    self.assertEqual(expected, schema.from_instance(value))
def get_process_params_schema(cls) -> JsonObjectSchema:
    """Describe the process parameters: a non-empty name and a value >= 1,
    both required."""
    name_schema = JsonStringSchema(min_length=1)
    value_schema = JsonIntegerSchema(minimum=1)
    return JsonObjectSchema(
        properties=dict(name=name_schema, value=value_schema),
        required=['name', 'value'],
        additional_properties=False,
    )
def get_data_store_params_schema(cls) -> JsonObjectSchema:
    """Describe the parameters accepted by the CCI ODP data store:
    endpoints, warning output, and retry behaviour."""
    params = {
        'endpoint_url': JsonStringSchema(default=OPENSEARCH_CEDA_URL),
        'endpoint_description_url': JsonStringSchema(default=CCI_ODD_URL),
        'enable_warnings': JsonBooleanSchema(
            default=False,
            title='Whether to output warnings'),
        'num_retries': JsonIntegerSchema(
            default=DEFAULT_NUM_RETRIES,
            minimum=0,
            title='Number of retries when requesting data fails'),
        'retry_backoff_max': JsonIntegerSchema(
            default=DEFAULT_RETRY_BACKOFF_MAX,
            minimum=0),
        'retry_backoff_base': JsonNumberSchema(
            default=DEFAULT_RETRY_BACKOFF_BASE,
            exclusive_minimum=1.0),
        'user_agent': JsonStringSchema(default=None),
    }
    return JsonObjectSchema(properties=params,
                            required=None,
                            additional_properties=False)
def get_delete_data_params_schema(self, data_id: str = None) \
        -> JsonObjectSchema:
    """Describe the parameters accepted when deleting data.

    The data_id argument is part of the store interface; it does not
    influence the returned schema here.
    """
    delete_params = dict(
        recursive=JsonBooleanSchema(),
        maxdepth=JsonIntegerSchema(),
        storage_options=self.get_storage_options_schema(),
    )
    return JsonObjectSchema(properties=delete_params,
                            additional_properties=False)
def _get_open_data_params_schema(self, dsd: DatasetDescriptor = None
                                 ) -> JsonObjectSchema:
    """Build the JSON schema describing the open-data parameters.

    Args:
        dsd: Optional dataset descriptor used to constrain the schema
            (variable name enum, time range bounds).

    Returns:
        A JsonObjectSchema covering cube, cache and (when not yet
        connected) store parameters.
    """
    # Fix: guard against dsd being None before dereferencing it; the
    # original accessed dsd.time_range unconditionally although the
    # parameter defaults to None (and is checked for truthiness below).
    if dsd is not None and dsd.time_range is not None:
        min_date, max_date = dsd.time_range
    else:
        min_date, max_date = None, None
    cube_params = dict(
        dataset_name=JsonStringSchema(min_length=1),
        variable_names=JsonArraySchema(items=JsonStringSchema(
            enum=[v.name for v in dsd.data_vars]
            if dsd and dsd.data_vars else None)),
        variable_units=JsonArraySchema(),
        variable_sample_types=JsonArraySchema(),
        tile_size=JsonArraySchema(
            items=(JsonNumberSchema(minimum=1, maximum=2500,
                                    default=DEFAULT_TILE_SIZE),
                   JsonNumberSchema(minimum=1, maximum=2500,
                                    default=DEFAULT_TILE_SIZE)),
            default=(DEFAULT_TILE_SIZE, DEFAULT_TILE_SIZE)),
        crs=JsonStringSchema(default=DEFAULT_CRS, enum=AVAILABLE_CRS_IDS),
        bbox=JsonArraySchema(items=(JsonNumberSchema(),
                                    JsonNumberSchema(),
                                    JsonNumberSchema(),
                                    JsonNumberSchema())),
        spatial_res=JsonNumberSchema(exclusive_minimum=0.0),
        time_range=JsonDateSchema.new_range(min_date=min_date,
                                            max_date=max_date),
        # TODO: add pattern
        time_period=JsonStringSchema(
            default='1D', nullable=True,
            enum=[None, *map(lambda n: f'{n}D', range(1, 14)), '1W', '2W']),
        # Fix: the regex belongs in 'pattern', not 'format' — 'format'
        # is reserved for named JSON Schema formats like 'date-time'
        # (other schemas in this code base use 'pattern' for regexes).
        time_tolerance=JsonStringSchema(default=DEFAULT_TIME_TOLERANCE,
                                        pattern='^([1-9]*[0-9]*)[NULSTH]$'),
        collection_id=JsonStringSchema(),
        four_d=JsonBooleanSchema(default=False),
    )
    cache_params = dict(max_cache_size=JsonIntegerSchema(minimum=0))
    # Parameters callers must always provide.
    required = [
        'bbox',
        'spatial_res',
        'time_range',
    ]
    sh_params = {}
    if self._sentinel_hub is None:
        # If we are NOT connected to the API (yet),
        # we also include store parameters.
        sh_schema = SentinelHubDataStore.get_data_store_params_schema()
        sh_params = sh_schema.properties
        required.extend(sh_schema.required or [])
    return JsonObjectSchema(properties=dict(**sh_params,
                                            **cube_params,
                                            **cache_params),
                            required=required)
def get_schema(cls) -> JsonObjectSchema:
    """Get the JSON schema for a variable descriptor: name, dtype and
    dims are mandatory; chunks and attrs are optional."""
    props = dict(
        name=JsonStringSchema(min_length=1),
        dtype=JsonStringSchema(min_length=1),
        dims=JsonArraySchema(items=JsonStringSchema(min_length=1)),
        chunks=JsonArraySchema(items=JsonIntegerSchema(minimum=0)),
        attrs=JsonObjectSchema(additional_properties=True),
    )
    return JsonObjectSchema(properties=props,
                            required=['name', 'dtype', 'dims'],
                            additional_properties=False,
                            factory=cls)
def get_data_store_params_schema(cls) -> JsonObjectSchema:
    """Describe the parameters accepted by the CDS data store."""
    # For now, let the CDS API use defaults or environment variables
    # for most parameters; only retry count is configurable here.
    properties = {
        'normalize_names': JsonBooleanSchema(default=False),
        'num_retries': JsonIntegerSchema(default=DEFAULT_NUM_RETRIES,
                                         minimum=0),
    }
    return JsonObjectSchema(properties=properties,
                            required=None,
                            additional_properties=False)
def get_data_store_params_schema(cls) -> JsonObjectSchema:
    """Describe the parameters accepted by the SENTINEL Hub data store:
    API credentials, endpoints, warnings, error policy and retries."""
    sh_params = dict(
        client_id=JsonStringSchema(
            title='SENTINEL Hub API client identifier',
            description=
            'Preferably set by environment variable SH_CLIENT_ID'),
        client_secret=JsonStringSchema(
            title='SENTINEL Hub API client secret',
            description=
            'Preferably set by environment variable SH_CLIENT_SECRET'),
        api_url=JsonStringSchema(default=DEFAULT_SH_API_URL,
                                 title='SENTINEL Hub API URL'),
        oauth2_url=JsonStringSchema(
            default=DEFAULT_SH_OAUTH2_URL,
            title='SENTINEL Hub API authorisation URL'),
        enable_warnings=JsonBooleanSchema(
            default=False,
            title='Whether to output warnings'),
        error_policy=JsonStringSchema(
            default='fail',
            enum=['fail', 'warn', 'ignore'],
            title='Policy for errors while requesting data'),
        num_retries=JsonIntegerSchema(
            default=DEFAULT_NUM_RETRIES,
            minimum=0,
            title='Number of retries when requesting data fails'),
        retry_backoff_max=JsonIntegerSchema(
            default=DEFAULT_RETRY_BACKOFF_MAX, minimum=0),
        retry_backoff_base=JsonNumberSchema(
            default=DEFAULT_RETRY_BACKOFF_BASE, exclusive_minimum=1.0),
    )
    # Credentials are only marked required when no defaults exist.
    # NOTE(review): the outer check uses truthiness while the inner
    # checks use 'is None' — an empty-string default would produce
    # required == [] with neither credential listed. Confirm that this
    # asymmetry is intended.
    required = None
    if not DEFAULT_CLIENT_ID or not DEFAULT_CLIENT_SECRET:
        required = []
        if DEFAULT_CLIENT_ID is None:
            required.append('client_id')
        if DEFAULT_CLIENT_SECRET is None:
            required.append('client_secret')
    return JsonObjectSchema(properties=sh_params,
                            required=required,
                            additional_properties=False)
def test_from_json_object_array_object(self):
    # Schema for a single person; 'deleted' has a default value.
    person_schema = JsonObjectSchema(
        properties=dict(name=JsonStringSchema(),
                        age=JsonIntegerSchema(),
                        deleted=JsonBooleanSchema(default=False)))
    # Outer schema: 'persons' is an array of person objects.
    schema = JsonObjectSchema(properties=dict(persons=JsonArraySchema(
        items=person_schema)))
    value = {
        'persons': [{
            'name': 'Bibo',
            'age': 15
        }, {
            'name': 'Ernie',
            'age': 12
        }]
    }
    # Without factories: each array element gets its default filled in.
    self.assertEqual(
        {
            'persons': [{
                'name': 'Bibo',
                'age': 15,
                'deleted': False
            }, {
                'name': 'Ernie',
                'age': 12,
                'deleted': False
            }]
        },
        schema.from_instance(value))
    # With only the outer factory: elements remain plain dicts.
    Assignment = namedtuple('Assignment', ['persons'])
    schema.factory = Assignment
    self.assertEqual(
        Assignment(persons=[{
            'name': 'Bibo',
            'age': 15,
            'deleted': False
        }, {
            'name': 'Ernie',
            'age': 12,
            'deleted': False
        }]),
        schema.from_instance(value))
    # With the item factory set too: each element becomes a Person.
    Person = namedtuple('Person', ['name', 'age', 'deleted'])
    person_schema.factory = Person
    self.assertEqual(
        Assignment(persons=[
            Person(name='Bibo', age=15, deleted=False),
            Person(name='Ernie', age=12, deleted=False)
        ]),
        schema.from_instance(value))
def get_schema(cls) -> JsonObjectSchema:
    """Get the JSON schema for result/status objects; only 'status'
    is required, extra properties are allowed."""
    properties = dict(
        status=JsonStringSchema(enum=STATUS_IDS),
        status_code=JsonIntegerSchema(),
        result=cls.get_result_schema(),
        message=JsonStringSchema(),
        output=JsonArraySchema(items=JsonStringSchema()),
        traceback=JsonArraySchema(items=JsonStringSchema()),
        versions=JsonObjectSchema(additional_properties=True),
    )
    return JsonObjectSchema(properties=properties,
                            required=['status'],
                            additional_properties=True,
                            factory=cls)
def get_schema(cls):
    # JSON schema for cube configuration parameters.
    # Every property is nullable; no extra properties are allowed.
    return JsonObjectSchema(properties=dict(
        # Optional subset of variable names; an empty list is allowed.
        variable_names=JsonArraySchema(
            nullable=True,
            items=JsonStringSchema(min_length=1),
            min_items=0),
        crs=JsonStringSchema(nullable=True, min_length=1),
        # Bounding box of four numbers.
        # NOTE(review): presumably [x1, y1, x2, y2] — confirm ordering.
        bbox=JsonArraySchema(nullable=True,
                             items=[
                                 JsonNumberSchema(),
                                 JsonNumberSchema(),
                                 JsonNumberSchema(),
                                 JsonNumberSchema()
                             ]),
        spatial_res=JsonNumberSchema(nullable=True,
                                     exclusive_minimum=0.0),
        # Tile size as a two-element pair, each within 1..2500.
        tile_size=JsonArraySchema(nullable=True,
                                  items=[
                                      JsonIntegerSchema(minimum=1,
                                                        maximum=2500),
                                      JsonIntegerSchema(minimum=1,
                                                        maximum=2500),
                                  ]),
        time_range=JsonDateSchema.new_range(nullable=True),
        # Period string: optional count followed by D/W/M/Y, e.g. '1D'.
        time_period=JsonStringSchema(nullable=True,
                                     pattern=r'^([1-9][0-9]*)?[DWMY]$'),
        # Chunk sizes keyed by dimension name; values may be null.
        # NOTE(review): semantics of a null chunk size not visible
        # here — confirm against the consumer.
        chunks=JsonObjectSchema(nullable=True,
                                additional_properties=JsonIntegerSchema(
                                    nullable=True, minimum=1)),
        # Arbitrary dataset-level metadata.
        metadata=JsonObjectSchema(nullable=True,
                                  additional_properties=True),
        # Per-variable metadata: maps a variable name to an
        # arbitrary attribute object.
        variable_metadata=JsonObjectSchema(
            nullable=True,
            additional_properties=JsonObjectSchema(
                additional_properties=True)),
    ),
        additional_properties=False,
        factory=cls)
def get_schema(cls) -> JsonObjectSchema:
    """Extend the parent schema with dataset-descriptor properties
    (dimensions, resolution, coordinates, data variables, attributes)."""
    schema = super().get_schema()
    schema.properties.update(
        dims=JsonObjectSchema(
            additional_properties=JsonIntegerSchema(minimum=0)),
        spatial_res=JsonNumberSchema(exclusive_minimum=0.0),
        coords=JsonObjectSchema(
            additional_properties=VariableDescriptor.get_schema()),
        data_vars=JsonObjectSchema(
            additional_properties=VariableDescriptor.get_schema()),
        attrs=JsonObjectSchema(additional_properties=True),
    )
    # Only identifier and type are mandatory.
    schema.required = ['data_id', 'data_type']
    schema.additional_properties = False
    schema.factory = cls
    return schema
def get_write_data_params_schema(self) -> JsonObjectSchema: schema = super().get_write_data_params_schema() # creates deep copy # TODO: remove use_saved_levels, instead see #619 schema.properties['use_saved_levels'] = JsonBooleanSchema( description='Whether to open an already saved level' ' and downscale it then.' ' May be used to avoid computation of' ' entire Dask graphs at each level.', default=False, ) schema.properties['base_dataset_id'] = JsonStringSchema( description='If given, avoids writing the base dataset' ' at level 0. Instead a file "{data_id}/0.link"' ' is created whose content is the given base dataset' ' identifier.', ) schema.properties['tile_size'] = JsonIntegerSchema( description='Tile size to be used for all levels of the' ' written multi-level dataset.', ) return schema
def test_process_kwargs_subset(self):
    # Schema mixing defaults ('client_id', 'time_period'), a const
    # ('crs'), and required vs. optional properties.
    schema = JsonObjectSchema(
        properties=dict(
            client_id=JsonStringSchema(default='bibo'),
            client_secret=JsonStringSchema(default='2w908435t'),
            geom=JsonStringSchema(),
            crs=JsonStringSchema(const='WGS84'),
            spatial_res=JsonNumberSchema(),
            time_range=JsonStringSchema(),
            time_period=JsonStringSchema(default='8D'),
            max_cache_size=JsonIntegerSchema(),
        ),
        required=[
            'client_id', 'client_secret', 'geom', 'crs', 'spatial_res',
            'time_range'
        ],
    )
    kwargs = dict(client_secret='094529g',
                  geom='POINT (12.2, 53.9)',
                  spatial_res=0.5,
                  time_range='2010,2014',
                  max_cache_size=2**32)
    # First extraction: the missing 'client_id' is filled from its
    # default, and both credential keys are removed from kwargs.
    cred_kwargs, kwargs = schema.process_kwargs_subset(
        kwargs, ['client_id', 'client_secret'])
    self.assertEqual(dict(client_id='bibo', client_secret='094529g'),
                     cred_kwargs)
    self.assertEqual(
        dict(geom='POINT (12.2, 53.9)',
             spatial_res=0.5,
             time_range='2010,2014',
             max_cache_size=2**32), kwargs)
    # Second extraction: 'crs' is filled from its const value, while
    # 'time_period' (absent and not required) is NOT filled from its
    # default — presumably defaults apply to required properties only;
    # confirm against process_kwargs_subset.
    ds_kwargs, kwargs = schema.process_kwargs_subset(
        kwargs, ['geom', 'crs', 'spatial_res', 'time_range', 'time_period'])
    self.assertEqual(
        dict(crs='WGS84',
             geom='POINT (12.2, 53.9)',
             spatial_res=0.5,
             time_range='2010,2014'), ds_kwargs)
    # Anything not consumed stays in the remaining kwargs.
    self.assertEqual(dict(max_cache_size=2**32), kwargs)
def test_from_json_object(self):
    """Objects convert from JSON with defaults and optional factory."""
    value = {'name': 'Bibo', 'age': 12, 'deleted': True}
    person_schema = JsonObjectSchema(
        properties=dict(name=JsonStringSchema(),
                        age=JsonIntegerSchema(),
                        deleted=JsonBooleanSchema(default=False)))
    # A complete instance passes through unchanged.
    self.assertEqual(value, person_schema.from_instance(value))
    # A missing property is filled from its schema default.
    partial = {'name': 'Bibo', 'age': 12}
    self.assertEqual({'name': 'Bibo', 'age': 12, 'deleted': False},
                     person_schema.from_instance(partial))
    # With a factory set, instances become factory objects.
    Person = namedtuple('Person', ['name', 'age', 'deleted'])
    person_schema.factory = Person
    self.assertEqual(Person(name='Bibo', age=12, deleted=True),
                     person_schema.from_instance(value))
def test_to_dict(self):
    """Each combinator serializes under its camel-case JSON keyword."""
    expected_sub_schemas = [{
        'multipleOf': 5,
        'type': 'integer'
    }, {
        'multipleOf': 3,
        'type': 'integer'
    }]
    # Same check for all three combinator kinds, in the same order
    # as before: one_of, any_of, all_of.
    for kwarg, keyword in (('one_of', 'oneOf'),
                           ('any_of', 'anyOf'),
                           ('all_of', 'allOf')):
        sub_schemas = [
            JsonIntegerSchema(multiple_of=5),
            JsonIntegerSchema(multiple_of=3)
        ]
        complex_schema = JsonComplexSchema(**{kwarg: sub_schemas})
        self.assertEqual({keyword: expected_sub_schemas},
                         complex_schema.to_dict())
def get_schema(cls):
    """Get the JSON schema: a record with required 'name' and 'age'."""
    return JsonObjectSchema(
        properties=dict(
            name=JsonStringSchema(),
            age=JsonIntegerSchema(),
        ),
        required=['name', 'age'],
        factory=cls,
    )
from xcube.util.jsonschema import JsonBooleanSchema from xcube.util.jsonschema import JsonIntegerSchema from xcube.util.jsonschema import JsonNumberSchema from xcube.util.jsonschema import JsonObjectSchema from ..accessor import DataOpener from ..accessor import DataWriter from ..datatype import DataType from ..error import DataStoreError COMMON_STORAGE_OPTIONS_SCHEMA_PROPERTIES = dict( # passed to ``DirCache``, if the implementation supports # directory listing caching. Pass use_listings_cache=False # to disable such caching. use_listings_cache=JsonBooleanSchema(), listings_expiry_time=JsonNumberSchema(), max_paths=JsonIntegerSchema(), # If this is a cachable implementation, pass True here to force # creating a new instance even if a matching instance exists, and prevent # storing this instance. skip_instance_cache=JsonBooleanSchema(), asynchronous=JsonBooleanSchema(), ) PROTOCOL_PARAM_NAME = 'protocol' STORAGE_OPTIONS_PARAM_NAME = 'storage_options' FS_PARAM_NAME = 'fs' ROOT_PARAM_NAME = 'root' class FsAccessor: """
from xcube.util.assertions import assert_instance from xcube.util.jsonschema import JsonArraySchema from xcube.util.jsonschema import JsonBooleanSchema from xcube.util.jsonschema import JsonIntegerSchema from xcube.util.jsonschema import JsonObjectSchema from xcube.util.jsonschema import JsonStringSchema from xcube.util.temp import new_temp_file from ..accessor import FsDataAccessor from ..helpers import is_local_fs from ...datatype import DATASET_TYPE from ...datatype import DataType from ...error import DataStoreError ZARR_OPEN_DATA_PARAMS_SCHEMA = JsonObjectSchema(properties=dict( log_access=JsonBooleanSchema(default=False), cache_size=JsonIntegerSchema(minimum=0, ), group=JsonStringSchema( description='Group path.' ' (a.k.a. path in zarr terminology.).', min_length=1, ), chunks=JsonObjectSchema( description='Optional chunk sizes along each dimension.' ' Chunk size values may be None, "auto"' ' or an integer value.', examples=[{ 'time': None, 'lat': 'auto', 'lon': 90 }, { 'time': 1,
def test_from_instance(self):
    """A valid integer instance is returned unchanged."""
    schema = JsonIntegerSchema(minimum=0, maximum=100, multiple_of=5)
    self.assertEqual(45, schema.from_instance(45))
def get_process_params_schema(cls) -> JsonObjectSchema:
    """Get the schema for process parameters.

    NOTE(review): returns a JsonIntegerSchema although the signature
    promises JsonObjectSchema; the inspection suppression below
    suggests this is deliberate (e.g. to exercise non-object parameter
    schemas) — confirm with callers.
    """
    # noinspection PyTypeChecker
    return JsonIntegerSchema()