def load_spec(self, spec: typing.Dict):
    """Load an OpenAPI specification dict onto this client.

    Validates that the mandatory top-level sections are present, stores a
    copy of the document, mirrors every remaining top-level key as an
    attribute (``-`` replaced by ``_``), builds :class:`Server` objects
    from the ``servers`` section, and falls back to the first declared
    server when none has been selected yet.

    Raises:
        ValueError: if any required top-level key is missing.
    """
    required = (
        OpenAPIKeyWord.OPENAPI,
        OpenAPIKeyWord.INFO,
        OpenAPIKeyWord.PATHS,
    )
    # generator expression: no need to materialize a list just for all()
    if not all(key in spec for key in required):
        raise ValueError("Invalid openapi document")  # fixed typo "Invaliad"
    self._spec = spec.copy()
    _spec = spec.copy()
    servers = _spec.pop(OpenAPIKeyWord.SERVERS, [])
    for key in _spec:
        # expose each section as an attribute, e.g. "x-foo" -> "x_foo"
        setattr(self, key.replace("-", "_"), _spec[key])
    self.servers = [
        Server(
            url=s.get(OpenAPIKeyWord.URL),
            description=s.get(OpenAPIKeyWord.DESCRIPTION),
            variables=s.get(OpenAPIKeyWord.VARIABLES),
        )
        for s in servers
    ]
    # default to the first declared server when none was chosen explicitly
    if not self._server and self.servers:
        self._server = self.servers[0]
    self._collect_operations()
def _format_other_params(self, template: typing.Dict) -> typing.Dict: new_dict = {} for id in self._relate_mount.keys(): blank = template.copy() blank.update(pipetteId=str(id)) new_dict[str(id)] = blank return new_dict
def from_dict(cls, order: int, dict: t.Dict) -> "Episode":
    """Construct an Episode from a raw mapping.

    Merges *order* into the data (overriding any existing ``order`` key),
    then converts each declared dataclass field's value through the
    field's annotated type before instantiating *cls*.
    """
    data = {**dict, "order": order}
    converted = {
        field.name: field.type(data[field.name])
        for field in fields(cls)
    }
    return cls(**converted)
def copy_dict(dict_: Dict[Any, Any], key: Any, value: Any) -> Tuple[Dict[Any, Any], Dict[Any, Any]]:
    """Return the original dict plus a shallow copy with one entry set.

    Fixes: the return annotation used ``Dict[Any]``, which raises
    ``TypeError`` at definition time (``Dict`` requires key and value type
    parameters), and ``Dict.copy(dict_)`` called the method through the
    typing alias instead of the instance.

    Args:
        dict_: source mapping; left unmodified.
        key: key to set in the copy.
        value: value to associate with *key*.

    Returns:
        ``(dict_, copy)`` where ``copy`` is *dict_* with ``copy[key] = value``.
    """
    dict_copy = dict_.copy()
    dict_copy[key] = value
    return dict_, dict_copy
def _update_existing_geonode_resource(self, geonode_resource: ResourceBase, defaults: typing.Dict):
    """Refresh an existing GeoNode resource from *defaults*.

    When the defaults carry files, the resource's data is replaced
    outright; otherwise only its metadata is updated in place.
    """
    vals = defaults.copy()
    if not vals.get("files", []):
        return resource_manager.update(geonode_resource.uuid, vals=vals)
    return resource_manager.replace(geonode_resource, vals=vals)
def each(self, item: _typing.Dict) -> _typing.Dict:
    """Rename keys of *item* according to the configured mapping.

    Works on a copy when copying is enabled; a missing source key either
    maps to ``None`` (when the none-fallback is on) or re-raises KeyError.
    """
    target = item.copy() if self.__copy else item
    for old_key, new_key in self.__replace.items():
        try:
            target[new_key] = target.pop(old_key)
        except KeyError:
            if not self.__none:
                raise
            target[new_key] = None
    return target
def generate_plot_task(
        plot_task_template: typing.Dict,
        start_time: pd.Timestamp,
        forecast_time: pd.Timedelta,
        data_source: typing.Dict,
) -> typing.Optional[typing.Dict]:
    """
    Generate plot task for GISET.

    Parameters
    ----------
    plot_task_template
        template dict with a "maplayer" list; each layer's "file_path"
        is pointed at the located data file.
    start_time
        forecast start time.
    forecast_time
        forecast lead time.
    data_source
        data source description; must carry the GISET routing fields
        ("username", "user_id", "routing_key", "test_ID").

    Returns
    -------
    typing.Optional[typing.Dict]
        a two-step plotter/distributor task dict, or None when the data
        file cannot be found.
    """
    import copy

    data_path = get_data_path(data_source, start_time=start_time, forecast_time=forecast_time, data_type="grib2/orig")
    if data_path is None:
        # lazy %-style placeholders: the old call passed the times as extra
        # positional args with no placeholders, so logging raised a
        # formatting error and the values never appeared in the message
        logger.warning(
            "WARNING: data is not found, start_time: %s, forecast_time: %s",
            start_time, forecast_time)
        return None
    # deep copy: a shallow .copy() shares the nested "maplayer" dicts with
    # the template, so setting file_path below mutated the caller's template
    plot_task = copy.deepcopy(plot_task_template)
    for layer in plot_task["maplayer"]:
        layer["file_path"] = str(data_path)
    task = {
        'steps': [
            {
                'step_type': 'plotter',
                'type': 'ploto_gidat.plotter.meteor_draw_plotter',
                'plot_task': plot_task,
            },
            {
                'step_type': 'distributor',
                'type': 'ploto_gidat.distributor.giset_distributor',
                "username": data_source["username"],
                "user_id": data_source["user_id"],
                "routing_key": data_source["routing_key"],
                "test_ID": data_source["test_ID"],
                "meteor_type": plot_task["maplayer"][0]["meteor_type"],
                "start_time": start_time.isoformat(),
                "forecast_time": forecast_time.isoformat(),
            },
        ],
    }
    return task
def update(self, other: tp.Dict):
    """Update from another :class:`dict`.

    Entries under the ``'devices'`` key are routed through
    ``add_device``; everything else is merged into ``self.data``. The
    caller's dict is left untouched and the result is persisted via
    ``write``.
    """
    incoming = other.copy()
    with self._setitem_lock:
        # pop replaces the original get/`in`/del dance in one step
        oth_devices = incoming.pop('devices', {})
        self.data.update(incoming)
        for device in oth_devices.values():
            self.add_device(device)
        self.write()
def each(self, item: _typing.Dict) -> _typing.Dict:
    """Apply the configured function to selected keys of *item*.

    Operates on a copy when copying is enabled. The configured key list
    is used when non-empty, otherwise every key of the item. A failing
    application either stores ``None`` (none-fallback on) or re-raises.
    """
    target = item.copy() if self.__copy else item
    selected = self.__keys if self.__keys else tuple(target)
    for key in selected:
        try:
            target[key] = self.__func(target[key])
        except Exception:
            if not self.__none:
                raise
            target[key] = None
    return target
def _get_nsmap(original: typing.Dict): """Prepare namespaces dict for running xpath queries. lxml complains when a namespaces dict has an entry with None as a key. """ result = original.copy() try: result["wms"] = original[None] except KeyError: pass else: del result[None] return result
def parse_query(query: typing.Dict) -> typing.Dict:
    """Normalise a raw query dict.

    Converts "start_time" to a pandas Timestamp and "forecast_time" to a
    Timedelta, and renames the reserved-word keys "class"/"type"/"name"
    to "data_class"/"data_type"/"data_name". The input dict is not
    modified.
    """
    parsed = query.copy()
    if "start_time" in parsed:
        parsed["start_time"] = pd.to_datetime(query["start_time"])
    if "forecast_time" in parsed:
        parsed["forecast_time"] = pd.to_timedelta(query["forecast_time"])
    # table-driven renames instead of a chain of if-blocks
    for old_key, new_key in (
        ("class", "data_class"),
        ("type", "data_type"),
        ("name", "data_name"),
    ):
        if old_key in parsed:
            parsed[new_key] = parsed.pop(old_key)
    return parsed
def _create_new_geonode_resource(self, geonode_resource_type, defaults: typing.Dict):
    """Create a brand-new GeoNode resource described by *defaults*.

    When files are present they are ingested through the importer;
    otherwise a bare resource record is created from the defaults.
    Returns the new GeoNode resource.
    """
    logger.debug(
        f"Creating a new GeoNode resource for resource with uuid: {defaults['uuid']!r}..."
    )
    vals = defaults.copy()
    files = vals.pop("files", [])
    if not files:
        logger.debug("calling resource_manager.create...")
        # NOTE(review): create() receives the original `defaults` (files
        # included), matching the pre-existing behavior — confirm intended
        return resource_manager.create(
            vals["uuid"], geonode_resource_type, defaults)
    logger.debug("calling resource_manager.ingest...")
    return resource_manager.ingest(
        files,
        uuid=vals["uuid"],
        resource_type=geonode_resource_type,
        defaults=vals,
        importer_session_opts={"name": vals["uuid"]},
    )
def __new__(mcs, name: typing.AnyStr, bases: typing.Tuple, class_dict: typing.Dict):
    """Metaclass hook that assembles a Blueprint class.

    Injects the implicit ``_id``/``_ts`` fields, wires up every ``Field``
    descriptor (name, fullname, internal_name), collects the inner
    ``Meta`` class options into ``meta_data``, and attaches the generated
    ``__init__``/``initialize_instance``/``serialize``/``should_serialize``
    methods before creating the class object.
    """
    class_dict_copy = class_dict.copy()
    # implicit fields every blueprint gets: a string instance id...
    _id = Field(
        verbose_name='Instance ID',
        data_type=str,
        required=True,
        default=None,
    )
    # ...and an integer creation timestamp (default evaluated lazily)
    _ts = Field(
        verbose_name='Timestamp',
        data_type=int,
        required=True,
        default=lambda: time.time(),
    )
    class_dict_copy.update({
        '_id': _id,
        '_ts': _ts,
    })
    # init all field instances
    for k, v in class_dict_copy.items():
        if isinstance(v, Field):
            v.name = k
            v.fullname = f'{name.lower()}.{k}'
            v.internal_name = f'field__{k}'
    # collect non-dunder attributes of the optional inner Meta class
    meta_data: typing.Dict = {}
    meta_class = class_dict_copy.get('Meta')
    if meta_class:
        assert isinstance(meta_class, type)
        meta_class_dict = meta_class.__dict__
        for mk, mv in meta_class_dict.items():
            if not mk.startswith('__'):
                meta_data.update({mk: mv})
    class_dict_copy.update({'meta_data': meta_data})

    def generate_id(self):
        # NOTE(review): placeholder — always returns 'test'; confirm intended
        return 'test'

    class_dict_copy.update({'generate_id': generate_id})

    def initialize_instance(self, init_data: typing.Dict):
        """Populate every Field of *self* from *init_data* or defaults,
        generate a new id when needed, and enforce required fields."""
        cls_dict: typing.Dict = self.__class__.__dict__
        for sk, sv in cls_dict.items():
            if isinstance(sv, Field):
                if sk in init_data:
                    sk_v = init_data[sk]
                    data_type: typing.Any = sv.data_type
                    multi: bool = sv.multi
                    if multi:
                        # multi-valued fields must be provided as lists
                        assert isinstance(sk_v, list)
                        if isinstance(data_type, BlueprintMeta):
                            v_deserialized = []
                            for d in sk_v:
                                # init every blueprint instance
                                deserialized_instance = data_type(**d)
                                # who is the parent of this blueprint ?
                                deserialized_instance.parent = self
                                v_deserialized.append(
                                    deserialized_instance)
                        else:
                            # Note: create new instance of list
                            v_deserialized = [item for item in sk_v]
                    else:
                        assert not isinstance(sk_v, list)
                        if isinstance(data_type, BlueprintMeta):
                            v_deserialized = data_type(**sk_v)
                            v_deserialized.parent = self
                        else:
                            v_deserialized = sk_v
                    # set attr value (through descriptor)
                    setattr(self, sk, v_deserialized)
                else:
                    # user not provide value for the field, use default value to initialize the field if possible
                    # default value for multi field should be []
                    # NOTE(review): mutates sv.default on the shared Field —
                    # affects every instance of the class; confirm intended
                    if sv.multi and sv.default is None:
                        sv.default = []
                    # if data type of field is Blueprint, create a new blueprint instance as default value
                    if sv.data_type and isinstance(sv.data_type, BlueprintMeta):
                        if sv.default and isinstance(
                                sv.default, Blueprint):
                            sv.default = copy.copy(sv.default)
                    # will get default value if any,
                    # and set instance field value to default (force check and clean, create new instance if needed)
                    sk_v = getattr(self, sk)
                    # after initialize, value of every field should be in valid state (pass descriptor's check)
                    # check_and_clean_if_possible
                    setattr(self, sk, sk_v)
        # generate id if needed
        if self.is_new:
            assert getattr(self, self.ID_NAME) is None
            id_template = self.meta_data.get('id_template')
            if id_template is None:
                raise BlueprintTypeException(
                    f'cannot generate id for new created blueprint '
                    f'because id_template not specified in Meta')
            # render the id template against the current field values
            context_render: typing.Dict = {
                key: getattr(self, key)
                for key in cls_dict if isinstance(cls_dict[key], Field)
            }
            new_id = id_template.format(**context_render)
            setattr(self, self.ID_NAME, new_id)
        # check required value
        for sk, sv in cls_dict.items():
            if isinstance(sv, Field):
                sk_v = getattr(self, sk, None)
                if sk_v is None and sv.required and sv.default is None:
                    raise BlueprintTypeException(
                        f'{sv.fullname} is required '
                        f'but no value provided and no default value set')

    def init(self, **kwargs):
        """Generated ``__init__``: records kwargs and initializes fields."""
        self.kwargs = kwargs  # used to copy a new blueprint instance
        self.parent = None
        self.id_context = {}
        # a blueprint constructed without an explicit id is "new"
        self.is_new = False
        if self.ID_NAME not in kwargs:
            self.is_new = True
        self.initialize_instance(kwargs)

    def serialize(self, selected_fields=None):
        """Serialize the instance's Field values into a plain dict.

        When *selected_fields* is non-empty only those fields are
        included; an empty result is returned when the instance opts out
        via ``should_serialize``."""
        if selected_fields is None:
            selected_fields = []
        else:
            selected_fields = list(selected_fields)
        cls_dict: typing.Dict = self.__class__.__dict__
        serialized: typing.Dict = {}
        if not self.should_serialize():
            return serialized
        else:
            for sk, sv in cls_dict.items():
                if isinstance(sv, Field):
                    if selected_fields and sv.name not in selected_fields:
                        continue
                    sk_v = getattr(self, sk)
                    # serialize each field according to sv.data_type and sv.multi
                    data_type: typing.Any = sv.data_type
                    multi: bool = sv.multi
                    if multi:
                        # should serialize each item in the value
                        assert isinstance(sk_v, list)
                        if isinstance(data_type, BlueprintMeta):
                            serialized.update(
                                {sk: [item.serialize() for item in sk_v]})
                        else:
                            # just create a new list with the same content
                            serialized.update(
                                {sk: [item for item in sk_v]})
                    else:
                        assert not isinstance(sk_v, list)
                        if isinstance(data_type, BlueprintMeta):
                            # nested blueprints may opt out of serialization
                            if sk_v.should_serialize():
                                serialized.update({sk: sk_v.serialize()})
                        else:
                            serialized.update({sk: sk_v})
        return serialized

    def should_serialize(self):
        # hook: subclasses may override to skip serialization entirely
        return True

    class_dict_copy.update({
        'ID_NAME': '_id',
        'TS_NAME': '_ts',
        '__init__': init,
        'initialize_instance': initialize_instance,
        'serialize': serialize,
        'should_serialize': should_serialize,
    })
    cls = type.__new__(mcs, name, bases, class_dict_copy)
    return cls