def from_dict(cls, payload) -> "Relationship":
    """Build a Relationship from its dict (JSON:API-style) representation.

    Args:
        payload: mapping that may contain "links", "data", and "meta" keys.

    Returns:
        Relationship: constructed from the recognized keys of `payload`.
    """
    logger.trace("creating Relationship from {}", payload)
    kwargs = {}
    # check if we have a links object
    if "links" in payload:
        kwargs["links"] = {
            key: Link.from_dict(value) for key, value in payload["links"].items()
        }
    # FIX: "data" may be absent entirely (e.g. a links/meta-only relationship);
    # the original indexed payload["data"] unconditionally and raised KeyError.
    if "data" in payload:
        data = payload["data"]
        if isinstance(data, list):
            # list of identifiers
            kwargs["data"] = cattr.structure(data, List[ObjectIdentifier])
        elif isinstance(data, dict):
            # single identifier
            kwargs["data"] = cattr.structure(data, ObjectIdentifier)
        elif data is None:
            # null is permissible here
            kwargs["data"] = None
    kwargs["meta"] = payload.get("meta", {})
    return cls(**kwargs)
async def _get_rescue(self, key: UUID, impersonation: Impersonation) -> Optional[ApiRescue]:
    """Fetch a single rescue by UUID, sending `impersonation` as the
    `representing` query parameter.

    Returns the structured ApiRescue, or None when the response body's
    "data" is null.
    """
    await self.ensure_connection()
    request = Request(
        endpoint=["rescues", "read"],
        query={"id": f"{key}", "representing": impersonation},
    )
    result = await self.connection.execute(request)
    return cattr.structure(result.body["data"], Optional[ApiRescue])
def load(cls, base_path):
    """Read the resolved config file for `base_path` and structure it into `cls`."""
    path = Path(cls.resolve_file(base_path))
    with open(path, 'rb') as handle:
        raw = json.load(handle)
    return cattr.structure(raw, cls)
def __init__(self, *, config=None, config_file=None, dc_env=None):
    """Initialize settings, the datacube connection, input products, and S3 access.

    Supply either `config` (pre-built AlchemistSettings) or `config_file`
    (a YAML path/URL readable by fsspec).
    """
    if config is None:
        with fsspec.open(config_file, mode="r") as stream:
            self.config = cattr.structure(yaml.safe_load(stream), AlchemistSettings)
    else:
        self.config = config

    # Connect to the ODC Index
    self.dc = datacube.Datacube(env=dc_env)

    spec = self.config.specification
    if spec.product and spec.products:
        _LOG.warning(
            "Both `product` and `products` are defined, only using product."
        )

    # Store the products that we're allowing as inputs; `product` takes
    # precedence over `products` when both are set.
    self.input_products = []
    if spec.product:
        self.input_products.append(
            self.dc.index.products.get_by_name(spec.product))
    elif spec.products:
        self.input_products.extend(
            self.dc.index.products.get_by_name(name) for name in spec.products)

    # Rasterio environment activation
    configure_s3_access(
        cloud_defaults=True, aws_unsigned=spec.aws_unsigned)
def queue_callback(self, dtype: str, data: Dict) -> None:
    """Process robot order queue messages."""
    cls = self.__class__
    # Resolve the robco ewm data class for this message type
    try:
        robcoewmtype = get_robcoewmtype(dtype)
    except TypeError as err:
        _LOGGER.error(
            'Message type "%s" is invalid - %s - message SKIPPED: %s',
            dtype, err, data)
        return

    # Structure the raw message into robcoewmtypes data classes,
    # normalizing to a list for uniform processing below
    structured = structure(data, robcoewmtype)
    datasets = structured if isinstance(structured, list) else [structured]

    # Keep only datasets of a supported type; log and skip the rest
    valid_datasets = []
    for dataset in datasets:
        if isinstance(dataset, cls.VALID_QUEUE_MSG_TYPES):
            valid_datasets.append(dataset)
        else:
            _LOGGER.error(
                'Dataset includes an unsupported type: "%s". Dataset SKIPPED: %s',
                type(dataset), dataset)

    # Hand warehouse orders to the state machine
    for dataset in valid_datasets:
        if isinstance(dataset, WarehouseOrder):
            self.state_machine.update_warehouseorder(
                warehouseorder=dataset)
def setup(filename: str) -> Tuple[ConfigRoot, str]:
    """
    Validates and applies the configuration from disk.

    Args:
        filename (str): path and filename to load.

    Returns:
        Tuple of the structured configuration located at `filename` and the
        hash of the loaded file.
    """
    # do the loading part
    logger.info("loading configuration....")
    config_dict, file_hash = load_config(filename)

    logger.info("structuring new configuration...")
    configuration: ConfigRoot = cattr.structure(config_dict, ConfigRoot)
    gelf_config = configuration.logging.gelf
    setup_logging(configuration.logging.log_file, gelf_configuration=gelf_config)
    logger.info(f"new config hash is {file_hash}")

    logger.info("verifying configuration....")
    # NOTE: these members are dynamic, and only exist at runtime. (pylint can't see them.)
    PLUGIN_MANAGER.hook.validate_config(data=config_dict)  # pylint: disable=no-member
    logger.info("done verifying. config loaded without error.")

    # FIX: dropped the useless f-prefix — the literal has no placeholders (F541).
    logger.info("emitting new configuration to plugins...")
    # NOTE: these members are dynamic, and only exist at runtime. (pylint can't see them.)
    PLUGIN_MANAGER.hook.rehash_handler(data=configuration)  # pylint: disable=no-member

    return configuration, file_hash
def _validate(self, req, type, validate=True): """Validates users input to be passed to api Args: req (object): user input. type (object): the type to be validated against. validate (bool): to validate or not the input against the type. Raises: ValueError: if validates=True, Raises in case the input is not type serializable. ValueError: if validates=True,Raises in case the input is not a dict. Returns: dict: the input data in dict format. """ data = req if validate: if not isinstance(req, type): try: data = structure(req, type) except Exception as err: raise ValueError('Request is a valid {0}: {1}'.format( type.__name__, err)) return unstructure(data) else: if not isinstance(req, dict): raise ValueError( 'Request is not a dict. {0} passed instead.'.format(req)) return data
def test_job_env_flow(test_environs):
    """Exercise job-environ → check-action mapping for create vs update cases."""
    job_environs = {
        name: cattr.structure(env, jobs.JobEnviron)
        for name, env in test_environs.items()
    }

    def assert_create(action):
        # A CreateRun carries repo coordinates and a not-yet-persisted run
        assert isinstance(action, checks.CreateRun)
        assert action.owner == "asford"
        assert action.repo == "test_checks"
        assert action.run.id is None
        assert action.run.head_sha is not None
        assert action.run.head_branch is not None

    def assert_update(action):
        # An UpdateRun targets an existing run by id, without head details
        assert isinstance(action, checks.UpdateRun)
        assert action.owner == "asford"
        assert action.repo == "test_checks"
        assert action.run.id == "success"
        assert action.run.head_sha is None
        assert action.run.head_branch is None

    inited = attr.evolve(
        job_environ_to_run_details(job_environs["pre_success"]), id="success")
    alt = attr.evolve(
        job_environ_to_run_details(job_environs["pre_failure"]), id="failure")

    # No matching existing run -> create
    for env_name, existing in (
        ("pre_success", []),
        ("post_success", []),
        ("post_success", [alt]),
    ):
        assert_create(job_environ_to_check_action(job_environs[env_name], existing))

    # Matching initialized run present, in any position -> update
    for existing in ([inited], [inited, alt], [alt, inited]):
        assert_update(
            job_environ_to_check_action(job_environs["post_success"], existing))
def validate(self, data):
    """Structure `data` into this validator's model.

    Returns:
        tuple: (True, structured_object) on success, or (False, error_message)
        when structuring raises ValueError or TypeError.
    """
    try:
        return True, cattr.structure(data, self.model)
    except (ValueError, TypeError) as e:
        # FIX: the original had two except clauses with identical bodies;
        # merged into a single tuple clause.
        return False, str(e)
def test_response_error_create_product_v3():
    """Structure a failed create-product V3 payload into its response type."""
    failed_row = {
        "product_name": "Product Testing V3 1.39",
        "product_price": 10000,
        "sku": "TST21",
        "error": [
            "Value [Product Testing V3 1.39] of field [name] is already used, please use different value"
        ],
    }
    payload = {
        "header": {
            "process_time": 0.702986751,
            "messages": "Your request has been processed successfully",
        },
        "data": {
            "total_data": 1,
            "success_data": 0,
            "fail_data": 1,
            "failed_rows_data": [failed_row],
        },
    }
    structured = cattr.structure(payload, ResponsesCreateProductV3)
    assert isinstance(structured, ResponsesCreateProductV3)