def __init__(self, fn: SupportsTask) -> None:
    """Wrap *fn* as a task: check compatibility, record its import origin,
    and attach pydantic argument validation.
    """
    self.compatibility_check(fn)
    # Remember where the callable lives so it can be located/re-imported later.
    self.target_spec = TargetSpec(module=fn.__module__, name=fn.__name__)
    # All subsequent calls go through pydantic's signature validation.
    self.wrapped_func = validate_arguments(fn)
def get_step(el: Union[Type[Step], BasePipeLine, Step], cache):
    """Execute a single pipeline element against *cache* and return its result.

    Supported element kinds:
      - an instance whose metaclass is ``BaseStep`` -> ``el.run(data=cache)``
      - a plain function -> called through pydantic validation
      - a ``Step`` subclass -> instantiated, then ``run(data=cache)``

    Raises:
        TypeError: if *el* is none of the supported kinds.
    """
    if isinstance(type(el), BaseStep):
        return el.run(data=cache)
    if isinstance(el, FunctionType):
        return validate_arguments(el)(cache)
    # Guard with isinstance(el, type): issubclass() raises its own TypeError
    # for non-class arguments, which would mask the pipeline error below.
    if isinstance(el, type) and issubclass(el, Step):
        return el().run(data=cache)
    raise TypeError('Invalid type in pipeline')
def __getattribute__(self, name: str) -> Any:
    """Intercept attribute access: public callables are returned wrapped
    with pydantic argument validation when ``type_checking`` is enabled.
    """
    attr = super().__getattribute__(name)
    # Private names and plain data attributes pass through untouched.
    if name.startswith("_") or not callable(attr):
        return attr
    # Validation can be switched off per instance.
    if not self.type_checking:
        return attr
    return validate_arguments(attr)
async def handle():
    """Invoke *handler* with pydantic-validated keyword arguments, awaiting
    it when it is a coroutine function.

    Every failure is logged and answered with an empty 200 response —
    errors never propagate (presumably webhook-ACK semantics; confirm
    against the caller).
    """
    try:
        wrapped = validate_arguments(handler)
        if inspect.iscoroutinefunction(handler):
            return await wrapped(**handler_kwargs)
        return wrapped(**handler_kwargs)
    except ValidationError as exc:
        # Log the validation details but still acknowledge with 200.
        logger.exception(exc.errors())
        return make_response(200, "")
    except Exception as exc:
        logger.exception(exc)
        return make_response(200, "")
async def handle():
    """Invoke *handler* with pydantic-validated keyword arguments, awaiting
    it when it is a coroutine function.

    Validation failures become a 400 response carrying the error details;
    HTTP errors are delegated to ``requests_http_error_handler``.
    """
    try:
        wrapped = validate_arguments(handler)
        if inspect.iscoroutinefunction(handler):
            return await wrapped(**handler_kwargs)
        return wrapped(**handler_kwargs)
    except ValidationError as exc:
        # Bad input from the caller -> 400 with the validation details.
        logger.exception(exc.errors())
        return make_response(400, exc.errors())
    except HTTPError as exc:
        logger.exception(exc)
        return requests_http_error_handler(exc)
def __new__(cls, clsname, bases, dct):
    """Metaclass hook that wraps the class's ``run`` method with pydantic
    argument validation before the class object is created.

    Lookup order: the class body, then the first base's ``__dict__``,
    then anywhere in that base's MRO.

    Raises:
        TypeError: if no ``run`` implementation can be found (the original
            code hit an unbound-local NameError, or IndexError for a class
            with no bases, in that situation).
    """
    if 'run' in dct:
        run_impl = dct['run']
    elif bases and 'run' in bases[0].__dict__:
        run_impl = bases[0].run
    elif bases:
        # Fall back to the full inheritance chain of the first base.
        run_impl = next(
            (obj.run for obj in bases[0].__mro__ if hasattr(obj, 'run')),
            None,
        )
    else:
        run_impl = None
    if run_impl is None:
        raise TypeError(f"{clsname} has no 'run' method to validate")
    dct['run'] = validate_arguments(run_impl)
    return super(BaseStep, cls).__new__(cls, clsname, bases, dct)
def _wrapper(
    wrapped_: Callable,
    instance: Optional[Any],
    args: Tuple[Any, ...],
    kwargs: Dict[str, Any],
):
    """Dispatch the call through pydantic or the local input validator,
    then type-check the returned value.
    """
    if with_pydantic:
        # Let pydantic build a model from the signature and validate.
        result = validate_arguments(wrapped_)(*args, **kwargs)
    else:
        pos, kwd = validate_inputs(instance, args, kwargs)
        result = wrapped_(*pos.values(), **kwd)
    # The return value is validated against the "return" annotation.
    return _check_arg("return", result)
def __init_subclass__(cls, name=None, public=True):
    """Register the subclass and wrap its public API with pydantic
    argument validation.

    Args:
        name: registry key in ``cls._all``; defaults to the class name.
        public: when False the subclass is neither registered nor wrapped.
    """
    if public:
        name = name or cls.__name__
        cls._all[name] = cls
        cls._name = name
        # Iterate over a snapshot: setattr() below writes into the very
        # class __dict__ that vars(cls) is a live view of.
        for attr_name in list(vars(cls)):
            attr = getattr(cls, attr_name)
            # Wrap __init__ plus every public, lowercase callable.
            if (attr_name == '__init__'
                    or not attr_name.startswith('_')
                    and attr_name.islower()
                    and callable(attr)):
                setattr(
                    cls,
                    attr_name,
                    validate_arguments(config=cls._pydantic_config)(attr),
                )
    return super().__init_subclass__()
def test_var_args_kwargs(validated): def foo(a, b, *args, d=3, **kwargs): return f'a={a!r}, b={b!r}, args={args!r}, d={d!r}, kwargs={kwargs!r}' if validated: foo = validate_arguments(foo) assert foo(1, 2) == 'a=1, b=2, args=(), d=3, kwargs={}' assert foo(1, 2, 3, d=4) == 'a=1, b=2, args=(3,), d=4, kwargs={}' assert foo(*[1, 2, 3], d=4) == 'a=1, b=2, args=(3,), d=4, kwargs={}' assert foo(1, 2, args=(10, 11)) == "a=1, b=2, args=(), d=3, kwargs={'args': (10, 11)}" assert foo(1, 2, 3, args=(10, 11)) == "a=1, b=2, args=(3,), d=3, kwargs={'args': (10, 11)}" assert foo(1, 2, 3, e=10) == "a=1, b=2, args=(3,), d=3, kwargs={'e': 10}" assert foo(1, 2, kwargs=4) == "a=1, b=2, args=(), d=3, kwargs={'kwargs': 4}" assert foo(1, 2, kwargs=4, e=5) == "a=1, b=2, args=(), d=3, kwargs={'kwargs': 4, 'e': 5}"
def generate_pipes(steps: list, data):
    """Lazily run the pipeline *steps*, yielding each step's result.

    Each element may be a Step / BasePipeLine instance, a plain function
    (called through pydantic validation), or a Step subclass (instantiated
    first). NOTE(review): *cache* is never rebound, so every step receives
    the original *data* — confirm that is intended.

    Raises:
        TypeError: for any unsupported element.
    """
    cache = data
    for el in steps:
        if isinstance(el, (Step, BasePipeLine)):
            step = el.run(data=cache)
        elif isinstance(el, FunctionType):
            step = validate_arguments(el)(cache)
        # isinstance(el, type) guard: issubclass() raises its own TypeError
        # on non-class input, masking the pipeline error below.
        elif isinstance(el, type) and issubclass(el, Step):
            step = el().run(data=cache)
        else:
            raise TypeError('Invalid type in pipeline')
        yield step
def __getattribute__(self, name: str) -> Any:
    """Intercept attribute access: public callables are returned wrapped
    with a strict pydantic validator (extra arguments forbidden, model
    immutable) when ``type_checking`` is enabled.
    """
    attr = super().__getattribute__(name)
    # Private names and plain data attributes pass through untouched.
    if name.startswith("_") or not callable(attr):
        return attr
    if not self.type_checking:
        return attr
    strict_config = {
        "extra": Extra.forbid,
        "allow_mutation": False,
    }
    return validate_arguments(attr, config=strict_config)
def cell(func, *args, **kwargs):
    """Decorator for Component factory functions.

    Wraps *func* with pydantic type validation, then delegates to
    ``cell_without_validator``, which implements a build cache: a component
    that was already built is returned from the cache instead of being
    rebuilt, avoiding two identical cells that are not references to the
    same cell. Override with ``cache=False``.

    Decorating a function with ``@cell`` provides:

    - CACHE: avoids creating duplicated cells.
    - name: gives Components a unique name based on parameters.
    - adds Component.info with default, changed and full component settings.

    Keyword Args:
        autoname (bool): if True renames component based on args and kwargs.
        name (str): Optional (ignored when autoname=True).
        cache (bool): returns component from the cache if it already exists.
            Defaults to True; avoids duplicated cells with the same name.
        info: updates component.info dict.
        prefix: name_prefix, defaults to function name.
        max_name_length: truncates name beyond some characters (32) with a hash.
        decorator: function to run over the component.

    .. plot::
      :include-source:

      import gdsfactory as gf

      @gf.cell
      def rectangle(size=(4,2), layer=0)->gf.Component:
          c = gf.Component()
          w, h = size
          points = [[w, h], [w, 0], [0, 0], [0, h]]
          c.add_polygon(points, layer=layer)
          return c

      c = rectangle(layer=(1,0))
      c.plot()
    """
    validated = validate_arguments(func)
    return cell_without_validator(validated, *args, **kwargs)
def argument_validation(function: _AnyCallable) -> _AnyCallable:
    """Validate call arguments at runtime using the type annotations in the
    function signature.

    Raises:
        InvalidArgument: when the supplied arguments fail validation.
    """
    function = validate_arguments(config={
        'arbitrary_types_allowed': True,
        'extra': Extra.forbid
    })(function)

    @wraps(function)
    def wrapper(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except ValidationError as exc:
            # Chain the pydantic error so the original validation details
            # remain available on __cause__ for debugging.
            raise InvalidArgument from exc

    return wrapper  # type: ignore
project_uuid) await vc_repo.commit(repo_id, message="auto commit") commit_id = await vc_repo.checkout(repo_id, commit_id) commit, tags = await vc_repo.get_commit_log(commit_id) return Checkpoint.from_commit_log(commit, tags) async def get_workbench( vc_repo: VersionControlRepository, project_uuid: UUID, ref_id: RefID, ) -> WorkbenchView: repo_id, commit_id = await vc_repo.as_repo_and_commit_ids( project_uuid, ref_id) content: Dict = await vc_repo.get_snapshot_content(repo_id, commit_id) return WorkbenchView.parse_obj(content) # # All above with validated arguments # list_repos_safe = validate_arguments(list_repos, config=CFG) list_checkpoints_safe = validate_arguments(list_checkpoints, config=CFG) create_checkpoint_safe = validate_arguments(create_checkpoint, config=CFG) get_checkpoint_safe = validate_arguments(get_checkpoint, config=CFG) update_checkpoint_safe = validate_arguments(update_checkpoint, config=CFG) checkout_checkpoint_safe = validate_arguments(checkout_checkpoint, config=CFG) get_workbench_safe = validate_arguments(get_workbench, config=CFG)
def cell(func, *args, **kwargs):
    """Delegate to ``cell_without_validator`` after wrapping *func* with
    pydantic argument validation.
    """
    validated_func = validate_arguments(func)
    return cell_without_validator(validated_func, *args, **kwargs)
async def handle():
    """Call *handler* with pydantic-validated keyword arguments, awaiting
    the result when *handler* is a coroutine function. Exceptions propagate
    to the caller.
    """
    wrapped = validate_arguments(handler)
    if inspect.iscoroutinefunction(handler):
        return await wrapped(**handler_kwargs)
    return wrapped(**handler_kwargs)
def __new__(cls, clsname, bases, clsdict: dict):
    """Metaclass hook wrapping every public function in the class body with
    pydantic argument validation.

    Private names (leading underscore) and non-function attributes are left
    untouched.
    """
    # Iterate over a snapshot: assigning into clsdict while iterating the
    # live items() view is fragile.
    for attr_name, attr_value in list(clsdict.items()):
        if not attr_name.startswith('_') and inspect.isfunction(attr_value):
            clsdict[attr_name] = validate_arguments(attr_value)
    return super().__new__(cls, clsname, bases, clsdict)