def test_inputs_SWUpdateRepo_attrs():
    """Verify SWUpdateRepo param spec and its attrs fields' types/defaults."""
    assert SWUpdateRepo._param_di == inputs.param_spec['swupdate/repo']

    fields = attr.fields_dict(SWUpdateRepo)

    release_field = fields['release']
    assert release_field.type is str
    assert release_field.default is attr.NOTHING

    source_field = fields['source']
    assert source_field.type is Union[str, Path]
    assert source_field.default is UNCHANGED
def test_ParserFiller_extract_positional_args_default_is_not_NOTHING():
    """A field that has a default value must not become a positional arg."""
    cls = attr.make_class(
        "SC",
        {
            "x": attr.ib(
                default=123,
                metadata={METADATA_ARGPARSER: {'help': 'some help'}},
                type=int,
            )
        },
    )
    res = ParserFiller.extract_positional_args(cls, attr.fields_dict(cls))
    assert res == ([], attr.fields_dict(cls))
def test_inputs_PillarInputBase_attrs():
    """The 'value' field's argparser type callable parses JSON strings."""
    # TODO IMPROVE use mocks to verify that
    # value_from_str is called with v_type='json'
    value_field = attr.fields_dict(PillarInputBase)['value']
    to_value = value_field.metadata[METADATA_ARGPARSER]['type']
    assert to_value('123') == 123
    assert to_value('{"1" : 23}') == {'1': 23}
def test_log_log_args_cmd(log_args_builder, cmd):
    """'cmd' attr exists (with the right default) only when a cmd filter is configured."""
    handler = 'somehandler'
    logging = {
        'handlers': {handler: {}},
        'root': {'handlers': [handler]},
    }

    if not cmd:
        # no filters at all -> no 'cmd' attribute is generated
        LogArgs = log_args_builder(logging)
        assert 'cmd' not in attr.fields_dict(LogArgs)
        return

    for default in (None, 'somedefault'):
        filters = {cmd_filter: {}}
        if default:
            filters[cmd_filter]['cmd'] = default
        logging['filters'] = filters
        LogArgs = log_args_builder(logging)
        check_attr_in_cls(
            'cmd',
            LogArgs,
            type=str,
            default=('unknown' if default is None else default),
        )
def test_log_log_args_formatter(log_args_builder, formatter):
    """A per-handler formatter attr appears only when the handler sets one."""
    handler = 'somehandler'
    logging = {
        'formatters': {'formatter1': {}, 'formatter2': {}},
        'handlers': {handler: {}},
        'root': {'handlers': [handler]},
    }
    if formatter:
        logging['handlers'][handler]['formatter'] = formatter

    LogArgs = log_args_builder(logging)

    if not formatter:
        assert 'somehandler_formatter' not in attr.fields_dict(LogArgs)
        return

    check_attr_in_cls(
        'somehandler_formatter',
        LogArgs,
        type=str,
        default=formatter,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': f"{handler} log records format",
                'choices': list(logging['formatters']),
            }
        },
    )
    log_args = LogArgs(somehandler_formatter=formatter)
    assert hasattr(log_args.handlers[handler], 'formatter')
    assert log_args.handlers[handler].formatter == formatter
def test_log_log_args_level(log_args_builder, level):
    """A per-handler level attr appears only when the handler sets a level."""
    handler = 'somehandler'
    logging = {
        'handlers': {handler: {}},
        'root': {'handlers': [handler]},
    }
    if level:
        logging['handlers'][handler]['level'] = level

    LogArgs = log_args_builder(logging)

    if not level:
        assert 'somehandler_level' not in attr.fields_dict(LogArgs)
        return

    check_attr_in_cls(
        'somehandler_level',
        LogArgs,
        type=str,
        default=level,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': f"{handler} log level",
                'choices': ['DEBUG', 'INFO', 'WARN', 'ERROR'],
            }
        },
    )
    log_args = LogArgs(somehandler_level=level)
    assert hasattr(log_args.handlers[handler], 'level')
    assert log_args.handlers[handler].level == level
def from_grains(cls, **kwargs):
    """Build an instance from grains data, stashing unknown keys.

    Keys that are not attrs fields of ``cls`` are collected into the
    ``not_used`` mapping and passed via the ``not_used`` parameter
    instead of being forwarded to the constructor directly.
    """
    # Assumption: 'not_used' doesn't appear in grains
    known_fields = attr.fields_dict(cls)
    not_used = {}
    for key in list(kwargs):
        if key not in known_fields:
            not_used[key] = kwargs.pop(key)
    return cls(**kwargs, not_used=not_used)
def test_inputs_NTP():
    """Every ntp-group param maps to a str NTP field defaulting to UNCHANGED."""
    assert NTP._param_group == 'ntp'
    ntp_fields = attr.fields_dict(NTP)
    for param in _param_spec:
        path = Path(param)
        if str(path.parent) != NTP._param_group:
            continue
        fattr = ntp_fields[path.name]
        assert fattr.type is str
        assert fattr.default is UNCHANGED
def run(helper_t, *args, **kwargs):
    """Instantiate ``helper_t`` with kwargs matching its attrs fields and run it.

    Remaining kwargs (those that are not fields) are forwarded to ``run``.
    """
    field_names = attr.fields_dict(helper_t)
    init_kwargs = {}
    for key in list(kwargs):
        if key in field_names:
            init_kwargs[key] = kwargs.pop(key)
    helper = helper_t(*args, **init_kwargs)
    return helper.run(**kwargs)
def run(cmd_t, *args, **kwargs):
    """Split kwargs between command construction and execution, then run.

    Kwargs that match attrs fields of ``cmd_t`` feed the constructor;
    the rest are forwarded to ``run``.
    """
    fields = attr.fields_dict(cmd_t)
    ctor_kwargs = {
        name: kwargs.pop(name) for name in list(kwargs) if name in fields
    }
    return cmd_t(*args, **ctor_kwargs).run(**kwargs)
def test_inputs_NETWORK():
    """Check network-group params' field types and defaults."""
    assert NetworkParams._param_group == 'network'
    # NOTE(review): fields are looked up on Network while the param group
    # comes from NetworkParams — confirm this class mismatch is intentional.
    list_typed = (
        'dns_servers',
        'search_domains',
        'primary_data_network_iface',
        'secondary_data_network_iface',
    )
    for param in _param_spec:
        path = Path(param)
        if str(path.parent) != NetworkParams._param_group:
            continue
        fattr = attr.fields_dict(Network)[path.name]
        expected_type = List if path.name in list_typed else str
        assert fattr.type is expected_type
        assert fattr.default is UNCHANGED
def check_attr_in_cls(attr_name, cls, **fields):
    """Assert attrs field ``attr_name`` of ``cls`` matches expectations.

    ``metadata`` (if given and non-empty) must match the field's metadata
    key-for-key; otherwise the field must have empty metadata. Every other
    keyword is compared against the same-named attribute of the field.
    """
    field = attr.fields_dict(cls)[attr_name]
    expected_metadata = fields.pop('metadata', None)
    if not expected_metadata:
        assert not len(field.metadata)
    else:
        assert len(field.metadata) == len(expected_metadata)
        for key, value in expected_metadata.items():
            assert field.metadata[key] == value
    for key, value in fields.items():
        assert getattr(field, key) == value
def test_log_log_args_no_formatters(log_args_builder):
    """Without a 'formatters' section no per-handler formatter attr is built."""
    config = {
        'handlers': {
            'somehandler': {
                'class': 'logging.StreamHandler',
                'formatter': 'someformatter',
            }
        },
        'root': {'handlers': ['somehandler']},
    }
    LogArgs = log_args_builder(config)
    assert 'somehandler_formatter' not in attr.fields_dict(LogArgs)
def test_ParserFiller_extract_positional_args_happy_path():
    """A required field carrying argparser metadata becomes positional."""
    cls = attr.make_class(
        "SC",
        {
            "x": attr.ib(
                metadata={
                    METADATA_ARGPARSER: {
                        'action': 'store_bool',
                        'help': 'some help',
                    }
                },
                type=int,
            )
        },
    )
    res = ParserFiller.extract_positional_args(cls, attr.fields_dict(cls))
    assert res == ([attr.fields(cls).x], {})
def from_args(cls, data: Union[list, dict]):
    """Build an instance from positional (list) or keyword (dict) data.

    Dict keys that are not attrs fields of ``cls`` are excluded from
    construction and stored on the instance as ``_unexpected_attributes``.
    Missing required attributes are reported by the ``attr`` module itself
    during construction.

    Raises:
        ValidationError: if ``data`` is neither a list nor a dict.
    """
    unexpected_attrs = dict()
    if isinstance(data, dict):
        # Work on a shallow copy so the caller's dict is not mutated
        # (resolves the former TODO about copying the input parameter).
        data = dict(data)
        for _attr in (data.keys() - set(a for a in attr.fields_dict(cls))):
            # NOTE: Remove unexpected attributes from initialization `data`
            # dictionary
            unexpected_attrs[_attr] = data.pop(_attr)
        # If some attributes are missed in `data`, the `attr` module is
        # responsible for that validation
        obj = cls(**data)
        obj._unexpected_attributes = unexpected_attrs
        return obj
    elif isinstance(data, list):
        obj = cls(*data)
        obj._unexpected_attributes = unexpected_attrs
        return obj
    else:
        raise ValidationError(f"Unexpected content type: '{type(data)}'")
def test_ParserFiller_extract_positional_args_no_metadata_argparser():
    """Fields without argparser metadata stay in the keyword-args dict."""
    cls = attr.make_class(
        "SC", {"x": attr.ib(type=int, repr=False, init=False)}
    )
    res = ParserFiller.extract_positional_args(cls, attr.fields_dict(cls))
    assert res == ([], attr.fields_dict(cls))