class SomeClass(salt.SaltArgsMixin):
    fun_args: Tuple = attr.ib(
        converter=lambda v: () if v is None else v, default=None)
    fun_kwargs: Dict = attr.ib(
        converter=lambda v: {} if v is None else v, default=None)
    kw: Dict = attr.Factory(dict)
    secure: bool = False

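# A minimal, self-contained sketch (illustrative class and field names) of the
# converter pattern used above: `None` defaults are normalized to empty
# containers at init time, so downstream code never has to guard against None.
# Assumes the class is decorated with `attr.s(auto_attribs=True)`, as the
# annotated `attr.ib` fields above imply.
import attr


@attr.s(auto_attribs=True)
class _ArgsSketch:
    fun_args: tuple = attr.ib(
        converter=lambda v: () if v is None else v, default=None)
    fun_kwargs: dict = attr.ib(
        converter=lambda v: {} if v is None else v, default=None)


assert _ArgsSketch().fun_args == ()
assert _ArgsSketch(fun_kwargs=None).fun_kwargs == {}
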
class CheckISOAuthenticityArgs:
    iso_path: str = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "Path to the SW upgrade single ISO bundle"
            }
        },
        converter=utils.converter_path_resolved)
    sig_file: str = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "Path to the file with ISO signature"
            }
        },
        validator=attr.validators.optional(utils.validator_path_exists),
        converter=utils.converter_path_resolved,
        default=None  # TODO: construct the default sig path based on ISO path
    )
    gpg_pub_key: str = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "(Optional) Path to the custom GPG public key"
            }
        },
        validator=attr.validators.optional(utils.validator_path_exists),
        converter=utils.converter_path_resolved,
        default=None)
    import_pub_key: bool = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': ("(Optional) Specifies whether to import a given GPG "
                         "public key or not")
            }
        },
        default=False)

class SetupOpts(inputs.ParserMixin):
    parser_prefix = 'setup-'

    interactive: bool = attr.ib(
        default=False,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "do an interactive break before the run"
            }
        })
    logdir: Optional[Union[str, Path]] = attr.ib(
        default=None,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "logdir to mount inside the container",
                'metavar': 'PATH'
            }
        },
        converter=utils.converter_path_resolved,
        validator=attr.validators.optional(utils.validator_dir_exists))
    cortx_iso: Optional[Union[str, Path]] = attr.ib(
        default=None,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "path to the CORTX deploy ISO",
                'metavar': 'PATH'
            }
        },
        converter=utils.converter_path_resolved,
        validator=attr.validators.optional(utils.validator_path_exists))

class SomePrvsnrClass2(PrvsnrType):
    attr1: str = attr.ib(default=None)
    attr2: str = attr.ib(default='store')
    attr3: str = attr.ib(init=False, default='')

    def to_args(self):
        return (
            None,
            attr.asdict(self, filter=lambda attr, _: attr.name != 'attr3')
        )

class VagrantBox:
    name = attr.ib()
    path = attr.ib(
        converter=lambda v: v.resolve() if v else None, default=None)

    @path.validator
    def _check_path(self, attribute, value):
        if value and not value.is_file():
            raise ValueError("{} is not a file".format(value))

class ContentFileValidator(PathValidator):
    """
    Implements the basic logic of file content validation.

    Attributes
    ----------
    scheme: Type[FileContentScheme]
        File content scheme used for validation.
    content_type: ContentType
        Content type of the file. `ContentType.YAML` is the default value.
    """

    scheme: Type[FileContentScheme] = attr.ib(
        validator=utils.validator__subclass_of(FileContentScheme))
    content_type: ContentType = attr.ib(
        validator=attr.validators.in_(ContentType),
        default=ContentType.YAML)

    _CONTENT_LOADER = {
        ContentType.YAML: utils.load_yaml,
        ContentType.JSON: utils.load_json,
    }

    def validate(self, path: Path):
        """
        Validates the file content of the provided `path`.

        Parameters
        ----------
        path: Path
            File path for content validation.

        Returns
        -------
        None

        Raises
        ------
        ValidationError
            If validation fails.
        """
        logging.debug(f"File content type: '{self.content_type}'")
        try:
            content = self._CONTENT_LOADER[self.content_type](path)
        except Exception as e:
            raise ValidationError(
                f"Failed to load the content of {path}: {e}") from e

        logging.debug(f"File content: '{content}'")
        try:
            self.scheme.from_args(content)
        except TypeError as e:
            raise ValidationError(
                f"File content validation failed for {path}: {e}") from e
        else:
            logging.info(f"File content validation succeeded for '{path}'")

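# A hedged sketch of the failure path wrapped by ContentFileValidator.validate:
# `scheme.from_args(content)` is assumed to construct the scheme from the
# parsed mapping (roughly `cls(**content)`), so a missing or unknown field
# surfaces as a TypeError and is re-raised as a ValidationError. The `_Scheme`
# below is illustrative, not the project's FileContentScheme.
import attr

_Scheme = attr.make_class("_Scheme", ["NAME", "VERSION"])

try:
    _Scheme(**{"NAME": "CORTX"})  # missing VERSION -> TypeError
except TypeError as e:
    print(f"File content validation failed: {e}")
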
class HashSumValidator(FileValidator):
    """
    Validates that the hash sum of the provided file matches the expected
    hash sum.

    Attributes
    ----------
    hash_sum: Union[str, bytes, bytearray]
        Hexadecimal string or byte-array object with the expected hash-sum
        value of the validated file.
    hash_type: HashType
        Type of the hash sum. See `HashType` for more information.
    """

    hash_sum: Union[str, bytes, bytearray] = attr.ib(
        validator=attr.validators.instance_of((str, bytes, bytearray)),
        converter=lambda x: bytes.fromhex(x) if isinstance(x, str) else x,
        default=None)
    hash_type: HashType = attr.ib(
        validator=attr.validators.in_(HashType),
        default=HashType.MD5,
        converter=lambda x: HashType.MD5 if x is None else HashType(x))

    def validate(self, path: Path):
        """
        Validates that the hash sum of the file referenced by `path` matches
        the `hash_sum` attribute value.

        Parameters
        ----------
        path: Path
            Path to the file whose hash sum will be validated.

        Returns
        -------
        None

        Raises
        ------
        ValidationError
            If validation fails.
        """
        super().validate(path)

        # `hash_obj` is an object returned by the `hashlib` Python standard
        # library module; its digest is compared against the one provided
        # by the caller
        hash_obj = utils.calc_hash(path, self.hash_type)
        if not compare_digest(hash_obj.digest(), self.hash_sum):
            raise ValidationError(
                f"Hash sum of file '{path}': '{hash_obj.hexdigest()}' "
                f"mismatches the provided one '{self.hash_sum.hex()}'")

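# A standalone sketch of the comparison performed by HashSumValidator.validate,
# using only the standard library: hash the file and compare digests with a
# constant-time check. The MD5 choice mirrors the HashType.MD5 default above;
# `check_md5` and its arguments are illustrative names.
import hashlib
from hmac import compare_digest
from pathlib import Path


def check_md5(path: Path, expected_hex: str) -> None:
    expected = bytes.fromhex(expected_hex)
    digest = hashlib.md5(path.read_bytes()).digest()
    if not compare_digest(digest, expected):
        raise ValueError(
            f"Hash sum of file '{path}': '{digest.hex()}' "
            f"mismatches the provided one '{expected_hex}'")
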
class VagrantParsedRow:
    # should be a CSV row for now
    _row = attr.ib()
    ts = attr.ib(init=False, default=None)
    target = attr.ib(init=False, default=None)
    data_type = attr.ib(init=False, default=None)
    data = attr.ib(init=False, default=None)

    def __attrs_post_init__(self):
        row = next(csv.reader([self._row]))
        self.ts, self.target, self.data_type = row[:3]
        self.data = row[3:]

class Packer:
    packerfile = attr.ib(
        converter=lambda v: v.resolve()
    )
    log = attr.ib(default=True)
    err_to_out = attr.ib(default=True)
    _localhost = attr.ib(
        init=False, default=localhost
    )

    @packerfile.validator
    def _check_packerfile(self, attribute, value):
        if not value.is_file():
            raise ValueError(
                "{} is not a file".format(value)
            )
        self.validate()

    def check_output(self, cmd, err_to_out=None):
        res = None
        err_to_out = (
            self.err_to_out if err_to_out is None else err_to_out
        )
        try:
            res = self._localhost.run(
                cmd + (' 2>&1' if err_to_out else '')
            )
            assert res.rc == 0
        finally:
            if res is not None:
                for line in res.stderr.split(os.linesep):
                    logger.debug(line)
                for line in res.stdout.split(os.linesep):
                    logger.debug(line)
        return res.stdout

    def packer(self, command, *args, **kwargs):
        return self.check_output(
            "{}packer {} {} '{}'".format(
                "PACKER_LOG=1 " if self.log else '',
                command,
                ' '.join(args),
                self.packerfile
            ),
            **kwargs
        )

    # TODO use some dynamic way instead
    def build(self, *args):
        return self.packer("build", *args)

    def validate(self, *args):
        return self.packer("validate", *args)

class ProxyCommand:
    """Implements a simple proxy pattern.

    Attributes
    ----------
    _proxy: Any
        Instance (class instance) that handles delegated calls when this
        class does not implement the desired method.
    """

    _proxy: Any = attr.ib()

    def __getattr__(self, item):
        """
        Delegates a method call or attribute lookup to the proxy object.

        Parameters
        ----------
        item: str
            Attribute name.

        Returns
        -------
        Any:
            Method or attribute of the proxy object.
        """
        # NOTE: __getattr__ is called only when `item` is not defined in the
        # current class, so the lookup is delegated to the proxy object
        return self._proxy.__getattribute__(item)

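# A minimal runnable demonstration of the delegation above: attribute lookups
# that miss on the wrapper fall through __getattr__ to the wrapped object.
# `_Wrapper` and the str example are illustrative, not part of the project API.
import attr


@attr.s
class _Wrapper:
    _proxy = attr.ib()

    def __getattr__(self, item):
        return getattr(self._proxy, item)


assert _Wrapper("hello").upper() == "HELLO"
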
def test_ParserFiller_fill_parser_input_checks(mocker):
    parser = argparse.ArgumentParser()
    SC = attr.make_class(
        "SC", {
            "y": attr.ib(
                default=True,
                metadata={
                    METADATA_ARGPARSER: {
                        'action': 'store_bool',
                        'help': 'some help'
                    }
                },
                type=int)
        })

    add_args_m = mocker.patch.object(parser, 'add_argument', autospec=True)

    ParserFiller.fill_parser(SC, parser)

    expected_calls = [
        call('--y', action='store_const', const=True, default=True,
             dest='y', help='enable some help', metavar='INT'),
        call('--noy', action='store_const', const=False, default=False,
             dest='y', help='disable some help', metavar='INT')
    ]
    add_args_m.assert_has_calls(expected_calls)
    assert add_args_m.call_count == len(expected_calls)

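# A standalone argparse sketch of the --y/--noy pair that the 'store_bool'
# action expands to in the test above; it mirrors the expected add_argument
# calls rather than the project's ParserFiller implementation.
import argparse

_parser = argparse.ArgumentParser()
_parser.add_argument('--y', action='store_const', const=True, default=True,
                     dest='y', help='enable some help')
_parser.add_argument('--noy', action='store_const', const=False, default=False,
                     dest='y', help='disable some help')

assert _parser.parse_args([]).y is True
assert _parser.parse_args(['--noy']).y is False
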
class RunArgsPillarExportAttrs:
    export_file: str = attr.ib(
        metadata={
            METADATA_ARGPARSER: {
                'help': "output file to export pillar data to in JSON format"
            }
        },
        default=CONFSTORE_CLUSTER_CONFIG)

def test_inputs_AttrParserArgs_type_default():
    SC = attr.make_class("SC", {"x": attr.ib()})
    attr_parser_type = AttrParserArgs(attr.fields(SC).x).type
    assert type(attr_parser_type) is functools.partial
    assert attr_parser_type.func == AttrParserArgs.value_from_str
    assert attr_parser_type.args == ()
    assert attr_parser_type.keywords == dict(v_type=attr.fields(SC).x.type)

def test_attr_parser_args_action_specified():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                default=None,
                metadata={METADATA_ARGPARSER: dict(action='someaction')})
        })
    assert AttrParserArgs(attr.fields(SC).x).action == 'someaction'

def test_attr_parser_args_kwargs_keys_for_store_const():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                type=int,
                metadata={METADATA_ARGPARSER: dict(action='store_const')})
        })
    assert set(AttrParserArgs(attr.fields(SC).x).kwargs.keys()) == set(
        ('action', 'metavar', 'default', 'help'))

def test_ParserFiller_fill_parser_with_no_metadata_argparser():
    parser = argparse.ArgumentParser()
    SC = attr.make_class("SC", {"y": attr.ib(default=123, type=int)})

    ParserFiller.fill_parser(SC, parser)

    # the attribute has no argparse metadata, so no '--y' option is added
    # and argparse exits with code 2 (usage error)
    with pytest.raises(SystemExit) as excinfo:
        parser.parse_args(['--y', 'some-value'])
    assert excinfo.value.code == 2

class GetSWUpgradeInfoArgs:
    iso_path: str = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "Path to SW upgrade single ISO bundle"
            }
        },
        validator=attr.validators.optional(utils.validator_path_exists),
        converter=utils.converter_path_resolved,
        default=None
    )
    release: str = attr.ib(
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "SW upgrade repository release version"
            }
        },
        default=None
    )

def test_attr_parser_args_kwargs_keys_with_choices():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                type=str,
                default='123',
                metadata={METADATA_ARGPARSER: dict(choices='somechoices')})
        })
    assert set(AttrParserArgs(attr.fields(SC).x).kwargs.keys()) == set(
        ('action', 'metavar', 'default', 'help', 'type', 'choices'))

class CortxISOInfo:
    """
    Result class that aggregates information about the CORTX repository
    and its packages.
    """
    _prvsnr_type_ = True

    packages: dict = attr.ib(validator=attr.validators.instance_of(dict))
    metadata: dict = attr.ib(validator=attr.validators.instance_of(dict))
    exceptions: list = attr.ib(validator=attr.validators.instance_of(list),
                               default=attr.Factory(list))

    def __attrs_post_init__(self):
        # NOTE: for convenience, compatibility information is added to the
        #  `packages` attribute
        key = SWUpgradeInfoFields.VERSION_COMPATIBILITY.value
        for entry in self.metadata.get(ReleaseInfo.REQUIRES.value, list()):
            # NOTE: the entries have the following format:
            #  REQUIRES:
            #    - "CORTX > 2.0.0"
            #    - "cortx-motr > 2.0.0-0"
            pkg_name = re.split(VERSION_DELIMITERS, entry)[0]
            # NOTE: the `compatibility_version` variable has the following
            #  format: "> 2.0.0"
            compatibility_version = entry.replace(pkg_name, '').strip()
            pkg_name = pkg_name.strip()
            # NOTE: normalize the constraint version: add the build number
            #  if it is missing
            compatibility_version = normalize_rpm_version(
                compatibility_version)
            if pkg_name in self.packages:
                self.packages[pkg_name][key] = compatibility_version
            elif pkg_name == CORTX_VERSION:
                self.packages[pkg_name] = {key: compatibility_version}
            else:
                logger.warning('Found a version compatibility constraint '
                               f'for the package "{pkg_name}" that is not '
                               'listed in the CORTX repository')

    def __str__(self):
        return (f"{{'packages': {self.packages}, "
                f"'metadata': {self.metadata}, "
                f"'exceptions': {self.exceptions} }}")

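# A hedged sketch of how a REQUIRES entry such as "cortx-motr > 2.0.0-0" is
# split into a package name and a version constraint. The delimiter regex
# below is an assumption for illustration only; the project defines its own
# VERSION_DELIMITERS constant and a normalize_rpm_version helper.
import re

_DELIMITERS = r"[<>=\s]"  # assumed stand-in for VERSION_DELIMITERS


def _split_requires(entry: str):
    pkg_name = re.split(_DELIMITERS, entry)[0]
    constraint = entry.replace(pkg_name, '').strip()
    return pkg_name.strip(), constraint


assert _split_requires("cortx-motr > 2.0.0-0") == ("cortx-motr", "> 2.0.0-0")
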
class PkgsOpts(inputs.ParserMixin):
    parser_prefix = 'pkgs-'

    version: str = attr.ib(
        default='2.0.0',
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': ("release version (source version)."
                         " Note: ignored for the api package;"
                         " to set the version for that package please edit"
                         " 'api/python/provisioner/__metadata__.py'")
            }
        })
    pkg_version: str = attr.ib(
        default=1,
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': ("package version (release tag),"
                         " should be greater than or equal to 1"),
                'metavar': 'INT'
            },
        },
        converter=int,
        validator=attr.validators.instance_of(int))
    output: Union[str, Path] = attr.ib(
        default='.',
        metadata={
            inputs.METADATA_ARGPARSER: {
                'help': "path to the output directory",
                'metavar': 'DIR'
            }
        },
        converter=utils.converter_path_resolved,
        validator=utils.validator_path_exists)

    def __attrs_post_init__(self):
        if not self.output.is_dir():
            raise ValueError(f"{self.output} is not a directory")
        if self.pkg_version < 1:
            raise ValueError(
                "'pkg_version' should be greater than or equal to 1")

class ReleaseInfoCommonContentScheme(FileContentScheme):
    """
    Common CORTX release info file content scheme.

    Attributes
    ----------
    NAME: str
        Name of the SW upgrade repository. It is the `NAME` field of the
        `RELEASE.INFO` file.
    RELEASE: Optional[str]
        Release of the SW upgrade repository. Can be absent. It is the
        `RELEASE` field of the `RELEASE.INFO` file.
    VERSION: str
        Version number of the SW upgrade repository. It is the `VERSION`
        field of the `RELEASE.INFO` file.
    BUILD: str
        Build number of the SW upgrade repository. It is the `BUILD` field
        of the `RELEASE.INFO` file.
    OS: str
        OS version for which this SW upgrade repo is intended. It is the
        `OS` field of the `RELEASE.INFO` file.
    """
    NAME: str = attr.ib(validator=attr.validators.instance_of(str))
    VERSION: str = attr.ib(
        # the regex is based on the current representation of the `VERSION`
        # field: 3 numbers divided by dots "."
        validator=attr.validators.matches_re(r"^[0-9]+\.[0-9]+\.[0-9]+$"),
        converter=str)
    BUILD: str = attr.ib(
        # the regex is based on the current representation of the `BUILD`
        # number: 1 or more digits
        validator=attr.validators.matches_re(r"^[0-9]+$"),
        converter=str)
    OS: str = attr.ib(validator=attr.validators.instance_of(str))
    RELEASE: Optional[str] = attr.ib(
        # TODO: when the `RELEASE` field is introduced, a proper regex
        #  validation is needed here
        validator=attr.validators.optional(attr.validators.instance_of(str)),
        converter=attr.converters.optional(str),
        default=None)

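# A hedged example of the kind of RELEASE.INFO content this scheme accepts;
# the field values below are illustrative only, not taken from a real bundle,
# and the checks reuse the VERSION/BUILD regexes from the validators above.
import re

_sample_release_info = {
    "NAME": "CORTX",
    "VERSION": "2.0.0",
    "BUILD": "277",
    "OS": "centos-7.9.2009",
    "RELEASE": None,
}

assert re.fullmatch(r"[0-9]+\.[0-9]+\.[0-9]+", _sample_release_info["VERSION"])
assert re.fullmatch(r"[0-9]+", _sample_release_info["BUILD"])
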
def test_ParserFiller_extract_positional_args_default_is_not_NOTHING():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                default=123,
                metadata={METADATA_ARGPARSER: {
                    'help': 'some help'
                }},
                type=int)
        })
    ret = ParserFiller.extract_positional_args(SC, attr.fields_dict(SC))
    assert ret == ([], attr.fields_dict(SC))

def test_attr_parser_args_const_from_metadata_for_optional():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                type=str,
                metadata={
                    METADATA_ARGPARSER: {
                        'const': 'someconst'
                    },
                })
        })
    assert AttrParserArgs(attr.fields(SC).x).const == 'someconst'

def test_inputs_AttrParserArgs_help_from_metadata_for_optional():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                type=str,
                metadata={
                    METADATA_ARGPARSER: {
                        'help': 'some help'
                    },
                })
        })
    assert AttrParserArgs(attr.fields(SC).x).help == 'some help'

class ThirdPartyReleaseInfoContentScheme(ReleaseInfoCommonContentScheme):
    """
    RELEASE.INFO file content scheme for third-party bundles.

    This class is used for `RELEASE.INFO` file content validation.

    Attributes
    ----------
    NAME: str
        Name of the SW upgrade repository. It is the `NAME` field of the
        `RELEASE.INFO` file.
    RELEASE: Optional[str]
        Release of the SW upgrade repository. Can be absent. It is the
        `RELEASE` field of the `RELEASE.INFO` file.
    VERSION: str
        Version number of the SW upgrade repository. It is the `VERSION`
        field of the `RELEASE.INFO` file.
    BUILD: Optional[str]
        Build number of the SW upgrade repository. It is the `BUILD` field
        of the `RELEASE.INFO` file.
    OS: str
        OS version for which this SW upgrade repo is intended. It is the
        `OS` field of the `RELEASE.INFO` file.
    THIRD_PARTY_VERSION: str
        Version of the third-party bundle. It is the `THIRD_PARTY_VERSION`
        field of the `RELEASE.INFO` file.
    THIRD_PARTY_COMPONENTS: dict
        Third-party components provided by this SW upgrade repository.
        It is the `THIRD_PARTY_COMPONENTS` field of the `RELEASE.INFO` file.
    """
    # TODO: validator (the current format looks odd: os-version-build)
    THIRD_PARTY_VERSION: str = attr.ib(converter=str, kw_only=True)
    THIRD_PARTY_COMPONENTS: dict = attr.ib(
        validator=attr.validators.instance_of(dict),
        kw_only=True)
    # FIXME: remove once the RE team fixes upgrade.iso
    BUILD: Optional[str] = attr.ib(
        # the regex is based on the current representation of the `BUILD`
        # number: 1 or more digits
        validator=attr.validators.optional(
            attr.validators.matches_re(r"^[0-9]+$")),
        converter=attr.converters.optional(str),
        default=None)

def test_inputs_AttrParserArgs_metavar_from_metadata_for_optional():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                type=str,
                metadata={
                    METADATA_ARGPARSER: {
                        'metavar': 'SOME-METAVAR'
                    },
                },
                default='123')
        })
    assert AttrParserArgs(attr.fields(SC).x).metavar == 'SOME-METAVAR'

def test_ParserFiller_extract_positional_args_happy_path():
    SC = attr.make_class(
        "SC", {
            "x": attr.ib(
                metadata={
                    METADATA_ARGPARSER: {
                        'action': 'store_bool',
                        'help': 'some help'
                    }
                },
                type=int)
        })
    ret = ParserFiller.extract_positional_args(SC, attr.fields_dict(SC))
    assert ret == ([attr.fields(SC).x], {})

def test_ParserFiller_fill_parser_with_no_action_in_metadata():
    parser = argparse.ArgumentParser()
    SC = attr.make_class(
        "SC", {
            "y": attr.ib(
                default=123,
                metadata={METADATA_ARGPARSER: {
                    'help': 'some help'
                }},
                type=int)
        })
    ParserFiller.fill_parser(SC, parser)
    args = parser.parse_args(['--y', 'some-value'])
    assert args.y == 'some-value'

def attr_ib(key: str = None, **kwargs):
    # look up the baseline `attr.ib` keyword arguments in the spec
    _kwargs = {}
    if key:
        _kwargs = KeyPath(key).value(attrs_spec)
    _kwargs.update(kwargs)

    # a 'cli_spec' entry is relocated under the argparse metadata key
    cli_spec = _kwargs.pop('cli_spec', None)
    if cli_spec:
        _kwargs['metadata'] = _kwargs.pop('metadata', {})
        _kwargs['metadata'][inputs.METADATA_ARGPARSER] = cli_spec

    return attr.ib(**_kwargs)

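# A hedged, runnable sketch of how the spec-driven helper above is meant to be
# used: a spec mapping supplies the baseline attr.ib() keyword arguments and
# 'cli_spec' entries are relocated under the argparse metadata key. The spec
# content, the flat-dict lookup (instead of KeyPath) and the metadata key name
# are illustrative assumptions.
import attr

_METADATA_ARGPARSER = 'argparser'  # assumed stand-in for inputs.METADATA_ARGPARSER

_attrs_spec = {
    'iso_path': {
        'default': None,
        'cli_spec': {'help': "Path to the ISO bundle"},
    }
}


def _attr_ib(key=None, **kwargs):
    _kwargs = dict(_attrs_spec[key]) if key else {}
    _kwargs.update(kwargs)
    cli_spec = _kwargs.pop('cli_spec', None)
    if cli_spec:
        _kwargs.setdefault('metadata', {})[_METADATA_ARGPARSER] = cli_spec
    return attr.ib(**_kwargs)


_SC = attr.make_class("_SC", {'iso_path': _attr_ib('iso_path')})
assert (attr.fields(_SC).iso_path.metadata[_METADATA_ARGPARSER]['help']
        == "Path to the ISO bundle")
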
class ConsulInstallSLS(ConsulSLS):
    sls = 'install'
    state_t = consul.ConsulInstall

    _version: str = attr.ib(init=False, default=None)

    def __attrs_post_init__(self):
        super().__attrs_post_init__()
        self._version = self.state.consul_version or VERSION_LATEST
        self.set_vendored(self.state.vendored)

    def setup_roots(self):
        super().setup_roots()
        self.pillar_set(dict(version=str(self._version)))
