Exemplo n.º 1
0
class CatalogItemConfig:
    """Base configuration entry for a catalog item.

    Validates the common keys (name/type/desc/location) and dispatches to
    the type-specific parser class.
    """

    CONF_NAME = 'name'
    CONF_TYPE = 'type'
    CONF_DESCRIPTION = 'desc'
    CONF_LOCATION = 'location'

    SCHEMA = {
        CONF_NAME: sc.Use(str),
        CONF_TYPE: sc.Use(str),
        sc.Optional(CONF_DESCRIPTION, default=''): sc.Use(str),
        CONF_LOCATION: {
            str: object
        },
        sc.Optional(str): object
    }

    @classmethod
    def item_type_map(cls):
        """Map item type identifiers to their parser classes."""
        return {'text': TextCatalogItemConfig}

    @classmethod
    def from_dict(cls, dct: Dict[str, Any], base_path: str) -> Dataset:
        """Validate *dct* and delegate loading to the type-specific parser.

        :raises UnknownCatalogItemError: when the declared type has no parser.
        """
        validated = sc.Schema(cls.SCHEMA, ignore_extra_keys=True).validate(dct)
        name = validated[cls.CONF_NAME]
        kind = validated[cls.CONF_TYPE]
        parser = cls.item_type_map().get(kind)
        if parser is None:
            raise UnknownCatalogItemError(kind)

        # Each item gets its own sub-directory under base_path.
        return parser.load_from_dict(
            dct, base_path=os.path.join(base_path, name))
Exemplo n.º 2
0
def get_room_dict():
    """Build an OrderedDict mapping room name -> Room from Django settings.

    :raises ValueError: when settings.ROOM_DEFAULTS or settings.ROOMS do
        not match the expected schema.
    """
    defaults_schema = schema.Schema({
        schema.Optional('use_secure_urls', default=False): bool,
        schema.Optional('participant_label_file'): str,
    })

    room_schema = schema.Schema({
        'name': str,
        'display_name': str,
        schema.Optional('use_secure_urls'): bool,
        schema.Optional('participant_label_file'): str,
    })

    room_defaults = getattr(settings, 'ROOM_DEFAULTS', {})
    try:
        room_defaults = defaults_schema.validate(room_defaults)
    except schema.SchemaError as e:
        # from None: the schema traceback adds no value for settings errors.
        raise ValueError('settings.ROOM_DEFAULTS: {}'.format(e)) from None

    room_dict = OrderedDict()
    for raw_room in getattr(settings, 'ROOMS', []):
        merged = augment_room(raw_room, room_defaults)
        try:
            merged = room_schema.validate(merged)
        except schema.SchemaError as e:
            raise ValueError('settings.ROOMS: {}'.format(e)) from None
        room = Room(**merged)
        room_dict[room.name] = room
    return room_dict
Exemplo n.º 3
0
def _check_format(schema_dic):
    """
    Check that a user-provided mspass.yaml schema definition is valid.

    The top level must contain ``Database`` and ``Metadata`` dict sections;
    their collection entries are then validated with the helper predicates
    referenced below.

    :param schema_dic: the dictionary that a yaml file is dumped
    :type schema_dic: dict
    :raises schema.SchemaError: if the structure is invalid.  Note: despite
        earlier documentation, this function does NOT return a bool — it
        returns ``None`` on success and raises on failure.
    """
    # Make sure Database and Metadata exist
    schema.Schema({'Database': dict, 'Metadata': dict}).validate(schema_dic)
    collection_name_list = schema_dic['Database'].keys()

    # A Database collection may declare 'default', 'data_type' and a 'base'
    # collection (which must itself be a known collection name); its
    # 'schema' dict is checked by _is_valid_database_schema_definition.
    database_collection_schema = schema.Schema({
        schema.Optional('default'):str,
        schema.Optional('data_type'):str,
        schema.Optional('base'):schema.And(str, lambda s: s in collection_name_list),
        'schema':schema.And(dict, lambda dic: _is_valid_database_schema_definition(dic, collection_name_list))
    }, ignore_extra_keys=True)

    # Metadata collections only need a valid 'schema' dict.
    metadata_collection_schema = schema.Schema({
        'schema':schema.And(dict, lambda dic: _is_valid_metadata_schema_definition(dic, collection_name_list))
    }, ignore_extra_keys=True)

    yaml_schema = schema.Schema({
        'Database': schema.And(dict, schema.And(lambda dic: _is_valid_schema(dic, database_collection_schema),
                                               lambda dic: _check_min_default_key(dic))),
        'Metadata': schema.And(dict, lambda dic: _is_valid_schema(dic, metadata_collection_schema)),
    }, ignore_extra_keys=True)

    yaml_schema.validate(schema_dic)
Exemplo n.º 4
0
class TextCatalogItemConfig:
    """Catalog item configuration for text datasets.

    Extends the base schema with the names of the target and text columns.
    """

    CONF_TEXT_TARGET = 'target_column'
    CONF_TEXT_TEXT = 'text_column'

    SCHEMA = {
        **CatalogItemConfig.SCHEMA,
        sc.Optional(CONF_TEXT_TARGET, default='target'): sc.Use(str),
        sc.Optional(CONF_TEXT_TEXT, default='text'): sc.Use(str)
    }

    @classmethod
    def load_from_dict(cls, dct: Dict[str, Any],
                       base_path: str) -> TextDataset:
        """Validate *dct*, load its data and wrap it in a TextDataset."""
        cfg = sc.Schema(cls.SCHEMA).validate(dct)
        frame = LocationConfig.from_dict(
            cfg[CatalogItemConfig.CONF_LOCATION], base_path=base_path)
        target_col = cfg[cls.CONF_TEXT_TARGET]
        text_col = cfg[cls.CONF_TEXT_TEXT]
        # Force both columns to string dtype regardless of how they loaded.
        frame[target_col] = frame[target_col].astype(str)
        frame[text_col] = frame[text_col].astype(str)
        return TextDataset(
            data=frame,
            description=cfg[CatalogItemConfig.CONF_DESCRIPTION],
            text_column=text_col,
            target_column=target_col)
Exemplo n.º 5
0
def validate_check_config(check_config):
    """Validate a check configuration JSON document.

    :param check_config: JSON string holding cluster_checks/node_checks.
    :returns: the parsed configuration dict.
    :raises AssertionError: when the document violates the schema or the
        prestart/poststart cross-reference rules.
    """

    class PrettyReprAnd(schema.And):
        # schema embeds repr(validator) in failure messages; show the
        # human-readable error text instead of the And(...) chain.

        def __repr__(self):
            return self._error

    check_name = PrettyReprAnd(
        str,
        lambda val: len(val) > 0,
        lambda val: not any(w in val for w in string.whitespace),
        error='Check name must be a nonzero length string with no whitespace')

    timeout_units = ['ns', 'us', 'µs', 'ms', 's', 'm', 'h']
    # Raw string: \d and \. are regex escapes, not Python string escapes.
    # (The previous non-raw literal relied on deprecated escape handling.)
    timeout = schema.Regex(
        r'^\d+(\.\d+)?({})$'.format('|'.join(timeout_units)),
        error='Timeout must be a string containing an integer or float followed by a unit: {}'.format(
            ', '.join(timeout_units)))

    check_config_schema = schema.Schema({
        schema.Optional('cluster_checks'): {
            check_name: {
                'description': str,
                'cmd': [str],
                'timeout': timeout,
            },
        },
        schema.Optional('node_checks'): {
            'checks': {
                check_name: {
                    'description': str,
                    'cmd': [str],
                    'timeout': timeout,
                    schema.Optional('roles'): schema.Schema(
                        ['master', 'agent'],
                        error='roles must be a list containing master or agent or both',
                    ),
                },
            },
            schema.Optional('prestart'): [check_name],
            schema.Optional('poststart'): [check_name],
        },
    })

    check_config_obj = validate_json_dictionary(check_config)
    try:
        check_config_schema.validate(check_config_obj)
    except schema.SchemaError as exc:
        raise AssertionError(str(exc).replace('\n', ' ')) from exc

    if 'node_checks' in check_config_obj.keys():
        node_checks = check_config_obj['node_checks']
        # Cross-reference rules: every defined node check must be scheduled
        # in prestart and/or poststart, and nothing unknown may be scheduled.
        assert any(k in node_checks.keys() for k in ['prestart', 'poststart']), (
            'At least one of prestart or poststart must be defined in node_checks')
        assert node_checks['checks'].keys() == set(
            node_checks.get('prestart', []) + node_checks.get('poststart', [])), (
            'All node checks must be referenced in either prestart or poststart, or both')

    return check_config_obj
Exemplo n.º 6
0
    def load_config(self):
        """Parse the manifest config file at self.cfg_path and return it."""
        cfg_schema = schema.Schema({
            "branch": str,
            "url": str,
            schema.Optional("tag"): str,
            schema.Optional("groups"): [str],
        })
        return tsrc.config.parse_config_file(self.cfg_path, cfg_schema)
Exemplo n.º 7
0
 def _validate_config(self):
     """Check self._config against the expected layout.

     :raises InvalidConfig: wrapping the underlying schema.SchemaError.
     """
     expected = schema.Schema({
         'KataGRepo': {
             'User': str,
             'Repo': str,
         },
         'HasTemplateAtRoot': {schema.Optional(str): bool},
         'Auth': {
             'SkipNotLoggedInWarning': bool,
             schema.Optional('Token'): str,
         },
     })
     try:
         expected.validate(self._config)
     except schema.SchemaError as error:
         raise InvalidConfig(error)
Exemplo n.º 8
0
def validate_repo(data: Any) -> None:
    """Validate a single repo config entry.

    :raises schema.SchemaError: on structural errors or when the entry does
        not declare exactly one source (url xor remotes).
    """
    repo_schema = schema.Schema({
        "dest": str,
        schema.Optional("branch"): str,
        schema.Optional("copy"): [{"file": str, schema.Optional("dest"): str}],
        schema.Optional("symlink"): [{"source": str, "target": str}],
        schema.Optional("sha1"): str,
        schema.Optional("tag"): str,
        schema.Optional("ignore_submodules"): bool,
        schema.Optional("remotes"): [{"name": str, "url": str}],
        schema.Optional("url"): str,
    })
    repo_schema.validate(data)

    # A repo needs exactly one source: a plain url or a list of remotes.
    has_url = bool(data.get("url"))
    has_remotes = bool(data.get("remotes"))
    if has_url and has_remotes:
        raise schema.SchemaError(
            "Repo config cannot contain both an url and a list of remotes")
    if not (has_url or has_remotes):
        raise schema.SchemaError(
            "Repo config should contain either a url or a non-empty list of remotes"
        )
Exemplo n.º 9
0
def test_opt_up():
    """opt_up should promote nested defaults so that validating an empty
    dict yields the fully-defaulted nested structure."""
    nested = schema.Schema(opt_up({
        schema.Optional('foo'): {
            schema.Optional('foo'): {
                schema.Optional('foo'): {
                    schema.Optional('bar', default='baz'): str,
                },
                schema.Optional('bar', default='baz'): str,
            },
        }
    }))
    expected = {'foo': {'foo': {'foo': {'bar': 'baz'}, 'bar': 'baz'}}}
    assert nested.validate({}) == expected
Exemplo n.º 10
0
class FileExistsValidator(Whiteprint):
    """Validates that cfg["path"] exists as a regular file, optionally
    checking its owner, group and (octal) mode."""

    cfg_schema = {
        "path": str,
        schema.Optional("owner"): str,
        schema.Optional("group"): str,
        schema.Optional("mode"): int,
    }

    def _execute(self, mode: str) -> None:
        # Pure validator: nothing to execute.
        return None

    def _validate(self, mode: str) -> Optional[str]:
        """Return an error string for the first failed expectation, or None
        when everything matches (or the mode requires no check)."""
        quoted_path = shlex.quote(self.cfg["path"])
        # BUG FIX: was `mode in "install"`, a substring test that also
        # matched e.g. "stall" and "" — and never matched "update".  Check
        # the same mode set FileFromPath._execute installs for.
        if mode in {"install", "update"}:
            res = self.exec(
                'stat -c "%F %U %G %a" {!r}'.format(quoted_path), error_ok=True
            )
            if res.exit_status == 1:
                return "%r does not exist." % quoted_path
            # Use rsplit because %F can return "directory" or a multi-word like
            # "regular empty file"
            file_type, owner, group, file_mode_raw = (
                res.stdout.decode("utf-8").strip().rsplit(maxsplit=3)
            )
            file_mode = int(file_mode_raw, base=8)  # stat prints octal
            if file_type != "regular file":
                return "%r is not a file." % quoted_path
            elif self.cfg.get("owner") is not None and owner != self.cfg["owner"]:
                return "expected %r to have owner %r, got %r" % (
                    quoted_path,
                    self.cfg["owner"],
                    owner,
                )
            elif self.cfg.get("group") is not None and group != self.cfg["group"]:
                return "expected %r to have group %r, got %r" % (
                    quoted_path,
                    self.cfg["group"],
                    group,
                )
            elif self.cfg.get("mode") is not None and file_mode != self.cfg["mode"]:
                return "expected {!r} to have mode {:o}, got {:o}.".format(
                    quoted_path, self.cfg["mode"], file_mode
                )
            else:
                return None
        elif mode == "clean":
            res = self.exec("stat {!r}".format(quoted_path), error_ok=True)
            if res.exit_status != 1:
                return "expected %r to not exist." % quoted_path
            else:
                return None
        else:
            return None
Exemplo n.º 11
0
def parse_tsrc_config(config_path: Path = None,
                      roundtrip: bool = False) -> Config:
    """Parse the tsrc config, defaulting to the standard config location."""
    token_schema = {"token": str}
    tsrc_schema = schema.Schema({
        "auth": {
            schema.Optional("gitlab"): token_schema,
            schema.Optional("github"): token_schema,
        }
    })
    if config_path is None:
        config_path = get_tsrc_config_path()
    return parse_config(config_path, tsrc_schema, roundtrip=roundtrip)
Exemplo n.º 12
0
class FileFromPath(Whiteprint):
    """
    Creates a file from a local resource template.
    """

    # src_path: local template; dest_path: remote target; "cfg" is the
    # optional template context forwarded to scp_up_template.
    cfg_schema = {
        "src_path": str,
        schema.Optional("cfg"): object,
        "dest_path": str,
        schema.Optional("owner"): str,
        schema.Optional("group"): str,
        schema.Optional("mode"): int,
        "remove_on_clean": bool,
    }

    default_cfg = {
        "remove_on_clean": True,
    }

    @classmethod
    def _compute_prefabs_tail(cls, cfg: Config) -> List[Prefab]:
        """Append a FileExistsValidator prefab mirroring this file's cfg."""
        prefab_cfg = {"path": cfg["dest_path"]}
        # NOTE(review): truthiness checks skip falsy values (e.g. mode 0 or
        # empty owner string) — presumably intentional; confirm.
        if cfg.get("owner"):
            prefab_cfg["owner"] = cfg["owner"]
        if cfg.get("group"):
            prefab_cfg["group"] = cfg["group"]
        if cfg.get("mode"):
            prefab_cfg["mode"] = cfg["mode"]
        return [Prefab(FileExistsValidator, prefab_cfg)]

    def _execute(self, mode: str) -> None:
        """Upload the rendered template and apply ownership on install or
        update; on clean, remove the file when remove_on_clean is set."""
        quoted_path = shlex.quote(self.cfg["dest_path"])
        if mode in {"install", "update"}:
            self.scp_up_template(
                self.cfg["src_path"],
                self.cfg["dest_path"],
                self.cfg.get("mode"),
                self.cfg.get("cfg"),
            )
            # chown/chgrp only when explicitly configured.
            if self.cfg.get("owner") is not None:
                self.exec("chown {} {}".format(self.cfg["owner"], quoted_path))
            if self.cfg.get("group") is not None:
                self.exec("chgrp {} {}".format(self.cfg["group"], quoted_path))
        elif mode == "clean":
            if self.cfg["remove_on_clean"]:
                self.exec("rm -rf {}".format(quoted_path))

    def _validate(self, mode: str) -> Optional[str]:
        # Validation is delegated to the FileExistsValidator prefab.
        return None
Exemplo n.º 13
0
def validate(data, set_dir):
    """Validate the test-set *data* and convert it into Fixture/Step/Test
    objects; relative input paths are resolved against *set_dir*."""

    def is_file(path):
        return path.is_file()

    def absolute_path(path):
        # Expand ~ first, then resolve relative paths against the set dir.
        resolved = Path(os.path.expanduser(path))
        if not resolved.is_absolute():
            resolved = (set_dir / path).resolve()
        return resolved

    nonempty_str = schema.And(str, len)

    fixture = schema.Schema(schema.And(
        {
            'enter': nonempty_str,
            'exit': nonempty_str,
        },
        schema.Use(lambda d: Fixture(**d))))

    step = schema.Schema(schema.And(
        {
            # Const keeps the original string intact while Use splits it
            # into an argv list.
            'command': schema.And(schema.Const(nonempty_str),
                                  schema.Use(shlex.split)),
            schema.Optional('input', default=None):
                schema.And(schema.Use(absolute_path), is_file),
        },
        schema.Use(lambda d: Step(**d))))

    test = schema.Schema(schema.And(
        {
            schema.Optional('tags', default=None): [str],
            schema.Optional('condition', default=None): str,
            schema.Optional('fixture', default=None): str,
            'steps': [step],
        },
        schema.Use(lambda d: Test(**d))))

    sch = schema.Schema({
        'fixtures': schema.Schema({nonempty_str: fixture}),
        'tests': schema.Schema({nonempty_str: test}),
    })
    return sch.validate(data)
Exemplo n.º 14
0
def get_pool_config():
    """Return the pool configuration schema, keyed by environment."""
    # Non-negative integer (zero is allowed).
    nonneg_int = schema.And(int, lambda value: value >= 0)
    pool_conf = schema_builder.opt_up({
        opt('instance_size'): schema.And(int, lambda n: 0 < n <= 64),
        opt('instances'): int,
        opt('instance_group'): schema.Or('Backend', 'Common'),
        opt('one_per_host', default=True): bool,
        opt('use_canary', default=True): bool,
        opt('hostname'): str,
        opt('vips'): schema_builder.none_or(schema.Use(_parse_vip)),
        opt('health_check'): {
            opt('grace_period_seconds'): nonneg_int,
            opt('interval_seconds'): nonneg_int,
            opt('timeout_seconds'): nonneg_int,
            opt('max_consecutive_failures'): nonneg_int,
            opt('command', default=DEFAULT_HEALTH_CMD): str,
        },
        opt('haproxy_group', default='internal'):
            schema.Or('internal', 'external'),
    })
    return {
        schema.Optional('default'): pool_conf,
        # Per-environment section maps an instance-id
        # (e.g. /backends/urlshortener) to a pool config.
        schema.Or(*ENVIRONMENTS): {
            str: schema_builder.nullable_dict(pool_conf),
        }
    }
Exemplo n.º 15
0
    def create(self, objective_data, by_user):
        """Create a new Objective from the given data

        :param objective_data: is a dictionary of data used to populate the
                               initial Objective.  It must match the schema
                               defined within.
        :param by_user: the `User` who is creating the `Objective`.
        :returns: the newly persisted `Objective`.
        """

        # On creation 'id' must be absent (None); subject_id may be None or
        # anything coercible to int.
        creation_schema = merge(self._base_schema, {
            s.Optional('id'): None,
            'subject_id': s.Or(None, s.Use(int)),
        })

        o = s.Schema(creation_schema).validate(objective_data)
        self._validate_topic(o['topic_id'], o['subject_id'])
        self._validate_name(o['name'])

        # Prerequisites are resolved/authorized against the creating user.
        prerequisites = self._validate_prerequisites(o['prerequisites'], by_user)

        # Stamp audit fields before persisting.
        now = datetime.utcnow()
        o['prerequisites'] = prerequisites
        o['created_by_id'] = by_user.id
        o['last_updated'] = now
        o['time_created'] = now
        objective = Objective(**o)
        db.session.add(objective)
        db.session.commit()
        return objective
Exemplo n.º 16
0
 def _transform(d):
     """Return a copy of *d* where every nested dict is transformed the same
     way and extra string keys are permitted via Optional(str)."""
     transformed = d.copy()
     for key in transformed:
         value = transformed[key]
         if isinstance(value, dict):
             transformed[key] = _transform(value)
     # Allow (and ignore the values of) any additional string keys.
     transformed[schema.Optional(str)] = object
     return transformed
Exemplo n.º 17
0
 def get_token(self, password):
     """Create a new API token via POST /authorizations and store it.

     :param password: account password used for HTTP basic auth.
     :raises errors.RequestFailed: on a non-2xx response.
     :raises errors.UnexpectedResponse: when the body is not JSON or does
         not match the expected token schema.
     """
     response = requests.post(
         self.endpoint + '/authorizations',
         auth=(self.username, password),
         headers=self.default_headers,
         data=json.dumps({
             'scopes': ['repo'],
             'note': 'git-wat',
         })
     )
     # Treat anything outside 2xx as failure.
     if response.status_code < 200 or response.status_code >= 300:
         raise errors.RequestFailed(response)
     try:
         result = response.json()
         # NOTE(review): `basestring` is Python 2 only — presumably this
         # runs on Python 2 or aliases it elsewhere; confirm.
         s.Schema({
             'hashed_token': s.And(
                 basestring,
                 lambda t: re.match(r'^[0-9a-fA-F]+$', t)
             ),
             s.Optional(basestring): object,
         }).validate(result)
         self._set_token(result['hashed_token'])
     except ValueError:
         raise errors.UnexpectedResponse(response, "Body is not JSON")
     except s.SchemaError as e:
         raise errors.UnexpectedResponse(response, str(e))
Exemplo n.º 18
0
def _build_schema():
    """Construct the full site-config schema, including plugin sections."""
    config_schema = {
        'pip': {'extra_pypi_urls': [str]},
        'services': {str: object},
        'service_groups': {str: object},
        'sk_custom': {'host_port_map': {str: [str]}},
    }
    # Each registered site-config plugin contributes an optional section
    # keyed by its plugin name.
    for plugin_builder in plugins._SITE_CONFIG.collect().values():
        section_key = schema.Optional(plugin_builder.sky_plugin.name)
        config_schema[section_key] = plugin_builder()
    # ignore_extra_keys allows for site-configs to be forwards compatible
    # as long as current keys aren't deleted or removed, new data
    # can safely be added without breaking the existing sky deployments
    return schema.Schema(config_schema, ignore_extra_keys=True)
Exemplo n.º 19
0
Arquivo: url.py Projeto: jkloetzke/bob
class UrlAudit(ScmAudit):
    """Audit record for URL-fetched sources: directory, sha1 file digest
    and (since Bob 0.16) the originating URL."""

    SCHEMA = schema.Schema({
        'type': 'url',
        'dir': str,
        'digest': {
            'algorithm': 'sha1',
            'value': str
        },
        schema.Optional('url'): str,  # Added in Bob 0.16
    })

    def _scanDir(self, workspace, dir, extra):
        """Record the directory, the hex digest of its file and the URL."""
        self.__dir = dir
        self.__hash = asHexStr(hashFile(os.path.join(workspace, dir)))
        self.__url = extra.get("url")

    def _load(self, data):
        """Restore state from a previously dumped dict."""
        self.__dir = data["dir"]
        self.__hash = data["digest"]["value"]
        self.__url = data.get("url")

    def dump(self):
        """Serialize the audit state; 'url' is emitted only when known."""
        result = {
            "type": "url",
            "dir": self.__dir,
            "digest": {
                "algorithm": "sha1",
                "value": self.__hash,
            },
        }
        if self.__url is not None:
            result["url"] = self.__url
        return result
Exemplo n.º 20
0
class FlatpakRepositoriesPlugin(_AbstractFlatpakPlugin):
    """Registers the configured flatpak remotes (user or system scope)."""

    key = 'flatpak-repositories'
    schema = [{
        'name': str,
        'location': str,
        schema.Optional('target'): schema.Or('user', 'system'),
    }]

    def perform(self):
        """Ensure flatpak is available, then add each repository remote."""
        if not self._check_is_flatpak_installed():
            self._install_flatpak()
        assert self._check_is_flatpak_installed()

        for repo in self.config:
            target = repo['target'] if 'target' in repo else 'system'

            cmd = ['flatpak', 'remote-add', '--if-not-exists']
            if target == 'user':
                cmd.append('--user')
            elif target == 'system':
                cmd.append('--system')
            cmd.extend([repo['name'], repo['location']])

            # System-wide remotes require elevated privileges.
            if target == 'system':
                self.run_command_sudo(*cmd)
            else:
                self.run_command(*cmd)
Exemplo n.º 21
0
def load(manifest_path: Path) -> Manifest:
    """Parse a manifest file and return the populated Manifest."""
    manifest_schema = schema.Schema(
        {
            "repos": [schema.Use(validate_repo)],
            schema.Optional("gitlab"): {"url": str},
            schema.Optional("groups"): {
                str: {"repos": [str], schema.Optional("includes"): [str]}
            },
        }
    )
    parsed = tsrc.parse_config(manifest_path, manifest_schema)
    parsed = ManifestConfig(parsed)  # type: ignore
    manifest = Manifest()
    manifest.load(cast(ManifestConfig, parsed))
    return manifest
Exemplo n.º 22
0
def validate_basic_schema(config: Any) -> None:
    """First pass of validation, using schema.

    Asserts that we won't get KeyError or invalid types when building the
    initial Config instance.
    """
    def check_regex(regex: str) -> str:
        # re.compile raises on an invalid pattern (VERBOSE mode).
        re.compile(regex, re.VERBOSE)
        return regex

    file_schema = schema.Schema({
        "src": str,
        schema.Optional("search"): str,
        schema.Optional("version_template"): str,
    })
    hook_schema = schema.Schema({"name": str, "cmd": str})

    tbump_schema = schema.Schema({
        "version": {
            "current": str,
            "regex": schema.Use(check_regex),
        },
        "git": {
            "message_template": str,
            "tag_template": str,
        },
        "file": [file_schema],
        schema.Optional("hook"): [hook_schema],  # retro-compat
        schema.Optional("before_push"): [hook_schema],  # retro-compat
        schema.Optional("before_commit"): [hook_schema],
        schema.Optional("after_push"): [hook_schema],
        schema.Optional("github_url"): str,
    })
    tbump_schema.validate(config)
Exemplo n.º 23
0
def main() -> None:
    """Parse and validate docopt CLI args, then generate the input CSV."""
    existing_path = schema.Use(pathlib.Path, os.path.exists)
    to_float = schema.Use(float)
    to_int = schema.Use(int)
    args_schema = schema.Schema({
        schema.Optional("<input_path>"): schema.Or(None, existing_path),
        "<output_path>": existing_path,
        schema.Optional("--num_records"): schema.Or(None, to_int),
        "--opportunity_rate": to_float,
        "--test_rate": to_float,
        "--purchase_rate": to_float,
        "--incrementality_rate": to_float,
        "--min_ts": to_int,
        "--max_ts": to_int,
        "--num_conversions": to_int,
        "--md5_id": bool,
        "--help": bool,
        schema.Optional("--from_header"): schema.Or(None, schema.Use(str)),
    })
    args = args_schema.validate(docopt.docopt(__doc__))

    # verify input source arguments
    has_path = args.get("<input_path>") is not None
    has_header = args.get("--from_header") is not None
    if not has_path and not has_header:
        raise RuntimeError(
            "Missing input source, please supply either <input_path> or --from_header option"
        )

    # --from_header alone cannot determine the record count.
    if not has_path and has_header and args.get("--num_records") is None:
        raise RuntimeError(
            "Missing argument, please specify --num_records with --from_header option"
        )

    _make_input_csv(args)
Exemplo n.º 24
0
def load(manifest_path):
    """Parse a manifest config file and return a populated Manifest."""
    copy_schema = {"src": str, schema.Optional("dest"): str}
    repo_schema = {
        "src": str,
        "url": str,
        schema.Optional("branch"): str,
        schema.Optional("copy"): [copy_schema],
        schema.Optional("fixed_ref"): str,
    }
    manifest_schema = schema.Schema({
        "repos": [repo_schema],
        schema.Optional("gitlab"): {"url": str},
        schema.Optional("groups"): {
            str: {
                "repos": [str],
                schema.Optional("includes"): [str],
            }
        },
    })
    parsed = tsrc.config.parse_config_file(manifest_path, manifest_schema)
    manifest = Manifest()
    manifest.load(parsed)
    return manifest
Exemplo n.º 25
0
def load(manifest_path: Path) -> Manifest:
    """Parse the manifest file and build a Manifest via apply_config."""
    server_schema = {"url": str}
    # Note: gitlab and github_enterprise keys are ignored; they are kept
    # here only for backward compatibility reasons.
    manifest_schema = schema.Schema(
        {
            "repos": [schema.Use(validate_repo)],
            schema.Optional("gitlab"): server_schema,
            schema.Optional("github_enterprise"): server_schema,
            schema.Optional("groups"): {
                str: {"repos": [str], schema.Optional("includes"): [str]}
            },
        }
    )
    parsed = tsrc.parse_config(manifest_path, manifest_schema)
    manifest = Manifest()
    manifest.apply_config(parsed)
    return manifest
Exemplo n.º 26
0
def instrumented_recipes() -> Dict[str, InstrumentationLevel]:
    """Probes for existence of an "instrumentation.toml" drop-in file at the
    root of the `repo` source tree, parses this file and return a
    :py:data:`dict` of all the recipe identifiers (as keys) requested to be
    instrumented with their instrumentation level as value.

    :raise InstrumentationSpecificationError: in case of an incoherence in the
        ``instrumentation.toml`` file

    """

    instru_filepath = os.path.join(repo_root_path(), "instrumentation.toml")

    if not os.path.exists(instru_filepath):
        return dict()

    try:
        with open(instru_filepath, "r") as instru_fp:
            instru: Dict[str, Any] = toml.load(instru_fp)
    except Exception:
        # Narrowed from a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt; this covers I/O and TOML errors.
        raise InstrumentationSpecificationError(line(
            """Cannot open or parse as TOML the "instrumentation.toml" file
            placed at the root of the repo source tree."""))

    # One optional list of recipe identifiers per instrumentation level,
    # each defaulting to an empty list.
    instru_file_schema = schema.Schema({
        schema.Optional(level.name.lower(), default=[]): [
            schema.Regex(RECIPE_IDENTIFIER_RE.pattern)
        ] for level in InstrumentationLevel
    })

    try:
        instru = instru_file_schema.validate(instru)
    except schema.SchemaError as exc:
        raise InstrumentationSpecificationError(line(
            """The "instrumentation.toml" file has an unexpected data
            structure. Reason: {!s}""").format(exc))

    # Every requested recipe must exist as a product with a recipe.toml.
    for level in InstrumentationLevel:
        for recipe in instru[level.name.lower()]:
            recipe_config_path = os.path.join(repo_root_path(), "products",
                                              recipe, "recipe.toml")
            if not os.path.exists(recipe_config_path):
                raise InstrumentationSpecificationError(line(
                    """The recipe {!r} is not a valid recipe or has no
                    configuration file in the products
                    folder.""").format(recipe))

    # A recipe may appear in at most one instrumentation level.
    recipes: Dict[str, InstrumentationLevel] = dict()
    for level in InstrumentationLevel:
        for recipe_id in instru[level.name.lower()]:
            if recipe_id in recipes:
                raise InstrumentationSpecificationError(line(
                    """The recipe {!r} is specified more than once in the
                    "instrumentation.toml" file.""").format(recipe_id))
            recipes[recipe_id] = level

    return recipes
Exemplo n.º 27
0
Arquivo: git.py Projeto: ptmcg/bob
class GitAudit(ScmAudit):
    """Audit record for a git checkout: remotes, HEAD commit, describe
    output and dirty state."""

    SCHEMA = schema.Schema({
        'type': 'git',
        'dir': str,
        'remotes': {
            schema.Optional(str): str
        },
        'commit': str,
        'description': str,
        'dirty': bool
    })

    def _scanDir(self, workspace, dir):
        """Collect audit state from the git repository at workspace/dir.

        :raises BuildError: when git fails or cannot be invoked.
        """
        self.__dir = dir
        dir = os.path.join(workspace, dir)
        try:
            # "git remote -v" lists each remote twice (fetch/push); keep the
            # fetch entries and strip the trailing " (fetch)" (8 chars).
            remotes = subprocess.check_output(
                ["git", "remote", "-v"], cwd=dir,
                universal_newlines=True).split("\n")
            remotes = (r[:-8].split("\t") for r in remotes
                       if r.endswith("(fetch)"))
            self.__remotes = {remote: url for (remote, url) in remotes}

            self.__commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"], cwd=dir,
                universal_newlines=True).strip()
            self.__description = subprocess.check_output(
                ["git", "describe", "--always", "--dirty"],
                cwd=dir,
                universal_newlines=True).strip()
            # Non-zero exit from diff-index means uncommitted changes.
            self.__dirty = subprocess.call(
                ["git", "diff-index", "--quiet", "HEAD", "--"], cwd=dir) != 0
        except subprocess.CalledProcessError as e:
            raise BuildError("Git audit failed: " + str(e))
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))

    def _load(self, data):
        """Restore audit state from a dict produced by dump()."""
        self.__dir = data["dir"]
        self.__remotes = data["remotes"]
        self.__commit = data["commit"]
        self.__description = data["description"]
        self.__dirty = data["dirty"]

    def dump(self):
        """Serialize the audit state to a SCHEMA-conforming dict."""
        return {
            "type": "git",
            "dir": self.__dir,
            "remotes": self.__remotes,
            "commit": self.__commit,
            "description": self.__description,
            "dirty": self.__dirty,
        }

    def getStatusLine(self):
        """Return the human-readable `git describe` string."""
        return self.__description
Exemplo n.º 28
0
class RecipeBundleFeature(RecipeFeature):
    """Recipe feature for bundling (*e.g.* making a tar archive or a squashfs
    image from any previous result of another recipe) whole or parts of the
    final recipe target."""

    NAME = "bundle"

    FEATURED_ATTRIBUTES = {"bundle"}

    # Validates the "bundle" section of the recipe config: an SDK recipe
    # identifier, an optional env var mapping, and a non-empty step list.
    SCHEMA = schema.Schema({
        "bundle": {
            "sdk": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "steps": schema.And([str], len),
        },
        str: object,  # do not consider other keys
    })

    def bundle(self) -> None:
        """Run the configured bundle steps inside the SDK's container
        session, writing results under <recipe out>/bundle."""
        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["bundle"]["sdk"]),
                      "sdk")

        action_out_subpath = os.path.join(self.recipe.out_subpath, "bundle")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Always clear out the previous image result:
        if os.path.exists(action_out_path):
            debug("clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory
        os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        # Env values may contain placeholders that are expanded in the SDK
        # context before the session starts.
        with sdk.session(action_name="bundle",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["bundle"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=False) as sess:
            for cmd in self.recipe.config["bundle"]["steps"]:
                info("{!r} bundles recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))
Exemplo n.º 29
0
class SnapPackagesPlugin(plugins.AbstractPlugin):
    """Installs snap packages, given either as plain names or as dicts with
    install options (channel, classic, devmode, jailmode, dangerous)."""

    key = 'snap-packages'
    schema = [
        str,
        {
            'package': str,
            schema.Optional('channel'): str,
            schema.Optional('classic'): bool,
            schema.Optional('devmode'): bool,
            schema.Optional('jailmode'): bool,
            schema.Optional('dangerous'): bool
        },
    ]

    def perform(self):
        """Install every configured package via `snap install`."""
        for entry in self.config:
            # isinstance (not type(...) == dict) because the config may be
            # any dict subclass — currently CommentedMap from ruamel.yaml,
            # but that may change in the future.
            if isinstance(entry, dict):
                name = entry['package']
                if name.endswith('.snap'):
                    # Local .snap files may use ~ or relative paths.
                    name = self._expand_path(name)
                cmd = ['snap', 'install']
                if 'channel' in entry:
                    cmd += ['--channel', entry['channel']]
                # Boolean flags translate directly to --<flag> options.
                for flag in ('classic', 'devmode', 'jailmode', 'dangerous'):
                    if entry.get(flag):
                        cmd.append('--' + flag)
                cmd.append(name)
                self.run_command_sudo(*cmd)
            else:
                name = entry
                if name.endswith('.snap'):
                    name = self._expand_path(name)
                self.run_command_sudo('snap', 'install', name)
Exemplo n.º 30
0
 def _build_all_cols(data, table, use_expr=False):
     """Build a schema dict with one Optional entry per table column.

     Mutates *data* in-place: None values in string-typed columns become ""
     because the schema library breaks on values expected to be strings
     which are None.
     """
     def _is_expr(obj):
         return isinstance(obj, (list, _BinaryExpression))

     all_cols = {}
     for col in table.columns:
         if (col.name in data and col.type.python_type == str
                 and data[col.name] is None):
             data[col.name] = ""
         # Validate by Python type normally, or by expression check when
         # use_expr is requested.
         all_cols[schema.Optional(col.name)] = (
             _is_expr if use_expr else col.type.python_type)
     return all_cols