Пример #1
0
class RecipeBundleFeature(RecipeFeature):
    """Recipe feature for bundling (*e.g.* making a tar archive or a squashfs
    image from any previous result of another recipe) whole or parts of the
    final recipe target."""

    # Feature name as referenced from recipe configuration files:
    NAME = "bundle"

    # Attributes this feature contributes to the recipe object:
    FEATURED_ATTRIBUTES = {"bundle"}

    # Schema fragment validating the "bundle" section of the recipe
    # configuration; other top-level keys are deliberately left unchecked
    # (they belong to other features).
    SCHEMA = schema.Schema({
        "bundle": {
            "sdk": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "steps": schema.And([str], len),
        },
        str: object,  # do not consider other keys
    })

    def bundle(self) -> None:
        """Run the configured "bundle" steps inside an SDK container session.

        Any previous bundle output for this recipe is removed first (removal
        happens under ``ElevatedPrivileges`` — presumably because prior runs
        may leave root-owned files; confirm), the output directory is
        recreated, then each step command is run in the SDK session with
        placeholder substitution applied.
        """
        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["bundle"]["sdk"]),
                      "sdk")

        action_out_subpath = os.path.join(self.recipe.out_subpath, "bundle")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Always clear out the previous image result:
        if os.path.exists(action_out_path):
            debug("clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory
        os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        # environment values also go through placeholder substitution:
        with sdk.session(action_name="bundle",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["bundle"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=False) as sess:
            for cmd in self.recipe.config["bundle"]["steps"]:
                info("{!r} bundles recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))
Пример #2
0
def validate_check_config(check_config):
    """Validate a JSON check-configuration document.

    The document may declare ``cluster_checks`` and/or ``node_checks``; every
    node check must be referenced from ``prestart`` and/or ``poststart``.

    :param check_config: JSON string (or equivalent) accepted by
        ``validate_json_dictionary``
    :return: the parsed and validated configuration dictionary
    :raise AssertionError: on any schema or cross-reference violation
    """

    class PrettyReprAnd(schema.And):
        """schema.And whose repr is just its error message, for readable
        validation failures."""

        def __repr__(self):
            return self._error

    check_name = PrettyReprAnd(
        str,
        lambda val: len(val) > 0,
        lambda val: not any(w in val for w in string.whitespace),
        error='Check name must be a nonzero length string with no whitespace')

    timeout_units = ['ns', 'us', 'µs', 'ms', 's', 'm', 'h']
    # NOTE: raw string — "\d" in a plain literal is an invalid escape
    # sequence (SyntaxWarning / future error on recent Python versions).
    timeout = schema.Regex(
        r'^\d+(\.\d+)?({})$'.format('|'.join(timeout_units)),
        error='Timeout must be a string containing an integer or float followed by a unit: {}'.format(
            ', '.join(timeout_units)))

    check_config_schema = schema.Schema({
        schema.Optional('cluster_checks'): {
            check_name: {
                'description': str,
                'cmd': [str],
                'timeout': timeout,
            },
        },
        schema.Optional('node_checks'): {
            'checks': {
                check_name: {
                    'description': str,
                    'cmd': [str],
                    'timeout': timeout,
                    schema.Optional('roles'): schema.Schema(
                        ['master', 'agent'],
                        error='roles must be a list containing master or agent or both',
                    ),
                },
            },
            schema.Optional('prestart'): [check_name],
            schema.Optional('poststart'): [check_name],
        },
    })

    check_config_obj = validate_json_dictionary(check_config)
    try:
        check_config_schema.validate(check_config_obj)
    except schema.SchemaError as exc:
        raise AssertionError(str(exc).replace('\n', ' ')) from exc

    # Cross-field constraints that the schema alone cannot express.
    # NOTE(review): these use assert, which is stripped under "python -O" —
    # confirm this module is never run optimized, or raise explicitly.
    if 'node_checks' in check_config_obj.keys():
        node_checks = check_config_obj['node_checks']
        assert any(k in node_checks.keys() for k in ['prestart', 'poststart']), (
            'At least one of prestart or poststart must be defined in node_checks')
        assert node_checks['checks'].keys() == set(
            node_checks.get('prestart', []) + node_checks.get('poststart', [])), (
            'All node checks must be referenced in either prestart or poststart, or both')

    return check_config_obj
Пример #3
0
def instrumented_recipes() -> Dict[str, InstrumentationLevel]:
    """Probes for existence of an "instrumentation.toml" drop-in file at the
    root of the `repo` source tree, parses this file and return a
    :py:data:`dict` of all the recipe identifiers (as keys) requested to be
    instrumented with their instrumentation level as value.

    :raise InstrumentationSpecificationError: in case of an incoherence in the
        ``instrumentation.toml`` file

    """

    instru_filepath = os.path.join(repo_root_path(), "instrumentation.toml")

    # No drop-in file means nothing is instrumented:
    if not os.path.exists(instru_filepath):
        return dict()

    try:
        with open(instru_filepath, "r") as instru_fp:
            instru: Dict[str, Any] = toml.load(instru_fp)
    except Exception as exc:
        # Narrowed from a bare "except:" (which would also swallow
        # SystemExit/KeyboardInterrupt); the original cause is chained for
        # easier debugging.
        raise InstrumentationSpecificationError(line(
            """Cannot open or parse as TOML the "instrumentation.toml" file
            placed at the root of the repo source tree.""")) from exc

    # One optional list of recipe identifiers per instrumentation level:
    instru_file_schema = schema.Schema({
        schema.Optional(level.name.lower(), default=[]): [
            schema.Regex(RECIPE_IDENTIFIER_RE.pattern)
        ] for level in InstrumentationLevel
    })

    try:
        instru = instru_file_schema.validate(instru)
    except schema.SchemaError as exc:
        raise InstrumentationSpecificationError(line(
            """The "instrumentation.toml" file has an unexpected data
            structure. Reason: {!s}""").format(exc)) from exc

    # Every listed recipe must have a configuration in the products folder:
    for level in InstrumentationLevel:
        for recipe in instru[level.name.lower()]:
            recipe_config_path = os.path.join(repo_root_path(), "products",
                                              recipe, "recipe.toml")
            if not os.path.exists(recipe_config_path):
                raise InstrumentationSpecificationError(line(
                    """The recipe {!r} is not a valid recipe or has no
                    configuration file in the products
                    folder.""").format(recipe))

    # Build the result, rejecting recipes listed under more than one level:
    recipes: Dict[str, InstrumentationLevel] = dict()
    for level in InstrumentationLevel:
        for recipe_id in instru[level.name.lower()]:
            if recipe_id in recipes:
                raise InstrumentationSpecificationError(line(
                    """The recipe {!r} is specified more than once in the
                    "instrumentation.toml" file.""").format(recipe_id))
            recipes[recipe_id] = level

    return recipes
Пример #4
0
 def __init__(self, verbose=False):
     """Build the producer schema.

     Args:
         verbose (bool): Optional. Defaults to ``False``. Passed through to
             the nested :class:`ProducerVersion` component.
     """
     super(Producer, self).__init__(verbose=verbose)
     # NOTE(review): the name regex requires at least two characters (a
     # leading letter plus one more) — confirm single-letter names are
     # intentionally rejected.
     self.__schema__ = schema.Schema({
         'name':
         schema.Regex(r'^[a-zA-Z][0-9a-zA-Z_]+$',
                      # Fixed: a space was missing between the concatenated
                      # fragments ('..."_"and should...').
                      error='Invalid name: '
                      'Expected only alphanumeric characters or "_" '
                      'and should start with a letter.'),
         'version':
         ProducerVersion(verbose=verbose)
     })
Пример #5
0
class MD5Checksum(SchemaComponent):
    """A 'validatable' component in a ``schema`` validation which only accepts syntactically valid *MD5* hash.

    Args:
        verbose (bool): Optional. Default to ``False``. Toggle exhaustive schema offence reporting.

    """

    # Exactly 32 lowercase hexadecimal digits; uppercase hashes are rejected.
    __schema__ = schema.Regex(
        r'^[a-f0-9]{32}$',
        error='Invalid file checksum provided: Expected a valid MD5 hash.')
Пример #6
0
class CuckooSandboxFileAnalysisReport(CortexAnalyzerReport):
    """ Represents a Cortex CuckooSandbox_File_Analysis_Inet_1_2 analysis JSON
        report. """
    # Expected report layout. The "full" sub-schema tolerates extra keys but
    # requires a lowercase "signatures" list of strings.
    report_schema = schema.Schema({
        "summary": {
            "taxonomies": [
                schema.Or(
                    {
                        "level": schema.Or("info", "malicious", "safe"),
                        "namespace": "Cuckoo",
                        "predicate": "Malscore",
                        "value": schema.Regex(r'^[0-9\./]*$')
                    }, {
                        "level": schema.Or("info", "malicious", "safe"),
                        "namespace": "Cuckoo",
                        "predicate": "Malfamily",
                        "value": str,
                    })
            ]
        },
        "full":
        schema.Schema({
            "signatures": schema.Schema([str]),
        },
                      ignore_extra_keys=True),
        "success":
        bool,
        "artifacts":
        CortexAnalyzerReport.report_schema_artifacts,
        "operations": []
    })

    def __init__(self, report):
        super().__init__(report)
        # Default of [{}] keeps iteration in malscore safe when absent.
        self.taxonomies = report.get("summary", {}).get("taxonomies", [{}])

    @property
    def signatures(self):
        """ Matched Cuckoo signatures, or None if absent. """
        # Fixed: the schema above validates the lowercase 'signatures' key;
        # the previous 'Signatures' (capital S) lookup always returned None
        # on schema-conforming reports.
        return self.report.get('full', {}).get('signatures', None)

    @property
    def malscore(self):
        """ Malscore n of 10 (might be bigger), or -1 if not reported. """
        for tax in self.taxonomies:
            if tax.get('predicate') == 'Malscore':
                # NOTE(review): the schema's value regex also admits strings
                # containing '/' (e.g. "7.2/10"), which float() would reject
                # — confirm upstream values are plain numbers.
                return float(tax['value'])
        return -1
Пример #7
0
    def meta_schema(self) -> schema.Schema:
        """The schema for which the recipe configuration file (``recipe.toml``)
        needs to comply for common part (*i.e.* the part of the configuration
        common to all the recipes)"""

        # Accept any feature name declared by a RecipeFeature subclass:
        known_features = [featclass.NAME for featclass
                          in features.RecipeFeature.__subclasses__()]

        return schema.Schema({
            "features": [schema.Or(*known_features)],
            # All recipes have SDK except for SDK recipes
            schema.Optional("sdk"): schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            # other keys must be dict and will be validated by recipe features
            # validation methods
            str: dict,
        })
Пример #8
0
def test_schema():
    """Demonstrate basic usage of the ``schema`` library (Python 2 syntax)."""

    # Validate plain integers
    print '----------------------int'
    print schema.Schema(int).validate(123)
    print schema.Schema(int).is_valid(123)

    # Validate strings
    print '----------------------str'
    # Regex has no is_valid method
    print schema.Regex(r'^foo').validate('foobar')
    print schema.Schema(lambda n: "foo" in n).is_valid('foobar')
    print 'False:%s ' % schema.Schema(lambda n: "foo" in n).is_valid('fobar')

    # Validate a dictionary
    print '----------------------dict'
    rules = {
        'name': schema.And(str, len),
        'age': schema.And(schema.Use(int), lambda n: 18 <= n <= 99),
        schema.Optional('gender'): schema.And(str, schema.Use(str.lower), lambda s: s in ('squid', 'kid'))}

    data = {'name': 'Sue', 'age': '28', 'gender': 'Squid'}

    print schema.Schema(rules).validate(data)
    print schema.Schema(rules).is_valid(data)

    # Validate a list of dictionaries
    print '----------------------list-dict'
    rules = [{
        'name': schema.And(str, len),
        'age': schema.And(schema.Use(int), lambda n: 18 <= n <= 99),
        schema.Optional('gender'): schema.And(str, schema.Use(str.lower), lambda s: s in ('squid', 'kid'))}]

    data = [{'name': 'Sue', 'age': '28', 'gender': 'Squid'},
            {'name': 'Sam', 'age': '42'},
            {'name': 'Sacha', 'age': '20', 'gender': 'KID'}]

    print schema.Schema(rules).validate(data)
    print schema.Schema(rules).is_valid(data)
Пример #9
0
class VirusTotalQueryReport(CortexAnalyzerReport):
    """ Represents a Cortex VirusTotal_GetReport_3_0 analysis JSON report. """

    # Expected layout of a validated VirusTotal report.
    report_schema = schema.Schema({
        "summary": {
            "taxonomies": [{
                "level": schema.Or("info", "malicious", "safe"),
                "namespace": "VT",
                "predicate": str,
                "value": schema.Regex(r'^[0-9/]*$')
            }]
        },
        "full": {
            "response_code": int,
            "resource": str,
            "verbose_msg": str
        },
        "success": bool,
        "artifacts": CortexAnalyzerReport.report_schema_artifacts,
        "operations": []
    })

    def __init__(self, unvalidated_report):
        super().__init__(unvalidated_report)

        # Pick the taxonomy entry whose namespace is "VT" ({} if none found).
        taxonomies = self.report.get('summary', {}).get('taxonomies')
        self.taxonomies_vt = self.get_element_from_list_of_dicts(
            taxonomies, 'namespace', 'VT', {})

    @property
    def n_of_all(self):
        """ n of all Virusscanners at VirusTotal have rated this file as
            malicious. """
        ratio = self.taxonomies_vt.get('value', '-1/0')
        hits, _, _ = ratio.partition('/')
        return int(hits)

    @property
    def level(self):
        """ safe, suspicious, malicious """
        return self.taxonomies_vt.get('level')
Пример #10
0
    return schema.And(schema.Use(type_), sub_schema)


def _as_d(sub_schema):
    """Validate *sub_schema* after coercing the value to a dict."""
    return _coerce(dict, sub_schema)


def _as_l(sub_schema):
    """Validate *sub_schema* after coercing the value to a list."""
    return _coerce(list, sub_schema)


# Top-level schema of the key file: an audit log, the key custodians, and any
# number of named security domains (domain names must not start with "meta").
_FILE_SCHEMA = schema.Schema(_as_d(
{
    "audit-log": _as_l([str]),
    "key-custodians": _as_d({
        schema.Optional(str): _as_d({
            "pwdkm": str,
        }),
    }),
    schema.Optional(schema.Regex("^(?!meta).*$")): _as_d({
        # allow string names for security domains,
        # but meta is reserved
        "meta": _as_d({
            "owners": _as_d({str: str}),
            "public-key": str,
        }),
        # secrets stored in the domain are keyed "secret-<name>"
        schema.Optional(schema.Regex("secret-.*")): str,
    }),
}))


# NOTE: this is a public class since it must be passed in
@attr.s(frozen=True)
class Creds(object):
    "Stores credentials used to open a KeyFile"
Пример #11
0
"""
Copyright (c) 2020 Nathan Telles

This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""

from typing import Any, Dict, ValuesView

import schema as sch
import yaml

from .errors import ConfigIOError, InvalidConfig

# Identifiers made only of word characters (letters, digits, "_").
# NOTE(review): "\d" is redundant inside the class since "\w" already
# covers digits.
SAFE_STR_REG = sch.Regex(r'^[\w\d]+$')

# Regex for validating IPv4 addresses (each octet constrained to 0-255)
IP_REG = sch.Regex(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
                   r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')

TCP_SCHEMA = sch.Schema({
    'type':
    'tcp_server',
    'address':
    IP_REG,
    'port':
    sch.And(sch.Use(int), lambda n: 0 <= n <= 65535),
    sch.Optional('line_ending', default="\n"):
    str,
    sch.Optional('default_response', default=''):
Пример #12
0
        Sync the docker daemon clock up to current clock.
        '''
        with self.logger.info('sync_clock'):
            cmd = 'date +%s -s @' + str(int(time.time()))
            self.executor.docker.run(
                CENTOS, cmd, rm=None, privileged=None
            ).interactive()


_EMPTY = attr.make_class('Empty', [])()  # represents a value that should be dropped from the YAML
_DEPENDS_ON_ALL = '*'  # special value for depends_on for connecting to everything else
# (mainly for jupyter notebook or similar debugging tool)
SK_CENTRAL_REMOTE = 'USER@GITLAB_HOSTNAME:REPO'

# Environment spec: either a list of "NAME" / "NAME=value" strings, or a
# mapping of variable names to string values (or None).
_ENV_SCHEMA = schema.Or(
    [schema.Regex('[_a-zA-Z][_a-zA-Z0-9]*(=[^=]+)?')],
    {schema.Regex('[_a-zA-Z][_a-zA-Z0-9]*'): schema.Or(str, None)})

# "PORT" or "HOST:CONTAINER" port mapping.
# NOTE(review): the pattern is unanchored, so any string containing digits
# would match — confirm whether "^...$" anchors were intended.
_PORT_SCHEMA = schema.Regex(r'\d+(\:\d+)?')

'''
Environment variables in docker-compose:
https://docs.docker.com/compose/compose-file/#/environment

environment:
  RACK_ENV: development
  SHOW: 'true'
  SESSION_SECRET:

environment:
  - RACK_ENV=development
Пример #13
0
class Config(object):
    """Project configuration, read from the ``config`` file inside the DVC
    directory, validated against :data:`Config.SCHEMA` and exposed as a
    :class:`configobj.ConfigObj` instance (``self._config``).
    """

    CONFIG = 'config'

    SECTION_CORE = 'core'
    SECTION_CORE_LOGLEVEL = 'loglevel'
    SECTION_CORE_LOGLEVEL_SCHEMA = schema.And(schema.Use(str.lower), lambda l: l in ('info', 'debug', 'warning', 'error'))
    SECTION_CORE_REMOTE = 'remote'

    # backward compatibility
    SECTION_CORE_CLOUD = 'cloud'
    SECTION_CORE_CLOUD_SCHEMA = schema.And(schema.Use(str.lower), lambda c: c in ('aws', 'gcp', 'local', ''))
    SECTION_CORE_STORAGEPATH = 'storagepath'

    SECTION_CORE_SCHEMA = {
        schema.Optional(SECTION_CORE_LOGLEVEL, default='info'): schema.And(str, schema.Use(str.lower), SECTION_CORE_LOGLEVEL_SCHEMA),
        schema.Optional(SECTION_CORE_REMOTE, default=''): schema.And(str, schema.Use(str.lower)),

        # backward compatibility
        schema.Optional(SECTION_CORE_CLOUD, default=''): SECTION_CORE_CLOUD_SCHEMA,
        schema.Optional(SECTION_CORE_STORAGEPATH, default=''): str,
    }

    # backward compatibility
    SECTION_AWS = 'aws'
    SECTION_AWS_STORAGEPATH = 'storagepath'
    SECTION_AWS_CREDENTIALPATH = 'credentialpath'
    SECTION_AWS_REGION = 'region'
    SECTION_AWS_PROFILE = 'profile'
    SECTION_AWS_SCHEMA = {
        SECTION_AWS_STORAGEPATH: str,
        schema.Optional(SECTION_AWS_REGION): str,
        schema.Optional(SECTION_AWS_PROFILE, default='default'): str,
        schema.Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
    }

    # backward compatibility
    SECTION_GCP = 'gcp'
    SECTION_GCP_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_GCP_PROJECTNAME = 'projectname'
    SECTION_GCP_SCHEMA = {
        SECTION_GCP_STORAGEPATH: str,
        schema.Optional(SECTION_GCP_PROJECTNAME): str,
    }

    # backward compatibility
    SECTION_LOCAL = 'local'
    SECTION_LOCAL_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_LOCAL_SCHEMA = {
        SECTION_LOCAL_STORAGEPATH: str,
    }

    # Remote sections look like: [remote "name"]
    SECTION_REMOTE_REGEX = r'^\s*remote\s*"(.*)"\s*$'
    SECTION_REMOTE_FMT = 'remote "{}"'
    SECTION_REMOTE_URL = 'url'
    SECTION_REMOTE_SCHEMA = {
        SECTION_REMOTE_URL: supported_url,
        schema.Optional(SECTION_AWS_REGION): str,
        schema.Optional(SECTION_AWS_PROFILE, default='default'): str,
        schema.Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
        schema.Optional(SECTION_GCP_PROJECTNAME): str,
    }

    SCHEMA = {
        schema.Optional(SECTION_CORE, default={}): SECTION_CORE_SCHEMA,
        schema.Optional(schema.Regex(SECTION_REMOTE_REGEX)): SECTION_REMOTE_SCHEMA,

        # backward compatibility
        schema.Optional(SECTION_AWS, default={}): SECTION_AWS_SCHEMA,
        schema.Optional(SECTION_GCP, default={}): SECTION_GCP_SCHEMA,
        schema.Optional(SECTION_LOCAL, default={}): SECTION_LOCAL_SCHEMA,
    }

    def __init__(self, dvc_dir):
        """Load and validate the config file found in *dvc_dir*.

        :raise ConfigError: wrapping any parse or validation failure.
        """
        self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))
        self.config_file = os.path.join(dvc_dir, self.CONFIG)

        try:
            self._config = configobj.ConfigObj(self.config_file)

            # NOTE: schema doesn't support ConfigObj.Section validation, so we
            # need to convert our config to dict before passing it to schema.
            self._config = self._lower(self._config)
            self._config = schema.Schema(self.SCHEMA).validate(self._config)

            # NOTE: now converting back to ConfigObj
            self._config = configobj.ConfigObj(self._config, write_empty_values=True)
            self._config.filename = self.config_file
        except Exception as ex:
            # Chain the original exception so the root cause stays visible.
            raise ConfigError(ex) from ex

    @staticmethod
    def _lower(config):
        """Return *config* as a plain dict with all section and key names
        lowercased (values are left untouched)."""
        return {
            s_key.lower(): {key.lower(): value for key, value in s_value.items()}
            for s_key, s_value in config.items()
        }

    @staticmethod
    def init(dvc_dir):
        """Create an empty config file in *dvc_dir* and return a Config."""
        config_file = os.path.join(dvc_dir, Config.CONFIG)
        # Create (or truncate) the file; the context manager guarantees the
        # handle is closed instead of relying on GC.
        with open(config_file, 'w+'):
            pass
        return Config(dvc_dir)
Пример #14
0
File: git.py Project: ptmcg/bob
class GitScm(Scm):

    # Schema for a single "scm: git" entry. Additional remotes beyond origin
    # are supplied as "remote-<name>" keys (see REMOTE_PREFIX below).
    SCHEMA = schema.Schema({
        'scm': 'git',
        'url': str,
        schema.Optional('dir'): str,
        schema.Optional('if'): str,
        schema.Optional('branch'): str,
        schema.Optional('tag'): str,
        schema.Optional('commit'): str,
        schema.Optional('rev'): str,
        schema.Optional(schema.Regex('^remote-.*')): str,
        schema.Optional('sslVerify'): bool,
    })
    # Key prefix used for extra remotes in the spec dictionary:
    REMOTE_PREFIX = "remote-"

    def __init__(self, spec, overrides=None, secureSSL=None):
        """Parse a validated git SCM *spec*.

        ``rev`` may be a branch ref (``refs/heads/...``), a tag ref
        (``refs/tags/...``) or a full 40-character commit id; explicit
        ``branch``/``tag``/``commit`` keys take precedence over it. When
        nothing is specified at all, the ``master`` branch is used.

        :raise ParseError: on malformed ``rev``, commit id or remote name.
        """
        # Fixed: "overrides=[]" was a shared mutable default argument; pass a
        # fresh list instead.
        super().__init__(spec, [] if overrides is None else overrides)
        self.__url = spec["url"]
        self.__branch = None
        self.__tag = None
        self.__commit = None
        self.__remotes = {}
        if "rev" in spec:
            rev = spec["rev"]
            if rev.startswith("refs/heads/"):
                self.__branch = rev[len("refs/heads/"):]
            elif rev.startswith("refs/tags/"):
                self.__tag = rev[len("refs/tags/"):]
            elif len(rev) == 40:
                self.__commit = rev
            else:
                raise ParseError("Invalid rev format: " + rev)
        self.__branch = spec.get("branch", self.__branch)
        self.__tag = spec.get("tag", self.__tag)
        self.__commit = spec.get("commit", self.__commit)
        if self.__commit:
            # validate commit: full 40-digit lowercase hex SHA-1
            if re.match("^[0-9a-f]{40}$", self.__commit) is None:
                raise ParseError("Invalid commit id: " + str(self.__commit))
        elif not self.__branch and not self.__tag:
            # nothing specified at all -> master branch
            self.__branch = "master"
        self.__dir = spec.get("dir", ".")
        # convert remotes into separate dictionary; "origin" is reserved
        for key, val in spec.items():
            if key.startswith(GitScm.REMOTE_PREFIX):
                stripped_key = key[len(GitScm.REMOTE_PREFIX):]  # remove prefix
                if stripped_key == "origin":
                    raise ParseError("Invalid remote name: " + stripped_key)
                self.__remotes.update({stripped_key: val})
        self.__sslVerify = spec.get('sslVerify', secureSSL)

    def getProperties(self):
        """Return this SCM's properties, including the computed 'rev' spec
        and one "remote-<name>" entry per configured remote."""
        properties = super().getProperties()

        # Canonical rev-spec, priority: commit > tag > branch.
        if self.__commit:
            rev = self.__commit
        elif self.__tag:
            rev = "refs/tags/" + self.__tag
        else:
            rev = "refs/heads/" + self.__branch

        properties.update({
            'scm': 'git',
            'url': self.__url,
            'branch': self.__branch,
            'tag': self.__tag,
            'commit': self.__commit,
            'dir': self.__dir,
            'rev': rev,
            'sslVerify': self.__sslVerify,
        })

        for name, url in self.__remotes.items():
            properties.update({GitScm.REMOTE_PREFIX + name: url})

        return properties

    def asScript(self):
        """Return a bash snippet that initializes/updates the checkout.

        The script initializes the repository if needed, reconciles the
        configured remotes with the ones already known to git, then either
        checks out the fixed tag/commit (only when HEAD is still invalid) or
        fetches and fast-forwards the configured branch.
        """
        remotes_array = [
            "# create an array of all remotes for this repository",
            "declare -A BOB_GIT_REMOTES=( [origin]={URL} )".format(
                URL=quote(self.__url)),
        ]
        # add additional remotes to array
        for name, url in self.__remotes.items():
            remotes_array.append("BOB_GIT_REMOTES[{NAME}]={URL}".format(
                NAME=quote(name), URL=quote(url)))

        # Assemble generic header including the remote handling
        header = super().asScript()
        if not self.__sslVerify:
            header += "\nexport GIT_SSL_NO_VERIFY=true"
        header += "\n" + dedent("""\
            if [ ! -d {DIR}/.git ] ; then
                git init {DIR}
            fi
            cd {DIR}
            (
                {REMOTES_ARRAY}
                # remove remotes from array that are already known to Git
                while read -r REMOTE_NAME ; do
                    # check for empty variable in case no remote at all is specified
                    if [ -z "$REMOTE_NAME" ]; then
                        continue
                    fi
                    # check if existing remote is configured
                    if [ "${{BOB_GIT_REMOTES[$REMOTE_NAME]+_}}" ]; then
                        # check if URL has changed
                        if [ ! "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}" == "$(git ls-remote --get-url $REMOTE_NAME)" ]; then
                            git remote set-url "$REMOTE_NAME" "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}"
                        fi
                        # it is configured, therefore no need to keep in list
                        unset "BOB_GIT_REMOTES[$REMOTE_NAME]"
                    fi
                done <<< "$(git remote)"
                # add all remaining remotes in the array to the repository
                for REMOTE_NAME in "${{!BOB_GIT_REMOTES[@]}}" ; do
                    git remote add "$REMOTE_NAME" "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}"
                done
            )""").format(REMOTES_ARRAY="\n    ".join(remotes_array),
                         DIR=quote(self.__dir))

        # Fixed tag/commit: checkout once, never update afterwards.
        if self.__tag or self.__commit:
            refSpec = "'+refs/heads/*:refs/remotes/origin/*' "
            if self.__tag:
                refSpec += quote("refs/tags/{0}:refs/tags/{0}".format(
                    self.__tag))
            return dedent("""\
                {HEADER}
                # checkout only if HEAD is invalid
                if ! git rev-parse --verify -q HEAD >/dev/null ; then
                    git fetch origin {REFSPEC}
                    git checkout -q {REF}
                fi
                """).format(HEADER=header,
                            REF=self.__commit if self.__commit else "tags/" +
                            quote(self.__tag),
                            REFSPEC=refSpec)
        else:
            return dedent("""\
                {HEADER}
                git fetch -p origin
                if ! git rev-parse --verify -q HEAD >/dev/null ; then
                    # checkout only if HEAD is invalid
                    git checkout -b {BRANCH} remotes/origin/{BRANCH}
                elif [[ $(git rev-parse --abbrev-ref HEAD) == {BRANCH} ]] ; then
                    # pull only if on original branch
                    git merge --ff-only refs/remotes/origin/{BRANCH}
                else
                    echo Warning: not updating {DIR} because branch was changed manually... >&2
                fi
                """).format(HEADER=header,
                            BRANCH=quote(self.__branch),
                            DIR=quote(self.__dir))

    def asDigestScript(self):
        """Return forward compatible stable string describing this git module.

        The format is "url rev-spec dir" where rev-spec depends on the given
        reference; a bare commit id already pins the content, so the URL is
        omitted in that case.
        """
        if self.__commit:
            return " ".join((self.__commit, self.__dir))
        if self.__tag:
            return " ".join((self.__url, "refs/tags/" + self.__tag,
                             self.__dir))
        return " ".join((self.__url, "refs/heads/" + self.__branch,
                         self.__dir))

    def asJenkins(self, workPath, credentials, options):
        """Build the Jenkins git-plugin XML configuration for this module.

        :param workPath: workspace-relative base directory of the checkout
        :param credentials: optional Jenkins credentials id for the remote
        :param options: dict of extra settings (``scm.git.shallow``,
            ``scm.git.timeout``, ``scm.ignore-hooks``)
        :return: the ``<scm>`` :class:`ElementTree.Element`
        :raise BuildError: on invalid shallow/timeout option values
        """
        scm = ElementTree.Element("scm",
                                  attrib={
                                      "class": "hudson.plugins.git.GitSCM",
                                      "plugin": "[email protected]",
                                  })
        ElementTree.SubElement(scm, "configVersion").text = "2"

        userconfigs = ElementTree.SubElement(
            ElementTree.SubElement(scm, "userRemoteConfigs"),
            "hudson.plugins.git.UserRemoteConfig")

        url = ElementTree.SubElement(userconfigs, "url")
        url.text = self.__url

        if credentials:
            credentialsId = ElementTree.SubElement(userconfigs,
                                                   "credentialsId")
            credentialsId.text = credentials

        # Branch spec priority mirrors the rest of the class: commit > tag >
        # branch.
        branch = ElementTree.SubElement(
            ElementTree.SubElement(ElementTree.SubElement(scm, "branches"),
                                   "hudson.plugins.git.BranchSpec"), "name")
        if self.__commit:
            branch.text = self.__commit
        elif self.__tag:
            branch.text = "refs/tags/" + self.__tag
        else:
            branch.text = "refs/heads/" + self.__branch

        ElementTree.SubElement(
            scm, "doGenerateSubmoduleConfigurations").text = "false"
        ElementTree.SubElement(scm, "submoduleCfg", attrib={"class": "list"})

        extensions = ElementTree.SubElement(scm, "extensions")
        ElementTree.SubElement(
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.RelativeTargetDirectory"),
            "relativeTargetDir").text = os.path.normpath(
                os.path.join(workPath, self.__dir))
        # remove untracked files and stale branches
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.CleanCheckout")
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.PruneStaleBranch")
        # set git clone options
        shallow = options.get("scm.git.shallow")
        timeout = options.get("scm.git.timeout")
        if shallow is not None or timeout is not None:
            co = ElementTree.SubElement(
                extensions, "hudson.plugins.git.extensions.impl.CloneOption")
            if shallow is not None:
                try:
                    shallow = int(shallow)
                    if shallow < 0: raise ValueError()
                except ValueError:
                    raise BuildError("Invalid 'git.shallow' option: " +
                                     str(shallow))
                if shallow > 0:
                    ElementTree.SubElement(co, "shallow").text = "true"
                    ElementTree.SubElement(co, "noTags").text = "false"
                    ElementTree.SubElement(co, "reference").text = ""
                    ElementTree.SubElement(co, "depth").text = str(shallow)
                    ElementTree.SubElement(co, "honorRefspec").text = "false"

            if timeout is not None:
                try:
                    timeout = int(timeout)
                    if timeout < 0: raise ValueError()
                except ValueError:
                    # Fixed: the message previously reported str(shallow)
                    # (copy-paste from the branch above) instead of the
                    # offending timeout value.
                    raise BuildError("Invalid 'git.timeout' option: " +
                                     str(timeout))
                if timeout > 0:
                    ElementTree.SubElement(co, "timeout").text = str(timeout)

        if isTrue(options.get("scm.ignore-hooks", "0")):
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.IgnoreNotifyCommit")

        return scm

    def getDirectory(self):
        # Checkout directory relative to the workspace ("." by default).
        return self.__dir

    def isDeterministic(self):
        # Only tags and fixed commits give a reproducible checkout; a branch
        # head may move between builds.
        return bool(self.__tag) or bool(self.__commit)

    def hasJenkinsPlugin(self):
        # A native Jenkins git-plugin configuration is produced by asJenkins().
        return True

    def callGit(self, workspacePath, *args):
        """Run ``git *args`` inside this module's checkout directory.

        :param workspacePath: base path the module directory is relative to
        :return: the stripped stdout of the git command
        :raise BuildError: if git exits non-zero or cannot be executed
        """
        cmdLine = ['git']
        cmdLine.extend(args)
        cwd = os.path.join(workspacePath, self.__dir)
        try:
            output = subprocess.check_output(cmdLine,
                                             cwd=cwd,
                                             universal_newlines=True,
                                             stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError as e:
            # Chain the original exception so the root cause stays visible.
            raise BuildError(
                "git error:\n Directory: '{}'\n Command: '{}'\n'{}'".format(
                    cwd, " ".join(cmdLine), e.output.rstrip())) from e
        except OSError as e:
            raise BuildError("Error calling git: " + str(e)) from e
        return output.strip()

    def status(self, workspacePath):
        """Determine how the workspace deviates from the configured state.

        Checks the remote URL, the checked out commit/tag/branch, local
        modifications and unpushed commits. All findings are collected into a
        ScmStatus object; git failures are recorded as ScmTaint.error rather
        than raised.
        """
        status = ScmStatus()
        try:
            onCorrectBranch = False
            onTag = False
            # Compare the workspace's effective remote URL to the configured one.
            output = self.callGit(workspacePath, 'ls-remote', '--get-url')
            if output != self.__url:
                status.add(
                    ScmTaint.switched,
                    "> URL: configured: '{}', actual: '{}'".format(
                        self.__url, output))

            if self.__commit:
                # A fixed commit is configured: HEAD must match it exactly.
                output = self.callGit(workspacePath, 'rev-parse', 'HEAD')
                if output != self.__commit:
                    status.add(
                        ScmTaint.switched,
                        "> commit: configured: '{}', actual: '{}'".format(
                            self.__commit, output))
            elif self.__tag:
                # HEAD must be pointed at by the configured tag.
                output = self.callGit(workspacePath, 'tag', '--points-at',
                                      'HEAD').splitlines()
                if self.__tag not in output:
                    actual = ("'" + ", ".join(output) +
                              "'") if output else "not on any tag"
                    status.add(
                        ScmTaint.switched,
                        "> tag: configured: '{}', actual: {}".format(
                            self.__tag, actual))

                # Need to check if the tag still exists. Otherwise the "git
                # log" command at the end will trip.
                try:
                    self.callGit(workspacePath, 'rev-parse',
                                 'tags/' + self.__tag)
                    onTag = True
                except BuildError:
                    pass
            elif self.__branch:
                # The configured branch must be checked out; if it is, look
                # for commits that are not on origin yet.
                output = self.callGit(workspacePath, 'rev-parse',
                                      '--abbrev-ref', 'HEAD')
                if output != self.__branch:
                    status.add(
                        ScmTaint.switched,
                        "> branch: configured: '{}', actual: '{}'".format(
                            self.__branch, output))
                else:
                    output = self.callGit(
                        workspacePath, 'log', '--oneline',
                        'refs/remotes/origin/' + self.__branch + '..HEAD')
                    if output:
                        status.add(
                            ScmTaint.unpushed_main,
                            joinLines(
                                "> unpushed commits on {}:".format(
                                    self.__branch), indent(output, '   ')))
                    onCorrectBranch = True

            # Check for modifications wrt. checked out commit
            output = self.callGit(workspacePath, 'status', '--porcelain')
            if output:
                status.add(ScmTaint.modified,
                           joinLines("> modified:", indent(output, '   ')))

            # The following shows all unpushed commits reachable by any ref
            # (local branches, stash, detached HEAD, etc).
            # Exclude HEAD if the configured branch is checked out to not
            # double-count them. Does not mark the SCM as dirty. Exclude the
            # configured tag too if it is checked out. Otherwise the tag would
            # count as unpushed if it is not on a remote branch.
            what = ['--all', '--not', '--remotes']
            if onCorrectBranch: what.append('HEAD')
            if onTag: what.append("tags/" + self.__tag)
            output = self.callGit(workspacePath, 'log', '--oneline',
                                  '--decorate', *what)
            if output:
                status.add(
                    ScmTaint.unpushed_local,
                    joinLines("> unpushed local commits:",
                              indent(output, '   ')))

        except BuildError as e:
            status.add(ScmTaint.error, e.slogan)

        return status

    def getAuditSpec(self):
        """Return the audit engine name and the directory to audit."""
        return ("git", self.__dir)

    def hasLiveBuildId(self):
        """Live build-ids are supported (see calcLiveBuildId)."""
        return True

    async def predictLiveBuildId(self, step):
        """Predict the live build-id without requiring a workspace.

        A fixed commit is returned directly. Otherwise the remote is queried
        with "git ls-remote" for the configured tag or branch. Returns the
        commit id as bytes, or None if it could not be determined.
        """
        if self.__commit:
            return bytes.fromhex(self.__commit)

        with stepAction(step, "LS-REMOTE", self.__url, (INFO, TRACE)) as a:
            if self.__tag:
                # Annotated tags are objects themselves. We need the commit object!
                refs = [
                    "refs/tags/" + self.__tag + '^{}',
                    "refs/tags/" + self.__tag
                ]
            else:
                refs = ["refs/heads/" + self.__branch]
            cmdLine = ['git', 'ls-remote', self.__url] + refs
            try:
                proc = await asyncio.create_subprocess_exec(
                    *cmdLine,
                    stdout=asyncio.subprocess.PIPE,
                    stderr=subprocess.DEVNULL)
                try:
                    stdout, stderr = await proc.communicate()
                    rc = await proc.wait()
                except concurrent.futures.CancelledError:
                    # Terminate the subprocess so it does not linger.
                    # NOTE(review): the cancellation is swallowed here and the
                    # exit code of the terminated process is reported instead
                    # of re-raising — confirm this is intended.
                    proc.terminate()
                    rc = await proc.wait()
                if rc != 0:
                    a.fail("exit {}".format(rc), WARNING)
                    return None
                output = stdout.decode(
                    locale.getpreferredencoding(False)).strip()
            except (subprocess.CalledProcessError, OSError):
                a.fail("error")
                return None

            # have we found anything at all?
            if not output:
                a.fail("unknown", WARNING)
                return None

            # See if we got one of our intended refs. Git is generating lines with
            # the following format:
            #
            #   <sha1>\t<refname>
            #
            # Put the output into a dict with the refname as key. Be extra careful
            # and strip out lines not matching this pattern.
            output = {
                commitAndRef[1].strip(): bytes.fromhex(commitAndRef[0].strip())
                for commitAndRef in (line.split('\t')
                                     for line in output.split('\n'))
                if len(commitAndRef) == 2
            }
            # The refs list is ordered: the dereferenced tag object ("^{}")
            # wins over the tag ref itself.
            for ref in refs:
                if ref in output: return output[ref]

            # uhh, should not happen...
            a.fail("unknown", WARNING)
            return None

    def calcLiveBuildId(self, workspacePath):
        """Return the live build-id: the checked out commit hash as bytes."""
        if self.__commit:
            return bytes.fromhex(self.__commit)
        head = self.callGit(workspacePath, 'rev-parse', 'HEAD')
        return bytes.fromhex(head.strip())

    def getLiveBuildIdSpec(self, workspacePath):
        """Return the live build-id spec string.

        A fixed commit is encoded verbatim ("=<commit>"); otherwise the git
        workspace directory is referenced ("g<path>").
        """
        if self.__commit:
            return "=" + self.__commit
        return "g" + os.path.join(workspacePath, self.__dir)

    @staticmethod
    def processLiveBuildIdSpec(dir):
        """Resolve a live build-id spec by asking git for HEAD of *dir*.

        Raises BuildError if git fails or cannot be executed.
        """
        cmd = ["git", "rev-parse", "HEAD"]
        try:
            head = subprocess.check_output(cmd, cwd=dir,
                                           universal_newlines=True)
        except subprocess.CalledProcessError as e:
            raise BuildError("Git audit failed: " + str(e))
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
        return head.strip()
Пример #15
0
# File Name: demo.py
# Description:

"""
import  schema

# 检查数字
print '----------------------int'
print schema.Schema(int).validate(123)
print schema.Schema(int).is_valid(123)

# 检查字符串
print '----------------------str'
# Regex 没有 is_valid 方法
print schema.Regex(r'^foo').validate('foobar')
print schema.Schema(lambda n: "foo" in n).is_valid('foobar')
print 'False:%s ' %  schema.Schema(lambda n: "foo" in n).is_valid('fobar')

# 检查字典
print '----------------------dict'
rules = {
    'name': schema.And(str, len),
    'age':  schema.And(schema.Use(int), lambda n: 18 <= n <= 99),
    schema.Optional('gender'): schema.And(str, schema.Use(str.lower),lambda s: s in ('squid', 'kid'))}

data = {'name': 'Sue', 'age': '28', 'gender': 'Squid'}

print schema.Schema(rules).validate(data)
print schema.Schema(rules).is_valid(data)
Пример #16
0
    assert url_obj.scheme
    if schemes is not None:
        assert url_obj.scheme in schemes
    assert url_obj.host

    return url_obj


def parse_valid_web_url(url):
    """Parse *url* and assert it is a valid http(s) URL."""
    web_schemes = ('http', 'https')
    return parse_valid_url(url, schemes=web_schemes)


# Schema for one project entry. Any key ending in "_url" must parse as a URL,
# "repo_url" specifically must be http(s), and extra keys are rejected.
_PROJECT_SCHEMA = schema.Schema(
    {
        'name': str,
        # Raw string: '\w' is an invalid escape sequence in a normal string
        # literal (SyntaxWarning since Python 3.12, error in the future).
        schema.Optional(schema.Regex(r'\w+_url')): parse_valid_url,
        'repo_url': parse_valid_web_url,
        'desc': str,
        'tags': tuple
    },
    ignore_extra_keys=False)


class ApatiteError(Exception):
    """Base exception for apatite-specific errors."""
    pass


class DuplicateProjectError(ApatiteError):
    """Raised when the same project is registered more than once."""
    pass

Пример #17
0
class RecipeConfigureFeature(RecipeFeature):
    """Recipe feature for configuring root trees built during a recipe root
    feature"""

    NAME = "configure"

    FEATURED_ATTRIBUTES = {"configure"}

    # Validate only the "configure" section of the recipe config; any other
    # key is accepted unchecked (str: object).
    SCHEMA = schema.Schema({
        "configure": {
            "sdk": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "root": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            "steps": schema.And([str], len),
        },
        str: object,  # do not consider other keys
    })

    def configure(self) -> None:
        """Run the "configure" steps of this recipe inside its SDK.

        The root tree produced by the image action of the recipe designated
        by "configure.root" is copied into this recipe's "configure" output
        directory, then each configured step is run on it within an SDK
        session.

        Raises RecipeActionError if the designated image action has not been
        run before.
        """

        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["configure"]["sdk"]),
                      "sdk")

        # the recipe to configure (i.e. what is the recipe for which we need to
        # get image-resulting root tree on which running the configuration
        # steps?):
        recipe_to_configure = recipe.Recipe(
            self.replace_placeholders(self.recipe.config["configure"]["root"]))

        # check that image action for the recipe designated by "root" has well
        # been run:
        image_out_subpath = os.path.join(recipe_to_configure.out_subpath,
                                         "image")
        image_out_path = os.path.join(repo_root_path(), image_out_subpath)
        if not os.path.exists(os.path.join(image_out_path, "root")):
            raise RecipeActionError(
                line("""
                Could not process a configure action step if the image action
                step from the recipe designated by "configure.root" has not
                been run before.
            """))

        action_out_subpath = os.path.join(self.recipe.out_subpath, "configure")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Always clear out the previous configure result:
        if os.path.exists(action_out_path):
            debug("clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory
        os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        # Retrieve the result of the image action to work on it for this
        # configuration step:
        with ElevatedPrivileges():
            debug(
                line("""copying resulting root of the image action step for the
                proper recipe ({!r}) into the working environment for the
                configure action step...""".format(
                    recipe_to_configure.identifier)))
            shutil.copytree(os.path.join(image_out_path, "root"),
                            os.path.join(action_out_path, "root"),
                            symlinks=True)

        with sdk.session(action_name="configure",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["configure"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=False) as sess:
            for cmd in self.recipe.config["configure"]["steps"]:
                info("{!r} configures recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))
Пример #18
0
class RecipeSdkFeature(RecipeFeature):
    """Recipe feature for creating and proposing SDK objects to other
    features."""

    NAME = "sdk"

    FEATURED_ATTRIBUTES = {"bootstrap", "run", "sdk"}

    # Validate the "runtime" and "bootstrap" sections of the recipe config;
    # any other key is accepted unchecked (str: object).
    SCHEMA = schema.Schema({
        "runtime": {
            "additional_capabilities": [str],
            "additional_device_bindings": [str],
            "cwd": schema.And(str, len),
            "prelude_commands": [str],
            "postlude_commands": [str],
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "writable_assets_dirs_at_build": [schema.And(str, len)],
        },
        "bootstrap": {
            "rootfs_archive": str,
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "steps": [str],
        },
        str: object,  # do not consider other keys
    })

    def bootstrap(self) -> None:
        """Bootstrap the SDK from the configured rootfs archive.

        Placeholders in the archive path, the steps and the environment
        values are substituted before delegating to Sdk.bootstrap().
        """
        self.sdk.bootstrap(
            rootfs_archive=self.replace_placeholders(
                self.recipe.config["bootstrap"]["rootfs_archive"],
                sdk_context=False),
            steps=[
                self.replace_placeholders(step, sdk_context=True)
                for step in self.recipe.config["bootstrap"]["steps"]
            ],
            env={
                key: self.replace_placeholders(value, sdk_context=True)
                for key, value in self.recipe.config["bootstrap"]
                ["env"].items()
            },
        )

    def run(self, command: Optional[str] = None) -> None:
        """Run an interactive SDK session, or a single *command* in it."""
        self.sdk.interactive_run(
            recipe=self.recipe,
            command=command,
            env={
                key: self.replace_placeholders(value, sdk_context=True)
                for key, value in self.recipe.config["runtime"]["env"].items()
            },
        )

    @property
    def sdk(self) -> Sdk:
        """Build an Sdk object from this recipe's "runtime" configuration."""
        return Sdk(
            recipe=self.recipe,
            cwd=self.replace_placeholders(self.recipe.config["runtime"]["cwd"],
                                          sdk_context=True),
            env={
                key: self.replace_placeholders(value, sdk_context=True)
                for key, value in self.recipe.config["runtime"]["env"].items()
            },
            additional_capabilities=(
                self.recipe.config["runtime"]["additional_capabilities"]),
            additional_device_bindings=(
                self.recipe.config["runtime"]["additional_device_bindings"]),
            prelude_commands=[
                self.replace_placeholders(cmd, sdk_context=True)
                for cmd in self.recipe.config["runtime"]["prelude_commands"]
            ],
            postlude_commands=[
                self.replace_placeholders(cmd, sdk_context=True)
                for cmd in self.recipe.config["runtime"]["postlude_commands"]
            ],
            writable_assets_dirs_at_build=(self.recipe.config["runtime"]
                                           ["writable_assets_dirs_at_build"]))
Пример #19
0
class GitScm(Scm):

    # Validation schema for a git SCM entry. Only 'scm' and 'url' are
    # mandatory; 'branch'/'tag'/'commit'/'rev' select what to check out,
    # and "remote-<name>" keys declare additional git remotes.
    SCHEMA = schema.Schema({
        'scm':
        'git',
        'url':
        str,
        schema.Optional('dir'):
        str,
        schema.Optional('if'):
        schema.Or(str, IfExpression),
        schema.Optional('branch'):
        str,
        schema.Optional('tag'):
        str,
        schema.Optional('commit'):
        str,
        schema.Optional('rev'):
        str,
        schema.Optional(schema.Regex('^remote-.*')):
        str,
        schema.Optional('sslVerify'):
        bool,
        schema.Optional('singleBranch'):
        bool,
        schema.Optional('shallow'):
        schema.Or(int, str),
        schema.Optional('submodules'):
        schema.Or(bool, [str]),
        schema.Optional('recurseSubmodules'):
        bool,
        schema.Optional('shallowSubmodules'):
        bool,
    })
    # Spec keys carrying this prefix ("remote-<name>") define extra remotes.
    REMOTE_PREFIX = "remote-"

    def __init__(self, spec, overrides=None, secureSSL=None, stripUser=None):
        """Create a git SCM from a *spec* dict validated against SCHEMA.

        A 'rev' key is parsed into branch/tag/commit; explicit 'branch',
        'tag' or 'commit' keys take precedence over it. Raises ParseError on
        malformed 'rev', commit id or remote names.
        """
        # Fixed: 'overrides' used a mutable default argument ([]); use None
        # as sentinel and substitute a fresh list to avoid cross-call sharing.
        super().__init__(spec, [] if overrides is None else overrides)
        self.__url = spec["url"]
        self.__branch = None
        self.__tag = None
        self.__commit = None
        self.__remotes = {}
        if "rev" in spec:
            rev = spec["rev"]
            if rev.startswith("refs/heads/"):
                self.__branch = rev[11:]
            elif rev.startswith("refs/tags/"):
                self.__tag = rev[10:]
            elif len(rev) == 40:
                self.__commit = rev
            else:
                raise ParseError("Invalid rev format: " + rev)
        # Explicit keys override anything derived from 'rev'.
        self.__branch = spec.get("branch", self.__branch)
        self.__tag = spec.get("tag", self.__tag)
        self.__commit = spec.get("commit", self.__commit)
        if self.__commit:
            # validate commit
            if re.match(r"^[0-9a-f]{40}$", self.__commit) is None:
                raise ParseError("Invalid commit id: " + str(self.__commit))
        elif not self.__branch and not self.__tag:
            # nothing specified at all -> master branch
            self.__branch = "master"
        self.__dir = spec.get("dir", ".")
        # convert remotes into separate dictionary
        for key, val in spec.items():
            if key.startswith(GitScm.REMOTE_PREFIX):
                stripped_key = key[len(GitScm.REMOTE_PREFIX):]  # remove prefix
                if stripped_key == "origin":
                    raise ParseError("Invalid remote name: " + stripped_key)
                self.__remotes.update({stripped_key: val})
        self.__sslVerify = spec.get('sslVerify', secureSSL)
        self.__singleBranch = spec.get('singleBranch')
        self.__shallow = spec.get('shallow')
        self.__submodules = spec.get('submodules', False)
        self.__recurseSubmodules = spec.get('recurseSubmodules', False)
        self.__shallowSubmodules = spec.get('shallowSubmodules', True)
        self.__stripUser = stripUser

    def getProperties(self, isJenkins):
        """Return the flattened property dict describing this SCM state."""
        if self.__commit:
            rev = self.__commit
        elif self.__tag:
            rev = "refs/tags/" + self.__tag
        else:
            rev = "refs/heads/" + self.__branch

        properties = super().getProperties(isJenkins)
        properties.update({
            'scm': 'git',
            'url': self.__url,
            'branch': self.__branch,
            'tag': self.__tag,
            'commit': self.__commit,
            'dir': self.__dir,
            'rev': rev,
            'sslVerify': self.__sslVerify,
            'singleBranch': self.__singleBranch,
            'shallow': self.__shallow,
            'submodules': self.__submodules,
            'recurseSubmodules': self.__recurseSubmodules,
            'shallowSubmodules': self.__shallowSubmodules,
        })
        # Flatten the extra remotes back into prefixed keys.
        for name, url in self.__remotes.items():
            properties[GitScm.REMOTE_PREFIX + name] = url
        return properties

    async def invoke(self, invoker, switch=False):
        """Create or update the git checkout in the workspace.

        Initializes the repository if needed, configures/repairs all remotes,
        fetches from origin and finally checks out the configured branch, tag
        or commit. With *switch* set, a changed ref is actively checked out.
        """
        # make sure the git directory exists
        if not os.path.isdir(invoker.joinPath(self.__dir, ".git")):
            await invoker.checkCommand(["git", "init", self.__dir])

        # Shallow implies singleBranch
        if self.__singleBranch is None:
            singleBranch = self.__shallow is not None
        else:
            singleBranch = self.__singleBranch
        # Restricting to a single branch requires a configured branch at all.
        singleBranch = singleBranch and (self.__branch is not None)

        # setup and update remotes
        remotes = {"origin": self.__url}
        remotes.update(self.__remotes)
        existingRemotes = await invoker.checkOutputCommand(["git", "remote"],
                                                           cwd=self.__dir)
        for remote in existingRemotes.split("\n"):
            if remote in remotes:
                # Known remote: repair its URL if it diverged from the config.
                cfgUrl = remotes[remote]
                realUrl = await invoker.checkOutputCommand(
                    ["git", "ls-remote", "--get-url", remote], cwd=self.__dir)
                if cfgUrl != realUrl:
                    await invoker.checkCommand(
                        ["git", "remote", "set-url", remote, cfgUrl],
                        cwd=self.__dir)
                del remotes[remote]

        # add remaining (new) remotes
        for remote, url in remotes.items():
            addCmd = ["git", "remote", "add", remote, url]
            if singleBranch: addCmd += ["-t", self.__branch]
            await invoker.checkCommand(addCmd, cwd=self.__dir)

        # relax security if requested
        if not self.__sslVerify:
            await invoker.checkCommand(
                ["git", "config", "http.sslVerify", "false"], cwd=self.__dir)

        # Calculate refspec that is used internally. For the user a regular
        # refspec is kept in the git config.

        # Base fetch command with shallow support
        fetchCmd = ["git", "-c", "submodule.recurse=0", "fetch", "-p"]
        if isinstance(self.__shallow, int):
            fetchCmd.append("--depth={}".format(self.__shallow))
        elif isinstance(self.__shallow, str):
            fetchCmd.append("--shallow-since={}".format(self.__shallow))
        fetchCmd.append("origin")

        # Calculate appropriate refspec (all/singleBranch/tag)
        if singleBranch:
            fetchCmd += [
                "+refs/heads/{0}:refs/remotes/origin/{0}".format(self.__branch)
            ]
        else:
            fetchCmd += ["+refs/heads/*:refs/remotes/origin/*"]
        if self.__tag:
            fetchCmd.append("refs/tags/{0}:refs/tags/{0}".format(self.__tag))

        # do the checkout
        if self.__tag or self.__commit:
            await self.__checkoutTag(invoker, fetchCmd, switch)
        else:
            await self.__checkoutBranch(invoker, fetchCmd, switch)

    async def __checkoutTag(self, invoker, fetchCmd, switch):
        """Fetch and check out a fixed tag or commit.

        The fetch/checkout happens only on a fresh workspace (no valid HEAD
        yet) or when actively switching refs.
        """
        # checkout only if HEAD is invalid
        head = await invoker.callCommand(
            ["git", "rev-parse", "--verify", "-q", "HEAD"],
            stdout=False,
            cwd=self.__dir)
        # 'head' is the command's exit status: non-zero means no valid HEAD.
        if head or switch:
            await invoker.checkCommand(fetchCmd, cwd=self.__dir)
            await invoker.checkCommand([
                "git", "checkout", "-q", "--no-recurse-submodules",
                self.__commit if self.__commit else "tags/" + self.__tag
            ],
                                       cwd=self.__dir)
            # FIXME: will not be called again if interrupted!
            await self.__checkoutSubmodules(invoker)

    async def __checkoutBranch(self, invoker, fetchCmd, switch):
        """Fetch and then check out or fast-forward the configured branch."""
        await invoker.checkCommand(fetchCmd, cwd=self.__dir)
        if await invoker.callCommand(
            ["git", "rev-parse", "--verify", "-q", "HEAD"],
                stdout=False,
                cwd=self.__dir):
            # checkout only if HEAD is invalid
            await invoker.checkCommand([
                "git", "checkout", "--no-recurse-submodules", "-b",
                self.__branch, "remotes/origin/" + self.__branch
            ],
                                       cwd=self.__dir)
            await self.__checkoutSubmodules(invoker)
        elif switch:
            # We're switching the ref. There we will actively change the branch which
            # is normally forbidden.
            assert not self.__submodules
            if await invoker.callCommand([
                    "git", "show-ref", "-q", "--verify",
                    "refs/heads/" + self.__branch
            ]):
                # Branch does not exist. Create and checkout.
                await invoker.checkCommand([
                    "git", "checkout", "--no-recurse-submodules", "-b",
                    self.__branch, "remotes/origin/" + self.__branch
                ],
                                           cwd=self.__dir)
            else:
                # Branch exists already. Checkout and fast forward...
                await invoker.checkCommand([
                    "git", "checkout", "--no-recurse-submodules", self.__branch
                ],
                                           cwd=self.__dir)
                await invoker.checkCommand([
                    "git", "-c", "submodule.recurse=0", "merge", "--ff-only",
                    "refs/remotes/origin/" + self.__branch
                ],
                                           cwd=self.__dir)
        elif (await invoker.checkOutputCommand(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                cwd=self.__dir)) == self.__branch:
            # pull only if on original branch
            preUpdate = await self.__updateSubmodulesPre(invoker)
            await invoker.checkCommand([
                "git", "-c", "submodule.recurse=0", "merge", "--ff-only",
                "refs/remotes/origin/" + self.__branch
            ],
                                       cwd=self.__dir)
            await self.__updateSubmodulesPost(invoker, preUpdate)
        else:
            invoker.warn("Not updating", self.__dir,
                         "because branch was changed manually...")

    async def __checkoutSubmodules(self, invoker):
        """Do the initial submodule checkout, if submodules are enabled."""
        if not self.__submodules:
            return

        cmd = ["git", "submodule", "update", "--init"]
        if self.__shallowSubmodules:
            cmd.extend(["--depth", "1"])
        if self.__recurseSubmodules:
            cmd.append("--recursive")
        if isinstance(self.__submodules, list):
            # Restrict to the user supplied subset of submodule paths.
            cmd.append("--")
            cmd.extend(self.__submodules)
        await invoker.checkCommand(cmd, cwd=self.__dir)

    async def __updateSubmodulesPre(self, invoker, base="."):
        """Query the status of the currently checked out submodules.

        Returns a map with the paths of all checked out submodules as keys.
        The value will be True if the submodule looks untouched by the user and
        is deemed to be updateable. If the value is False the submodule is
        different from the expected vanilla checkout state. The list may only
        be a sub-set of all known submodules.
        """

        if not self.__submodules:
            return {}

        # List all active and checked out submodules. This way we know the
        # state of all submodules and compare them later to the expected state.
        args = [
            "git", "-C", base, "submodule", "-q", "foreach",
            "printf '%s\\t%s\\n' \"$sm_path\" \"$(git rev-parse HEAD)\""
        ]
        checkedOut = await invoker.checkOutputCommand(args, cwd=self.__dir)
        checkedOut = {
            path: commit
            for path, commit in (line.split("\t")
                                 for line in checkedOut.split("\n") if line)
        }
        if not checkedOut: return {}

        # List commits from git tree of all paths for checked out submodules.
        # This is what should be checked out.
        args = ["git", "-C", base, "ls-tree", "-z", "HEAD"] + sorted(
            checkedOut.keys())
        allPaths = await invoker.checkOutputCommand(args, cwd=self.__dir)
        # ls-tree -z output is "<mode> <type> <sha1>\t<path>\0"; keep only
        # gitlink ("commit") entries, keyed by normalized path.
        allPaths = {
            normPath(path): attribs.split(' ')[2]
            for attribs, path in (p.split('\t') for p in allPaths.split('\0')
                                  if p) if attribs.split(' ')[1] == "commit"
        }

        # Calculate which paths are in the right state. They must match the
        # commit and must be in detached HEAD state.
        ret = {}
        for path, commit in checkedOut.items():
            path = normPath(path)
            if allPaths.get(path) != commit:
                ret[path] = False
                continue

            # A symbolic HEAD means a branch was checked out inside the
            # submodule, i.e. the user touched it -> leave it alone.
            code = await invoker.callCommand(
                ["git", "symbolic-ref", "-q", "HEAD"],
                cwd=os.path.join(self.__dir, base, path))
            if code == 0:
                ret[path] = False
                continue

            ret[path] = True

        return ret

    async def __updateSubmodulesPost(self, invoker, oldState, base="."):
        """Update all submodules that are safe.

        Will update all submodules that are either new or have not been touched
        by the user. This will be done recursively if that is enabled.
        """
        if not self.__submodules:
            return {}
        if not os.path.exists(invoker.joinPath(self.__dir, base,
                                               ".gitmodules")):
            return {}

        # Sync remote URLs into our config in case they were changed
        args = ["git", "-C", base, "submodule", "sync"]
        await invoker.checkCommand(args, cwd=self.__dir)

        # List all paths as per .gitmodules. This gives us the list of all
        # known submodules. Optionally restrict to user specified subset.
        args = [
            "git", "-C", base, "config", "-f", ".gitmodules", "-z",
            "--get-regexp", "path"
        ]
        finishedProc = await invoker.runCommand(args,
                                                cwd=self.__dir,
                                                stdout=True)
        # A non-zero exit (e.g. no matching keys) yields an empty path list.
        allPaths = finishedProc.stdout.rstrip(
        ) if finishedProc.returncode == 0 else ""
        allPaths = [p.split("\n")[1] for p in allPaths.split("\0") if p]
        if isinstance(self.__submodules, list):
            subset = set(normPath(p) for p in self.__submodules)
            allPaths = [p for p in allPaths if normPath(p) in subset]

        # Update only new or unmodified paths
        updatePaths = [p for p in allPaths if oldState.get(normPath(p), True)]
        for p in sorted(set(allPaths) - set(updatePaths)):
            invoker.warn("Not updating submodule",
                         os.path.join(self.__dir, base, p),
                         "because its HEAD has been switched...")
        if not updatePaths:
            return

        # If we recurse into sub-submodules get their potential state up-front
        if self.__recurseSubmodules:
            # Explicit loop because of Python 3.5: "'await' expressions in
            # comprehensions are not supported".
            subMods = {}
            for p in updatePaths:
                subMods[p] = await self.__updateSubmodulesPre(
                    invoker, os.path.join(base, p))

        # Do the update of safe submodules
        args = ["git", "-C", base, "submodule", "update", "--init"]
        if self.__shallowSubmodules:
            args += ["--depth", "1"]
        args.append("--")
        args += updatePaths
        await invoker.checkCommand(args, cwd=self.__dir)

        # Update sub-submodules if requested
        # ('subMods' is only defined above under the same condition.)
        if self.__recurseSubmodules:
            for p in updatePaths:
                await self.__updateSubmodulesPost(invoker, subMods[p],
                                                  os.path.join(base, p))

    def canSwitch(self, oldSpec):
        """Return True if the checkout can be switched in-place to this state.

        Compares the old spec with the current one, ignoring properties that
        do not affect the checkout content. If only the ref/url changed and
        no submodules are involved, an in-place switch is attempted.
        """
        diff = self._diffSpec(oldSpec)

        # Filter irrelevant properties
        diff -= {"sslVerify", 'singleBranch', 'shallow', 'shallowSubmodules'}
        diff = set(prop for prop in diff if not prop.startswith("remote-"))

        # Enabling "submodules" and/or "recurseSubmodules" is ok. The
        # additional content will be checked out in invoke().
        if not oldSpec.get("submodules", False) and self.__submodules:
            diff.discard("submodules")
        # Fixed: the property is named 'recurseSubmodules' (see SCHEMA and
        # getProperties); the old 'recursiveSubmodules' spelling never matched
        # anything and needlessly forced a full re-checkout.
        if not oldSpec.get("recurseSubmodules",
                           False) and self.__recurseSubmodules:
            diff.discard("recurseSubmodules")

        # Without submodules the recurseSubmodules property is irrelevant
        if not self.__submodules:
            diff.discard("recurseSubmodules")

        # For the rest we can try a inline switch. Git does not handle
        # vanishing submodules well and neither do we. So if submodules are
        # enabled then we do not do an in-place update.
        if not diff:
            return True
        if not diff.issubset({"branch", "tag", "commit", "rev", "url"}):
            return False
        if self.__submodules:
            return False
        return True

    async def switch(self, invoker, oldSpec):
        """Perform an in-place switch to the current spec.

        Re-runs the normal checkout logic in the existing workspace and
        always reports success; if invoke() raises, the caller's attic
        fallback takes over.
        """
        # Try to checkout new state in old workspace. If something fails the
        # old attic logic will take over.
        await self.invoke(invoker, True)
        return True

    def asDigestScript(self):
        """Return forward compatible stable string describing this git module.

        The format is "url rev-spec dir" where rev-spec depends on the given reference.
        """
        if self.__commit:
            # A fixed commit pins the content; the URL is irrelevant.
            ret = self.__commit + " " + self.__dir
        else:
            # Optionally strip credentials from the URL before digesting.
            url = removeUserFromUrl(self.__url) if self.__stripUser else self.__url
            if self.__tag:
                ret = url + " refs/tags/" + self.__tag + " " + self.__dir
            else:
                ret = url + " refs/heads/" + self.__branch + " " + self.__dir

        if self.__submodules:
            ret += " submodules"
            if isinstance(self.__submodules, list):
                # Only a subset of submodules is cloned; record which.
                ret += "[{}]".format(",".join(self.__submodules))
            if self.__recurseSubmodules:
                ret += " recursive"

        return ret

    def asJenkins(self, workPath, credentials, options):
        """Build the Jenkins git-plugin SCM configuration XML element.

        :param workPath: workspace-relative base path of the job
        :param credentials: Jenkins credentials id for the remote, or falsy
        :param options: plugin options dict ("scm.git.shallow",
            "scm.git.timeout", "scm.ignore-hooks")
        :return: an ElementTree ``<scm>`` element
        :raises BuildError: on non-integer/negative shallow or timeout options
        """
        scm = ElementTree.Element("scm",
                                  attrib={
                                      "class": "hudson.plugins.git.GitSCM",
                                      "plugin": "[email protected]",
                                  })
        ElementTree.SubElement(scm, "configVersion").text = "2"

        userconfigs = ElementTree.SubElement(
            ElementTree.SubElement(scm, "userRemoteConfigs"),
            "hudson.plugins.git.UserRemoteConfig")

        url = ElementTree.SubElement(userconfigs, "url")
        url.text = self.__url

        if credentials:
            credentialsId = ElementTree.SubElement(userconfigs,
                                                   "credentialsId")
            credentialsId.text = credentials

        # Branch spec: commit id, tag ref or branch ref — exactly one applies.
        branch = ElementTree.SubElement(
            ElementTree.SubElement(ElementTree.SubElement(scm, "branches"),
                                   "hudson.plugins.git.BranchSpec"), "name")
        if self.__commit:
            branch.text = self.__commit
        elif self.__tag:
            branch.text = "refs/tags/" + self.__tag
        else:
            branch.text = "refs/heads/" + self.__branch

        ElementTree.SubElement(
            scm, "doGenerateSubmoduleConfigurations").text = "false"
        ElementTree.SubElement(scm, "submoduleCfg", attrib={"class": "list"})

        extensions = ElementTree.SubElement(scm, "extensions")
        ElementTree.SubElement(
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.RelativeTargetDirectory"),
            "relativeTargetDir").text = os.path.normpath(
                os.path.join(workPath, self.__dir))
        # remove untracked files and stale branches
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.CleanCheckout")
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.PruneStaleBranch")
        # set git clone options
        # A recipe-level integer 'shallow' overrides the plugin option.
        if isinstance(self.__shallow, int):
            shallow = str(self.__shallow)
        else:
            shallow = options.get("scm.git.shallow")
        timeout = options.get("scm.git.timeout")
        if shallow is not None or timeout is not None:
            co = ElementTree.SubElement(
                extensions, "hudson.plugins.git.extensions.impl.CloneOption")
            if shallow is not None:
                try:
                    shallow = int(shallow)
                    if shallow < 0: raise ValueError()
                except ValueError:
                    raise BuildError("Invalid 'git.shallow' option: " +
                                     str(shallow))
                # 0 disables shallow cloning entirely
                if shallow > 0:
                    ElementTree.SubElement(co, "shallow").text = "true"
                    ElementTree.SubElement(co, "noTags").text = "false"
                    ElementTree.SubElement(co, "reference").text = ""
                    ElementTree.SubElement(co, "depth").text = str(shallow)
                    ElementTree.SubElement(co, "honorRefspec").text = "false"

            if timeout is not None:
                try:
                    timeout = int(timeout)
                    if timeout < 0: raise ValueError()
                except ValueError:
                    raise BuildError("Invalid 'git.timeout' option: " +
                                     str(timeout))
                if timeout > 0:
                    ElementTree.SubElement(co, "timeout").text = str(timeout)

        if self.__submodules:
            # Subset-of-submodules is filtered out by hasJenkinsPlugin().
            assert isinstance(self.__submodules, bool)
            sub = ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.SubmoduleOption")
            if self.__recurseSubmodules:
                ElementTree.SubElement(sub,
                                       "recursiveSubmodules").text = "true"
            if self.__shallowSubmodules:
                ElementTree.SubElement(sub, "shallow").text = "true"
                ElementTree.SubElement(sub, "depth").text = "1"
            if timeout is not None:
                ElementTree.SubElement(sub, "timeout").text = str(timeout)

        if isTrue(options.get("scm.ignore-hooks", "0")):
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.IgnoreNotifyCommit")

        return scm

    def getDirectory(self):
        """Return the checkout directory relative to the workspace."""
        return self.__dir

    def isDeterministic(self):
        """Return True if the checkout content is fixed (tag or commit set)."""
        return bool(self.__tag) or bool(self.__commit)

    def hasJenkinsPlugin(self):
        """Return True if the Jenkins git-plugin can handle this checkout."""
        # Cloning a subset of submodules is not supported by the Jenkins
        # git-plugin. Fall back to our implementation in this case.
        return not isinstance(self.__submodules, list)

    def callGit(self, workspacePath, *args, check=True):
        """Run a git command inside the checkout and return stripped stdout.

        With ``check=False`` a failing git command yields an empty string
        instead of raising BuildError. Errors starting git itself always
        raise.
        """
        cmdLine = ['git', *args]
        cwd = os.path.join(workspacePath, self.__dir)
        try:
            stdout = subprocess.check_output(cmdLine,
                                             cwd=cwd,
                                             universal_newlines=True,
                                             stderr=subprocess.DEVNULL)
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
        except subprocess.CalledProcessError as e:
            if not check:
                return ""
            raise BuildError(
                "git error:\n Directory: '{}'\n Command: '{}'\n'{}'".
                format(cwd, " ".join(cmdLine), e.output.rstrip()))
        return stdout.strip()

    def status(self, workspacePath):
        """Compare the workspace checkout against the configured state.

        Inspects URL, commit/tag/branch, local modifications and unpushed
        commits (including submodules) and returns an ScmStatus with the
        collected taints. Git failures are folded into an ``error`` taint.
        """
        status = ScmStatus()
        try:
            onCorrectBranch = False
            onTag = False
            output = self.callGit(workspacePath, 'ls-remote', '--get-url')
            if output != self.__url:
                status.add(
                    ScmTaint.switched,
                    "> URL: configured: '{}', actual: '{}'".format(
                        self.__url, output))

            if self.__commit:
                output = self.callGit(workspacePath, 'rev-parse', 'HEAD')
                if output != self.__commit:
                    status.add(
                        ScmTaint.switched,
                        "> commit: configured: '{}', actual: '{}'".format(
                            self.__commit, output))
            elif self.__tag:
                output = self.callGit(workspacePath, 'tag', '--points-at',
                                      'HEAD').splitlines()
                if self.__tag not in output:
                    actual = ("'" + ", ".join(output) +
                              "'") if output else "not on any tag"
                    status.add(
                        ScmTaint.switched,
                        "> tag: configured: '{}', actual: {}".format(
                            self.__tag, actual))

                # Need to check if the tag still exists. Otherwise the "git
                # log" command at the end will trip.
                try:
                    self.callGit(workspacePath, 'rev-parse',
                                 'tags/' + self.__tag)
                    onTag = True
                except BuildError:
                    pass
            elif self.__branch:
                output = self.callGit(workspacePath, 'rev-parse',
                                      '--abbrev-ref', 'HEAD')
                if output != self.__branch:
                    status.add(
                        ScmTaint.switched,
                        "> branch: configured: '{}', actual: '{}'".format(
                            self.__branch, output))
                else:
                    # On the right branch: report commits ahead of origin.
                    output = self.callGit(
                        workspacePath, 'log', '--oneline',
                        'refs/remotes/origin/' + self.__branch + '..HEAD')
                    if output:
                        status.add(
                            ScmTaint.unpushed_main,
                            joinLines(
                                "> unpushed commits on {}:".format(
                                    self.__branch), indent(output, '   ')))
                    onCorrectBranch = True

            # Check for modifications wrt. checked out commit
            output = self.callGit(workspacePath, 'status', '--porcelain',
                                  '--ignore-submodules=all')
            if output:
                status.add(ScmTaint.modified,
                           joinLines("> modified:", indent(output, '   ')))

            # The following shows all unpushed commits reachable by any ref
            # (local branches, stash, detached HEAD, etc).
            # Exclude HEAD if the configured branch is checked out to not
            # double-count them. Does not mark the SCM as dirty. Exclude the
            # configured tag too if it is checked out. Otherwise the tag would
            # count as unpushed if it is not on a remote branch.
            what = ['--all', '--not', '--remotes']
            if onCorrectBranch: what.append('HEAD')
            if onTag: what.append("tags/" + self.__tag)
            output = self.callGit(workspacePath, 'log', '--oneline',
                                  '--decorate', *what)
            if output:
                status.add(
                    ScmTaint.unpushed_local,
                    joinLines("> unpushed local commits:",
                              indent(output, '   ')))

            # Dive into submodules
            self.__statusSubmodule(workspacePath, status, self.__submodules)

        except BuildError as e:
            status.add(ScmTaint.error, e.slogan)

        return status

    def __statusSubmodule(self, workspacePath, status, shouldExist, base="."):
        """Get the status of submodules and possibly sub-submodules.

        The regular "git status" command is not sufficient for our case. In
        case the submodule is not initialized "git status" will completely
        ignore it. Using "git submodule status" would help but its output is
        not meant to be parsed by tools.

        So we first get the list of all possible submodules with their
        tracked commit. Then the actual commit is compared and any further
        modifications and unpushed commits are checked.

        :param shouldExist: True/False or a list of submodule paths that are
            expected to be checked out
        :param base: directory (relative to the checkout) being inspected
        """
        if not os.path.exists(os.path.join(workspacePath, base,
                                           ".gitmodules")):
            return

        # List all paths as per .gitmodules. This gives us the list of all
        # known submodules.
        allPaths = self.callGit(workspacePath,
                                "-C",
                                base,
                                "config",
                                "-f",
                                ".gitmodules",
                                "-z",
                                "--get-regexp",
                                "path",
                                check=False)
        # NUL-separated "key\nvalue" records; keep only the value (the path).
        allPaths = [p.split("\n")[1] for p in allPaths.split("\0") if p]
        if not allPaths:
            return

        # Fetch the respective commits as per git ls-tree
        # ls-tree -z records are "<mode> <type> <sha>\t<path>"; keep only
        # gitlink ("commit") entries, mapping path -> tracked commit.
        allPaths = self.callGit(workspacePath, "-C", base, "ls-tree", "-z",
                                "HEAD", *allPaths)
        allPaths = {
            path: attribs.split(' ')[2]
            for attribs, path in (p.split('\t') for p in allPaths.split('\0')
                                  if p) if attribs.split(' ')[1] == "commit"
        }

        # Normalize subset of submodules
        if isinstance(shouldExist, list):
            shouldExist = set(normPath(p) for p in shouldExist)
        elif shouldExist:
            shouldExist = set(normPath(p) for p in allPaths.keys())
        else:
            shouldExist = set()

        # Check each submodule for their commit, modifications and unpushed
        # stuff. Unconditionally recurse to even see if something is there even
        # though it shouldn't.
        for path, commit in sorted(allPaths.items()):
            subPath = os.path.join(base, path)
            subShouldExist = normPath(path) in shouldExist
            if not os.path.exists(os.path.join(workspacePath, subPath,
                                               ".git")):
                if subShouldExist:
                    status.add(ScmTaint.modified,
                               "> submodule not checked out: " + subPath)
                elif not dirIsEmpty(os.path.join(workspacePath, subPath)):
                    status.add(ScmTaint.modified,
                               "> ignored submodule not empty: " + subPath)
                continue
            elif not subShouldExist:
                status.add(ScmTaint.modified,
                           "> submodule checked out: " + subPath)

            realCommit = self.callGit(workspacePath, "-C", subPath,
                                      "rev-parse", "HEAD")
            if commit != realCommit:
                status.add(
                    ScmTaint.switched,
                    "> submodule '{}' switched commit: configured: '{}', actual: '{}'"
                    .format(subPath, commit, realCommit))

            output = self.callGit(workspacePath, "-C", subPath, 'status',
                                  '--porcelain', '--ignore-submodules=all')
            if output:
                status.add(
                    ScmTaint.modified,
                    joinLines("> submodule '{}' modified:".format(subPath),
                              indent(output, '   ')))

            output = self.callGit(workspacePath, "-C", subPath, 'log',
                                  '--oneline', '--decorate', '--all', '--not',
                                  '--remotes')
            if output:
                status.add(
                    ScmTaint.unpushed_local,
                    joinLines(
                        "> submodule '{}' unpushed local commits:".format(
                            subPath), indent(output, '   ')))

            self.__statusSubmodule(workspacePath, status,
                                   self.__recurseSubmodules, subPath)

    def getAuditSpec(self):
        """Describe this SCM for the audit engine as (type, directory, extra)."""
        extra = {}
        if self.__submodules:
            extra['submodules'] = self.__submodules
        if self.__submodules and self.__recurseSubmodules:
            extra['recurseSubmodules'] = True
        return ("git", self.__dir, extra)

    def hasLiveBuildId(self):
        """Git checkouts always have a live build-id (the HEAD commit)."""
        return True

    async def predictLiveBuildId(self, step):
        """Predict the live build-id without a checkout.

        For a fixed commit the answer is immediate. Otherwise the remote is
        queried with ``git ls-remote`` for the tag/branch ref. Returns the
        commit hash as bytes, or None if the prediction failed.
        """
        if self.__commit:
            return bytes.fromhex(self.__commit)

        with stepAction(step, "LS-REMOTE", self.__url, (INFO, TRACE)) as a:
            if self.__tag:
                # Annotated tags are objects themselves. We need the commit object!
                refs = [
                    "refs/tags/" + self.__tag + '^{}',
                    "refs/tags/" + self.__tag
                ]
            else:
                refs = ["refs/heads/" + self.__branch]
            cmdLine = ['git', 'ls-remote', self.__url] + refs
            try:
                stdout = await check_output(cmdLine,
                                            stderr=subprocess.DEVNULL,
                                            universal_newlines=True)
                output = stdout.strip()
            except subprocess.CalledProcessError as e:
                a.fail("exit {}".format(e.returncode), WARNING)
                return None
            except OSError as e:
                a.fail("error ({})".format(e))
                return None

            # have we found anything at all?
            if not output:
                a.fail("unknown", WARNING)
                return None

            # See if we got one of our intended refs. Git is generating lines with
            # the following format:
            #
            #   <sha1>\t<refname>
            #
            # Put the output into a dict with the refname as key. Be extra careful
            # and strip out lines not matching this pattern.
            output = {
                commitAndRef[1].strip(): bytes.fromhex(commitAndRef[0].strip())
                for commitAndRef in (line.split('\t')
                                     for line in output.split('\n'))
                if len(commitAndRef) == 2
            }
            # refs are ordered by preference (dereferenced tag first)
            for ref in refs:
                if ref in output: return output[ref]

            # uhh, should not happen...
            a.fail("unknown", WARNING)
            return None

    def calcLiveBuildId(self, workspacePath):
        """Return the live build-id: the checked out commit hash as bytes."""
        if self.__commit:
            return bytes.fromhex(self.__commit)
        head = self.callGit(workspacePath, 'rev-parse', 'HEAD').strip()
        return bytes.fromhex(head)

    def getLiveBuildIdSpec(self, workspacePath):
        """Return '=<commit>' for pinned commits, else 'g<path>' to query git."""
        if self.__commit:
            return "=" + self.__commit
        return "g" + os.path.join(workspacePath, self.__dir)

    @staticmethod
    def processLiveBuildIdSpec(dir):
        """Resolve a 'g<dir>' live build-id spec to the checked out commit."""
        cmd = ["git", "rev-parse", "HEAD"]
        try:
            head = subprocess.check_output(cmd,
                                           cwd=dir,
                                           universal_newlines=True)
        except subprocess.CalledProcessError as e:
            raise BuildError("Git audit failed: " + str(e))
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
        return head.strip()
# Example #20
class GitScm(Scm):
    """Git SCM: checks out a branch, tag or fixed commit of a repository."""

    # Validation schema for the recipe's 'git' scm entry. 'rev' may carry a
    # branch ref, tag ref or full commit id; additional remotes are given
    # as "remote-<name>" keys.
    SCHEMA = schema.Schema({
        'scm': 'git',
        'url': str,
        schema.Optional('dir'): str,
        schema.Optional('if'): schema.Or(str, IfExpression),
        schema.Optional('branch'): str,
        schema.Optional('tag'): str,
        schema.Optional('commit'): str,
        schema.Optional('rev'): str,
        schema.Optional(schema.Regex('^remote-.*')): str,
        schema.Optional('sslVerify'): bool,
        schema.Optional('singleBranch'): bool,
        schema.Optional('shallow'): schema.Or(int, str)
    })
    # Prefix of spec keys that define additional git remotes.
    REMOTE_PREFIX = "remote-"

    def __init__(self, spec, overrides=None, secureSSL=None):
        """Parse the git scm *spec*.

        :param spec: validated scm dictionary (see SCHEMA)
        :param overrides: applied scmOverrides, forwarded to the base class
            (default: none)
        :param secureSSL: default for 'sslVerify' when the spec omits it
        :raises ParseError: on invalid 'rev' format, commit id or remote name
        """
        # FIX: avoid the shared mutable default argument ``overrides=[]``;
        # create a fresh list per call instead.
        super().__init__(spec, [] if overrides is None else overrides)
        self.__url = spec["url"]
        self.__branch = None
        self.__tag = None
        self.__commit = None
        self.__remotes = {}
        if "rev" in spec:
            rev = spec["rev"]
            if rev.startswith("refs/heads/"):
                self.__branch = rev[11:]
            elif rev.startswith("refs/tags/"):
                self.__tag = rev[10:]
            elif len(rev) == 40:
                self.__commit = rev
            else:
                raise ParseError("Invalid rev format: " + rev)
        # Explicit branch/tag/commit keys take precedence over 'rev'.
        self.__branch = spec.get("branch", self.__branch)
        self.__tag = spec.get("tag", self.__tag)
        self.__commit = spec.get("commit", self.__commit)
        if self.__commit:
            # validate commit: must be a full lower-case 40-digit sha1
            if re.match("^[0-9a-f]{40}$", self.__commit) is None:
                raise ParseError("Invalid commit id: " + str(self.__commit))
        elif not self.__branch and not self.__tag:
            # nothing specified at all -> master branch
            self.__branch = "master"
        self.__dir = spec.get("dir", ".")
        # convert remotes into separate dictionary
        for key, val in spec.items():
            if key.startswith(GitScm.REMOTE_PREFIX):
                stripped_key = key[len(GitScm.REMOTE_PREFIX):]  # remove prefix
                if stripped_key == "origin":
                    # "origin" is reserved for the main 'url'
                    raise ParseError("Invalid remote name: " + stripped_key)
                self.__remotes.update({stripped_key: val})
        self.__sslVerify = spec.get('sslVerify', secureSSL)
        self.__singleBranch = spec.get('singleBranch')
        self.__shallow = spec.get('shallow')

    def getProperties(self):
        """Return the flat property dictionary describing this SCM."""
        # Canonical rev string mirroring the commit/tag/branch precedence.
        if self.__commit:
            rev = self.__commit
        elif self.__tag:
            rev = "refs/tags/" + self.__tag
        else:
            rev = "refs/heads/" + self.__branch

        properties = super().getProperties()
        properties.update({
            'scm': 'git',
            'url': self.__url,
            'branch': self.__branch,
            'tag': self.__tag,
            'commit': self.__commit,
            'dir': self.__dir,
            'rev': rev,
            'sslVerify': self.__sslVerify,
            'singleBranch': self.__singleBranch,
            'shallow': self.__shallow,
        })
        for name, remoteUrl in self.__remotes.items():
            properties[GitScm.REMOTE_PREFIX + name] = remoteUrl
        return properties

    async def invoke(self, invoker):
        """Create or update the git checkout in the workspace.

        Initializes the repository if needed, reconciles remotes with the
        configured ones, fetches (optionally shallow/single-branch) and
        checks out the configured tag/commit or branch.
        """
        # make sure the git directory exists
        if not os.path.isdir(invoker.joinPath(self.__dir, ".git")):
            await invoker.checkCommand(["git", "init", self.__dir])

        # Shallow implies singleBranch
        if self.__singleBranch is None:
            singleBranch = self.__shallow is not None
        else:
            singleBranch = self.__singleBranch
        # Without a configured branch there is nothing to single-track.
        singleBranch = singleBranch and (self.__branch is not None)

        # setup and update remotes
        remotes = {"origin": self.__url}
        remotes.update(self.__remotes)
        existingRemotes = await invoker.checkOutputCommand(["git", "remote"],
                                                           cwd=self.__dir)
        for remote in existingRemotes.split("\n"):
            if remote in remotes:
                # Known remote: update its URL if it diverged from the spec.
                cfgUrl = remotes[remote]
                realUrl = await invoker.checkOutputCommand(
                    ["git", "ls-remote", "--get-url", remote], cwd=self.__dir)
                if cfgUrl != realUrl:
                    await invoker.checkCommand(
                        ["git", "remote", "set-url", remote, cfgUrl],
                        cwd=self.__dir)
                del remotes[remote]

        # add remaining (new) remotes
        for remote, url in remotes.items():
            addCmd = ["git", "remote", "add", remote, url]
            if singleBranch: addCmd += ["-t", self.__branch]
            await invoker.checkCommand(addCmd, cwd=self.__dir)

        # relax security if requested
        if not self.__sslVerify:
            await invoker.checkCommand(
                ["git", "config", "http.sslVerify", "false"], cwd=self.__dir)

        # Calculate refspec that is used internally. For the user a regular
        # refspec is kept in the git config.

        # Base fetch command with shallow support
        fetchCmd = ["git", "fetch", "-p"]
        if isinstance(self.__shallow, int):
            fetchCmd.append("--depth={}".format(self.__shallow))
        elif isinstance(self.__shallow, str):
            fetchCmd.append("--shallow-since={}".format(self.__shallow))
        fetchCmd.append("origin")

        # Calculate appropriate refspec (all/singleBranch/tag)
        if singleBranch:
            fetchCmd += [
                "+refs/heads/{0}:refs/remotes/origin/{0}".format(self.__branch)
            ]
        else:
            fetchCmd += ["+refs/heads/*:refs/remotes/origin/*"]
        if self.__tag:
            fetchCmd.append("refs/tags/{0}:refs/tags/{0}".format(self.__tag))

        # do the checkout
        if self.__tag or self.__commit:
            await self.__checkoutTag(invoker, fetchCmd)
        else:
            await self.__checkoutBranch(invoker, fetchCmd)

    async def __checkoutTag(self, invoker, fetchCmd):
        """Fetch and check out the configured tag or commit, if not done yet."""
        # checkout only if HEAD is invalid
        # NOTE(review): a truthy return from callCommand appears to be the
        # nonzero exit code of `rev-parse --verify`, i.e. no valid HEAD yet —
        # confirm against the invoker API.
        head = await invoker.callCommand(
            ["git", "rev-parse", "--verify", "-q", "HEAD"],
            stdout=False,
            cwd=self.__dir)
        if head:
            await invoker.checkCommand(fetchCmd, cwd=self.__dir)
            await invoker.checkCommand([
                "git", "checkout", "-q",
                self.__commit if self.__commit else "tags/" + self.__tag
            ],
                                       cwd=self.__dir)

    async def __checkoutBranch(self, invoker, fetchCmd):
        """Fetch and fast-forward the configured branch.

        A fresh checkout creates the local branch; an existing checkout is
        only fast-forwarded when still on the configured branch, otherwise a
        warning is emitted and the workspace is left untouched.
        """
        await invoker.checkCommand(fetchCmd, cwd=self.__dir)
        if await invoker.callCommand(
            ["git", "rev-parse", "--verify", "-q", "HEAD"],
                stdout=False,
                cwd=self.__dir):
            # checkout only if HEAD is invalid
            await invoker.checkCommand([
                "git", "checkout", "-b", self.__branch,
                "remotes/origin/" + self.__branch
            ],
                                       cwd=self.__dir)
        elif (await invoker.checkOutputCommand(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                cwd=self.__dir)) == self.__branch:
            # pull only if on original branch
            await invoker.checkCommand([
                "git", "merge", "--ff-only",
                "refs/remotes/origin/" + self.__branch
            ],
                                       cwd=self.__dir)
        else:
            invoker.warn("Not updating", self.__dir,
                         "because branch was changed manually...")

    def asDigestScript(self):
        """Return forward compatible stable string describing this git module.

        The format is "url rev-spec dir" where rev-spec depends on the given reference.
        """
        if self.__commit:
            # A fixed commit pins the content; the URL is irrelevant.
            parts = [self.__commit]
        elif self.__tag:
            parts = [self.__url, "refs/tags/" + self.__tag]
        else:
            parts = [self.__url, "refs/heads/" + self.__branch]
        parts.append(self.__dir)
        return " ".join(parts)

    def asJenkins(self, workPath, credentials, options):
        """Build the Jenkins git-plugin SCM configuration XML element.

        :param workPath: workspace-relative base path of the job
        :param credentials: Jenkins credentials id for the remote, or falsy
        :param options: plugin options dict ("scm.git.shallow",
            "scm.git.timeout", "scm.ignore-hooks")
        :return: an ElementTree ``<scm>`` element
        :raises BuildError: on non-integer/negative shallow or timeout options
        """
        scm = ElementTree.Element("scm",
                                  attrib={
                                      "class": "hudson.plugins.git.GitSCM",
                                      "plugin": "[email protected]",
                                  })
        ElementTree.SubElement(scm, "configVersion").text = "2"

        userconfigs = ElementTree.SubElement(
            ElementTree.SubElement(scm, "userRemoteConfigs"),
            "hudson.plugins.git.UserRemoteConfig")

        url = ElementTree.SubElement(userconfigs, "url")
        url.text = self.__url

        if credentials:
            credentialsId = ElementTree.SubElement(userconfigs,
                                                   "credentialsId")
            credentialsId.text = credentials

        # Branch spec: commit id, tag ref or branch ref — exactly one applies.
        branch = ElementTree.SubElement(
            ElementTree.SubElement(ElementTree.SubElement(scm, "branches"),
                                   "hudson.plugins.git.BranchSpec"), "name")
        if self.__commit:
            branch.text = self.__commit
        elif self.__tag:
            branch.text = "refs/tags/" + self.__tag
        else:
            branch.text = "refs/heads/" + self.__branch

        ElementTree.SubElement(
            scm, "doGenerateSubmoduleConfigurations").text = "false"
        ElementTree.SubElement(scm, "submoduleCfg", attrib={"class": "list"})

        extensions = ElementTree.SubElement(scm, "extensions")
        ElementTree.SubElement(
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.RelativeTargetDirectory"),
            "relativeTargetDir").text = os.path.normpath(
                os.path.join(workPath, self.__dir))
        # remove untracked files and stale branches
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.CleanCheckout")
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.PruneStaleBranch")
        # set git clone options
        shallow = options.get("scm.git.shallow")
        timeout = options.get("scm.git.timeout")
        if shallow is not None or timeout is not None:
            co = ElementTree.SubElement(
                extensions, "hudson.plugins.git.extensions.impl.CloneOption")
            if shallow is not None:
                try:
                    shallow = int(shallow)
                    if shallow < 0: raise ValueError()
                except ValueError:
                    raise BuildError("Invalid 'git.shallow' option: " +
                                     str(shallow))
                # 0 disables shallow cloning entirely
                if shallow > 0:
                    ElementTree.SubElement(co, "shallow").text = "true"
                    ElementTree.SubElement(co, "noTags").text = "false"
                    ElementTree.SubElement(co, "reference").text = ""
                    ElementTree.SubElement(co, "depth").text = str(shallow)
                    ElementTree.SubElement(co, "honorRefspec").text = "false"

            if timeout is not None:
                try:
                    timeout = int(timeout)
                    if timeout < 0: raise ValueError()
                except ValueError:
                    # FIX: the error message reported str(shallow) here; it
                    # must show the offending timeout value instead.
                    raise BuildError("Invalid 'git.timeout' option: " +
                                     str(timeout))
                if timeout > 0:
                    ElementTree.SubElement(co, "timeout").text = str(timeout)

        if isTrue(options.get("scm.ignore-hooks", "0")):
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.IgnoreNotifyCommit")

        return scm

    def getDirectory(self):
        """Return the checkout directory relative to the workspace."""
        return self.__dir

    def isDeterministic(self):
        """Return True if the checkout content is fixed (tag or commit set)."""
        return bool(self.__tag) or bool(self.__commit)

    def hasJenkinsPlugin(self):
        """The Jenkins git-plugin handles all configurations of this SCM."""
        return True

    def callGit(self, workspacePath, *args):
        """Run a git command inside the checkout and return stripped stdout.

        Raises BuildError if git fails or cannot be executed.
        """
        cmdLine = ['git', *args]
        cwd = os.path.join(workspacePath, self.__dir)
        try:
            stdout = subprocess.check_output(cmdLine,
                                             cwd=cwd,
                                             universal_newlines=True,
                                             stderr=subprocess.DEVNULL)
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
        except subprocess.CalledProcessError as e:
            raise BuildError(
                "git error:\n Directory: '{}'\n Command: '{}'\n'{}'".format(
                    cwd, " ".join(cmdLine), e.output.rstrip()))
        return stdout.strip()

    def status(self, workspacePath):
        """Determine the state of the checkout at workspacePath.

        Returns a ScmStatus accumulating taints: switched URL/branch/tag/
        commit, local modifications and unpushed commits. Any git failure is
        recorded as ScmTaint.error instead of being propagated.
        """
        status = ScmStatus()
        try:
            onCorrectBranch = False
            onTag = False
            output = self.callGit(workspacePath, 'ls-remote', '--get-url')
            if output != self.__url:
                status.add(
                    ScmTaint.switched,
                    "> URL: configured: '{}', actual: '{}'".format(
                        self.__url, output))

            if self.__commit:
                output = self.callGit(workspacePath, 'rev-parse', 'HEAD')
                if output != self.__commit:
                    status.add(
                        ScmTaint.switched,
                        "> commit: configured: '{}', actual: '{}'".format(
                            self.__commit, output))
            elif self.__tag:
                output = self.callGit(workspacePath, 'tag', '--points-at',
                                      'HEAD').splitlines()
                if self.__tag not in output:
                    actual = ("'" + ", ".join(output) +
                              "'") if output else "not on any tag"
                    status.add(
                        ScmTaint.switched,
                        "> tag: configured: '{}', actual: {}".format(
                            self.__tag, actual))

                # Need to check if the tag still exists. Otherwise the "git
                # log" command at the end will trip.
                try:
                    self.callGit(workspacePath, 'rev-parse',
                                 'tags/' + self.__tag)
                    onTag = True
                except BuildError:
                    pass
            elif self.__branch:
                output = self.callGit(workspacePath, 'rev-parse',
                                      '--abbrev-ref', 'HEAD')
                if output != self.__branch:
                    status.add(
                        ScmTaint.switched,
                        "> branch: configured: '{}', actual: '{}'".format(
                            self.__branch, output))
                else:
                    # On the right branch: list commits ahead of the remote
                    # tracking branch.
                    output = self.callGit(
                        workspacePath, 'log', '--oneline',
                        'refs/remotes/origin/' + self.__branch + '..HEAD')
                    if output:
                        status.add(
                            ScmTaint.unpushed_main,
                            joinLines(
                                "> unpushed commits on {}:".format(
                                    self.__branch), indent(output, '   ')))
                    onCorrectBranch = True

            # Check for modifications wrt. checked out commit
            output = self.callGit(workspacePath, 'status', '--porcelain')
            if output:
                status.add(ScmTaint.modified,
                           joinLines("> modified:", indent(output, '   ')))

            # The following shows all unpushed commits reachable by any ref
            # (local branches, stash, detached HEAD, etc).
            # Exclude HEAD if the configured branch is checked out to not
            # double-count them. Does not mark the SCM as dirty. Exclude the
            # configured tag too if it is checked out. Otherwise the tag would
            # count as unpushed if it is not on a remote branch.
            what = ['--all', '--not', '--remotes']
            if onCorrectBranch: what.append('HEAD')
            if onTag: what.append("tags/" + self.__tag)
            output = self.callGit(workspacePath, 'log', '--oneline',
                                  '--decorate', *what)
            if output:
                status.add(
                    ScmTaint.unpushed_local,
                    joinLines("> unpushed local commits:",
                              indent(output, '   ')))

        except BuildError as e:
            status.add(ScmTaint.error, e.slogan)

        return status

    def getAuditSpec(self):
        # Audit engine name, checkout directory and extra arguments (none).
        return ("git", self.__dir, {})

    def hasLiveBuildId(self):
        # A concrete commit id can always be determined for a git checkout.
        return True

    async def predictLiveBuildId(self, step):
        """Predict the live build-id without cloning the repository.

        If pinned to a commit the id is known immediately. Otherwise the
        remote is queried with ``git ls-remote`` for the configured tag or
        branch. Returns the binary commit id or None if it could not be
        determined.
        """
        if self.__commit:
            return bytes.fromhex(self.__commit)

        with stepAction(step, "LS-REMOTE", self.__url, (INFO, TRACE)) as a:
            if self.__tag:
                # Annotated tags are objects themselves. We need the commit object!
                refs = [
                    "refs/tags/" + self.__tag + '^{}',
                    "refs/tags/" + self.__tag
                ]
            else:
                refs = ["refs/heads/" + self.__branch]
            cmdLine = ['git', 'ls-remote', self.__url] + refs
            try:
                stdout = await check_output(cmdLine,
                                            stderr=subprocess.DEVNULL,
                                            universal_newlines=True)
                output = stdout.strip()
            except subprocess.CalledProcessError as e:
                a.fail("exit {}".format(e.returncode), WARNING)
                return None
            except OSError as e:
                a.fail("error ({})".format(e))
                return None

            # have we found anything at all?
            if not output:
                a.fail("unknown", WARNING)
                return None

            # See if we got one of our intended refs. Git is generating lines with
            # the following format:
            #
            #   <sha1>\t<refname>
            #
            # Put the output into a dict with the refname as key. Be extra careful
            # and strip out lines not matching this pattern.
            output = {
                commitAndRef[1].strip(): bytes.fromhex(commitAndRef[0].strip())
                for commitAndRef in (line.split('\t')
                                     for line in output.split('\n'))
                if len(commitAndRef) == 2
            }
            for ref in refs:
                if ref in output: return output[ref]

            # uhh, should not happen...
            a.fail("unknown", WARNING)
            return None

    def calcLiveBuildId(self, workspacePath):
        """Return the binary commit id the workspace is currently at."""
        if not self.__commit:
            # Not pinned: ask git for the current HEAD of the checkout.
            head = self.callGit(workspacePath, 'rev-parse', 'HEAD').strip()
            return bytes.fromhex(head)
        return bytes.fromhex(self.__commit)

    def getLiveBuildIdSpec(self, workspacePath):
        """Describe how to obtain the live build-id of this checkout.

        "=<sha1>" carries the id verbatim; "g<path>" asks the consumer to
        query git in the given directory.
        """
        if self.__commit:
            return "=" + self.__commit
        return "g" + os.path.join(workspacePath, self.__dir)

    @staticmethod
    def processLiveBuildIdSpec(dir):
        """Resolve a "g<path>" live build-id spec by asking git for HEAD."""
        cmd = ["git", "rev-parse", "HEAD"]
        try:
            head = subprocess.check_output(cmd, cwd=dir,
                                           universal_newlines=True)
        except subprocess.CalledProcessError as e:
            raise BuildError("Git audit failed: " + str(e))
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
        return head.strip()
Пример #21
0
def entrypoint():
    """Command-line entry point: map MIDI notes to sample clips in EDL files.

    Reads a YAML sample description and a MIDI file, then writes one EDL
    timeline per sample in which every occurrence of the sample's note
    becomes a clip at the corresponding time.
    """
    args = docopt(__doc__, version=f"midiedlsequencer {__version__}")
    if args["--middle-c"] not in ["C3", "C4"]:
        print("Error: --middle-c argument must be either C3 or C4.\n")
        print(__doc__)
        exit(1)

    with open(args['-s']) as inf:
        # NOTE(review): yaml.Loader can construct arbitrary Python objects;
        # prefer yaml.SafeLoader if the samples file may be untrusted.
        samples = yaml.load(inf, Loader=yaml.Loader)

    # Validate the samples file structure before doing any work.
    schema.Schema([{
        "name": str,
        "note": schema.Regex(r"[A-G](#|b)?-?\d+"),
        "src": str,
        "start": float,
        "length": float
    }]).validate(samples)

    mid = mido.MidiFile(args['-m'])

    # MIDI messages represent time in ticks relative to the previous
    # message. The following code recovers the absolute time from
    # the beginning of the file in seconds. The MidiFile object computes
    # the time difference in seconds as it's iterated through, and returns
    # it in a property of the messages yielded by the iterator. I don't
    # like that it's stateful and somewhat obscure, but it works.
    abs_msgs = []
    acc_time = 0.0
    for msg in mid:
        acc_time += msg.time
        abs_msgs.append(msg.copy(time=acc_time))

    # Bug fix: compare with '==' -- 'is' on a string literal relies on
    # CPython interning and is not guaranteed (emits a SyntaxWarning).
    notes = [m for m in abs_msgs if m.type == 'note_on']

    try:
        fps = float(args['--fps'])
    except ValueError:
        print("Error: --fps argument must be a float literal.\n")
        print(__doc__)
        exit(1)

    def range_from_s_fps(start, dur, fps):
        # Build an OTIO TimeRange from start/duration in seconds at the
        # given frame rate.
        return TimeRange(
            start_time=RationalTime.from_seconds(start).rescaled_to(fps),
            duration=RationalTime.from_seconds(dur).rescaled_to(fps))

    for s in samples:
        # The following selects the times of the note corresponding to the
        # sample. With middle-c C3 every note name is one octave (12 MIDI
        # notes) higher than pretty_midi's C4-based convention.
        midi_note = pretty_midi.note_name_to_number(
            s['note']) + (12 if args['--middle-c'] == "C3" else 0)
        times = [n.time for n in notes if n.note == midi_note]

        # The following creates an EDL timeline with the times computed above.

        tl = otio.schema.Timeline(name=s['name'])
        track = otio.schema.Track()

        # DaVinci Resolve will automatically set the starting timecode for an imported
        # EDL timeline to the start of the first clip. This has the undesirable effect
        # of removing initial blank time. As a workaround, a clip with duration zero
        # is inserted at the beginning of the EDL file: DVR will not place a clip in
        # the timeline, but it will set the starting timecode correctly.
        zero_clip = otio.schema.Clip(
            name=s["src"],
            media_reference=otio.schema.ExternalReference(target_url=s["src"]),
            source_range=range_from_s_fps(0, 0, fps))
        track.append(zero_clip)

        clip = otio.schema.Clip(
            name=s["src"],  # This is the actual clip
            media_reference=otio.schema.ExternalReference(target_url=s["src"]),
            source_range=range_from_s_fps(s['start'], s['length'], fps))

        # Blank time in OTIO timelines is represented with gaps. To accommodate
        # non-contiguous samples, gaps with appropriate durations must be
        # inserted between clips.
        used_time = 0.0
        for t in times:
            track.append(
                otio.schema.Gap(
                    source_range=range_from_s_fps(0, t - used_time, fps)))
            track.append(clip.deepcopy())
            used_time = t + s['length']
        tl.tracks.append(track)
        otio.adapters.write_to_file(
            tl, os.path.join(args['-o'], s['name'] + '.edl'))
Пример #22
0
# ElastiCluster imports
from elasticluster.utils import string_to_boolean


## custom validators

def validator(fn):
    """
    Decorate a function for use as a validator with `schema`_

    The function is wrapped in ``schema.Use`` so that it can be embedded
    directly in a schema definition.

    .. _schema: https://github.com/keleshev/schema
    """
    return schema.Use(fn)


# Validator accepting strings containing alphanumeric/underscore characters.
# NOTE(review): the pattern is unanchored (no ^/$), so any string containing
# such a substring matches -- confirm whether full-string matching is intended.
alphanumeric = schema.Regex(r'[0-9A-Za-z_]+')


# Validator converting common boolean spellings in config files to bool.
boolean = schema.Use(string_to_boolean)


def _file_name(v):
    try:
        return os.path.expanduser(v)
    except Exception as err:
        raise ValueError("invalid file name `{0}`: {1}".format(v, err))


@validator
def existing_file(v):
    f = _file_name(v)
Пример #23
0
class FileInfoAnalyzerReport(CortexAnalyzerReport):
    """ Represents a Cortex FileInfo_8_0 analysis JSON report. """

    report_schema = schema.Schema({
        "summary": {
            "taxonomies": [
                schema.Schema(
                    {
                        "level": schema.Or("info", "malicious", "safe"),
                        "namespace": "FileInfo",
                        #    "predicate": str,
                        #    "value": str
                    },
                    ignore_extra_keys=True)
            ]
        },
        "full": {
            "results": [{
                "submodule_name":
                "Basic properties",
                "results": [
                    {
                        "submodule_section_header": "Hashes",
                        "submodule_section_content": {
                            "md5": schema.Regex(r'^[0-9a-z]{32}$'),
                            "sha1": schema.Regex(r'^[0-9a-z]{40}$'),
                            "sha256": schema.Regex(r'^[0-9a-z]{64}$'),
                            "ssdeep": schema.Regex(r'^[0-9A-Za-z:+/]*$'),
                        }
                    },
                    {
                        # We consume further structures submodule_sections and
                        # explicitly check the submodule_section_header to not
                        # be "Hashes" or it will accept "Hashes"-structures with
                        # malformed hashes.
                        "submodule_section_header":
                        schema.And(str, lambda s: s != "Hashes"),
                        "submodule_section_content":
                        schema.Schema({}, ignore_extra_keys=True)
                    },
                ],
                "summary": {
                    "taxonomies": [
                        schema.Schema(
                            {
                                "level": schema.Or("info", "malicious",
                                                   "safe"),
                                "namespace": "FileInfo",
                                #    "predicate": str,
                                #    "value": str
                            },
                            ignore_extra_keys=True)
                    ]
                }
            }]
        },
        "success": bool,
        "artifacts": CortexAnalyzerReport.report_schema_artifacts,
        "operations": []
    })

    def __init__(self, unvalidated_report=None):
        """
        @param unvalidated_report: hash with report data from Cortex FileInfo
                                   Analyzer
        """
        super().__init__(unvalidated_report)

        # Per report_schema, 'full' maps to a dict and 'results' to a list.
        # Bug fix: the fallback defaults were swapped ([] for 'full', {} for
        # 'results'), which would raise AttributeError on a report lacking
        # 'full' because a list has no .get().
        basic_properties = self.get_element_from_list_of_dicts(
            self.report.get('full', {}).get('results', []), 'submodule_name',
            'Basic properties').get('results', [])
        self._hashes = self.get_element_from_list_of_dicts(
            basic_properties, 'submodule_section_header',
            'Hashes').get('submodule_section_content', {})

    @property
    def sha256sum(self):
        """ Return the sha256 sum. """
        return self._hashes.get('sha256')

    @property
    def md5sum(self):
        """ Return the md5 sum. """
        return self._hashes.get('md5')

    @property
    def ssdeepsum(self):
        """ Return the ssdeep sum. """
        # TODO: think about if we want to compare ssdeep hashes
        return self._hashes.get('ssdeep')
Пример #24
0
Файл: git.py Проект: kolewu/bob
class GitScm(Scm):
    """Git source code management handler.

    Checks out a branch, tag or fixed commit of a git repository, can render
    checkout shell scripts and Jenkins git-plugin configurations, and reports
    the state of an existing checkout.
    """

    SCHEMA = schema.Schema({
        'scm': 'git',
        'url': str,
        schema.Optional('dir'): str,
        schema.Optional('if'): str,
        schema.Optional('branch'): str,
        schema.Optional('tag'): str,
        schema.Optional('commit'): str,
        schema.Optional('rev'): str,
        schema.Optional(schema.Regex('^remote-.*')): str,
    })
    REMOTE_PREFIX = "remote-"

    def __init__(self, spec, overrides=None):
        """
        @param spec: validated SCM specification dict (see SCHEMA)
        @param overrides: optional list of SCM overrides for the base class
        """
        # Bug fix: 'overrides=[]' was a mutable default argument; use None
        # as sentinel and pass a fresh list to the base class instead.
        super().__init__(overrides if overrides is not None else [])
        self.__recipe = spec['recipe']
        self.__url = spec["url"]
        self.__branch = None
        self.__tag = None
        self.__commit = None
        self.__remotes = {}
        # A generic 'rev' may name a branch, tag or 40-digit commit id.
        # Explicit 'branch'/'tag'/'commit' keys take precedence below.
        if "rev" in spec:
            rev = spec["rev"]
            if rev.startswith("refs/heads/"):
                self.__branch = rev[11:]
            elif rev.startswith("refs/tags/"):
                self.__tag = rev[10:]
            elif len(rev) == 40:
                self.__commit = rev
            else:
                raise ParseError("Invalid rev format: " + rev)
        self.__branch = spec.get("branch", self.__branch)
        self.__tag = spec.get("tag", self.__tag)
        self.__commit = spec.get("commit", self.__commit)
        if self.__commit:
            # validate commit
            if re.match("^[0-9a-f]{40}$", self.__commit) is None:
                raise ParseError("Invalid commit id: " + str(self.__commit))
        elif not self.__branch and not self.__tag:
            # nothing specified at all -> master branch
            self.__branch = "master"
        self.__dir = spec.get("dir", ".")
        # convert remotes into separate dictionary
        for key, val in spec.items():
            if key.startswith(GitScm.REMOTE_PREFIX):
                stripped_key = key[len(GitScm.REMOTE_PREFIX):]  # remove prefix
                if stripped_key == "origin":
                    raise ParseError("Invalid remote name: " + stripped_key)
                self.__remotes.update({stripped_key: val})

    def getProperties(self):
        """Return the canonical property dict(s) describing this SCM."""
        properties = [{
            'recipe':
            self.__recipe,
            'scm':
            'git',
            'url':
            self.__url,
            'branch':
            self.__branch,
            'tag':
            self.__tag,
            'commit':
            self.__commit,
            'dir':
            self.__dir,
            'rev': (self.__commit if self.__commit else
                    (("refs/tags/" + self.__tag) if self.__tag else
                     ("refs/heads/" + self.__branch)))
        }]
        for key, val in self.__remotes.items():
            properties[0].update({GitScm.REMOTE_PREFIX + key: val})
        return properties

    def asScript(self):
        """Render a bash snippet that creates or updates the checkout."""
        remotes_array = [
            "# create an array of all remotes for this repository",
            "declare -A BOB_GIT_REMOTES=( [origin]={URL} )".format(
                URL=quote(self.__url)),
        ]
        # add additional remotes to array
        for name, url in self.__remotes.items():
            remotes_array.append("BOB_GIT_REMOTES[{NAME}]={URL}".format(
                NAME=quote(name), URL=quote(url)))
        # create script to handle remotes
        remotes_script = dedent("""\
            (
                {REMOTES_ARRAY}
                # remove remotes from array that are already known to Git
                while read -r REMOTE_NAME ; do
                    # check for empty variable in case no remote at all is specified
                    if [ -z "$REMOTE_NAME" ]; then
                        continue
                    fi
                    # check if existing remote is configured
                    if [ "${{BOB_GIT_REMOTES[$REMOTE_NAME]+_}}" ]; then
                        # check if URL has changed
                        if [ ! "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}" == "$(git ls-remote --get-url $REMOTE_NAME)" ]; then
                            git remote set-url "$REMOTE_NAME" "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}"
                        fi
                        # it is configured, therefore no need to keep in list
                        unset "BOB_GIT_REMOTES[$REMOTE_NAME]"
                    fi
                done <<< "$(git remote)"
                # add all remaining remotes in the array to the repository
                for REMOTE_NAME in "${{!BOB_GIT_REMOTES[@]}}" ; do
                    git remote add "$REMOTE_NAME" "${{BOB_GIT_REMOTES[$REMOTE_NAME]}}"
                done
            )""").format(REMOTES_ARRAY="\n    ".join(remotes_array))

        if self.__tag or self.__commit:
            # Pinned checkout: only fetch/checkout when HEAD is not valid yet.
            return dedent("""\
                export GIT_SSL_NO_VERIFY=true
                if [ ! -d {DIR}/.git ] ; then
                    git init {DIR}
                fi
                cd {DIR}
                {REMOTES}
                # checkout only if HEAD is invalid
                if ! git rev-parse --verify -q HEAD >/dev/null ; then
                    git fetch -t origin '+refs/heads/*:refs/remotes/origin/*'
                    git checkout -q {REF}
                fi
                """).format(URL=self.__url,
                            REF=self.__commit if self.__commit else "tags/" +
                            self.__tag,
                            DIR=self.__dir,
                            REMOTES=remotes_script)
        else:
            # Branch checkout: fast-forward only, and never touch a workspace
            # where the branch was switched manually.
            return dedent("""\
                export GIT_SSL_NO_VERIFY=true
                if [ -d {DIR}/.git ] ; then
                    cd {DIR}
                    if [[ $(git rev-parse --abbrev-ref HEAD) == "{BRANCH}" ]] ; then
                        git fetch -p origin
                        git merge --ff-only refs/remotes/origin/{BRANCH}
                    else
                        echo "Warning: not updating {DIR} because branch was changed manually..." >&2
                    fi
                else
                    if ! git clone -b {BRANCH} {URL} {DIR} ; then
                        rm -rf {DIR}/.git {DIR}/*
                        exit 1
                    fi
                    cd {DIR}
                fi
                {REMOTES}
                """).format(URL=self.__url,
                            BRANCH=self.__branch,
                            DIR=self.__dir,
                            REMOTES=remotes_script)

    def asDigestScript(self):
        """Return forward compatible stable string describing this git module.

        The format is "url rev-spec dir" where rev-spec depends on the given reference.
        """
        if self.__commit:
            return self.__commit + " " + self.__dir
        elif self.__tag:
            return self.__url + " refs/tags/" + self.__tag + " " + self.__dir
        else:
            return self.__url + " refs/heads/" + self.__branch + " " + self.__dir

    def asJenkins(self, workPath, credentials, options):
        """Generate the XML configuration of the Jenkins git plugin."""
        scm = ElementTree.Element("scm",
                                  attrib={
                                      "class": "hudson.plugins.git.GitSCM",
                                      "plugin": "[email protected]",
                                  })
        ElementTree.SubElement(scm, "configVersion").text = "2"

        userconfigs = ElementTree.SubElement(
            ElementTree.SubElement(scm, "userRemoteConfigs"),
            "hudson.plugins.git.UserRemoteConfig")

        url = ElementTree.SubElement(userconfigs, "url")
        url.text = self.__url

        if credentials:
            credentialsId = ElementTree.SubElement(userconfigs,
                                                   "credentialsId")
            credentialsId.text = credentials

        branch = ElementTree.SubElement(
            ElementTree.SubElement(ElementTree.SubElement(scm, "branches"),
                                   "hudson.plugins.git.BranchSpec"), "name")
        if self.__commit:
            branch.text = self.__commit
        elif self.__tag:
            branch.text = "refs/tags/" + self.__tag
        else:
            branch.text = "refs/heads/" + self.__branch

        ElementTree.SubElement(
            scm, "doGenerateSubmoduleConfigurations").text = "false"
        ElementTree.SubElement(scm, "submoduleCfg", attrib={"class": "list"})

        extensions = ElementTree.SubElement(scm, "extensions")
        ElementTree.SubElement(
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.RelativeTargetDirectory"),
            "relativeTargetDir").text = os.path.normpath(
                os.path.join(workPath, self.__dir))
        # remove untracked files and stale branches
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.CleanCheckout")
        ElementTree.SubElement(
            extensions, "hudson.plugins.git.extensions.impl.PruneStaleBranch")
        shallow = options.get("scm.git.shallow")
        if shallow is not None:
            try:
                shallow = int(shallow)
                if shallow < 0: raise ValueError()
            except ValueError:
                raise BuildError("Invalid 'git.shallow' option: " +
                                 str(shallow))
            if shallow > 0:
                co = ElementTree.SubElement(
                    extensions,
                    "hudson.plugins.git.extensions.impl.CloneOption")
                ElementTree.SubElement(co, "shallow").text = "true"
                ElementTree.SubElement(co, "noTags").text = "false"
                ElementTree.SubElement(co, "reference").text = ""
                ElementTree.SubElement(co, "depth").text = str(shallow)
                ElementTree.SubElement(co, "honorRefspec").text = "false"
        if isTrue(options.get("scm.ignore-hooks", "0")):
            ElementTree.SubElement(
                extensions,
                "hudson.plugins.git.extensions.impl.IgnoreNotifyCommit")

        return scm

    def merge(self, other):
        # Git checkouts are never merged with other SCMs.
        return False

    def getDirectories(self):
        """Map the checkout directory to a stable digest of this module."""
        return {self.__dir: hashString(self.asDigestScript())}

    def isDeterministic(self):
        # Only tag or commit pins make the checkout reproducible.
        return bool(self.__tag) or bool(self.__commit)

    def hasJenkinsPlugin(self):
        # A Jenkins git-plugin configuration can be generated (see asJenkins).
        return True

    def callGit(self, workspacePath, *args):
        """Run a git command in the checkout directory and return its stdout.

        Raises BuildError when git exits with a non-zero status. stderr is
        folded into the captured output.
        """
        cmdLine = ['git']
        cmdLine.extend(args)
        try:
            output = subprocess.check_output(cmdLine,
                                             cwd=os.path.join(
                                                 os.getcwd(), workspacePath,
                                                 self.__dir),
                                             universal_newlines=True,
                                             stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            raise BuildError(
                "git error:\n Directory: '{}'\n Command: '{}'\n'{}'".format(
                    os.path.join(workspacePath, self.__dir), " ".join(cmdLine),
                    e.output.rstrip()))
        return output

    # Get GitSCM status. The purpose of this function is to return the status of the given directory
    #
    # return values:
    #  - error: The SCM is in a error state. Use this if git returned a error code.
    #  - dirty: SCM is dirty. Could be: modified files, switched to another branch/tag/commit/repo, unpushed commits.
    #  - clean: Same branch/tag/commit as specified in the recipe and no local changes.
    #  - empty: Directory is not existing.
    #
    # This function is called when build with --clean-checkout. 'error' and 'dirty' SCMs are moved to attic,
    # while empty and clean directories are not.
    def status(self, workspacePath, dir):
        scmdir = os.path.join(workspacePath, dir)
        if not os.path.exists(os.path.join(os.getcwd(), scmdir)):
            return 'empty', '', ''

        status = 'clean'
        shortStatus = ""
        longStatus = ""

        def setStatus(shortMsg, longMsg, dirty=True):
            # Accumulate a one-letter flag and a detailed message; optionally
            # escalate the overall state to 'dirty'.
            nonlocal status, shortStatus, longStatus
            if (shortMsg not in shortStatus):
                shortStatus += shortMsg
            longStatus += longMsg
            if (dirty):
                status = 'dirty'

        try:
            output = self.callGit(workspacePath, 'ls-remote',
                                  '--get-url').rstrip()
            if output != self.__url:
                setStatus(
                    "S",
                    colorize(
                        "> URL: configured: '{}'  actual: '{}'\n".format(
                            self.__url, output), "33"))
            else:
                if self.__commit:
                    output = self.callGit(workspacePath, 'rev-parse',
                                          'HEAD').rstrip()
                    if output != self.__commit:
                        setStatus(
                            "S",
                            colorize(
                                "> commitId: configured: {}  actual: {}\n".
                                format(self.__commit, output), "33"))
                elif self.__tag:
                    output = self.callGit(workspacePath, 'tag', '--points-at',
                                          'HEAD').rstrip().splitlines()
                    if self.__tag not in output:
                        actual = ("'" + ", ".join(output) +
                                  "'") if output else "not on any tag"
                        setStatus(
                            "S",
                            colorize(
                                "    > tag: configured: '{}' actual: {}\n".
                                format(self.__tag, actual), "33"))
                elif self.__branch:
                    output = self.callGit(workspacePath, 'rev-parse',
                                          '--abbrev-ref', 'HEAD').rstrip()
                    if output != self.__branch:
                        setStatus(
                            "S",
                            colorize(
                                "> branch: configured: {} actual: {}\n".format(
                                    self.__branch, output), "33"))
                    else:
                        output = self.callGit(
                            workspacePath, 'rev-list',
                            'origin/' + self.__branch + '..HEAD')
                        if len(output):
                            setStatus("U", "")
                            # do not print detailed status this point.
                            # git log --branches --not --remotes --decorate will give the same informations.

            output = self.callGit(workspacePath, 'status', '--porcelain')
            if len(output):
                longMsg = colorize("> modified:\n", "33")
                for line in output.split('\n'):
                    if line != "":
                        longMsg += '  ' + line + '\n'
                setStatus("M", longMsg)

            # the following shows unpushed commits even on local branches. do not mark the SCM as dirty.
            output = self.callGit(workspacePath, 'log', '--branches', '--not',
                                  '--remotes', '--decorate')
            if len(output):
                longMsg = colorize("> unpushed:\n", "33")
                for line in output.split('\n'):
                    if line != "":
                        # Bug fix: the detail lines were appended directly to
                        # longStatus, bypassing setStatus and landing before
                        # the "> unpushed:" header; accumulate them in longMsg
                        # like the "modified" section above.
                        longMsg += '  ' + line + '\n'
                setStatus("u", longMsg, False)

        except BuildError as e:
            print(e)
            # Bug fix: the result variable is 'status', not 'ret'; the
            # original assigned an unused 'ret' so git failures were
            # reported as 'clean' contrary to the documented contract.
            status = 'error'

        return status, shortStatus, longStatus

    def getAuditSpec(self):
        # Audit engine name and the list of checkout directories.
        return ("git", [self.__dir])

    def hasLiveBuildId(self):
        # A concrete commit id can always be determined for a git checkout.
        return True

    def predictLiveBuildId(self):
        """Predict the live build-id by querying the remote via ls-remote.

        Returns a single-element list holding the binary commit id, or [None]
        if it could not be determined.
        """
        if self.__commit:
            return [bytes.fromhex(self.__commit)]

        if self.__tag:
            # Annotated tags are objects themselves. We need the commit object!
            refs = [
                "refs/tags/" + self.__tag + '^{}', "refs/tags/" + self.__tag
            ]
        else:
            refs = ["refs/heads/" + self.__branch]
        cmdLine = ['git', 'ls-remote', self.__url] + refs
        try:
            output = subprocess.check_output(cmdLine,
                                             universal_newlines=True,
                                             stderr=subprocess.STDOUT).strip()
        except subprocess.CalledProcessError:
            return [None]

        # have we found anything at all?
        if not output:
            return [None]

        # see if we got one of our intended refs ("<sha1>\t<refname>" lines)
        output = {
            ref.strip(): bytes.fromhex(commit.strip())
            for commit, ref in (line.split('\t')
                                for line in output.split('\n'))
        }
        for ref in refs:
            if ref in output: return [output[ref]]

        # uhh, should not happen...
        return [None]

    def calcLiveBuildId(self, workspacePath):
        """Return a single-element list with the binary HEAD commit id."""
        if self.__commit:
            return [bytes.fromhex(self.__commit)]
        else:
            output = self.callGit(workspacePath, 'rev-parse', 'HEAD').strip()
            return [bytes.fromhex(output)]

    def getLiveBuildIdSpec(self, workspacePath):
        """Describe how to obtain the live build-id of this checkout.

        "=<sha1>" carries the id verbatim; "g<path>" asks the consumer to
        query git in the given directory.
        """
        if self.__commit:
            return ["=" + self.__commit]
        else:
            return ["g" + os.path.join(workspacePath, self.__dir)]

    @staticmethod
    def processLiveBuildIdSpec(dir):
        """Resolve a "g<path>" live build-id spec by asking git for HEAD."""
        try:
            return subprocess.check_output(["git", "rev-parse", "HEAD"],
                                           cwd=dir,
                                           universal_newlines=True).strip()
        except subprocess.CalledProcessError as e:
            raise BuildError("Git audit failed: " + str(e))
        except OSError as e:
            raise BuildError("Error calling git: " + str(e))
Пример #25
0
class Descriptor(object, metaclass=abc.ABCMeta):
    """Abstract base class for all |Descriptor|.

    A |Descriptor| is a class used to extract relevant information from a collection of |Record|.

    More specifically, a |Descriptor| is used in two phases:

    * In a first time, optional internals are updated and/or accumulated over the entire collection of
      |RecordCollection| tuples. Those internals will be used to tailor the |Descriptor| to the description task
      specific context (*e.g.* like updating a range of possible value to later normalize or clip a property value).
    * In a second time, the descriptor is used to construct a new |RecordCollection| or a tuple of |RecordCollection|
      where each enclosed |Record| is added a named property containing the adequate description of the |Record|
      from a tuple of |RecordCollection|.

    Subclasses must override the two interfaces used to do so: :meth:`update(*record_collections) <update>` and
    :meth:`compute(*record_collections) <compute>` as well as the :meth:`reset` method and
    the :meth:`_make_interface` private method.

    Args:
        name (str): The |Descriptor| name.

    Attributes:
        name (str): The |Descriptor| name.

    """

    # Schema that every __descriptor__ dict must satisfy: either a
    # "categorical" or a "continuous" interface, both sharing the same
    # 'name'/'property' formats but with type-specific 'schema' payloads
    # (_categorical_schema / _continuous_schema are module-level validators).
    __interface_schema__ = schema.Schema(
        schema.Or(
            {
                'name': schema.Regex(r'^[(),a-zA-Z_ ]+$'),
                'type': 'categorical',
                'property': schema.Regex(r'^[a-z][a-z_]+[a-z]$'),
                'schema': _categorical_schema
            }, {
                'name': schema.Regex(r'^[(),a-zA-Z_ ]+$'),
                'type': 'continuous',
                'property': schema.Regex(r'^[a-z][a-z_]+[a-z]$'),
                'schema': _continuous_schema
            }))

    def __init__(self, name: str) -> None:
        self.name = name

    @property
    def property_name(self):
        """str: The |Record| inserted property name after :meth:`compute` call."""
        # e.g. class CamelCase + name "my_name" -> "camel_case_my_name"
        return '_'.join((_camel_to_snake(self.__class__.__name__),
                         _camel_to_snake(self.name)))

    @property
    def __descriptor__(self):  # noqa: D401
        """dict: A dictionary which summarizes the internal state of the |Descriptor| instance.

        Keys:
            name (str): The descriptor name. Note that in some |Descriptor|, it conjointly designates the
                descriptor name **and** the name of the property read from each |Record| to describe it.
            property (str): The name of the property written out by the |Descriptor| in each |Record|.
            type (str): Either ``categorical`` or ``continuous``.
            schema (dict, tuple, ColorMap): For ``categorical`` descriptors, it must be a dict mapping each
                ``category name`` to its relevant value (*e.g.* its index or |Color|). For ``continuous`` descriptors,
                it must be a range description (*i.e.* either a ``(start, end)`` tuple or a object with a ``range``
                attribute like a |ColorMap|).

        See Also:
            For more information on how :attr:`__descriptor__` is constructed, please see :ref:`descriptor`.

        """
        # Subclass supplies 'type' and 'schema'; 'name' and 'property' are
        # normalized here so all descriptors present a uniform interface.
        interface = self._make_interface()
        interface.update({
            'name': self.name.title().replace('_', ' '),
            'property': self.property_name
        })
        # Validate eagerly so a malformed subclass interface fails loudly at
        # access time rather than downstream.
        try:
            self.__interface_schema__.validate(interface)
        except schema.SchemaError as e:
            raise ValueError('Invalid __descriptor__ returned:\n'
                             '{}\n{}'.format(e.errors, e.autos))
        return interface

    def __repr__(self):
        """Return a python representation of the |Descriptor|."""
        return '{}(name={})'.format(self.__class__.__name__, self.name)

    def __str__(self):
        """Return the |Descriptor| representation."""
        return self.__repr__()

    def __eq__(self, other):
        """Return ``True`` if two |Descriptor| have the same attributes and class."""
        # Exact class match (not isinstance): two different subclasses are
        # never considered equal even with identical attributes.
        if self.__class__ == other.__class__:
            return dict_equal(self.__dict__, other.__dict__)
        return False

    def __ne__(self, other):
        """Return ``True`` if two |Descriptor| do not have the same attributes and class."""
        return not self == other

    @abc.abstractmethod
    def update(self, *record_collections):
        """Update internal values from |Record| descriptions.

        Args:
            *record_collections (|RecordCollection|): |RecordCollection| of which |Record| will be used to update
                internals.

        """
        raise NotImplementedError

    @abc.abstractmethod
    def compute(self, *record_collections):
        """Construct new |RecordCollection| where each enclosed |Record| is added a named description property.

        Args:
            *record_collections (|RecordCollection|): |RecordCollection| used to construct new |RecordCollection| with
                described |Record|.

        Returns:
            (|RecordCollection|, ): A described |RecordCollection| tuple.

        """
        raise NotImplementedError

    @abc.abstractmethod
    def reset(self):
        """Reset |Descriptor| internals to factory values."""
        raise NotImplementedError

    @abc.abstractmethod
    def _make_interface(self):
        """Build a summary of the internal state of the |Descriptor|.

        It is used as a unified interface for all descriptors to describe and explicit their description.

        As an example, for a typical |CategoricalDescriptor|, the interface dictionary would contain:

        * The |Descriptor| name
        * The |Descriptor| property name
        * The |Descriptor| type, *i.e.* ``'categorical'`` here
        * The category name to category index mapping.

        As for a typical |ContinuousDescriptor|, it would contain:

        * The |Descriptor| name
        * The |Descriptor| property name
        * The |Descriptor| type, *i.e.* ``'continuous'`` here
        * The value range.

        See Also:
            The :attr:`__descriptor__` for more information.

        Returns:
            dict: A valid |Descriptor| :attr:`__descriptor__`.

        """
        raise NotImplementedError
Пример #26
0
class RecipeConfigureFeature(RecipeFeature):
    """Recipe feature for configuring root trees built during a recipe root
    feature"""

    NAME = "configure"

    FEATURED_ATTRIBUTES = {"configure"}

    # Recipe config schema: top-level "sdk" names the SDK recipe; "configure"
    # holds the env vars, the "root" recipe whose image result is configured,
    # and the non-empty list of shell steps to run.
    SCHEMA = schema.Schema({
        "sdk": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
        "configure": {
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "root": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
            "steps": schema.And([str], len),
        },
        str: object,  # do not consider other keys
    })

    def configure(self) -> None:
        """Run this recipe's configuration steps inside the SDK container.

        Copies the root tree produced by the "image" action of the recipe
        designated by ``configure.root`` into this recipe's ``configure``
        output directory, then runs each ``configure.steps`` command in an
        SDK session on that copy.

        :raises RecipeActionError: if the image action of the designated
            recipe has not been run yet (no root tree to configure)
        """

        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["sdk"]), "sdk")

        # the recipe to configure (i.e. what is the recipe for which we need to
        # get image-resulting root tree on which running the configuration
        # steps?):
        recipe_to_configure = recipe.Recipe(
            self.replace_placeholders(self.recipe.config["configure"]["root"]))

        # check that image action for the recipe designated by "root" has well
        # been run:
        image_out_subpath = os.path.join(recipe_to_configure.out_subpath,
                                         "image")
        image_out_path = os.path.join(repo_root_path(), image_out_subpath)
        if not os.path.exists(os.path.join(image_out_path, "root")):
            raise RecipeActionError(
                line("""
                Could not process a configure action step if the image action
                step from the recipe designated by "configure.root" has not
                been run before.
            """))

        action_out_subpath = os.path.join(self.recipe.out_subpath, "configure")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Always clear out the previous configure result:
        if os.path.exists(action_out_path):
            info("Clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory
        os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        # Retrieve the result of the image action to work on it for this
        # configuration step:
        with ElevatedPrivileges():
            info(
                line("""Copying resulting root of the image action step for the
                proper recipe ({!r}) into the working environment for the
                configure action step...""".format(
                    recipe_to_configure.identifier)))
            # WARNING!! Do not use `shutil.copytree()` function for the action
            # that follows as this function uses `shutil.copy2()` by default,
            # which itself uses `shutil.copystat()`. However, `copystat()`
            # copies all node metadata **EXCEPT** the user and mode
            # ownerships!!! See the Warning block on top of the page
            # https://docs.python.org/3/library/shutil.html.
            # Therefore, let's use the good old "cp -a", which seems to be
            # available for all flavors of "cp" (but GNU cp is advised for
            # this) and seems way quicker than `copytree()`. A pure Python
            # workaround would have been too time-consuming.
            run([
                "cp", "-a",
                os.path.join(image_out_path, "root"),
                os.path.join(action_out_path, "root")
            ])

        # Env values may contain placeholders; expand them in SDK context
        # before handing them to the container session.
        with sdk.session(action_name="configure",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["configure"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=False) as sess:
            for cmd in self.recipe.config["configure"]["steps"]:
                info("{!r} configures recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))
Пример #27
0
def _check_config_file(config):
    """For non-deployment editions, we must load config from a json5 file.

    This also gets run for deployments, as this function is responsible for
    merging multiple configs into the single root config.

    This function also contains all schema validation of json5 files, as
    the version stored in the observatory image is separate.

    Side effects: sets the module globals ``CONFIG_FOLDER`` (when *config*
    is given) and ``IMAGE_TAG`` (from the validated config's ``name``), and
    calls ``sys.exit(1)`` on schema-validation failure.

    :param config: path to the config folder, or ``None`` when
        ``CONFIG_FOLDER`` was already set (deployment case)
    :return: the validated (and default-populated) config dict
    """
    global CONFIG_FOLDER, IMAGE_TAG

    # Not a deployment -- requires CONFIG_FOLDER
    if CONFIG_FOLDER is None:
        assert config is not None
        CONFIG_FOLDER = config
    else:
        assert config is None

    import pyjson5
    import schema as s

    # FIX: use context managers so the file descriptors are closed
    # deterministically (previously the open() results were never closed
    # and relied on garbage collection).
    with open(os.path.join(CONFIG_FOLDER, 'config.json5')) as fp:
        config_data = pyjson5.load(fp)

    # Before applying schema, merge in child configs
    for child_name in os.listdir(CONFIG_FOLDER):
        child_path = os.path.join(CONFIG_FOLDER, child_name)
        if not os.path.isdir(child_path):
            continue
        child_config_path = os.path.join(child_path, 'config.json5')
        if not os.path.lexists(child_config_path):
            continue

        with open(child_config_path) as fp:
            child_config = pyjson5.load(fp)

        # First traversal -- patch keys and values (iterative depth-first
        # walk over the nested dicts; `path` tracks the key chain).
        nodes = [([], child_config)]
        while nodes:
            path, src = nodes.pop()
            for k, v in src.items():
                ## Rewrite v
                # Check executables; amend cwd so child tools resolve paths
                # relative to the child's own folder.
                # NOTE(review): assumes v is a dict under these keys -- a
                # non-dict value here would raise; confirm against schema.
                if path and (
                        'file_detail_views' == path[-1]
                        or 'decision_views' == path[-1]
                        or 'parsers' == path[-1]
                        or 'tasks' == path[-1]):
                    if 'cwd' in v:
                        v['cwd'] = f'{child_name}/' + v['cwd']
                    else:
                        v['cwd'] = child_name

                # Docker build stage patch -- point dist targets at the
                # child's namespaced subfolder.
                if path == ['build', 'stages']:
                    if 'commands' in v:
                        v['commands'] = [
                                vv
                                    .replace('{disttarg}', f'{{disttarg}}/{child_name}')
                                    .replace('{dist}', f'{{dist}}/{child_name}')
                                for vv in v['commands']]

                if isinstance(v, dict):
                    nodes.append((path + [k], v))

        # Second traversal -- merge the patched child config into the root
        # config, namespacing keys where collisions must be avoided.
        nodes = [([], config_data, child_config)]
        while nodes:
            path, dst, src = nodes.pop()
            for k, v in src.items():
                ## Rewrite k
                # Docker build stage patch -- rewrites `k`
                if path == ['build', 'stages']:
                    # Adding a build stage. If not 'base' or 'final', then
                    # prepend config folder name
                    if k not in ['base', 'final']:
                        k = f'{child_name}_{k}'

                # New entries only for these
                if path in [
                        ['pipelines'],
                        ['file_detail_views'],
                        ['decision_views'],
                        ['parsers'],
                        ]:
                    # Amend these with the child's name, to allow for copying
                    k = f'{child_name.replace("_", "-")}-{k}'
                    assert k not in dst, f'Cannot extend {path} {k}; must be new'

                ## Check for merge type
                # Check if new -- if so, assign and be done
                if k not in dst:
                    # Non-existent key; add it to the dictionary
                    dst[k] = v
                    continue

                # Do merge
                if isinstance(v, dict):
                    if not isinstance(dst[k], dict):
                        raise ValueError(f'{path} {k} type does not match base config')

                    # Dictionary merge
                    nodes.append((path + [k], dst[k], v))
                elif isinstance(v, list):
                    if not isinstance(dst[k], list):
                        raise ValueError(f'{path} {k} type does not match base config')

                    # Add to end.
                    dst[k].extend(v)
                else:
                    raise ValueError(f'May not extend {path} {k}: base config type {dst[k]}')

    # Pull in parser-specific schema (loaded from source path -- it is not
    # an installed package).
    import importlib.util
    spec = importlib.util.spec_from_file_location('etl_parse',
            os.path.join(faw_dir, 'common', 'pdf-etl-parse', 'parse_schema.py'))
    etl_parse = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(etl_parse)

    # Sub-schemas shared between the top level and each pipeline.
    schema_views = {
            s.Optional('decision_views', default={}): s.Or({}, {
                str: {
                    'label': str,
                    'type': 'program',
                    'exec': [s.Or(
                        s.And(str, lambda x: not x.startswith('<')),
                        s.And(str, lambda x: x in [
                            '<filesPath>', '<apiInfo>', '<jsonArguments>', '<mongo>', '<outputHtml>',
                            '<workbenchApiUrl>']),
                        )],
                    s.Optional('cwd', default='.'): str,
                    'execStdin': s.And(str, lambda x: all([
                        y.group(0) in ['<referenceDecisions>', '<statsbyfile>']
                        for y in re.finditer('<[^>]*>', x)]),
                        error="Must be string with any <'s being one of: "
                            "<referenceDecisions>, <statsbyfile>"),
                },
            }),
            s.Optional('file_detail_views', default={}): s.Or({}, {
                str: {
                    'label': str,
                    'type': 'program_to_html',
                    'exec': [str],
                    s.Optional('cwd', default='.'): str,
                    s.Optional('outputMimeType', default='text/html'): str,
                },
            }),
    }

    # NOTE -- primary schema validation is here, but NOT for submodules such
    # as pdf-etl-parse.
    sch = s.Schema({
        'name': s.And(str, s.Regex(r'^[a-zA-Z0-9-]+$')),
        # parsers validated by pdf-etl-parse
        'parsers': etl_parse.schema_get(),
        s.Optional('parserDefaultTimeout', default=30): s.Or(float, int),
        'decision_default': str,
        s.Optional('pipelines', default={}): s.Or({}, {
            s.And(str, lambda x: '_' not in x and '.' not in x,
                    error='Must not have underscore or dot'): {
                s.Optional('label'): str,
                s.Optional('disabled', default=False): s.Or(True, False),
                s.Optional('tasks', default={}): s.Or({},
                    s.And(
                        {
                            s.And(str, lambda x: '_' not in x and '.' not in x,
                                    error='Must not have underscore or dot'): {
                                s.Optional('disabled', default=False): s.Or(True, False),
                                'version': str,
                                'exec': [str],
                                s.Optional('cwd', default='.'): str,
                                s.Optional('dependsOn', default=[]): [str],
                            },
                        },
                        lambda x: all([d in x for _, task in x.items() for d in task['dependsOn']]),
                        error="Task `dependsOn` had invalid name",
                    )),
                s.Optional('parsers', default={}): etl_parse.schema_get(),
                **schema_views,
            },
        }),
        'build': {
            'stages': {
                str: {
                    s.Optional('from'): str,
                    s.Optional('commands'): [str],
                    s.Optional('copy_output'): {
                        str: s.Or(str, bool),
                    },
                },
            },
        },
        **schema_views,
    })
    try:
        config_data = sch.validate(config_data)
    except s.SchemaError as e:
        # FIX: only catch schema errors here. The previous broad
        # `except Exception` accessed `e.autos`, which raises AttributeError
        # for any non-schema exception and masks the real failure.
        traceback.print_exc()
        sys.stderr.write('\n'.join([str(v) for v in e.autos]) + '\n')
        sys.exit(1)
    IMAGE_TAG = config_data['name']
    return config_data
Пример #28
0
class RecipeRootFeature(RecipeFeature):
    """Recipe feature for building rootfs from sources and binary packages.
    Provides the 'run' attribute for development and debug."""

    NAME = "root"

    FEATURED_ATTRIBUTES = {"build", "image", "run"}

    # Recipe config schema: top-level "sdk" names the SDK recipe; "root"
    # holds env vars plus the build steps (may be empty) and the image steps
    # (must be non-empty).
    SCHEMA = schema.Schema({
        "sdk": schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
        "root": {
            schema.Optional("env", default={}): {
                schema.Regex(ENVVAR_FORMAT_RE.pattern,
                             error="Bad environment variable name"):
                str
            },
            "build_steps": [str],
            "image_steps": schema.And([str], len),
        },
        str: object,  # do not consider other keys
    })

    def build(self,
              clear_cache: bool = False,
              clear_previous_build: bool = True) -> None:
        """Build a root tree for this recipe from sources and binary packages
        cache.

        :param clear_cache: whether or not the cache has to clear before
            processing root build
        :param clear_previous_build: whether or not the previous root build
            result has to be cleared

        """

        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["sdk"]), "sdk")

        action_out_subpath = os.path.join(self.recipe.out_subpath, "build")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Clear out the previous image result when told so:
        if os.path.exists(action_out_path) and clear_previous_build:
            info("Clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory (may still exist when the previous build
        # was kept)
        if not os.path.exists(action_out_path):
            os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        if clear_cache:
            info("Clearing cache for recipe {!r}...".format(
                self.recipe.identifier))
            with ElevatedPrivileges():
                shutil.rmtree(
                    os.path.join(repo_root_path(), self.recipe.cache_subpath))

        # Env values may contain placeholders; expand them in SDK context
        # before handing them to the container session. Note: the build
        # session shares the host network namespace (sources may need to be
        # fetched), unlike the image session below.
        with sdk.session(action_name="build",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["root"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=True) as sess:
            for cmd in self.recipe.config["root"]["build_steps"]:
                info("{!r} builds recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))

    def image(self) -> None:
        """Build a root tree for this recipe only from binary packages cache
        and without any build-time dependency."""

        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["sdk"]), "sdk")

        action_out_subpath = os.path.join(self.recipe.out_subpath, "image")
        action_out_path = os.path.join(repo_root_path(), action_out_subpath)

        # Always clear out the previous image result:
        if os.path.exists(action_out_path):
            info("Clearing {!r}...".format(action_out_path))
            with ElevatedPrivileges():
                shutil.rmtree(action_out_path)

        # prepare output directory
        os.makedirs(action_out_path)

        # hook the SDK container process to the current TTY (if existent):
        terminal = is_tty_attached()

        # Image sessions run without the host network namespace (no network
        # access needed when only consuming the binary packages cache).
        with sdk.session(action_name="image",
                         action_targeted_recipe=self.recipe,
                         env={
                             key: self.replace_placeholders(value,
                                                            sdk_context=True)
                             for key, value in self.recipe.config["root"]
                             ["env"].items()
                         },
                         terminal=terminal,
                         shared_host_netns=False) as sess:
            for cmd in self.recipe.config["root"]["image_steps"]:
                info("{!r} images recipe {!r}, runs:\n  {}".format(
                    sdk.recipe.identifier, self.recipe.identifier, cmd))
                sess.run(self.replace_placeholders(cmd, sdk_context=True))

    def run(self, command: Optional[str] = None) -> None:
        """Run an interactive SDK session targeting this recipe, with this
        feature's environment applied (for development and debug).

        :param command: optional command to run in the session -- presumably
            an interactive shell is used when ``None``; confirm against
            ``sdk.interactive_run``
        """
        # using getattr to avoid static analyzers from complaining about
        # missing attr (but brought by a recipe feature):
        sdk = getattr(recipe.Recipe(self.recipe.config["sdk"]), "sdk")

        sdk.interactive_run(
            recipe=self.recipe,
            command=command,
            env={
                key: self.replace_placeholders(value, sdk_context=True)
                for key, value in self.recipe.config["root"]["env"].items()
            },
        )
Пример #29
0
from datetime import datetime

import schema as sm

from .client import HttpClient, ClientError
from .provider import ExchangeRatesProvider, ExchangeRateLoadError
from ..models import Currency, Rate

# All ISO currency codes the application's Currency enum supports.
_POSSIBLE_CCY = [currency.value for currency in Currency]

# A currency field must be exactly one of the supported codes.
_ccy_validator = sm.Or(*_POSSIBLE_CCY)
# A rate must coerce to float and be non-negative.
_rate_validator = sm.And(sm.Use(float), lambda value: value >= 0)
# Dates arrive as "YYYY-MM-DD" strings.
_date_validator = sm.Regex(r"\d{4}-\d{2}-\d{2}")

# Expected shape of an ECB exchange-rates payload.
_ecb_rate_scm = sm.Schema({
    "rates": {_ccy_validator: _rate_validator},
    "base": _ccy_validator,
    "date": _date_validator,
})


class ECBProvider(ExchangeRatesProvider):
    API_ENDPOINT_CONFIG_NAME = "API_ENDPOINT"

    @classmethod
    def create(cls, endpoint: str):
        if not endpoint:
            raise ValueError("API endpoint is required")