Code example #1
def lint_milestone(ctx):
    """
    Make sure PRs are assigned a milestone
    """
    pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
    if pr_url:
        import requests

        pr_id = pr_url.rsplit('/')[-1]

        res = requests.get(
            "https://api.github.com/repos/DataDog/datadog-agent/issues/{}".
            format(pr_id))
        pr = res.json()
        if pr.get("milestone"):
            print("Milestone: %s" % pr["milestone"].get("title", "NO_TITLE"))
            return

        print("PR %s requires a milestone" % pr_url)
        raise Exit(code=1)

    # The PR has not been created yet
    else:
        print("PR not yet created, skipping check for milestone")
Code example #2
File: agent.py Project: yutaoxu/datadog-agent
def image_build(ctx, arch='amd64', base_dir="omnibus", python_version="2", skip_tests=False):
    """
    Build the docker image
    """
    BOTH_VERSIONS = ["both", "2+3"]
    VALID_VERSIONS = ["2", "3"] + BOTH_VERSIONS
    if python_version not in VALID_VERSIONS:
        raise ParseError("provided python_version is invalid")

    build_context = "Dockerfiles/agent"
    base_dir = base_dir or os.environ.get("OMNIBUS_BASE_DIR")
    pkg_dir = os.path.join(base_dir, 'pkg')
    deb_glob = 'datadog-agent*_{}.deb'.format(arch)
    dockerfile_path = "{}/{}/Dockerfile".format(build_context, arch)
    list_of_files = glob.glob(os.path.join(pkg_dir, deb_glob))
    # get the last debian package built
    if not list_of_files:
        print("No debian package build found in {}".format(pkg_dir))
        print("See agent.omnibus-build")
        raise Exit(code=1)
    latest_file = max(list_of_files, key=os.path.getctime)
    shutil.copy2(latest_file, build_context)

    # Pull base image with content trust enabled
    pull_base_images(ctx, dockerfile_path, signed_pull=True)
    common_build_opts = "-t {} -f {}".format(AGENT_TAG, dockerfile_path)
    if python_version not in BOTH_VERSIONS:
        common_build_opts = "{} --build-arg PYTHON_VERSION={}".format(common_build_opts, python_version)

    # Build with the testing target
    if not skip_tests:
        ctx.run("docker build {} --target testing {}".format(common_build_opts, build_context))

    # Build with the release target
    ctx.run("docker build {} --target release {}".format(common_build_opts, build_context))
    ctx.run("rm {}/{}".format(build_context, deb_glob))
Code example #3
File: docker.py Project: winebarrel/datadog-agent
def mirror_image(ctx,
                 src_image,
                 dst_image="datadog/docker-library",
                 dst_tag="auto"):
    """
    Pull an upstream image and mirror it to our docker-library repository
    for integration tests. Tag format should be A-Z_n_n_n
    """
    if dst_tag == "auto":
        # Autogenerate tag
        match = re.search(r'([^:\/\s]+):[v]?(.*)$', src_image)
        if not match:
            print(
                "Cannot guess destination tag for {}, please provide a --dst-tag option"
                .format(src_image))
            raise Exit(1)
        dst_tag = "_".join(match.groups()).replace(".", "_")

    dst = "{}:{}".format(dst_image, dst_tag)
    print("Uploading {} to {}".format(src_image, dst))

    # TODO: use docker python lib
    ctx.run("docker pull {src} && docker tag {src} {dst} && docker push {dst}".
            format(src=src_image, dst=dst))
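
A quick worked example of the tag autogeneration above (the image name is illustrative): the regex captures the image's base name and version, which are then joined with underscores.

import re

match = re.search(r'([^:\/\s]+):[v]?(.*)$', "quay.io/coreos/etcd:v3.3.10")
print(match.groups())                              # ('etcd', '3.3.10')
print("_".join(match.groups()).replace(".", "_"))  # etcd_3_3_10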
Code example #4
def _get_release_version_from_release_json(release_json, major_version, version_re, release_json_key=None):
    """
    If release_json_key is None, returns the highest version entry in release.json.
    If release_json_key is set, returns the entry for release_json_key of the highest version entry in release.json.
    """

    release_version = None
    release_component_version = None

    # Get the release entry for the given Agent major version
    release_entry_name = release_entry_for(major_version)
    release_json_entry = release_json.get(release_entry_name, None)

    # Check that the release entry exists, otherwise fail
    if release_json_entry:
        release_version = release_entry_name

        # Check that the component's version is defined in the release entry
        if release_json_key is not None:
            match = version_re.match(release_json_entry.get(release_json_key, ""))
            if match:
                release_component_version = _create_version_from_match(match)
            else:
                print(
                    "{} does not have a valid {} ({}), ignoring".format(
                        release_entry_name, release_json_key, release_json_entry.get(release_json_key, "")
                    )
                )

    if not release_version:
        raise Exit("Couldn't find any matching {} version.".format(release_version), 1)

    if release_json_key is not None:
        return release_component_version

    return release_version
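
For orientation, a minimal sketch of the release.json shape this helper walks; the entry name comes from release_entry_for(), and the nested key names match the lookups above (all values, and the "release-a7" entry name, are illustrative assumptions):

release_json = {
    "release-a7": {
        "JMXFETCH_VERSION": "0.44.1",
        "SECURITY_AGENT_POLICIES_VERSION": "v0.10",
    },
}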
Code example #5
def dev_env_cleanup(ctx, name="kind"):
    """Remove traces of the dev env."""
    validate_kind_version()
    clusters = run("kind get clusters", hide=True).stdout.strip().splitlines()
    if name in clusters:
        run("kind delete cluster --name={}".format(name), hide=True)
    else:
        raise Exit(message="Unable to find cluster named: {}".format(name))

    run('for frr in $(docker ps -a -f name=frr --format {{.Names}}) ; do '
        '    docker rm -f $frr ; '
        'done', hide=True)

    # cleanup bgp configs
    dev_env_dir = os.getcwd() + "/dev-env/bgp"
    frr_volume_dir = dev_env_dir + "/frr-volume"

    # sudo because past docker runs will have changed ownership of this dir
    run('sudo rm -rf "%s"' % frr_volume_dir)
    run('rm -f "%s"/config.yaml' % dev_env_dir)

    # cleanup layer2 configs
    dev_env_dir = os.getcwd() + "/dev-env/layer2"
    run('rm -f "%s"/config.yaml' % dev_env_dir)
Code example #6
def upstream(c, collection='f5_modules', verbose=False):
    """Copy all module utils, to the local/ansible_collections/f5networks/collection_name directory.
    """
    coll_dest = '{0}/local/ansible_collections/f5networks/{1}'.format(
        BASE_DIR, collection)
    module_utils_dst = '{0}/local/ansible_collections/f5networks/{1}/plugins/module_utils/'.format(
        BASE_DIR, collection)
    module_utils_src = '{0}/library/module_utils/network/f5/'.format(BASE_DIR)

    purge_upstreamed_files(c, module_utils_dst, coll_dest, verbose)
    create_directories(c, coll_dest, module_utils_dst, verbose)
    files_upstream(c, module_utils_src, module_utils_dst)

    retries = 0
    while not cmp_dir(module_utils_src, module_utils_dst):
        if retries > 2:
            raise Exit('Failed to upstream module utils, exiting.')
        purge_upstreamed_files(c, module_utils_dst, coll_dest, verbose)
        create_directories(c, coll_dest, module_utils_dst, verbose)
        files_upstream(c, module_utils_src, module_utils_dst)
        retries = retries + 1

    print("Module utils files upstreamed successfully.")
Code example #7
File: test.py Project: urosgruber/datadog-agent
def lint_teamassignment(ctx):
    """
    Make sure PRs are assigned a team label
    """
    pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
    if pr_url:
        import requests

        pr_id = pr_url.rsplit('/')[-1]

        res = requests.get("https://api.github.com/repos/DataDog/datadog-agent/issues/{}".format(pr_id))
        issue = res.json()

        for label in issue.get('labels', []):
            if re.match('team/', label['name']):
                print("Team Assignment: {}".format(label['name']))
                return

        print("PR {} requires team assignment".format(pr_url))
        raise Exit(code=1)

    # The PR has not been created yet
    else:
        print("PR not yet created, skipping check for team assignment")
Code example #8
File: go.py Project: snowflakekiller/datadog-agent
def misspell(ctx, targets):
    """
    Run misspell on targets.

    Example invocation:
        inv misspell --targets=./pkg/collector/check,./pkg/aggregator
    """
    if isinstance(targets, basestring):
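        # NOTE: basestring exists only on Python 2; a Python 3 port would check str here.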
        # when this function is called from the command line, targets are passed
        # as comma separated tokens in a string
        targets = targets.split(',')

    result = ctx.run("misspell " + " ".join(targets), hide=True)
    legit_misspells = []
    for found_misspell in result.stdout.split("\n"):
        if len(found_misspell.strip()) > 0:
            if not any([ignored_target in found_misspell for ignored_target in MISSPELL_IGNORED_TARGETS]):
                legit_misspells.append(found_misspell)

    if len(legit_misspells) > 0:
        print("Misspell issues found:\n" + "\n".join(legit_misspells))
        raise Exit(code=2)
    else:
        print("misspell found no issues")
Code example #9
File: go.py Project: youngdou/datadog-agent
def lint(ctx, targets):
    """
    Run golint on targets. If targets are not specified,
    the value from `invoke.yaml` will be used.

    Example invocation:
        inv lint --targets=./pkg/collector/check,./pkg/aggregator
    """
    if isinstance(targets, str):
        # when this function is called from the command line, targets are passed
        # as comma separated tokens in a string
        targets = targets.split(',')

    # add the /... suffix to the targets
    targets_list = ["{}/...".format(t) for t in targets]
    result = ctx.run("go run golang.org/x/lint/golint {}".format(
        ' '.join(targets_list)))
    if result.stdout:
        files = []
        skipped_files = set()
        for line in (out for out in result.stdout.split('\n') if out):
            fname = os.path.basename(line.split(":")[0])
            if fname in MODULE_WHITELIST:
                skipped_files.add(fname)
                continue
            files.append(fname)

        if files:
            print("Linting issues found in {} files.".format(len(files)))
            raise Exit(code=1)

        if skipped_files:
            for skipped in skipped_files:
                print("Allowed errors in whitelisted file {}".format(skipped))

    print("golint found no issues")
Code example #10
def image_build(ctx, arch='amd64', tag=AGENT_TAG, push=False):
    """
    Build the docker image
    """

    dca_binary = glob.glob(os.path.join(BIN_PATH, "datadog-cluster-agent"))
    # get the latest cluster-agent binary built
    if not dca_binary:
        print(f"No bin found in {BIN_PATH}")
        print("See cluster-agent.build")
        raise Exit(code=1)
    latest_file = max(dca_binary, key=os.path.getctime)
    ctx.run(f"chmod +x {latest_file}")

    build_context = "Dockerfiles/cluster-agent"
    exec_path = f"{build_context}/datadog-cluster-agent.{arch}"
    dockerfile_path = f"{build_context}/{arch}/Dockerfile"

    shutil.copy2(latest_file, exec_path)
    ctx.run(f"docker build -t {tag} {build_context} -f {dockerfile_path}")
    ctx.run(f"rm {exec_path}")

    if push:
        ctx.run(f"docker push {tag}")
Code example #11
def create_rc(
    ctx,
    major_versions="6,7",
    integration_version=None,
    omnibus_software_version=None,
    jmxfetch_version=None,
    omnibus_ruby_version=None,
    security_agent_policies_version=None,
    macos_build_version=None,
):
    """
    Takes whatever version is the highest in release.json and adds a new RC to it.
    If there was no RC, creates one and bump minor version. If there was an RC, create RC + 1.

    Update internal module dependencies with the new RC.
    """

    if sys.version_info[0] < 3:
        print("Must use Python 3 for this task")
        raise Exit(code=1)

    list_major_versions = major_versions.split(",")
    print("Creating RC for agent version(s) {}".format(list_major_versions))

    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = 0
    for version in list_major_versions:
        if int(version) > highest_major:
            highest_major = version

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    # We want to match:
    # - X.Y.Z
    # - X.Y.Z-rc.t
    # - vX.Y(.Z) (security-agent-policies repo)
    version_re = re.compile(r'(v)?(\d+)[.](\d+)([.](\d+))?(-rc\.(\d+))?')
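    # Worked examples (illustrative): version_re.match("7.26.0-rc.3").groups()
    # -> (None, '7', '26', '.0', '0', '-rc.3', '3'); for the security-agent-policies
    # scheme, version_re.match("v0.10").groups() -> ('v', '0', '10', None, None, None, None).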

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # jmxfetch and security-agent-policies follow their own version scheme
    highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="JMXFETCH_VERSION",
    )

    highest_security_agent_policies_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="SECURITY_AGENT_POLICIES_VERSION",
    )

    if highest_version["rc"] is None:
        # No RC exists, create one
        highest_version["minor"] = highest_version["minor"] + 1
        highest_version["rc"] = 1
    else:
        # An RC exists, create next RC
        highest_version["rc"] = highest_version["rc"] + 1
    new_rc = _stringify_version(highest_version)
    print("Creating {}".format(new_rc))

    if not integration_version:
        integration_version = _get_highest_repo_version(
            github_token, "integrations-core", highest_version, version_re)
    print("integrations-core's tag is {}".format(
        _stringify_version(integration_version)))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            github_token, "omnibus-software", highest_version, version_re)
    print("omnibus-software's tag is {}".format(
        _stringify_version(omnibus_software_version)))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(github_token, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
    print("jmxfetch's tag is {}".format(_stringify_version(jmxfetch_version)))

    if not omnibus_ruby_version:
        omnibus_ruby_version = _get_highest_repo_version(
            github_token, "omnibus-ruby", highest_version, version_re)
    print("omnibus-ruby's tag is {}".format(
        _stringify_version(omnibus_ruby_version)))

    if not security_agent_policies_version:
        security_agent_policies_version = _get_highest_repo_version(
            github_token, "security-agent-policies",
            highest_security_agent_policies_version, version_re)
    print("security-agent-policies' tag is {}".format(
        _stringify_version(security_agent_policies_version)))

    if not macos_build_version:
        macos_build_version = _get_highest_repo_version(
            github_token, "datadog-agent-macos-build", highest_version,
            version_re)
    print("datadog-agent-macos-build's tag is {}".format(
        _stringify_version(macos_build_version)))

    _save_release_json(
        release_json,
        list_major_versions,
        highest_version,
        integration_version,
        omnibus_software_version,
        omnibus_ruby_version,
        jmxfetch_version,
        security_agent_policies_version,
        macos_build_version,
    )

    # Update internal module dependencies
    update_modules(ctx, _stringify_version(highest_version))
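
The version values these release tasks pass around are plain dicts. A minimal sketch of the shape implied by the fields accessed above ("major", "minor" and "rc" are read directly in the code; "patch" and the values are assumptions):

highest_version = {"major": 7, "minor": 26, "patch": 0, "rc": None}
# With rc=None, the bump logic above produces 7.27.0-rc.1; with rc=2 it
# would produce 7.26.0-rc.3 (as rendered by _stringify_version).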
Code example #12
def finish(
    ctx,
    major_versions="6,7",
    integration_version=None,
    omnibus_software_version=None,
    jmxfetch_version=None,
    omnibus_ruby_version=None,
    security_agent_policies_version=None,
    macos_build_version=None,
    ignore_rc_tag=False,
):
    """
    Creates new entry in the release.json file for the new version. Removes all the RC entries.

    Update internal module dependencies with the new version.
    """

    if sys.version_info[0] < 3:
        print("Must use Python 3 for this task")
        raise Exit(code=1)

    list_major_versions = major_versions.split(",")
    print("Finishing release for major version(s) {}".format(
        list_major_versions))

    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = 0
    for version in list_major_versions:
        if int(version) > highest_major:
            highest_major = version

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    # We want to match:
    # - X.Y.Z
    # - X.Y.Z-rc.t
    # - vX.Y(.Z) (security-agent-policies repo)
    version_re = re.compile(r'(v)?(\d+)[.](\d+)([.](\d+))?(-rc\.(\d+))?')

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # jmxfetch and security-agent-policies follow their own version scheme
    highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="JMXFETCH_VERSION",
    )

    highest_security_agent_policies_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="SECURITY_AGENT_POLICIES_VERSION",
    )

    # Erase RCs
    for major_version in list_major_versions:
        highest_version["major"] = major_version
        rc = highest_version["rc"]
        while highest_version["rc"] not in [0, None]:
            # In case we have skipped an RC in the file...
            release_json.pop(_stringify_version(highest_version), None)
            highest_version["rc"] = highest_version["rc"] - 1
        highest_version["rc"] = rc

    # Tags in other repos are based on the highest major (e.g. for releasing version 6.X.Y and 7.X.Y they will tag only 7.X.Y)
    highest_version["major"] = highest_major

    # We don't want to fetch RC tags
    highest_version["rc"] = None

    if not integration_version:
        integration_version = _get_highest_repo_version(
            github_token, "integrations-core", highest_version, version_re)
        if integration_version is None:
            print(
                "ERROR: No version found for integrations-core - did you create the tag?"
            )
            raise Exit(code=1)
        if integration_version["rc"] is not None:
            print(
                "ERROR: integrations-core tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on integrations-core.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("integrations-core's tag is {}".format(
        _stringify_version(integration_version)))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            github_token, "omnibus-software", highest_version, version_re)
        if omnibus_software_version is None:
            print(
                "ERROR: No version found for omnibus-software - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_software_version["rc"] is not None:
            print(
                "ERROR: omnibus-software tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on omnibus-software.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("omnibus-software's tag is {}".format(
        _stringify_version(omnibus_software_version)))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(github_token, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
    print("jmxfetch's tag is {}".format(_stringify_version(jmxfetch_version)))

    if not omnibus_ruby_version:
        omnibus_ruby_version = _get_highest_repo_version(
            github_token, "omnibus-ruby", highest_version, version_re)
        if omnibus_ruby_version is None:
            print(
                "ERROR: No version found for omnibus-ruby - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_ruby_version["rc"] is not None:
            print(
                "ERROR: omnibus-ruby tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on omnibus-ruby.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("omnibus-ruby's tag is {}".format(
        _stringify_version(omnibus_ruby_version)))

    if not security_agent_policies_version:
        security_agent_policies_version = _get_highest_repo_version(
            github_token, "security-agent-policies",
            highest_security_agent_policies_version, version_re)
    print("security-agent-policies' tag is {}".format(
        _stringify_version(security_agent_policies_version)))

    if not macos_build_version:
        macos_build_version = _get_highest_repo_version(
            github_token, "datadog-agent-macos-build", highest_version,
            version_re)
        if macos_build_version is None:
            print(
                "ERROR: No version found for datadog-agent-macos-build - did you create the tag?"
            )
            raise Exit(code=1)
        if macos_build_version["rc"] is not None:
            print(
                "ERROR: datadog-agent-macos-build tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on datadog-agent-macos-build.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("datadog-agent-macos-build' tag is {}".format(
        _stringify_version(macos_build_version)))

    _save_release_json(
        release_json,
        list_major_versions,
        highest_version,
        integration_version,
        omnibus_software_version,
        omnibus_ruby_version,
        jmxfetch_version,
        security_agent_policies_version,
        macos_build_version,
    )

    # Update internal module dependencies
    update_modules(ctx, _stringify_version(highest_version))
Code example #13
File: test.py Project: okanyedibela/datadog-agent
def test(
    ctx,
    module=None,
    targets=None,
    coverage=False,
    build_include=None,
    build_exclude=None,
    verbose=False,
    race=False,
    profile=False,
    fail_on_fmt=False,
    rtloader_root=None,
    python_home_2=None,
    python_home_3=None,
    cpus=0,
    major_version='7',
    python_runtimes='3',
    timeout=120,
    arch="x64",
    cache=True,
    skip_linters=False,
    save_result_json=None,
    go_mod="mod",
):
    """
    Run all the tools and tests on the given module and targets.

    A module should be provided as the path to one of the go modules in the repository.

    Targets should be provided as a comma-separated list of relative paths within the given module.
    If targets are provided but no module is set, the main module (".") is used.

    If no module or target is set the tests are run against all modules and targets.

    Example invocation:
        inv test --targets=./pkg/collector/check,./pkg/aggregator --race
        inv test --module=. --race
    """
    if isinstance(module, str):
        # when this function is called from the command line, targets are passed
        # as comma separated tokens in a string
        if isinstance(targets, str):
            modules = [GoModule(module, targets=targets.split(','))]
        else:
            modules = [m for m in DEFAULT_MODULES.values() if m.path == module]
    elif isinstance(targets, str):
        modules = [GoModule(".", targets=targets.split(','))]
    else:
        print("Using default modules and targets")
        modules = DEFAULT_MODULES.values()

    build_include = (get_default_build_tags(build="test-with-process-tags",
                                            arch=arch) if build_include is None
                     else filter_incompatible_tags(build_include.split(","),
                                                   arch=arch))
    build_exclude = [] if build_exclude is None else build_exclude.split(",")
    build_tags = get_build_tags(build_include, build_exclude)

    timeout = int(timeout)

    # explicitly run these tasks instead of using pre-tasks so we can
    # pass the `target` param (pre-tasks are invoked without parameters)
    print("--- go generating:")
    generate(ctx)

    if skip_linters:
        print("--- [skipping linters]")
    else:
        print("--- Linting licenses:")
        lint_licenses(ctx)

        print("--- Linting filenames:")
        lint_filenames(ctx)

        # Until all packages whitelisted in .golangci.yml are fixed and removed
        # from the 'skip-dirs' list we need to keep using the old functions that
        # lint without build flags (linting some file is better than no linting).
        print("--- Vetting and linting (legacy):")
        for module in modules:
            print("----- Module '{}'".format(module.full_path()))
            if not module.condition():
                print("----- Skipped")
                continue

            with ctx.cd(module.full_path()):
                vet(ctx,
                    targets=module.targets,
                    rtloader_root=rtloader_root,
                    build_tags=build_tags,
                    arch=arch)
                fmt(ctx, targets=module.targets, fail_on_fmt=fail_on_fmt)
                lint(ctx, targets=module.targets)
                misspell(ctx, targets=module.targets)
                ineffassign(ctx, targets=module.targets)
                staticcheck(ctx,
                            targets=module.targets,
                            build_tags=build_tags,
                            arch=arch)

        # for now we only run golangci_lint on Unix as the Windows env need more work
        if sys.platform != 'win32':
            print("--- golangci_lint:")
            for module in modules:
                print("----- Module '{}'".format(module.full_path()))
                if not module.condition():
                    print("----- Skipped")
                    continue

                with ctx.cd(module.full_path()):
                    golangci_lint(ctx,
                                  targets=module.targets,
                                  rtloader_root=rtloader_root,
                                  build_tags=build_tags,
                                  arch=arch)

    with open(PROFILE_COV, "w") as f_cov:
        f_cov.write("mode: count")

    ldflags, gcflags, env = get_build_flags(
        ctx,
        rtloader_root=rtloader_root,
        python_home_2=python_home_2,
        python_home_3=python_home_3,
        major_version=major_version,
        python_runtimes=python_runtimes,
    )

    if sys.platform == 'win32':
        env['CGO_LDFLAGS'] += ' -Wl,--allow-multiple-definition'

    if profile:
        test_profiler = TestProfiler()
    else:
        test_profiler = None  # Use stdout

    race_opt = ""
    covermode_opt = ""
    build_cpus_opt = ""
    if cpus:
        build_cpus_opt = "-p {}".format(cpus)
    if race:
        # race doesn't appear to be supported on non-x64 platforms
        if arch == "x86":
            print(
                "\n -- Warning... disabling race test, not supported on this platform --\n"
            )
        else:
            race_opt = "-race"

        # Needed to fix an issue when using -race + gcc 10.x on Windows
        # https://github.com/bazelbuild/rules_go/issues/2614
        if sys.platform == 'win32':
            ldflags += " -linkmode=external"

    if coverage:
        if race:
            # atomic is quite expensive but it's the only way to run
            # both the coverage and the race detector at the same time
            # without getting false positives from the cover counter
            covermode_opt = "-covermode=atomic"
        else:
            covermode_opt = "-covermode=count"

    print("\n--- Running unit tests:")

    coverprofile = ""
    if coverage:
        coverprofile = "-coverprofile={}".format(PROFILE_COV)

    nocache = '-count=1' if not cache else ''

    build_tags.append("test")
    TMP_JSON = 'tmp.json'
    if save_result_json and os.path.isfile(save_result_json):
        # Remove existing file since we append to it.
        # We don't need to do that for TMP_JSON since gotestsum overwrites the output.
        print("Removing existing '{}' file".format(save_result_json))
        os.remove(save_result_json)

    cmd = 'go run gotest.tools/gotestsum {json_flag} --format pkgname -- {verbose} -mod={go_mod} -vet=off -timeout {timeout}s -tags "{go_build_tags}" -gcflags="{gcflags}" '
    cmd += '-ldflags="{ldflags}" {build_cpus} {race_opt} -short {covermode_opt} {coverprofile} {nocache} {pkg_folder}'
    args = {
        "go_mod": go_mod,
        "go_build_tags": " ".join(build_tags),
        "gcflags": gcflags,
        "ldflags": ldflags,
        "race_opt": race_opt,
        "build_cpus": build_cpus_opt,
        "covermode_opt": covermode_opt,
        "coverprofile": coverprofile,
        "timeout": timeout,
        "verbose": '-v' if verbose else '',
        "nocache": nocache,
        "json_flag": '--jsonfile "{}" '.format(TMP_JSON) if save_result_json else "",
    }

    failed_modules = []
    for module in modules:
        print("----- Module '{}'".format(module.full_path()))
        if not module.condition():
            print("----- Skipped")
            continue

        with ctx.cd(module.full_path()):
            res = ctx.run(
                cmd.format(pkg_folder=' '.join(
                    "{}/...".format(t) if not t.endswith("/...") else t
                    for t in module.targets),
                           **args),
                env=env,
                out_stream=test_profiler,
                warn=True,
            )

        if res.exited is None or res.exited > 0:
            failed_modules.append(module.full_path())

        if save_result_json:
            with open(save_result_json, 'ab') as json_file, open(
                    os.path.join(module.full_path(), TMP_JSON),
                    'rb') as module_file:
                json_file.write(module_file.read())

    if failed_modules:
        # Exit if any of the modules failed
        raise Exit(
            code=1,
            message="Unit tests failed in the following modules: {}".format(
                ', '.join(failed_modules)))

    if coverage:
        print("\n--- Test coverage:")
        ctx.run("go tool cover -func {}".format(PROFILE_COV))

    if profile:
        print("\n--- Top 15 packages sorted by run time:")
        test_profiler.print_sorted(15)
Code example #14
File: go.py Project: washkinazy/datadog-agent
def deps(ctx, no_checks=False, core_dir=None, verbose=False, android=False, dep_vendor_only=False, no_dep_ensure=False):
    """
    Setup Go dependencies
    """
    deps = get_deps('deps')
    order = deps.get("order", deps.keys())
    for dependency in order:
        tool = deps.get(dependency)
        if not tool:
            print("Malformed bootstrap JSON, dependency {} not found". format(dependency))
            raise Exit(code=1)
        print("processing checkout tool {}".format(dependency))
        process_deps(ctx, dependency, tool.get('version'), tool.get('type'), 'checkout', verbose=verbose)

    order = deps.get("order", deps.keys())
    for dependency in order:
        tool = deps.get(dependency)
        print("processing get tool {}".format(dependency))
        process_deps(ctx, dependency, tool.get('version'), tool.get('type'), 'install', verbose=verbose)

    if android:
        ndkhome = os.environ.get('ANDROID_NDK_HOME')
        if not ndkhome:
            print("set ANDROID_NDK_HOME to build android")
            raise Exit(code=1)

        cmd = "gomobile init -ndk {}". format(ndkhome)
        print("gomobile command {}". format(cmd))
        ctx.run(cmd)

    if not no_dep_ensure:
        # source level deps
        print("calling dep ensure")
        start = datetime.datetime.now()
        verbosity = ' -v' if verbose else ''
        vendor_only = ' --vendor-only' if dep_vendor_only else ''
        ctx.run("dep ensure{}{}".format(verbosity, vendor_only))
        dep_done = datetime.datetime.now()

        # If github.com/DataDog/datadog-agent gets vendored too - nuke it
        #
        # This may happen as a result of having to introduce DEPPROJECTROOT
        # in our builders to get around a known-issue with go dep, and the
        # strange GOPATH situation in our builders.
        #
        # This is only a workaround, we should eliminate the need to resort
        # to DEPPROJECTROOT.
        if os.path.exists('vendor/github.com/DataDog/datadog-agent'):
            print("Removing vendored github.com/DataDog/datadog-agent")
            shutil.rmtree('vendor/github.com/DataDog/datadog-agent')

        # make sure PSUTIL is gone on windows; the dep ensure above will vendor it
        # in because it's necessary on other platforms
        if not android and sys.platform == 'win32':
            print("Removing PSUTIL on Windows")
            ctx.run("rd /s/q vendor\\github.com\\shirou\\gopsutil")

        # Make sure that golang.org/x/mobile is deleted.  It will get vendored in
        # because we use it, and there's no way to exclude; however, we must use
        # the version from $GOPATH
        if os.path.exists('vendor/golang.org/x/mobile'):
            print("Removing vendored golang.org/x/mobile")
            shutil.rmtree('vendor/golang.org/x/mobile')

    checks_start = datetime.datetime.now()
    if not no_checks:
        verbosity = 'v' if verbose else 'q'
        core_dir = core_dir or os.getenv('DD_CORE_DIR')

        if core_dir:
            checks_base = os.path.join(os.path.abspath(core_dir), 'datadog_checks_base')
            ctx.run('pip install -{} -e "{}[deps]"'.format(verbosity, checks_base))
        else:
            core_dir = os.path.join(os.getcwd(), 'vendor', 'integrations-core')
            checks_base = os.path.join(core_dir, 'datadog_checks_base')
            if not os.path.isdir(core_dir):
                ctx.run('git clone -{} https://github.com/DataDog/integrations-core {}'.format(verbosity, core_dir))
            ctx.run('pip install -{} "{}[deps]"'.format(verbosity, checks_base))
    checks_done = datetime.datetime.now()

    if not no_dep_ensure:
        print("dep ensure, elapsed:    {}".format(dep_done - start))
    print("checks install elapsed: {}".format(checks_done - checks_start))
Code example #15
def update_changelog(ctx, new_version):
    """
    Quick task to generate the new CHANGELOG using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version_int))
        raise Exit(1)

    # let's avoid losing uncommitted change with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update_changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep {}".format(new_version))
    except Failure:
        print("Missing '{}' git tag: mandatory to use 'reno'".format(
            new_version))
        raise

    # removing releasenotes from bugfix on the old minor.
    branching_point = "{}.{}.0-devel".format(new_version_int[0],
                                             new_version_int[1])
    previous_minor = "{}.{}".format(new_version_int[0], new_version_int[1] - 1)
    if previous_minor == "7.15":
        previous_minor = "6.15"  # 7.16 was the first release in the 7.x series
    log_result = ctx.run(
        "git log {}...remotes/origin/{}.x --name-only --oneline | \
            grep releasenotes/notes/ || true".format(branching_point,
                                                     previous_minor))
    log_result = log_result.stdout.replace('\n', ' ').strip()
    if len(log_result) > 0:
        ctx.run("git rm --ignore-unmatch {}".format(log_result))

    # generate the new changelog
    ctx.run("reno report \
            --ignore-cache \
            --earliest-version {} \
            --version {} \
            --no-show-source > /tmp/new_changelog.rst".format(
        branching_point, new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # mac's `sed` has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # check whether there is a v6 tag on the same v7 tag, if so add the v6 tag to the release title
    v6_tag = ""
    if new_version_int[0] == 7:
        v6_tag = _find_v6_tag(ctx, new_version)
        if v6_tag:
            ctx.run(
                "sed {0} -E 's#^{1}#{1} / {2}#' /tmp/new_changelog.rst".format(
                    sed_i_arg, new_version, v6_tag))
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG.rst".format(sed_i_arg))

    # merging to CHANGELOG.rst
    ctx.run(
        "cat CHANGELOG.rst >> /tmp/new_changelog.rst && mv /tmp/new_changelog.rst CHANGELOG.rst"
    )

    # commit new CHANGELOG
    ctx.run("git add CHANGELOG.rst \
            && git commit -m \"Update CHANGELOG for {}\"".format(new_version))
Code example #16
File: docker.py Project: yasra002/datadog-agent
def dockerize_test(ctx, binary, skip_cleanup=False):
    """
    Run a go test in a remote docker environment and pipe its output to stdout.
    Host and target systems must be identical (test is built on the host).
    """
    import docker

    client = docker.from_env()
    temp_folder = tempfile.mkdtemp(prefix="ddtest-")

    ctx.run("cp %s %s/test.bin" % (binary, temp_folder))

    with open("%s/Dockerfile" % temp_folder, 'w') as stream:
        stream.write("""FROM debian:stretch-slim
ENV DOCKER_DD_AGENT=yes
WORKDIR /
ADD https://github.com/docker/compose/releases/download/1.16.1/docker-compose-Linux-x86_64 /bin/docker-compose
RUN echo "1804b0ce6596efe707b9cab05d74b161833ed503f0535a937dd5d17bea8fc50a  /bin/docker-compose" > sum && \
    sha256sum -c sum && \
    chmod +x /bin/docker-compose
CMD /test.bin
COPY test.bin /test.bin
""")
        # Handle optional testdata folder
        if os.path.isdir("./testdata"):
            ctx.run("cp -R testdata %s" % temp_folder)
            stream.write("COPY testdata /testdata")

    test_image, _ = client.images.build(path=temp_folder, rm=True)

    scratch_volume = client.volumes.create()

    test_container = client.containers.run(
        test_image.id,
        detach=True,
        pid_mode="host",  # For origin detection
        environment=[
            "SCRATCH_VOLUME_NAME=" + scratch_volume.name,
            "SCRATCH_VOLUME_PATH=/tmp/scratch",
        ],
        volumes={
            '/var/run/docker.sock': {
                'bind': '/var/run/docker.sock',
                'mode': 'ro'
            },
            '/proc': {
                'bind': '/host/proc',
                'mode': 'ro'
            },
            '/sys/fs/cgroup': {
                'bind': '/host/sys/fs/cgroup',
                'mode': 'ro'
            },
            scratch_volume.name: {
                'bind': '/tmp/scratch',
                'mode': 'rw'
            }
        })

    exit_code = test_container.wait()['StatusCode']

    print(test_container.logs(stdout=True, stderr=False, stream=False))

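    # NOTE: docker-py's logs() returns bytes; on Python 3, print() would show the
    # b'...' repr and sys.stderr.write() would raise on bytes, so an explicit
    # .decode() would be needed before writing to text streams.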
    sys.stderr.write(
        test_container.logs(stdout=False, stderr=True, stream=False))

    if not skip_cleanup:
        shutil.rmtree(temp_folder)
        test_container.remove(v=True, force=True)
        scratch_volume.remove(force=True)
        client.images.remove(test_image.id)

    if exit_code != 0:
        raise Exit(code=exit_code)
Code example #17
def get_integrations_from_cache(ctx, python, bucket, branch, integrations_dir, target_dir, integrations, awscli="aws"):
    """
    Get cached integration wheels for given integrations.
    python: Python version to retrieve integrations for
    bucket: S3 bucket to retrieve integration wheels from
    branch: namespace in the bucket to get the integration wheels from
    integrations_dir: directory with Git repository of integrations
    target_dir: local directory to put integration wheels to
    integrations: comma-separated names of the integrations to try to retrieve from cache
    awscli: AWS CLI executable to call
    """
    integrations_hashes = {}
    for integration in integrations.strip().split(","):
        integration_path = os.path.join(integrations_dir, integration)
        if not os.path.exists(integration_path):
            raise Exit(f"Integration {integration} given, but doesn't exist in {integrations_dir}", code=2)
        last_commit = ctx.run(
            LAST_DIRECTORY_COMMIT_PATTERN.format(integrations_dir=integrations_dir, integration=integration),
            hide="both",
            echo=False,
        )
        integrations_hashes[integration] = last_commit.stdout.strip()

    print(f"Trying to retrieve {len(integrations_hashes)} integration wheels from cache")
    # On Windows, the maximum length of a command line call is 8191 characters, so
    # we do multiple syncs that each fit within that limit (we use 8100 as a nice
    # round number, and to make sure we don't make any off-by-one errors that would
    # break this).
    # WINDOWS NOTES: on Windows, the awscli is usually in Program Files, so we have
    # to wrap the executable in quotes; we also must not quote the *, as the shell
    # does no expansion on it, unlike on Linux
    exclude_wildcard = "*" if platform.system().lower() == "windows" else "'*'"
    sync_command_prefix = (
        f"\"{awscli}\" s3 sync s3://{bucket} {target_dir} --no-sign-request --exclude {exclude_wildcard}"
    )
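    # Illustrative result on Linux (bucket and target values depend on the caller):
    # "aws" s3 sync s3://my-bucket wheels --no-sign-request --exclude '*'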
    sync_commands = [[[sync_command_prefix], len(sync_command_prefix)]]
    for integration, hash in integrations_hashes.items():
        include_arg = " --include " + CACHED_WHEEL_FULL_PATH_PATTERN.format(
            hash=hash,
            integration=integration,
            python_version=python,
            branch=branch,
        )
        if len(include_arg) + sync_commands[-1][1] > 8100:
            sync_commands.append([[sync_command_prefix], len(sync_command_prefix)])
        sync_commands[-1][0].append(include_arg)
        sync_commands[-1][1] += len(include_arg)

    for sync_command in sync_commands:
        ctx.run("".join(sync_command[0]))

    found = []
    # move all wheel files directly to the target_dir, so they're easy to find/work with in Omnibus
    for integration in sorted(integrations_hashes):
        hash = integrations_hashes[integration]
        original_path_glob = os.path.join(
            target_dir,
            CACHED_WHEEL_FULL_PATH_PATTERN.format(
                hash=hash,
                integration=integration,
                python_version=python,
                branch=branch,
            ),
        )
        files_matched = glob.glob(original_path_glob)
        if len(files_matched) == 0:
            continue
        elif len(files_matched) > 1:
            raise Exit(
                f"More than 1 wheel for integration {integration} matched by {original_path_glob}: {files_matched}"
            )
        wheel_path = files_matched[0]
        print(f"Found cached wheel for integration {integration}")
        shutil.move(wheel_path, target_dir)
        found.append(f"datadog_{integration}")

    print(f"Found {len(found)} cached integration wheels")
    with open(os.path.join(target_dir, "found.txt"), "w") as f:
        f.write('\n'.join(found))
Code example #18
def update_dca_changelog(ctx, new_version, agent_version):
    """
    Quick task to generate the new CHANGELOG-DCA using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version_int))
        raise Exit(1)

    agent_version_int = list(map(int, agent_version.split(".")))

    if len(agent_version_int) != 3:
        print("Error: invalid version: {}".format(agent_version_int))
        raise Exit(1)

    # let's avoid losing uncommitted change with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update-dca-changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep dca-{}".format(new_version))
    except Failure:
        print("Missing 'dca-{}' git tag: mandatory to use 'reno'".format(
            new_version))
        raise

    # Cluster agent minor releases are in sync with the agent's; bugfix releases are not necessarily.
    # We rely on the agent's devel tag to enforce the sync between both releases.
    branching_point_agent = "{}.{}.0-devel".format(agent_version_int[0],
                                                   agent_version_int[1])
    previous_minor_branchoff = "dca-{}.{}.X".format(new_version_int[0],
                                                    new_version_int[1] - 1)
    log_result = ctx.run(
        "git log {}...remotes/origin/{} --name-only --oneline | \
            grep releasenotes-dca/notes/ || true".format(
            branching_point_agent, previous_minor_branchoff))
    log_result = log_result.stdout.replace('\n', ' ').strip()

    # Do not include release notes that were added in the previous minor release branch (previous_minor_branchoff)
    # and the branch-off points for the current release (pinned by the agent's devel tag)
    if len(log_result) > 0:
        ctx.run("git rm --ignore-unmatch {}".format(log_result))

    current_branchoff = "dca-{}.{}.X".format(new_version_int[0],
                                             new_version_int[1])
    # generate the new changelog. Specifying branch in case this is run outside the release branch that contains the tag.
    ctx.run("reno --rel-notes-dir releasenotes-dca report \
            --ignore-cache \
            --branch {} \
            --version dca-{} \
            --no-show-source > /tmp/new_changelog-dca.rst".format(
        current_branchoff, new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # mac's `sed` has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG-DCA.rst".format(sed_i_arg))

    if sys.platform != 'darwin':
        # sed on darwin doesn't support `-z`. On mac, you will need to manually update the following.
        ctx.run(
            "sed -z {0} -e 's/dca-{1}\\n===={2}/{1}\\n{2}/' /tmp/new_changelog-dca.rst"
            .format(sed_i_arg, new_version, '=' * len(new_version)))

    # merging to CHANGELOG.rst
    ctx.run(
        "cat CHANGELOG-DCA.rst >> /tmp/new_changelog-dca.rst && mv /tmp/new_changelog-dca.rst CHANGELOG-DCA.rst"
    )

    # commit new CHANGELOG
    ctx.run("git add CHANGELOG-DCA.rst")

    print("\nCommit this with:")
    print(
        "git commit -m \"[DCA] Update CHANGELOG for {}\"".format(new_version))
Code example #19
def check_version(agent_version):
    """Check Agent version to see if it is valid."""
    version_re = re.compile(r'7[.](\d+)[.](\d+)(-rc\.(\d+))?')
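    # NOTE: re.match only anchors at the start of the string, so a version with a
    # trailing suffix (e.g. "7.30.1-beta") would still pass; fullmatch would be stricter.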
    if not version_re.match(agent_version):
        raise Exit(message="Version should be of the form 7.Y.Z or 7.Y.Z-rc.t")
Code example #20
File: utils.py Project: rabbagliettiandrea/witch
def abort():
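    # invoke passes this code to sys.exit(); POSIX shells typically report -1 as 255.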
    raise Exit(code=-1)
Code example #21
File: go.py Project: sriram9707/datadog-agent
def deps(
    ctx,
    verbose=False,
    android=False,
    no_bootstrap=False,
    no_dep_ensure=False,
):
    """
    Setup Go dependencies
    """
    if not no_bootstrap:
        deps = get_deps('deps')
        order = deps.get("order", deps.keys())
        for dependency in order:
            tool = deps.get(dependency)
            if not tool:
                print(
                    "Malformed bootstrap JSON, dependency {} not found".format(
                        dependency))
                raise Exit(code=1)
            print("processing checkout tool {}".format(dependency))
            process_deps(ctx,
                         dependency,
                         tool.get('version'),
                         tool.get('type'),
                         'checkout',
                         verbose=verbose)

        order = deps.get("order", deps.keys())
        for dependency in order:
            tool = deps.get(dependency)
            if tool.get('install', True):
                print("processing get tool {}".format(dependency))
                process_deps(
                    ctx,
                    dependency,
                    tool.get('version'),
                    tool.get('type'),
                    'install',
                    cmd=tool.get('cmd'),
                    verbose=verbose,
                )

    if android:
        ndkhome = os.environ.get('ANDROID_NDK_HOME')
        if not ndkhome:
            print("set ANDROID_NDK_HOME to build android")
            raise Exit(code=1)

        cmd = "gomobile init -ndk {}".format(ndkhome)
        print("gomobile command {}".format(cmd))
        ctx.run(cmd)

    if not no_dep_ensure:
        # source level deps
        print("calling go mod vendor")
        start = datetime.datetime.now()
        verbosity = ' -v' if verbose else ''
        ctx.run("go mod vendor{}".format(verbosity))
        # use modvendor to copy missing files dependencies
        ctx.run('{}/bin/modvendor -copy="**/*.c **/*.h **/*.proto"{}'.format(
            get_gopath(ctx), verbosity))
        dep_done = datetime.datetime.now()

        # If github.com/DataDog/datadog-agent gets vendored too - nuke it
        #
        # This may happen as a result of having to introduce DEPPROJECTROOT
        # in our builders to get around a known-issue with go dep, and the
        # strange GOPATH situation in our builders.
        #
        # This is only a workaround, we should eliminate the need to resort
        # to DEPPROJECTROOT.
        if os.path.exists('vendor/github.com/DataDog/datadog-agent'):
            print("Removing vendored github.com/DataDog/datadog-agent")
            shutil.rmtree('vendor/github.com/DataDog/datadog-agent')

        # make sure PSUTIL is gone on windows; the go mod above will vendor it
        # in because it's necessary on other platforms
        if not android and sys.platform == 'win32':
            print("Removing PSUTIL on Windows")
            ctx.run("rd /s/q vendor\\github.com\\shirou\\gopsutil")

        # Make sure that golang.org/x/mobile is deleted.  It will get vendored in
        # because we use it, and there's no way to exclude; however, we must use
        # the version from $GOPATH
        if os.path.exists('vendor/golang.org/x/mobile'):
            print("Removing vendored golang.org/x/mobile")
            shutil.rmtree('vendor/golang.org/x/mobile')

    if not no_dep_ensure:
        print("go mod vendor, elapsed: {}".format(dep_done - start))
Code example #22
def deps(ctx, no_checks=False, core_dir=None, verbose=False, android=False):
    """
    Setup Go dependencies
    """
    verbosity = ' -v' if verbose else ''
    deps = get_deps('deps')
    order = deps.get("order", deps.keys())
    env = load_release_versions(ctx)
    for dependency in order:
        tool = deps.get(dependency)
        if not tool:
            print("Malformed bootstrap JSON, dependency {} not found".format(
                dependency))
            raise Exit(code=1)
        print("processing checkout tool {}".format(dependency))
        process_deps(ctx,
                     dependency,
                     tool.get('version'),
                     tool.get('type'),
                     'checkout',
                     verbose=verbose)

    order = deps.get("order", deps.keys())
    for dependency in order:
        tool = deps.get(dependency)
        if tool.get('install', True):
            print("processing get tool {}".format(dependency))
            process_deps(ctx,
                         dependency,
                         tool.get('version'),
                         tool.get('type'),
                         'install',
                         verbose=verbose)

    if android:
        ndkhome = os.environ.get('ANDROID_NDK_HOME')
        if not ndkhome:
            print("set ANDROID_NDK_HOME to build android")
            raise Exit(code=1)

        cmd = "gomobile init -ndk {}".format(ndkhome)
        print("gomobile command {}".format(cmd))
        ctx.run(cmd)

    # source level deps
    print("calling dep ensure")
    start = datetime.datetime.now()
    ctx.run("dep ensure{}".format(verbosity))
    dep_done = datetime.datetime.now()

    # If github.com/DataDog/datadog-agent gets vendored too - nuke it
    #
    # This may happen as a result of having to introduce DEPPROJECTROOT
    # in our builders to get around a known-issue with go dep, and the
    # strange GOPATH situation in our builders.
    #
    # This is only a workaround, we should eliminate the need to resort
    # to DEPPROJECTROOT.
    if os.path.exists('vendor/github.com/DataDog/datadog-agent'):
        print("Removing vendored github.com/DataDog/datadog-agent")
        shutil.rmtree('vendor/github.com/DataDog/datadog-agent')

    # make sure PSUTIL is gone on windows; the dep ensure above will vendor it
    # in because it's necessary on other platforms
    if not android and sys.platform == 'win32':
        print("Removing PSUTIL on Windows")
        ctx.run("rd /s/q vendor\\github.com\\shirou\\gopsutil")

    # Make sure that golang.org/x/mobile is deleted.  It will get vendored in
    # because we use it, and there's no way to exclude; however, we must use
    # the version from $GOPATH
    if os.path.exists('vendor/golang.org/x/mobile'):
        print("Removing vendored golang.org/x/mobile")
        shutil.rmtree('vendor/golang.org/x/mobile')

    checks_start = datetime.datetime.now()
    if not no_checks:
        verbosity = 'v' if verbose else 'q'
        core_dir = core_dir or os.getenv('STACKSTATE_INTEGRATIONS_DIR')

        if core_dir:
            checks_base = os.path.join(os.path.abspath(core_dir),
                                       'stackstate_checks_base')
            ctx.run('pip install -{} -e {}'.format(verbosity, checks_base))
            ctx.run('pip install -{} -r {}'.format(
                verbosity, os.path.join(checks_base, 'requirements.in')))
        else:
            core_dir = os.path.join(os.getcwd(), 'vendor',
                                    'stackstate-agent-integrations')
            checks_base = os.path.join(core_dir, 'stackstate_checks_base')
            if not os.path.isdir(core_dir):
                ctx.run(
                    'git clone -{} https://github.com/StackVista/stackstate-agent-integrations {}'
                    .format(verbosity, core_dir))
            integrations_core_version = os.getenv(
                'STACKSTATE_INTEGRATIONS_VERSION'
            ) or env['STACKSTATE_INTEGRATIONS_VERSION'] or "master"
            ctx.run('git -C {} checkout {}'.format(core_dir,
                                                   integrations_core_version))
            tags_output = ctx.run("git -C " + core_dir +
                                  " ls-remote --tags | awk -F/ '{ print $3 }'")
            tags = tags_output.stdout
            if integrations_core_version in tags:
                ctx.run('git -C {} pull origin master'.format(core_dir))
            else:
                ctx.run('git -C {} pull'.format(core_dir))
            ctx.run('pip install -{} {}'.format(verbosity, checks_base))
            ctx.run('pip install -{} -r {}'.format(
                verbosity, os.path.join(checks_base, 'requirements.in')))
    checks_done = datetime.datetime.now()

    print("dep ensure, elapsed:    {}".format(dep_done - start))
    print("checks install elapsed: {}".format(checks_done - checks_start))
Code example #23
def finish(ctx,
           major_versions="6,7",
           integration_version=None,
           omnibus_software_version=None,
           jmxfetch_version=None,
           omnibus_ruby_version=None,
           ignore_rc_tag=False):
    """
    Creates a new entry in the release.json file for the new version and removes all the RC entries.
    """

    list_major_versions = major_versions.split(",")
    print("Creating new agent version(s) {}".format(list_major_versions))

    # use a list (not a lazy map) so it can be iterated more than once below
    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = max(list_major_versions)

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    version_re = re.compile('(\\d+)[.](\\d+)[.](\\d+)(-rc\\.(\\d+))?')
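    # groups: (major).(minor).(patch) plus an optional '-rc.N' suffix; group 5 is the RC number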

    auth = "https://{}:x-oauth-basic@".format(github_token)

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version, highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # Erase RCs
    for major_version in list_major_versions:
        highest_version["major"] = major_version
        rc = highest_version["rc"]
        while highest_version["rc"] not in [0, None]:
            # In case we have skipped an RC in the file...
            try:
                release_json.pop(_stringify_version(highest_version))
            finally:
                highest_version["rc"] = highest_version["rc"] - 1
        highest_version["rc"] = rc

    # Tags in other repos are based on the highest major (e.g. for releasing version 6.X.Y and 7.X.Y they will tag only 7.X.Y)
    highest_version["major"] = highest_major

    # We don't want to fetch RC tags
    highest_version["rc"] = None

    if not integration_version:
        integration_version = _get_highest_repo_version(
            auth, "integrations-core", highest_version, version_re)
        if integration_version is None:
            print(
                "ERROR: No version found for integrations-core - did you create the tag?"
            )
            raise Exit(code=1)
        if integration_version["rc"] is not None:
            print(
                "ERROR: The integrations-core tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with the RC tag on integrations-core.")
            else:
                print("Aborting.")
                raise Exit(code=1)
        integration_version = _stringify_version(integration_version)
    print("integrations-core's tag is {}".format(integration_version))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            auth, "omnibus-software", highest_version, version_re)
        if omnibus_software_version is None:
            print(
                "ERROR: No version found for omnibus-software - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_software_version["rc"] is not None:
            print(
                "ERROR: The omnibus-software tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with the RC tag on omnibus-software.")
            else:
                print("Aborting.")
                raise Exit(code=1)
        omnibus_software_version = _stringify_version(omnibus_software_version)
    print("omnibus-software's tag is {}".format(omnibus_software_version))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(auth, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
        jmxfetch_version = _stringify_version(jmxfetch_version)
    print("Jmxfetch's tag is {}".format(jmxfetch_version))

    if not omnibus_ruby_version:
        print(
            "ERROR: No omnibus_ruby_version found. Please specify it manually via '--omnibus-ruby-version' until we start tagging omnibus-ruby builds."
        )
        raise Exit(code=1)

    _save_release_json(release_json, list_major_versions, highest_version,
                       integration_version, omnibus_software_version,
                       omnibus_ruby_version, jmxfetch_version)
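
For context, `_save_release_json` (defined outside this excerpt) maintains release.json entries keyed by agent version. A hypothetical entry might look roughly like the fragment below; the field names follow the datadog-agent release.json convention, but treat both the names and the values here as illustrative assumptions:

    "7.26.0": {
        "INTEGRATIONS_CORE_VERSION": "7.26.0",
        "OMNIBUS_SOFTWARE_VERSION": "7.26.0",
        "OMNIBUS_RUBY_VERSION": "datadog-5.5.0",
        "JMXFETCH_VERSION": "0.44.1"
    }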
Code example #24
File: fabfile.py  Project: kocurvsmotane/realpy2
def test(c):
    with c.prefix("source env/bin/activate"):
        result = c.run("nosetests -v", warn=True)
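        # 'confirm' is assumed to be a console prompt helper imported
        # elsewhere in the fabfile (e.g. invocations.console.confirm)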
        if not result.ok and not confirm("Tests failed. Continue?"):
            raise Exit()
Code example #25
def create_rc(ctx,
              major_versions="6,7",
              integration_version=None,
              omnibus_software_version=None,
              jmxfetch_version=None,
              omnibus_ruby_version=None):
    """
    Takes whatever version is the highest in release.json and adds a new RC to it.
    If there was no RC, creates the first one and bumps the minor version; if there was an RC, creates RC + 1.
    """

    list_major_versions = major_versions.split(",")
    if len(list_major_versions) < 1:
        print("Specify at least one major version to release")
        raise Exit(code=1)

    print("Creating RC for agent version(s) {}".format(list_major_versions))
    # use a list (not a lazy map) so it can be iterated more than once below
    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = max(list_major_versions)

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    version_re = re.compile('(\\d+)[.](\\d+)[.](\\d+)(-rc\\.(\\d+))?')

    auth = "https://{}:x-oauth-basic@".format(github_token)

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version, highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    if highest_version["rc"] is None:
        # No RC exists, create one
        highest_version["minor"] = highest_version["minor"] + 1
        highest_version["rc"] = 1
    else:
        # An RC exists, create next RC
        highest_version["rc"] = highest_version["rc"] + 1
    new_rc = _stringify_version(highest_version)
    print("Creating {}".format(new_rc))

    if not integration_version:
        integration_version = _get_highest_repo_version(
            auth, "integrations-core", highest_version, version_re)
        integration_version = _stringify_version(integration_version)
    print("Integration-Core's tag is {}".format(integration_version))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            auth, "omnibus-software", highest_version, version_re)
        omnibus_software_version = _stringify_version(omnibus_software_version)
    print("Omnibus-Software's tag is {}".format(omnibus_software_version))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(auth, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
        jmxfetch_version = _stringify_version(jmxfetch_version)
    print("Jmxfetch's tag is {}".format(jmxfetch_version))

    if not omnibus_ruby_version:
        print(
            "ERROR: No omnibus_ruby_version found. Please specify it manually via '--omnibus-ruby-version' until we start tagging omnibus-ruby builds."
        )
        raise Exit(code=1)

    _save_release_json(release_json, list_major_versions, highest_version,
                       integration_version, omnibus_software_version,
                       omnibus_ruby_version, jmxfetch_version)
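
The bump rule applied above is small enough to restate as a standalone sketch; `_next_rc` is a hypothetical name, and the real task mutates the version dict in place:

    def _next_rc(version):
        # version is a dict like {"major": 6, "minor": 15, "patch": 0, "rc": None}
        if version["rc"] is None:
            version["minor"] += 1  # e.g. 6.15.0 -> 6.16.0-rc.1
            version["rc"] = 1
        else:
            version["rc"] += 1     # e.g. 6.16.0-rc.1 -> 6.16.0-rc.2
        return version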
Code example #26
File: tasks.py  Project: toolsqacn/metallb
def bgp_dev_env():
    dev_env_dir = os.getcwd() + "/dev-env/bgp"
    frr_volume_dir = dev_env_dir + "/frr-volume"

    # TODO -- The IP address handling will need updates to add support for IPv6

    # We need the IPs for each Node in the cluster to place them in the BGP
    # router configuration file (bgpd.conf). Each Node will peer with this
    # router.
    node_ips = run(
        "kubectl get nodes -o jsonpath='{.items[*].status.addresses"
        "[?(@.type==\"InternalIP\")].address}{\"\\n\"}'",
        echo=True)
    node_ips = node_ips.stdout.strip().split()
    if len(node_ips) != 3:
        raise Exit(message='Expected 3 nodes, got %d' % len(node_ips))

    # Create a new directory that will be used as the config volume for frr.
    try:
        # sudo because past docker runs will have changed ownership of this dir
        run('sudo rm -rf "%s"' % frr_volume_dir)
        os.mkdir(frr_volume_dir)
    except FileExistsError:
        pass
    except Exception as e:
        raise Exit(message='Failed to create frr-volume directory: %s' %
                   str(e))

    # These two config files are static, so we just copy them straight in.
    shutil.copyfile("%s/frr/zebra.conf" % dev_env_dir,
                    "%s/zebra.conf" % frr_volume_dir)
    shutil.copyfile("%s/frr/daemons" % dev_env_dir,
                    "%s/daemons" % frr_volume_dir)

    # bgpd.conf is created from a template so that we can include the current
    # Node IPs.
    with open("%s/frr/bgpd.conf.tmpl" % dev_env_dir, 'r') as f:
        bgpd_config = "! THIS FILE IS AUTOGENERATED\n" + f.read()
    for n, ip in enumerate(node_ips):
        bgpd_config = bgpd_config.replace("NODE%d_IP" % n, ip)
    with open("%s/bgpd.conf" % frr_volume_dir, 'w') as f:
        f.write(bgpd_config)

    # Run a BGP router in a container for all of the speakers to peer with.
    run(
        'for frr in $(docker ps -a -f name=frr --format {{.Names}}) ; do '
        '    docker rm -f $frr ; '
        'done',
        echo=True)
    run("docker run -d --privileged --network kind --rm --name frr --volume %s:/etc/frr "
        "frrouting/frr:latest" % frr_volume_dir,
        echo=True)

    peer_address = run(
        'docker inspect -f "{{ '
        'range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" frr',
        echo=True)
    with open("%s/config.yaml.tmpl" % dev_env_dir, 'r') as f:
        mlb_config = "# THIS FILE IS AUTOGENERATED\n" + f.read()
    mlb_config = mlb_config.replace("PEER_ADDRESS",
                                    peer_address.stdout.strip())
    with open("%s/config.yaml" % dev_env_dir, 'w') as f:
        f.write(mlb_config)
    # Apply the MetalLB ConfigMap
    run("kubectl apply -f %s/config.yaml" % dev_env_dir)
Code example #27
def check_if_logged_in_to_heroku(c):
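    # 'local' is assumed to be an invoke-style runner available at module
    # scope (e.g. `from invoke import run as local`); warn=True keeps a
    # failed `heroku auth:whoami` from raising, so we can raise Exit ourselves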
    if not local("heroku auth:whoami", warn=True):
        raise Exit(
            'Log-in with the "heroku login -i" command before running this '
            "command.")
Code example #28
File: tasks.py  Project: toolsqacn/metallb
def release(ctx, version, skip_release_notes=False):
    """Tag a new release."""
    status = run("git status --porcelain", hide=True).stdout.strip()
    if status != "":
        raise Exit(message="git checkout not clean, cannot release")

    version = semver.parse_version_info(version)
    is_patch_release = version.patch != 0

    # Check that we have release notes for the desired version.
    run("git checkout main", echo=True)
    if not skip_release_notes:
        with open("website/content/release-notes/_index.md") as release_notes:
            if "## Version {}".format(version) not in release_notes.read():
                raise Exit(message="no release notes for v{}".format(version))

    # Move HEAD to the correct release branch - either a new one, or
    # an existing one.
    if is_patch_release:
        run("git checkout v{}.{}".format(version.major, version.minor),
            echo=True)
    else:
        run("git checkout -b v{}.{}".format(version.major, version.minor),
            echo=True)

    # Copy over release notes from main.
    if not skip_release_notes:
        run("git checkout main -- website/content/release-notes/_index.md",
            echo=True)

    # Update links on the website to point to files at the version
    # we're creating.
    if is_patch_release:
        previous_version = "v{}.{}.{}".format(version.major, version.minor,
                                              version.patch - 1)
    else:
        previous_version = "main"

    def _replace(pattern):
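        # 'pattern' carries a single '{}' placeholder: fill it once with the
        # previous ref and once with the new version tag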
        oldpat = pattern.format(previous_version)
        newpat = pattern.format("v{}").format(version)
        run("perl -pi -e 's#{}#{}#g' website/content/*.md website/content/*/*.md"
            .format(oldpat, newpat),
            echo=True)

    _replace("/metallb/metallb/{}")
    _replace("/metallb/metallb/tree/{}")
    _replace("/metallb/metallb/blob/{}")

    # Update the version listed on the website sidebar
    run("perl -pi -e 's/MetalLB .*/MetalLB v{}/g' website/content/_header.md".
        format(version),
        echo=True)

    # Update the manifests with the new version
    run("perl -pi -e 's,image: metallb/speaker:.*,image: metallb/speaker:v{},g' manifests/metallb.yaml"
        .format(version),
        echo=True)
    run("perl -pi -e 's,image: metallb/controller:.*,image: metallb/controller:v{},g' manifests/metallb.yaml"
        .format(version),
        echo=True)

    # Update the version in kustomize instructions
    #
    # TODO: Check if kustomize instructions really need the version in the
    # website or if there is a simpler way. For now, though, we just replace the
    # only page that mentions the version on release.
    run("perl -pi -e 's,github.com/metallb/metallb//manifests\?ref=.*,github.com/metallb/metallb//manifests\?ref=v{},g' website/content/installation/_index.md"
        .format(version),
        echo=True)

    # Update the version embedded in the binary
    run("perl -pi -e 's/version\s+=.*/version = \"{}\"/g' internal/version/version.go"
        .format(version),
        echo=True)
    run("gofmt -w internal/version/version.go", echo=True)

    run("git commit -a -m 'Automated update for release v{}'".format(version),
        echo=True)
    run("git tag v{} -m 'See the release notes for details:\n\nhttps://metallb.universe.tf/release-notes/#version-{}-{}-{}'"
        .format(version, version.major, version.minor, version.patch),
        echo=True)
    run("git checkout main", echo=True)
Code example #29
File: test.py  Project: forrestaustin/datadog-agent
def lint_releasenote(ctx):
    """
    Lint release notes with Reno
    """

    # checking if a releasenote has been added/changed
    pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
    if pr_url:
        import requests
        pr_id = pr_url.rsplit('/')[-1]

        # first check 'changelog/no-changelog' label
        res = requests.get(
            "https://api.github.com/repos/DataDog/datadog-agent/issues/{}".
            format(pr_id))
        issue = res.json()
        if any(l['name'] == 'changelog/no-changelog'
               for l in issue.get('labels', [])):
            print(
                "'changelog/no-changelog' label found on the PR: skipping linting"
            )
            return

        # Then check that at least one note was touched by the PR
        url = "https://api.github.com/repos/DataDog/datadog-agent/pulls/{}/files".format(
            pr_id)
        # traverse paginated github response
        while True:
            res = requests.get(url)
            files = res.json()
            if any([f['filename'].startswith("releasenotes/notes/") or \
                    f['filename'].startswith("releasenotes-dca/notes/") for f in files]):
                break

            if 'next' in res.links:
                url = res.links['next']['url']
            else:
                print("Error: No releasenote was found for this PR. Please add one using 'reno'"\
                      ", or apply the label 'changelog/no-changelog' to the PR.")
                raise Exit(code=1)

    # The PR has not been created yet, let's compare with master (the usual base branch of the future PR)
    else:
        branch = os.environ.get("CIRCLE_BRANCH")
        if branch is None:
            print("No branch found, skipping reno linting")
        else:
            if re.match(r".*/.*", branch) is None:
                print(
                    "{} is not a feature branch, skipping reno linting".format(
                        branch))
            else:
                import requests

                # Then check that in the diff with master, at least one note was touched
                url = "https://api.github.com/repos/DataDog/datadog-agent/compare/master...{}".format(
                    branch)
                # traverse paginated github response
                while True:
                    res = requests.get(url)
                    files = res.json().get("files", {})
                    if any([f['filename'].startswith("releasenotes/notes/") or \
                            f['filename'].startswith("releasenotes-dca/notes/") for f in files]):
                        break

                    if 'next' in res.links:
                        url = res.links['next']['url']
                    else:
                        print("Error: No releasenote was found for this PR. Please add one using 'reno'"\
                              ", or apply the label 'changelog/no-changelog' to the PR.")
                        raise Exit(code=1)

    ctx.run("reno lint")
Code example #30
def update_installscript_changelog(ctx, new_version):
    """
    Quick task to generate the new CHANGELOG-INSTALLSCRIPT using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version))
        raise Exit(1)

    # let's avoid losing uncommitted change with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update-installscript-changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep installscript-{}".format(new_version))
    except Failure:
        print("Missing 'installscript-{}' git tag: mandatory to use 'reno'".
              format(new_version))
        raise

    # generate the new changelog
    ctx.run("reno --rel-notes-dir releasenotes-installscript report \
            --ignore-cache \
            --version installscript-{} \
            --no-show-source > /tmp/new_changelog-installscript.rst".format(
        new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # macOS's `sed` has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG-INSTALLSCRIPT.rst".format(sed_i_arg))

    if sys.platform != 'darwin':
        # darwin's sed doesn't support `-z`; on macOS you will need to apply the following replacement manually
        ctx.run(
            "sed -z {0} -e 's/installscript-{1}\\n===={2}/{1}\\n{2}/' /tmp/new_changelog-installscript.rst"
            .format(sed_i_arg, new_version, '=' * len(new_version)))

    # merging to CHANGELOG-INSTALLSCRIPT.rst
    ctx.run(
        "cat CHANGELOG-INSTALLSCRIPT.rst >> /tmp/new_changelog-installscript.rst && mv /tmp/new_changelog-installscript.rst CHANGELOG-INSTALLSCRIPT.rst"
    )

    # commit new CHANGELOG-INSTALLSCRIPT
    ctx.run("git add CHANGELOG-INSTALLSCRIPT.rst")

    print("\nCommit this with:")
    print(
        "git commit -m \"[INSTALLSCRIPT] Update CHANGELOG-INSTALLSCRIPT for {}\""
        .format(new_version))
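
As a reference for the `sed -z` substitution above: it strips reno's `installscript-` prefix from the generated section title, roughly turning (for a hypothetical new_version of 1.2.3):

    installscript-1.2.3
    ===================

into:

    1.2.3
    =====

where the underline keeps its leftover `=` characters, which reStructuredText tolerates as long as the underline is at least as long as the title.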