Example #1
def showcov(ctx):
    """Open report in a web browser"""
    target = "htmlcov/index.html"
    if not pathlib.Path(target).exists():
        raise Exit("Can't find HTML report")
    webbrowser.open(target)
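
These extracts come from larger Invoke tasks files, so the imports and the @task decorator are not shown. A minimal sketch of the wiring this first example assumes (standard-library imports plus invoke's task decorator and Exit exception):

import pathlib
import webbrowser

from invoke import task
from invoke.exceptions import Exit  # aborts the task with a message / exit code

# Each extracted function would be decorated with @task in a tasks.py and
# then run from the shell, e.g.: inv showcov
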
Example #2
def test(
    ctx,
    module=None,
    targets=None,
    coverage=False,
    build_include=None,
    build_exclude=None,
    verbose=False,
    race=False,
    profile=False,
    fail_on_fmt=False,
    rtloader_root=None,
    python_home_2=None,
    python_home_3=None,
    cpus=0,
    major_version='7',
    python_runtimes='3',
    timeout=120,
    arch="x64",
    cache=True,
    skip_linters=False,
    save_result_json=None,
    rerun_fails=None,
    go_mod="mod",
):
    """
    Run all the tools and tests on the given module and targets.

    A module should be provided as the path to one of the go modules in the repository.

    Targets should be provided as a comma-separated list of relative paths within the given module.
    If targets are provided but no module is set, the main module (".") is used.

    If no module or target is set the tests are run against all modules and targets.

    Example invocations:
        inv test --targets=./pkg/collector/check,./pkg/aggregator --race
        inv test --module=. --race
    """
    if isinstance(module, str):
        # when this function is called from the command line, targets are passed
        # as comma separated tokens in a string
        if isinstance(targets, str):
            modules = [GoModule(module, targets=targets.split(','))]
        else:
            modules = [m for m in DEFAULT_MODULES.values() if m.path == module]
    elif isinstance(targets, str):
        modules = [GoModule(".", targets=targets.split(','))]
    else:
        print("Using default modules and targets")
        modules = DEFAULT_MODULES.values()

    build_include = (get_default_build_tags(build="test-with-process-tags",
                                            arch=arch) if build_include is None
                     else filter_incompatible_tags(build_include.split(","),
                                                   arch=arch))
    build_exclude = [] if build_exclude is None else build_exclude.split(",")
    build_tags = get_build_tags(build_include, build_exclude)

    timeout = int(timeout)

    # explicitly run these tasks instead of using pre-tasks so we can
    # pass the `target` param (pre-tasks are invoked without parameters)
    print("--- go generating:")
    generate(ctx)

    if skip_linters:
        print("--- [skipping Go linters]")
    else:
        # Until all packages whitelisted in .golangci.yml are fixed and removed
        # from the 'skip-dirs' list we need to keep using the old functions that
        # lint without build flags (linting some files is better than no linting).
        print("--- Vetting and linting (legacy):")
        for module in modules:
            print("----- Module '{}'".format(module.full_path()))
            if not module.condition():
                print("----- Skipped")
                continue

            with ctx.cd(module.full_path()):
                vet(ctx,
                    targets=module.targets,
                    rtloader_root=rtloader_root,
                    build_tags=build_tags,
                    arch=arch)
                fmt(ctx, targets=module.targets, fail_on_fmt=fail_on_fmt)
                lint(ctx, targets=module.targets)
                misspell(ctx, targets=module.targets)
                ineffassign(ctx, targets=module.targets)
                staticcheck(ctx,
                            targets=module.targets,
                            build_tags=build_tags,
                            arch=arch)

        # for now we only run golangci_lint on Unix as the Windows env needs more work
        if sys.platform != 'win32':
            print("--- golangci_lint:")
            for module in modules:
                print("----- Module '{}'".format(module.full_path()))
                if not module.condition():
                    print("----- Skipped")
                    continue

                with ctx.cd(module.full_path()):
                    golangci_lint(ctx,
                                  targets=module.targets,
                                  rtloader_root=rtloader_root,
                                  build_tags=build_tags,
                                  arch=arch)

    with open(PROFILE_COV, "w") as f_cov:
        f_cov.write("mode: count")

    ldflags, gcflags, env = get_build_flags(
        ctx,
        rtloader_root=rtloader_root,
        python_home_2=python_home_2,
        python_home_3=python_home_3,
        major_version=major_version,
        python_runtimes=python_runtimes,
    )

    if sys.platform == 'win32':
        env['CGO_LDFLAGS'] += ' -Wl,--allow-multiple-definition'

    if profile:
        test_profiler = TestProfiler()
    else:
        test_profiler = None  # Use stdout

    race_opt = ""
    covermode_opt = ""
    build_cpus_opt = ""
    if cpus:
        build_cpus_opt = "-p {}".format(cpus)
    if race:
        # race doesn't appear to be supported on non-x64 platforms
        if arch == "x86":
            print(
                "\n -- Warning... disabling race test, not supported on this platform --\n"
            )
        else:
            race_opt = "-race"

        # Needed to fix an issue when using -race + gcc 10.x on Windows
        # https://github.com/bazelbuild/rules_go/issues/2614
        if sys.platform == 'win32':
            ldflags += " -linkmode=external"

    if coverage:
        if race:
            # atomic is quite expensive but it's the only way to run
            # both the coverage and the race detector at the same time
            # without getting false positives from the cover counter
            covermode_opt = "-covermode=atomic"
        else:
            covermode_opt = "-covermode=count"

    print("\n--- Running unit tests:")

    coverprofile = ""
    if coverage:
        coverprofile = "-coverprofile={}".format(PROFILE_COV)

    nocache = '-count=1' if not cache else ''

    build_tags.append("test")
    TMP_JSON = 'tmp.json'
    if save_result_json and os.path.isfile(save_result_json):
        # Remove existing file since we append to it.
        # We don't need to do that for TMP_JSON since gotestsum overwrites the output.
        print("Removing existing '{}' file".format(save_result_json))
        os.remove(save_result_json)

    cmd = 'gotestsum {json_flag} --format pkgname {rerun_fails} --packages="{packages}" -- {verbose} -mod={go_mod} -vet=off -timeout {timeout}s -tags "{go_build_tags}" -gcflags="{gcflags}" '
    cmd += '-ldflags="{ldflags}" {build_cpus} {race_opt} -short {covermode_opt} {coverprofile} {nocache}'
    args = {
        "go_mod": go_mod,
        "go_build_tags": " ".join(build_tags),
        "gcflags": gcflags,
        "ldflags": ldflags,
        "race_opt": race_opt,
        "build_cpus": build_cpus_opt,
        "covermode_opt": covermode_opt,
        "coverprofile": coverprofile,
        "timeout": timeout,
        "verbose": '-v' if verbose else '',
        "nocache": nocache,
        "json_flag": '--jsonfile "{}" '.format(TMP_JSON) if save_result_json else "",
        "rerun_fails": "--rerun-fails={}".format(rerun_fails) if rerun_fails else "",
    }

    failed_modules = []
    for module in modules:
        print("----- Module '{}'".format(module.full_path()))
        if not module.condition():
            print("----- Skipped")
            continue

        with ctx.cd(module.full_path()):
            res = ctx.run(
                cmd.format(packages=' '.join(
                    "{}/...".format(t) if not t.endswith("/...") else t
                    for t in module.targets),
                           **args),
                env=env,
                out_stream=test_profiler,
                warn=True,
            )

        if res.exited is None or res.exited > 0:
            failed_modules.append(module.full_path())

        if save_result_json:
            with open(save_result_json, 'ab') as json_file, open(
                    os.path.join(module.full_path(), TMP_JSON),
                    'rb') as module_file:
                json_file.write(module_file.read())

    if failed_modules:
        # Exit if any of the modules failed
        raise Exit(
            code=1,
            message="Unit tests failed in the following modules: {}".format(
                ', '.join(failed_modules)))

    if coverage:
        print("\n--- Test coverage:")
        ctx.run("go tool cover -func {}".format(PROFILE_COV))

    if profile:
        print("\n--- Top 15 packages sorted by run time:")
        test_profiler.print_sorted(15)
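
The test task relies on a GoModule helper defined elsewhere in the repository. A hedged stand-in for the minimal interface the code above exercises (path, targets, condition(), full_path()); everything beyond those members is an assumption:

import os

class GoModule:
    # Sketch of the interface used above, not the real class.
    def __init__(self, path, targets=None, condition=lambda: True):
        self.path = path                 # module path relative to the repo root
        self.targets = targets or ["."]  # packages to test, relative to the module
        self.condition = condition       # callable gating whether the module runs

    def full_path(self):
        # absolute path of the module, resolved against the current directory
        return os.path.abspath(self.path)
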
Example #3
def lint_releasenote(ctx):
    """
    Lint release notes with Reno
    """

    # checking if a releasenote has been added/changed
    pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
    if pr_url:
        import requests
        pr_id = pr_url.rsplit('/')[-1]

        # first check 'changelog/no-changelog' label
        res = requests.get(
            "https://api.github.com/repos/DataDog/datadog-agent/issues/{}".
            format(pr_id))
        issue = res.json()
        if any(label['name'] == 'changelog/no-changelog'
               for label in issue.get('labels', [])):
            print(
                "'changelog/no-changelog' label found on the PR: skipping linting"
            )
            return

        # Then check that at least one note was touched by the PR
        url = "https://api.github.com/repos/DataDog/datadog-agent/pulls/{}/files".format(
            pr_id)
        # traverse paginated github response
        while True:
            res = requests.get(url)
            files = res.json()
            if any([f['filename'].startswith("releasenotes/notes/") or \
                    f['filename'].startswith("releasenotes-dca/notes/") for f in files]):
                break

            if 'next' in res.links:
                url = res.links['next']['url']
            else:
                print("Error: No releasenote was found for this PR. Please add one using 'reno'"\
                      ", or apply the label 'changelog/no-changelog' to the PR.")
                raise Exit(code=1)

    # The PR has not been created yet, let's compare with master (the usual base branch of the future PR)
    else:
        branch = os.environ.get("CIRCLE_BRANCH")
        if branch is None:
            print("No branch found, skipping reno linting")
        else:
            if re.match(r".*/.*", branch) is None:
                print(
                    "{} is not a feature branch, skipping reno linting".format(
                        branch))
            else:
                import requests

                # Then check that in the diff with master, at least one note was touched
                url = "https://api.github.com/repos/DataDog/datadog-agent/compare/master...{}".format(
                    branch)
                # traverse paginated github response
                while True:
                    res = requests.get(url)
                    files = res.json().get("files", {})
                    if any([f['filename'].startswith("releasenotes/notes/") or \
                            f['filename'].startswith("releasenotes-dca/notes/") for f in files]):
                        break

                    if 'next' in res.links:
                        url = res.links['next']['url']
                    else:
                        print("Error: No releasenote was found for this PR. Please add one using 'reno'"\
                              ", or apply the label 'changelog/no-changelog' to the PR.")
                        raise Exit(code=1)

    ctx.run("reno lint")
Example #4
def bgp_dev_env(ip_family):
    dev_env_dir = os.getcwd() + "/dev-env/bgp"
    frr_volume_dir = dev_env_dir + "/frr-volume"

    # TODO -- The IP address handling will need updates to add support for IPv6

    # We need the IPs for each Node in the cluster to place them in the BGP
    # router configuration file (bgpd.conf). Each Node will peer with this
    # router.
    node_ips = run(
        "kubectl get nodes -o jsonpath='{.items[*].status.addresses"
        "[?(@.type==\"InternalIP\")].address}{\"\\n\"}'",
        echo=True)
    node_ips = node_ips.stdout.strip().split()
    if len(node_ips) != 3:
        raise Exit(message='Expected 3 nodes, got %d' % len(node_ips))

    # Create a new directory that will be used as the config volume for frr.
    try:
        # sudo because past docker runs will have changed ownership of this dir
        run('sudo rm -rf "%s"' % frr_volume_dir)
        os.mkdir(frr_volume_dir)
    except FileExistsError:
        pass
    except Exception as e:
        raise Exit(message='Failed to create frr-volume directory: %s' %
                   str(e))

    # These config files are static, so we copy them straight in.
    copy_files = ('zebra.conf', 'daemons', 'vtysh.conf')
    for f in copy_files:
        shutil.copyfile("%s/frr/%s" % (dev_env_dir, f),
                        "%s/%s" % (frr_volume_dir, f))

    # bgpd.conf is created from a template so that we can include the current
    # Node IPs.
    with open("%s/frr/bgpd.conf.tmpl" % dev_env_dir, 'r') as f:
        bgpd_config = "! THIS FILE IS AUTOGENERATED\n" + f.read()
        bgpd_config = bgpd_config.replace("PROTOCOL", ip_family)
    for n in range(0, len(node_ips)):
        bgpd_config = bgpd_config.replace("NODE%d_IP" % n, node_ips[n])
    with open("%s/bgpd.conf" % frr_volume_dir, 'w') as f:
        f.write(bgpd_config)

    # Run a BGP router in a container for all of the speakers to peer with.
    run(
        'for frr in $(docker ps -a -f name=frr --format {{.Names}}) ; do '
        '    docker rm -f $frr ; '
        'done',
        echo=True)
    run("docker run -d --privileged --network kind --rm --name frr --volume %s:/etc/frr "
        "frrouting/frr:v7.5.1" % frr_volume_dir,
        echo=True)

    if ip_family == "ipv4":
        peer_address = run(
            'docker inspect -f "{{ '
            'range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" frr',
            echo=True)
    elif ip_family == "ipv6":
        peer_address = run(
            'docker inspect -f "{{ '
            'range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}" frr',
            echo=True)
    else:
        raise Exit(message='Unsupported ip address family %s' % ip_family)

    with open("%s/config.yaml.tmpl" % dev_env_dir, 'r') as f:
        mlb_config = "# THIS FILE IS AUTOGENERATED\n" + f.read()
    mlb_config = mlb_config.replace("IP_PEER_ADDRESS",
                                    peer_address.stdout.strip())
    with open("%s/config.yaml" % dev_env_dir, 'w') as f:
        f.write(mlb_config)
    # Apply the MetalLB ConfigMap
    run("kubectl apply -f %s/config.yaml" % dev_env_dir)
Example #5
def e2etest(ctx,
            name="kind",
            export=None,
            kubeconfig=None,
            system_namespaces="kube-system,metallb-system",
            service_pod_port=80,
            skip_docker=False,
            focus="",
            skip="",
            ipv4_service_range=None,
            ipv6_service_range=None):
    """Run E2E tests against development cluster."""
    if skip_docker:
        opt_skip_docker = "--skip-docker"
    else:
        opt_skip_docker = ""

    ginkgo_skip = ""
    if skip:
        ginkgo_skip = "--ginkgo.skip=" + skip

    ginkgo_focus = ""
    if focus:
        ginkgo_focus = "--ginkgo.focus=" + focus

    if kubeconfig is None:
        validate_kind_version()
        clusters = run("kind get clusters",
                       hide=True).stdout.strip().splitlines()
        if name in clusters:
            kubeconfig_file = tempfile.NamedTemporaryFile()
            kubeconfig = kubeconfig_file.name
            run("kind export kubeconfig --name={} --kubeconfig={}".format(
                name, kubeconfig),
                pty=True,
                echo=True)
        else:
            raise Exit(message="Unable to find cluster named: {}".format(name))
    else:
        os.environ['KUBECONFIG'] = kubeconfig

    namespaces = system_namespaces.replace(' ', '').split(',')
    for ns in namespaces:
        run("kubectl -n {} wait --for=condition=Ready --all pods --timeout 300s"
            .format(ns),
            hide=True)

    if ipv4_service_range is None:
        ipv4_service_range = get_service_range(4)

    if ipv6_service_range is None:
        ipv6_service_range = get_service_range(6)

    testrun = run(
        "cd `git rev-parse --show-toplevel`/e2etest &&"
        "go test -timeout 30m {} {} --provider=local --kubeconfig={} --service-pod-port={} -ipv4-service-range={} -ipv6-service-range={} {}"
        .format(ginkgo_focus, ginkgo_skip, kubeconfig, service_pod_port,
                ipv4_service_range, ipv6_service_range, opt_skip_docker),
        warn="True")

    if export is not None:
        run("kind export logs {}".format(export))

    if testrun.failed:
        raise Exit(message="E2E tests failed", code=testrun.return_code)
Example #6
def create_rc(ctx,
              major_versions="6,7",
              integration_version=None,
              omnibus_software_version=None,
              jmxfetch_version=None,
              omnibus_ruby_version=None):
    """
    Creates new entry in the release.json file for a new RC.
    Looks at the current release.json to find the highest version released and creates a new RC based on that.
    """

    list_major_versions = major_versions.split(",")
    if len(list_major_versions) < 1:
        print("Specify at least one major version to release")
        raise Exit(code=1)

    # use a list, not a lazy map object: it is iterated more than once below
    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = 0
    for version in list_major_versions:
        if int(version) > highest_major:
            highest_major = version

    print("Creating RC for agent version(s) {}".format(list_major_versions))

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    version_re = re.compile('(\\d+)[.](\\d+)[.](\\d+)(-rc\\.(\\d+))?')

    auth = "https://{}:x-oauth-basic@".format(github_token)

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version, highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    highest_version["rc"] = highest_version["rc"] + 1
    new_rc = _stringify_version(highest_version)
    print("Creating {}".format(new_rc))

    if not integration_version:
        integration_version = _get_highest_repo_version(
            auth, "integrations-core", highest_version, version_re)
        integration_version = _stringify_version(integration_version)
    print("Integration-Core's tag is {}".format(integration_version))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            auth, "omnibus-software", highest_version, version_re)
        omnibus_software_version = _stringify_version(omnibus_software_version)
    print("Omnibus-Software's tag is {}".format(omnibus_software_version))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(auth, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
        jmxfetch_version = _stringify_version(jmxfetch_version)
    print("Jmxfetch's tag is {}".format(jmxfetch_version))

    if not omnibus_ruby_version:
        print("ERROR: No omnibus_ruby_version found")
        raise Exit(code=1)

    _save_release_json(release_json, list_major_versions, highest_version,
                       integration_version, omnibus_software_version,
                       omnibus_ruby_version, jmxfetch_version)
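
The release tasks above handle versions as dicts and round-trip them through helpers such as _stringify_version, whose definition is not part of the extract. A hedged sketch consistent with how the dict is used (the keys follow the code; the implementation itself is an assumption):

def _stringify_version(version):
    # Render {'major': 7, 'minor': 26, 'patch': 0, 'rc': 2} as '7.26.0-rc.2',
    # or '7.26.0' when rc is 0. Sketch, not the project's helper.
    s = "{}.{}.{}".format(version["major"], version["minor"], version["patch"])
    if version.get("rc"):
        s += "-rc.{}".format(version["rc"])
    return s
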
Example #7
def trigger(_,
            git_ref="master",
            release_version_6="nightly",
            release_version_7="nightly-a7",
            repo_branch="nightly"):
    """
    Trigger a deploy pipeline on the given git ref.
    The --release-version-6 and --release-version-7 options indicate which release.json entries are used.
    To not build Agent 6, set --release-version-6 "". To not build Agent 7, set --release-version-7 "".
    The --repo-branch option indicates which branch of the staging repository the packages will be deployed to.

    Example:
    inv pipeline.trigger --git-ref 7.22.0 --release-version-6 "6.22.0" --release-version-7 "7.22.0" --repo-branch "stable"
    """

    # Create a Gitlab instance and make sure we have access.
    project_name = "DataDog/datadog-agent"
    gitlab = Gitlab()
    gitlab.test_project_found(project_name)

    # If git_ref matches the v7 pattern and release_version_6 is not empty, make sure Gitlab has the v6 tag.
    # If git_ref matches the v6 pattern and release_version_7 is not empty, make sure Gitlab has the v7 tag.
    # The v7 version pattern should match both 7.12.24-rc2 and 7.12.34.
    v7_pattern = r'^7\.(\d+\.\d+)(-.+|)$'
    v6_pattern = r'^6\.(\d+\.\d+)(-.+|)$'

    match = re.match(v7_pattern, git_ref)

    if release_version_6 and match:
        # release_version_6 is not empty and git_ref matches v7 pattern, construct v6 tag and check.
        tag_name = "6." + "".join(match.groups())
        gitlab_tag = gitlab.find_tag(project_name, tag_name)

        if ("name" not in gitlab_tag) or gitlab_tag["name"] != tag_name:
            print(
                "Cannot find GitLab v6 tag {} while trying to build git ref {}"
                .format(tag_name, git_ref))
            raise Exit(code=1)

        print("Successfully cross checked v6 tag {} and git ref {}".format(
            tag_name, git_ref))
    else:
        match = re.match(v6_pattern, git_ref)

        if release_version_7 and match:
            # release_version_7 is not empty and git_ref matches v6 pattern, construct v7 tag and check.
            tag_name = "7." + "".join(match.groups())
            gitlab_tag = gitlab.find_tag(project_name, tag_name)

            if ("name" not in gitlab_tag) or gitlab_tag["name"] != tag_name:
                print(
                    "Cannot find GitLab v7 tag {} while trying to build git ref {}"
                    .format(tag_name, git_ref))
                raise Exit(code=1)

            print("Successfully cross checked v7 tag {} and git ref {}".format(
                tag_name, git_ref))

    pipeline_id = trigger_agent_pipeline(gitlab,
                                         project_name,
                                         git_ref,
                                         release_version_6,
                                         release_version_7,
                                         repo_branch,
                                         deploy=True)
    wait_for_pipeline(gitlab, project_name, pipeline_id)
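
The cross-check above leans on the capture groups of the version patterns: group 1 is the minor.patch part and group 2 the optional pre-release suffix, so joining the groups under the other major yields the mirrored tag. A quick standalone illustration:

import re

v7_pattern = r'^7\.(\d+\.\d+)(-.+|)$'
match = re.match(v7_pattern, "7.12.24-rc2")
# match.groups() == ('12.24', '-rc2'), so the mirrored v6 tag is:
assert "6." + "".join(match.groups()) == "6.12.24-rc2"
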
Example #8
    def make_request(self,
                     path,
                     headers=None,
                     data=None,
                     json=False,
                     stream=False,
                     method=None):
        """
        Utility to make a request to the Gitlab API.
        """
        import requests

        url = self.BASE_URL + path

        headers = dict(headers or [])
        headers["PRIVATE-TOKEN"] = self.api_token

        # TODO: Use the param argument of requests instead of handling URL params
        # manually
        try:
            if data or method == "POST":
                r = requests.post(url,
                                  headers=headers,
                                  data=data,
                                  stream=stream)
            else:
                r = requests.get(url, headers=headers, stream=stream)
            if r.status_code == 401:
                print(
                    "HTTP 401: Your GITLAB_TOKEN may have expired. You can "
                    "check and refresh it at "
                    "https://gitlab.ddbuild.io/profile/personal_access_tokens")
                print("Gitlab says: {}".format(r.json()["error_description"]))
                raise Exit(code=1)
        except requests.exceptions.Timeout:
            print("Connection to GitLab ({}) timed out.".format(url))
            raise Exit(code=1)
        except requests.exceptions.RequestException as e:
            m = errno_regex.match(str(e))
            if not m:
                print("Unknown error raised connecting to {}: {}".format(
                    url, e))
                raise Exit(code=1)

            # Parse errno to give a better explanation
            # Requests doesn't have granularity at the level we want:
            # http://docs.python-requests.org/en/master/_modules/requests/exceptions/
            errno_code = int(m.group(1))
            message = m.group(2)

            if errno_code == errno.ENOEXEC:
                print("Error resolving {}: {}".format(url, message))
            elif errno_code == errno.ECONNREFUSED:
                print("Connection to Gitlab ({}) refused".format(url))
            else:
                print("Error while connecting to {}: {}".format(url, str(e)))
            raise Exit(code=1)
        if json:
            return r.json()
        if stream:
            return r
        return r.text
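
errno_regex is defined outside this extract; based on how groups 1 and 2 are consumed above, it presumably captures the errno number and the trailing message. A hedged guess at its shape (an assumption, not the project's definition):

import re

# Hypothetical definition consistent with m.group(1) / m.group(2) above.
errno_regex = re.compile(r".*\[Errno (\d+)\] (.*)")

m = errno_regex.match("HTTPSConnectionPool: [Errno 111] Connection refused")
assert m and m.group(1) == "111" and m.group(2) == "Connection refused"
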
Example #9
def generate_doc(ctx):
    """
    Generates the doxygen documentation, puts it in rtloader/doc, and logs doc errors/warnings.
    (rtloader/doc is hardcoded right now in the Doxyfile, as doxygen cannot take the output directory as argument)
    Logs all errors and warnings to <rtloader_path>/doxygen/errors.log and to the standard output.
    Returns 1 if errors were found (by default, doxygen returns 0 even if errors are present).
    """
    rtloader_path = get_rtloader_path()

    # Clean up Doxyfile options that are not supported on the version of Doxygen used
    result = ctx.run(f"doxygen -u '{rtloader_path}/doxygen/Doxyfile'",
                     warn=True)
    if result.exited != 0:
        print("Fatal error encountered while trying to clean up the Doxyfile.")
        raise Exit(code=result.exited)

    # doxygen puts both errors and warnings in stderr
    result = ctx.run(
        "doxygen '{0}/doxygen/Doxyfile' 2>'{0}/doxygen/errors.log'".format(
            rtloader_path),
        warn=True  # noqa: FS002
    )

    if result.exited != 0:
        print(
            "Fatal error encountered while trying to generate documentation.")
        print(f"See {rtloader_path}/doxygen/errors.log for details.")
        raise Exit(code=result.exited)

    errors, warnings = [], []

    def flushentry(entry):
        if 'error:' in entry:
            errors.append(entry)
        elif 'warning:' in entry:
            warnings.append(entry)

    # Separate warnings from errors
    with open(f"{rtloader_path}/doxygen/errors.log") as errfile:
        currententry = ""
        for line in errfile.readlines():
            if 'error:' in line or 'warning:' in line:  # We get to a new entry, flush current one
                flushentry(currententry)
                currententry = ""

            currententry += line

        flushentry(currententry)  # Flush last entry

        print("\033[93m{}\033[0m".format("\n".join(warnings)))  # noqa: FS002
        print("\033[91m{}\033[0m".format("\n".join(errors)))  # noqa: FS002
        print(
            f"Found {len(errors)} error(s) and {len(warnings)} warning(s) while generating documentation."
        )
        print(
            f"The full list is available in {rtloader_path}/doxygen/errors.log."
        )

    # Exit with non-zero code if an error has been found
    if len(errors) > 0:
        raise Exit(code=1)
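
The parsing loop above groups multi-line doxygen messages: any line containing 'error:' or 'warning:' starts a new entry, and continuation lines are appended until the next marker. A toy run of the same grouping logic:

log = [
    "foo.h:10: warning: missing docs\n",
    "  (context continued on a second line)\n",
    "bar.cpp:3: error: something broke\n",
]
errors, warnings, current = [], [], ""

def flush(entry):
    if 'error:' in entry:
        errors.append(entry)
    elif 'warning:' in entry:
        warnings.append(entry)

for line in log:
    if 'error:' in line or 'warning:' in line:
        flush(current)  # a new entry starts, flush the previous one
        current = ""
    current += line
flush(current)  # flush the last entry

assert len(warnings) == 1 and len(errors) == 1
assert "(context" in warnings[0]  # the continuation line stayed attached
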
Example #10
def deps(ctx, no_checks=False, core_dir=None, verbose=False, android=False):
    """
    Setup Go dependencies
    """
    verbosity = ' -v' if verbose else ''
    deps = get_deps('deps')
    order = deps.get("order", deps.keys())
    for dependency in order:
        tool = deps.get(dependency)
        if not tool:
            print("Malformed bootstrap JSON, dependency {} not found".format(
                dependency))
            raise Exit(code=1)
        print("processing checkout tool {}".format(dependency))
        process_deps(ctx,
                     dependency,
                     tool.get('version'),
                     tool.get('type'),
                     'checkout',
                     verbose=verbose)

    order = deps.get("order", deps.keys())
    for dependency in order:
        tool = deps.get(dependency)
        print("processing get tool {}".format(dependency))
        process_deps(ctx,
                     dependency,
                     tool.get('version'),
                     tool.get('type'),
                     'install',
                     verbose=verbose)

    if android:
        ndkhome = os.environ.get('ANDROID_NDK_HOME')
        if not ndkhome:
            print("set ANDROID_NDK_HOME to build android")
            raise Exit(code=1)

        cmd = "gomobile init -ndk {}".format(ndkhome)
        print("gomobile command {}".format(cmd))
        ctx.run(cmd)

    # source level deps
    print("calling dep ensure")
    start = datetime.datetime.now()
    ctx.run("dep ensure{}".format(verbosity))
    dep_done = datetime.datetime.now()

    # If github.com/DataDog/datadog-agent gets vendored too - nuke it
    #
    # This may happen as a result of having to introduce DEPPROJECTROOT
    # in our builders to get around a known-issue with go dep, and the
    # strange GOPATH situation in our builders.
    #
    # This is only a workaround, we should eliminate the need to resort
    # to DEPPROJECTROOT.
    if os.path.exists('vendor/github.com/DataDog/datadog-agent'):
        print("Removing vendored github.com/DataDog/datadog-agent")
        shutil.rmtree('vendor/github.com/DataDog/datadog-agent')

    # make sure PSUTIL is gone on windows; the dep ensure above will vendor it
    # in because it's necessary on other platforms
    if not android and sys.platform == 'win32':
        print("Removing PSUTIL on Windows")
        ctx.run("rd /s/q vendor\\github.com\\shirou\\gopsutil")

    # Make sure that golang.org/x/mobile is deleted.  It will get vendored in
    # because we use it, and there's no way to exclude; however, we must use
    # the version from $GOPATH
    if os.path.exists('vendor/golang.org/x/mobile'):
        print("Removing vendored golang.org/x/mobile")
        shutil.rmtree('vendor/golang.org/x/mobile')

    checks_start = datetime.datetime.now()
    if not no_checks:
        verbosity = 'v' if verbose else 'q'
        core_dir = core_dir or os.getenv('DD_CORE_DIR')

        if core_dir:
            checks_base = os.path.join(os.path.abspath(core_dir),
                                       'datadog_checks_base')
            ctx.run('pip install -{} -e {}'.format(verbosity, checks_base))
            ctx.run('pip install -{} -r {}'.format(
                verbosity, os.path.join(checks_base, 'requirements.in')))
        else:
            core_dir = os.path.join(os.getcwd(), 'vendor', 'integrations-core')
            checks_base = os.path.join(core_dir, 'datadog_checks_base')
            if not os.path.isdir(core_dir):
                ctx.run(
                    'git clone -{} https://github.com/DataDog/integrations-core {}'
                    .format(verbosity, core_dir))
            ctx.run('pip install -{} {}'.format(verbosity, checks_base))
            ctx.run('pip install -{} -r {}'.format(
                verbosity, os.path.join(checks_base, 'requirements.in')))
    checks_done = datetime.datetime.now()

    print("dep ensure, elapsed:    {}".format(dep_done - start))
    print("checks install elapsed: {}".format(checks_done - checks_start))
Example #11
def deps(
    ctx,
    core_dir=None,
    verbose=False,
    android=False,
    dep_vendor_only=False,
    no_bootstrap=False,
    no_dep_ensure=False,
):
    """
    Setup Go dependencies
    """
    if not no_bootstrap:
        deps = get_deps('deps')
        order = deps.get("order", deps.keys())
        for dependency in order:
            tool = deps.get(dependency)
            if not tool:
                print(
                    "Malformed bootstrap JSON, dependency {} not found".format(
                        dependency))
                raise Exit(code=1)
            print("processing checkout tool {}".format(dependency))
            process_deps(ctx,
                         dependency,
                         tool.get('version'),
                         tool.get('type'),
                         'checkout',
                         verbose=verbose)

        order = deps.get("order", deps.keys())
        for dependency in order:
            tool = deps.get(dependency)
            if tool.get('install', True):
                print("processing get tool {}".format(dependency))
                process_deps(
                    ctx,
                    dependency,
                    tool.get('version'),
                    tool.get('type'),
                    'install',
                    cmd=tool.get('cmd'),
                    verbose=verbose,
                )

    if android:
        ndkhome = os.environ.get('ANDROID_NDK_HOME')
        if not ndkhome:
            print("set ANDROID_NDK_HOME to build android")
            raise Exit(code=1)

        cmd = "gomobile init -ndk {}".format(ndkhome)
        print("gomobile command {}".format(cmd))
        ctx.run(cmd)

    if not no_dep_ensure:
        # source level deps
        print("calling go mod vendor")
        start = datetime.datetime.now()
        verbosity = ' -v' if verbose else ''
        ctx.run("go mod vendor{}".format(verbosity))
        # use modvendor to copy missing files dependencies
        ctx.run('{}/bin/modvendor -copy="**/*.c **/*.h **/*.proto"{}'.format(
            get_gopath(ctx), verbosity))
        dep_done = datetime.datetime.now()

        # If github.com/DataDog/datadog-agent gets vendored too - nuke it
        #
        # This may happen as a result of having to introduce DEPPROJECTROOT
        # in our builders to get around a known-issue with go dep, and the
        # strange GOPATH situation in our builders.
        #
        # This is only a workaround, we should eliminate the need to resort
        # to DEPPROJECTROOT.
        if os.path.exists('vendor/github.com/DataDog/datadog-agent'):
            print("Removing vendored github.com/DataDog/datadog-agent")
            shutil.rmtree('vendor/github.com/DataDog/datadog-agent')

        # make sure PSUTIL is gone on windows; the go mod above will vendor it
        # in because it's necessary on other platforms
        if not android and sys.platform == 'win32':
            print("Removing PSUTIL on Windows")
            ctx.run("rd /s/q vendor\\github.com\\shirou\\gopsutil")

        # Make sure that golang.org/x/mobile is deleted.  It will get vendored in
        # because we use it, and there's no way to exclude; however, we must use
        # the version from $GOPATH
        if os.path.exists('vendor/golang.org/x/mobile'):
            print("Removing vendored golang.org/x/mobile")
            shutil.rmtree('vendor/golang.org/x/mobile')

    if not no_dep_ensure:
        print("go mod vendor, elapsed: {}".format(dep_done - start))
Example #12
def do_update_changelog(ctx, target, cur_version, new_version, dry_run=False):
    """
    Actually perform the operations needed to update the changelog, this
    method is supposed to be used by other tasks and not directly.
    """
    # get the name of the current release tag
    target_tag = get_release_tag_string(target, cur_version)

    # get the diff from HEAD
    diff_lines = get_diff(ctx, target, target_tag)

    # for each PR get the title, we'll use it to populate the changelog
    pr_numbers = parse_pr_numbers(diff_lines)
    print("Found {} PRs merged since tag: {}".format(len(pr_numbers),
                                                     target_tag))
    entries = []
    for pr_num in pr_numbers:
        try:
            payload = get_pr(pr_num)
        except Exception as e:
            sys.stderr.write("Unable to fetch info for PR #{}\n: {}".format(
                pr_num, e))
            continue

        changelog_labels = get_changelog_types(payload)

        if not changelog_labels:
            raise Exit(
                "No valid changelog labels found attached to PR #{}, please add one"
                .format(pr_num))
        elif len(changelog_labels) > 1:
            raise Exit(
                "Multiple changelog labels found attached to PR #{}, please use only one"
                .format(pr_num))

        changelog_type = changelog_labels[0]
        if changelog_type == CHANGELOG_TYPE_NONE:
            # No changelog entry for this PR
            print("Skipping PR #{} from changelog".format(pr_num))
            continue

        author = payload.get('user', {}).get('login')
        author_url = payload.get('user', {}).get('html_url')
        title = '[{}] {}'.format(changelog_type, payload.get('title'))

        entry = ChangelogEntry(pr_num, title, payload.get('html_url'), author,
                               author_url, from_contributor(payload))

        entries.append(entry)

    # store the new changelog in memory
    new_entry = StringIO()

    # the header contains version and date
    header = "### {} / {}\n".format(new_version,
                                    datetime.now().strftime("%Y-%m-%d"))
    new_entry.write(header)

    # one bullet point for each PR
    new_entry.write("\n")
    for entry in entries:
        thanknote = ""
        if entry.from_contributor:
            thanknote = " Thanks [{}]({}).".format(entry.author,
                                                   entry.author_url)
        new_entry.write("* {}. See [#{}]({}).{}\n".format(
            entry.title, entry.number, entry.url, thanknote))
    new_entry.write("\n")

    # read the old contents
    changelog_path = os.path.join(ROOT, target, "CHANGELOG.md")
    with open(changelog_path, 'r') as f:
        old = f.readlines()

    # write the new changelog in memory
    changelog = StringIO()

    # preserve the title
    changelog.write("".join(old[:2]))

    # prepend the new changelog to the old contents
    # make the command idempotent
    if header not in old:
        changelog.write(new_entry.getvalue())

    # append the rest of the old changelog
    changelog.write("".join(old[2:]))

    # print on the standard out in case of a dry run
    if dry_run:
        print(changelog.getvalue())
        sys.exit(0)

    # overwrite the old changelog
    with open(changelog_path, 'w') as f:
        f.write(changelog.getvalue())
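
ChangelogEntry is constructed positionally above and read back by attribute; a minimal stand-in matching that usage (a sketch, not the project's class):

from collections import namedtuple

# Field order matches the positional construction in the loop above.
ChangelogEntry = namedtuple(
    "ChangelogEntry",
    ["number", "title", "url", "author", "author_url", "from_contributor"])
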
Example #13
def build(ctx, vstudio_root=None, arch="All", major_version='7', debug=False):
    """
    Build the custom action library for the .NET Tracer
    """

    if sys.platform != 'win32':
        print("DotNet Tracer build is only for Win32")
        raise Exit(code=1)

    print("arch is {}".format(arch))
    this_dir = os.getcwd()
    solution_dir = os.getcwd()
    tracer_home = os.path.join(solution_dir, "tracer_home")
    output_path = os.path.join(solution_dir, "output")

    pfxfile = None
    pfxpass = None
    remove_pfx = True
    cmd = ""
    configuration = "Release"
    if debug:
        configuration = "Debug"

    ctx.run("nuget restore {solution_dir}\\Datadog.Trace.Minimal.sln".format(
        solution_dir=solution_dir))

    cmd = "msbuild {solution_dir}\\Datadog.Trace.proj /t:{target} /p:Platform={arch} /p:Configuration={config} /p:TracerHomeDirectory={tracer_home} /p:RunWixToolsOutOfProc=true /p:MsiOutputPath={output_path}"

    ## compile all binaries
    run_cmd = cmd.format(solution_dir=solution_dir,
                         target="CreateHomeDirectory",
                         arch=arch,
                         config=configuration,
                         tracer_home=tracer_home,
                         output_path=output_path)
    ctx.run(run_cmd)

    ## pull the signing cert and password
    try:
        if sys.platform == 'win32' and os.environ.get('SIGN_WINDOWS'):
            # get certificate and password from ssm
            pfxfile = get_signing_cert(ctx)
            pfxpass = get_pfx_pass(ctx)
        else:
            remove_pfx = False

        ## sign all dll files
        for f in glob.iglob("{tracer_home}/**/datadog*.dll".format(
                tracer_home=tracer_home),
                            recursive=True):
            sign_binary(ctx, f, pfxfile, pfxpass)

        ## build the msi installers
        run_cmd = cmd.format(solution_dir=solution_dir,
                             target="MsiOnly",
                             arch=arch,
                             config=configuration,
                             tracer_home=tracer_home,
                             output_path=output_path)
        ctx.run(run_cmd)

        ## sign msi installers
        for f in glob.iglob(
                "{output_path}/**/*.msi".format(output_path=output_path),
                recursive=True):
            sign_binary(ctx, f, pfxfile, pfxpass)

        ## build the nuget packages
        cmd = "dotnet pack {options} --output {output_path} --configuration {configuration} -p:Platform=AnyCPU {proj}"
        ctx.run(
            cmd.format(options="--no-build",
                       output_path=output_path,
                       configuration=configuration,
                       proj="src\\Datadog.Trace\\Datadog.Trace.csproj"))
        ctx.run(
            cmd.format(
                options="--no-restore",
                output_path=output_path,
                configuration=configuration,
                proj="src\\Datadog.Trace.OpenTracing\\Datadog.Trace.OpenTracing.csproj"
            ))

        ## sign nuget packages
        for f in glob.iglob(
                "{output_path}/**/*.nupkg".format(output_path=output_path),
                recursive=True):
            sign_nuget(ctx, f, pfxfile, pfxpass)

    except Exception:
        if pfxfile and remove_pfx:
            os.remove(pfxfile)
        raise

    if pfxfile and remove_pfx:
        os.remove(pfxfile)
Example #14
def build(ctx,
          build_dir,
          destination=None,
          target=None,
          scheme=None,
          configuration=None,
          tmp_dir=None,
          project=None,
          workspace=None,
          install_dir=None,
          archive_path=None,
          prefix=None,
          postfix=None):
    '''Build the xp Xcode project using xcodebuild'''

    cmd = ["set -o pipefail && xcrun", "xcodebuild"]

    # prepend overrides such as "CODE_SIGN_IDENTITY= CODE_SIGNING_REQUIRED=NO"
    if prefix:
        cmd.append(prefix)

    # increase concurrent compile tasks number (set to CPU number).
    cmd.append(
        "-IDEBuildOperationMaxNumberOfConcurrentCompileTasks=`sysctl -n hw.ncpu`"
    )

    if project:
        cmd.append("-project '{}'".format(project))
    elif workspace:
        cmd.append("-workspace '{}'".format(workspace))
    else:
        raise Exit("No project or workspace specified!")

    if scheme:
        cmd.append("-scheme '{}'".format(scheme))

        # DerivedData path can only be specified for schemes or tests
        if tmp_dir:
            cmd.append("-derivedDataPath '{}'".format(tmp_dir))
    elif target:
        cmd.append("-target '{}'".format(target))
    else:
        print("No scheme or target specified!")
        raise Exit(code=1)

    if destination:
        cmd.append("-destination '{}'".format(destination))

    if configuration:
        cmd.append("-configuration '{}'".format(configuration))

    if archive_path:
        cmd.append("-archivePath '{}' archive".format(archive_path))
    elif install_dir:
        cmd.append("DSTROOT='{}' install".format(install_dir))

    if postfix:
        cmd.append(postfix)

    if ctx.xcodebuild.xcpretty_for_builds:
        if ctx.run("bundle show xcpretty", pty=True, hide="out", warn=True):
            cmd.append("| bundle exec xcpretty")
        elif ctx.run("which xcpretty", pty=True, hide="out", warn=True):
            cmd.append("| xcpretty")

    with ctx.cd(build_dir):
        cmd = " ".join(cmd)
        print("{}".format(cmd))
        ctx.run(cmd)
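
For reference, a hypothetical call to the task above and the xcodebuild command line it would assemble (names and paths are made up):

# build(ctx, build_dir="ios", workspace="App.xcworkspace",
#       scheme="App", configuration="Release", tmp_dir="build/derived")
#
# set -o pipefail && xcrun xcodebuild \
#   -IDEBuildOperationMaxNumberOfConcurrentCompileTasks=`sysctl -n hw.ncpu` \
#   -workspace 'App.xcworkspace' -scheme 'App' \
#   -derivedDataPath 'build/derived' -configuration 'Release'
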
Example #15
def create_new_version(ctx,
                       major_versions="6,7",
                       integration_version=None,
                       omnibus_software_version=None,
                       jmxfetch_version=None,
                       omnibus_ruby_version=None,
                       ignore_rc_tag=False):
    """
    Creates new entry in the release.json file for a new version.
    """

    list_major_versions = major_versions.split(",")
    if len(list_major_versions) < 1:
        print("Specify at least one major version to release")
        raise Exit(code=1)

    # use a list, not a lazy map object: it is iterated more than once below
    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = 0
    for version in list_major_versions:
        if int(version) > highest_major:
            highest_major = version

    print("Creating new agent version(s) {}".format(list_major_versions))

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    version_re = re.compile('(\\d+)[.](\\d+)[.](\\d+)(-rc\\.(\\d+))?')

    auth = "https://{}:x-oauth-basic@".format(github_token)

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version, highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # Erase RCs
    for major_version in list_major_versions:
        highest_version["major"] = major_version
        rc = highest_version["rc"]
        while highest_version["rc"] > 0:
            # In case we have skipped an RC in the file...
            try:
                release_json.pop(_stringify_version(highest_version))
            finally:
                highest_version["rc"] = highest_version["rc"] - 1
        highest_version["rc"] = rc

    # Tags in other repos are based on the highest major (e.g. for releasing version 6.X.Y and 7.X.Y they will tag only 7.X.Y)
    highest_version["major"] = highest_major

    # We don't want to fetch RC tags
    highest_version["rc"] = 0

    if not integration_version:
        integration_version = _get_highest_repo_version(
            auth, "integrations-core", highest_version, version_re)
        if integration_version is None:
            print(
                "ERROR: No version found for integrations-core - did you create the tag?"
            )
            raise Exit(code=1)
        if integration_version["rc"] != 0:
            print(
                "ERROR: Integration-Core tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on Integration-Core.")
            else:
                print("Aborting.")
                raise Exit(code=1)
        integration_version = _stringify_version(integration_version)
    print("Integration-Core's tag is {}".format(integration_version))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            auth, "omnibus-software", highest_version, version_re)
        if omnibus_software_version is None:
            print(
                "ERROR: No version found for omnibus-software - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_software_version["rc"] != 0:
            print(
                "ERROR: Omnibus-Software tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on Omnibus-Software.")
            else:
                print("Aborting.")
                raise Exit(code=1)
        omnibus_software_version = _stringify_version(omnibus_software_version)
    print("Omnibus-Software's tag is {}".format(omnibus_software_version))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(auth, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
        jmxfetch_version = _stringify_version(jmxfetch_version)
    print("Jmxfetch's tag is {}".format(jmxfetch_version))

    if not omnibus_ruby_version:
        print("ERROR: No omnibus_ruby_version found")
        raise Exit(code=1)

    _save_release_json(release_json, list_major_versions, highest_version,
                       integration_version, omnibus_software_version,
                       omnibus_ruby_version, jmxfetch_version)
Example #16
def check_if_logged_in_to_heroku(c):
    if not local('heroku auth:whoami', warn=True):
        raise Exit(
            'Log-in with the "heroku login" command before running this '
            'command.')
Example #17
def update_changelog(ctx, new_version):
    """
    Quick task to generate the new CHANGELOG using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version))
        raise Exit(code=1)

    # let's avoid losing uncommitted changes with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update_changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep {}".format(new_version))
    except Failure:
        print("Missing '{}' git tag: mandatory to use 'reno'".format(
            new_version))
        raise

    # removing releasenotes from bugfix on the old minor.
    branching_point = "{}.{}.0-devel".format(new_version_int[0],
                                             new_version_int[1])
    previous_minor = "{}.{}".format(new_version_int[0], new_version_int[1] - 1)
    if previous_minor == "7.15":
        previous_minor = "6.15"  # 7.15 is the first release in the 7.x series
    log_result = ctx.run(
        "git log {}...remotes/origin/{}.x --name-only --oneline | \
            grep releasenotes/notes/ || true".format(branching_point,
                                                     previous_minor))
    log_result = log_result.stdout.replace('\n', ' ').strip()
    if len(log_result) > 0:
        ctx.run("git rm --ignore-unmatch {}".format(log_result))

    # generate the new changelog
    ctx.run("reno report \
            --ignore-cache \
            --earliest-version {} \
            --version {} \
            --no-show-source > /tmp/new_changelog.rst".format(
        branching_point, new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # macOS's sed has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # check whether there is a v6 tag on the same v7 tag, if so add the v6 tag to the release title
    v6_tag = ""
    if new_version_int[0] == 7:
        v6_tag = _find_v6_tag(ctx, new_version)
        if v6_tag:
            ctx.run(
                "sed {0} -E 's#^{1}#{1} / {2}#' /tmp/new_changelog.rst".format(
                    sed_i_arg, new_version, v6_tag))
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG.rst".format(sed_i_arg))

    # merging to CHANGELOG.rst
    ctx.run(
        "cat CHANGELOG.rst >> /tmp/new_changelog.rst && mv /tmp/new_changelog.rst CHANGELOG.rst"
    )

    # commit new CHANGELOG
    ctx.run("git add CHANGELOG.rst \
            && git commit -m \"Update CHANGELOG for {}\"".format(new_version))
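
As an aside, the only platform-specific part here is sed's -i flag; the "drop the first four lines" step could equally be done portably in Python, sidestepping the darwin special case. A sketch:

# Portable equivalent of: sed -i -e '1,4d' CHANGELOG.rst
with open("CHANGELOG.rst") as f:
    lines = f.readlines()
with open("CHANGELOG.rst", "w") as f:
    f.writelines(lines[4:])
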
Example #18
def update_installscript_changelog(ctx, new_version):
    """
    Quick task to generate the new CHANGELOG-INSTALLSCRIPT using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version))
        raise Exit(code=1)

    # let's avoid losing uncommitted changes with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update-installscript-changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep installscript-{}".format(new_version))
    except Failure:
        print("Missing 'installscript-{}' git tag: mandatory to use 'reno'".
              format(new_version))
        raise

    # generate the new changelog
    ctx.run("reno --rel-notes-dir releasenotes-installscript report \
            --ignore-cache \
            --version installscript-{} \
            --no-show-source > /tmp/new_changelog-installscript.rst".format(
        new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # macOS's sed has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG-INSTALLSCRIPT.rst".format(sed_i_arg))

    if sys.platform != 'darwin':
        # sed on darwin doesn't support `-z`. On mac, you will need to manually update the following.
        ctx.run(
            "sed -z {0} -e 's/installscript-{1}\\n===={2}/{1}\\n{2}/' /tmp/new_changelog-installscript.rst"
            .format(sed_i_arg, new_version, '=' * len(new_version)))

    # merging to CHANGELOG-INSTALLSCRIPT.rst
    ctx.run(
        "cat CHANGELOG-INSTALLSCRIPT.rst >> /tmp/new_changelog-installscript.rst && mv /tmp/new_changelog-installscript.rst CHANGELOG-INSTALLSCRIPT.rst"
    )

    # commit new CHANGELOG-INSTALLSCRIPT
    ctx.run("git add CHANGELOG-INSTALLSCRIPT.rst")

    print("\nCommit this with:")
    print(
        "git commit -m \"[INSTALLSCRIPT] Update CHANGELOG-INSTALLSCRIPT for {}\""
        .format(new_version))
Example #19
def update_changelog(ctx, new_version):
    """
    Quick task to generate the new CHANGELOG using reno when releasing a minor
    version (linux only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version))
        raise Exit(code=1)

    # let's avoid losing uncommitted changes with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update_changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep {}".format(new_version))
    except Failure:
        print("Missing '{}' git tag: mandatory to use 'reno'".format(
            new_version))
        raise

    # removing releasenotes from bugfix on the old minor.
    previous_minor = "%s.%s" % (new_version_int[0], new_version_int[1] - 1)
    log_result = ctx.run("git log {}.0...remotes/origin/{}.x --name-only | \
            grep releasenotes/notes/ || true".format(previous_minor,
                                                     previous_minor))
    log_result = log_result.stdout.replace('\n', ' ').strip()
    if len(log_result) > 0:
        ctx.run("git rm --ignore-unmatch {}".format(log_result))

    # generate the new changelog
    ctx.run("reno report \
            --ignore-cache \
            --earliest-version {}.0 \
            --version {} \
            --no-show-source > /tmp/new_changelog.rst".format(
        previous_minor, new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # remove the old header. macOS doesn't have the same sed CLI
    if sys.platform == 'darwin':
        ctx.run("sed -i '' -e '1,4d' CHANGELOG.rst")
    else:
        ctx.run("sed -i -e '1,4d' CHANGELOG.rst")

    # merging to CHANGELOG.rst
    ctx.run(
        "cat CHANGELOG.rst >> /tmp/new_changelog.rst && mv /tmp/new_changelog.rst CHANGELOG.rst"
    )

    # commit new CHANGELOG
    ctx.run("git add CHANGELOG.rst \
            && git commit -m \"Update CHANGELOG for {}\"".format(new_version))
Example no. 20
def finish(
    ctx,
    major_versions="6,7",
    integration_version=None,
    omnibus_software_version=None,
    jmxfetch_version=None,
    omnibus_ruby_version=None,
    security_agent_policies_version=None,
    macos_build_version=None,
    ignore_rc_tag=False,
):
    """
    Creates new entry in the release.json file for the new version. Removes all the RC entries.
    """

    if sys.version_info[0] < 3:
        print("Must use Python 3 for this task")
        raise Exit(code=1)

    list_major_versions = major_versions.split(",")
    print("Finishing release for major version(s) {}".format(
        list_major_versions))

    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = max(list_major_versions)

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    # We want to match:
    # - X.Y.Z
    # - X.Y.Z-rc.t
    # - vX.Y(.Z) (security-agent-policies repo)
    version_re = re.compile(r'(v)?(\d+)[.](\d+)([.](\d+))?(-rc\.(\d+))?')

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # jmxfetch and security-agent-policies follow their own version scheme
    highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="JMXFETCH_VERSION",
    )

    highest_security_agent_policies_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="SECURITY_AGENT_POLICIES_VERSION",
    )

    # Erase RCs
    for major_version in list_major_versions:
        highest_version["major"] = major_version
        rc = highest_version["rc"]
        while highest_version["rc"] not in [0, None]:
            # In case we have skipped an RC in the file, pop with a default
            # so a missing key doesn't abort the cleanup
            release_json.pop(_stringify_version(highest_version), None)
            highest_version["rc"] = highest_version["rc"] - 1
        highest_version["rc"] = rc

    # Tags in other repos are based on the highest major (e.g. for releasing version 6.X.Y and 7.X.Y they will tag only 7.X.Y)
    highest_version["major"] = highest_major

    # We don't want to fetch RC tags
    highest_version["rc"] = None

    if not integration_version:
        integration_version = _get_highest_repo_version(
            github_token, "integrations-core", highest_version, version_re)
        if integration_version is None:
            print(
                "ERROR: No version found for integrations-core - did you create the tag?"
            )
            raise Exit(code=1)
        if integration_version["rc"] is not None:
            print(
                "ERROR: integrations-core tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on integrations-core.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("integrations-core's tag is {}".format(
        _stringify_version(integration_version)))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            github_token, "omnibus-software", highest_version, version_re)
        if omnibus_software_version is None:
            print(
                "ERROR: No version found for omnibus-software - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_software_version["rc"] is not None:
            print(
                "ERROR: omnibus-software tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on omnibus-software.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("omnibus-software's tag is {}".format(
        _stringify_version(omnibus_software_version)))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(github_token, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
    print("jmxfetch's tag is {}".format(_stringify_version(jmxfetch_version)))

    if not omnibus_ruby_version:
        omnibus_ruby_version = _get_highest_repo_version(
            github_token, "omnibus-ruby", highest_version, version_re)
        if omnibus_ruby_version is None:
            print(
                "ERROR: No version found for omnibus-ruby - did you create the tag?"
            )
            raise Exit(code=1)
        if omnibus_ruby_version["rc"] is not None:
            print(
                "ERROR: omnibus-ruby tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on omnibus-ruby.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("omnibus-ruby's tag is {}".format(
        _stringify_version(omnibus_ruby_version)))

    if not security_agent_policies_version:
        security_agent_policies_version = _get_highest_repo_version(
            github_token, "security-agent-policies",
            highest_security_agent_policies_version, version_re)
    print("security-agent-policies' tag is {}".format(
        _stringify_version(security_agent_policies_version)))

    if not macos_build_version:
        macos_build_version = _get_highest_repo_version(
            github_token, "datadog-agent-macos-build", highest_version,
            version_re)
        if macos_build_version is None:
            print(
                "ERROR: No version found for datadog-agent-macos-build - did you create the tag?"
            )
            raise Exit(code=1)
        if macos_build_version["rc"] is not None:
            print(
                "ERROR: datadog-agent-macos-build tag is still an RC tag. That's probably NOT what you want in the final artifact."
            )
            if ignore_rc_tag:
                print("Continuing with RC tag on datadog-agent-macos-build.")
            else:
                print("Aborting.")
                raise Exit(code=1)
    print("datadog-agent-macos-build' tag is {}".format(
        _stringify_version(macos_build_version)))

    _save_release_json(
        release_json,
        list_major_versions,
        highest_version,
        integration_version,
        omnibus_software_version,
        omnibus_ruby_version,
        jmxfetch_version,
        security_agent_policies_version,
        macos_build_version,
    )
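
The version_re pattern above can be exercised directly; a short sketch showing the group layout the release helpers rely on (group 2 = major, 3 = minor, 5 = patch, 7 = rc):

import re

version_re = re.compile(r'(v)?(\d+)[.](\d+)([.](\d+))?(-rc\.(\d+))?')
m = version_re.match("7.32.1-rc.3")
assert (m.group(2), m.group(3), m.group(5), m.group(7)) == ("7", "32", "1", "3")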
Example no. 21
def dockerize_test(ctx, binary, skip_cleanup=False):
    """
    Run a go test in a remote docker environment and pipe its output to stdout.
    Host and target systems must be identical (test is built on the host).
    """
    import docker

    client = docker.from_env()
    temp_folder = tempfile.mkdtemp(prefix="ddtest-")

    ctx.run("cp %s %s/test.bin" % (binary, temp_folder))

    with open("%s/Dockerfile" % temp_folder, 'w') as stream:
        stream.write("""FROM debian:stretch-slim
ENV DOCKER_DD_AGENT=yes
WORKDIR /
ADD https://github.com/docker/compose/releases/download/1.16.1/docker-compose-Linux-x86_64 /bin/docker-compose
RUN echo "1804b0ce6596efe707b9cab05d74b161833ed503f0535a937dd5d17bea8fc50a  /bin/docker-compose" > sum && \
    sha256sum -c sum && \
    chmod +x /bin/docker-compose
CMD /test.bin
COPY test.bin /test.bin
""")
        # Handle optional testdata folder
        if os.path.isdir("./testdata"):
            ctx.run("cp -R testdata %s" % temp_folder)
            stream.write("COPY testdata /testdata")

    test_image, _ = client.images.build(path=temp_folder, rm=True)

    scratch_volume = client.volumes.create()

    test_container = client.containers.run(
        test_image.id,
        detach=True,
        pid_mode="host",  # For origin detection
        environment=[
            "SCRATCH_VOLUME_NAME=" + scratch_volume.name,
            "SCRATCH_VOLUME_PATH=/tmp/scratch",
        ],
        volumes={
            '/var/run/docker.sock': {
                'bind': '/var/run/docker.sock',
                'mode': 'ro'
            },
            '/proc': {
                'bind': '/host/proc',
                'mode': 'ro'
            },
            '/sys/fs/cgroup': {
                'bind': '/host/sys/fs/cgroup',
                'mode': 'ro'
            },
            scratch_volume.name: {
                'bind': '/tmp/scratch',
                'mode': 'rw'
            }
        })

    exit_code = test_container.wait()['StatusCode']

    print(test_container.logs(stdout=True, stderr=False,
                              stream=False).decode(sys.stdout.encoding))

    sys.stderr.write(
        test_container.logs(stdout=False, stderr=True,
                            stream=False).decode(sys.stderr.encoding))

    if not skip_cleanup:
        shutil.rmtree(temp_folder)
        test_container.remove(v=True, force=True)
        scratch_volume.remove(force=True)
        client.images.remove(test_image.id)

    if exit_code != 0:
        raise Exit(code=exit_code)
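
The Dockerfile above pins docker-compose by checksum with `sha256sum -c`; the same verification as a self-contained Python sketch (the function name is illustrative):

import hashlib

def sha256_matches(path, expected_hex):
    # Hash the file in chunks and compare against the pinned digest.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            h.update(chunk)
    return h.hexdigest() == expected_hex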
Example no. 22
def create_rc(
    ctx,
    major_versions="6,7",
    integration_version=None,
    omnibus_software_version=None,
    jmxfetch_version=None,
    omnibus_ruby_version=None,
    security_agent_policies_version=None,
    macos_build_version=None,
):
    """
    Takes whatever version is the highest in release.json and adds a new RC to it.
    If there is no RC, creates one and bumps the minor version. If there is an RC, creates RC + 1.
    """

    if sys.version_info[0] < 3:
        print("Must use Python 3 for this task")
        raise Exit(code=1)

    list_major_versions = major_versions.split(",")
    print("Creating RC for agent version(s) {}".format(list_major_versions))

    list_major_versions = [int(x) for x in list_major_versions]
    highest_major = max(list_major_versions)

    github_token = os.environ.get('GITHUB_TOKEN')
    if github_token is None:
        print(
            "Error: set the GITHUB_TOKEN environment variable.\nYou can create one by going to"
            " https://github.com/settings/tokens. It should have at least the 'repo' permissions."
        )
        raise Exit(code=1)

    # We want to match:
    # - X.Y.Z
    # - X.Y.Z-rc.t
    # - vX.Y(.Z) (security-agent-policies repo)
    version_re = re.compile(r'(v)?(\d+)[.](\d+)([.](\d+))?(-rc\.(\d+))?')

    with open("release.json", "r") as release_json_stream:
        release_json = json.load(release_json_stream,
                                 object_pairs_hook=OrderedDict)

    highest_version = _get_highest_version_from_release_json(
        release_json, highest_major, version_re)

    # jmxfetch and security-agent-policies follow their own version scheme
    highest_jmxfetch_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="JMXFETCH_VERSION",
    )

    highest_security_agent_policies_version = _get_highest_version_from_release_json(
        release_json,
        highest_major,
        version_re,
        release_json_key="SECURITY_AGENT_POLICIES_VERSION",
    )

    if highest_version["rc"] is None:
        # No RC exists, create one
        highest_version["minor"] = highest_version["minor"] + 1
        highest_version["rc"] = 1
    else:
        # An RC exists, create next RC
        highest_version["rc"] = highest_version["rc"] + 1
    new_rc = _stringify_version(highest_version)
    print("Creating {}".format(new_rc))

    if not integration_version:
        integration_version = _get_highest_repo_version(
            github_token, "integrations-core", highest_version, version_re)
    print("integrations-core's tag is {}".format(
        _stringify_version(integration_version)))

    if not omnibus_software_version:
        omnibus_software_version = _get_highest_repo_version(
            github_token, "omnibus-software", highest_version, version_re)
    print("omnibus-software's tag is {}".format(
        _stringify_version(omnibus_software_version)))

    if not jmxfetch_version:
        jmxfetch_version = _get_highest_repo_version(github_token, "jmxfetch",
                                                     highest_jmxfetch_version,
                                                     version_re)
    print("jmxfetch's tag is {}".format(_stringify_version(jmxfetch_version)))

    if not omnibus_ruby_version:
        omnibus_ruby_version = _get_highest_repo_version(
            github_token, "omnibus-ruby", highest_version, version_re)
    print("omnibus-ruby's tag is {}".format(
        _stringify_version(omnibus_ruby_version)))

    if not security_agent_policies_version:
        security_agent_policies_version = _get_highest_repo_version(
            github_token, "security-agent-policies",
            highest_security_agent_policies_version, version_re)
    print("security-agent-policies' tag is {}".format(
        _stringify_version(security_agent_policies_version)))

    if not macos_build_version:
        macos_build_version = _get_highest_repo_version(
            github_token, "datadog-agent-macos-build", highest_version,
            version_re)
    print("datadog-agent-macos-build's tag is {}".format(
        _stringify_version(macos_build_version)))

    _save_release_json(
        release_json,
        list_major_versions,
        highest_version,
        integration_version,
        omnibus_software_version,
        omnibus_ruby_version,
        jmxfetch_version,
        security_agent_policies_version,
        macos_build_version,
    )
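
The RC bump rule described in the docstring, as a pure function over the same dict shape the release helpers use (next_rc is an illustrative name):

def next_rc(version):
    # No RC yet: start a new minor at rc.1; otherwise bump the RC number.
    if version["rc"] is None:
        version["minor"] += 1
        version["rc"] = 1
    else:
        version["rc"] += 1
    return version

assert next_rc({"major": 7, "minor": 31, "patch": 0, "rc": None})["rc"] == 1
assert next_rc({"major": 7, "minor": 32, "patch": 0, "rc": 1})["rc"] == 2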
Example no. 23
def release(ctx, version, skip_release_notes=False):
    """Tag a new release."""
    status = run("git status --porcelain", hide=True).stdout.strip()
    if status != "":
        raise Exit(message="git checkout not clean, cannot release")

    version = semver.parse_version_info(version)
    is_patch_release = version.patch != 0

    # Check that we have release notes for the desired version.
    run("git checkout main", echo=True)
    if not skip_release_notes:
        with open("website/content/release-notes/_index.md") as release_notes:
            if "## Version {}".format(version) not in release_notes.read():
                raise Exit(message="no release notes for v{}".format(version))

    # Move HEAD to the correct release branch - either a new one, or
    # an existing one.
    if is_patch_release:
        run("git checkout v{}.{}".format(version.major, version.minor),
            echo=True)
    else:
        run("git checkout -b v{}.{}".format(version.major, version.minor),
            echo=True)

    # Copy over release notes from main.
    if not skip_release_notes:
        run("git checkout main -- website/content/release-notes/_index.md",
            echo=True)

    # Update links on the website to point to files at the version
    # we're creating.
    if is_patch_release:
        previous_version = "v{}.{}.{}".format(version.major, version.minor,
                                              version.patch - 1)
    else:
        previous_version = "main"

    def _replace(pattern):
        oldpat = pattern.format(previous_version)
        newpat = pattern.format("v{}".format(version))
        run("perl -pi -e 's#{}#{}#g' website/content/*.md website/content/*/*.md"
            .format(oldpat, newpat),
            echo=True)

    _replace("/metallb/metallb/{}")
    _replace("/metallb/metallb/tree/{}")
    _replace("/metallb/metallb/blob/{}")

    # Update the version listed on the website sidebar
    run("perl -pi -e 's/MetalLB .*/MetalLB v{}/g' website/content/_header.md".
        format(version),
        echo=True)

    # Update the manifests with the new version
    run("perl -pi -e 's,image: quay.io/metallb/speaker:.*,image: quay.io/metallb/speaker:v{},g' manifests/metallb.yaml"
        .format(version),
        echo=True)
    run("perl -pi -e 's,image: quay.io/metallb/controller:.*,image: quay.io/metallb/controller:v{},g' manifests/metallb.yaml"
        .format(version),
        echo=True)

    # Update the versions in the helm chart (version and appVersion are always the same)
    # helm chart versions follow Semantic Versioning, and thus exclude the leading 'v'
    run("perl -pi -e 's,^version: .*,version: {},g' charts/metallb/Chart.yaml".
        format(version),
        echo=True)
    run("perl -pi -e 's,^appVersion: .*,appVersion: v{},g' charts/metallb/Chart.yaml"
        .format(version),
        echo=True)
    run("perl -pi -e 's,^Current chart version is: .*,Current chart version is: `{}`,g' charts/metallb/README.md"
        .format(version),
        echo=True)

    # Update the version in kustomize instructions
    #
    # TODO: Check if kustomize instructions really need the version in the
    # website or if there is a simpler way. For now, though, we just replace the
    # only page that mentions the version on release.
    run("perl -pi -e 's,github.com/metallb/metallb//manifests\?ref=.*,github.com/metallb/metallb//manifests\?ref=v{},g' website/content/installation/_index.md"
        .format(version),
        echo=True)

    # Update the version embedded in the binary
    run("perl -pi -e 's/version\s+=.*/version = \"{}\"/g' internal/version/version.go"
        .format(version),
        echo=True)
    run("gofmt -w internal/version/version.go", echo=True)

    run("git commit -a -m 'Automated update for release v{}'".format(version),
        echo=True)
    run("git tag v{} -m 'See the release notes for details:\n\nhttps://metallb.universe.tf/release-notes/#version-{}-{}-{}'"
        .format(version, version.major, version.minor, version.patch),
        echo=True)
    run("git checkout main", echo=True)
Example no. 24
def update_dca_changelog(ctx, new_version, agent_version):
    """
    Quick task to generate the new CHANGELOG-DCA using reno when releasing a minor
    version (linux/macOS only).
    """
    new_version_int = list(map(int, new_version.split(".")))

    if len(new_version_int) != 3:
        print("Error: invalid version: {}".format(new_version_int))
        raise Exit(1)

    agent_version_int = list(map(int, agent_version.split(".")))

    if len(agent_version_int) != 3:
        print("Error: invalid version: {}".format(agent_version_int))
        raise Exit(1)

    # let's avoid losing uncommitted changes with 'git reset --hard'
    try:
        ctx.run("git diff --exit-code HEAD", hide="both")
    except Failure:
        print(
            "Error: You have uncommitted changes, please commit or stash before using update-dca-changelog"
        )
        return

    # make sure we are up to date
    ctx.run("git fetch")

    # let's check that the tag for the new version is present (needed by reno)
    try:
        ctx.run("git tag --list | grep dca-{}".format(new_version))
    except Failure:
        print("Missing 'dca-{}' git tag: mandatory to use 'reno'".format(
            new_version))
        raise

    # Cluster agent minor releases are in sync with the agent's, bugfixes are not necessarily.
    # We rely on the agent's devel tag to enforce the sync between both releases.
    branching_point_agent = "{}.{}.0-devel".format(agent_version_int[0],
                                                   agent_version_int[1])
    previous_minor_branchoff = "dca-{}.{}.X".format(new_version_int[0],
                                                    new_version_int[1] - 1)
    log_result = ctx.run(
        "git log {}...remotes/origin/{} --name-only --oneline | \
            grep releasenotes-dca/notes/ || true".format(
            branching_point_agent, previous_minor_branchoff))
    log_result = log_result.stdout.replace('\n', ' ').strip()

    # Do not include release notes that were added between the branch-off point
    # for the current release (pinned by the agent's devel tag) and the previous
    # minor release branch (previous_minor_branchoff)
    if len(log_result) > 0:
        ctx.run("git rm --ignore-unmatch {}".format(log_result))

    current_branchoff = "dca-{}.{}.X".format(new_version_int[0],
                                             new_version_int[1])
    # generate the new changelog. Specifying branch in case this is run outside the release branch that contains the tag.
    ctx.run("reno --rel-notes-dir releasenotes-dca report \
            --ignore-cache \
            --branch {} \
            --version dca-{} \
            --no-show-source > /tmp/new_changelog-dca.rst".format(
        current_branchoff, new_version))

    # resetting git
    ctx.run("git reset --hard HEAD")

    # macOS's `sed` has a different syntax for the "-i" parameter
    sed_i_arg = "-i"
    if sys.platform == 'darwin':
        sed_i_arg = "-i ''"
    # remove the old header from the existing changelog
    ctx.run("sed {0} -e '1,4d' CHANGELOG-DCA.rst".format(sed_i_arg))

    if sys.platform != 'darwin':
        # sed on darwin doesn't support `-z`. On mac, you will need to manually update the following.
        ctx.run(
            "sed -z {0} -e 's/dca-{1}\\n===={2}/{1}\\n{2}/' /tmp/new_changelog-dca.rst"
            .format(sed_i_arg, new_version, '=' * len(new_version)))

    # merging to CHANGELOG-DCA.rst
    ctx.run(
        "cat CHANGELOG-DCA.rst >> /tmp/new_changelog-dca.rst && mv /tmp/new_changelog-dca.rst CHANGELOG-DCA.rst"
    )

    # commit new CHANGELOG
    ctx.run("git add CHANGELOG-DCA.rst")

    print("\nCommit this with:")
    print(
        "git commit -m \"[DCA] Update CHANGELOG for {}\"".format(new_version))
Example no. 25
def dockerize_test(ctx, binary, skip_cleanup=False):
    """
    Run a go test in a remote docker environment and pipe its output to stdout.
    Host and target systems must be identical (test is built on the host).
    """
    import docker

    client = docker.from_env()
    temp_folder = tempfile.mkdtemp(prefix="ddtest-")

    ctx.run("cp %s %s/test.bin" % (binary, temp_folder))

    with open("%s/Dockerfile" % temp_folder, 'w') as stream:
        stream.write("""FROM docker/compose:debian-1.28.3
ENV DOCKER_DD_AGENT=yes
WORKDIR /
CMD /test.bin
COPY test.bin /test.bin
""")
        # Handle optional testdata folder
        if os.path.isdir("./testdata"):
            ctx.run("cp -R testdata %s" % temp_folder)
            stream.write("COPY testdata /testdata")

    test_image, _ = client.images.build(path=temp_folder, rm=True)

    scratch_volume = client.volumes.create()

    test_container = client.containers.run(
        test_image.id,
        detach=True,
        pid_mode="host",  # For origin detection
        environment=[
            "SCRATCH_VOLUME_NAME=" + scratch_volume.name,
            "SCRATCH_VOLUME_PATH=/tmp/scratch",
        ],
        volumes={
            '/var/run/docker.sock': {
                'bind': '/var/run/docker.sock',
                'mode': 'ro'
            },
            '/proc': {
                'bind': '/host/proc',
                'mode': 'ro'
            },
            '/sys/fs/cgroup': {
                'bind': '/host/sys/fs/cgroup',
                'mode': 'ro'
            },
            scratch_volume.name: {
                'bind': '/tmp/scratch',
                'mode': 'rw'
            },
        },
    )

    exit_code = test_container.wait()['StatusCode']

    print(test_container.logs(stdout=True, stderr=False,
                              stream=False).decode(sys.stdout.encoding))

    sys.stderr.write(
        test_container.logs(stdout=False, stderr=True,
                            stream=False).decode(sys.stderr.encoding))

    if not skip_cleanup:
        shutil.rmtree(temp_folder)
        test_container.remove(v=True, force=True)
        scratch_volume.remove(force=True)
        client.images.remove(test_image.id)

    if exit_code != 0:
        raise Exit(code=exit_code)
Example no. 26
def _check_cwd(ctx):
    '''Check that we are invoked from the project's root directory.'''
    if ctx.C.ROOT_DIR != os.getcwd():
        print("Please run this command from the project's root.")
        raise Exit(-1)
Example no. 27
def publish_manifest(ctx, name, tag, image, signed_push=False):
    """
    Publish a manifest referencing image names matching the specified pattern.
    In that pattern, OS and ARCH strings are replaced, if found, by corresponding
    entries in the list of platforms passed as an argument. This allows creating
    a set of image references more easily. See the manifest tool documentation for
    further details: https://github.com/estesp/manifest-tool.
    """
    manifest_spec = {"image": "{}:{}".format(name, tag)}
    src_images = []

    for img in image:
        img_splitted = img.replace(' ', '').split(',')
        if len(img_splitted) != 2:
            print("Impossible to parse source format for: '{}'".format(img))
            raise Exit(code=1)

        platform_splitted = img_splitted[1].split('/')
        if len(platform_splitted) != 2:
            print("Impossible to parse platform format for: '{}'".format(img))
            raise Exit(code=1)

        src_images.append(
            {"image": img_splitted[0], "platform": {"architecture": platform_splitted[1], "os": platform_splitted[0]}}
        )
    manifest_spec["manifests"] = src_images

    with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
        temp_file_path = f.name
        yaml.dump(manifest_spec, f, default_flow_style=False)

    print("Using temp file: {}".format(temp_file_path))
    ctx.run("cat {}".format(temp_file_path))

    try:
        result = retry_run(ctx, "manifest-tool push from-spec {}".format(temp_file_path))
        if result.stdout:
            out = result.stdout.split('\n')[0]
            fields = out.split(" ")

            if len(fields) != 3:
                print("Unexpected output when invoking manifest-tool")
                raise Exit(code=1)

            digest_fields = fields[1].split(":")

            if len(digest_fields) != 2 or digest_fields[0] != "sha256":
                print("Unexpected digest format in manifest-tool output")
                raise Exit(code=1)

            digest = digest_fields[1]
            length = fields[2]

        if signed_push:
            cmd = """
            notary -s https://notary.docker.io -d {home}/.docker/trust addhash \
                -p docker.io/{name} {tag} {length} --sha256 {sha256} \
                -r targets/releases
            """
            retry_run(ctx, cmd.format(home=os.path.expanduser("~"), name=name, tag=tag, length=length, sha256=digest))
    finally:
        os.remove(temp_file_path)
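
A minimal sketch of the spec this task writes, filled with hypothetical image names and dumped with the same yaml call:

import yaml

manifest_spec = {
    "image": "example/agent:7.32.0",
    "manifests": [
        {"image": "example/agent:7.32.0-amd64",
         "platform": {"architecture": "amd64", "os": "linux"}},
        {"image": "example/agent:7.32.0-arm64",
         "platform": {"architecture": "arm64", "os": "linux"}},
    ],
}
print(yaml.dump(manifest_spec, default_flow_style=False))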
Example no. 28
def build(
    ctx,
    race=False,
    incremental_build=False,
    major_version='7',
    python_runtimes='3',
    go_mod="mod",
    windows=is_windows,
    arch="x64",
    embedded_path=DATADOG_AGENT_EMBEDDED_PATH,
    compile_ebpf=True,
    nikos_embedded_path=None,
    bundle_ebpf=False,
    parallel_build=True,
):
    """
    Build the system_probe
    """

    # generate windows resources
    if windows:
        windres_target = "pe-x86-64"
        if arch == "x86":
            raise Exit(message="system probe not supported on x86")

        ver = get_version_numeric_only(ctx, major_version=major_version)
        maj_ver, min_ver, patch_ver = ver.split(".")
        resdir = os.path.join(".", "cmd", "system-probe", "windows_resources")

        ctx.run(
            f"windmc --target {windres_target} -r {resdir} {resdir}/system-probe-msg.mc"
        )

        ctx.run(
            f"windres --define MAJ_VER={maj_ver} --define MIN_VER={min_ver} --define PATCH_VER={patch_ver} -i cmd/system-probe/windows_resources/system-probe.rc --target {windres_target} -O coff -o cmd/system-probe/rsrc.syso"
        )
    elif compile_ebpf:
        # Only build ebpf files on unix
        build_object_files(ctx, parallel_build=parallel_build)

    generate_cgo_types(ctx, windows=windows)
    ldflags, gcflags, env = get_build_flags(
        ctx,
        major_version=major_version,
        python_runtimes=python_runtimes,
        embedded_path=embedded_path,
        nikos_embedded_path=nikos_embedded_path,
    )

    build_tags = get_default_build_tags(build="system-probe", arch=arch)
    if bundle_ebpf:
        build_tags.append(BUNDLE_TAG)
    if nikos_embedded_path:
        build_tags.append(DNF_TAG)

    cmd = 'go build -mod={go_mod} {race_opt} {build_type} -tags "{go_build_tags}" '
    cmd += '-o {agent_bin} -gcflags="{gcflags}" -ldflags="{ldflags}" {REPO_PATH}/cmd/system-probe'

    args = {
        "go_mod": go_mod,
        "race_opt": "-race" if race else "",
        "build_type": "" if incremental_build else "-a",
        "go_build_tags": " ".join(build_tags),
        "agent_bin": BIN_PATH,
        "gcflags": gcflags,
        "ldflags": ldflags,
        "REPO_PATH": REPO_PATH,
    }

    ctx.run(cmd.format(**args), env=env)
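
To make the template above concrete, here it is rendered with illustrative values (not the real defaults):

cmd = 'go build -mod={go_mod} {race_opt} {build_type} -tags "{go_build_tags}" '
cmd += '-o {agent_bin} -gcflags="{gcflags}" -ldflags="{ldflags}" {REPO_PATH}/cmd/system-probe'
print(cmd.format(
    go_mod="mod",
    race_opt="",
    build_type="-a",
    go_build_tags="linux_bpf",
    agent_bin="./bin/system-probe/system-probe",
    gcflags="",
    ldflags="",
    REPO_PATH="github.com/DataDog/datadog-agent",
))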
Example no. 29
def build(ctx, rebuild=False, race=False, static=False, build_include=None,
          build_exclude=None, major_version='7', arch="x64", go_mod="vendor"):
    """
    Build Dogstatsd
    """
    build_include = DEFAULT_BUILD_TAGS if build_include is None else build_include.split(",")
    build_exclude = [] if build_exclude is None else build_exclude.split(",")
    build_tags = get_build_tags(build_include, build_exclude)
    ldflags, gcflags, env = get_build_flags(ctx, static=static, major_version=major_version)
    bin_path = DOGSTATSD_BIN_PATH

    # generate windows resources
    if sys.platform == 'win32':
        windres_target = "pe-x86-64"
        if arch == "x86":
            env["GOARCH"] = "386"
            windres_target = "pe-i386"

        ver = get_version_numeric_only(ctx, env, major_version=major_version)
        maj_ver, min_ver, patch_ver = ver.split(".")

        ctx.run("windmc --target {target_arch}  -r cmd/dogstatsd/windows_resources cmd/dogstatsd/windows_resources/dogstatsd-msg.mc".format(target_arch=windres_target))
        ctx.run("windres --define MAJ_VER={maj_ver} --define MIN_VER={min_ver} --define PATCH_VER={patch_ver} -i cmd/dogstatsd/windows_resources/dogstatsd.rc --target {target_arch} -O coff -o cmd/dogstatsd/rsrc.syso".format(
            maj_ver=maj_ver,
            min_ver=min_ver,
            patch_ver=patch_ver,
            target_arch=windres_target
        ))

    if not sys.platform.startswith('linux'):
        for ex in LINUX_ONLY_TAGS:
            if ex not in build_exclude:
                build_exclude.append(ex)
    build_tags = get_build_tags(build_include, build_exclude)

    if static:
        bin_path = STATIC_BIN_PATH

    # NOTE: consider stripping symbols to reduce binary size
    cmd = "go build -mod={go_mod} {race_opt} {build_type} -tags \"{build_tags}\" -o {bin_name} "
    cmd += "-gcflags=\"{gcflags}\" -ldflags=\"{ldflags}\" {REPO_PATH}/cmd/dogstatsd"
    args = {
        "go_mod": go_mod,
        "race_opt": "-race" if race else "",
        "build_type": "-a" if rebuild else "",
        "build_tags": " ".join(build_tags),
        "bin_name": os.path.join(bin_path, bin_name("dogstatsd")),
        "gcflags": gcflags,
        "ldflags": ldflags,
        "REPO_PATH": REPO_PATH,
    }
    ctx.run(cmd.format(**args), env=env)

    # Render the configuration file template
    #
    # We need to remove cross compiling bits if any because go generate must
    # build and execute in the native platform
    env = {
        "GOOS": "",
        "GOARCH": "",
    }
    cmd = "go generate -mod={} {}/cmd/dogstatsd"
    ctx.run(cmd.format(go_mod, REPO_PATH), env=env)

    if static and sys.platform.startswith("linux"):
        cmd = "file {bin_name} "
        args = {
            "bin_name": os.path.join(bin_path, bin_name("dogstatsd")),
        }
        result = ctx.run(cmd.format(**args))
        if "statically linked" not in result.stdout:
            print("Dogstatsd binary is not static, exiting...")
            raise Exit(code=1)

    refresh_assets(ctx)
Example no. 30
    # Superuser privileges via auto-response
    sudo_pass_auto_respond = Responder(
        pattern=r'\[sudo\] password:',
        # the response value was masked ('******') in the source; assume a
        # sudo_password string supplied earlier in the task
        response=sudo_password + '\n',
    )

    # create ssh connection
    cxn = Connection(ip, config=fabric_config)

    # add an ssh key to host
    try:
        run_result = cxn.run('cat ~/.ssh/authorized_keys', hide=True)
        if run_result.failed:
            cxn.run('mkdir -p ~/.ssh', hide=True)
            cxn.run('echo %s >> ~/.ssh/authorized_keys' % ssh_public_key,
                    hide=True)
            cxn.run('chmod 700 ~/.ssh', hide=True)
            cxn.run('chmod 400 ~/.ssh/authorized_keys', hide=True)
        else:
            if ssh_public_key not in run_result.stdout:
                cxn.run(
                    'cp ~/.ssh/authorized_keys ~/.ssh/authorized_keys$(date +%Y%m%d%H%M%S)~',
                    hide=True)
                cxn.run('echo %s >> ~/.ssh/authorized_keys' % ssh_public_key,
                        hide=True)
            else:
                print("ssh key already added.")
    except AuthenticationException as e:
        raise Exit(e)
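
The Responder defined above only takes effect when wired into a command as a watcher; a hedged usage sketch (the command itself is illustrative):

# Run a sudo command non-interactively; the responder answers the prompt.
cxn.run('sudo -k true', pty=True, watchers=[sudo_pass_auto_respond], hide=True)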