Example #1
def main(args: argparse.Namespace) -> None:
    check_path()

    rel = version.LooseVersion(args.release)

    print("Release:", rel)
    if len(rel.version) == 3:
        # Major release
        major, minor, patch = version.StrictVersion(args.release).version
        rc = None
        rc_ver = None
    else:
        # RC release
        major, minor, patch, rc, rc_ver = cast(Tuple[int, int, int, str, int],
                                               rel.version)
        assert rc == "rc"

    release = str(major) + "." + str(minor) + "." + str(patch)
    branch = "release_" + release
    git.clean("-xdf")
    git.checkout(branch)
    git.pull("origin", branch)
    git.submodule("update")
    commit_hash = latest_hash()

    download_r_packages(release, "" if rc is None else rc + str(rc_ver),
                        commit_hash)

    download_py_packages(major, minor, commit_hash)
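# Not part of the original script: a quick sketch of what distutils' version
# classes return for the two cases branched on above. Note that distutils.version
# is deprecated/removed in newer Python, so this is illustrative only.
from distutils.version import LooseVersion, StrictVersion

assert StrictVersion("1.5.0").version == (1, 5, 0)
assert LooseVersion("1.5.0").version == [1, 5, 0]              # len == 3: major release
assert LooseVersion("1.5.0rc1").version == [1, 5, 0, "rc", 1]  # RC release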
Example #2
def main(args: argparse.Namespace) -> None:
    check_path()

    rel = version.StrictVersion(args.release)
    platforms = [
        "win_amd64",
        "manylinux2010_x86_64",
        "manylinux2014_aarch64",
        "macosx_10_14_x86_64.macosx_10_15_x86_64.macosx_11_0_x86_64",
    ]
    print("Release:", rel)
    major, minor, patch = rel.version
    branch = "release_" + str(major) + "." + str(minor) + ".0"
    git.clean("-xdf")
    git.checkout(branch)
    git.pull("origin", branch)
    git.submodule("update")
    commit_hash = latest_hash()

    dir_URL = PREFIX + str(major) + "." + str(minor) + ".0" + "/"
    src_filename_prefix = "xgboost-" + args.release + "%2B" + commit_hash + "-py3-none-"
    target_filename_prefix = "xgboost-" + args.release + "-py3-none-"

    if not os.path.exists(DIST):
        os.mkdir(DIST)

    filenames = download_wheels(platforms, dir_URL, src_filename_prefix,
                                target_filename_prefix)
    print("List of downloaded wheels:", filenames)
    print("""
The following steps should be done manually:
- Generate the source package by running `python setup.py sdist`.
- Upload the PyPI packages with `python3 -m twine upload dist/<Package Name>` for all wheels.
- Check the uploaded files on `https://pypi.org/project/xgboost/<VERSION>/#files` and verify
  that `pip install xgboost==<VERSION>` works.""")
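# The download_wheels() helper is not shown in this example; a hypothetical
# sketch of what it might do, assuming each wheel lives at dir_URL plus the
# source filename and is saved into DIST under the target filename:
import os
import urllib.request

def download_wheels_sketch(platforms, dir_URL, src_prefix, target_prefix):
    filenames = []
    for platform in platforms:
        url = dir_URL + src_prefix + platform + ".whl"
        target = os.path.join(DIST, target_prefix + platform + ".whl")  # DIST as above
        print("Downloading:", url)
        urllib.request.urlretrieve(url, target)
        filenames.append(target)
    return filenames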
Example #3
def finish(name):
    """
    USAGE
        git-finish [-n/--name= <name>]

    DESCRIPTION
        Set the current branch's upstream to "old", then rename it,
        replacing the "feature" prefix with "finished", so it no longer
        appears in "git features".
    """
    buf = StringIO()
    if not name:
        name = str(git.branch("--show-current")).strip()

    if re.match("^feature/", name):
        new_name = re.sub("^feature", "finished", name)
        click.echo(f"Finishing {name} by renaming it to {new_name}")
        try:
            git.branch(u="old", _err_to_out=True, _out=buf)
            git.branch("-m", name, new_name, _err_to_out=True, _out=buf)
            git.checkout("master")
        except sh.ErrorReturnCode:
            click.echo("Something went wrong!")
            click.echo(buf.getvalue())
            return 1
    else:
        click.echo('Branch name does not start with "feature/"')
        return 1
    return 0
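# A quick illustration (not from the original tool) of the rename performed above:
# the "feature" prefix becomes "finished" and the rest of the name is preserved.
import re

branch = "feature/ABC-123__api__fix-login"  # hypothetical branch name
assert re.match("^feature/", branch)
assert re.sub("^feature", "finished", branch) == "finished/ABC-123__api__fix-login"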
Example #4
def clone_commit(cirpy_dir, commit):
    """ Clones the `circuitpython` repository, fetches the commit, then
        checks out the repo at that ref.
    """
    working_dir = pathlib.Path()

    rosiepi_logger.info("Cloning repository at reference: %s", commit)

    try:
        git.clone("--depth", "1", "-n",
                  "https://github.com/sommersoft/circuitpython.git", cirpy_dir)

        os.chdir(cirpy_dir)
        git.fetch("origin", commit)

        git.checkout(commit)

        git.submodule("sync")

        git.submodule("update", "--init")

    except sh.ErrorReturnCode as git_err:
        git_stderr = str(git_err.stderr, encoding="utf-8").strip("\n")
        err_msg = [
            f"Failed to retrieve repository at {commit}:",
            f" - {git_stderr}",
        ]
        rosiepi_logger.warning("%s", "\n".join(err_msg))
        raise RuntimeError(git_stderr) from None

    finally:
        os.chdir(working_dir)
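# A hedged usage sketch (not in the original module): clone_commit() takes the
# target directory and a commit-ish; the ref below is only a placeholder.
import pathlib

try:
    clone_commit(pathlib.Path("circuitpython"), "0123abc")  # placeholder ref
except RuntimeError as err:
    rosiepi_logger.error("Could not prepare the repository: %s", err)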
Example #5
def fix_head_for_github(
    base_commit_ref: Optional[str] = None,
    head_ref: Optional[str] = None,
) -> Iterator[Optional[str]]:
    """
    GHA can check out the incorrect commit for a PR (it will create a fake merge commit),
    so we need to reset the head to the actual PR branch head before continuing.

    Note that this code is written in a generic manner, so that it becomes a no-op when
    the CI system has not artificially altered the HEAD ref.

    :return: The baseline ref as a commit hash
    """

    stashed_rev: Optional[str] = None
    base_ref: Optional[str] = base_commit_ref

    if get_git_repo() is None:
        yield base_ref
        return

    if base_ref:
        # Preserve location of head^ after we possibly change location below
        base_ref = git(["rev-parse", base_ref]).stdout.decode("utf-8").rstrip()

    if head_ref:
        stashed_rev = git(["branch",
                           "--show-current"]).stdout.decode("utf-8").rstrip()
        if not stashed_rev:
            stashed_rev = git(["rev-parse",
                               "HEAD"]).stdout.decode("utf-8").rstrip()
        click.echo(f"| not on head ref {head_ref}; checking that out now...",
                   err=True)
        git.checkout([head_ref])

    try:
        if base_ref is not None:
            merge_base = git("merge-base", base_ref, "HEAD").rstrip()
            # fmt:off
            click.echo("| reporting findings introduced by these commits:",
                       err=True)
            print_git_log(f"{merge_base}..HEAD")
            if merge_base != git("rev-parse", base_ref).rstrip():
                click.echo(
                    "| also reporting findings fixed by these commits from the baseline branch:",
                    err=True)
                print_git_log(f"{merge_base}..{base_ref}")
                click.echo("| to exclude these latter commits, run with",
                           err=True)
                click.echo(
                    f"|   --baseline-ref $(git merge-base {base_commit_ref} HEAD)",
                    err=True)
            # fmt: on

        yield base_ref
    finally:
        if stashed_rev is not None:
            click.echo(f"| returning to original head revision {stashed_rev}",
                       err=True)
            git.checkout([stashed_rev])
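# fix_head_for_github is a generator that yields the resolved baseline ref; in
# the original project it is presumably wrapped with contextlib.contextmanager.
# A hedged usage sketch under that assumption ("main" is a placeholder ref):
import contextlib

fix_head_cm = contextlib.contextmanager(fix_head_for_github)

with fix_head_cm(base_commit_ref="main", head_ref=None) as base_ref:
    # run the analysis against base_ref here; the original HEAD is restored on exit
    print("baseline commit:", base_ref)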
Example #6
def test_watch(monkeypatch):
    # XXX TODO: expand
    def wrap_watch(pattern, force=False):
        cmd = 'watch{0}'.format('-force' if force else '')
        msg = Fake({
            "_client": {
                "host": "stackexchange.com",
                "get_user": lambda id: Fake({
                    "name": "J F",
                    "id": id
                })
            },
            "owner": {
                "name": "El'endia Starman",
                "id": 1
            },
            "room": {
                "id": 11540,
                "get_current_user_ids": lambda: [161943]
            },
            # Ouch, this is iffy
            # Prevent an error from deep inside do_blacklist
            "content_source": '!!/{0} {1}'.format(cmd, pattern)
        })
        msg.room._client = msg._client

        return chatcommands.watch(pattern, alias_used=cmd, original_msg=msg)

    # Prevent from attempting to check privileges with Metasmoke
    monkeypatch.setattr(GlobalVars, "code_privileged_users", [1, 161943])

    try:
        # Invalid regex
        resp = wrap_watch(r'?')
        assert "An invalid pattern was provided" in resp

        # This is one of the perpetually condemned spam domains, blacklisted forever
        resp = wrap_watch(r'israelbigmarket')
        assert "That pattern looks like it's already caught" in resp

        # The phone number here is the first one in this format in bad_keywords.txt
        resp = wrap_watch(r'[a-z_]*(?:1_*)?913[\W_]*608[\W_]*4584[a-z_]*')
        assert "Mostly non-latin" not in resp
        assert "Bad keyword in answer" in resp
        assert "Bad keyword in body" in resp

        # XXX TODO: figure out how to trigger duplicate entry separately
        monkeypatch.setattr("chatcommunicate.is_privileged",
                            lambda *args: True)
        monkeypatch.setattr("gitmanager.GitManager.prepare_git_for_operation",
                            lambda *args: (True, None))

        assert wrap_watch("trimfire", True).startswith("Already watched")

        monkeypatch.setattr("gitmanager.GitManager.add_to_blacklist",
                            lambda *args, **kwargs: (True, "Hahaha"))
        assert wrap_watch("male enhancement", True) == "Hahaha"
    finally:
        git.checkout("master")
Example #7
def pull_update():
    # Force pull update
    try:
        git.branch('-D', 'temp')
    except GitError:
        pass
    git.checkout('-b', 'temp', "origin/deploy")
    git.branch('-M', 'deploy')
    os._exit(0)
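# An alternative sketch (not the author's code) that force-updates a local
# "deploy" branch in place with fetch + reset --hard instead of a temp branch:
git.fetch("origin", "deploy")
git.checkout("deploy")
git.reset("--hard", "origin/deploy")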
Example #8
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layer's master onto the stable branches.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(
        Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue

            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue

            log.info(
                f"Releasing :: {layer_name:^35} :: from: master to: stable")
            if not dry_run:
                downstream = f"https://{new_env['CDKBOT_GH_USR']}:{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
                identifier = str(uuid.uuid4())
                os.makedirs(identifier)
                for line in git.clone(downstream, identifier, _iter=True):
                    log.info(line)
                git_rev_master = git("rev-parse",
                                     "origin/master",
                                     _cwd=identifier).stdout.decode()
                git_rev_stable = git("rev-parse",
                                     "origin/stable",
                                     _cwd=identifier).stdout.decode()
                if git_rev_master == git_rev_stable:
                    log.info(
                        f"Skipping  :: {layer_name:^35} :: master == stable")
                    continue
                git.config("user.email",
                           "*****@*****.**",
                           _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("-f", "stable", _cwd=identifier)
                git.merge("master", "--no-ff", _cwd=identifier)
                for line in git.push("origin",
                                     "stable",
                                     _cwd=identifier,
                                     _iter=True):
                    log.info(line)
Example #9
    def _baseline_context(self) -> Iterator[None]:
        """
        Runs a block of code on files from the current branch HEAD.

        :raises ActionFailure: If git cannot detect a HEAD commit
        :raises ActionFailure: If unmerged files are detected
        """
        repo = get_git_repo()

        if not repo:
            yield
            return

        self._abort_on_pending_changes()
        self._abort_on_conflicting_untracked_paths()

        current_tree = git("write-tree").stdout.decode().strip()
        try:
            for a in self._status.added:
                a.unlink()
            git.checkout(self._base_commit, "--", ".")
            yield
        finally:
            # git checkout will fail if the checked-out index deletes all files in the repo
            # In this case, we still want to continue without error.
            # Note that we have no good way of detecting this issue without inspecting the checkout output
            # message, which means we are fragile with respect to git version here.
            try:
                git.checkout(current_tree.strip(), "--", ".")
            except sh.ErrorReturnCode as error:
                output = error.stderr.decode()
                if (output and len(output) >= 2 and
                        "pathspec '.' did not match any file(s) known to git"
                        in output.strip()):
                    debug_echo(
                        "Restoring git index failed due to total repository deletion; skipping checkout"
                    )
                else:
                    raise ActionFailure(
                        f"Fatal error restoring Git state; please restore your repository state manually:\n{output}"
                    )

            if self._status.removed:
                # Need to check if file exists since it is possible file was deleted
                # in both the base and head. Only call if there are files to delete
                to_remove = [r for r in self._status.removed if r.exists()]
                if to_remove:
                    git.rm("-f", *(str(r) for r in to_remove))
Example #10
    def pull_layers(self):
        """ clone all downstream layers to be processed locally when doing charm builds
        """
        if self.rebuild_cache:
            click.echo("-  rebuild cache triggered, cleaning out cache.")
            shutil.rmtree(str(self.layers_dir))
            shutil.rmtree(str(self.interfaces_dir))
            os.mkdir(str(self.layers_dir))
            os.mkdir(str(self.interfaces_dir))

        layers_to_pull = []
        for layer_map in self.layers:
            layer_name = list(layer_map.keys())[0]

            if layer_name == "layer:index":
                continue

            layers_to_pull.append(layer_name)

        pool = ThreadPool()
        pool.map(self.download, layers_to_pull)

        self.db["pull_layer_manifest"] = []
        _paths_to_process = {
            "layer": glob("{}/*".format(str(self.layers_dir))),
            "interface": glob("{}/*".format(str(self.interfaces_dir))),
        }
        for prefix, paths in _paths_to_process.items():
            for _path in paths:
                build_path = _path
                if not build_path:
                    raise BuildException(
                        f"Could not determine build path for {_path}")

                git.checkout(self.layer_branch, _cwd=build_path)

                layer_manifest = {
                    "rev":
                    git("rev-parse", "HEAD",
                        _cwd=build_path).stdout.decode().strip(),
                    "url":
                    f"{prefix}:{Path(build_path).stem}",
                }
                self.db["pull_layer_manifest"].append(layer_manifest)
                click.echo(
                    f"- {layer_manifest['url']} at commit: {layer_manifest['rev']}"
                )
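    # The download() callable mapped over the ThreadPool above is not shown; a
    # hypothetical sketch, assuming "layer:foo"/"interface:foo" names map to
    # repositories that can be cloned onto self.layer_branch (the URL scheme
    # below is a placeholder, not the project's real mapping):
    def download(self, layer_name):
        prefix, name = layer_name.split(":", 1)
        base_dir = self.layers_dir if prefix == "layer" else self.interfaces_dir
        dst = Path(str(base_dir)) / name
        if not dst.exists():
            git.clone("--branch", self.layer_branch,
                      f"https://github.com/example/{prefix}-{name}.git",  # placeholder URL
                      str(dst))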
Example #11
def test_watch(monkeypatch):
    # XXX TODO: expand
    def wrap_watch(pattern, force=False):
        cmd = 'watch{0}'.format('-force' if force else '')
        msg = Fake({
            "_client": {
                "host": "stackexchange.com",
                "get_user": lambda id: Fake({"name": "J F", "id": id})
            },
            "owner": {"name": "ArtOfCode", "id": 121520},
            "room": {"id": 11540, "get_current_user_ids": lambda: [161943]},
            # Ouch, this is iffy
            # Prevent an error from deep inside do_blacklist
            "content_source": '!!/{0} {1}'.format(cmd, pattern)
        })
        msg.room._client = msg._client

        return chatcommands.watch(pattern, alias_used=cmd, original_msg=msg)

    # Prevent from attempting to check privileges with Metasmoke
    monkeypatch.setattr(GlobalVars, "code_privileged_users", [1, 161943])

    try:
        # Invalid regex
        resp = wrap_watch(r'?')
        assert "An invalid pattern was provided" in resp

        # This is one of the perpetually condemned spam domains, blacklisted forever
        resp = wrap_watch(r'israelbigmarket')
        assert "That pattern looks like it's already caught" in resp

        # The phone number here is the first one in this format in bad_keywords.txt
        resp = wrap_watch(r'[a-z_]*(?:1_*)?913[\W_]*608[\W_]*4584[a-z_]*')
        assert "Mostly non-latin" not in resp
        assert "Bad keyword in answer" in resp
        assert "Bad keyword in body" in resp

        # XXX TODO: figure out how to trigger duplicate entry separately
        monkeypatch.setattr("chatcommunicate.is_privileged", lambda *args: True)
        monkeypatch.setattr("gitmanager.GitManager.prepare_git_for_operation", lambda *args: (True, None))

        assert wrap_watch("trimfire", True).startswith("Already watched")

        monkeypatch.setattr("gitmanager.GitManager.add_to_blacklist", lambda *args, **kwargs: (True, "Hahaha"))
        assert wrap_watch("male enhancement", True) == "Hahaha"
    finally:
        git.checkout("master")
Example #12
    def _baseline_context(self) -> Iterator[None]:
        """
        Runs a block of code on files from the current branch HEAD.

        :raises RuntimeError: If git cannot detect a HEAD commit
        :raises RuntimeError: If unmerged files are detected
        """
        repo = get_git_repo()

        if not repo:
            yield
            return

        self._abort_if_dirty()

        current_tree = git("write-tree").stdout.decode().strip()
        try:
            for a in self._status.added:
                a.unlink()
            git.checkout(self._base_commit, "--", ".")
            yield
        finally:
            # git checkout will fail if the checked-out index deletes all files in the repo
            # In this case, we still want to continue without error.
            # Note that we have no good way of detecting this issue without inspecting the checkout output
            # message, which means we are fragile with respect to git version here.
            try:
                git.checkout(current_tree.strip(), "--", ".")
            except sh.ErrorReturnCode as error:
                output = error.stderr.decode()
                if (output and len(output) >= 2 and
                        "pathspec '.' did not match any file(s) known to git"
                        in output.strip()):
                    debug_echo(
                        "Restoring git index failed due to total repository deletion; skipping checkout"
                    )
                else:
                    raise error

            if self._status.removed:
                git.rm("-f", *(str(r) for r in self._status.removed))
Example #13
def feature(ticket, project_directory, name):
    """
    USAGE
        git-feature <ticket> <project folder> <name>

    DESCRIPTION
        Create a new GIT branch, tracking master, and setting upstream, named
        following the pattern 'feature/<ticket>__<project folder>__<name>'. Other git-tools
        commands will look for names following this pattern. Spaces in <name> will be
        replaced with dashes.
    """
    name__interspersed_dashes = "-".join(re.split(r"\s+", name))
    br_name = f"feature/{ticket}__{project_directory}__{name__interspersed_dashes}"
    click.echo(f"Calling git-feature to create {br_name}")
    try:
        print(git.checkout("master", _err_to_out=True))
        print(sh.jira.view(ticket))
        print(git.checkout("-t", "-b", br_name, _err_to_out=True))
        print(git.branch("--set-upstream-to=master", _err_to_out=True))
    except sh.ErrorReturnCode as e:
        click.echo(f"Something went wrong!\n\nException: {e}")
        return 1
    return 0
Example #14
def main(args: argparse.Namespace) -> None:
    check_path()

    rel = version.parse(args.release)
    assert isinstance(rel, version.Version)

    major = rel.major
    minor = rel.minor
    patch = rel.micro

    print("Release:", rel)
    if not rel.is_prerelease:
        # Major release
        rc: Optional[str] = None
        rc_ver: Optional[int] = None
    else:
        # RC release
        major = rel.major
        minor = rel.minor
        patch = rel.micro
        assert rel.pre is not None
        rc, rc_ver = rel.pre
        assert rc == "rc"

    release = str(major) + "." + str(minor) + "." + str(patch)
    branch = "release_" + release
    git.clean("-xdf")
    git.checkout(branch)
    git.pull("origin", branch)
    git.submodule("update")
    commit_hash = latest_hash()

    download_r_packages(
        release, "" if rc is None else rc + str(rc_ver), commit_hash
    )

    download_py_packages(major, minor, commit_hash)
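# Not part of the original script: a small check of how packaging.version
# splits a release-candidate string, which is what the RC branch above relies on.
from packaging import version

rc_rel = version.parse("1.5.0rc1")
assert (rc_rel.major, rc_rel.minor, rc_rel.micro) == (1, 5, 0)
assert rc_rel.is_prerelease and rc_rel.pre == ("rc", 1)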
Example #15
def _fix_head_for_github(
    base_ref_name: Optional[str] = None,
    head_ref: Optional[str] = None,
) -> Iterator[Optional[str]]:
    """
    GHA can check out the incorrect commit for a PR (it will create a fake merge commit),
    so we need to reset the head to the actual PR branch head before continuing.

    Note that this code is written in a generic manner, so that it becomes a no-op when
    the CI system has not artificially altered the HEAD ref.

    :return: The baseline ref as a commit hash
    """
    debug_echo(
        f"Called _fix_head_for_github with base_ref_name: {base_ref_name} head_ref: {head_ref}"
    )

    stashed_rev: Optional[str] = None
    base_ref: Optional[str] = base_ref_name

    if get_git_repo() is None:
        debug_echo("Yielding base_ref since get_git_repo was None")
        yield base_ref
        return

    if base_ref:
        # Preserve location of head^ after we possibly change location below
        try:
            debug_echo(f"Calling git rev-parse {base_ref}")
            process = git(["rev-parse", base_ref])
            base_ref = process.stdout.decode("utf-8").rstrip()
        except sh.ErrorReturnCode as ex:
            raise ActionFailure(
                f"There is a problem with your git project:{ex}")

    if head_ref:
        debug_echo("Calling git branch --show-current")
        stashed_rev = git(["branch",
                           "--show-current"]).stdout.decode("utf-8").rstrip()
        debug_echo(f"stashed_rev: {stashed_rev}")
        if not stashed_rev:
            debug_echo("Calling git rev-parse HEAD")
            rev_parse = git(["rev-parse", "HEAD"])
            debug_echo(rev_parse.stderr.decode("utf-8").rstrip())
            stashed_rev = rev_parse.stdout.decode("utf-8").rstrip()
            debug_echo(f"stashed_rev: {stashed_rev}")

        click.echo(f"| not on head ref {head_ref}; checking that out now...",
                   err=True)
        git.checkout([head_ref],
                     _timeout=GIT_SH_TIMEOUT,
                     _out=debug_echo,
                     _err=debug_echo)
        debug_echo(f"checked out {head_ref}")

    try:
        if base_ref is not None:
            merge_base = git("merge-base", base_ref, "HEAD").rstrip()
            # fmt:off
            click.echo("| reporting findings introduced by these commits:",
                       err=True)
            print_git_log(f"{merge_base}..HEAD")
            if merge_base != git("rev-parse", base_ref).rstrip():
                click.echo(
                    "| also reporting findings fixed by these commits from the baseline branch:",
                    err=True)
                print_git_log(f"{merge_base}..{base_ref}")
                click.echo("| to exclude these latter commits, run with",
                           err=True)
                click.echo(
                    f"|   --baseline-ref $(git merge-base {base_ref_name} HEAD)",
                    err=True)
            # fmt: on
        debug_echo(f"yielding {base_ref}")
        yield base_ref
    finally:
        if stashed_rev is not None:
            click.echo(f"| returning to original head revision {stashed_rev}",
                       err=True)
            git.checkout([stashed_rev], _timeout=GIT_SH_TIMEOUT)
Example #16
def _tag_stable_forks(layer_list, charm_list, k8s_version, bundle_rev,
                      filter_by_tag, bugfix, dry_run):
    """Tags stable forks to a certain bundle revision for a k8s version

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    bundle_rev: bundle revision to tag for a particular version of k8s

    A git tag (e.g. ck-{bundle_rev}) means we tag the current stable
    branches for 1.14 with the latest Charmed Kubernetes (ck) bundle
    revision of {bundle_rev}.

    TODO: Switch to different merge strategy
    git checkout master
    git checkout -b staging
    git merge stable -s ours
    git checkout stable
    git reset staging
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()
    for layer_map in layer_list + charm_list:
        for layer_name, repos in layer_map.items():

            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue

            downstream = repos["downstream"]
            if bugfix:
                tag = f"{k8s_version}+{bundle_rev}"
            else:
                tag = f"ck-{k8s_version}-{bundle_rev}"
            if not repos.get("needs_tagging", True):
                log.info(f"Skipping {layer_name} :: does not require tagging")
                continue

            log.info(f"Tagging {layer_name} ({tag}) :: {repos['downstream']}")
            if not dry_run:
                downstream = f"https://{new_env['CDKBOT_GH_USR']}:{new_env['CDKBOT_GH_PSW']}@github.com/{downstream}"
                identifier = str(uuid.uuid4())
                os.makedirs(identifier)
                for line in git.clone(downstream, identifier, _iter=True):
                    log.info(line)
                git.config("user.email",
                           "*****@*****.**",
                           _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("stable", _cwd=identifier)
                try:
                    for line in git.tag("--force",
                                        tag,
                                        _cwd=identifier,
                                        _iter=True,
                                        _bg_exc=False):
                        log.info(line)
                    for line in git.push(
                            "--force",
                            "origin",
                            tag,
                            _cwd=identifier,
                            _bg_exc=False,
                            _iter=True,
                    ):
                        log.info(line)
                except sh.ErrorReturnCode as error:
                    log.info(
                        f"Problem tagging: {error.stderr.decode().strip()}, will skip for now.."
                    )
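# The docstring's TODO merge strategy, expressed with the same sh-based git
# wrapper used above (a sketch only, with _cwd=identifier as in the loop above):
git.checkout("master", _cwd=identifier)
git.checkout("-b", "staging", _cwd=identifier)
git.merge("stable", "-s", "ours", _cwd=identifier)
git.checkout("stable", _cwd=identifier)
git.reset("staging", _cwd=identifier)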
Example #17
def build_fw(board, build_ref, test_log):  # pylint: disable=too-many-locals,too-many-statements
    """ Builds the firmware at `build_ref` for `board`. Firmware will be
        output to `.fw_builds/<build_ref>/<board>/`.

    :param str board: Name of the board to build firmware for.
    :param str build_ref: The tag/commit to build firmware for.
    :param test_log: The TestController.log used for output.
    """
    working_dir = os.getcwd()
    cirpy_ports_dir = pathlib.Path(cirpy_dir(), "ports")

    board_port_dir = None
    for port in _AVAILABLE_PORTS:
        port_dir = cirpy_ports_dir / port / "boards" / board
        if port_dir.exists():
            board_port_dir = (cirpy_ports_dir / port).resolve()
            rosiepi_logger.info("Board source found: %s", board_port_dir)
            break

    if board_port_dir is None:
        err_msg = [
            f"'{board}' board not available to test. Can't build firmware.",
            #"="*60,
            #"Closing RosiePi"
        ]
        raise RuntimeError("\n".join(err_msg))

    build_dir = pathlib.Path.home() / ".fw_builds" / build_ref[:5] / board

    os.chdir(cirpy_dir())
    try:
        test_log.write("Fetching {}...".format(build_ref))
        git.fetch("--depth", "1", "origin", build_ref)

        test_log.write("Checking out {}...".format(build_ref))
        git.checkout(build_ref)

        test_log.write("Syncing submodules...")
        git.submodule("sync")

        test_log.write("Updating submodules...")
        git.submodule("update", "--init", "--depth", "1")
    except sh.ErrorReturnCode as git_err:
        # TODO: change to 'master'
        git.checkout("-f", "rosiepi_test")
        os.chdir(working_dir)
        err_msg = [
            "Building firmware failed:",
            " - {}".format(str(git_err.stderr, encoding="utf-8").strip("\n")),
            #"="*60,
            #"Closing RosiePi"
        ]
        raise RuntimeError("\n".join(err_msg)) from None

    os.chdir(board_port_dir)
    board_cmd = (f"make clean BOARD={board} BUILD={build_dir}",
                 f"make BOARD={board} BUILD={build_dir}")

    test_log.write("Building firmware...")
    try:
        rosiepi_logger.info("Running make recipe: %s", '; '.join(board_cmd))
        run_envs = {
            "BASH_ENV": "/etc/profile",
        }

        rosiepi_logger.info("Running build clean...")
        # pylint: disable=subprocess-run-check
        subprocess.run(
            board_cmd[0],
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.STDOUT,
            executable="/usr/bin/bash",
            start_new_session=True,
            env=run_envs,
        )

        build_dir.mkdir(mode=0o0774, parents=True)

        # pylint: enable=subprocess-run-check
        rosiepi_logger.info("Running firmware build...")
        fw_build = subprocess.run(
            board_cmd[1],
            check=True,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            executable="/usr/bin/bash",
            start_new_session=True,
            env=run_envs,
        )

        result = str(fw_build.stdout, encoding="utf-8").split("\n")
        success_msg = [line for line in result if "bytes" in line]
        test_log.write(" - " + "\n - ".join(success_msg))
        rosiepi_logger.info("Firmware built...")

    except subprocess.CalledProcessError as cmd_err:
        # TODO: change to 'master'
        git.checkout("-f", "rosiepi_test")
        os.chdir(working_dir)
        err_msg = [
            "Building firmware failed:",
            " - {}".format(str(cmd_err.stdout, encoding="utf-8").strip("\n")),
            #"="*60,
            #"Closing RosiePi"
        ]
        rosiepi_logger.warning("Firmware build failed...")
        raise RuntimeError("\n".join(err_msg)) from None

    finally:
        # TODO: change to 'master'
        git.checkout("-f", "rosiepi_test")
        os.chdir(working_dir)

    return build_dir
Example #18
parser.add_argument('--configure',
                    help='re-configure the build',
                    action='store_true')

args = parser.parse_args()

# --- install the source code

if args.gitclone:
    print("git clone the source code")

    # use 1.1.4, not latest and not 1.2, until told otherwise
    rm('-fr', 'libtorrent')
    git.clone('https://github.com/arvidn/libtorrent.git')
    cd('libtorrent')
    git.checkout('RC_1_1')  # gets 1.1.4   <<----- DO THIS
else:
    # ensure cd to libtorrent
    cd('libtorrent')

# --- configure
if args.configure:
    print("configure")

    cmd = sh.Command('./autotool.sh')
    cmd()

    configure = sh.Command("./configure")
    configure('--libdir=/usr/lib/i386-linux-gnu/', 'LDFLAGS=-L/usr/local/lib/',
              "--with-boost-libdir=/usr/lib/i386-linux-gnu/",
              "--enable-encryption", "--prefix=/usr/local/", "--disable-debug",
Example #19
def _cut_stable_release(layer_list, charm_list, ancillary_list, filter_by_tag,
                        dry_run):
    """This will merge each layer's master onto the stable branches.

    PLEASE NOTE: This step should come after each stable branch has been tagged
    and references a current stable bundle revision.

    layer_list: YAML spec containing git repos and their upstream/downstream properties
    charm_list: YAML spec containing git repos and their upstream/downstream properties
    """
    layer_list = yaml.safe_load(Path(layer_list).read_text(encoding="utf8"))
    charm_list = yaml.safe_load(Path(charm_list).read_text(encoding="utf8"))
    ancillary_list = yaml.safe_load(
        Path(ancillary_list).read_text(encoding="utf8"))
    new_env = os.environ.copy()

    failed_to_release = []
    for layer_map in layer_list + charm_list + ancillary_list:
        for layer_name, repos in layer_map.items():
            downstream = repos["downstream"]
            if not repos.get("needs_stable", True):
                continue

            tags = repos.get("tags", None)
            if tags:
                if not any(match in filter_by_tag for match in tags):
                    continue

            auth = (new_env.get("CDKBOT_GH_USR"), new_env.get("CDKBOT_GH_PSW"))
            default_branch = repos.get("branch") or default_gh_branch(
                downstream, auth=auth)

            log.info(
                f"Releasing :: {layer_name:^35} :: from: {default_branch} to: stable"
            )
            downstream = f"https://{':'.join(auth)}@github.com/{downstream}"
            identifier = str(uuid.uuid4())
            os.makedirs(identifier)
            for line in git.clone(downstream, identifier, _iter=True):
                log.info(line)
            git_rev_default = (git("rev-parse",
                                   f"origin/{default_branch}",
                                   _cwd=identifier).stdout.decode().strip())
            git_rev_stable = (git("rev-parse",
                                  "origin/stable",
                                  _cwd=identifier).stdout.decode().strip())
            if git_rev_default == git_rev_stable:
                log.info(
                    f"Skipping  :: {layer_name:^35} :: {default_branch} == stable"
                )
                continue
            log.info(
                f"Commits   :: {layer_name:^35} :: {default_branch} != stable")
            log.info(f"  {default_branch:10}= {git_rev_default:32}")
            log.info(f"  {'stable':10}= {git_rev_stable:32}")
            for line in git("rev-list",
                            f"origin/stable..origin/{default_branch}",
                            _cwd=identifier):
                for line in git.show(
                        "--format=%h %an '%s' %cr",
                        "--no-patch",
                        line.strip(),
                        _cwd=identifier,
                ):
                    log.info("    " + line.strip())
            if not dry_run:
                git.config("user.email",
                           "*****@*****.**",
                           _cwd=identifier)
                git.config("user.name", "cdkbot", _cwd=identifier)
                git.config("--global", "push.default", "simple")
                git.checkout("-f", "stable", _cwd=identifier)
                git.reset(default_branch, _cwd=identifier)
                for line in git.push("origin",
                                     "stable",
                                     "-f",
                                     _cwd=identifier,
                                     _iter=True):
                    log.info(line)