Example #1
def create_github_release(
    repository: Repository,
    version: str,
) -> None:
    """
    Create a tag and release on GitHub.
    """
    changelog_url = 'https://dcos-e2e.readthedocs.io/en/latest/changelog.html'
    release_name = 'Release ' + version
    release_message = 'See ' + changelog_url
    github_release = repository.create_git_tag_and_release(
        tag=version,
        tag_message='Release ' + version,
        release_name=release_name,
        release_message=release_message,
        type='commit',
        object=repository.get_commits()[0].sha,
        draft=False,
    )

    # The artifacts we build must be built from the tag we just created.
    # This tag is created remotely on GitHub using the GitHub HTTP API.
    #
    # We therefore fetch all refs from GitHub and set our local HEAD to the
    # latest master from GitHub.
    #
    # One visible result of this is that ``minidcos --version`` from the
    # PyInstaller binary shows the correct version.
    local_repository = Repo('.')
    client = HttpGitClient(repository.owner.html_url)
    remote_refs = client.fetch(repository.name + '.git', local_repository)

    # Update the local tags and references with the remote ones.
    for key, value in remote_refs.items():
        local_repository.refs[key] = value

    # Advance local HEAD to remote master HEAD.
    local_repository[b'HEAD'] = remote_refs[b'refs/heads/master']

    # We must build the artifacts just after creating the tag so that the
    # ``--version`` output exactly matches the tag.
    # A draft GitHub release does not create the tag, so the release cannot be
    # a draft; this means that, temporarily, the release exists without
    # binaries.
    linux_artifacts = make_linux_binaries(repo_root=Path('.'))
    for installer_path in linux_artifacts:
        github_release.upload_asset(
            path=str(installer_path),
            label=installer_path.name,
        )
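
A short usage sketch for the function above, assuming a PyGithub client; the access token, repository name, and version string below are placeholders, not values from the project:

from github import Github

# Hypothetical usage: authenticate with a personal access token, look up the
# repository, and create the tagged release for a version string.
github_client = Github('<github-access-token>')
repository = github_client.get_repo('<owner>/<repository>')
create_github_release(repository=repository, version='<version>')
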
Example #2
def test_linux_binaries() -> None:
    """
    ``make_linux_binaries`` creates binaries which can be run on Linux.
    """

    binary_paths = make_linux_binaries(
        repo_root=Path(__file__).parent.parent.parent,
    )
    binary_path_names = set(path.name for path in binary_paths)
    assert binary_path_names == {'dcos-docker', 'dcos-aws', 'dcos-vagrant'}

    mounts = []
    remote_binaries_dir = Path('/binaries')
    remote_paths = []
    for path in binary_paths:
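        # Bind-mount the binary from the host into the container at
        # /binaries/<name>.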
        remote_path = remote_binaries_dir / path.name
        mounts.append(
            Mount(
                source=str(path.absolute()),
                target=str(remote_path),
                type='bind',
            ),
        )
        remote_paths.append(remote_path)

    client = docker.from_env(version='auto')

    for remote_path in remote_paths:
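        # Make the binary executable inside the container, then check that
        # ``--help`` exits successfully; a non-zero exit status raises
        # ``docker.errors.ContainerError`` and fails the test.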
        cmd_in_container = [
            'chmod',
            '+x',
            str(remote_path),
            '&&',
            str(remote_path),
            '--help',
        ]
        cmd = 'bash -c "{cmd}"'.format(cmd=' '.join(cmd_in_container))
        client.containers.run(
            image='python:3.6',
            mounts=mounts,
            command=cmd,
            remove=True,
        )
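
``make_linux_binaries`` itself is not shown on this page. A minimal sketch of what such a helper could look like, assuming PyInstaller one-file builds and a ``bin/`` directory of entry-point scripts; both the layout and the use of these flags are assumptions, not taken from the project:

from pathlib import Path
from typing import Set

import PyInstaller.__main__


def make_linux_binaries(repo_root: Path) -> Set[Path]:
    """
    Hypothetical sketch: build a one-file executable for each script in
    ``bin/`` and return the paths of the built binaries.
    """
    dist_dir = repo_root / 'dist'
    for script in (repo_root / 'bin').iterdir():
        PyInstaller.__main__.run([
            str(script),
            '--onefile',
            '--name', script.name,
            '--distpath', str(dist_dir),
        ])
    return set(dist_dir.iterdir())
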
Example #3
def test_linux_binaries() -> None:
    """
    ``make_linux_binaries`` creates a binary which can be run on Linux.
    """
    repo_root = Path(__file__).parent.parent.parent.absolute()
    binary_paths = make_linux_binaries(repo_root=repo_root)
    binary_path_names = set(path.name for path in binary_paths)
    assert binary_path_names == {'minidcos'}
    mounts = []
    remote_repo_dir = Path('/repo')

    mounts.append(
        Mount(
            source=str(repo_root),
            target=str(remote_repo_dir),
            type='bind',
        ),
    )

    remote_paths = []
    for path in binary_paths:
        relative_path = path.relative_to(repo_root)
        remote_path = remote_repo_dir / str(relative_path)
        remote_paths.append(remote_path)

    client = docker.from_env(version='auto')

    for remote_path in remote_paths:
        # Unset LANG and LC_ALL to show that these are not necessary for the
        # CLI to run.
        # This was a problem when the binaries were built with Python < 3.7.
        cmd_in_container = [
            'unset',
            'LANG',
            '&&',
            'unset',
            'LC_ALL',
            '&&',
            'chmod',
            '+x',
            str(remote_path),
            '&&',
            str(remote_path),
            '--version',
            '&&',
            'rm',
            '-rf',
            str(remote_path),
        ]
        command = 'bash -c "{cmd}"'.format(cmd=' '.join(cmd_in_container))
        container = client.containers.create(
            image='python:3.7',
            mounts=mounts,
            command=command,
        )

        container.start()
        for line in container.logs(stream=True):
            line = line.decode().strip()
            LOGGER.info(line)

        status_code = container.wait()['StatusCode']
        assert status_code == 0
        container.stop()
        container.remove(v=True)
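
The create/start/logs/wait sequence above streams the container's output while it runs. If streaming is not needed, docker-py can do the whole check in one call: ``containers.run`` raises ``docker.errors.ContainerError`` when the command exits with a non-zero status, so the assertion on the status code becomes implicit. A sketch, reusing ``mounts`` and ``command`` from above:

# Equivalent one-shot check: ``containers.run`` raises
# ``docker.errors.ContainerError`` on a non-zero exit status and returns the
# combined logs as bytes.
logs = client.containers.run(
    image='python:3.7',
    mounts=mounts,
    command=command,
    remove=True,
)
LOGGER.info(logs.decode())
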
Example #4
def test_linux_binaries() -> None:
    """
    ``make_linux_binaries`` creates binaries which can be run on Linux.
    """
    repo_root = Path(__file__).parent.parent.parent.absolute()
    binary_paths = make_linux_binaries(repo_root=repo_root)
    binary_path_names = set(path.name for path in binary_paths)
    assert binary_path_names == {'vws', 'vuforia-cloud-reco'}
    remote_repo_dir = Path('/repo')

    mounts = [
        Mount(
            source=str(repo_root),
            target=str(remote_repo_dir),
            type='bind',
        ),
    ]

    remote_paths = []
    for path in binary_paths:
        relative_path = path.relative_to(repo_root)
        remote_path = remote_repo_dir / str(relative_path)
        remote_paths.append(remote_path)

    client = docker.from_env()
    # We use the Python image because this is already pulled when building the
    # image.
    #
    # Because of a click limitation, we do not support running in containers
    # which have LANG and LC_ALL unset.
    image = 'python:3.9'
    client.images.pull(image)

    for remote_path in remote_paths:
        cmd_in_container = [
            'chmod',
            '+x',
            str(remote_path),
            '&&',
            str(remote_path),
            '--version',
            '&&',
            'rm',
            '-rf',
            str(remote_path),
        ]
        command = 'bash -c "{cmd}"'.format(cmd=' '.join(cmd_in_container))
        container = client.containers.create(
            image=image,
            mounts=mounts,
            command=command,
        )

        container.start()
        for line in container.logs(stream=True):
            line = line.decode().strip()
            LOGGER.warning(line)

        status_code = container.wait()['StatusCode']

        assert status_code == 0
        container.stop()
        container.remove(v=True)
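
The comment above notes that, because of a click limitation, the CLI is not supported in containers where ``LANG`` and ``LC_ALL`` are unset. If such a container must be used anyway, one workaround is to set the variables explicitly when creating the container; the UTF-8 locale values below are an assumption for illustration, not taken from the project:

# Hypothetical workaround: provide a UTF-8 locale to the container so that
# click does not abort on an unset LANG / LC_ALL.
container = client.containers.create(
    image=image,
    mounts=mounts,
    command=command,
    environment={'LANG': 'C.UTF-8', 'LC_ALL': 'C.UTF-8'},
)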