Example #1
def remove_package(install, repository, package_id):
    """Remove a package from the local repository.

    Errors if the package is activated in install.

    install: pkgpanda.Install
    repository: pkgpanda.Repository
    package_id: package ID to remove from repository

    """
    if package_id in install.get_active():
        raise PackageConflict("Refusing to remove active package {0}".format(package_id))

    sys.stdout.write("\rRemoving: {0}".format(package_id))
    sys.stdout.flush()
    try:
        # Validate the package id before removing the package from the repository.
        PackageId(package_id)
        repository.remove(package_id)
    except ValidationError as ex:
        raise ValidationError("Invalid package id {0}".format(package_id)) from ex
    except OSError as ex:
        raise Exception("Error removing package {0}: {1}".format(package_id, ex)) from ex
    else:
        sys.stdout.write("\rRemoved: {0}".format(package_id))
    finally:
        sys.stdout.write("\n")
        sys.stdout.flush()
Example #2
def check_forbidden_services(path, services):
    """Check if package contains systemd services that may break DC/OS

    This function checks the contents of systemd's unit file dirs and
    raises an exception if there are reserved services inside.

    Args:
        path: path where the package contents are
        services: list of reserved services to look for

    Raises:
        ValidationError: Reserved service names were found inside the package
    """
    services_dir_regexp = re.compile(r'dcos.target.wants(?:_.+)?')
    forbidden_srv_set = set(services)
    pkg_srv_set = set()

    for direntry in os.listdir(path):
        if not services_dir_regexp.match(direntry):
            continue
        pkg_srv_set.update(set(os.listdir(os.path.join(path, direntry))))

    found_units = forbidden_srv_set.intersection(pkg_srv_set)

    if found_units:
        msg = "Reverved unit names found: " + ','.join(found_units)
        raise ValidationError(msg)
Example #3
    def checkout_to(self, directory):
        # Fetch into a bare repository so that, on a host which already has a
        # cache, we only fetch the new commits.
        fetch_git(self.bare_folder, self.url)

        # Warn if the ref_origin is set and gives a different sha1 than the
        # current ref.
        try:
            origin_commit = get_git_sha1(self.bare_folder, self.ref_origin)
        except Exception as ex:
            raise ValidationError(
                "Unable to find sha1 of ref_origin {}: {}".format(
                    self.ref_origin, ex))
        if self.ref != origin_commit:
            logger.warning(
                "Current ref doesn't match the ref origin. "
                "Package ref should probably be updated to pick up "
                "new changes to the code:" +
                " Current: {}, Origin: {}".format(self.ref, origin_commit))

        # Clone into `src/`.
        if is_windows:
            # Note: Mesos requires autocrlf to be set on Windows otherwise it does not build.
            check_call([
                "git", "clone", "-q", "--config", "core.autocrlf=true",
                self.bare_folder, directory
            ])
        else:
            check_call(["git", "clone", "-q", self.bare_folder, directory])

        # Checkout from the bare repo in the cache folder at the specific sha1
        check_call([
            "git", "--git-dir", directory + "/.git", "--work-tree", directory,
            "checkout", "-f", "-q", self.ref
        ])
Example #4
def validate_name(name):
    # [a-zA-Z0-9@._+-]
    # May not start with '.' or '-'.
    if not re.match(name_regex, name):
        raise ValidationError(
            "Invalid package name {0}. Must match the regex {1}".format(
                name, name_regex))
Example #5
File: __init__.py Project: zhaixuepan/dcos
def symlink_tree(src, dest):
    for name in os.listdir(src):
        src_path = os.path.join(src, name)
        dest_path = os.path.join(dest, name)
        # Symlink files and symlinks directly. For directories make a
        # real directory and symlink everything inside.
        # NOTE: We could relax this and follow symlinks, but then we
        # need to be careful about recursive filesystem layouts.
        if os.path.isdir(src_path) and not os.path.islink(src_path):
            if os.path.exists(dest_path):
                # We can only merge a directory into a directory.
                # We won't merge into a symlink directory because that could
                # result in a package editing inside another package.
                if not os.path.isdir(dest_path) and not os.path.islink(
                        dest_path):
                    raise ValidationError(
                        "Can't merge a file `{0}` and directory (or symlink) `{1}` with the same name."
                        .format(src_path, dest_path))
            else:
                os.makedirs(dest_path)

            # Recurse into the directory, symlinking everything, since the
            # source directory isn't a symlink.
            symlink_tree(src_path, dest_path)
        else:
            try:
                os.symlink(src_path, dest_path)
            except FileNotFoundError as ex:
                raise ConflictingFile(src_path, dest_path, ex) from ex
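A hypothetical usage sketch for Example #5 (the paths are assumptions, not from the source): real directories are recreated and their contents symlinked, so two packages can contribute entries to the same directory.

# Illustrative only; these paths are assumed.
symlink_tree("/opt/mesosphere/packages/foo--v1", "/opt/mesosphere/active-root")
symlink_tree("/opt/mesosphere/packages/bar--v1", "/opt/mesosphere/active-root")
# A file in one package colliding with a directory in the other raises ValidationError.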
Example #6
    def validate_group_name(group_name):
        if not group_name:
            return

        if not re.match(linux_group_regex, group_name):
            raise ValidationError("Group {} has invalid name, must match the following regex: {}".format(
                group_name, linux_group_regex))
Example #7
def validate_version(version):
    # [a-zA-Z0-9@._+:]
    # May not contain a '-'.
    if not re.match(version_regex, version):
        raise ValidationError(
            "Invalid package version {0}. Must match the regex {1}".format(
                version, version_regex))
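The name_regex and version_regex referenced in Examples #4 and #7 are defined elsewhere in pkgpanda; a plausible definition consistent with the comments above (an assumption, not the project's actual source) would be:

# Assumed patterns matching the documented character classes:
# names draw from [a-zA-Z0-9@._+-] but may not start with '.' or '-';
# versions draw from [a-zA-Z0-9@._+:] and may not contain '-'.
name_regex = r'^[a-zA-Z0-9@_+][a-zA-Z0-9@._+-]*$'
version_regex = r'^[a-zA-Z0-9@._+:]+$'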
Example #8
def add_package_file(repository, package_filename):
    """Add a package to the repository from a file.

    repository: pkgpanda.Repository
    package_filename: location of the package file

    """
    filename_suffix = '.tar.xz'
    # Extract Package Id (Filename must be path/{pkg-id}.tar.xz).
    name = os.path.basename(package_filename)

    if not name.endswith(filename_suffix):
        raise ValidationError(
            "ERROR: Can only add package tarballs which have names like "
            "{{pkg-id}}{}".format(filename_suffix))

    pkg_id = name[:-len(filename_suffix)]

    # Validate the package id
    PackageId(pkg_id)

    def fetch(_, target):
        extract_tarball(package_filename, target)

    repository.add(fetch, pkg_id)
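A hedged usage sketch: the basename must be {pkg-id}.tar.xz, so the call below (the path is illustrative) registers the package under the id mesos--1.9.0.

add_package_file(repository, "/var/cache/packages/mesos--1.9.0.tar.xz")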
Example #9
def swap_active_package(install, repository, package_id, systemd, block_systemd):
    """Replace an active package with a package_id with the same name.

    swap(install, repository, 'foo--version') will replace the active 'foo'
    package with 'foo--version'.

    install: pkgpanda.Install
    repository: pkgpanda.Repository
    package_id: package ID to activate
    systemd: start/stop systemd services
    block_systemd: if systemd, block waiting for systemd services to come up

    """
    active = install.get_active()
    # TODO(cmaloney): I guarantee there is a better way to write this and
    # I've written the same logic before...
    packages_by_name = dict()
    for id_str in active:
        pkg_id = PackageId(id_str)
        packages_by_name[pkg_id.name] = pkg_id

    new_id = PackageId(package_id)
    if new_id.name not in packages_by_name:
        raise ValidationError("No package with name {} currently active to swap with.".format(new_id.name))

    packages_by_name[new_id.name] = new_id
    new_active = list(map(str, packages_by_name.values()))
    # Activate with the new package name
    activate_packages(install, repository, new_active, systemd, block_systemd)
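An illustrative trace of the swap (the package ids are assumed):

# Suppose install.get_active() returns {"foo--v1", "bar--v2"}.
swap_active_package(install, repository, "foo--v2", systemd=False, block_systemd=False)
# activate_packages then receives ["foo--v2", "bar--v2"]; swapping in
# "baz--v1" instead would raise ValidationError since no "baz" is active.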
Example #10
File: __init__.py Project: SStar1314/dcos
def validate_compatible(packages, roles):
    # Every package name appears only once.
    names = set()
    ids = set()
    tuples = set()
    for package in packages:
        if package.name in names:
            raise ValidationError(
                "Repeated name {0} in set of packages {1}".format(
                    package.name, ' '.join(map(lambda x: str(x.id),
                                               packages))))
        names.add(package.name)
        ids.add(str(package.id))
        tuples.add((package.name, package.variant))

    # All requires are met.
    # NOTE: Requires are given just to make it harder to accidentally
    # break a cluster.

    # Environment variables in packages, mapping from variable to package.
    environment = dict()

    for package in packages:

        # Check that all requirements of the package are met.
        # Requirements can be specified on a package name or full version string.
        for requirement in package.requires:
            name, variant = expand_require(requirement)
            if name not in names:
                raise ValidationError(
                    ("Package {} variant {} requires {} variant {} but that " +
                     "is not in the set of packages {}").format(
                         package.id, package.variant, name, variant,
                         ', '.join(str(x.id) for x in packages)))

        # No repeated/conflicting environment variables with other packages or
        # with magic system environment variables.
        for k in package.environment:
            if k in reserved_env_vars:
                raise ValidationError(
                    "{0} are reserved environment vars and cannot be specified in packages. Present in package {1}"
                    .format(", ".join(reserved_env_vars), package))
            if k in environment:
                raise ValidationError(
                    "Repeated environment variable {0}. In both packages {1} and {2}."
                    .format(k, environment[k], package))
            environment[k] = package
Example #11
    def add_user(self, username, group):
        UserManagement.validate_username(username)

        if not self._manage_users:
            return

        # Check if the user already exists and exit.
        try:
            UserManagement.validate_user_group(username, group)
            self._users.add(username)
            return
        except KeyError:
            # User doesn't exist yet; fall through and create it.
            pass

        # If we're not allowed to add users, error
        if not self._add_users:
            raise ValidationError(
                "User {} doesn't exist but is required by a DC/OS Component, and "
                "automatic user addition is disabled".format(username))

        # Add the user:
        add_user_cmd = [
            'useradd',
            '--system',
            '--home-dir',
            '/opt/mesosphere',
            '--shell',
            '/sbin/nologin',
            '-c',
            'DCOS System User',
        ]

        if group is not None:
            UserManagement.validate_group(group)
            add_user_cmd += ['-g', group]

        add_user_cmd += [username]

        try:
            check_output(add_user_cmd)
            self._users.add(username)
        except CalledProcessError as ex:
            raise ValidationError(
                "User {} doesn't exist and couldn't be created because of: {}".
                format(username, ex.output))
Example #12
    def __init__(self, src_info, cache_dir):
        super().__init__(src_info)

        assert self.kind == 'git'

        if src_info.keys() != {'kind', 'git', 'ref', 'ref_origin'}:
            raise ValidationError(
                "git source must have keys 'git' (the repo to fetch), 'ref' (the sha-1 to "
                "checkout), and 'ref_origin' (the branch/tag ref was derived from)")

        if not is_sha(src_info['ref']):
            raise ValidationError("ref must be a sha1. Got: {}".format(src_info['ref']))

        self.url = src_info['git']
        self.ref = src_info['ref']
        self.ref_origin = src_info['ref_origin']
        self.bare_folder = cache_dir + "/cache.git"
Example #13
def get_git_sha1(bare_folder, ref):
    try:
        return check_output(
            ["git", "--git-dir", bare_folder, "rev-parse",
             ref + "^{commit}"]).decode('ascii').strip()
    except CalledProcessError as ex:
        raise ValidationError("Unable to find ref '{}' in '{}': {}".format(
            ref, bare_folder, ex)) from ex
Example #14
    def validate_group(group):
        # Empty group is allowed.
        if not group:
            return

        UserManagement.validate_group_name(group)

        try:
            grp.getgrnam(group)
        except KeyError:
            raise ValidationError("Group {} does not exist on the system".format(group))
Example #15
    def parse(id: str):
        parts = id.split('--')
        if len(parts) != 2:
            raise ValidationError(
                "Invalid package id {0}. Package ids may only ".format(id) +
                "contain one '--' which seperates the name and version")

        PackageId.validate_name(parts[0])
        PackageId.validate_version(parts[1])

        return parts[0], parts[1]
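A quick sketch of the expected behavior (the values are illustrative):

name, version = PackageId.parse("mesos--1.9.0")   # -> ("mesos", "1.9.0")
PackageId.parse("mesos-1.9.0")                    # raises ValidationError: no '--' separator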
Example #16
def extract_archive(archive, dst_dir):
    archive_type = _identify_archive_type(archive)

    if archive_type == 'tar':
        check_call(["tar", "-xf", archive, "--strip-components=1", "-C", dst_dir])
    elif archive_type == 'zip':
        check_call(["unzip", "-x", archive, "-d", dst_dir])
        # The unzip binary does not support '--strip-components=1', so strip it manually.
        _strip_first_path_component(dst_dir)
    else:
        raise ValidationError("Unsupported archive: {}".format(os.path.basename(archive)))
Example #17
File: actions.py Project: zhous1q/dcos
def _get_package_list(package_list_id: str, repository_url: str) -> List[str]:
    package_list_url = repository_url + '/package_lists/{}.package_list.json'.format(package_list_id)
    with tempfile.NamedTemporaryFile() as f:
        download(f.name, package_list_url, os.getcwd(), rm_on_error=False)
        package_list = load_json(f.name)

    if not isinstance(package_list, list):
        raise ValidationError('{} should contain a JSON list of packages. Got a {}'.format(
            package_list_url, type(package_list)
        ))

    return package_list
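An illustrative call (values assumed); the fetched URL appends /package_lists/{id}.package_list.json to the repository URL.

packages = _get_package_list("abc123", "https://example.com/repo")
# fetches https://example.com/repo/package_lists/abc123.package_list.json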
Example #18
    def __init__(self, src_info, cache_dir, working_directory):
        super().__init__(src_info)

        assert self.kind == 'git_local'

        if src_info.keys() > {'kind', 'rel_path'}:
            raise ValidationError(
                "Only kind, rel_path can be specified for git_local")
        if os.path.isabs(src_info['rel_path']):
            raise ValidationError(
                "rel_path must be a relative path to the current directory "
                "when used with git_local. Using a relative path means others "
                "that clone the repository will have things just work rather "
                "than a path specific to one machine.")
        self.src_repo_path = os.path.normpath(working_directory + '/' +
                                              src_info['rel_path']).rstrip('/')

        # Make sure there are no local changes, we can't `git clone` local changes.
        try:
            git_status = check_output([
                'git', '-C', self.src_repo_path, 'status', '--porcelain',
                '-uno', '-z'
            ]).decode()
            if git_status:
                raise ValidationError(
                    "No local changes are allowed in the git_local_work base repository. "
                    "Use `git -C {0} status` to see local changes. "
                    "All local changes must be committed or stashed before the "
                    "package can be built. One workflow (temporary commit): `git -C {0} "
                    "commit -am TMP` to commit everything, build the package, "
                    "`git -C {0} reset --soft HEAD^` to get back to where you were.\n\n"
                    "Found changes: {1}".format(self.src_repo_path,
                                                git_status))
        except CalledProcessError:
            raise ValidationError(
                "Unable to check status of git_local_work checkout {}. Is the "
                "rel_path correct?".format(src_info['rel_path']))

        self.commit = get_git_sha1(self.src_repo_path + "/.git", "HEAD")
Example #19
File: __init__.py Project: SStar1314/dcos
def expand_require(require):
    name = None
    variant = None
    if isinstance(require, str):
        name = require
    elif isinstance(require, dict):
        if 'name' not in require or 'variant' not in require:
            raise ValidationError(
                "When specifying a dependency in requires by dictionary to " +
                "depend on a variant both the name of the package and the " +
                "variant name must always be specified")
        name = require['name']
        variant = require['variant']

    if PackageId.is_id(name):
        raise ValidationError(
            "ERROR: Specifying a dependency on '" + name + "', an exact "
            "package id, isn't allowed. Dependencies may be specified by "
            "package name alone or package name + variant (to change the "
            "package variant).")

    return (name, variant)
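Both accepted spellings, with illustrative values:

expand_require("mesos")                                   # -> ("mesos", None)
expand_require({"name": "mesos", "variant": "windows"})   # -> ("mesos", "windows")
expand_require("mesos--1.9.0")                            # raises ValidationError: exact ids not allowed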
Example #20
    def __init__(self, name, src_info, package_dir):
        super().__init__(name, src_info, package_dir)

        assert self.kind in {'url', 'url_extract'}

        if src_info.keys() != {'kind', 'sha1', 'url'}:
            raise ValidationError(
                "url and url_extract sources must have exactly 'sha1' (sha1 of the artifact"
                " which will be downloaded), and 'url' (url to download artifact) as options")

        self.url = src_info['url']
        self.extract = (self.kind == 'url_extract')
        self.cache_filename = self._get_filename(package_dir + "/cache")
        self.sha = src_info['sha1']
Example #21
    def validate_user_group(username, group_name):
        user = pwd.getpwnam(username)
        if not group_name:
            return

        group = grp.getgrnam(group_name)
        if user.pw_gid != group.gr_gid:

            # Check if the user is in the right group, but the group is not primary.
            if username in group.gr_mem:
                return

            raise ValidationError(
                "User {} exists with primary GID {}, but should be assigned to group {} with GID {}, please "
                "check `buildinfo.json`".format(username, user.pw_gid, group_name, group.gr_gid))
Example #22
def extract_archive(archive, dst_dir):
    archive_type = _identify_archive_type(archive)

    if archive_type == 'tar':
        if is_windows:
            check_call(["bsdtar", "-xf", archive, "-C", dst_dir])
        else:
            check_call(["tar", "-xf", archive, "--strip-components=1", "-C", dst_dir])
    elif archive_type == 'zip':
        if is_windows:
            check_call(["powershell.exe", "-command", "expand-archive", "-path", archive, "-destinationpath", dst_dir])
        else:
            check_call(["unzip", "-x", archive, "-d", dst_dir])
        # The unzip binary does not support '--strip-components=1', so strip it manually.
        _strip_first_path_component(dst_dir)
    else:
        raise ValidationError("Unsupported archive: {}".format(os.path.basename(archive)))
Example #23
def _check_components_sanity(path):
    """Check if archive is sane

    Check if there is only one top level component (directory) in the extracted
    archive's directory.

    Args:
        path: path to the extracted archive's directory

    Raises:
        ValidationError: if there is anything other than a single directory
    """
    dir_contents = os.listdir(path)

    if len(dir_contents) != 1 or not os.path.isdir(os.path.join(path, dir_contents[0])):
        raise ValidationError("Extracted archive has more than one top level"
                              "component, unable to strip it.")
Example #24
def get_src_fetcher(src_info, cache_dir, working_directory):
    try:
        kind = src_info['kind']
        if kind not in pkgpanda.build.src_fetchers.all_fetchers:
            raise ValidationError(
                "No known way to fetch src with kind '{}'. Known kinds: {}".
                format(kind, pkgpanda.build.src_fetchers.all_fetchers.keys()))

        args = {'src_info': src_info, 'cache_dir': cache_dir}

        if src_info['kind'] in ['git_local', 'url', 'url_extract']:
            args['working_directory'] = working_directory

        return pkgpanda.build.src_fetchers.all_fetchers[kind](**args)
    except ValidationError as ex:
        raise BuildError(
            "Validation error when fetching sources for package: {}".format(
                ex))
Example #25
    def __init__(self, src_info, cache_dir, working_directory):
        super().__init__(src_info)

        assert self.kind in {'url', 'url_extract'}

        if 'kind' not in src_info or 'sha1' not in src_info or 'url' not in src_info:
            raise ValidationError(
                "url and url_extract sources must have exactly 'sha1' (sha1 of the artifact"
                " which will be downloaded), and 'url' (url to download artifact) as options")

        self.url = src_info['url']
        self.extract = (self.kind == 'url_extract')
        self.cache_dir = cache_dir
        self.cache_filename = self._get_filename(cache_dir)
        self.working_directory = working_directory
        self.sha = src_info['sha1']
Example #26
    def checkout_to(self, directory):
        # Download file to cache if it isn't already there
        if not os.path.exists(self.cache_filename):
            print("Downloading source tarball {}".format(self.url))
            download_atomic(self.cache_filename, self.url, self.working_directory)

        # Validate that the sha1 of the downloaded file matches the expected sha1.
        file_sha = sha1(self.cache_filename)

        if self.sha != file_sha:
            corrupt_filename = self.cache_filename + '.corrupt'
            check_call(['mv', self.cache_filename, corrupt_filename])
            raise ValidationError(
                "Provided sha1 didn't match sha1 of downloaded file, corrupt download saved as {}. "
                "Provided: {}, Download file's sha1: {}, Url: {}".format(
                    corrupt_filename, self.sha, file_sha, self.url))

        if self.extract:
            extract_archive(self.cache_filename, directory)
        else:
            # Copy the file(s) into src/
            # TODO(cmaloney): Hardlink to save space?
            shutil.copyfile(self.cache_filename, self._get_filename(directory))
Example #27
File: actions.py Project: zhous1q/dcos
def fetcher(id, target):
    if repository_url is None:
        raise ValidationError("ERROR: Non-local package {} but no repository url given.".format(id))
    return requests_fetcher(repository_url, id, target, os.getcwd())
Example #28
File: actions.py Project: zhous1q/dcos
def _do_bootstrap(install, repository):
    # These files should be set by the environment which initially builds
    # the host (cloud-init).
    repository_url = if_exists(load_string, install.get_config_filename("setup-flags/repository-url"))

    def fetcher(id, target):
        if repository_url is None:
            raise ValidationError("ERROR: Non-local package {} but no repository url given.".format(id))
        return requests_fetcher(repository_url, id, target, os.getcwd())

    setup_pkg_dir = install.get_config_filename("setup-packages")
    if os.path.exists(setup_pkg_dir):
        raise ValidationError(
            "setup-packages is no longer supported. It's functionality has been replaced with late "
            "binding packages. Found setup packages dir: {}".format(setup_pkg_dir))

    setup_packages_to_activate = []

    # If the host has late config values, build the late config package from them.
    late_config = if_exists(load_yaml, install.get_config_filename("setup-flags/late-config.yaml"))
    if late_config:
        pkg_id_str = late_config['late_bound_package_id']
        late_values = late_config['bound_values']
        print("Binding late config to late package {}".format(pkg_id_str))
        print("Bound values: {}".format(late_values))

        if not PackageId.is_id(pkg_id_str):
            raise ValidationError("Invalid late package id: {}".format(pkg_id_str))
        pkg_id = PackageId(pkg_id_str)
        if pkg_id.version != "setup":
            raise ValidationError("Late package must have the version setup. Bad package: {}".format(pkg_id_str))

        # Collect the late config package.
        with tempfile.NamedTemporaryFile() as f:
            download(
                f.name,
                repository_url + '/packages/{0}/{1}.dcos_config'.format(pkg_id.name, pkg_id_str),
                os.getcwd(),
                rm_on_error=False,
            )
            late_package = load_yaml(f.name)

        # Resolve the late package using the bound late config values.
        final_late_package = resolve_late_package(late_package, late_values)

        # Render the package onto the filesystem and add it to the package
        # repository.
        with tempfile.NamedTemporaryFile() as f:
            do_gen_package(final_late_package, f.name)
            repository.add(lambda _, target: extract_tarball(f.name, target), pkg_id_str)
        setup_packages_to_activate.append(pkg_id_str)

    # If active.json is set on the host, use that as the set of packages to
    # activate. Otherwise just use the set of currently active packages (those
    # active in the bootstrap tarball)
    to_activate = None
    active_path = install.get_config_filename("setup-flags/active.json")
    if os.path.exists(active_path):
        print("Loaded active packages from", active_path)
        to_activate = load_json(active_path)

        # Ensure all packages are local
        print("Ensuring all packages in active set {} are local".format(",".join(to_activate)))
        for package in to_activate:
            repository.add(fetcher, package)
    else:
        print("Calculated active packages from bootstrap tarball")
        to_activate = list(install.get_active())

        package_list_filename = install.get_config_filename("setup-flags/cluster-package-list")
        print("Checking for cluster packages in:", package_list_filename)
        package_list_id = if_exists(load_string, package_list_filename)
        if package_list_id:
            print("Cluster package list:", package_list_id)
            cluster_packages = _get_package_list(package_list_id, repository_url)
            print("Loading cluster-packages: {}".format(cluster_packages))

            for package_id_str in cluster_packages:
                # Validate the package ids
                pkg_id = PackageId(package_id_str)

                # Fetch the packages if not local
                if not repository.has_package(package_id_str):
                    repository.add(fetcher, package_id_str)

                # Add the package to the set to activate
                setup_packages_to_activate.append(package_id_str)
        else:
            print("No cluster-packages specified")

    # Calculate the full set of final packages (Explicit activations + setup packages).
    # De-duplicate using a set.
    to_activate = list(set(to_activate + setup_packages_to_activate))

    print("Activating packages")
    install.activate(repository.load_packages(to_activate))
Example #29
File: actions.py Project: zouyee/dcos
def _do_bootstrap(install, repository):
    # These files should be set by the environment which initially builds
    # the host (cloud-init).
    repository_url = if_exists(
        load_string, install.get_config_filename("setup-flags/repository-url"))

    # TODO(cmaloney): If there is 1+ master, grab the active config from a master.
    # If the config can't be grabbed from any of them, fail.
    def fetcher(id, target):
        if repository_url is None:
            raise ValidationError(
                "ERROR: Non-local package {} but no repository url given.".
                format(id))
        return requests_fetcher(repository_url, id, target, os.getcwd())

    # Copy host/cluster-specific packages written to the filesystem manually
    # from the setup-packages folder into the repository. Do not overwrite or
    # merge existing packages, hard fail instead.
    setup_packages_to_activate = []
    setup_pkg_dir = install.get_config_filename("setup-packages")
    copy_fetcher = partial(_copy_fetcher, setup_pkg_dir)
    if os.path.exists(setup_pkg_dir):
        for pkg_id_str in os.listdir(setup_pkg_dir):
            print("Installing setup package: {}".format(pkg_id_str))
            if not PackageId.is_id(pkg_id_str):
                raise ValidationError(
                    "Invalid package id in setup package: {}".format(
                        pkg_id_str))
            pkg_id = PackageId(pkg_id_str)
            if pkg_id.version != "setup":
                raise ValidationError(
                    "Setup packages (those in `{0}`) must have the version setup. "
                    "Bad package: {1}".format(setup_pkg_dir, pkg_id_str))

            # Make sure there is no existing package
            if repository.has_package(pkg_id_str):
                print("WARNING: Ignoring already installed package {}".format(
                    pkg_id_str))

            repository.add(copy_fetcher, pkg_id_str)
            setup_packages_to_activate.append(pkg_id_str)

    # If active.json is set on the host, use that as the set of packages to
    # activate. Otherwise just use the set of currently active packages (those
    # active in the bootstrap tarball)
    to_activate = None
    active_path = install.get_config_filename("setup-flags/active.json")
    if os.path.exists(active_path):
        print("Loaded active packages from", active_path)
        to_activate = load_json(active_path)

        # Ensure all packages are local
        print("Ensuring all packages in active set {} are local".format(
            ",".join(to_activate)))
        for package in to_activate:
            repository.add(fetcher, package)
    else:
        print("Calculated active packages from bootstrap tarball")
        to_activate = list(install.get_active())

        # Fetch and activate all requested additional packages to accompany the bootstrap packages.
        cluster_packages_filename = install.get_config_filename(
            "setup-flags/cluster-packages.json")
        cluster_packages = if_exists(load_json, cluster_packages_filename)
        print("Checking for cluster packages in:", cluster_packages_filename)
        if cluster_packages:
            if not isinstance(cluster_packages, list):
                print(
                    'ERROR: {} should contain a JSON list of packages. Got a {}'
                    .format(cluster_packages_filename, type(cluster_packages)))
            print("Loading cluster-packages: {}".format(cluster_packages))

            for package_id_str in cluster_packages:
                # Validate the package ids
                pkg_id = PackageId(package_id_str)

                # Fetch the packages if not local
                if not repository.has_package(package_id_str):
                    repository.add(fetcher, package_id_str)

                # Add the package to the set to activate
                setup_packages_to_activate.append(package_id_str)
        else:
            print("No cluster-packages specified")

    # Calculate the full set of final packages (Explicit activations + setup packages).
    # De-duplicate using a set.
    to_activate = list(set(to_activate + setup_packages_to_activate))

    print("Activating packages")
    install.activate(repository.load_packages(to_activate))
Example #30
def build(package_store, name, variant, clean_after_build, recursive=False):
    assert isinstance(package_store, PackageStore)
    print("Building package {} variant {}".format(
        name, pkgpanda.util.variant_str(variant)))
    tmpdir = tempfile.TemporaryDirectory(prefix="pkgpanda_repo")
    repository = Repository(tmpdir.name)

    package_dir = package_store.get_package_folder(name)

    def src_abs(name):
        return package_dir + '/' + name

    def cache_abs(filename):
        return package_store.get_package_cache_folder(name) + '/' + filename

    # Build pkginfo over time, translating fields from buildinfo.
    pkginfo = {}

    # Build up the docker command arguments over time, translating fields as needed.
    cmd = DockerCmd()

    assert (name, variant) in package_store.packages, \
        "Programming error: name, variant should have been validated to be valid before calling build()."
    buildinfo = copy.deepcopy(package_store.get_buildinfo(name, variant))

    if 'name' in buildinfo:
        raise BuildError(
            "'name' is not allowed in buildinfo.json, it is implicitly the name of the "
            "folder containing the buildinfo.json")

    # Convert single_source -> sources
    try:
        sources = expand_single_source_alias(name, buildinfo)
    except ValidationError as ex:
        raise BuildError(
            "Invalid buildinfo.json for package: {}".format(ex)) from ex

    # Save the final sources back into buildinfo so it gets written into
    # buildinfo.json. This also means buildinfo.json is always expanded form.
    buildinfo['sources'] = sources

    # Construct the source fetchers, gather the checkout ids from them
    checkout_ids = dict()
    fetchers = dict()
    try:
        for src_name, src_info in sorted(sources.items()):
            # TODO(cmaloney): Switch to a unified top level cache directory shared by all packages
            cache_dir = package_store.get_package_cache_folder(
                name) + '/' + src_name
            check_call(['mkdir', '-p', cache_dir])
            fetcher = get_src_fetcher(src_info, cache_dir, package_dir)
            fetchers[src_name] = fetcher
            checkout_ids[src_name] = fetcher.get_id()
    except ValidationError as ex:
        raise BuildError(
            "Validation error when fetching sources for package: {}".format(
                ex))

    for src_name, checkout_id in checkout_ids.items():
        # NOTE: single_source buildinfo was expanded above so the src_name is
        # always correct here.
        # Make sure we never accidentally overwrite something which might be
        # important. Fields should match if specified (And that should be
        # tested at some point). For now disallowing identical saves hassle.
        assert_no_duplicate_keys(checkout_id, buildinfo['sources'][src_name])
        buildinfo['sources'][src_name].update(checkout_id)

    # Add the sha1 of the buildinfo.json + build file to the build ids
    build_ids = {"sources": checkout_ids}
    build_ids['build'] = pkgpanda.util.sha1(src_abs(buildinfo['build_script']))
    build_ids['pkgpanda_version'] = pkgpanda.build.constants.version
    build_ids['variant'] = '' if variant is None else variant

    extra_dir = src_abs("extra")
    # Add the "extra" folder inside the package as an additional source if it
    # exists
    if os.path.exists(extra_dir):
        extra_id = hash_folder(extra_dir)
        build_ids['extra_source'] = extra_id
        buildinfo['extra_source'] = extra_id

    # Figure out the docker name.
    docker_name = buildinfo['docker']
    cmd.container = docker_name

    # Add the id of the docker build environment to the build_ids.
    try:
        docker_id = get_docker_id(docker_name)
    except CalledProcessError:
        # docker pull the container and try again
        check_call(['docker', 'pull', docker_name])
        docker_id = get_docker_id(docker_name)

    build_ids['docker'] = docker_id

    # TODO(cmaloney): The environment variables should be generated during build
    # not live in buildinfo.json.
    build_ids['environment'] = buildinfo['environment']

    # Packages need directories inside the fake install root (otherwise docker
    # will try making the directories on a readonly filesystem), so build the
    # install root now, and make the package directories in it as we go.
    install_dir = tempfile.mkdtemp(prefix="pkgpanda-")

    active_packages = list()
    active_package_ids = set()
    active_package_variants = dict()
    auto_deps = set()
    # Verify all requires are in the repository.
    if 'requires' in buildinfo:
        # Final package has the same requires as the build.
        pkginfo['requires'] = buildinfo['requires']

        # TODO(cmaloney): Pull generating the full set of requires out into a function.
        to_check = copy.deepcopy(buildinfo['requires'])
        if not isinstance(to_check, list):
            raise BuildError(
                "`requires` in buildinfo.json must be an array of dependencies."
            )
        while to_check:
            requires_info = to_check.pop(0)
            requires_name, requires_variant = expand_require(requires_info)

            if requires_name in active_package_variants:
                # TODO(cmaloney): If one package depends on the <default>
                # variant of a package and 1+ others depends on a non-<default>
                # variant then update the dependency to the non-default variant
                # rather than erroring.
                if requires_variant != active_package_variants[requires_name]:
                    # TODO(cmaloney): Make this contain the chains of
                    # dependencies which contain the conflicting packages.
                    # a -> b -> c -> d {foo}
                    # e {bar} -> d {baz}
                    raise BuildError(
                        "Dependncy on multiple variants of the same package {}. "
                        "variants: {} {}".format(
                            requires_name, requires_variant,
                            active_package_variants[requires_name]))

                # The package {requires_name, variant} is already a
                # dependency; don't process it again / move on to the next.
                continue

            active_package_variants[requires_name] = requires_variant

            # Figure out the last build of the dependency, add that as the
            # fully expanded dependency.
            requires_last_build = package_store.get_last_build_filename(
                requires_name, requires_variant)
            if not os.path.exists(requires_last_build):
                if recursive:
                    # Build the dependency
                    build(package_store, requires_name, requires_variant,
                          clean_after_build, recursive)
                else:
                    raise BuildError(
                        "No last build file found for dependency {} variant {}. Rebuild "
                        "the dependency".format(requires_name,
                                                requires_variant))

            try:
                pkg_id_str = load_string(requires_last_build)
                auto_deps.add(pkg_id_str)
                pkg_buildinfo = package_store.get_buildinfo(
                    requires_name, requires_variant)
                pkg_requires = pkg_buildinfo['requires']
                pkg_path = repository.package_path(pkg_id_str)
                pkg_tar = pkg_id_str + '.tar.xz'
                if not os.path.exists(
                        package_store.get_package_cache_folder(requires_name) +
                        '/' + pkg_tar):
                    raise BuildError(
                        "The build tarball {} refered to by the last_build file of the "
                        "dependency {} variant {} doesn't exist. Rebuild the dependency."
                        .format(pkg_tar, requires_name, requires_variant))

                active_package_ids.add(pkg_id_str)

                # Mount the package into the docker container.
                cmd.volumes[pkg_path] = "/opt/mesosphere/packages/{}:ro".format(pkg_id_str)
                os.makedirs(os.path.join(install_dir, "packages/{}".format(pkg_id_str)))

                # Add the dependencies of the package to the set which will be
                # activated.
                # TODO(cmaloney): All these 'transitive' dependencies shouldn't
                # be available to the package being built, only what depends on
                # them directly.
                to_check += pkg_requires
            except ValidationError as ex:
                raise BuildError(
                    "validating package needed as dependency {0}: {1}".format(
                        requires_name, ex)) from ex
            except PackageError as ex:
                raise BuildError(
                    "loading package needed as dependency {0}: {1}".format(
                        requires_name, ex)) from ex

    # Add requires to the package id, calculate the final package id.
    # NOTE: active_packages isn't fully constructed here since we lazily load
    # packages not already in the repository.
    build_ids['requires'] = list(active_package_ids)
    version_base = hash_checkout(build_ids)
    version = None
    if "version_extra" in buildinfo:
        version = "{0}-{1}".format(buildinfo["version_extra"], version_base)
    else:
        version = version_base
    pkg_id = PackageId.from_parts(name, version)

    # Save the build_ids. Useful for verifying exactly what went into the
    # package build hash.
    buildinfo['build_ids'] = build_ids
    buildinfo['package_version'] = version

    # Save the package name and variant. The variant is used when installing
    # packages to validate dependencies.
    buildinfo['name'] = name
    buildinfo['variant'] = variant

    # If the package is already built, don't do anything.
    pkg_path = package_store.get_package_cache_folder(
        name) + '/{}.tar.xz'.format(pkg_id)

    # Done if it exists locally
    if exists(pkg_path):
        print("Package up to date. Not re-building.")

        # TODO(cmaloney): Updating / filling last_build should be moved out of
        # the build function.
        write_string(package_store.get_last_build_filename(name, variant),
                     str(pkg_id))

        return pkg_path

    # Try downloading.
    dl_path = package_store.try_fetch_by_id(pkg_id)
    if dl_path:
        print(
            "Package up to date. Not re-building. Downloaded from repository-url."
        )
        # TODO(cmaloney): Updating / filling last_build should be moved out of
        # the build function.
        write_string(package_store.get_last_build_filename(name, variant),
                     str(pkg_id))
        print(dl_path, pkg_path)
        assert dl_path == pkg_path
        return pkg_path

    # Fall out and do the build since it couldn't be downloaded
    print("Unable to download from cache. Proceeding to build")

    print("Building package {} with buildinfo: {}".format(
        pkg_id, json.dumps(buildinfo, indent=2, sort_keys=True)))

    # Clean out src, result so later steps can use them freely for building.
    def clean():
        # Run a docker container to remove src/ and result/
        cmd = DockerCmd()
        cmd.volumes = {
            package_store.get_package_cache_folder(name): "/pkg/:rw",
        }
        cmd.container = "ubuntu:14.04.4"
        cmd.run(["rm", "-rf", "/pkg/src", "/pkg/result"])

    clean()

    # Only fresh builds are allowed which don't overlap existing artifacts.
    result_dir = cache_abs("result")
    if exists(result_dir):
        raise BuildError(
            "result folder must not exist. It will be made when the package is "
            "built. {}".format(result_dir))

    # 'mkpanda add' all implicit dependencies since we actually need to build.
    for dep in auto_deps:
        print("Auto-adding dependency: {}".format(dep))
        # NOTE: Not using the name pkg_id because that overrides the outer one.
        id_obj = PackageId(dep)
        add_package_file(repository, package_store.get_package_path(id_obj))
        package = repository.load(dep)
        active_packages.append(package)

    # Check out all the sources into their respective 'src/' folders.
    try:
        src_dir = cache_abs('src')
        if os.path.exists(src_dir):
            raise ValidationError(
                "'src' directory already exists, did you have a previous build? "
                "Currently all builds must be from scratch. Support should be "
                "added for re-using a src directory when possible. src={}".format(src_dir))
        os.mkdir(src_dir)
        for src_name, fetcher in sorted(fetchers.items()):
            root = cache_abs('src/' + src_name)
            os.mkdir(root)

            fetcher.checkout_to(root)
    except ValidationError as ex:
        raise BuildError(
            "Validation error when fetching sources for package: {}".format(
                ex))

    # Copy over environment settings
    pkginfo['environment'] = buildinfo['environment']

    # Whether pkgpanda should make sure a `/var/lib` state directory is available on the host
    pkginfo['state_directory'] = buildinfo.get('state_directory', False)
    if pkginfo['state_directory'] not in [True, False]:
        raise BuildError(
            "state_directory in buildinfo.json must be a boolean `true` or `false`"
        )

    username = buildinfo.get('username')
    if not (username is None or isinstance(username, str)):
        raise BuildError(
            "username in buildinfo.json must be either not set (no user for this"
            " package), or a user name string")
    if username:
        try:
            pkgpanda.UserManagement.validate_username(username)
        except ValidationError as ex:
            raise BuildError(
                "username in buildinfo.json didn't meet the validation rules. {}"
                .format(ex))
    pkginfo['username'] = username

    # Activate the packages so that we have a proper path, environment
    # variables.
    # TODO(cmaloney): RAII type thing for temporary directory so if we
    # don't get all the way through things will be cleaned up?
    install = Install(root=install_dir,
                      config_dir=None,
                      rooted_systemd=True,
                      manage_systemd=False,
                      block_systemd=True,
                      fake_path=True,
                      manage_users=False,
                      manage_state_dir=False)
    install.activate(active_packages)
    # Rewrite all the symlinks inside the active path because we will
    # be mounting the folder into a docker container, and the absolute
    # paths to the packages will change.
    # TODO(cmaloney): This isn't very clean, it would be much nicer to
    # just run pkgpanda inside the package.
    rewrite_symlinks(install_dir, repository.path, "/opt/mesosphere/packages/")

    print("Building package in docker")

    # TODO(cmaloney): Run as a specific non-root user, make it possible
    # for non-root to cleanup afterwards.
    # Run the build, prepping the environment as necessary.
    mkdir(cache_abs("result"))

    # Copy the build info to the resulting tarball
    write_json(cache_abs("src/buildinfo.full.json"), buildinfo)
    write_json(cache_abs("result/buildinfo.full.json"), buildinfo)

    write_json(cache_abs("result/pkginfo.json"), pkginfo)

    # Make the folder for the package we are building. If docker does it, it
    # gets auto-created with root permissions and we can't actually delete it.
    os.makedirs(os.path.join(install_dir, "packages", str(pkg_id)))

    # TODO(cmaloney): Disallow writing to well known files and directories?
    # Source we checked out
    cmd.volumes.update({
        # TODO(cmaloney): src should be read only...
        cache_abs("src"): "/pkg/src:rw",
        # The build script
        src_abs(buildinfo['build_script']): "/pkg/build:ro",
        # Getting the result out
        cache_abs("result"): "/opt/mesosphere/packages/{}:rw".format(pkg_id),
        install_dir: "/opt/mesosphere:ro"
    })

    if os.path.exists(extra_dir):
        cmd.volumes[extra_dir] = "/pkg/extra:ro"

    cmd.environment = {
        "PKG_VERSION": version,
        "PKG_NAME": name,
        "PKG_ID": pkg_id,
        "PKG_PATH": "/opt/mesosphere/packages/{}".format(pkg_id),
        "PKG_VARIANT": variant if variant is not None else "<default>"
    }

    try:
        # TODO(cmaloney): Run a wrapper which sources
        # /opt/mesosphere/environment then runs a build. Also should fix
        # ownership of /opt/mesosphere/packages/{pkg_id} post build.
        cmd.run([
            "/bin/bash", "-o", "nounset", "-o", "pipefail", "-o", "errexit",
            "/pkg/build"
        ])
    except CalledProcessError as ex:
        raise BuildError("docker exited non-zero: {}\nCommand: {}".format(
            ex.returncode, ' '.join(ex.cmd)))

    # Clean up the temporary install dir used for dependencies.
    # TODO(cmaloney): Move to an RAII wrapper.
    check_call(['rm', '-rf', install_dir])

    print("Building package tarball")

    # Check for forbidden services before packaging the tarball:
    try:
        check_forbidden_services(cache_abs("result"), RESERVED_UNIT_NAMES)
    except ValidationError as ex:
        raise BuildError("Package validation failed: {}".format(ex))

    # TODO(cmaloney): Updating / filling last_build should be moved out of
    # the build function.
    write_string(package_store.get_last_build_filename(name, variant),
                 str(pkg_id))

    # Bundle the artifacts into the pkgpanda package
    tmp_name = pkg_path + "-tmp.tar.xz"
    make_tar(tmp_name, cache_abs("result"))
    os.rename(tmp_name, pkg_path)
    print("Package built.")
    if clean_after_build:
        clean()
    return pkg_path