Example #1
    def _build_dependencies(self):
        package_files = self.download_dependencies()

        def filter_files(package_files):
            for p in package_files:
                banned = False
                for k in self.BANNED_PACKAGES:
                    if k in p.lower():
                        banned = True
                if banned:
                    continue
                yield p

        package_files = [f for f in filter_files(package_files)]
        if not package_files:
            LOG.info("No RPM packages of OpenStack dependencies to build")
            return
        package_base_names = [sh.basename(f) for f in package_files]
        utils.log_iterable(sorted(package_base_names),
                           logger=LOG,
                           header=("Building %s dependency RPM"
                                   " packages") % (len(package_files)))
        with utils.progress_bar(name='Building',
                                max_am=len(package_files)) as p_bar:
            for (i, filename) in enumerate(sorted(package_files)):
                cmdline = self.py2rpm_start_cmdline() + ["--", filename]
                build_filename = "py2rpm-%s.out" % sh.basename(filename)
                out_filename = sh.joinpths(self.log_dir, build_filename)
                sh.execute_save_output(cmdline,
                                       out_filename=out_filename,
                                       quiet=True)
                p_bar.update(i + 1)
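In these Anvil snippets, sh refers to the project's shell/filesystem helper module rather than the standard library. Judging from the calls above, sh.basename and sh.joinpths look like thin wrappers over os.path; below is a minimal sketch of that assumption (only the names come from the examples, the bodies are guesses):

# Sketch of the assumed behaviour of the anvil sh helpers used throughout
# these examples; the real module may add logging, dry-run or root handling.
import os

def basename(path):
    # assumed to delegate to os.path.basename
    return os.path.basename(path)

def joinpths(*paths):
    # assumed to delegate to os.path.join
    return os.path.join(*paths)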
Example #2
 def _unpack_tar(self, file_name, file_location, tmp_dir):
     (root_name, _) = os.path.splitext(file_name)
     tar_members = self._filter_files(
         self._get_tar_file_members(file_location))
     (root_img_fn, ramdisk_fn,
      kernel_fn) = self._find_pieces(tar_members, file_location)
     if not root_img_fn:
         msg = "Tar file %r has no root image member" % (file_name)
         raise IOError(msg)
     kernel_real_fn = None
     root_real_fn = None
     ramdisk_real_fn = None
     self._log_pieces_found('archive', root_img_fn, ramdisk_fn, kernel_fn)
     extract_dir = sh.mkdir(sh.joinpths(tmp_dir, root_name))
     with contextlib.closing(tarfile.open(file_location, 'r')) as tfh:
         for m in tfh.getmembers():
             if m.name == root_img_fn:
                 root_real_fn = sh.joinpths(extract_dir,
                                            sh.basename(root_img_fn))
                 self._unpack_tar_member(tfh, m, root_real_fn)
             elif ramdisk_fn and m.name == ramdisk_fn:
                 ramdisk_real_fn = sh.joinpths(extract_dir,
                                               sh.basename(ramdisk_fn))
                 self._unpack_tar_member(tfh, m, ramdisk_real_fn)
             elif kernel_fn and m.name == kernel_fn:
                 kernel_real_fn = sh.joinpths(extract_dir,
                                              sh.basename(kernel_fn))
                 self._unpack_tar_member(tfh, m, kernel_real_fn)
     return self._describe(root_real_fn, ramdisk_real_fn, kernel_real_fn)
Example #3
    def _build_dependencies(self):
        package_files = self.download_dependencies()

        def filter_files(package_files):
            for p in package_files:
                banned = False
                for k in self.BANNED_PACKAGES:
                    if k in p.lower():
                        banned = True
                if banned:
                    continue
                yield p

        package_files = [f for f in filter_files(package_files)]
        if not package_files:
            LOG.info("No RPM packages of OpenStack dependencies to build")
            return
        package_base_names = [sh.basename(f) for f in package_files]
        utils.log_iterable(sorted(package_base_names), logger=LOG,
                           header=("Building %s dependency RPM"
                                   " packages") % (len(package_files)))
        with utils.progress_bar(name='Building',
                                max_am=len(package_files)) as p_bar:
            for (i, filename) in enumerate(sorted(package_files)):
                cmdline = self.py2rpm_start_cmdline() + ["--", filename]
                build_filename = "py2rpm-%s.out" % sh.basename(filename)
                out_filename = sh.joinpths(self.log_dir, build_filename)
                sh.execute_save_output(cmdline, out_filename=out_filename,
                                       quiet=True)
                p_bar.update(i + 1)
Example #4
 def _patches(self):
     in_patches = patcher.expand_patches(self.get_option('patches', 'package'))
     your_patches = []
     for path in in_patches:
         target_path = sh.joinpths(self.build_paths['sources'], sh.basename(path))
         sh.copy(path, target_path)
         your_patches.append(sh.basename(target_path))
     return your_patches
Example #6
 def pre_build():
     build_requirements = self.requirements.get("build-requires")
     if build_requirements:
         utils.log_iterable(build_requirements,
                            header="Installing build requirements",
                            logger=LOG)
         self.helper.transaction(install_pkgs=build_requirements,
                                 tracewriter=self.tracewriter)
     build_requirements = ''
     try:
         build_requirements = sh.load_file(self.rpm_build_requires_filename)
     except IOError as e:
         if e.errno != errno.ENOENT:
             raise
     build_requirements = set(pkg_resources.yield_lines(build_requirements))
     for repo_name in self.REPOS:
         repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
         matched_paths = []
         paths = list_src_rpms(repo_dir)
         envra_details = self.envra_helper.explode(*paths)
         for (path, envra_detail) in zip(paths, envra_details):
             package_name = envra_detail.get('name')
             if package_name in build_requirements:
                 matched_paths.append(path)
                 build_requirements.discard(package_name)
         if matched_paths:
             with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                 if not sh.isdir(prebuild_dir):
                     sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                 for path in matched_paths:
                     sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                 build(prebuild_dir, repo_name,
                       'Prebuilding %s RPM packages from their SRPMs'
                       ' for repo %s using %s jobs')
     return build_requirements
Example #7
def download(distro, uri, target_dir, **kwargs):
    puri = urlparse(uri)
    scheme = puri.scheme.lower()
    path = puri.path
    if scheme in ['git'] or path.find('.git') != -1:
        dirs_made = sh.mkdirslist(target_dir)
        downloader = GitDownloader(distro, uri, target_dir)
        downloader.download()
        return dirs_made
    if scheme in ['http', 'https']:
        dirs_made = []
        with utils.tempdir() as tdir:
            fn = sh.basename(path)
            downloader = UrlLibDownloader(uri, sh.joinpths(tdir, fn))
            downloader.download()
            if fn.endswith('.tar.gz'):
                dirs_made = sh.mkdirslist(target_dir)
                cmd = ['tar', '-xzvf', sh.joinpths(tdir, fn), '-C', target_dir]
                sh.execute(*cmd)
            elif fn.endswith('.zip'):
                # TODO(harlowja) this might not be 100% right...
                # we might have to move the finished directory...
                dirs_made = sh.mkdirslist(target_dir)
                cmd = ['unzip', sh.joinpths(tdir, fn), '-d', target_dir]
                sh.execute(*cmd)
            else:
                raise excp.DownloadException("Unable to extract %s downloaded from %s" % (fn, uri))
        return dirs_made
    else:
        raise excp.DownloadException("Unknown scheme %s, unable to download from %s" % (scheme, uri))
Example #8
def load(path, distros_patch=None):
    """Load configuration for all distros found in path.

    :param path: path containing distro configuration in yaml format
    :param distros_patch: distros file patch, JSON Patch format (RFC 6902)
    """
    distro_possibles = []
    patch = jsonpatch.JsonPatch(distros_patch) if distros_patch else None
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException(
            'Did not find any distro definition files in %r' % path)
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            cls_kvs = utils.load_yaml(fn)
            # Apply any user specified patches to distros file
            if patch:
                patch.apply(cls_kvs, in_place=True)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s', fn,
                        err)
        else:
            if 'name' not in cls_kvs:
                name, _ext = os.path.splitext(sh.basename(fn))
                cls_kvs['name'] = name
            distro_possibles.append(Distro(**cls_kvs))
    matches = _match_distros(distro_possibles)
    LOG.debug("Matched distros %s", [m.name for m in matches])
    return matches
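A hedged usage sketch for load above; the directory and the patch operation are made up, only the RFC 6902 operation shape comes from the docstring:

# Hypothetical call: load every distro definition under conf/distros/ and
# patch each YAML document in place before Distro objects are constructed.
distros_patch = [
    {"op": "add", "path": "/notes", "value": "patched by local config"},
]
for distro in load("conf/distros", distros_patch=distros_patch):
    print(distro.name)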
Example #9
 def _patches(self):
     your_patches = []
     in_patches = self.get_option('patches', 'package')
     if in_patches:
         for path in in_patches:
             path = sh.abspth(path)
             if sh.isdir(path):
                 for c_path in sh.listdir(path, files_only=True):
                     tgt_fn = sh.joinpths(self.build_paths['sources'], sh.basename(c_path))
                     sh.copy(c_path, tgt_fn)
                     your_patches.append(sh.basename(tgt_fn))
             else:
                 tgt_fn = sh.joinpths(self.build_paths['sources'], sh.basename(path))
                 sh.copy(path, tgt_fn)
                 your_patches.append(sh.basename(tgt_fn))
     return your_patches
Example #10
 def _copy_startup_scripts(self, spec_filename):
     common_init_content = utils.load_template("packaging",
                                               "common.init")[1]
     for src in rpm.spec(spec_filename).sources:
         script = sh.basename(src[0])
         if not (script.endswith(".init")):
             continue
         target_filename = sh.joinpths(self.rpm_sources_dir, script)
         if sh.isfile(target_filename):
             continue
         bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
         if bin_name == "quantum-server":
             daemon_args = ("'--config-file=/etc/quantum/plugin.ini"
                            " --config-file=/etc/quantum/quantum.conf'")
         elif bin_name == "quantum-l3-agent":
             daemon_args = ("'--config-file=/etc/quantum/l3_agent.ini"
                            " --config-file=/etc/quantum/quantum.conf'")
         elif bin_name == "quantum-dhcp-agent":
             daemon_args = ("'--config-file=/etc/quantum/dhcp_agent.ini"
                            " --config-file=/etc/quantum/quantum.conf'")
         else:
             daemon_args = ""
         params = {
             "bin": bin_name,
             "package": bin_name.split("-", 1)[0],
             "daemon_args": daemon_args,
         }
         sh.write_file(target_filename,
                       utils.expand_template(common_init_content, params))
Example #11
    def _build_openstack_package(self, instance):
        params = self._package_parameters(instance)
        patches = instance.list_patches("package")
        params['patches'] = [sh.basename(fn) for fn in patches]

        build_name = instance.get_option('build_name', default_value=instance.name)
        (rpm_name, template_name) = self._find_template_and_rpm_name(instance, build_name)
        try:
            egg_name = instance.egg_info['name']
            params["version"] = instance.egg_info["version"]
        except AttributeError:
            pass
        else:
            if any(s.endswith("client")
                   for s in (instance.name, egg_name, build_name)):
                client_name = utils.strip_prefix_suffix(egg_name, "python-", "client")
                if not client_name:
                    msg = "Bad client package name %s" % (egg_name)
                    raise excp.PackageException(msg)
                params["clientname"] = client_name
                params["apiname"] = instance.get_option(
                    'api_name', default_value=client_name.title())

        if all((rpm_name, template_name)):
            spec_filename = self._write_spec_file(instance, rpm_name,
                                                  template_name, params)
            self._build_from_spec(instance, spec_filename, patches)
        else:
            self.py2rpm_helper.build_srpm(source=instance.get_option("app_dir"),
                                          log_filename=instance.name,
                                          release=params.get("release"),
                                          with_tests=not params.get("no_tests"))
Example #12
 def _build_openstack_package(self, instance):
     params = self._package_parameters(instance)
     patches = instance.list_patches("package")
     params['patches'] = [sh.basename(fn) for fn in patches]
     (rpm_name, template_name) = self._get_template_and_rpm_name(instance)
     try:
         egg_name = instance.egg_info['name']
         params["version"] = instance.egg_info["version"]
         if self._is_client(instance.name, egg_name):
             client_name = utils.strip_prefix_suffix(
                 egg_name, "python-", "client")
             if not client_name:
                 msg = "Bad client package name %s" % (egg_name)
                 raise excp.PackageException(msg)
             params["clientname"] = client_name
             params["apiname"] = self.API_NAMES.get(client_name,
                                                    client_name.title())
     except AttributeError:
         spec_filename = None
         if template_name:
             spec_filename = sh.joinpths(settings.TEMPLATE_DIR,
                                         self.SPEC_TEMPLATE_DIR,
                                         template_name)
         if not spec_filename or not sh.isfile(spec_filename):
             rpm_name = None
     if rpm_name:
         if not template_name:
             template_name = "%s.spec" % rpm_name
         spec_filename = self._write_spec_file(instance, rpm_name,
                                               template_name, params)
         self._build_from_spec(instance, spec_filename, patches)
     else:
         self._build_from_app_dir(instance, params)
Example #13
def load(path, distros_patch=None):
    """Load configuration for all distros found in path.

    :param path: path containing distro configuration in yaml format
    :param distros_patch: distros file patch, JSON Patch format (RFC 6902)
    """
    distro_possibles = []
    patch = jsonpatch.JsonPatch(distros_patch) if distros_patch else None
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException('Did not find any distro definition files in %r' % path)
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            cls_kvs = utils.load_yaml(fn)
            # Apply any user specified patches to distros file
            if patch:
                patch.apply(cls_kvs, in_place=True)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s', fn, err)
        else:
            if 'name' not in cls_kvs:
                name, _ext = os.path.splitext(sh.basename(fn))
                cls_kvs['name'] = name
            distro_possibles.append(Distro(**cls_kvs))
    matches = _match_distros(distro_possibles)
    LOG.debug("Matched distros %s", [m.name for m in matches])
    return matches
Example #14
 def _install_node_repo(self):
     repo_url = self.get_option('nodejs_repo')
     if not repo_url:
         return
     # Download the said url and install it so that we can actually install
     # the node.js requirement which seems to be needed by horizon for css compiling??
     repo_basename = sh.basename(repo_url)
     (_fn, fn_ext) = os.path.splitext(repo_basename)
     fn_ext = fn_ext.lower().strip()
     if fn_ext not in ['.rpm', '.repo']:
         LOG.warn("Unknown node.js repository configuration extension %s (we only support .rpm or .repo)!", colorizer.quote(fn_ext))
         return
     with NamedTemporaryFile(suffix=fn_ext) as temp_fh:
         LOG.info("Downloading node.js repository configuration from %s to %s.", repo_url, temp_fh.name)
         down.UrlLibDownloader(repo_url, temp_fh.name).download()
         temp_fh.flush()
         if fn_ext == ".repo":
             # Just write out the repo file after downloading it...
             repo_file_name = sh.joinpths("/etc/yum.repos.d", repo_basename)
             if not sh.exists(repo_file_name):
                 with sh.Rooted(True):
                     sh.write_file(repo_file_name, sh.load_file(temp_fh.name),
                                   tracewriter=self.tracewriter)
                     sh.chmod(repo_file_name, 0644)
         elif fn_ext == ".rpm":
             # Install it instead from said rpm (which likely is a
             # file that contains said repo location)...
             packager = yum.YumPackager(self.distro).direct_install(temp_fh.name)
Example #15
 def _build_openstack_package(self, instance):
     params = self._package_parameters(instance)
     patches = instance.list_patches("package")
     params['patches'] = [sh.basename(fn) for fn in patches]
     (rpm_name, template_name) = self._get_template_and_rpm_name(instance)
     try:
         egg_name = instance.egg_info['name']
         params["version"] = instance.egg_info["version"]
         if self._is_client(instance.name, egg_name):
             client_name = utils.strip_prefix_suffix(egg_name, "python-", "client")
             if not client_name:
                 msg = "Bad client package name %s" % (egg_name)
                 raise excp.PackageException(msg)
             params["clientname"] = client_name
             params["apiname"] = self.API_NAMES.get(client_name,
                                                    client_name.title())
     except AttributeError:
         spec_filename = None
         if template_name:
             spec_filename = sh.joinpths(settings.TEMPLATE_DIR,
                                         self.SPEC_TEMPLATE_DIR,
                                         template_name)
         if not spec_filename or not sh.isfile(spec_filename):
             rpm_name = None
     if rpm_name:
         if not template_name:
             template_name = "%s.spec" % rpm_name
         spec_filename = self._write_spec_file(instance, rpm_name,
                                               template_name, params)
         self._build_from_spec(instance, spec_filename, patches)
     else:
         self._build_from_app_dir(instance, params)
Example #16
 def _execute_make(self, filename, marks_dir):
     cmdline = ["make", "-f", filename, "-j", str(self._jobs)]
     out_filename = sh.joinpths(self.log_dir,
                                "%s.log" % sh.basename(filename))
     sh.execute_save_output(cmdline,
                            cwd=marks_dir,
                            out_filename=out_filename)
Example #17
def get_archive_details(filename):
    if not sh.isfile(filename):
        raise IOError("Can not detail non-existent file %s" % (filename))

    # Check if we already got the details of this file previously
    cache_key = "f:%s:%s" % (sh.basename(filename), sh.getsize(filename))
    if cache_key in EGGS_DETAILED:
        return EGGS_DETAILED[cache_key]

    # Get pip to get us the egg-info.
    with utils.tempdir() as td:
        filename = sh.copy(filename, sh.joinpths(td, sh.basename(filename)))
        extract_to = sh.mkdir(sh.joinpths(td, 'build'))
        pip_util.unpack_file(filename, extract_to, content_type='', link='')
        details = get_directory_details(extract_to)

    EGGS_DETAILED[cache_key] = details
    return details
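A hedged usage sketch for get_archive_details above; the archive path is hypothetical, and the returned dictionary is only assumed to carry the 'req' key that other examples read:

# Hypothetical archive path; a second call should be served from the
# EGGS_DETAILED cache since the key is the basename plus the file size.
details = get_archive_details("/tmp/downloads/oslo.config-1.1.0.tar.gz")
print(details['req'])
details_again = get_archive_details("/tmp/downloads/oslo.config-1.1.0.tar.gz")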
Example #19
    def _remove_src_rpm(self, rpm):
        filename = sh.basename(rpm)
        if not filename:
            LOG.error("Cannot determine file name from rpm: %r", rpm)
            return False
        (package, ext) = os.path.splitext(filename)
        if not package:
            LOG.error("Cannot determine package name from rpm: %r", rpm)
            return False

        cmd = YUM_REMOVE + [package]
        self._execute_yum(cmd)
        return True
Example #20
    def build_binary(self):

        def _install_build_requirements():
            build_requires = self.requirements["build-requires"]
            if build_requires:
                utils.log_iterable(sorted(build_requires),
                                   header=("Installing %s build requirements" % len(build_requires)),
                                   logger=LOG)
                cmdline = ["yum", "install", "-y"] + list(build_requires)
                sh.execute(cmdline)

        def _is_src_rpm(filename):
            return filename.endswith('.src.rpm')

        _install_build_requirements()

        for repo_name in self.REPOS:
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            sh.mkdirslist(repo_dir, tracewriter=self.tracewriter)
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            if sh.isdir(src_repo_dir):
                src_repo_files = sh.listdir(src_repo_dir, files_only=True)
                src_repo_files = sorted([f for f in src_repo_files if _is_src_rpm(f)])
            else:
                src_repo_files = []
            if not src_repo_files:
                continue
            src_repo_base_files = [sh.basename(f) for f in src_repo_files]
            LOG.info('Building %s RPM packages from their SRPMs for repo %s using %s jobs',
                     len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs)
            makefile_name = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
            (_fn, content) = utils.load_template("packaging/makefiles", "binary.mk")
            rpmbuild_flags = ("--rebuild --define '_topdir %s'" % self.rpmbuild_dir)
            if self.opts.get("usr_only", False):
                rpmbuild_flags += "--define 'usr_only 1'"
            params = {
                "SRC_REPO_DIR": src_repo_dir,
                "RPMBUILD_FLAGS": rpmbuild_flags,
                "LOGS_DIR": self.log_dir,
            }
            sh.write_file(makefile_name,
                          utils.expand_template(content, params),
                          tracewriter=self.tracewriter)
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self._execute_make(makefile_name, marks_dir)
                self._move_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
            self._create_repo(repo_name)
Example #21
 def _move_rpm_files(self, source_dir, target_dir):
     # NOTE(imelnikov): we should create target_dir even if we have
     #  nothing to move, because later we rely on its existence
     if not sh.isdir(target_dir):
         sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
     if not sh.isdir(source_dir):
         return []
     moved = []
     for filename in sh.listdir(source_dir, recursive=True, files_only=True):
         if not filename.lower().endswith(".rpm"):
             continue
         sh.move(filename, target_dir, force=True)
         moved.append(sh.joinpths(target_dir, sh.basename(filename)))
     return moved
Example #22
def load_examples():
    examples = []
    for filename in glob.glob(EXAMPLE_GLOB):
        if sh.isfile(filename):
            # The test generator will use the first element as the test
            # identifier, so provide a filename + index based test identifier
            # to be able to connect test failures to the example which caused them.
            try:
                base = sh.basename(filename)
                base = re.sub(r"[.\s]", "_", base)
                for i, example in enumerate(utils.load_yaml(filename)):
                    examples.append(("%s_%s" % (base, i), example))
            except IOError:
                pass
    return examples
Example #23
 def _unpack_tar(self, file_name, file_location, tmp_dir):
     (root_name, _) = os.path.splitext(file_name)
     tar_members = self._filter_files(self._get_tar_file_members(file_location))
     (root_img_fn, ramdisk_fn, kernel_fn) = self._find_pieces(tar_members, file_location)
     if not root_img_fn:
         msg = "Tar file %r has no root image member" % (file_name)
         raise IOError(msg)
     kernel_real_fn = None
     root_real_fn = None
     ramdisk_real_fn = None
     self._log_pieces_found('archive', root_img_fn, ramdisk_fn, kernel_fn)
     extract_dir = sh.mkdir(sh.joinpths(tmp_dir, root_name))
     with contextlib.closing(tarfile.open(file_location, 'r')) as tfh:
         for m in tfh.getmembers():
             if m.name == root_img_fn:
                 root_real_fn = sh.joinpths(extract_dir, sh.basename(root_img_fn))
                 self._unpack_tar_member(tfh, m, root_real_fn)
             elif ramdisk_fn and m.name == ramdisk_fn:
                 ramdisk_real_fn = sh.joinpths(extract_dir, sh.basename(ramdisk_fn))
                 self._unpack_tar_member(tfh, m, ramdisk_real_fn)
             elif kernel_fn and m.name == kernel_fn:
                 kernel_real_fn = sh.joinpths(extract_dir, sh.basename(kernel_fn))
                 self._unpack_tar_member(tfh, m, kernel_real_fn)
     return self._describe(root_real_fn, ramdisk_real_fn, kernel_real_fn)
Example #24
 def _copy_startup_scripts(self, spec_filename):
     common_init_content = utils.load_template("packaging",
                                               "common.init")[1]
     for src in rpm.spec(spec_filename).sources:
         script = sh.basename(src[0])
         if not (script.endswith(".init")):
             continue
         target_filename = sh.joinpths(self.rpm_sources_dir, script)
         if sh.isfile(target_filename):
             continue
         bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
         params = {
             "bin": bin_name,
             "package": bin_name.split("-", 1)[0],
         }
         sh.write_file(target_filename,
                       utils.expand_template(common_init_content, params))
Example #25
def load(path):
    distro_possibles = []
    input_files = glob.glob(sh.joinpths(path, '*.yaml'))
    if not input_files:
        raise excp.ConfigException('Did not find any distro definition files in %r' % path)
    for fn in input_files:
        LOG.debug("Attempting to load distro definition from %r", fn)
        try:
            cls_kvs = utils.load_yaml(fn)
        except Exception as err:
            LOG.warning('Could not load distro definition from %r: %s', fn, err)
        else:
            if 'name' not in cls_kvs:
                name, _ext = os.path.splitext(sh.basename(fn))
                cls_kvs['name'] = name
            distro_possibles.append(Distro(**cls_kvs))
    return _match_distros(distro_possibles)
Example #26
 def _copy_startup_scripts(self, instance, spec_details):
     common_init_content = utils.load_template("packaging",
                                               "common.init")[1]
     daemon_args = instance.get_option('daemon_args', default_value={})
     for src in spec_details.get('sources', []):
         script = sh.basename(src)
         if not (script.endswith(".init")):
             continue
         target_filename = sh.joinpths(self.rpm_sources_dir, script)
         if sh.isfile(target_filename):
             continue
         bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
         params = {
             "bin": bin_name,
             "package": bin_name.split("-", 1)[0],
             "daemon_args": daemon_args.get(bin_name, ''),
         }
         sh.write_file(target_filename,
                       utils.expand_template(common_init_content, params))
Example #27
 def pre_build():
     build_requirements = self.requirements.get("build-requires")
     if build_requirements:
         utils.log_iterable(build_requirements,
                            header="Installing build requirements",
                            logger=LOG)
         self.helper.transaction(install_pkgs=build_requirements,
                                 tracewriter=self.tracewriter)
     build_requirements = []
     try:
         build_requirements.extend(_get_lines(self.rpm_build_requires_filename))
     except IOError as e:
         if e.errno != errno.ENOENT:
             raise
     built_files = []
     built_requirements = []
     for repo_name in self.REPOS:
         repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
         matched_paths = []
         available_paths = list_src_rpms(repo_dir)
         envra_path_details = self.envra_helper.explode(*available_paths)
         for (path, envra_detail) in zip(available_paths, envra_path_details):
             package_name = envra_detail.get('name')
             if package_name in build_requirements:
                 matched_paths.append(path)
                 built_requirements.append(package_name)
         if matched_paths:
             with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                 sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                 for path in matched_paths:
                     sh.copy(path,
                             sh.joinpths(prebuild_dir, sh.basename(path)))
                 built_files.extend(
                     build(prebuild_dir, repo_name,
                           'Prebuilding %s RPM packages from their'
                           ' SRPMs for repo %s using %s jobs',
                           "%s-prebuild" % self.group, built_files))
     leftover_requirements = set()
     for req in build_requirements:
         if req not in built_requirements:
             leftover_requirements.add(req)
     return (leftover_requirements, built_files)
Example #28
 def build(repo_dir, repo_name, header_tpl, group, built_files):
     repo_files = []
     for srpm in list_src_rpms(repo_dir):
         if srpm not in built_files:
             repo_files.append(srpm)
     if not repo_files:
         return []
     utils.log_iterable(repo_files,
                        header=header_tpl % (len(repo_files),
                                             self.SRC_REPOS[repo_name],
                                             self.jobs),
                        logger=LOG)
     rpmbuild_flags = "--rebuild"
     if self.opts.get("usr_only", False):
         rpmbuild_flags += " --define 'usr_only 1'"
     if self.opts.get("overwrite_configs", False):
         rpmbuild_flags += " --define 'overwrite_configs 1'"
     with sh.remove_before(self.rpmbuild_dir):
         self._create_rpmbuild_subdirs()
         # This is needed so that make correctly identifies the right
         # files and the right *.mark files and so-on; instead of
         # grabbing all the files (including ones we don't want to
         # build just yet...)
         files_dirname = '%s-%s-build' % (repo_name, group)
         files_dir = sh.joinpths(self.deps_dir, files_dirname)
         sh.mkdirslist(files_dir)
         for srpm in repo_files:
             sh.copy(srpm, sh.joinpths(files_dir, sh.basename(srpm)))
         try:
             self.py2rpm_helper.build_all_binaries(repo_name,
                                                   files_dir,
                                                   rpmbuild_flags,
                                                   self.tracewriter,
                                                   self.jobs)
         finally:
             # If we made any rpms (even if a failure happened, make
             # sure that we move them to the right target repo).
             moved_rpms = move_rpms(repo_name)
             if len(moved_rpms) > 0:
                 self._create_repo(repo_name)
     return repo_files
Example #29
 def explode(self, *filenames):
     if not filenames:
         return []
     cmdline = [self._executable]
     for filename in filenames:
         cmdline.append(sh.basename(filename))
     (stdout, _stderr) = sh.execute(cmdline)
     results = []
     missing = collections.deque(filenames)
     for line in stdout.splitlines():
         decoded = json.loads(line)
         decoded['origin'] = missing.popleft()
         results.append(decoded)
     if missing:
         raise AssertionError("%s filenames names were lost during"
                              " exploding: %s" % (len(missing),
                                                  list(missing)))
     if len(results) > len(filenames):
         diff = len(results) - len(filenames)
         raise AssertionError("%s filenames appeared unexpectedly while"
                              " exploding" % (diff))
     return results
Example #30
 def explode(self, *filenames):
     if not filenames:
         return []
     cmdline = [self._executable]
     for filename in filenames:
         cmdline.append(sh.basename(filename))
     (stdout, _stderr) = sh.execute(cmdline)
     results = []
     missing = collections.deque(filenames)
     for line in stdout.splitlines():
         decoded = json.loads(line)
         decoded['origin'] = missing.popleft()
         results.append(decoded)
     if missing:
         raise AssertionError("%s filenames names were lost during"
                              " exploding: %s" %
                              (len(missing), list(missing)))
     if len(results) > len(filenames):
         diff = len(results) - len(filenames)
         raise AssertionError("%s filenames appeared unexpectedly while"
                              " exploding" % (diff))
     return results
Example #31
 def _copy_startup_scripts(self, instance, spec_filename):
     common_init_content = utils.load_template("packaging",
                                               "common.init")[1]
     cmd = [self.specprint_executable]
     cmd.extend(['-f', spec_filename])
     daemon_args = instance.get_option('daemon_args', default_value={})
     spec_details = json.loads(sh.execute(cmd)[0])
     for src in spec_details.get('sources', []):
         script = sh.basename(src)
         if not (script.endswith(".init")):
             continue
         target_filename = sh.joinpths(self.rpm_sources_dir, script)
         if sh.isfile(target_filename):
             continue
         bin_name = utils.strip_prefix_suffix(script, "openstack-", ".init")
         params = {
             "bin": bin_name,
             "package": bin_name.split("-", 1)[0],
             "daemon_args": daemon_args.get(bin_name, ''),
         }
         sh.write_file(target_filename,
                       utils.expand_template(common_init_content, params))
Example #32
    def _build_openstack_package(self, instance):
        params = self._package_parameters(instance)
        patches = instance.list_patches("package")
        params['patches'] = [sh.basename(fn) for fn in patches]

        build_name = instance.get_option('build_name',
                                         default_value=instance.name)
        (rpm_name, template_name) = self._find_template_and_rpm_name(
            instance, build_name)
        try:
            egg_name = instance.egg_info['name']
            params["version"] = instance.egg_info["version"]
        except AttributeError:
            pass
        else:
            if any(
                    s.endswith("client")
                    for s in (instance.name, egg_name, build_name)):
                client_name = utils.strip_prefix_suffix(
                    egg_name, "python-", "client")
                if not client_name:
                    msg = "Bad client package name %s" % (egg_name)
                    raise excp.PackageException(msg)
                params["clientname"] = client_name
                params["apiname"] = instance.get_option(
                    'api_name', default_value=client_name.title())

        if all((rpm_name, template_name)):
            spec_filename = self._write_spec_file(instance, rpm_name,
                                                  template_name, params)
            self._build_from_spec(instance, spec_filename, patches)
        else:
            self.py2rpm_helper.build_srpm(
                source=instance.get_option("app_dir"),
                log_filename=instance.name,
                release=params.get("release"),
                with_tests=not params.get("no_tests"))
Example #33
    def _install_src_rpm(self, rpm):
        filename = sh.basename(rpm)
        if not filename:
            LOG.error("Cannot determine file name from rpm: %r", rpm)
            return False
        (package, ext) = os.path.splitext(filename)
        if not package:
            LOG.error("Cannot determine package name from rpm: %r", rpm)
            return False

        if self._is_installed(package):
            return True
        with utils.tempdir() as tdir:
            if not sh.exists(rpm):
                (fetched_filen, bytes_down) = downloader.UrlLibDownloader(rpm, sh.joinpths(tdir, filename)).download()
                LOG.debug("For url %s, we downloaded %s bytes to %s", rpm, bytes_down, fetched_filen)
                # RLOO, do we want to catch any exceptions?
            else:
                fetched_filen = rpm
        
            cmd = YUM_INSTALL + ["--nogpgcheck", fetched_filen] 
            self._execute_yum(cmd)

        return True
Example #34
def download(distro, uri, target_dir, **kwargs):
    puri = urlparse(uri)
    scheme = puri.scheme.lower()
    path = puri.path.strip().split("?", 1)[0]
    if scheme == 'git' or path.lower().endswith('.git'):
        downloader = GitDownloader(distro, uri, target_dir)
        downloader.download()
    elif scheme in ['http', 'https']:
        with utils.tempdir() as tdir:
            fn = sh.basename(path)
            downloader = UrlLibDownloader(uri, sh.joinpths(tdir, fn))
            downloader.download()
            if fn.endswith('.tar.gz'):
                cmd = ['tar', '-xzvf', sh.joinpths(tdir, fn), '-C', target_dir]
                sh.execute(*cmd)
            elif fn.endswith('.zip'):
                # TODO(harlowja) this might not be 100% right...
                # we might have to move the finished directory...
                cmd = ['unzip', sh.joinpths(tdir, fn), '-d', target_dir]
                sh.execute(*cmd)
            else:
                raise excp.DownloadException("Unable to extract %s downloaded from %s" % (fn, uri))
    else:
        raise excp.DownloadException("Unknown scheme %s, unable to download from %s" % (scheme, uri))
Example #35
 def _execute_make(self, filename, marks_dir, jobs):
     cmdline = ["make", "-f", filename, "-j", str(jobs)]
     out_filename = sh.joinpths(self._log_dir, "%s.log" % sh.basename(filename))
     sh.execute_save_output(cmdline, out_filename, cwd=marks_dir)
Example #36
 def _extract_url_fn(self):
     return sh.basename(self.parsed_url.path)
Example #37
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [
            pkg_resources.Requirement.parse(name).key
            for name in self.python_names
        ]
        no_pips.extend(self.ignore_pips)
        yum_map = self._get_known_yum_packages()
        pips_keys = set([p.key for p in pips_downloaded])
        package_reqs = []
        for filename in package_files:
            package_details = pip_helper.get_archive_details(filename)
            package_reqs.append((filename, package_details['req']))

        def _filter_package_files():
            yum_provided = []
            req_names = [req.key for (filename, req) in package_reqs]
            package_rpm_names = self.py2rpm_helper.names_to_rpm_names(
                req_names)
            filtered_files = []
            for filename, req in package_reqs:
                rpm_name = package_rpm_names[req.key]
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                              "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                rpm_info = self._find_yum_match(yum_map, req, rpm_name)
                if not rpm_info:
                    filtered_files.append(filename)
                else:
                    yum_provided.append((req, rpm_info))
                    LOG.info(("Dependency %s was downloaded additionally "
                              "but it can be satisfied by %s from repository "
                              "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name),
                             colorizer.quote(rpm_info['repo']))
            return (filtered_files, yum_provided)

        LOG.info("Filtering %s downloaded files.", len(package_files))
        filtered_package_files, yum_provided = _filter_package_files()
        if yum_provided:
            yum_buff = six.StringIO()
            for (req, rpm_info) in yum_provided:
                dep_info = {
                    'requirement': str(req),
                    'rpm': rpm_info,
                }
                yum_buff.write(json.dumps(dep_info))
                yum_buff.write("\n")
            sh.append_file(self.yum_satisfies_filename, yum_buff.getvalue())
        if not filtered_package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        for filename in package_files:
            if filename not in filtered_package_files:
                sh.unlink(filename)
        build_requires = six.StringIO()
        for (filename, req) in package_reqs:
            if filename in filtered_package_files:
                build_requires.write("%s # %s\n" %
                                     (req, sh.basename(filename)))
        sh.write_file(self.build_requires_filename, build_requires.getvalue())

        # Now build them into SRPM rpm files.
        package_files = sorted(filtered_package_files)
        self.py2rpm_helper.build_all_srpms(package_files=package_files,
                                           tracewriter=self.tracewriter,
                                           jobs=self.jobs)
Example #38
    def build_binary(self):

        def _install_build_requirements():
            build_requires = self.requirements["build-requires"]
            if build_requires:
                utils.log_iterable(sorted(build_requires),
                                   header=("Installing %s build requirements" % len(build_requires)),
                                   logger=LOG)
                cmdline = ["yum", "install", "-y"] + list(build_requires)
                sh.execute(cmdline)

        def _is_src_rpm(filename):
            return filename.endswith('.src.rpm')

        _install_build_requirements()

        for repo_name in self.REPOS:
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            sh.mkdirslist(repo_dir, tracewriter=self.tracewriter)
            src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            if sh.isdir(src_repo_dir):
                src_repo_files = sh.listdir(src_repo_dir, files_only=True)
                src_repo_files = sorted([f for f in src_repo_files if _is_src_rpm(f)])
            else:
                src_repo_files = []
            if not src_repo_files:
                continue
            src_repo_base_files = [sh.basename(f) for f in src_repo_files]
            LOG.info("Installing build requirements for repo %s" % repo_name)
            # NOTE(aababilov): yum-builddep is buggy and can fail when several
            # package names are given, so, pass them one by one
            for srpm_filename in src_repo_files:
                cmdline = ["yum-builddep", "-q", "-y", srpm_filename]
                sh.execute(cmdline)
            header = 'Building %s RPM packages from their SRPMs for repo %s using %s jobs'
            header = header % (len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs)
            utils.log_iterable(src_repo_base_files, header=header, logger=LOG)

            binary_makefile_name = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
            marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
            sh.mkdirslist(marks_dir)
            with open(binary_makefile_name, "w") as makefile:
                rpmbuild_flags = ("--rebuild --define '_topdir %s'" %
                                  self.rpmbuild_dir)
                if self.opts.get("usr_only", False):
                    rpmbuild_flags += "--define 'usr_only 1'"
                print >> makefile, "SRC_REPO_DIR :=", src_repo_dir
                print >> makefile, "RPMBUILD := rpmbuild"
                print >> makefile, "RPMBUILD_FLAGS :=", rpmbuild_flags
                print >> makefile, "LOGS_DIR :=", self.log_dir
                print >> makefile, """
%.mark: $(SRC_REPO_DIR)/%
\t@$(RPMBUILD) $(RPMBUILD_FLAGS) -- $^ &> $(LOGS_DIR)/rpmbuild-$*.log
\t@touch "$@"
\t@echo "$* is processed"
"""
                print >> makefile, "MARKS :=", " ".join(
                    "%s.mark" % sh.basename(i) for i in src_repo_files)
                print >> makefile
                print >> makefile, "all: $(MARKS)"
            with sh.remove_before_after(self.rpmbuild_dir):
                self._execute_make(binary_makefile_name, marks_dir)
                self._move_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                 repo_dir)
            self._create_repo(repo_name)
Example #39
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [pkg_resources.Requirement.parse(name).key
                   for name in self.python_names]
        no_pips.extend(self.BANNED_PACKAGES)
        yum_map = self._get_yum_available()
        pips_keys = set([p.key for p in pips_downloaded])

        def _filter_package_files(package_files):
            package_reqs = []
            package_keys = []
            for filename in package_files:
                package_details = pip_helper.get_archive_details(filename)
                package_reqs.append(package_details['req'])
                package_keys.append(package_details['req'].key)
            package_rpm_names = self._convert_names_python2rpm(package_keys)
            filtered_files = []
            for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                                 package_rpm_names):
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
                if not repo:
                    filtered_files.append(filename)
                else:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it can be satisfied by %s from repository "
                             "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name),
                             colorizer.quote(repo))
            return filtered_files

        LOG.info("Filtering %s downloaded files.", len(package_files))
        package_files = _filter_package_files(package_files)
        if not package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        deps_makefile_name = sh.joinpths(self.deps_dir, "deps.mk")
        marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
        sh.mkdirslist(marks_dir)
        with open(deps_makefile_name, "w") as makefile:
            scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
            py2rpm_options = self.py2rpm_options() + [
                "--scripts-dir", scripts_dir,
                "--source-only",
                "--rpm-base", self.rpmbuild_dir,
            ]
            print >> makefile, "DOWNLOADS_DIR :=", self.download_dir
            print >> makefile, "LOGS_DIR :=", self.log_dir
            print >> makefile, "PY2RPM :=", self.py2rpm_executable
            print >> makefile, "PY2RPM_FLAGS :=", " ".join(py2rpm_options)
            print >> makefile, """
%.mark: $(DOWNLOADS_DIR)/%
\t@$(PY2RPM) $(PY2RPM_FLAGS) -- $^ &> $(LOGS_DIR)/py2rpm-$*.log
\t@touch "$@"
\t@echo "$* is processed"
"""
            print >> makefile, "MARKS :=", " ".join(
                "%s.mark" % sh.basename(i) for i in package_files)
            print >> makefile
            print >> makefile, "all: $(MARKS)"

        base_package_files = [sh.basename(f) for f in package_files]
        utils.log_iterable(base_package_files,
                           header="Building %s SRPM packages using %s jobs" %
                           (len(package_files), self.jobs),
                           logger=LOG)
        self._execute_make(deps_makefile_name, marks_dir)
Example #40
 def builddep(self, srpm_path, tracewriter=None):
     self._traced_yyoom(['builddep', srpm_path],
                        'builddep-%s' % sh.basename(srpm_path), tracewriter)
Example #41
 def _get_component_name(pkg_dir):
     return sh.basename(sh.dirname(pkg_dir))
Example #44
 def _extract_url_fn(self):
     """Extract filename from an image url."""
     return sh.basename(self._parsed_url.path)
Example #45
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [pkg_resources.Requirement.parse(name).key
                   for name in self.python_names]
        no_pips.extend(self.ignore_pips)
        yum_map = self._get_known_yum_packages()
        pips_keys = set([p.key for p in pips_downloaded])
        package_reqs = []
        for filename in package_files:
            package_details = pip_helper.get_archive_details(filename)
            package_reqs.append((filename, package_details['req']))

        def _filter_package_files():
            yum_provided = []
            req_names = [req.key for (filename, req) in package_reqs]
            package_rpm_names = self.py2rpm_helper.names_to_rpm_names(req_names)
            filtered_files = []
            for filename, req in package_reqs:
                rpm_name = package_rpm_names[req.key]
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                rpm_info = self._find_yum_match(yum_map, req, rpm_name)
                if not rpm_info:
                    filtered_files.append(filename)
                else:
                    yum_provided.append((req, rpm_info))
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it can be satisfied by %s from repository "
                             "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name),
                             colorizer.quote(rpm_info['repo']))
            return (filtered_files, yum_provided)

        LOG.info("Filtering %s downloaded files.", len(package_files))
        filtered_package_files, yum_provided = _filter_package_files()
        if yum_provided:
            yum_buff = six.StringIO()
            for (req, rpm_info) in yum_provided:
                dep_info = {
                    'requirement': str(req),
                    'rpm': rpm_info,
                }
                yum_buff.write(json.dumps(dep_info))
                yum_buff.write("\n")
            sh.append_file(self.yum_satisfies_filename, yum_buff.getvalue())
        if not filtered_package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        for filename in package_files:
            if filename not in filtered_package_files:
                sh.unlink(filename)
        build_requires = six.StringIO()
        for (filename, req) in package_reqs:
            if filename in filtered_package_files:
                build_requires.write("%s # %s\n" % (req, sh.basename(filename)))
        sh.write_file(self.build_requires_filename, build_requires.getvalue())

        # Now build them into SRPM rpm files.
        package_files = sorted(filtered_package_files)
        self.py2rpm_helper.build_all_srpms(package_files=package_files,
                                           tracewriter=self.tracewriter,
                                           jobs=self.jobs)