Example #1
 def __init__(self, *args, **kargs):
     PkgInstallComponent.__init__(self, *args, **kargs)
     self.requires_files = [
         sh.joinpths(self.get_option('app_dir'), 'tools', 'pip-requires'),
     ]
     if self.get_bool_option('use_tests_requires', default_value=True):
         self.requires_files.append(sh.joinpths(self.get_option('app_dir'), 'tools', 'test-requires'))
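Note: sh in these examples is anvil's shell-helper module. Judging only from the call sites collected on this page, sh.joinpths appears to behave like a plain path join; the sketch below reflects that assumption and is an illustration, not the project's actual implementation.

import os


def joinpths(*paths):
    # Assumed behavior, inferred from the examples on this page: join the
    # given path segments as-is; template markers such as "$CONFIG_DIR" are
    # left untouched here and expanded elsewhere.
    return os.path.join(*paths)


print(joinpths("/etc", "nova"))             # -> /etc/nova
print(joinpths("$CONFIG_DIR", "api.conf"))  # -> $CONFIG_DIR/api.conf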
Example #2
 def _fetch_argv(self, name):
     if name.find('api') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', API_CONF)]
     elif name.find('registry') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', REG_CONF)]
     else:
         return []
Example #3
 def build_all_srpms(self, package_files, tracewriter, jobs):
     (_fn,
      content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                     "source.mk")
     scripts_dir = sh.abspth(
         sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
     cmdline = self._start_cmdline(escape_values=True)[1:] + [
         "--scripts-dir",
         scripts_dir,
         "--source-only",
         "--rpm-base",
         self._rpmbuild_dir,
         "--debug",
     ]
     executable = " ".join(self._start_cmdline()[0:1])
     params = {
         "DOWNLOADS_DIR": self._download_dir,
         "LOGS_DIR": self._log_dir,
         "PY2RPM": executable,
         "PY2RPM_FLAGS": " ".join(cmdline)
     }
     marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
     if not sh.isdir(marks_dir):
         sh.mkdirslist(marks_dir, tracewriter=tracewriter)
     makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
     sh.write_file(makefile_path,
                   utils.expand_template(content, params),
                   tracewriter=tracewriter)
     utils.log_iterable(package_files,
                        header="Building %s SRPM packages using %s jobs" %
                        (len(package_files), jobs),
                        logger=LOG)
     self._execute_make(makefile_path, marks_dir, jobs)
Example #4
 def __init__(self, distro, root_dir, instances, opts=None):
     self.distro = distro
     self.root_dir = root_dir
     self.instances = instances
     self.opts = opts or {}
     self.deps_dir = sh.joinpths(self.root_dir, "deps")
     self.download_dir = sh.joinpths(self.deps_dir, "download")
     self.log_dir = sh.joinpths(self.deps_dir, "output")
     self.gathered_requires_filename = sh.joinpths(
         self.deps_dir, "pip-requires")
     self.forced_requires_filename = sh.joinpths(
         self.deps_dir, "forced-requires")
     self.pip_executable = str(self.distro.get_command_config('pip'))
     # list of requirement strings
     self.pips_to_install = []
     self.forced_packages = []
     self.package_dirs = self._get_package_dirs(instances)
     # Instantiate this as late as we can.
     self._python_names = None
     # Track what files we create so they can be cleaned up on uninstall.
     trace_fn = tr.trace_filename(self.root_dir, 'deps')
     self.tracewriter = tr.TraceWriter(trace_fn, break_if_there=False)
     self.tracereader = tr.TraceReader(trace_fn)
     self.requirements = {}
     for key in ("build-requires", "requires", "conflicts"):
         req_set = set()
         for inst in self.instances:
             req_set |= set(pkg["name"]
                            for pkg in inst.get_option(key) or [])
         self.requirements[key] = req_set
Example #5
def download_dependencies(download_dir, pips_to_download, output_filename):
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    # Ensure certain directories exist that we want to exist (but we don't
    # want to delete them run after run).
    cache_path = sh.joinpths(download_dir, ".cache")
    if not sh.isdir(cache_path):
        sh.mkdir(cache_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        '--download-cache', cache_path,
    ]
    # Don't download wheels...
    #
    # See: https://github.com/pypa/pip/issues/1439
    if dist_version.StrictVersion(PIP_VERSION) >= dist_version.StrictVersion('1.5'):
        cmdline.append("--no-use-wheel")
    cmdline.extend([str(p) for p in pips_to_download])
    sh.execute_save_output(cmdline, output_filename)
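A call to the function above might look like the following; the download directory, requirement strings, and log path are purely illustrative values, not ones taken from the project.

download_dependencies(download_dir="/tmp/anvil-downloads",
                      pips_to_download=["six>=1.9.0", "requests>=1.2.1"],
                      output_filename="/tmp/anvil-downloads/pip-download.log")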
Example #6
    def _install_into_venv(self, instance, requirements):
        venv_dir = self._venv_directory_for(instance)
        base_pip = [sh.joinpths(venv_dir, 'bin', 'pip')]
        env_overrides = {
            'PATH': os.pathsep.join([sh.joinpths(venv_dir, "bin"),
                                     env.get_key('PATH', default_value='')]),
            'VIRTUAL_ENV': venv_dir,
        }
        sh.mkdirslist(self.cache_dir, tracewriter=self.tracewriter)

        def try_install(attempt, requirements):
            cmd = list(base_pip) + ['install']
            cmd.extend([
                '--download-cache',
                self.cache_dir,
            ])
            if isinstance(requirements, six.string_types):
                cmd.extend([
                    '--requirement',
                    requirements
                ])
            else:
                for req in requirements:
                    cmd.append(str(req))
            sh.execute(cmd, env_overrides=env_overrides)

        # Sometimes pip fails downloading things, retry it when this happens...
        utils.retry(3, 5, try_install, requirements=requirements)
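The utils.retry call above passes an attempt count, a delay in seconds, a callable that receives the current attempt index, and extra keyword arguments. A minimal sketch of a helper with those assumed semantics (an illustration only, not anvil's actual utils.retry):

import time


def retry(attempts, delay, func, *args, **kwargs):
    # Call func(attempt, ...) up to `attempts` times, sleeping `delay` seconds
    # between failed attempts and re-raising the last error if they all fail.
    last_exc = None
    for attempt in range(attempts):
        try:
            return func(attempt, *args, **kwargs)
        except Exception as exc:
            last_exc = exc
            if attempt + 1 < attempts:
                time.sleep(delay)
    raise last_exc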
Example #7
 def __init__(self, *args, **kargs):
     comp.PythonRuntime.__init__(self, *args, **kargs)
     self.wait_time = self.get_int_option('service_wait_seconds')
     self.virsh = lv.Virsh(self.wait_time, self.distro)
     self.config_path = sh.joinpths(self.get_option('cfg_dir'), API_CONF)
     self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
     self.net_init_fn = sh.joinpths(self.get_option('trace_dir'), NET_INITED_FN)
Example #8
 def _get_target_config_name(self, config_name):
     if config_name == HORIZON_PY_CONF:
         # FIXME don't write to checked out locations...
         dash_dir = sh.joinpths(self.get_option('app_dir'), ROOT_DASH)
         return sh.joinpths(dash_dir, *HORIZON_PY_CONF_TGT)
     else:
         return comp.PythonInstallComponent._get_target_config_name(self, config_name)
Example #9
    def package_finish(self):
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = sh.joinpths(self.opts.get('venv_deploy_dir'),
                                         instance.name)
                replacer = functools.partial(
                    re.subn, re.escape(instance.get_option('component_dir')),
                    deploy_dir)
                bin_dir = sh.joinpths(venv_dir, 'bin')
                adjustments, files_replaced = self._replace_deployment_paths(bin_dir,
                                                                             replacer)
                if files_replaced:
                    LOG.info("Adjusted %s deployment path(s) in %s files",
                             adjustments, files_replaced)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(venv_dir, '%s-venv.tar.gz' % instance.name)
            LOG.info("Making tarball of %s built for %s at %s", venv_dir,
                     instance.name, tar_filename)
            with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as tfh:
                for path in sh.listdir(venv_dir, recursive=True):
                    tfh.add(path, recursive=False, arcname=path[len(venv_dir):])
Example #10
 def __init__(self, cache_dir, url):
     self._cache_dir = cache_dir
     self._url = url
     hashed_url = self._hash(self._url)
     self._cache_path = sh.joinpths(self._cache_dir, hashed_url)
     self._details_path = sh.joinpths(self._cache_dir,
                                      hashed_url + ".details")
Example #11
 def build_all_srpms(self, package_files, tracewriter, jobs):
     (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"), "source.mk")
     scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
     cmdline = self._start_cmdline(escape_values=True)[1:] + [
         "--scripts-dir", scripts_dir,
         "--source-only",
         "--rpm-base", self._rpmbuild_dir
     ]
     executable = " ".join(self._start_cmdline()[0:1])
     params = {
         "DOWNLOADS_DIR": self._download_dir,
         "LOGS_DIR": self._log_dir,
         "PY2RPM": executable,
         "PY2RPM_FLAGS": " ".join(cmdline)
     }
     marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
     if not sh.isdir(marks_dir):
         sh.mkdirslist(marks_dir, tracewriter=tracewriter)
     makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
     sh.write_file(makefile_path, utils.expand_template(content, params),
                   tracewriter=tracewriter)
     utils.log_iterable(package_files,
                        header="Building %s SRPM packages using %s jobs" %
                               (len(package_files), jobs),
                        logger=LOG)
     self._execute_make(makefile_path, marks_dir, jobs)
Example #12
 def _build_from_spec(self, instance, spec_filename, patches=None):
     pkg_dir = instance.get_option('app_dir')
     if sh.isfile(sh.joinpths(pkg_dir, "setup.py")):
         self._write_python_tarball(instance, pkg_dir, ENSURE_NOT_MISSING)
     else:
         self._write_git_tarball(instance, pkg_dir, spec_filename)
     self._copy_sources(instance)
     if patches:
         self._copy_patches(patches)
     cmdline = [self.specprint_executable]
     cmdline.extend(['-f', spec_filename])
     spec_details = json.loads(sh.execute(cmdline)[0])
     rpm_requires = []
     for k in ('requires', 'requirenevrs'):
         try:
             rpm_requires.extend(spec_details['headers'][k])
         except (KeyError, TypeError):
             pass
     if rpm_requires:
         buff = six.StringIO()
         buff.write("# %s\n" % instance.name)
         if rpm_requires:
             for req in rpm_requires:
                 buff.write("%s\n" % req)
             buff.write("\n")
         sh.append_file(self.rpm_build_requires_filename, buff.getvalue())
     self._copy_startup_scripts(instance, spec_details)
     cmdline = [
         self.rpmbuild_executable,
         "-bs",
         "--define", "_topdir %s" % self.rpmbuild_dir,
         spec_filename,
     ]
     out_filename = sh.joinpths(self.log_dir, "rpmbuild-%s.log" % instance.name)
     sh.execute_save_output(cmdline, out_filename)
Example #13
 def _move_srpms(self, repo_name, rpmbuild_dir=None):
     if rpmbuild_dir is None:
         rpmbuild_dir = self.rpmbuild_dir
     src_repo_name = self.SRC_REPOS[repo_name]
     src_repo_dir = sh.joinpths(self.anvil_repo_dir, src_repo_name)
     return self._move_rpm_files(sh.joinpths(rpmbuild_dir, "SRPMS"),
                                 src_repo_dir)
Example #14
 def pre_build():
     build_requirements = self.requirements.get("build-requires")
     if build_requirements:
         utils.log_iterable(build_requirements,
                            header="Installing build requirements",
                            logger=LOG)
         self.helper.transaction(install_pkgs=build_requirements,
                                 tracewriter=self.tracewriter)
     build_requirements = ''
     try:
         build_requirements = sh.load_file(self.rpm_build_requires_filename)
     except IOError as e:
         if e.errno != errno.ENOENT:
             raise
     build_requirements = set(pkg_resources.yield_lines(build_requirements))
     for repo_name in self.REPOS:
         repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
         matched_paths = []
         paths = list_src_rpms(repo_dir)
         envra_details = self.envra_helper.explode(*paths)
         for (path, envra_detail) in zip(paths, envra_details):
             package_name = envra_detail.get('name')
             if package_name in build_requirements:
                 matched_paths.append(path)
                 build_requirements.discard(package_name)
         if matched_paths:
             with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                 if not sh.isdir(prebuild_dir):
                     sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                 for path in matched_paths:
                     sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                 build(prebuild_dir, repo_name,
                       'Prebuilding %s RPM packages from their SRPMs'
                       ' for repo %s using %s jobs')
     return build_requirements
Example #15
    def _write_spec_file(self, instance, rpm_name, template_name, params):
        requires_what = params.get('requires', [])
        test_requires_what = params.get('test_requires', [])
        egg_info = getattr(instance, 'egg_info', None)
        if egg_info:

            def ei_names(key):
                try:
                    requires_python = [str(req) for req in egg_info[key]]
                except KeyError:
                    return []
                else:
                    return self.py2rpm_helper.names_to_rpm_requires(
                        requires_python)

            requires_what.extend(ei_names('dependencies'))
            test_requires_what.extend(ei_names('test_dependencies'))

        params["requires"] = requires_what
        params["test_requires"] = test_requires_what
        params["epoch"] = self.OPENSTACK_EPOCH
        params["part_fn"] = lambda filename: sh.joinpths(
            settings.TEMPLATE_DIR, self.SPEC_TEMPLATE_DIR, filename)
        parsed_version = pkg_resources.parse_version(params["version"])
        params.update(self._make_spec_functors(parsed_version))
        content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
        spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                    "%s.spec" % rpm_name)
        sh.write_file(spec_filename,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        return spec_filename
Example #16
 def __init__(self, *args, **kargs):
     PkgInstallComponent.__init__(self, *args, **kargs)
     self.requires_files = [
         sh.joinpths(self.get_option('app_dir'), 'tools', 'pip-requires'),
     ]
     if self.get_bool_option('use_tests_requires', default_value=True):
         self.requires_files.append(sh.joinpths(self.get_option('app_dir'), 'tools', 'test-requires'))
Example #17
    def _write_spec_file(self, instance, rpm_name, template_name, params):
        requires_what = params.get('requires', [])
        test_requires_what = params.get('test_requires', [])
        egg_info = getattr(instance, 'egg_info', None)
        if egg_info:

            def ei_names(key):
                try:
                    requires_python = [str(req) for req in egg_info[key]]
                except KeyError:
                    return []
                else:
                    return self.py2rpm_helper.names_to_rpm_requires(requires_python)

            requires_what.extend(ei_names('dependencies'))
            test_requires_what.extend(ei_names('test_dependencies'))

        params["requires"] = requires_what
        params["test_requires"] = test_requires_what
        params["epoch"] = self.OPENSTACK_EPOCH
        params["part_fn"] = lambda filename: sh.joinpths(
            settings.TEMPLATE_DIR,
            self.SPEC_TEMPLATE_DIR,
            filename)
        parsed_version = pkg_resources.parse_version(params["version"])
        params.update(self._make_spec_functors(parsed_version))
        content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
        spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS", "%s.spec" % rpm_name)
        sh.write_file(spec_filename, utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        return spec_filename
Example #18
 def _install_into_venv(self, instance, requirements,
                        upgrade=False, extra_env_overrides=None):
     venv_dir = self._venv_directory_for(instance)
     base_pip = [sh.joinpths(venv_dir, 'bin', 'pip')]
     env_overrides = {
         'PATH': os.pathsep.join([sh.joinpths(venv_dir, "bin"),
                                  env.get_key('PATH', default_value='')]),
         'VIRTUAL_ENV': venv_dir,
     }
     if extra_env_overrides:
         env_overrides.update(extra_env_overrides)
     cmd = list(base_pip) + ['install']
     if upgrade:
         cmd.append("--upgrade")
     if isinstance(requirements, six.string_types):
         cmd.extend([
             '--requirement',
             requirements
         ])
     else:
         for req in requirements:
             cmd.append(str(req))
     count = self.install_counters.get(instance.name, 0)
     self.install_counters[instance.name] = count + 1
     out_filename = sh.joinpths(self.log_dir, "venv-install-%s-%s.log" % (instance.name, count))
     sh.execute_save_output(cmd, out_filename, env_overrides=env_overrides)
Example #19
 def _get_param_map(self, app_name):
     param_dict = comp.ProgramRuntime._get_param_map(self, app_name)
     if app_name == APP_Q_AGENT:
         param_dict['OVS_CONFIG_FILE'] = sh.joinpths(self.get_option('cfg_dir'), AGENT_CONF)
     elif app_name == APP_Q_SERVER:
         param_dict['QUANTUM_CONFIG_FILE'] = sh.joinpths(self.get_option('cfg_dir'), QUANTUM_CONF)
     return param_dict
Example #20
 def _create_package(self):
     files = self._gather_files()
     params = {
         "files": files,
         "requires": self._requirements(),
         "obsoletes": self._obsoletes(),
         "conflicts": self._conflicts(),
         "defines": self._defines(),
         "undefines": self._undefines(),
         "build": self._build_details(),
         "who": sh.getuser(),
         "date": utils.iso8601(),
         "patches": self._patches(),
         "details": self.details,
     }
     (_fn, content) = utils.load_template("packaging", "spec.tmpl")
     spec_base = self._make_fn("spec")
     spec_fn = sh.joinpths(self.build_paths["specs"], spec_base)
     LOG.debug("Creating spec file %s with params:", spec_fn)
     files["sources"].append("%s.tar.gz" % (spec_base))
     utils.log_object(params, logger=LOG, level=logging.DEBUG)
     sh.write_file(spec_fn, utils.expand_template(content, params))
     tar_it(
         sh.joinpths(self.build_paths["sources"], "%s.tar.gz" % (spec_base)),
         spec_base,
         wkdir=self.build_paths["specs"],
     )
Example #21
 def _do_network_init(self):
     ran_fn = sh.joinpths(self.get_option("trace_dir"), NET_INITED_FN)
     if not sh.isfile(ran_fn) and self.net_enabled:
         LOG.info("Creating your nova network to be used with instances.")
         # Figure out the commands to run
         mp = {}
         cmds = []
         mp["CFG_FILE"] = sh.joinpths(self.get_option("cfg_dir"), API_CONF)
         mp["BIN_DIR"] = sh.joinpths(self.get_option("app_dir"), BIN_DIR)
         if self.cfg.getboolean("nova", "enable_fixed"):
             # Create a fixed network
             mp["FIXED_NETWORK_SIZE"] = self.cfg.getdefaulted("nova", "fixed_network_size", "256")
             mp["FIXED_RANGE"] = self.cfg.getdefaulted("nova", "fixed_range", "10.0.0.0/24")
             cmds.extend(FIXED_NET_CMDS)
         if not self.get_option("quantum"):
             if self.cfg.getboolean("nova", "enable_floating"):
                 # Create a floating network + test floating pool
                 cmds.extend(FLOATING_NET_CMDS)
                 mp["FLOATING_RANGE"] = self.cfg.getdefaulted("nova", "floating_range", "172.24.4.224/28")
                 mp["TEST_FLOATING_RANGE"] = self.cfg.getdefaulted("nova", "test_floating_range", "192.168.253.0/29")
                 mp["TEST_FLOATING_POOL"] = self.cfg.getdefaulted("nova", "test_floating_pool", "test")
         else:
             LOG.info("Not creating floating IPs (not supported by quantum server)")
             LOG.info(
                 "Waiting %s seconds so that quantum can start up before running first time init." % (self.wait_time)
             )
             sh.sleep(self.wait_time)
         # Anything to run??
         if cmds:
             utils.execute_template(*cmds, params=mp)
         # Writing this makes sure that we don't init again
         cmd_mp = {"cmds": cmds, "replacements": mp}
         sh.write_file(ran_fn, utils.prettify_yaml(cmd_mp))
         LOG.info("If you wish to re-run initialization, delete %s", colorizer.quote(ran_fn))
Example #22
 def __init__(self, *args, **kargs):
     comp.PythonRuntime.__init__(self, *args, **kargs)
     self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
     self.wait_time = max(self.cfg.getint('DEFAULT', 'service_wait_seconds'), 1)
     self.init_fn = sh.joinpths(self.get_option('trace_dir'), INIT_WHAT_HAPPENED)
     (fn, contents) = utils.load_template(self.name, INIT_WHAT_FN)
     self.init_what = yaml.load(contents)
Example #23
 def app_options(self, app):
     if app.find('api') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', API_CONF)]
     elif app.find('registry') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', REG_CONF)]
     else:
         return []
Example #24
 def _create_repo(self, repo_name):
     repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
     src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
     for a_dir in (repo_dir, src_repo_dir):
         if not sh.isdir(a_dir):
             sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
         cmdline = ["createrepo", a_dir]
         LOG.info("Creating repo at %s", a_dir)
         sh.execute(cmdline)
     repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
     LOG.info("Writing %s", repo_filename)
     (_fn, content) = utils.load_template("packaging", "common.repo")
     params = {
         "repo_name": repo_name,
         "baseurl_bin": "file://%s" % repo_dir,
         "baseurl_src": "file://%s" % src_repo_dir,
     }
     sh.write_file(repo_filename, utils.expand_template(content, params),
                   tracewriter=self.tracewriter)
     # Install *.repo file so that anvil deps will be available
     # when building OpenStack
     system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
     sh.copy(repo_filename, system_repo_filename)
     LOG.info("Copying to %s", system_repo_filename)
     self.tracewriter.file_touched(system_repo_filename)
Example #25
 def _create_package(self):
     files = self._gather_files()
     params = {
         'files': files,
         'requires': self._requirements(),
         'obsoletes': self._obsoletes(),
         'conflicts': self._conflicts(),
         'defines': self._defines(),
         'undefines': self._undefines(),
         'build': self._build_details(),
         'who': sh.getuser(),
         'date': utils.iso8601(),
         'details': self.details,
     }
     (_fn, content) = utils.load_template('packaging', 'spec.tmpl')
     spec_base = self._make_fn("spec")
     spec_fn = sh.joinpths(self.build_paths['specs'], spec_base)
     LOG.debug("Creating spec file %s with params:", spec_fn)
     files['sources'].append("%s.tar.gz" % (spec_base))
     utils.log_object(params, logger=LOG, level=logging.DEBUG)
     sh.write_file(spec_fn, utils.expand_template(content, params))
     tar_it(sh.joinpths(self.build_paths['sources'],
                        "%s.tar.gz" % (spec_base)),
            spec_base,
            wkdir=self.build_paths['specs'])
Example #26
File: yum.py Project: jzako/anvil
 def __init__(self, distro, root_dir,
              instances, opts, group, prior_groups):
     super(YumDependencyHandler, self).__init__(distro, root_dir,
                                                instances, opts, group,
                                                prior_groups)
     # Various paths we will use while operating
     self.rpmbuild_dir = sh.joinpths(self.deps_dir, "rpmbuild")
     self.prebuild_dir = sh.joinpths(self.deps_dir, "prebuild")
     self.deps_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps")
     self.deps_src_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps-sources")
     self.rpm_sources_dir = sh.joinpths(self.rpmbuild_dir, "SOURCES")
     self.anvil_repo_dir = sh.joinpths(self.root_dir, "repo")
     self.generated_srpms_filename = sh.joinpths(self.deps_dir, "generated-srpms-%s" % group)
     self.build_requires_filename = sh.joinpths(self.deps_dir, "build-requires-%s" % group)
     self.yum_satisfies_filename = sh.joinpths(self.deps_dir, "yum-satisfiable-%s" % group)
     self.rpm_build_requires_filename = sh.joinpths(self.deps_dir, "rpm-build-requires-%s" % group)
     # Executables we require to operate
     self.rpmbuild_executable = sh.which("rpmbuild")
     self.specprint_executable = sh.which('specprint', ["tools/"])
     # We inspect yum for packages, this helper allows us to do this.
     self.helper = yum_helper.Helper(self.log_dir, self.REPOS)
     self.envra_helper = envra_helper.Helper()
     # See if we are requested to run at a higher make parallelism level
     try:
         self.jobs = max(self.JOBS, int(self.opts.get('jobs')))
     except (TypeError, ValueError):
         self.jobs = self.JOBS
Example #27
 def _install_into_venv(self,
                        instance,
                        requirements,
                        upgrade=False,
                        extra_env_overrides=None):
     venv_dir = self._venv_directory_for(instance)
     base_pip = [sh.joinpths(venv_dir, 'bin', 'pip')]
     env_overrides = {
         'PATH':
         os.pathsep.join([
             sh.joinpths(venv_dir, "bin"),
             env.get_key('PATH', default_value='')
         ]),
         'VIRTUAL_ENV':
         venv_dir,
     }
     if extra_env_overrides:
         env_overrides.update(extra_env_overrides)
     cmd = list(base_pip) + ['install']
     if upgrade:
         cmd.append("--upgrade")
     if isinstance(requirements, six.string_types):
         cmd.extend(['--requirement', requirements])
     else:
         for req in requirements:
             cmd.append(str(req))
     count = self.install_counters.get(instance.name, 0)
     self.install_counters[instance.name] = count + 1
     out_filename = sh.joinpths(
         self.log_dir, "venv-install-%s-%s.log" % (instance.name, count))
     sh.execute_save_output(cmd, out_filename, env_overrides=env_overrides)
Example #28
 def _create_repo(self, repo_name):
     repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
     src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                self.SRC_REPOS[repo_name])
     for a_dir in (repo_dir, src_repo_dir):
         if not sh.isdir(a_dir):
             sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
         cmdline = ["createrepo", a_dir]
         LOG.info("Creating repo at %s", a_dir)
         sh.execute(cmdline)
     repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
     LOG.info("Writing %s", repo_filename)
     (_fn, content) = utils.load_template("packaging", "common.repo")
     params = {
         "repo_name": repo_name,
         "baseurl_bin": "file://%s" % repo_dir,
         "baseurl_src": "file://%s" % src_repo_dir,
     }
     sh.write_file(repo_filename,
                   utils.expand_template(content, params),
                   tracewriter=self.tracewriter)
     # NOTE(harlowja): Install *.repo file so that anvil deps will be available
     # when building openstack core project packages.
     system_repo_filename = sh.joinpths(self.YUM_REPO_DIR,
                                        "%s.repo" % repo_name)
     sh.copy(repo_filename,
             system_repo_filename,
             tracewriter=self.tracewriter)
     LOG.info("Copied to %s", system_repo_filename)
Example #29
 def app_options(self, app):
     if app.find('api') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', API_CONF)]
     elif app.find('registry') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', REG_CONF)]
     else:
         return []
Example #30
 def _unpack_tar(self, file_name, file_location, tmp_dir):
     (root_name, _) = os.path.splitext(file_name)
     tar_members = self._filter_files(
         self._get_tar_file_members(file_location))
     (root_img_fn, ramdisk_fn,
      kernel_fn) = self._find_pieces(tar_members, file_location)
     if not root_img_fn:
         msg = "Tar file %r has no root image member" % (file_name)
         raise IOError(msg)
     kernel_real_fn = None
     root_real_fn = None
     ramdisk_real_fn = None
     self._log_pieces_found('archive', root_img_fn, ramdisk_fn, kernel_fn)
     extract_dir = sh.mkdir(sh.joinpths(tmp_dir, root_name))
     with contextlib.closing(tarfile.open(file_location, 'r')) as tfh:
         for m in tfh.getmembers():
             if m.name == root_img_fn:
                 root_real_fn = sh.joinpths(extract_dir,
                                            sh.basename(root_img_fn))
                 self._unpack_tar_member(tfh, m, root_real_fn)
             elif ramdisk_fn and m.name == ramdisk_fn:
                 ramdisk_real_fn = sh.joinpths(extract_dir,
                                               sh.basename(ramdisk_fn))
                 self._unpack_tar_member(tfh, m, ramdisk_real_fn)
             elif kernel_fn and m.name == kernel_fn:
                 kernel_real_fn = sh.joinpths(extract_dir,
                                              sh.basename(kernel_fn))
                 self._unpack_tar_member(tfh, m, kernel_real_fn)
     return self._describe(root_real_fn, ramdisk_real_fn, kernel_real_fn)
Example #31
def download(distro, uri, target_dir, **kwargs):
    puri = urlparse(uri)
    scheme = puri.scheme.lower()
    path = puri.path
    if scheme in ['git'] or path.find('.git') != -1:
        dirs_made = sh.mkdirslist(target_dir)
        downloader = GitDownloader(distro, uri, target_dir)
        downloader.download()
        return dirs_made
    if scheme in ['http', 'https']:
        dirs_made = []
        with utils.tempdir() as tdir:
            fn = sh.basename(path)
            downloader = UrlLibDownloader(uri, sh.joinpths(tdir, fn))
            downloader.download()
            if fn.endswith('.tar.gz'):
                dirs_made = sh.mkdirslist(target_dir)
                cmd = ['tar', '-xzvf', sh.joinpths(tdir, fn), '-C', target_dir]
                sh.execute(*cmd)
            elif fn.endswith('.zip'):
                # TODO(harlowja) this might not be 100% right...
                # we might have to move the finished directory...
                dirs_made = sh.mkdirslist(target_dir)
                cmd = ['unzip', sh.joinpths(tdir, fn), '-d', target_dir]
                sh.execute(*cmd)
            else:
                raise excp.DownloadException("Unable to extract %s downloaded from %s" % (fn, uri))
        return dirs_made
    else:
        raise excp.DownloadException("Unknown scheme %s, unable to download from %s" % (scheme, uri))
Example #32
 def _fetch_argv(self, name):
     if name.find('api') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', API_CONF)]
     elif name.find('registry') != -1:
         return ['--config-file', sh.joinpths('$CONFIG_DIR', REG_CONF)]
     else:
         return []
Example #33
 def __init__(self, distro, root_dir, instances, opts=None):
     super(YumDependencyHandler, self).__init__(distro, root_dir, instances,
                                                opts)
     # Various paths we will use while operating
     self.rpmbuild_dir = sh.joinpths(self.deps_dir, "rpmbuild")
     self.deps_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps")
     self.deps_src_repo_dir = sh.joinpths(self.deps_dir,
                                          "openstack-deps-sources")
     self.rpm_sources_dir = sh.joinpths(self.rpmbuild_dir, "SOURCES")
     self.anvil_repo_dir = sh.joinpths(self.root_dir, "repo")
     # Executables we require to operate
     self.py2rpm_executable = sh.which("py2rpm", ["tools/"])
     self.rpmbuild_executable = sh.which("rpmbuild")
     self.specprint_executable = sh.which('specprint', ["tools/"])
     self.yumfind_executable = sh.which("yumfind", ["tools/"])
     # We inspect yum for packages, this helper allows us to do this.
     self.helper = yum_helper.Helper(self.log_dir)
     # See if we are requested to run at a higher make parallelism level
     self._jobs = self.JOBS
     if 'jobs' in self.opts:
         try:
             self._jobs = int(self.opts.get('jobs', self.JOBS))
             if self._jobs <= 0:
                 self._jobs = self.JOBS
         except (TypeError, ValueError):
             pass
Example #34
 def _create_log_dirs(self):
     data_dir = sh.joinpths(self.get_option("app_dir"), self.cfg.getdefaulted("swift", "data_location", "data"))
     cfg_dir = self.get_option("cfg_dir")
     log_dir = sh.joinpths(data_dir, LOG_DIR)
     self.tracewriter.dirs_made(*sh.mkdirslist(sh.joinpths(log_dir, "hourly")))
     sh.symlink(sh.joinpths(cfg_dir, SYSLOG_CONF), SWIFT_RSYNC_LOC)
     self.tracewriter.symlink_made(SWIFT_RSYNC_LOC)
Example #35
 def app_options(self, app):
     return [
         '--config-file=%s' % (sh.joinpths('$CONFIG_DIR', ROOT_CONF)),
         "--debug",
         '--verbose',
         '--nouse-syslog',
         '--log-config=%s' % (sh.joinpths('$CONFIG_DIR', LOGGING_CONF)),
     ]
Example #36
 def _make_source_archive(self):
     with utils.tempdir() as td:
         arch_base_name = "%s-%s" % (self.details['name'], self.details['version'])
         sh.copytree(self.get_option('app_dir'), sh.joinpths(td, arch_base_name))
         arch_tmp_fn = sh.joinpths(td, "%s.tar.gz" % (arch_base_name))
         tar_it(arch_tmp_fn, arch_base_name, td)
         sh.move(arch_tmp_fn, self.build_paths['sources'])
     return "%s.tar.gz" % (arch_base_name)
Example #37
 def app_options(self, app):
     return [
         '--config-file=%s' % (sh.joinpths('$CONFIG_DIR', ROOT_CONF)),
         "--debug",
         '--verbose',
         '--nouse-syslog',
         '--log-config=%s' % (sh.joinpths('$CONFIG_DIR', LOGGING_CONF)),
     ]
Example #38
 def _fetch_argv(self, name):
     return [
         '--config-file=%s' % (sh.joinpths('$CONFIG_DIR', kconf.ROOT_CONF)),
         "--debug",
         '--verbose',
         '--nouse-syslog',
         '--log-config=%s' % (sh.joinpths('$CONFIG_DIR', kconf.LOGGING_CONF)),
     ]
Example #39
 def app_options(self, app):
     return [
         "--config-file=%s" % (sh.joinpths("$CONFIG_DIR", ROOT_CONF)),
         "--debug",
         "--verbose",
         "--nouse-syslog",
         "--log-config=%s" % (sh.joinpths("$CONFIG_DIR", LOGGING_CONF)),
     ]
Example #40
 def _fetch_argv(self, name):
     return [
         '--config-file=%s' % (sh.joinpths('$CONFIG_DIR', ROOT_CONF)),
         "--debug",
         '--verbose',
         '--nouse-syslog',
         '--log-config=%s' % (sh.joinpths('$CONFIG_DIR', LOGGING_CONF)),
     ]
Example #41
 def post_bootstrap(self):
     bs_rpmbuild_dir = sh.joinpths('.bootstrap', 'rpmbuild')
     if sh.isdir(bs_rpmbuild_dir):
         LOG.info("Moving RPMS build on bootstrap to deps repo")
         self._move_srpms("anvil-deps", bs_rpmbuild_dir)
         self._move_rpm_files(bs_rpmbuild_dir,
                              sh.joinpths(self.anvil_repo_dir, 'anvil-deps'))
         sh.deldir(bs_rpmbuild_dir)
Example #42
 def _get_dir_opts(self, component):
     component_dir = sh.joinpths(self._root_dir, component)
     trace_dir = sh.joinpths(component_dir, 'traces')
     app_dir = sh.joinpths(component_dir, 'app')
     return utils.OrderedDict([('app_dir', app_dir),
                               ('component_dir', component_dir),
                               ('root_dir', self._root_dir),
                               ('trace_dir', trace_dir)])
Example #43
 def _make_source_archive(self):
     with utils.tempdir() as td:
         arch_base_name = "%s-%s" % (self.details['name'], self.details['version'])
         sh.copytree(self.get_option('app_dir'), sh.joinpths(td, arch_base_name))
         arch_tmp_fn = sh.joinpths(td, "%s.tar.gz" % (arch_base_name))
         tar_it(arch_tmp_fn, arch_base_name, td)
         sh.move(arch_tmp_fn, self.build_paths['sources'])
     return "%s.tar.gz" % (arch_base_name)
Example #44
 def _setup_cleaner(self):
     LOG.info("Configuring cleaner template: %s", colorizer.quote(CLEANER_DATA_CONF))
     (_fn, contents) = utils.load_template(self.name, CLEANER_DATA_CONF)
     # FIXME(harlowja), stop placing in checkout dir...
     cleaner_fn = sh.joinpths(sh.joinpths(self.get_option('app_dir'), BIN_DIR), CLEANER_DATA_CONF)
     sh.write_file(cleaner_fn, contents)
     sh.chmod(cleaner_fn, 0755)
     self.tracewriter.file_touched(cleaner_fn)
Example #45
 def __init__(self, *args, **kargs):
     comp.PythonRuntime.__init__(self, *args, **kargs)
     self.wait_time = self.get_int_option('service_wait_seconds')
     self.virsh = lv.Virsh(self.wait_time, self.distro)
     self.config_path = sh.joinpths(self.get_option('cfg_dir'), API_CONF)
     self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
     self.net_init_fn = sh.joinpths(self.get_option('trace_dir'),
                                    NET_INITED_FN)
Example #46
 def symlinks(self):
     links = super(HorizonConfigurator, self).symlinks
     links[self.installer.access_log] = [
         sh.joinpths(self.link_dir, 'access.log')
     ]
     links[self.installer.error_log] = [
         sh.joinpths(self.link_dir, 'error.log')
     ]
     return links
Example #47
 def post_bootstrap(self):
     bs_rpmbuild_dir = sh.joinpths('.bootstrap', 'rpmbuild')
     if sh.isdir(bs_rpmbuild_dir):
         LOG.info("Moving RPMS build on bootstrap to deps repo")
         self._move_srpms("anvil-deps", bs_rpmbuild_dir)
         self._move_rpm_files(
             bs_rpmbuild_dir, sh.joinpths(self.anvil_repo_dir,
                                          'anvil-deps'))
         sh.deldir(bs_rpmbuild_dir)
Example #48
 def _get_component_dirs(self, component):
     component_dir = sh.joinpths(self.root_dir, component)
     trace_dir = sh.joinpths(component_dir, 'traces')
     app_dir = sh.joinpths(component_dir, 'app')
     return {
         'app_dir': app_dir,
         'component_dir': component_dir,
         'root_dir': self.root_dir,
         'trace_dir': trace_dir,
     }
Example #49
 def __init__(self, *args, **kargs):
     comp.PythonInstallComponent.__init__(self, *args, **kargs)
     self.blackhole_dir = sh.joinpths(self.get_option('app_dir'),
                                      '.blackhole')
     self.access_log = sh.joinpths(
         '/var/log/', self.distro.get_command_config('apache', 'name'),
         'horizon_access.log')
     self.error_log = sh.joinpths(
         '/var/log/', self.distro.get_command_config('apache', 'name'),
         'horizon_error.log')
Example #50
 def _setup_cleaner(self):
     LOG.info("Configuring cleaner template: %s",
              colorizer.quote(CLEANER_DATA_CONF))
     (_fn, contents) = utils.load_template(self.name, CLEANER_DATA_CONF)
     # FIXME(harlowja), stop placing in checkout dir...
     cleaner_fn = sh.joinpths(
         sh.joinpths(self.get_option('app_dir'), BIN_DIR),
         CLEANER_DATA_CONF)
     sh.write_file(cleaner_fn, contents)
     sh.chmod(cleaner_fn, 0755)
     self.tracewriter.file_touched(cleaner_fn)
Example #51
 def __init__(self, *args, **kargs):
     super(PythonBuildComponent, self).__init__(*args, **kargs)
     self._origins_fn = kargs['origins_fn']
     app_dir = self.get_option('app_dir')
     tools_dir = sh.joinpths(app_dir, 'tools')
     self.requires_files = []
     self.test_requires_files = []
     for path in [app_dir, tools_dir]:
         for req_fn in REQUIREMENT_FILES:
             self.requires_files.append(sh.joinpths(path, req_fn))
         for req_fn in TEST_REQUIREMENT_FILES:
             self.test_requires_files.append(sh.joinpths(path, req_fn))
Example #52
 def __init__(self, distro, root_dir, instances):
     super(YumDependencyHandler, self).__init__(distro, root_dir, instances)
     self.rpmbuild_dir = sh.joinpths(self.deps_dir, "rpmbuild")
     self.deps_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps")
     self.deps_src_repo_dir = sh.joinpths(self.deps_dir,
                                          "openstack-deps-sources")
     self.anvil_repo_filename = sh.joinpths(self.deps_dir, self.REPO_FN)
     # Track what files we create so they can be cleaned up on uninstall.
     trace_fn = tr.trace_filename(root_dir, 'deps')
     self.tracewriter = tr.TraceWriter(trace_fn, break_if_there=False)
     self.tracereader = tr.TraceReader(trace_fn)
     self.helper = yum_helper.Helper()
Example #53
    def download_dependencies(self, clear_cache=False):
        """Download dependencies from `$deps_dir/download-requires`.

        :param clear_cache: clear `$deps_dir/cache` dir (pip can work incorrectly
            when it has a cache)
        """
        sh.deldir(self.download_dir)
        sh.mkdir(self.download_dir, recurse=True)
        download_requires_filename = sh.joinpths(self.deps_dir,
                                                 "download-requires")
        raw_pips_to_download = self.filter_download_requires()
        pips_to_download = [
            pkg_resources.Requirement.parse(str(p.strip()))
            for p in raw_pips_to_download if p.strip()
        ]
        sh.write_file(download_requires_filename,
                      "\n".join(str(req) for req in pips_to_download))
        if not pips_to_download:
            return []
        pip_dir = sh.joinpths(self.deps_dir, "pip")
        pip_download_dir = sh.joinpths(pip_dir, "download")
        pip_build_dir = sh.joinpths(pip_dir, "build")
        pip_cache_dir = sh.joinpths(pip_dir, "cache")
        if clear_cache:
            sh.deldir(pip_cache_dir)
        pip_failures = []
        how_many = len(pips_to_download)
        for attempt in xrange(self.MAX_PIP_DOWNLOAD_ATTEMPTS):
            # NOTE(aababilov): pip has issues with already downloaded files
            sh.deldir(pip_download_dir)
            sh.mkdir(pip_download_dir, recurse=True)
            sh.deldir(pip_build_dir)
            utils.log_iterable(sorted(raw_pips_to_download),
                               logger=LOG,
                               header=("Downloading %s python dependencies "
                                       "(attempt %s)" % (how_many, attempt)))
            failed = False
            try:
                self._try_download_dependencies(attempt, pips_to_download,
                                                pip_download_dir,
                                                pip_cache_dir, pip_build_dir)
                pip_failures = []
            except exc.ProcessExecutionError as e:
                LOG.exception("Failed downloading python dependencies")
                pip_failures.append(e)
                failed = True
            if not failed:
                break
        if pip_failures:
            raise pip_failures[-1]
        for filename in sh.listdir(pip_download_dir, files_only=True):
            sh.move(filename, self.download_dir)
        return sh.listdir(self.download_dir, files_only=True)
Example #54
 def __init__(self, *args, **kargs):
     PkgInstallComponent.__init__(self, *args, **kargs)
     app_dir = self.get_option('app_dir')
     tools_dir = sh.joinpths(app_dir, 'tools')
     self.requires_files = [
         sh.joinpths(tools_dir, 'pip-requires'),
         sh.joinpths(app_dir, 'requirements.txt'),
     ]
     if self.get_bool_option('use_tests_requires', default_value=True):
         self.requires_files.append(sh.joinpths(tools_dir, 'test-requires'))
         self.requires_files.append(sh.joinpths(app_dir,
                                                'test-requirements.txt'))
     self._egg_info = None
Example #55
 def __init__(self, *args, **kargs):
     super(PythonComponent, self).__init__(*args, **kargs)
     app_dir = self.get_option('app_dir')
     tools_dir = sh.joinpths(app_dir, 'tools')
     self.requires_files = [
         sh.joinpths(tools_dir, 'pip-requires'),
         sh.joinpths(app_dir, 'requirements.txt'),
     ]
     self.test_requires_files = [
         sh.joinpths(tools_dir, 'test-requires'),
         sh.joinpths(app_dir, 'test-requirements.txt'),
     ]
     self._origins_fn = kargs['origins_fn']
Example #56
    def build_binary(self):
        def is_src_rpm(path):
            if not path:
                return False
            if not sh.isfile(path):
                return False
            if not path.lower().endswith('.src.rpm'):
                return False
            return True

        def list_src_rpms(path):
            path_files = []
            if sh.isdir(path):
                path_files = sh.listdir(path, filter_func=is_src_rpm)
            return sorted(path_files)

        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)

        for repo_name in self.REPOS:
            src_repo_dir = sh.joinpths(self.anvil_repo_dir,
                                       self.SRC_REPOS[repo_name])
            src_repo_files = list_src_rpms(src_repo_dir)
            if not src_repo_files:
                continue
            utils.log_iterable(
                src_repo_files,
                header=('Building %s RPM packages from their'
                        ' SRPMs for repo %s using %s jobs') %
                (len(src_repo_files), self.SRC_REPOS[repo_name], self.jobs),
                logger=LOG)
            rpmbuild_flags = "--rebuild"
            if self.opts.get("usr_only", False):
                rpmbuild_flags += " --define 'usr_only 1'"
            with sh.remove_before_after(self.rpmbuild_dir):
                self._create_rpmbuild_subdirs()
                self.py2rpm_helper.build_all_binaries(repo_name, src_repo_dir,
                                                      rpmbuild_flags,
                                                      self.tracewriter,
                                                      self.jobs)
                repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
                for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                    self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
                self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"),
                                     repo_dir)
            self._create_repo(repo_name)
Example #57
 def _get_package_dirs(instances):
     package_dirs = []
     for inst in instances:
         app_dir = inst.get_option("app_dir")
         if sh.isfile(sh.joinpths(app_dir, "setup.py")):
             package_dirs.append(app_dir)
     return package_dirs
Example #58
    def __init__(self, name, subsystems, instances, options, siblings, distro, **kwargs):
        # Subsystems this was requested with
        self.subsystems = subsystems

        # The component name (from config)
        self.name = name

        # Any component options
        self.options = options

        # All the other active instances
        self.instances = instances

        # All the other class names that can be used alongside this class
        self.siblings = siblings

        # The distribution 'interaction object'
        self.distro = distro

        # Turned on and off as phases get activated
        self.activated = False

        # Where our binaries will be located
        self.bin_dir = "/usr/bin/"

        # Where configuration will be written
        self.cfg_dir = sh.joinpths("/etc/", self.name)