Example #1
def get_directory_details(path):
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    path = sh.abspth(path)
    cache_key = "d:%s" % (sh.abspth(path))
    if cache_key in EGGS_DETAILED:
        return EGGS_DETAILED[cache_key]

    req = extract(path)
    req.source_dir = path
    req.run_egg_info()

    dependencies = []
    for d in req.requirements():
        if not d.startswith("-e") and d.find("#"):
            d = d.split("#")[0]
        d = d.strip()
        if d:
            dependencies.append(d)

    details = {
        'req': req.req,
        'dependencies': dependencies,
        'name': req.name,
        'pkg_info': req.pkg_info(),
        'dependency_links': req.dependency_links,
        'version': req.installed_version,
    }

    EGGS_DETAILED[cache_key] = details
    return details
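
Note: anvil's sh module is the project's own shell/path helper, and sh.abspth appears to be a thin wrapper around os.path.abspath. A minimal standalone sketch of the same cache-by-absolute-path pattern shown above, using only the standard library and illustrative names (not anvil's API):

import os

_DETAILS_CACHE = {}  # illustrative stand-in for EGGS_DETAILED


def get_details_cached(path, compute):
    """Memoize compute(path) keyed on the directory's absolute path."""
    if not os.path.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % path)
    cache_key = "d:%s" % os.path.abspath(path)
    if cache_key not in _DETAILS_CACHE:
        _DETAILS_CACHE[cache_key] = compute(os.path.abspath(path))
    return _DETAILS_CACHE[cache_key]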
Example #3
 def load_all(cls, path=settings.DISTRO_DIR):
     """Returns a list of the known distros."""
     results = []
     input_files = glob.glob(sh.joinpths(path, '*.yaml'))
     if not input_files:
         raise RuntimeError(
             'Did not find any distro definition files in %r' %
             path)
     for fn in input_files:
         cls_kvs = None
         filename = sh.abspth(fn)
         LOG.audit("Attempting to load distro definition from %r" % (filename))
         try:
             with open(filename, 'r') as f:
                 cls_kvs = yaml.load(f)
         except (IOError, yaml.YAMLError) as err:
             LOG.warning('Could not load distro definition from %r: %s',
                         filename, err)
         if cls_kvs is not None:
             try:
                 results.append(cls(**cls_kvs))
             except Exception as err:
                 LOG.warning('Could not initialize instance %s using parameter map %s: %s',
                             cls, cls_kvs, err)
     return results
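
The pattern above (glob for *.yaml files, parse each one, and log rather than fail on bad input) can be reproduced with PyYAML and the standard library. A rough sketch, using yaml.safe_load instead of yaml.load and illustrative names:

import glob
import os

import yaml


def load_yaml_definitions(path):
    """Parse every *.yaml file under path, skipping files that fail to load."""
    results = []
    for filename in glob.glob(os.path.join(path, '*.yaml')):
        try:
            with open(filename, 'r') as fh:
                results.append(yaml.safe_load(fh))
        except (IOError, yaml.YAMLError) as err:
            print("Could not load %r: %s" % (os.path.abspath(filename), err))
    return results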
Example #4
File: venv.py  Project: y2kbot/anvil
    def package_finish(self):
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = sh.joinpths(self.opts.get('venv_deploy_dir'),
                                         instance.name)
                replacer = functools.partial(
                    re.subn, re.escape(instance.get_option('component_dir')),
                    deploy_dir)
                bin_dir = sh.joinpths(venv_dir, 'bin')
                adjustments, files_replaced = self._replace_deployment_paths(bin_dir,
                                                                             replacer)
                if files_replaced:
                    LOG.info("Adjusted %s deployment path(s) in %s files",
                             adjustments, files_replaced)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(venv_dir, '%s-venv.tar.gz' % instance.name)
            LOG.info("Making tarball of %s built for %s at %s", venv_dir,
                     instance.name, tar_filename)
            with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as tfh:
                for path in sh.listdir(venv_dir, recursive=True):
                    tfh.add(path, recursive=False, arcname=path[len(venv_dir):])
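
The tarball step relies on arcname to strip the venv directory prefix so the archive unpacks relative to its root. A self-contained sketch of that trick with tarfile and os.walk, assuming sh.listdir's recursive listing behaves like a walk over files:

import contextlib
import os
import tarfile


def make_venv_tarball(venv_dir, tar_filename):
    """Archive venv_dir with member names relative to the venv root."""
    venv_dir = os.path.abspath(venv_dir)
    with contextlib.closing(tarfile.open(tar_filename, "w:gz")) as tfh:
        for root, _dirs, files in os.walk(venv_dir):
            for name in files:
                path = os.path.join(root, name)
                # Strip the venv_dir prefix, mirroring path[len(venv_dir):].
                tfh.add(path, recursive=False,
                        arcname=os.path.relpath(path, venv_dir))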
Example #5
 def build_all_srpms(self, package_files, tracewriter, jobs):
     (_fn,
      content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                     "source.mk")
     scripts_dir = sh.abspth(
         sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
     cmdline = self._start_cmdline(escape_values=True)[1:] + [
         "--scripts-dir",
         scripts_dir,
         "--source-only",
         "--rpm-base",
         self._rpmbuild_dir,
         "--debug",
     ]
     executable = " ".join(self._start_cmdline()[0:1])
     params = {
         "DOWNLOADS_DIR": self._download_dir,
         "LOGS_DIR": self._log_dir,
         "PY2RPM": executable,
         "PY2RPM_FLAGS": " ".join(cmdline)
     }
     marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
     if not sh.isdir(marks_dir):
         sh.mkdirslist(marks_dir, tracewriter=tracewriter)
     makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
     sh.write_file(makefile_path,
                   utils.expand_template(content, params),
                   tracewriter=tracewriter)
     utils.log_iterable(package_files,
                        header="Building %s SRPM packages using %s jobs" %
                        (len(package_files), jobs),
                        logger=LOG)
     self._execute_make(makefile_path, marks_dir, jobs)
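
Here utils.expand_template substitutes the params mapping into the source.mk template before the makefile is written out. If anvil's template helper is not available, the same idea can be sketched with string.Template; the $NAME placeholder syntax is an assumption, not anvil's actual template engine:

import string


def expand_template(content, params):
    """Substitute $NAME placeholders in content using the params mapping."""
    return string.Template(content).safe_substitute(params)


makefile_text = expand_template("DOWNLOADS_DIR := $DOWNLOADS_DIR\n",
                                {"DOWNLOADS_DIR": "/tmp/downloads"})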
Example #6
 def build_all_srpms(self, package_files, tracewriter, jobs):
     (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"), "source.mk")
     scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
     cmdline = self._start_cmdline(escape_values=True)[1:] + [
         "--scripts-dir", scripts_dir,
         "--source-only",
         "--rpm-base", self._rpmbuild_dir
     ]
     executable = " ".join(self._start_cmdline()[0:1])
     params = {
         "DOWNLOADS_DIR": self._download_dir,
         "LOGS_DIR": self._log_dir,
         "PY2RPM": executable,
         "PY2RPM_FLAGS": " ".join(cmdline)
     }
     marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
     if not sh.isdir(marks_dir):
         sh.mkdirslist(marks_dir, tracewriter=tracewriter)
     makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
     sh.write_file(makefile_path, utils.expand_template(content, params),
                   tracewriter=tracewriter)
     utils.log_iterable(package_files,
                        header="Building %s SRPM packages using %s jobs" %
                               (len(package_files), jobs),
                        logger=LOG)
     self._execute_make(makefile_path, marks_dir, jobs)
Example #7
 def _uninstall_dirs(self):
     dirs_made = self.tracereader.dirs_made()
     dirs_alive = filter(sh.isdir, [sh.abspth(d) for d in dirs_made])
     if dirs_alive:
         utils.log_iterable(dirs_alive, logger=LOG,
             header="Removing %s created directories" % (len(dirs_alive)))
         for dir_name in dirs_alive:
             sh.deldir(dir_name, run_as_root=True)
Example #8
 def _write_rc_file(self):
     fn = sh.abspth(settings.gen_rc_filename('core'))
     writer = env_rc.RcWriter(self.cfg, self.root_dir)
     if not sh.isfile(fn):
         LOG.info("Generating a file at %s that will contain your environment settings.", colorizer.quote(fn))
         writer.write(fn)
     else:
         LOG.info("Updating a file at %s that contains your environment settings.", colorizer.quote(fn))
         am_upd = writer.update(fn)
         LOG.info("Updated %s settings.", colorizer.quote(am_upd))
Example #9
def expand_patches(paths, patch_ext='.patch'):
    if not paths:
        return []
    all_paths = []
    # Expand patch files/dirs
    for path in paths:
        path = sh.abspth(path)
        if sh.isdir(path):
            all_paths.extend([p for p in sh.listdir(path, files_only=True)])
        else:
            all_paths.append(path)
    # Now filter on valid patches
    return [p for p in all_paths if _is_patch(p, patch_ext=patch_ext)]
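
Stripped of the sh helpers, the same expand-then-filter logic looks like the following sketch, where _is_patch is reduced to a simple extension check:

import os


def expand_patches(paths, patch_ext='.patch'):
    """Expand patch files and directories into a flat list of patch paths."""
    all_paths = []
    for path in paths or []:
        path = os.path.abspath(path)
        if os.path.isdir(path):
            all_paths.extend(sorted(os.path.join(path, name)
                                    for name in os.listdir(path)
                                    if os.path.isfile(os.path.join(path, name))))
        else:
            all_paths.append(path)
    return [p for p in all_paths if p.endswith(patch_ext)]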
Example #10
def expand_patches(paths):
    if not paths:
        return []
    all_paths = []
    # Expand patch files/dirs
    for path in paths:
        path = sh.abspth(path)
        if sh.isdir(path):
            all_paths.extend([p for p in sh.listdir(path, files_only=True)])
        else:
            all_paths.append(path)
    # Now filter on valid patches
    return [p for p in all_paths if _is_patch(p)]
Example #11
def read_requirement_files(files):
    pip_requirements = []
    session = pip_download.PipSession()
    for filename in files:
        if sh.isfile(filename):
            cache_key = "f:%s:%s" % (sh.abspth(filename), sh.getsize(filename))
            with REQUIREMENT_FILE_CACHE_LOCK:
                try:
                    reqs = REQUIREMENT_FILE_CACHE[cache_key]
                except KeyError:
                    reqs = tuple(
                        pip_req.parse_requirements(filename, session=session))
                    REQUIREMENT_FILE_CACHE[cache_key] = reqs
                pip_requirements.extend(reqs)
    return (pip_requirements, [req.req for req in pip_requirements])
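
The cache key combines the absolute path with the file size so a requirements file that changes on disk is re-parsed, and the lock keeps concurrent callers from parsing the same file twice. A generic sketch of that pattern with illustrative names:

import os
import threading

_FILE_CACHE = {}
_FILE_CACHE_LOCK = threading.Lock()


def parse_once(filename, parse):
    """Parse filename once per (absolute path, size) pair, guarded by a lock."""
    cache_key = "f:%s:%s" % (os.path.abspath(filename), os.path.getsize(filename))
    with _FILE_CACHE_LOCK:
        try:
            return _FILE_CACHE[cache_key]
        except KeyError:
            result = _FILE_CACHE[cache_key] = tuple(parse(filename))
            return result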
Example #12
 def _patches(self):
     your_patches = []
     in_patches = self.get_option('patches', 'package')
     if in_patches:
         for path in in_patches:
             path = sh.abspth(path)
             if sh.isdir(path):
                 for c_path in sh.listdir(path, files_only=True):
                     tgt_fn = sh.joinpths(self.build_paths['sources'], sh.basename(c_path))
                     sh.copy(c_path, tgt_fn)
                     your_patches.append(sh.basename(tgt_fn))
             else:
                 tgt_fn = sh.joinpths(self.build_paths['sources'], sh.basename(path))
                 sh.copy(path, tgt_fn)
                 your_patches.append(sh.basename(tgt_fn))
     return your_patches
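
Here each patch (or every file inside a patch directory) is copied into the build's sources directory, and only the base name of each staged file is recorded. A standard-library sketch of the same copy-and-record step:

import os
import shutil


def stage_patches(paths, sources_dir):
    """Copy patch files into sources_dir and return their base names."""
    staged = []
    for path in paths or []:
        path = os.path.abspath(path)
        if os.path.isdir(path):
            candidates = [os.path.join(path, name)
                          for name in sorted(os.listdir(path))]
        else:
            candidates = [path]
        for c_path in candidates:
            if os.path.isfile(c_path):
                tgt_fn = os.path.join(sources_dir, os.path.basename(c_path))
                shutil.copy(c_path, tgt_fn)
                staged.append(os.path.basename(tgt_fn))
    return staged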
Example #13
File: pip_helper.py  Project: jzako/anvil
def read_requirement_files(files):
    pip_requirements = []
    session = pip_download.PipSession()
    for filename in files:
        if sh.isfile(filename):
            cache_key = "f:%s:%s" % (sh.abspth(filename), sh.getsize(filename))
            with REQUIREMENT_FILE_CACHE_LOCK:
                try:
                    reqs = REQUIREMENT_FILE_CACHE[cache_key]
                except KeyError:
                    reqs = tuple(pip_req.parse_requirements(filename,
                                                            session=session))
                    REQUIREMENT_FILE_CACHE[cache_key] = reqs
                pip_requirements.extend(reqs)
    return (pip_requirements,
            [req.req for req in pip_requirements])
Example #14
 def _uninstall_dirs(self):
     dirs_made = self.tracereader.dirs_made()
     if dirs_made:
         dirs_made = [sh.abspth(d) for d in dirs_made]
         if self.get_option('keep_old', False):
             download_places = [path_location[0] for path_location in self.tracereader.download_locations()]
             if download_places:
                 utils.log_iterable(download_places, logger=LOG,
                     header="Keeping %s download directories (and there children directories)" % (len(download_places)))
                 for download_place in download_places:
                     dirs_made = sh.remove_parents(download_place, dirs_made)
         if dirs_made:
             utils.log_iterable(dirs_made, logger=LOG,
                 header="Removing %s created directories" % (len(dirs_made)))
             for dir_name in dirs_made:
                 if sh.isdir(dir_name):
                     sh.deldir(dir_name, run_as_root=True)
                 else:
                     LOG.warn("No directory found at %s - skipping", colorizer.quote(dir_name, quote_color='red'))
Example #15
 def load_file(cls, fn):
     persona_fn = sh.abspth(fn)
     LOG.audit("Loading persona from file %r", persona_fn)
     cls_kvs = None
     try:
         with open(persona_fn, "r") as fh:
             cls_kvs = yaml.load(fh.read())
     except (IOError, yaml.YAMLError) as err:
         LOG.warning('Could not load persona definition from %s: %s',
                          persona_fn, err)
     instance = None
     if cls_kvs is not None:
         try:
             cls_kvs['source'] = persona_fn
             instance = cls(**cls_kvs)
         except Exception as err:
             LOG.warning('Could not initialize instance %s using parameter map %s: %s',
                             cls, cls_kvs, err)
     return instance
Example #16
def apply_patches(patch_files, working_dir):
    if not patch_files:
        return
    apply_files = []
    for p in patch_files:
        p = sh.abspth(p)
        if not sh.isfile(p):
            LOG.warn("Can not apply non-file patch %s", p)
        apply_files.append(p)
    if not apply_files:
        return
    if not sh.isdir(working_dir):
        LOG.warn("Can only apply %s patches 'inside' a directory and not '%s'",
                 len(apply_files), working_dir)
        return
    with utils.chdir(working_dir):
        for p in apply_files:
            LOG.debug("Applying patch %s in directory %s", p, working_dir)
            patch_contents = sh.load_file(p)
            if patch_contents:
                sh.execute(*PATCH_CMD, process_input=patch_contents)
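
sh.execute(*PATCH_CMD, process_input=...) pipes each patch's contents into an external patch command run from the working directory. A rough equivalent with subprocess; the '-p1' strip level is an assumption, anvil's actual PATCH_CMD may use different flags:

import subprocess

PATCH_CMD = ['patch', '-p1']  # assumed flags, for illustration only


def apply_patch(patch_file, working_dir):
    """Feed one patch file to the patch command from inside working_dir."""
    with open(patch_file, 'rb') as fh:
        patch_contents = fh.read()
    if patch_contents:
        subprocess.run(PATCH_CMD, input=patch_contents,
                       cwd=working_dir, check=True)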
Example #17
    def package_finish(self):
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            release = str(instance.get_option("release", default_value=1))
            if release and not release.startswith('-'):
                release = '-' + release
            version_full = instance.egg_info['version'] + release

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = sh.joinpths(self.opts.get('venv_deploy_dir'),
                                         instance.name)
                replacer = functools.partial(
                    re.subn, re.escape(instance.get_option('component_dir')),
                    deploy_dir)
                bin_dir = sh.joinpths(venv_dir, 'bin')
                adjustments, files_replaced = self._replace_deployment_paths(
                    bin_dir, replacer)
                if files_replaced:
                    LOG.info("Adjusted %s deployment path(s) in %s files",
                             adjustments, files_replaced)

                tar_path = sh.joinpths(
                    self.opts.get('venv_deploy_dir'), '%s/%s-%s-venv/venv' %
                    (instance.name, instance.name, version_full))
            else:
                tar_path = '%s/%s-%s-venv/venv' % (instance.name,
                                                   instance.name, version_full)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(
                venv_dir, '%s-%s-venv.tar.gz' % (instance.name, version_full))
            LOG.info("Making tarball of %s built for %s with version %s at %s",
                     venv_dir, instance.name, version_full, tar_filename)
            utils.time_it(functools.partial(_on_finish, "Tarball creation"),
                          self._make_tarball, venv_dir, tar_filename, tar_path)
Example #18
File: venv.py  Project: jzako/anvil
    def package_finish(self):
        super(VenvDependencyHandler, self).package_finish()
        for instance in self.instances:
            if not self._is_buildable(instance):
                continue
            venv_dir = sh.abspth(self._venv_directory_for(instance))

            release = str(instance.get_option("release", default_value=1))
            if release and not release.startswith('-'):
                release = '-' + release
            version_full = instance.egg_info['version'] + release

            # Replace paths with virtualenv deployment directory.
            if self.opts.get('venv_deploy_dir'):
                deploy_dir = sh.joinpths(self.opts.get('venv_deploy_dir'),
                                         instance.name)
                replacer = functools.partial(
                    re.subn, re.escape(instance.get_option('component_dir')),
                    deploy_dir)
                bin_dir = sh.joinpths(venv_dir, 'bin')
                adjustments, files_replaced = self._replace_deployment_paths(bin_dir,
                                                                             replacer)
                if files_replaced:
                    LOG.info("Adjusted %s deployment path(s) in %s files",
                             adjustments, files_replaced)

                tar_path = sh.joinpths(self.opts.get('venv_deploy_dir'), '%s/%s-%s-venv/venv' % (
                                       instance.name, instance.name, version_full))
            else:
                tar_path = '%s/%s-%s-venv/venv' % (instance.name, instance.name, version_full)

            # Create a tarball containing the virtualenv.
            tar_filename = sh.joinpths(venv_dir, '%s-%s-venv.tar.gz' % (instance.name,
                                       version_full))
            LOG.info("Making tarball of %s built for %s with version %s at %s", venv_dir,
                     instance.name, version_full, tar_filename)
            utils.time_it(functools.partial(_on_finish, "Tarball creation"),
                          self._make_tarball, venv_dir, tar_filename, tar_path)
Example #19
 def _begin_start(self, name, program, args):
     run_trace = tr.TraceWriter(tr.trace_fn(self.runtime.get_option('trace_dir'), SCREEN_TEMPL % (name)))
     run_trace.trace(NAME, name)
     run_trace.trace(ARGS, json.dumps(args))
     full_cmd = [program] + list(args)
     session_name = self._get_session()
     inited_screen = False
     if session_name is None:
         inited_screen = True
         self._do_screen_init()
         session_name = self._get_session()
         if session_name is None:
             msg = "After initializing screen with session named %r, no screen session with that name was found!" % (SESSION_NAME)
             raise excp.StartException(msg)
     run_trace.trace(SESSION_ID, session_name)
     if inited_screen or not sh.isfile(SCREEN_RC):
         rc_gen = ScreenRcGenerator(self)
         rc_contents = rc_gen.create(session_name, self._get_env())
         out_fn = sh.abspth(SCREEN_RC)
         LOG.info("Writing your created screen rc file to %r" % (out_fn))
         sh.write_file(out_fn, rc_contents)
     self._do_start(session_name, name, full_cmd)
     return run_trace.filename()
Example #20
File: __main__.py  Project: minacel/anvil
def run(args):
    """Starts the execution after args have been parsed and logging has been setup.
    """

    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", '').strip().lower()
    if re.match(r"^moo[o]*$", action):
        return

    try:
        runner_cls = actions.class_for(action)
    except Exception as ex:
        raise excp.OptionException(str(ex))

    if runner_cls.needs_sudo:
        ensure_perms()

    # Check persona file exists
    persona_fn = args.pop('persona_fn')
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" %
                                   (persona_fn))

    # Check origin file exists
    origins_fn = args.pop('origins_fn')
    if not origins_fn:
        raise excp.OptionException("No origin file name specified!")
    if not sh.isfile(origins_fn):
        raise excp.OptionException("Invalid origin file %r specified!" %
                                   (origins_fn))
    args['origins_fn'] = sh.abspth(origins_fn)

    # Determine the root directory...
    root_dir = sh.abspth(args.pop("dir"))

    (repeat_string, line_max_len) = utils.welcome()
    print(pprint.center_text("Action Runner", repeat_string, line_max_len))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Ensure the anvil dirs are there if others are about to use it...
    if not sh.isdir(root_dir):
        LOG.info("Creating anvil root directory at path: %s", root_dir)
        sh.mkdir(root_dir)
    try:
        for d in ANVIL_DIRS:
            if sh.isdir(d):
                continue
            LOG.info("Creating anvil auxiliary directory at path: %s", d)
            sh.mkdir(d)
    except OSError as e:
        LOG.warn("Failed ensuring auxiliary directories due to %s", e)

    # Load the origins...
    origins = _origins.load(args['origins_fn'],
                            patch_file=args.get('origins_patch'))

    # Load the distro/s
    possible_distros = distro.load(settings.DISTRO_DIR,
                                   distros_patch=args.get('distros_patch'))

    # Load + match the persona to the possible distros...
    try:
        persona_obj = persona.load(persona_fn)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" %
                                   (persona_fn, e))
    else:
        dist = persona_obj.match(possible_distros, origins)
        LOG.info('Persona selected distro: %s from %s possible distros',
                 colorizer.quote(dist.name), len(possible_distros))

    # Update the dist with any other info...
    dist.inject_platform_overrides(persona_obj.distro_updates,
                                   source=persona_fn)
    dist.inject_platform_overrides(origins, source=origins_fn)

    # Print it out...
    LOG.debug("Distro settings are:")
    for line in dist.pformat(item_max_len=128).splitlines():
        LOG.debug(line)

    # Get the object we will be running with...
    runner = runner_cls(distro=dist,
                        root_dir=root_dir,
                        name=action,
                        cli_opts=args)

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info("Starting action %s on %s for distro: %s",
             colorizer.quote(action), colorizer.quote(utils.iso8601()),
             colorizer.quote(dist.name))
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("Using origins: %s", colorizer.quote(origins_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info("It took %s seconds or %s minutes to complete action %s.",
             colorizer.quote(pretty_time['seconds']),
             colorizer.quote(pretty_time['minutes']), colorizer.quote(action))
Example #21
File: __main__.py  Project: jzako/anvil
def run(args):
    """Starts the execution after args have been parsed and logging has been setup.
    """

    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", '').strip().lower()
    if re.match(r"^moo[o]*$", action):
        return

    try:
        runner_cls = actions.class_for(action)
    except Exception as ex:
        raise excp.OptionException(str(ex))

    if runner_cls.needs_sudo:
        ensure_perms()

    # Check persona file exists
    persona_fn = args.pop('persona_fn')
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" % (persona_fn))

    # Check origin file exists
    origins_fn = args.pop('origins_fn')
    if not origins_fn:
        raise excp.OptionException("No origin file name specified!")
    if not sh.isfile(origins_fn):
        raise excp.OptionException("Invalid origin file %r specified!" % (origins_fn))
    args['origins_fn'] = sh.abspth(origins_fn)

    # Determine the root directory...
    root_dir = sh.abspth(args.pop("dir"))

    (repeat_string, line_max_len) = utils.welcome()
    print(pprint.center_text("Action Runner", repeat_string, line_max_len))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Ensure the anvil dirs are there if others are about to use it...
    if not sh.isdir(root_dir):
        LOG.info("Creating anvil root directory at path: %s", root_dir)
        sh.mkdir(root_dir)
    try:
        for d in ANVIL_DIRS:
            if sh.isdir(d):
                continue
            LOG.info("Creating anvil auxiliary directory at path: %s", d)
            sh.mkdir(d)
    except OSError as e:
        LOG.warn("Failed ensuring auxiliary directories due to %s", e)

    # Load the origins...
    origins = _origins.load(args['origins_fn'],
                            patch_file=args.get('origins_patch'))

    # Load the distro/s
    possible_distros = distro.load(settings.DISTRO_DIR,
                                   distros_patch=args.get('distros_patch'))

    # Load + match the persona to the possible distros...
    try:
        persona_obj = persona.load(persona_fn)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" % (persona_fn, e))
    else:
        dist = persona_obj.match(possible_distros, origins)
        LOG.info('Persona selected distro: %s from %s possible distros',
                 colorizer.quote(dist.name), len(possible_distros))

    # Update the dist with any other info...
    dist.inject_platform_overrides(persona_obj.distro_updates, source=persona_fn)
    dist.inject_platform_overrides(origins, source=origins_fn)

    # Print it out...
    LOG.debug("Distro settings are:")
    for line in dist.pformat(item_max_len=128).splitlines():
        LOG.debug(line)

    # Get the object we will be running with...
    runner = runner_cls(distro=dist,
                        root_dir=root_dir,
                        name=action,
                        cli_opts=args)

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info("Starting action %s on %s for distro: %s",
             colorizer.quote(action), colorizer.quote(utils.iso8601()),
             colorizer.quote(dist.name))
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("Using origins: %s", colorizer.quote(origins_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info("It took %s seconds or %s minutes to complete action %s.",
             colorizer.quote(pretty_time['seconds']), colorizer.quote(pretty_time['minutes']), colorizer.quote(action))
Example #22
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [pkg_resources.Requirement.parse(name).key
                   for name in self.python_names]
        yum_map = self._get_known_yum_packages()
        pips_keys = set([p.key for p in pips_downloaded])

        def _filter_package_files(package_files):
            package_reqs = []
            package_keys = []
            for filename in package_files:
                package_details = pip_helper.get_archive_details(filename)
                package_reqs.append(package_details['req'])
                package_keys.append(package_details['req'].key)
            package_rpm_names = self._convert_names_python2rpm(package_keys)
            filtered_files = []
            for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                                 package_rpm_names):
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
                if not repo:
                    filtered_files.append(filename)
                else:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it can be satisfied by %s from repository "
                             "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name),
                             colorizer.quote(repo))
            return filtered_files

        LOG.info("Filtering %s downloaded files.", len(package_files))
        filtered_package_files = _filter_package_files(package_files)
        if not filtered_package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        for filename in package_files:
            if filename not in filtered_package_files:
                sh.unlink(filename)
        package_files = sorted(filtered_package_files)

        # Now build them into SRPM rpm files.
        (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"), "source.mk")
        scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
        py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
            "--scripts-dir", scripts_dir,
            "--source-only",
            "--rpm-base", self.rpmbuild_dir,
        ]
        params = {
            "DOWNLOADS_DIR": self.download_dir,
            "LOGS_DIR": self.log_dir,
            "PY2RPM": self.py2rpm_executable,
            "PY2RPM_FLAGS": " ".join(py2rpm_options),
        }
        marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
        if not sh.isdir(marks_dir):
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
        sh.write_file(makefile_path, utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        utils.log_iterable(package_files,
                           header="Building %s SRPM packages using %s jobs" % (len(package_files), self._jobs),
                           logger=LOG)
        self._execute_make(makefile_path, marks_dir)
Example #23
def main():
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])

    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        requires_files = [
            sh.abspth(f) for f in all_contents
            if re.search(r"(test|pip)[-]requires$", f, re.I)
        ]

    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())

    print("Comparing pips/pip2pkgs in %s to those found in %s" %
          (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))

    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))

    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)

    all_known_names = sorted(list(set(all_known_names)))
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = "  + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print(
            "The following distro yaml mappings may be required but were not found:"
        )
        for n in sorted(not_found):
            msg = "  + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    if r.lower().strip() == name:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)
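
At its core the script is a two-way set difference between the names mapped in the distro yaml and the names required by the pip-requires/test-requires files. That comparison can be distilled into an illustrative helper (not part of the script):

def mapping_drift(known_names, requirements):
    """Return (possibly unneeded, possibly missing) names from a two-way diff."""
    known = set(n.lower().strip() for n in known_names)
    required = set(r.lower().strip() for r in requirements)
    return sorted(known - required), sorted(required - known)


unneeded, missing = mapping_drift(['pip', 'six'], ['six', 'pyyaml'])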
Example #24
def run(args):
    """
    Starts the execution after args have been parsed and logging has been setup.

    Arguments: N/A
    Returns: True for success to run, False for failure to start
    """
    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", '').strip().lower()
    if action not in actions.names():
        raise excp.OptionException("Invalid action name %r specified!" %
                                   (action))

    # Determine + setup the root directory...
    # If not provided attempt to locate it via the environment control files
    args_root_dir = args.pop("dir")
    root_dir = env.get_key('INSTALL_ROOT')
    if not root_dir:
        root_dir = args_root_dir
    if not root_dir:
        root_dir = sh.joinpths(sh.gethomedir(), 'openstack')
    root_dir = sh.abspth(root_dir)
    sh.mkdir(root_dir)

    persona_fn = args.pop('persona_fn')
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" %
                                   (persona_fn))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Stash the dryrun value (if any)
    if 'dryrun' in args:
        env.set("ANVIL_DRYRUN", str(args['dryrun']))

    # Ensure the anvil etc dir is there if others are about to use it
    ensure_anvil_dir()

    # Load the distro
    dist = distro.load(settings.DISTRO_DIR)

    # Load + verify the person
    try:
        persona_obj = persona.load(persona_fn)
        persona_obj.verify(dist)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" %
                                   (persona_fn, e))

    # Get the object we will be running with...
    runner_cls = actions.class_for(action)
    runner = runner_cls(distro=dist,
                        root_dir=root_dir,
                        name=action,
                        cli_opts=args)

    (repeat_string, line_max_len) = utils.welcome()
    print(center_text("Action Runner", repeat_string, line_max_len))

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info("Starting action %s on %s for distro: %s",
             colorizer.quote(action), colorizer.quote(utils.iso8601()),
             colorizer.quote(dist.name))
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info("It took %s seconds or %s minutes to complete action %s.",
             colorizer.quote(pretty_time['seconds']),
             colorizer.quote(pretty_time['minutes']), colorizer.quote(action))
Example #25
File: yum.py  Project: skybobbi/anvil
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [
            pkg_resources.Requirement.parse(name).key
            for name in self.python_names
        ]
        yum_map = self._get_known_yum_packages()
        pips_keys = set([p.key for p in pips_downloaded])

        def _filter_package_files(package_files):
            package_reqs = []
            package_keys = []
            for filename in package_files:
                package_details = pip_helper.get_archive_details(filename)
                package_reqs.append(package_details['req'])
                package_keys.append(package_details['req'].key)
            package_rpm_names = self._convert_names_python2rpm(package_keys)
            filtered_files = []
            for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                                 package_rpm_names):
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                              "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
                if not repo:
                    filtered_files.append(filename)
                else:
                    LOG.info(("Dependency %s was downloaded additionally "
                              "but it can be satisfied by %s from repository "
                              "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name), colorizer.quote(repo))
            return filtered_files

        LOG.info("Filtering %s downloaded files.", len(package_files))
        filtered_package_files = _filter_package_files(package_files)
        if not filtered_package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        for filename in package_files:
            if filename not in filtered_package_files:
                sh.unlink(filename)
        package_files = sorted(filtered_package_files)

        # Now build them into SRPM rpm files.
        (_fn,
         content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                        "source.mk")
        scripts_dir = sh.abspth(
            sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
        py2rpm_options = self._py2rpm_start_cmdline()[1:] + [
            "--scripts-dir",
            scripts_dir,
            "--source-only",
            "--rpm-base",
            self.rpmbuild_dir,
        ]
        params = {
            "DOWNLOADS_DIR": self.download_dir,
            "LOGS_DIR": self.log_dir,
            "PY2RPM": self.py2rpm_executable,
            "PY2RPM_FLAGS": " ".join(py2rpm_options),
        }
        marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
        if not sh.isdir(marks_dir):
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        makefile_path = sh.joinpths(self.deps_dir, "deps.mk")
        sh.write_file(makefile_path,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        utils.log_iterable(package_files,
                           header="Building %s SRPM packages using %s jobs" %
                           (len(package_files), self._jobs),
                           logger=LOG)
        self._execute_make(makefile_path, marks_dir)
Example #26
def run(args):
    """Starts the execution after args have been parsed and logging has been setup.
    """

    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", "").strip().lower()
    if re.match(r"^moo[o]*$", action):
        return

    try:
        runner_cls = actions.class_for(action)
    except Exception as ex:
        raise excp.OptionException(str(ex))

    if runner_cls.needs_sudo:
        ensure_perms()

    # Check persona file exists
    persona_fn = args.pop("persona_fn")
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" % (persona_fn))

    # Check origin file exists
    origins_fn = args.pop("origins_fn")
    if not origins_fn:
        raise excp.OptionException("No origin file name specified!")
    if not sh.isfile(origins_fn):
        raise excp.OptionException("Invalid origin file %r specified!" % (origins_fn))
    args["origins_fn"] = sh.abspth(origins_fn)

    # Determine the root directory...
    root_dir = sh.abspth(args.pop("dir"))

    (repeat_string, line_max_len) = utils.welcome()
    print(pprint.center_text("Action Runner", repeat_string, line_max_len))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Stash the dryrun value (if any)
    if "dryrun" in args:
        sh.set_dry_run(args["dryrun"])

    # Ensure the anvil dirs are there if others are about to use it...
    ensure_anvil_dirs(root_dir)

    # Load the distro
    dist = distro.load(settings.DISTRO_DIR)

    # Load + verify the person
    try:
        persona_obj = persona.load(persona_fn)
        persona_obj.verify(dist)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" % (persona_fn, e))

    yum.YumDependencyHandler.jobs = args["jobs"]
    # Get the object we will be running with...
    runner = runner_cls(distro=dist, root_dir=root_dir, name=action, cli_opts=args)

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info(
        "Starting action %s on %s for distro: %s",
        colorizer.quote(action),
        colorizer.quote(utils.iso8601()),
        colorizer.quote(dist.name),
    )
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("Using origins: %s", colorizer.quote(origins_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info(
        "It took %s seconds or %s minutes to complete action %s.",
        colorizer.quote(pretty_time["seconds"]),
        colorizer.quote(pretty_time["minutes"]),
        colorizer.quote(action),
    )
Example #27
File: pip_helper.py  Project: jzako/anvil
def get_directory_details(path, pbr_version=None):
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    with EGGS_DETAILED_LOCK:
        path = sh.abspth(path)
        cache_key = "d:%s" % (sh.abspth(path))
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        details = None
        skip_paths = [
            sh.joinpths(path, "PKG-INFO"),
            sh.joinpths(path, "EGG-INFO"),
        ]
        skip_paths.extend(glob.glob(sh.joinpths(path, "*.egg-info")))
        if any(sh.exists(a_path) for a_path in skip_paths):
            # Some packages seem to not support the 'egg_info' call and
            # provide there own path/file that contains this information
            # already, so just use it if we can get at it...
            #
            # Ie for pyyaml3.x:
            #
            # error: invalid command 'egg_info'
            details = pkginfo.Develop(path)
        if not details or not details.name:
            cmd = [sys.executable, 'setup.py', 'egg_info']
            if pbr_version:
                env_overrides = {
                    "PBR_VERSION": str(pbr_version),
                }
            else:
                env_overrides = {}
            sh.execute(cmd, cwd=path, env_overrides=env_overrides)
            details = pkginfo.get_metadata(path)
        if not details or not details.name:
            raise RuntimeError("No egg detail information discovered"
                               " at '%s'" % path)

        egg_details = {
            'req': create_requirement(details.name, version=details.version),
        }
        for attr_name in ['description', 'author',
                          'version', 'name', 'summary']:
            egg_details[attr_name] = getattr(details, attr_name)
        for attr_name in ['description', 'author', 'summary']:
            attr_value = egg_details[attr_name]
            if isinstance(attr_value, six.text_type):
                # Fix any unicode which will cause unicode decode failures...
                # versions or names shouldn't be unicode, and the rest
                # we don't really care about being unicode (since its
                # just used for logging right now anyway...).
                #
                # The reason this is done is that 'elasticsearch' seems to
                # have a unicode author name, and that causes the log_object
                # to blowup, so just avoid that by replacing this information
                # in the first place.
                egg_details[attr_name] = attr_value.encode("ascii",
                                                           errors='replace')

        LOG.debug("Extracted '%s' egg detail information:", path)
        utils.log_object(egg_details, logger=LOG, level=logging.DEBUG)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
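
The fallback chain above is: read static metadata (PKG-INFO / *.egg-info) when it is already on disk, otherwise run setup.py egg_info (exporting PBR_VERSION when pinned) and then read the generated metadata. A condensed sketch of that chain, assuming the third-party pkginfo package's Develop and get_metadata helpers:

import os
import subprocess
import sys

import pkginfo  # third-party 'pkginfo' package


def probe_egg_metadata(path, pbr_version=None):
    """Try static package metadata first, then fall back to setup.py egg_info."""
    details = pkginfo.Develop(path)  # reads PKG-INFO / *.egg-info if available
    if not details.name:
        env = dict(os.environ)
        if pbr_version:
            env['PBR_VERSION'] = str(pbr_version)
        subprocess.run([sys.executable, 'setup.py', 'egg_info'],
                       cwd=path, env=env, check=True)
        details = pkginfo.get_metadata(path)
    if not details or not details.name:
        raise RuntimeError("No egg detail information discovered at %r" % path)
    return details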
Example #28
def main():
    if len(sys.argv) < 3:
        print("%s distro_yaml root_dir ..." % sys.argv[0])
        return 1
    root_dirs = sys.argv[2:]
    yaml_fn = sh.abspth(sys.argv[1])

    requires_files = []
    for d in root_dirs:
        all_contents = sh.listdir(d, recursive=True, files_only=True)
        requires_files = [sh.abspth(f) for f in all_contents
                          if re.search(r"(test|pip)[-]requires$", f, re.I)]

    requires_files = sorted(list(set(requires_files)))
    requirements = []
    source_requirements = {}
    for fn in requires_files:
        source_requirements[fn] = []
        for req in pip_helper.parse_requirements(sh.load_file(fn)):
            requirements.append(req.key.lower().strip())
            source_requirements[fn].append(req.key.lower().strip())

    print("Comparing pips/pip2pkgs in %s to those found in %s" % (yaml_fn, root_dirs))
    for fn in sorted(requires_files):
        print(" + " + str(fn))

    requirements = set(requirements)
    print("All known requirements:")
    for r in sorted(requirements):
        print("+ " + str(r))

    distro_yaml = utils.load_yaml(yaml_fn)
    components = distro_yaml.get('components', {})
    all_known_names = []
    components_pips = {}
    for (c, details) in components.items():
        components_pips[c] = []
        pip2pkgs = details.get('pip_to_package', [])
        pips = details.get('pips', [])
        known_names = []
        for item in pip2pkgs:
            known_names.append(item['name'].lower().strip())
        for item in pips:
            known_names.append(item['name'].lower().strip())
        components_pips[c].extend(known_names)
        all_known_names.extend(known_names)

    all_known_names = sorted(list(set(all_known_names)))
    not_needed = []
    for n in all_known_names:
        if n not in requirements:
            not_needed.append(n)
    if not_needed:
        print("The following distro yaml mappings may not be needed:")
        for n in sorted(not_needed):
            msg = "  + %s (" % (n)
            # Find which components said they need this...
            for (c, known_names) in components_pips.items():
                if n in known_names:
                    msg += c + ","
            msg += ")"
            print(msg)
    not_found = []
    for n in requirements:
        name = n.lower().strip()
        if name not in all_known_names:
            not_found.append(name)
    not_found = sorted(list(set(not_found)))
    if not_found:
        print("The following distro yaml mappings may be required but were not found:")
        for n in sorted(not_found):
            msg = "  + %s" % (n)
            msg += " ("
            # Find which file/s said they need this...
            for (fn, reqs) in source_requirements.items():
                matched = False
                for r in reqs:
                    if r.lower().strip() == name:
                        matched = True
                if matched:
                    msg += fn + ","
            msg += ")"
            print(msg)
    return len(not_found) + len(not_needed)
Example #29
File: yum.py  Project: aababilov/anvil
    def _build_dependencies(self):
        (pips_downloaded, package_files) = self.download_dependencies()

        # Analyze what was downloaded and eject things that were downloaded
        # by pip as a dependency of a download but which we do not want to
        # build or can satisfy by other means
        no_pips = [pkg_resources.Requirement.parse(name).key
                   for name in self.python_names]
        no_pips.extend(self.BANNED_PACKAGES)
        yum_map = self._get_yum_available()
        pips_keys = set([p.key for p in pips_downloaded])

        def _filter_package_files(package_files):
            package_reqs = []
            package_keys = []
            for filename in package_files:
                package_details = pip_helper.get_archive_details(filename)
                package_reqs.append(package_details['req'])
                package_keys.append(package_details['req'].key)
            package_rpm_names = self._convert_names_python2rpm(package_keys)
            filtered_files = []
            for (filename, req, rpm_name) in zip(package_files, package_reqs,
                                                 package_rpm_names):
                if req.key in no_pips:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it is disallowed."), colorizer.quote(req))
                    continue
                if req.key in pips_keys:
                    filtered_files.append(filename)
                    continue
                # See if pip tried to download it but we already can satisfy
                # it via yum and avoid building it in the first place...
                (_version, repo) = self._find_yum_match(yum_map, req, rpm_name)
                if not repo:
                    filtered_files.append(filename)
                else:
                    LOG.info(("Dependency %s was downloaded additionally "
                             "but it can be satisfied by %s from repository "
                             "%s instead."), colorizer.quote(req),
                             colorizer.quote(rpm_name),
                             colorizer.quote(repo))
            return filtered_files

        LOG.info("Filtering %s downloaded files.", len(package_files))
        package_files = _filter_package_files(package_files)
        if not package_files:
            LOG.info("No SRPM package dependencies to build.")
            return
        deps_makefile_name = sh.joinpths(self.deps_dir, "deps.mk")
        marks_dir = sh.joinpths(self.deps_dir, "marks-deps")
        sh.mkdirslist(marks_dir)
        with open(deps_makefile_name, "w") as makefile:
            scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR, "packaging", "scripts"))
            py2rpm_options = self.py2rpm_options() + [
                "--scripts-dir", scripts_dir,
                "--source-only",
                "--rpm-base", self.rpmbuild_dir,
            ]
            print >> makefile, "DOWNLOADS_DIR :=", self.download_dir
            print >> makefile, "LOGS_DIR :=", self.log_dir
            print >> makefile, "PY2RPM :=", self.py2rpm_executable
            print >> makefile, "PY2RPM_FLAGS :=", " ".join(py2rpm_options)
            print >> makefile, """
%.mark: $(DOWNLOADS_DIR)/%
\t@$(PY2RPM) $(PY2RPM_FLAGS) -- $^ &> $(LOGS_DIR)/py2rpm-$*.log
\t@touch "$@"
\t@echo "$* is processed"
"""
            print >> makefile, "MARKS :=", " ".join(
                "%s.mark" % sh.basename(i) for i in package_files)
            print >> makefile
            print >> makefile, "all: $(MARKS)"

        base_package_files = [sh.basename(f) for f in package_files]
        utils.log_iterable(base_package_files,
                           header="Building %s SRPM packages using %s jobs" %
                           (len(package_files), self.jobs),
                           logger=LOG)
        self._execute_make(deps_makefile_name, marks_dir)
Example #30
File: __main__.py  Project: spandhe/anvil
def run(args):
    """Starts the execution after args have been parsed and logging has been setup.
    """

    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", '').strip().lower()
    if re.match(r"^moo[o]*$", action):
        return

    try:
        runner_cls = actions.class_for(action)
    except Exception as ex:
        raise excp.OptionException(str(ex))

    if runner_cls.needs_sudo:
        ensure_perms()

    persona_fn = args.pop('persona_fn')
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" %
                                   (persona_fn))

    # Determine the root directory...
    root_dir = sh.abspth(args.pop("dir"))

    (repeat_string, line_max_len) = utils.welcome()
    print(pprint.center_text("Action Runner", repeat_string, line_max_len))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Stash the dryrun value (if any)
    if 'dryrun' in args:
        sh.set_dry_run(args['dryrun'])

    # Ensure the anvil dirs are there if others are about to use it...
    ensure_anvil_dirs(root_dir)

    # Load the distro
    dist = distro.load(settings.DISTRO_DIR)

    # Load + verify the person
    try:
        persona_obj = persona.load(persona_fn)
        persona_obj.verify(dist)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" %
                                   (persona_fn, e))

    yum.YumDependencyHandler.jobs = args["jobs"]
    # Get the object we will be running with...
    runner = runner_cls(distro=dist,
                        root_dir=root_dir,
                        name=action,
                        cli_opts=args)

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info("Starting action %s on %s for distro: %s",
             colorizer.quote(action), colorizer.quote(utils.iso8601()),
             colorizer.quote(dist.name))
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info("It took %s seconds or %s minutes to complete action %s.",
             colorizer.quote(pretty_time['seconds']),
             colorizer.quote(pretty_time['minutes']), colorizer.quote(action))
Example #31
def run(args):
    """
    Starts the execution after args have been parsed and logging has been setup.
    """

    LOG.debug("CLI arguments are:")
    utils.log_object(args, logger=LOG, level=logging.DEBUG, item_max_len=128)

    # Keep the old args around so we have the full set to write out
    saved_args = dict(args)
    action = args.pop("action", '').strip().lower()
    try:
        runner_cls = actions.class_for(action)
    except Exception as ex:
        raise excp.OptionException(str(ex))

    if runner_cls.needs_sudo:
        ensure_perms()

    persona_fn = args.pop('persona_fn')
    if not persona_fn:
        raise excp.OptionException("No persona file name specified!")
    if not sh.isfile(persona_fn):
        raise excp.OptionException("Invalid persona file %r specified!" % (persona_fn))

    # Determine + setup the root directory...
    # If not provided attempt to locate it via the environment control files
    args_root_dir = args.pop("dir")
    root_dir = env.get_key('INSTALL_ROOT')
    if not root_dir:
        root_dir = args_root_dir
    if not root_dir:
        root_dir = sh.joinpths(sh.gethomedir(), 'openstack')
    root_dir = sh.abspth(root_dir)

    (repeat_string, line_max_len) = utils.welcome()
    print(center_text("Action Runner", repeat_string, line_max_len))

    # !!
    # Here on out we should be using the logger (and not print)!!
    # !!

    # Stash the dryrun value (if any)
    if 'dryrun' in args:
        sh.set_dry_run(args['dryrun'])

    # Ensure the anvil dirs are there if others are about to use it...
    ensure_anvil_dirs(root_dir)

    # Load the distro
    dist = distro.load(settings.DISTRO_DIR)

    # Load + verify the person
    try:
        persona_obj = persona.load(persona_fn)
        persona_obj.verify(dist)
    except Exception as e:
        raise excp.OptionException("Error loading persona file: %s due to %s" % (persona_fn, e))

    # Get the object we will be running with...
    runner = runner_cls(distro=dist,
                        root_dir=root_dir,
                        name=action,
                        cli_opts=args)

    # Now that the settings are known to work, store them for next run
    store_current_settings(saved_args)

    LOG.info("Starting action %s on %s for distro: %s",
             colorizer.quote(action), colorizer.quote(utils.iso8601()),
             colorizer.quote(dist.name))
    LOG.info("Using persona: %s", colorizer.quote(persona_fn))
    LOG.info("In root directory: %s", colorizer.quote(root_dir))

    start_time = time.time()
    runner.run(persona_obj)
    end_time = time.time()

    pretty_time = utils.format_time(end_time - start_time)
    LOG.info("It took %s seconds or %s minutes to complete action %s.",
             colorizer.quote(pretty_time['seconds']), colorizer.quote(pretty_time['minutes']), colorizer.quote(action))
Example #32
def get_directory_details(path, pbr_version=None):
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    with EGGS_DETAILED_LOCK:
        path = sh.abspth(path)
        cache_key = "d:%s" % (sh.abspth(path))
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        details = None
        skip_paths = [
            sh.joinpths(path, "PKG-INFO"),
            sh.joinpths(path, "EGG-INFO"),
        ]
        skip_paths.extend(glob.glob(sh.joinpths(path, "*.egg-info")))
        if any(sh.exists(a_path) for a_path in skip_paths):
            # Some packages seem to not support the 'egg_info' call and
            # provide there own path/file that contains this information
            # already, so just use it if we can get at it...
            #
            # Ie for pyyaml3.x:
            #
            # error: invalid command 'egg_info'
            details = pkginfo.Develop(path)
        if not details or not details.name:
            cmd = [sys.executable, 'setup.py', 'egg_info']
            if pbr_version:
                env_overrides = {
                    "PBR_VERSION": str(pbr_version),
                }
            else:
                env_overrides = {}
            sh.execute(cmd, cwd=path, env_overrides=env_overrides)
            details = pkginfo.get_metadata(path)
        if not details or not details.name:
            raise RuntimeError("No egg detail information discovered"
                               " at '%s'" % path)

        egg_details = {
            'req': create_requirement(details.name, version=details.version),
        }
        for attr_name in [
                'description', 'author', 'version', 'name', 'summary'
        ]:
            egg_details[attr_name] = getattr(details, attr_name)
        for attr_name in ['description', 'author', 'summary']:
            attr_value = egg_details[attr_name]
            if isinstance(attr_value, six.text_type):
                # Fix any unicode which will cause unicode decode failures...
                # versions or names shouldn't be unicode, and the rest
                # we don't really care about being unicode (since its
                # just used for logging right now anyway...).
                #
                # The reason this is done is that 'elasticsearch' seems to
                # have a unicode author name, and that causes the log_object
                # to blowup, so just avoid that by replacing this information
                # in the first place.
                egg_details[attr_name] = attr_value.encode("ascii",
                                                           errors='replace')

        LOG.debug("Extracted '%s' egg detail information:", path)
        utils.log_object(egg_details, logger=LOG, level=logging.DEBUG)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details