Example #1
0
def _using_testr(test_type, app_dir):
    """Detect whether an application uses testr as its test runner.

    True when the configured test type is 'testr' or when a testr marker
    file/directory is present in the application directory.
    """
    if test_type == 'testr':
        return True
    markers = ('.testr.conf', '.testrepository')
    return any(sh.exists(sh.joinpths(app_dir, marker)) for marker in markers)
Example #2
0
def _using_testr(test_type, app_dir):
    """Return True when testr appears to be the test runner in use."""
    if test_type == 'testr':
        return True
    for marker in ('.testr.conf', '.testrepository'):
        marker_path = sh.joinpths(app_dir, marker)
        if sh.exists(marker_path):
            return True
    return False
Example #3
0
 def _install_node_repo(self):
     """Install the yum repository that provides node.js, if configured.

     Reads the 'nodejs_repo' option (a url ending in .repo or .rpm); no-op
     when unset. The url is downloaded to a temporary file, then either
     copied into /etc/yum.repos.d (for .repo) or handed directly to yum
     for installation (for .rpm). Unknown extensions are warned about and
     skipped.
     """
     repo_url = self.get_option('nodejs_repo')
     if not repo_url:
         # Nothing configured; nothing to install.
         return
     # Download the said url and install it so that we can actually install
     # the node.js requirement which seems to be needed by horizon for css compiling??
     repo_basename = sh.basename(repo_url)
     (_fn, fn_ext) = os.path.splitext(repo_basename)
     fn_ext = fn_ext.lower().strip()
     if fn_ext not in ['.rpm', '.repo']:
         LOG.warn("Unknown node.js repository configuration extension %s (we only support .rpm or .repo)!", colorizer.quote(fn_ext))
         return
     with NamedTemporaryFile(suffix=fn_ext) as temp_fh:
         LOG.info("Downloading node.js repository configuration from %s to %s.", repo_url, temp_fh.name)
         down.UrlLibDownloader(repo_url, temp_fh.name).download()
         # Make sure the downloaded bytes are on disk before re-reading them.
         temp_fh.flush()
         if fn_ext == ".repo":
             # Just write out the repo file after downloading it...
             repo_file_name = sh.joinpths("/etc/yum.repos.d", repo_basename)
             if not sh.exists(repo_file_name):
                 # Writing under /etc requires elevated privileges.
                 with sh.Rooted(True):
                     sh.write_file(repo_file_name, sh.load_file(temp_fh.name),
                                   tracewriter=self.tracewriter)
                     # 0644 is the python 2 octal literal for rw-r--r--.
                     sh.chmod(repo_file_name, 0644)
         elif fn_ext == ".rpm":
             # Install it instead from said rpm (which likely is a
             # file that contains said repo location)...
             # NOTE(review): 'packager' is assigned but never used afterwards;
             # the call exists only for its direct_install side effect.
             packager = yum.YumPackager(self.distro).direct_install(temp_fh.name)
Example #4
0
def _generate_log_filename(arglist):
    """Build a unique yyoom log file name from the leading arguments.

    Leading arguments are included in the name until the first empty,
    option-like ('-' prefixed), or existing-path argument is seen; a
    timestamp and random suffix make the result unique.
    """
    pieces = ['yyoom-']
    for arg in arglist:
        arg = arg.strip()
        if not arg or arg.startswith("-") or sh.exists(arg):
            break
        pieces.extend([arg, "_"])
    pieces.extend([int(time.time()), "_", utils.get_random_string(4), '.log'])
    return "".join(str(piece) for piece in pieces)
Example #5
0
def _generate_log_filename(arglist):
    """Derive a unique yyoom log filename from the given argument list."""
    name_parts = ['yyoom-']
    for raw_arg in arglist:
        cleaned = raw_arg.strip()
        # Stop collecting at the first empty, flag-like, or path argument.
        if not cleaned or cleaned.startswith("-") or sh.exists(cleaned):
            break
        name_parts.append(cleaned)
        name_parts.append("_")
    name_parts.append(int(time.time()))
    name_parts.append("_")
    name_parts.append(utils.get_random_string(4))
    name_parts.append('.log')
    return "".join(str(part) for part in name_parts)
Example #6
0
    def _write_python_tarball(self, instance, pkg_dir, ensure_exists=None):
        """Create a gzipped source tarball (sdist) for ``instance``.

        Runs ``setup.py sdist --formats=tar`` in ``pkg_dir`` with output
        placed in ``self.rpm_sources_dir``. If ``ensure_exists`` lists
        relative paths that sdist omitted but that exist in ``pkg_dir``,
        they are appended to the archive before it is gzipped; the raw
        .tar is removed afterwards, leaving only the .gz.
        """
        def prefix_exists(text, in_what):
            # True when any entry in 'in_what' starts with the given prefix.
            for t in in_what:
                if t.startswith(text):
                    return True
            return False

        pkg_name = instance.egg_info['name']
        version = instance.egg_info['version']
        # Archive top-level directory name, e.g. "name-1.2.3".
        base_name = "%s-%s" % (pkg_name, version)
        cmdline = [
            sys.executable,
            "setup.py",
            "sdist",
            "--formats=tar",
            "--dist-dir",
            self.rpm_sources_dir,
        ]
        out_filename = sh.joinpths(self.log_dir,
                                   "sdist-%s.log" % (instance.name))
        sh.execute_save_output(cmdline, cwd=pkg_dir, out_filename=out_filename)
        archive_name = sh.joinpths(self.rpm_sources_dir,
                                   "%s.tar" % (base_name))
        if ensure_exists:
            # Read the current archive member list, then close it before
            # (possibly) re-opening in append mode below.
            with contextlib.closing(tarfile.open(archive_name, 'r')) as tfh:
                tar_entries = [t.path for t in tfh.getmembers()]
            missing_paths = {}
            for path in ensure_exists:
                tar_path = sh.joinpths(base_name, path)
                source_path = sh.joinpths(pkg_dir, path)
                if not prefix_exists(tar_path,
                                     tar_entries) and sh.exists(source_path):
                    missing_paths[tar_path] = source_path
            if missing_paths:
                utils.log_iterable(
                    sorted(missing_paths.keys()),
                    logger=LOG,
                    header='%s paths were not archived and will now be' %
                    (len(missing_paths)))
                with contextlib.closing(tarfile.open(archive_name,
                                                     'a')) as tfh:
                    for (tar_path, source_path) in missing_paths.items():
                        tfh.add(source_path, tar_path)
        sh.gzip(archive_name)
        # Drop the uncompressed tar; only the .gz is kept.
        sh.unlink(archive_name)
Example #7
0
 def _validate_cache(self, cache_path, details_path):
     """Return True only when the cache and every file it references exist."""
     for needed in (cache_path, details_path):
         if not sh.exists(needed):
             return False
     try:
         unpack_info = utils.load_yaml_text(sh.load_file(details_path))
     except Exception:
         # Unreadable/corrupt details file -> cache is invalid.
         return False
     try:
         referenced = [unpack_info['file_name']]
         if 'kernel' in unpack_info:
             referenced.append(unpack_info['kernel']['file_name'])
         if 'ramdisk' in unpack_info:
             referenced.append(unpack_info['ramdisk']['file_name'])
     except Exception:
         return False
     return all(sh.isfile(fn) for fn in referenced)
Example #8
0
 def is_valid(self):
     """Report whether the cached image (and any kernel/ramdisk) is intact."""
     required = (self._cache_path, self._details_path)
     if not all(sh.exists(p) for p in required):
         return False
     try:
         image_details = self.load_details()
         expected_files = [image_details['file_name']]
         for part in ('kernel', 'ramdisk'):
             if part in image_details:
                 expected_files.append(image_details[part]['file_name'])
     except Exception:
         # Missing keys or unreadable details mean the cache is unusable.
         return False
     return all(sh.isfile(fn) for fn in expected_files)
Example #9
0
File: yum.py Project: jzako/anvil
    def _write_python_tarball(self, instance, pkg_dir, ensure_exists=None):
        """Create a gzipped source tarball (sdist) for ``instance``.

        Runs ``setup.py sdist --formats=tar`` in ``pkg_dir`` (forcing the
        package version via the PBR_VERSION environment variable) with
        output placed in ``self.rpm_sources_dir``. If ``ensure_exists``
        lists relative paths that sdist omitted but that exist in
        ``pkg_dir``, they are appended to the archive before it is
        gzipped; the raw .tar is removed afterwards.
        """

        def prefix_exists(text, in_what):
            # True when any entry in 'in_what' starts with the given prefix.
            for t in in_what:
                if t.startswith(text):
                    return True
            return False

        pkg_name = instance.egg_info['name']
        version = instance.egg_info['version']
        # Archive top-level directory name, e.g. "name-1.2.3".
        base_name = "%s-%s" % (pkg_name, version)
        cmdline = [
            sys.executable,
            "setup.py",
            "sdist",
            "--formats=tar",
            "--dist-dir", self.rpm_sources_dir,
        ]
        # Pin the version pbr emits so the sdist matches 'version' above.
        env_overrides = {
            'PBR_VERSION': version,
        }
        out_filename = sh.joinpths(self.log_dir, "sdist-%s.log" % (instance.name))
        sh.execute_save_output(cmdline, out_filename,
                               cwd=pkg_dir, env_overrides=env_overrides)
        archive_name = sh.joinpths(self.rpm_sources_dir, "%s.tar" % (base_name))
        if ensure_exists:
            # Read the current member list, closing the archive before it
            # is (possibly) re-opened in append mode below.
            with contextlib.closing(tarfile.open(archive_name, 'r')) as tfh:
                tar_entries = [t.path for t in tfh.getmembers()]
            missing_paths = {}
            for path in ensure_exists:
                tar_path = sh.joinpths(base_name, path)
                source_path = sh.joinpths(pkg_dir, path)
                if not prefix_exists(tar_path, tar_entries) and sh.exists(source_path):
                    missing_paths[tar_path] = source_path
            if missing_paths:
                utils.log_iterable(sorted(missing_paths.keys()),
                                   logger=LOG,
                                   header='%s paths were not archived and will now be' % (len(missing_paths)))
                with contextlib.closing(tarfile.open(archive_name, 'a')) as tfh:
                    for (tar_path, source_path) in missing_paths.items():
                        tfh.add(source_path, tar_path)
        sh.gzip(archive_name)
        # Drop the uncompressed tar; only the .gz is kept.
        sh.unlink(archive_name)
Example #10
0
 def stop(self, app_name):
     """Stop a previously forked application and clean up its files.

     :param app_name: name of the forked application to stop
     :raises excp.StopException: when no trace directory exists, when a
         pid file is present but no valid pid could be read from it, or
         when the process could not be killed.
     """
     # The location of the pid file should be in the attached
     # runtimes trace directory, so see if we can find said file
     # and then attempt to kill the pid that exists in that file
     # which if succesffully will signal to the rest of this code
     # that we can go through and cleanup the other remnants of said
     # pid such as the stderr/stdout files that were being written to...
     trace_dir = self.runtime.get_option('trace_dir')
     if not sh.isdir(trace_dir):
         msg = "No trace directory found from which to stop: %r" % (
             app_name)
         raise excp.StopException(msg)
     fork_fns = self._form_file_names(app_name)
     # Assume there is nothing to kill until a pid file is actually found.
     skip_kill = True
     pid = None
     try:
         pid = fork_fns.extract_pid()
         skip_kill = False
     except IOError as e:
         if e.errno == errno.ENOENT:
             # No pid file at all -> nothing to kill.
             pass
         else:
             # Pid file exists but could not be read; fall through so the
             # "no valid pid" error below fires.
             skip_kill = False
     if not skip_kill and pid is None:
         msg = "Could not extract a valid pid from %r" % (fork_fns.pid)
         raise excp.StopException(msg)
     # Bother trying to kill said process?
     if not skip_kill:
         (killed, attempts) = sh.kill(pid)
     else:
         # Nothing was running; pretend the kill trivially succeeded.
         (killed, attempts) = (True, 0)
     # Trash the files if it worked
     if killed:
         if not skip_kill:
             LOG.debug("Killed pid '%s' after %s attempts.", pid, attempts)
         for leftover_fn in fork_fns.as_list():
             if sh.exists(leftover_fn):
                 LOG.debug("Removing forking related file %r",
                           (leftover_fn))
                 sh.unlink(leftover_fn)
     else:
         msg = "Could not stop %r after %s attempts" % (app_name, attempts)
         raise excp.StopException(msg)
Example #11
0
 def stop(self, app_name):
     """Stop a previously forked application and clean up its files.

     Runs privileged (sh.Rooted) — presumably the pid/trace files are
     root-owned; confirm against how the app was started.

     :param app_name: name of the forked application to stop
     :raises excp.StopException: when no trace directory exists, when a
         pid file is present but no valid pid could be read from it, or
         when the process could not be killed.
     """
     # The location of the pid file should be in the attached
     # runtimes trace directory, so see if we can find said file
     # and then attempt to kill the pid that exists in that file
     # which if succesffully will signal to the rest of this code
     # that we can go through and cleanup the other remnants of said
     # pid such as the stderr/stdout files that were being written to...
     trace_dir = self.runtime.get_option('trace_dir')
     if not sh.isdir(trace_dir):
         msg = "No trace directory found from which to stop: %r" % (app_name)
         raise excp.StopException(msg)
     with sh.Rooted(True):
         fork_fns = self._form_file_names(app_name)
         # Assume there is nothing to kill until a pid file is found.
         skip_kill = True
         pid = None
         try:
             pid = fork_fns.extract_pid()
             skip_kill = False
         except IOError as e:
             if e.errno == errno.ENOENT:
                 # No pid file at all -> nothing to kill.
                 pass
             else:
                 # Pid file exists but could not be read; fall through so
                 # the "no valid pid" error below fires.
                 skip_kill = False
         if not skip_kill and pid is None:
             msg = "Could not extract a valid pid from %r" % (fork_fns.pid)
             raise excp.StopException(msg)
         # Bother trying to kill said process?
         if not skip_kill:
             (killed, attempts) = sh.kill(pid)
         else:
             # Nothing was running; pretend the kill trivially succeeded.
             (killed, attempts) = (True, 0)
         # Trash the files if it worked
         if killed:
             if not skip_kill:
                 LOG.debug("Killed pid '%s' after %s attempts.", pid, attempts)
             for leftover_fn in fork_fns.as_list():
                 if sh.exists(leftover_fn):
                     LOG.debug("Removing forking related file %r", (leftover_fn))
                     sh.unlink(leftover_fn)
         else:
             msg = "Could not stop %r after %s attempts" % (app_name, attempts)
             raise excp.StopException(msg)
Example #12
0
    def _install_src_rpm(self, rpm):
        """Install a source rpm, downloading it first when given a url.

        Returns True when the package is (or becomes) installed; False
        when no usable file/package name could be derived from ``rpm``.
        """
        filename = sh.basename(rpm)
        if not filename:
            LOG.error("Cannot determine file name from rpm: %r", rpm)
            return False
        (package, _ext) = os.path.splitext(filename)
        if not package:
            LOG.error("Cannot determine package name from rpm: %r", rpm)
            return False
        if self._is_installed(package):
            # Already present; nothing more to do.
            return True
        with utils.tempdir() as tdir:
            if sh.exists(rpm):
                # Already a local file; use it directly.
                fetched_filen = rpm
            else:
                target = sh.joinpths(tdir, filename)
                (fetched_filen, bytes_down) = downloader.UrlLibDownloader(rpm, target).download()
                LOG.debug("For url %s, we downloaded %s bytes to %s", rpm, bytes_down, fetched_filen)
                # RLOO, do we want to catch any exceptions?
            self._execute_yum(YUM_INSTALL + ["--nogpgcheck", fetched_filen])
        return True
Example #13
0
 def _is_url_local(self):
     """Check if image url is local."""
     if sh.exists(self._url):
         return True
     # A url with no scheme and no network location is a bare local path.
     return self._parsed_url.scheme == '' and self._parsed_url.netloc == ''
Example #14
0
 def _is_url_local(self):
     """Return True when the url refers to the local filesystem."""
     if sh.exists(self.url):
         return True
     # No scheme + no network location means a bare local path.
     return self.parsed_url.scheme == '' and self.parsed_url.netloc == ''
Example #15
0
 def exists(self):
     """Return True if the trace file backing this object is present."""
     trace_path = self.trace_fn
     return sh.exists(trace_path)
Example #16
0
def get_directory_details(path, pbr_version=None):
    """Extract python egg/package metadata from the project at ``path``.

    Results are cached per absolute path (guarded by EGGS_DETAILED_LOCK),
    so repeated lookups of the same directory are cheap.

    :param path: directory containing a python project
    :param pbr_version: optional version forced through the PBR_VERSION
        environment variable when ``setup.py egg_info`` has to be run
    :returns: dict with keys 'req', 'description', 'author', 'version',
        'name' and 'summary'
    :raises IOError: when ``path`` is not an existing directory
    :raises RuntimeError: when no usable egg details could be discovered
    """
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    with EGGS_DETAILED_LOCK:
        path = sh.abspth(path)
        # 'path' was just normalized above, so reuse it instead of
        # recomputing the absolute path a second time.
        cache_key = "d:%s" % (path)
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        details = None
        skip_paths = [
            sh.joinpths(path, "PKG-INFO"),
            sh.joinpths(path, "EGG-INFO"),
        ]
        skip_paths.extend(glob.glob(sh.joinpths(path, "*.egg-info")))
        if any(sh.exists(a_path) for a_path in skip_paths):
            # Some packages seem to not support the 'egg_info' call and
            # provide their own path/file that contains this information
            # already, so just use it if we can get at it...
            #
            # Ie for pyyaml3.x:
            #
            # error: invalid command 'egg_info'
            details = pkginfo.Develop(path)
        if not details or not details.name:
            # Fall back to actually running 'setup.py egg_info'.
            cmd = [sys.executable, 'setup.py', 'egg_info']
            if pbr_version:
                env_overrides = {
                    "PBR_VERSION": str(pbr_version),
                }
            else:
                env_overrides = {}
            sh.execute(cmd, cwd=path, env_overrides=env_overrides)
            details = pkginfo.get_metadata(path)
        if not details or not details.name:
            raise RuntimeError("No egg detail information discovered"
                               " at '%s'" % path)

        egg_details = {
            'req': create_requirement(details.name, version=details.version),
        }
        for attr_name in [
                'description', 'author', 'version', 'name', 'summary'
        ]:
            egg_details[attr_name] = getattr(details, attr_name)
        for attr_name in ['description', 'author', 'summary']:
            attr_value = egg_details[attr_name]
            if isinstance(attr_value, six.text_type):
                # Fix any unicode which will cause unicode decode failures...
                # versions or names shouldn't be unicode, and the rest
                # we don't really care about being unicode (since its
                # just used for logging right now anyway...).
                #
                # The reason this is done is that 'elasticsearch' seems to
                # have a unicode author name, and that causes the log_object
                # to blowup, so just avoid that by replacing this information
                # in the first place.
                egg_details[attr_name] = attr_value.encode("ascii",
                                                           errors='replace')

        LOG.debug("Extracted '%s' egg detail information:", path)
        utils.log_object(egg_details, logger=LOG, level=logging.DEBUG)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
Example #17
0
 def _is_url_local(self):
     """Return True when the image url refers to the local filesystem."""
     if sh.exists(self.url):
         return True
     parsed = self.parsed_url
     # No scheme + no network location means a bare local path.
     return parsed.scheme == '' and parsed.netloc == ''
Example #18
0
def get_directory_details(path, pbr_version=None):
    """Extract python egg/package metadata from the project at ``path``.

    Results are cached per absolute path (guarded by EGGS_DETAILED_LOCK),
    so repeated lookups of the same directory are cheap.

    :param path: directory containing a python project
    :param pbr_version: optional version forced through the PBR_VERSION
        environment variable when ``setup.py egg_info`` has to be run
    :returns: dict with keys 'req', 'description', 'author', 'version',
        'name' and 'summary'
    :raises IOError: when ``path`` is not an existing directory
    :raises RuntimeError: when no usable egg details could be discovered
    """
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))

    # Check if we already got the details of this dir previously
    with EGGS_DETAILED_LOCK:
        path = sh.abspth(path)
        # 'path' was just normalized above, so reuse it instead of
        # recomputing the absolute path a second time.
        cache_key = "d:%s" % (path)
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        details = None
        skip_paths = [
            sh.joinpths(path, "PKG-INFO"),
            sh.joinpths(path, "EGG-INFO"),
        ]
        skip_paths.extend(glob.glob(sh.joinpths(path, "*.egg-info")))
        if any(sh.exists(a_path) for a_path in skip_paths):
            # Some packages seem to not support the 'egg_info' call and
            # provide their own path/file that contains this information
            # already, so just use it if we can get at it...
            #
            # Ie for pyyaml3.x:
            #
            # error: invalid command 'egg_info'
            details = pkginfo.Develop(path)
        if not details or not details.name:
            # Fall back to actually running 'setup.py egg_info'.
            cmd = [sys.executable, 'setup.py', 'egg_info']
            if pbr_version:
                env_overrides = {
                    "PBR_VERSION": str(pbr_version),
                }
            else:
                env_overrides = {}
            sh.execute(cmd, cwd=path, env_overrides=env_overrides)
            details = pkginfo.get_metadata(path)
        if not details or not details.name:
            raise RuntimeError("No egg detail information discovered"
                               " at '%s'" % path)

        egg_details = {
            'req': create_requirement(details.name, version=details.version),
        }
        for attr_name in ['description', 'author',
                          'version', 'name', 'summary']:
            egg_details[attr_name] = getattr(details, attr_name)
        for attr_name in ['description', 'author', 'summary']:
            attr_value = egg_details[attr_name]
            if isinstance(attr_value, six.text_type):
                # Fix any unicode which will cause unicode decode failures...
                # versions or names shouldn't be unicode, and the rest
                # we don't really care about being unicode (since its
                # just used for logging right now anyway...).
                #
                # The reason this is done is that 'elasticsearch' seems to
                # have a unicode author name, and that causes the log_object
                # to blowup, so just avoid that by replacing this information
                # in the first place.
                egg_details[attr_name] = attr_value.encode("ascii",
                                                           errors='replace')

        LOG.debug("Extracted '%s' egg detail information:", path)
        utils.log_object(egg_details, logger=LOG, level=logging.DEBUG)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
Example #19
0
 def exists(self):
     """Check whether the underlying trace file exists on disk."""
     target = self.trace_fn
     return sh.exists(target)