예제 #1
0
 def test_no_dirs(self):
     """Directories must never be resolved as binaries."""
     parent, name = os.path.split(self.dir)
     # lookup by basename restricted to the parent directory
     with pytest.raises(process.CommandNotFound):
         process.find_binary(name, parent)
     # lookup by the absolute directory path itself
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.dir)
예제 #2
0
 def test_not_executable(self):
     """Files without the exec bit are never found as binaries."""
     script_path = os.path.join(self.dir, self.script)
     touch(script_path)
     # rw-r----- : readable but not executable
     os.chmod(script_path, 0o640)
     # neither a PATH lookup nor an absolute-path lookup may succeed
     for target in (self.script, script_path):
         with pytest.raises(process.CommandNotFound):
             process.find_binary(target)
예제 #3
0
 def test_not_executable(self):
     """A file lacking the executable bit must raise CommandNotFound."""
     target = os.path.join(self.dir, self.script)
     touch(target)
     os.chmod(target, 0o640)  # no exec bit
     # by name on PATH
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.script)
     # by absolute path
     with pytest.raises(process.CommandNotFound):
         process.find_binary(target)
예제 #4
0
 def test_path_override(self):
     """An explicit ``paths`` argument overrides the PATH search."""
     subdir = tempfile.mkdtemp(dir=self.dir)
     script_path = os.path.join(subdir, self.script)
     touch(script_path)
     os.chmod(script_path, 0o750)  # executable
     # not reachable through the regular PATH
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.script)
     # but found once the temp dir is searched explicitly
     assert process.find_binary(self.script, paths=[subdir]) == script_path
예제 #5
0
 def _initialize_paths(self, pkg, progs):
     """Resolve binary paths for *progs*, preferring CHOST-prefixed names.

     For each program an already-populated ``_<prog>_binary`` attribute
     wins; otherwise ``<chost>-<prog>`` is tried before the bare name.
     The result is stored on ``<prog>_binary`` (NOTE(review): the getter
     uses a leading underscore while the setter does not -- confirm this
     asymmetry is intentional).
     """
     for prog in progs:
         binary = getattr(self, f"_{prog}_binary")
         if binary is None:
             try:
                 binary = process.find_binary(f"{pkg.chost}-{prog}")
             except process.CommandNotFound:
                 binary = process.find_binary(prog)
         setattr(self, f'{prog}_binary', binary)
예제 #6
0
 def test_path_override(self):
     """find_binary(paths=...) searches only the given directories."""
     override_dir = tempfile.mkdtemp(dir=self.dir)
     fp = os.path.join(override_dir, self.script)
     touch(fp)
     os.chmod(fp, 0o750)
     # the script lives outside the normal PATH, so plain lookup fails
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.script)
     found = process.find_binary(self.script, paths=[override_dir])
     assert found == fp
예제 #7
0
파일: git.py 프로젝트: sbraz/pkgcheck
    def __init__(self, *args):
        """Initialize the addon, probing for a usable ``git`` binary."""
        super().__init__(*args)
        if self.options.cache['git']:
            # git support only works when the binary actually exists
            try:
                find_binary('git')
            except CommandNotFound:
                self.options.cache['git'] = False
        # repo location -> corresponding git repo cache
        self._cached_repos = {}
예제 #8
0
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants.

    Writes ``pkgcore/_const.py``: a dynamic module resolving paths from
    ``sys.prefix`` at runtime for wheel installs, or a static one with
    hard-coded paths otherwise, then byte-compiles the result.

    :param python_base: root directory the package tree is installed under
    :param install_prefix: final installation prefix
    :param injected_bin_path: extra bin dirs recorded as INJECTED_BIN_PATH
    :raises DistutilsExecError: if a required utility is missing from PATH
    """
    path = os.path.join(python_base, "pkgcore", "_const.py")
    # simpler than the try/except errno.EEXIST dance
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info("writing lookup config to %r" % path)

    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic _const file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            import textwrap
            f.write(textwrap.dedent("""\
                import os.path as osp
                import sys

                from snakeoil import process

                INSTALL_PREFIX = osp.abspath(sys.prefix)
                DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
                CONFIG_PATH = osp.join(INSTALL_PREFIX, {!r})
                LIBDIR_PATH = osp.join(INSTALL_PREFIX, {!r})
                EBD_PATH = osp.join(INSTALL_PREFIX, {!r})
                INJECTED_BIN_PATH = ()
                CP_BINARY = process.find_binary('cp')
            """.format(
                DATA_INSTALL_OFFSET, CONFIG_INSTALL_OFFSET,
                LIBDIR_INSTALL_OFFSET, EBD_INSTALL_OFFSET)))
        else:
            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
            f.write("DATA_PATH=%r\n" %
                    os.path.join(install_prefix, DATA_INSTALL_OFFSET))
            f.write("CONFIG_PATH=%r\n" %
                    os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
            f.write("LIBDIR_PATH=%r\n" %
                    os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
            f.write("EBD_PATH=%r\n" %
                    os.path.join(install_prefix, EBD_INSTALL_OFFSET))

            # This is added to suppress the default behaviour of looking
            # within the repo for a bin subdir.
            f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))

            # Static paths for various utilities.
            from snakeoil import process
            required_progs = ('cp',)
            try:
                for prog in required_progs:
                    prog_path = process.find_binary(prog)
                    f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
            except process.CommandNotFound:
                raise DistutilsExecError(
                    "generating lookup config failed: required utility %r missing from PATH" % (prog,))

    # bug fix: byte-compile AFTER the with block so both branches are
    # compiled -- previously this (plus a redundant f.close()) was nested
    # inside the static branch, leaving wheel installs un-compiled
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
예제 #9
0
def find_invoking_python():
    """Locate a python executable matching the running interpreter.

    Prefers a cached result, then ``sys.executable`` (validated by
    actually running it, since it is unreliable when the interpreter is
    embedded), then ``pythonX.Y``/``pythonX``/``python`` from PATH.

    :raises CommandNotFound: if no candidate can be located
    """
    global _invoking_python
    # reuse the cached result if it still exists on disk
    if _invoking_python is not None and os.path.exists(_invoking_python):
        return _invoking_python
    if os.path.exists(sys.executable):
        # sanity-check sys.executable by echoing a token through it
        token = "oh hai"
        returncode, output = spawn_get_output(
            [sys.executable, '-c', 'print("%s")' % token],
            collect_fds=(1, 2))
        if output and output[0].strip() == token:
            _invoking_python = sys.executable
            return _invoking_python

    version_bits = [str(part) for part in sys.version_info[:2]]
    # try python<major>.<minor>, then python<major>, then bare python
    for suffix_parts in (version_bits, version_bits[:-1], ''):
        candidate = 'python%s' % '.'.join(suffix_parts)
        try:
            _invoking_python = find_binary(candidate)
        except CommandNotFound:
            continue
        return _invoking_python
    raise CommandNotFound('python')
예제 #10
0
 def require_binary(bin_name, fatal=True):
     """Return the path to *bin_name*, or handle its absence.

     :param bin_name: name of the binary to look up on PATH
     :param fatal: when True a missing binary raises; otherwise None is returned
     :return: path to the binary, or None when missing and not fatal
     :raises MissingBinary: when the binary is missing and *fatal* is True
     """
     try:
         return process.find_binary(bin_name)
     except process.CommandNotFound as e:
         if fatal:
             # chain the original lookup failure for easier debugging
             raise MissingBinary(bin_name, str(e)) from e
         return None
예제 #11
0
 def __init__(self, *a, **kw):
     # Resolve bash and open a sink fd for spawned-process output.  The
     # statement order matters: if find_binary() raises, /dev/null is
     # never opened and self.skip marks the test as skipped instead.
     try:
         self.bash_path = process.find_binary("bash")
         self.null_file = open("/dev/null", "w")
         self.null = self.null_file.fileno()
     except process.CommandNotFound:
         self.skip = "bash wasn't found.  this will be ugly."
     # old-style super() call: this code predates the py3-only form
     super(SpawnTest, self).__init__(*a, **kw)
예제 #12
0
 def __init__(self, *a, **kw):
     # Locate bash and prepare a /dev/null sink for child output.  If
     # find_binary() raises first, the file is never opened and self.skip
     # flags the whole test case as skipped.
     try:
         self.bash_path = process.find_binary("bash")
         self.null_file = open("/dev/null", "w")
         self.null = self.null_file.fileno()
     except process.CommandNotFound:
         self.skip = "bash wasn't found.  this will be ugly."
     # explicit-argument super(): py2-compatible spelling
     super(SpawnTest, self).__init__(*a, **kw)
예제 #13
0
    def test_find_binary(self):
        """Exercise find_binary() across exec-bit and directory cases."""
        name = "pkgcore-findpath-test.sh"
        script_path = os.path.join(self.dir, name)
        # nothing on PATH yet
        self.assertRaises(process.CommandNotFound, process.find_binary, name)
        touch(script_path)
        # present but not executable -> still not found, by name or path
        os.chmod(script_path, 0o640)
        self.assertRaises(process.CommandNotFound, process.find_binary, name)
        self.assertRaises(process.CommandNotFound, process.find_binary,
                          script_path)
        # with the exec bit set both lookups succeed
        os.chmod(script_path, 0o750)
        self.assertIn(self.dir, process.find_binary(name))
        self.assertIn(self.dir, process.find_binary(script_path))
        os.unlink(script_path)

        # make sure dirs aren't returned as binaries
        parent, base = os.path.split(self.dir)
        self.assertRaises(
            process.CommandNotFound, process.find_binary, base, parent)
        self.assertRaises(process.CommandNotFound, process.find_binary, self.dir)
예제 #14
0
def write_pkgcore_lookup_configs(python_base,
                                 install_prefix,
                                 injected_bin_path=()):
    """Generate file of install path constants.

    Writes ``pkgcore/_const.py`` with hard-coded install paths and
    resolved utility locations, then byte-compiles it.
    """
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("INSTALL_PREFIX=%r\n" % install_prefix)
        # hard-coded install paths, one constant per offset
        for const, offset in (
                ("DATA_PATH", DATA_INSTALL_OFFSET),
                ("CONFIG_PATH", CONFIG_INSTALL_OFFSET),
                ("LIBDIR_PATH", LIBDIR_INSTALL_OFFSET),
                ("EBD_PATH", EBD_INSTALL_OFFSET)):
            f.write("%s=%r\n" % (const, os.path.join(install_prefix, offset)))

        # suppress the default behaviour of looking within the repo
        # for a bin subdir
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))

        # utilities that must exist on PATH at install time
        from snakeoil import process
        try:
            for prog in ('bash', 'cp'):
                f.write("%s_BINARY=%r\n" % (prog.upper(),
                                            process.find_binary(prog)))
        except process.CommandNotFound:
            raise DistutilsExecError(
                "generating lookup config failed: required utility %r missing from PATH"
                % (prog,))

        # optional utilities fall back to an empty path
        for prog in ('sandbox',):
            try:
                prog_path = process.find_binary(prog)
            except process.CommandNotFound:
                prog_path = ''
            f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))

    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
예제 #15
0
파일: setup.py 프로젝트: radhermit/pkgcore
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants.

    Writes ``pkgcore/_const.py`` -- dynamic (paths resolved from
    ``sys.prefix`` at runtime) for wheel installs, static with hard-coded
    paths otherwise -- then byte-compiles the generated module.

    :param python_base: root directory the package tree is installed under
    :param install_prefix: final installation prefix
    :param injected_bin_path: extra bin dirs recorded as INJECTED_BIN_PATH
    :raises DistutilsExecError: if a required utility is missing from PATH
    """
    path = os.path.join(python_base, "pkgcore", "_const.py")
    os.makedirs(os.path.dirname(path), exist_ok=True)
    log.info("writing lookup config to %r" % path)

    with open(path, "w") as f:
        os.chmod(path, 0o644)
        # write more dynamic _const file for wheel installs
        if install_prefix != os.path.abspath(sys.prefix):
            import textwrap
            f.write(textwrap.dedent("""\
                import os.path as osp
                import sys

                from snakeoil import process

                INSTALL_PREFIX = osp.abspath(sys.prefix)
                DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
                CONFIG_PATH = osp.join(INSTALL_PREFIX, {!r})
                LIBDIR_PATH = osp.join(INSTALL_PREFIX, {!r})
                EBD_PATH = osp.join(INSTALL_PREFIX, {!r})
                INJECTED_BIN_PATH = ()
                CP_BINARY = process.find_binary('cp')
            """.format(
                DATA_INSTALL_OFFSET, CONFIG_INSTALL_OFFSET,
                LIBDIR_INSTALL_OFFSET, EBD_INSTALL_OFFSET)))
        else:
            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
            f.write("DATA_PATH=%r\n" %
                    os.path.join(install_prefix, DATA_INSTALL_OFFSET))
            f.write("CONFIG_PATH=%r\n" %
                    os.path.join(install_prefix, CONFIG_INSTALL_OFFSET))
            f.write("LIBDIR_PATH=%r\n" %
                    os.path.join(install_prefix, LIBDIR_INSTALL_OFFSET))
            f.write("EBD_PATH=%r\n" %
                    os.path.join(install_prefix, EBD_INSTALL_OFFSET))

            # This is added to suppress the default behaviour of looking
            # within the repo for a bin subdir.
            f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))

            # Static paths for various utilities.
            from snakeoil import process
            required_progs = ('cp',)
            try:
                for prog in required_progs:
                    prog_path = process.find_binary(prog)
                    f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))
            except process.CommandNotFound:
                raise DistutilsExecError(
                    "generating lookup config failed: required utility %r missing from PATH" % (prog,))

    # bug fix: byte-compile AFTER the with block so both branches are
    # compiled -- previously this (plus a redundant f.close()) was nested
    # inside the static branch, leaving wheel installs un-compiled
    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
예제 #16
0
파일: setup.py 프로젝트: pkgcore/pkgcore
def write_pkgcore_lookup_configs(python_base, install_prefix, injected_bin_path=()):
    """Generate file of install path constants.

    Writes ``pkgcore/_const.py`` with hard-coded paths and resolved
    utility locations, then byte-compiles it.
    """
    path = os.path.join(python_base, "pkgcore", "_const.py")
    log.info("writing lookup config to %r" % path)
    with open(path, "w") as f:
        os.chmod(path, 0o644)
        f.write("INSTALL_PREFIX=%r\n" % install_prefix)
        # one constant per install offset
        for const, offset in (
                ("DATA_PATH", DATA_INSTALL_OFFSET),
                ("CONFIG_PATH", CONFIG_INSTALL_OFFSET),
                ("LIBDIR_PATH", LIBDIR_INSTALL_OFFSET),
                ("EBD_PATH", EBD_INSTALL_OFFSET)):
            f.write("%s=%r\n" % (const, os.path.join(install_prefix, offset)))

        # This is added to suppress the default behaviour of looking
        # within the repo for a bin subdir.
        f.write("INJECTED_BIN_PATH=%r\n" % (tuple(injected_bin_path),))

        # utilities that must be present on PATH at install time
        from snakeoil import process
        try:
            for prog in ('bash', 'cp'):
                f.write("%s_BINARY=%r\n" % (prog.upper(),
                                            process.find_binary(prog)))
        except process.CommandNotFound:
            raise DistutilsExecError(
                "generating lookup config failed: required utility %r missing from PATH" % (prog,))

        # optional utilities degrade to an empty path
        for prog in ('sandbox',):
            try:
                prog_path = process.find_binary(prog)
            except process.CommandNotFound:
                prog_path = ''
            f.write("%s_BINARY=%r\n" % (prog.upper(), prog_path))

    byte_compile([path], prefix=python_base)
    byte_compile([path], optimize=2, prefix=python_base)
예제 #17
0
 def test_find_binary(self):
     """find_binary honors the executable bit when searching PATH."""
     script_name = "pkgcore-findpath-test.sh"
     self.assertRaises(process.CommandNotFound, process.find_binary,
                       script_name)
     fp = os.path.join(self.dir, script_name)
     # bug fix: close the handle instead of leaking it
     open(fp, "w").close()
     # 0o octals work on py2.6+ and py3 (bare 0640 is a py3 syntax
     # error); the numeric values are unchanged
     os.chmod(fp, 0o640)
     self.assertRaises(process.CommandNotFound, process.find_binary,
                       script_name)
     os.chmod(fp, 0o750)
     self.assertIn(self.dir, process.find_binary(script_name))
     os.unlink(fp)
예제 #18
0
 def test_find_binary(self):
     """find_binary honors the executable bit when searching PATH."""
     script_name = "pkgcore-findpath-test.sh"
     self.assertRaises(process.CommandNotFound,
                       process.find_binary, script_name)
     fp = os.path.join(self.dir, script_name)
     open(fp, "w").close()
     # bug fix: 0o-prefixed octal literals are valid on py2.6+ and py3
     # (bare 0640 is a py3 syntax error); numeric values are unchanged
     os.chmod(fp, 0o640)
     self.assertRaises(process.CommandNotFound,
                       process.find_binary, script_name)
     os.chmod(fp, 0o750)
     self.assertIn(self.dir, process.find_binary(script_name))
     os.unlink(fp)
예제 #19
0
    def test_find_binary(self):
        """Cover exec-bit handling plus the directory exclusion."""
        name = "pkgcore-findpath-test.sh"
        script_path = os.path.join(self.dir, name)
        # absent from PATH entirely
        self.assertRaises(process.CommandNotFound, process.find_binary, name)
        touch(script_path)
        os.chmod(script_path, 0o640)
        # exists but lacks the exec bit -> not found by name or by path
        self.assertRaises(process.CommandNotFound, process.find_binary, name)
        self.assertRaises(process.CommandNotFound, process.find_binary,
                          script_path)
        os.chmod(script_path, 0o750)
        # executable now -> found either way
        self.assertIn(self.dir, process.find_binary(name))
        self.assertIn(self.dir, process.find_binary(script_path))
        os.unlink(script_path)

        # make sure dirs aren't returned as binaries
        parent, base = os.path.split(self.dir)
        self.assertRaises(process.CommandNotFound, process.find_binary,
                          base, parent)
        self.assertRaises(process.CommandNotFound, process.find_binary,
                          self.dir)
예제 #20
0
def find_invoking_python():
    """Best-effort lookup of the invoking python executable.

    Falls back from ``sys.executable`` to ``pythonX.Y``, ``pythonX``,
    then plain ``python`` on PATH.

    :raises CommandNotFound: if no candidate exists
    """
    if os.path.exists(sys.executable):
        return sys.executable
    version_bits = [str(part) for part in sys.version_info[:2]]
    for suffix_parts in (version_bits, version_bits[:-1], ''):
        candidate = 'python%s' % '.'.join(suffix_parts)
        try:
            return find_binary(candidate)
        except CommandNotFound:
            continue
    raise CommandNotFound('python')
예제 #21
0
파일: spawn.py 프로젝트: chutz/pkgcore
def find_invoking_python():
    """Return a python executable resembling the running interpreter.

    Uses ``sys.executable`` when it exists on disk; otherwise tries
    version-suffixed names from most to least specific.

    :raises CommandNotFound: if nothing suitable is found
    """
    if os.path.exists(sys.executable):
        return sys.executable
    parts = [str(x) for x in sys.version_info[:2]]
    # python<maj>.<min>, python<maj>, python
    for suffix in ('.'.join(parts), parts[0], ''):
        try:
            return find_binary('python%s' % suffix)
        except CommandNotFound:
            continue
    raise CommandNotFound('python')
예제 #22
0
 def test_binary_path(self):
     # ``self`` doubles as a unique sentinel so "PATH unset" can be
     # distinguished from "PATH set to some string" when restoring.
     existing = os.environ.get("PATH", self)
     try:
         try:
             path = process.find_binary('install-info')
         except process.CommandNotFound:
             # install-info not available on this host
             path = None
         self.assertEqual(path, self.trigger.get_binary_path())
         # NOTE(review): ``path`` is always a string or None here, never
         # ``self`` -- presumably ``existing is not self`` was intended;
         # confirm before changing.
         if path is not self:
             # with an empty PATH the lookup must fail -> None
             os.environ["PATH"] = ""
             self.assertEqual(None, self.trigger.get_binary_path())
     finally:
         # restore (or remove) PATH exactly as it was
         if existing is self:
             os.environ.pop("PATH", None)
         else:
             os.environ["PATH"] = existing
예제 #23
0
 def test_binary_path(self):
     # ``self`` serves as a sentinel default: it lets the finally block
     # tell an originally-unset PATH apart from an empty-string PATH.
     existing = os.environ.get("PATH", self)
     try:
         try:
             path = process.find_binary('install-info')
         except process.CommandNotFound:
             # binary missing -> trigger should report None as well
             path = None
         self.assertEqual(path, self.trigger.get_binary_path())
         # NOTE(review): ``path`` can never be ``self`` (string or None),
         # so this guard is always true -- likely meant ``existing``;
         # verify against upstream before altering.
         if path is not self:
             os.environ["PATH"] = ""
             self.assertEqual(None, self.trigger.get_binary_path())
     finally:
         # put the environment back the way we found it
         if existing is self:
             os.environ.pop("PATH", None)
         else:
             os.environ["PATH"] = existing
예제 #24
0
파일: spawn.py 프로젝트: veelai/pkgcore
def spawn(mycommand, env=None, name=None, fd_pipes=None, returnpid=False,
          uid=None, gid=None, groups=None, umask=None, cwd=None):

    """wrapper around execve

    :type mycommand: list or string
    :type env: mapping with string keys and values
    :param name: controls what the process is named
        (what it would show up as under top for example)
    :type fd_pipes: mapping from existing fd to fd (inside the new process)
    :param fd_pipes: controls what fd's are left open in the spawned process-
    :param returnpid: controls whether spawn waits for the process to finish,
        or returns the pid.
    """
    # NOTE(review): this excerpt is Python 2 (``except Exception, e``)
    # and appears truncated -- the child-reaping tail after os.fork() is
    # missing from this copy; compare the complete variant elsewhere.
    if env is None:
        env = {}
    # mycommand is either a str or a list
    if isinstance(mycommand, str):
        mycommand = mycommand.split()

    # If an absolute path to an name file isn't given
    # search for it unless we've been told not to.
    binary = find_binary(mycommand[0])

    # If we haven't been told what file descriptors to use
    # default to propogating our stdin, stdout and stderr.
    if fd_pipes is None:
        fd_pipes = {0:0, 1:1, 2:2}

    # mypids will hold the pids of all processes created.
    mypids = []

    # fork: the child (pid == 0) execs below; the parent falls through
    pid = os.fork()

    if not pid:
        # 'Catch "Exception"'
        # pylint: disable-msg=W0703
        try:
            _exec(binary, mycommand, name, fd_pipes, env, gid, groups,
                  uid, umask, cwd)
        except Exception, e:
            # We need to catch _any_ exception so that it doesn't
            # propogate out of this function and cause exiting
            # with anything other than os._exit()
            sys.stderr.write("%s:\n   %s\n" % (e, " ".join(mycommand)))
            os._exit(1)
예제 #25
0
def find_invoking_python():
    """Find (and cache) a python executable matching this interpreter.

    ``sys.executable`` is validated by actually running it, since it is
    unreliable when the interpreter is embedded; failing that, suffixed
    names are tried from PATH.

    :raises CommandNotFound: if no candidate can be located
    """
    global _invoking_python
    # a previously-cached path wins if it still exists
    if _invoking_python is not None and os.path.exists(_invoking_python):
        return _invoking_python
    if os.path.exists(sys.executable):
        # verify sys.executable works by round-tripping a token
        token = "oh hai"
        returncode, output = spawn_get_output(
            [sys.executable, "-c", 'print("%s")' % token],
            collect_fds=(1, 2))
        if output and output[0].strip() == token:
            _invoking_python = sys.executable
            return _invoking_python

    parts = [str(x) for x in sys.version_info[:2]]
    # most specific suffix first: X.Y, then X, then none
    for suffix_parts in (parts, parts[:-1], ""):
        candidate = "python%s" % ".".join(suffix_parts)
        try:
            _invoking_python = find_binary(candidate)
        except CommandNotFound:
            continue
        return _invoking_python
    raise CommandNotFound("python")
예제 #26
0
 def test_no_dirs(self):
     """A directory must never be resolved as a binary."""
     parent, name = os.path.split(self.dir)
     # by basename within the parent, and by absolute path
     with pytest.raises(process.CommandNotFound):
         process.find_binary(name, parent)
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.dir)
예제 #27
0
 def test_found(self):
     """An executable file on PATH is returned by absolute path."""
     script_path = os.path.join(self.dir, self.script)
     touch(script_path)
     os.chmod(script_path, 0o750)  # executable
     assert process.find_binary(self.script) == script_path
예제 #28
0
 def test_missing(self):
     """Looking up a nonexistent binary raises CommandNotFound."""
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.script)
예제 #29
0
 def test_fallback(self):
     """A fallback value is returned verbatim when the lookup fails."""
     expected = os.path.join('bin', self.script)
     result = process.find_binary(self.script, fallback=expected)
     assert result == expected
예제 #30
0
 def test_missing(self):
     """An absent binary must raise rather than return a path."""
     with pytest.raises(process.CommandNotFound):
         process.find_binary(self.script)
예제 #31
0
파일: spawn.py 프로젝트: chutz/pkgcore
def spawn(mycommand, env=None, name=None, fd_pipes=None, returnpid=False,
          uid=None, gid=None, groups=None, umask=None, cwd=None):

    """wrapper around execve

    :type mycommand: list or string
    :type env: mapping with string keys and values
    :param name: controls what the process is named
        (what it would show up as under top for example)
    :type fd_pipes: mapping from existing fd to fd (inside the new process)
    :param fd_pipes: controls what fd's are left open in the spawned process-
    :param returnpid: controls whether spawn waits for the process to finish,
        or returns the pid.
    """
    if env is None:
        env = {}
    # mycommand is either a str or a list
    if isinstance(mycommand, str):
        mycommand = mycommand.split()

    # If an absolute path to an name file isn't given
    # search for it unless we've been told not to.
    binary = find_binary(mycommand[0])

    # If we haven't been told what file descriptors to use
    # default to propogating our stdin, stdout and stderr.
    if fd_pipes is None:
        fd_pipes = {0:0, 1:1, 2:2}

    # mypids will hold the pids of all processes created.
    mypids = []

    # fork: the child (pid == 0) execs below, the parent reaps
    pid = os.fork()

    if not pid:
        # 'Catch "Exception"'
        # pylint: disable-msg=W0703
        try:
            _exec(binary, mycommand, name, fd_pipes, env, gid, groups,
                  uid, umask, cwd)
        except Exception as e:
            # We need to catch _any_ exception so that it doesn't
            # propogate out of this function and cause exiting
            # with anything other than os._exit()
            sys.stderr.write("%s:\n   %s\n" % (e, " ".join(mycommand)))
            os._exit(1)

    # Add the pid to our local and the global pid lists.
    mypids.append(pid)
    spawned_pids.append(pid)

    # If the caller wants to handle cleaning up the processes, we tell
    # it about all processes that were created.
    if returnpid:
        return mypids

    try:
        # Otherwise we clean them up.
        while mypids:

            # Pull the last reader in the pipe chain. If all processes
            # in the pipe are well behaved, it will die when the process
            # it is reading from dies.
            pid = mypids.pop(0)

            # and wait for it.
            retval = os.waitpid(pid, 0)[1]

            # When it's done, we can remove it from the
            # global pid list as well.
            spawned_pids.remove(pid)

            if retval:
                # If it failed, kill off anything else that
                # isn't dead yet.
                for pid in mypids:
                    # (0, 0) from WNOHANG means the child is still running
                    if os.waitpid(pid, os.WNOHANG) == (0, 0):
                        os.kill(pid, signal.SIGTERM)
                        os.waitpid(pid, 0)
                    spawned_pids.remove(pid)

                # translate the raw waitpid status into an exit code
                return process_exit_code(retval)
    finally:
        # always reap anything left over (e.g. on exception) -- no zombies
        cleanup_pids(mypids)

    # Everything succeeded
    return 0
예제 #32
0
"""

__all__ = ("compress_data", "decompress_data")

from functools import partial
import multiprocessing
import sys

from snakeoil import process
from snakeoil.compression import _util

# Unused import
# pylint: disable=W0611

# bzip2 must be on PATH even when the native bz2 module imports, since
# the handle-based helpers below always use the external binary; import
# of this module raises CommandNotFound otherwise.  NOTE(review): the
# lookup is arguably only needed in the non-native branch -- confirm.
bz2_path = process.find_binary("bzip2")

try:
    from bz2 import (compress as _compress_data, decompress as
                     _decompress_data, BZ2File)
    # stdlib bz2 available; callers may rely on e.g. TarFile.bz2open
    native = True
except ImportError:

    # We need this because if we are not native then TarFile.bz2open will fail
    # (and some code needs to be able to check that).
    native = False

    # fall back to piping data through the external bzip2 binary
    _compress_data = partial(_util.compress_data, bz2_path)
    _decompress_data = partial(_util.decompress_data, bz2_path)

# handle-based compression always shells out to the external binary
_compress_handle = partial(_util.compress_handle, bz2_path)
예제 #33
0
 def get_binary_path(self):
     """Locate the install-info binary.

     :return: path to install-info, or None when it is not on PATH
     """
     try:
         path = process.find_binary('install-info')
     except process.CommandNotFound:
         # absence is an expected condition; signal it with None
         path = None
     return path
예제 #34
0
def spawn(mycommand,
          env=None,
          name=None,
          fd_pipes=None,
          returnpid=False,
          uid=None,
          gid=None,
          groups=None,
          umask=None,
          cwd=None):
    """wrapper around execve

    :type mycommand: list or string
    :type env: mapping with string keys and values
    :param name: controls what the process is named
        (what it would show up as under top for example)
    :type fd_pipes: mapping from existing fd to fd (inside the new process)
    :param fd_pipes: controls what fd's are left open in the spawned process-
    :param returnpid: controls whether spawn waits for the process to finish,
        or returns the pid.
    """
    if env is None:
        env = {}
    # mycommand is either a str or a list
    if isinstance(mycommand, str):
        mycommand = mycommand.split()

    # If an absolute path to an name file isn't given
    # search for it unless we've been told not to.
    binary = find_binary(mycommand[0])

    # If we haven't been told what file descriptors to use
    # default to propogating our stdin, stdout and stderr.
    if fd_pipes is None:
        fd_pipes = {0: 0, 1: 1, 2: 2}

    # mypids will hold the pids of all processes created.
    mypids = []

    # fork: the child (pid == 0) execs below, the parent reaps
    pid = os.fork()

    if not pid:
        # 'Catch "Exception"'
        # pylint: disable-msg=W0703
        try:
            _exec(binary, mycommand, name, fd_pipes, env, gid, groups, uid,
                  umask, cwd)
        except Exception as e:
            # We need to catch _any_ exception so that it doesn't
            # propogate out of this function and cause exiting
            # with anything other than os._exit()
            sys.stderr.write("%s:\n   %s\n" % (e, " ".join(mycommand)))
            os._exit(1)

    # Add the pid to our local and the global pid lists.
    mypids.append(pid)
    spawned_pids.append(pid)

    # If the caller wants to handle cleaning up the processes, we tell
    # it about all processes that were created.
    if returnpid:
        return mypids

    try:
        # Otherwise we clean them up.
        while mypids:

            # Pull the last reader in the pipe chain. If all processes
            # in the pipe are well behaved, it will die when the process
            # it is reading from dies.
            pid = mypids.pop(0)

            # and wait for it.
            retval = os.waitpid(pid, 0)[1]

            # When it's done, we can remove it from the
            # global pid list as well.
            spawned_pids.remove(pid)

            if retval:
                # If it failed, kill off anything else that
                # isn't dead yet.
                for pid in mypids:
                    # (0, 0) from WNOHANG means the child is still running
                    if os.waitpid(pid, os.WNOHANG) == (0, 0):
                        os.kill(pid, signal.SIGTERM)
                        os.waitpid(pid, 0)
                    spawned_pids.remove(pid)

                # translate the raw waitpid status into an exit code
                return process_exit_code(retval)
    finally:
        # always reap anything left over (e.g. on exception) -- no zombies
        cleanup_pids(mypids)

    # Everything succeeded
    return 0
예제 #35
0
파일: _bzip2.py 프로젝트: den4ix/snakeoil
"""

__all__ = ("compress_data", "decompress_data")

from functools import partial
import multiprocessing
import sys

from snakeoil import process
from snakeoil.compression import _util

# Unused import
# pylint: disable=W0611

# bzip2 is looked up unconditionally, so importing this module raises
# CommandNotFound when the binary is absent.  NOTE(review): only the
# non-native branch below actually uses bz2_path -- confirm the eager
# lookup is intentional.
bz2_path = process.find_binary("bzip2")


try:
    from bz2 import (compress as _compress_data,
                     decompress as _decompress_data,
                     BZ2File)
    # stdlib bz2 present; callers may rely on e.g. TarFile.bz2open
    native = True
except ImportError:

    # We need this because if we are not native then TarFile.bz2open will fail
    # (and some code needs to be able to check that).
    native = False

    # fall back to piping data through the external bzip2 binary
    _compress_data = partial(_util.compress_data, bz2_path)
    _decompress_data = partial(_util.decompress_data, bz2_path)
예제 #36
0
파일: const.py 프로젝트: neko259/pkgcore
from snakeoil.process import find_binary
try:
    # This is a file written during pkgcore installation;
    # if it exists, we defer to it.  If it doesn't, then we're
    # running from a git checkout or a tarball.
    from pkgcore import _const as _defaults
except ImportError:
    _defaults = object()

# the pkgcore package directory
PKGCORE_BASE_PATH  = osp.dirname(osp.abspath(__file__))
SYSTEM_CONF_FILE   = '/etc/pkgcore.conf'
USER_CONF_FILE     = osp.expanduser('~/.pkgcore.conf')

SANDBOX_BINARY     = '/usr/bin/sandbox'
BASH_BINARY        = find_binary('bash')
COPY_BINARY        = find_binary('cp')

HOST_NONROOT_PATHS = ("/usr/local/bin", "/usr/bin", "/bin")
HOST_ROOT_PATHS    = ("/usr/local/sbin", "/usr/local/bin", "/usr/sbin",
                      "/usr/bin", "/sbin", "/bin")

# no longer used.
LIBFAKEROOT_PATH   = "/usr/lib/libfakeroot.so"
FAKED_PATH         = "/usr/bin/faked"


def _GET_CONST(attr, default_value, allow_environment_override=False):
    consts = mappings.ProxiedAttrs(sys.modules[__name__])
    if compatibility.is_py3k:
        is_tuple = not isinstance(default_value, str)
예제 #37
0
# Copyright: 2006-2009 Brian Harring <*****@*****.**>
# License: GPL2/BSD

import os
import signal

import pytest

from snakeoil import process
from snakeoil.process import spawn
from snakeoil.test.fixtures import TempDir

# resolved once at import; the '' fallback keeps import from raising so a
# missing bash can instead skip the tests that depend on it
BASH_BINARY = process.find_binary("bash", fallback='')

@pytest.mark.skipif(not BASH_BINARY, reason='missing bash binary')
class TestSpawn(TempDir):

    def setup(self):
        """Prepend the test's temp dir to PATH so generated scripts win lookup."""
        # Save the original PATH so teardown() can restore it.
        self.orig_env = os.environ["PATH"]
        self.null_file = open("/dev/null", "w")
        self.null = self.null_file.fileno()
        os.environ["PATH"] = ":".join([self.dir] + self.orig_env.split(":"))

    def teardown(self):
        """Close /dev/null and restore the PATH saved in setup()."""
        self.null_file.close()
        os.environ["PATH"] = self.orig_env

    def generate_script(self, filename, text):
        if not os.path.isabs(filename):
            fp = os.path.join(self.dir, filename)
        with open(fp, "w") as f:
예제 #38
0
try:
    # This is a file written during pkgcore installation;
    # if it exists, we defer to it.  If it doesn't, then we're
    # running from a git checkout or a tarball.
    from pkgcore import _const as _defaults
except ImportError:
    # Bare object() sentinel: every getattr(_defaults, ...) lookup misses,
    # so defaults defined in this module win.
    _defaults = object()

SYSTEM_CONF_FILE = '/etc/pkgcore/pkgcore.conf'
USER_CONF_FILE = osp.expanduser('~/.config/pkgcore/pkgcore.conf')
# TODO: deprecated, drop support in 0.10
OLD_SYSTEM_CONF_FILE = '/etc/pkgcore.conf'
OLD_USER_CONF_FILE = osp.expanduser('~/.pkgcore.conf')

SANDBOX_BINARY = '/usr/bin/sandbox'
# Resolved at import time; find_binary raises CommandNotFound if the
# command is not on PATH.
BASH_BINARY = find_binary('bash')
COPY_BINARY = find_binary('cp')

# PATH used for non-root operation inside the build environment.
HOST_NONROOT_PATHS = ("/usr/local/bin", "/usr/bin", "/bin")


def _GET_CONST(attr, default_value, allow_environment_override=False):
    consts = mappings.ProxiedAttrs(sys.modules[__name__])
    if compatibility.is_py3k:
        is_tuple = not isinstance(default_value, str)
    else:
        is_tuple = not isinstance(default_value, basestring)
    if is_tuple:
        default_value = tuple(x % consts for x in default_value)
    else:
        default_value %= consts
예제 #39
0
import os
import signal

import pytest

from snakeoil import process
from snakeoil.process import spawn
from snakeoil.test.fixtures import TempDir

# Empty-string fallback instead of raising CommandNotFound, so the
# skipif mark below can gracefully detect a missing bash binary.
BASH_BINARY = process.find_binary("bash", fallback='')

@pytest.mark.skipif(not BASH_BINARY, reason='missing bash binary')
class TestSpawn(TempDir):

    def setup(self):
        """Put the temp dir at the front of PATH; stash state for teardown()."""
        original_path = os.environ["PATH"]
        self.orig_env = original_path
        self.null_file = open("/dev/null", "w")
        self.null = self.null_file.fileno()
        search_dirs = [self.dir] + original_path.split(":")
        os.environ["PATH"] = ":".join(search_dirs)

    def teardown(self):
        """Undo setup(): release the /dev/null handle and put PATH back."""
        os.environ["PATH"] = self.orig_env
        self.null_file.close()

    def generate_script(self, filename, text):
        """Write an executable bash script for the test to run.

        :param filename: script name; relative names are created under
            ``self.dir``, absolute paths are used as-is.
        :param text: script body, written after a ``#!/usr/bin/env bash``
            shebang line.
        """
        if not os.path.isabs(filename):
            fp = os.path.join(self.dir, filename)
        else:
            # Bug fix: an absolute filename previously left ``fp`` unbound,
            # raising NameError at the open() call below.
            fp = filename
        with open(fp, "w") as f:
            f.write("#!/usr/bin/env bash\n")
            f.write(text)
        # rwxr-x--- so the script is runnable by the test process.
        os.chmod(fp, 0o750)
예제 #40
0
 def test_fallback(self):
     """The fallback value is returned unchanged when lookup fails."""
     expected = os.path.join('bin', self.script)
     result = process.find_binary(self.script, fallback=expected)
     assert result == expected
예제 #41
0
 def test_found(self):
     """find_binary() resolves an executable on PATH to its full path."""
     script_path = os.path.join(self.dir, self.script)
     touch(script_path)
     os.chmod(script_path, 0o750)
     assert process.find_binary(self.script) == script_path
예제 #42
0
def _main(parser, opts):
    """The "main" main function so we can trace/profile.

    Sets up logging, validates digest/hash configuration, detects helper
    binaries, unshares namespaces (Linux only), then runs the requested
    catalyst build.  Exits the process with 2 on failure, 0 on success.

    :param parser: the argparse parser (used only for ``parser.error``).
    :param opts: parsed command-line options.
    """
    # Initialize the logger before anything else.
    log_level = opts.log_level
    if log_level is None:
        if opts.debug:
            log_level = 'debug'
        elif opts.verbose:
            log_level = 'info'
        else:
            log_level = 'notice'
    log.setup_logging(log_level,
                      output=opts.log_file,
                      debug=opts.debug,
                      color=opts.color)

    # Parse the command line options.
    myconfigs = opts.configs
    if not myconfigs:
        myconfigs = [DEFAULT_CONFIG_FILE]
    myspecfile = opts.file
    mycmdline = opts.cli[:]

    if opts.snapshot:
        mycmdline.append('target=snapshot')
        mycmdline.append('version_stamp=' + opts.snapshot)

    conf_values['DEBUG'] = opts.debug
    conf_values['VERBOSE'] = opts.debug or opts.verbose

    # Map boolean CLI flags onto the option-name set consumed by the targets.
    options = set()
    if opts.fetchonly:
        options.add('fetch')
    if opts.purge:
        options.add('purge')
    if opts.purgeonly:
        options.add('purgeonly')
    if opts.purgetmponly:
        options.add('purgetmponly')
    if opts.clear_autoresume:
        options.add('clear-autoresume')

    # Make sure we have some work before moving further.
    if not myspecfile and not mycmdline:
        parser.error('please specify one of either -f or -C or -s')

    # made it this far so start by outputting our version info
    version()
    # import configuration file and import our main module using those settings
    parse_config(myconfigs)

    conf_values["options"].update(options)
    log.notice('conf_values[options] = %s', conf_values['options'])

    # initialize our contents generator
    contents_map = ContentsMap(CONTENTS_DEFINITIONS,
                               comp_prog=conf_values['comp_prog'],
                               decomp_opt=conf_values['decomp_opt'],
                               list_xattrs_opt=conf_values['list_xattrs_opt'])
    conf_values["contents_map"] = contents_map

    # initialze our hash and contents generators
    hash_map = HashMap(HASH_DEFINITIONS)
    conf_values["hash_map"] = hash_map

    # initialize our (de)compression definitions
    conf_values['decompress_definitions'] = DECOMPRESS_DEFINITIONS
    conf_values['compress_definitions'] = COMPRESS_DEFINITIONS
    # TODO add capability to config/spec new definitions

    # Start checking that digests are valid now that hash_map is initialized
    if "digests" in conf_values:
        digests = set(conf_values['digests'].split())
        valid_digests = set(HASH_DEFINITIONS.keys())

        # Use the magic keyword "auto" to use all algos that are available.
        skip_missing = False
        if 'auto' in digests:
            skip_missing = True
            digests.remove('auto')
            if not digests:
                digests = set(valid_digests)

        # First validate all the requested digests are valid keys.
        if digests - valid_digests:
            log.critical(
                'These are not valid digest entries:\n'
                '%s\n'
                'Valid digest entries:\n'
                '%s', ', '.join(digests - valid_digests),
                ', '.join(sorted(valid_digests)))

        # Then check for any programs that the hash func requires.
        # Bug fix: iterate over a snapshot — the original iterated the live
        # set while calling digests.remove() inside the loop, which raises
        # "RuntimeError: Set changed size during iteration" in auto mode.
        for digest in list(digests):
            try:
                process.find_binary(hash_map.hash_map[digest].cmd)
            except process.CommandNotFound:
                # In auto mode, just ignore missing support.
                if skip_missing:
                    digests.remove(digest)
                    continue
                log.critical(
                    'The "%s" binary needed by digest "%s" was not found. '
                    'It needs to be in your system path.',
                    hash_map.hash_map[digest].cmd, digest)

        # Now reload the config with our updated value.
        conf_values['digests'] = ' '.join(digests)

    if "hash_function" in conf_values:
        if conf_values["hash_function"] not in HASH_DEFINITIONS:
            log.critical(
                '%s is not a valid hash_function entry\n'
                'Valid hash_function entries:\n'
                '%s', conf_values["hash_function"], HASH_DEFINITIONS.keys())
        try:
            process.find_binary(
                hash_map.hash_map[conf_values["hash_function"]].cmd)
        except process.CommandNotFound:
            log.critical(
                'The "%s" binary needed by hash_function "%s" was not found. '
                'It needs to be in your system path.',
                hash_map.hash_map[conf_values['hash_function']].cmd,
                conf_values['hash_function'])

    # detect GNU sed
    for sed in ('/usr/bin/gsed', '/bin/sed', '/usr/bin/sed'):
        if os.path.exists(sed):
            conf_values["sed"] = sed
            break

    addlargs = {}

    if myspecfile:
        log.notice("Processing spec file: %s", myspecfile)
        spec = catalyst.config.SpecParser(myspecfile)
        addlargs.update(spec.get_values())

    if mycmdline:
        try:
            cmdline = catalyst.config.ConfigParser()
            cmdline.parse_lines(mycmdline)
            addlargs.update(cmdline.get_values())
        except CatalystError:
            log.critical('Could not parse commandline')

    if "target" not in addlargs:
        raise CatalystError("Required value \"target\" not specified.")

    if os.getuid() != 0:
        # catalyst cannot be run as a normal user due to chroots, mounts, etc
        log.critical('This script requires root privileges to operate')

    # Namespaces aren't supported on *BSDs at the moment. So let's check
    # whether we're on Linux.
    if os.uname().sysname in ["Linux", "linux"]:
        # Start off by creating unique namespaces to run in.  Would be nice to
        # use pid & user namespaces, but snakeoil's namespace module has signal
        # transfer issues (CTRL+C doesn't propagate), and user namespaces need
        # more work due to Gentoo build process (uses sudo/root/portage).
        namespaces.simple_unshare(mount=True,
                                  uts=True,
                                  ipc=True,
                                  pid=False,
                                  net=False,
                                  user=False,
                                  hostname='catalyst')

    # everything is setup, so the build is a go
    # Initialized defensively: presumably log.critical() exits the process
    # (TODO confirm), but if it ever returns, ``success`` would otherwise be
    # unbound after a KeyboardInterrupt.
    success = False
    try:
        success = build_target(addlargs)
    except KeyboardInterrupt:
        log.critical('Catalyst build aborted due to user interrupt (Ctrl-C)')
    if not success:
        sys.exit(2)
    sys.exit(0)
예제 #43
0
def main(argv):
	"""Command-line entry point: parse argv, validate config, run the build.

	Exits the process with 2 on build failure, 0 on success.

	:param argv: command-line argument list (without the program name).
	"""
	parser = get_parser()
	opts = parser.parse_args(argv)

	# Initialize the logger before anything else.
	log_level = opts.log_level
	if log_level is None:
		if opts.debug:
			log_level = 'debug'
		elif opts.verbose:
			log_level = 'info'
		else:
			log_level = 'notice'
	log.setup_logging(log_level, output=opts.log_file, debug=opts.debug,
		color=opts.color)

	# Parse the command line options.
	myconfigs = opts.configs
	if not myconfigs:
		myconfigs = [DEFAULT_CONFIG_FILE]
	myspecfile = opts.file
	mycmdline = opts.cli[:]

	if opts.snapshot:
		mycmdline.append('target=snapshot')
		mycmdline.append('version_stamp=' + opts.snapshot)

	conf_values['DEBUG'] = opts.debug
	conf_values['VERBOSE'] = opts.debug or opts.verbose

	# Map boolean CLI flags onto the option-name set consumed by the targets.
	options = set()
	if opts.fetchonly:
		options.add('fetch')
	if opts.purge:
		options.add('purge')
	if opts.purgeonly:
		options.add('purgeonly')
	if opts.purgetmponly:
		options.add('purgetmponly')
	if opts.clear_autoresume:
		options.add('clear-autoresume')

	# Make sure we have some work before moving further.
	if not myspecfile and not mycmdline:
		parser.error('please specify one of either -f or -C or -s')

	# made it this far so start by outputting our version info
	version()
	# import configuration file and import our main module using those settings
	parse_config(myconfigs)

	conf_values["options"].update(options)
	log.debug('conf_values[options] = %s', conf_values['options'])

	# initialize our contents generator
	contents_map = ContentsMap(CONTENTS_DEFINITIONS)
	conf_values["contents_map"] = contents_map

	# initialze our hash and contents generators
	hash_map = HashMap(HASH_DEFINITIONS)
	conf_values["hash_map"] = hash_map

	# initialize our (de)compression definitions
	conf_values['decompress_definitions'] = DECOMPRESS_DEFINITIONS
	conf_values['compress_definitions'] = COMPRESS_DEFINITIONS
	# TODO add capability to config/spec new definitions

	# Start checking that digests are valid now that hash_map is initialized
	if "digests" in conf_values:
		digests = set(conf_values['digests'].split())
		valid_digests = set(HASH_DEFINITIONS.keys())

		# Use the magic keyword "auto" to use all algos that are available.
		skip_missing = False
		if 'auto' in digests:
			skip_missing = True
			digests.remove('auto')
			if not digests:
				digests = set(valid_digests)

		# First validate all the requested digests are valid keys.
		if digests - valid_digests:
			log.critical(
				'These are not valid digest entries:\n'
				'%s\n'
				'Valid digest entries:\n'
				'%s',
				', '.join(digests - valid_digests),
				', '.join(sorted(valid_digests)))

		# Then check for any programs that the hash func requires.
		# Bug fix: iterate over a snapshot — the original iterated the live
		# set while calling digests.remove() inside the loop, which raises
		# "RuntimeError: Set changed size during iteration" in auto mode.
		for digest in list(digests):
			try:
				process.find_binary(hash_map.hash_map[digest].cmd)
			except process.CommandNotFound:
				# In auto mode, just ignore missing support.
				if skip_missing:
					digests.remove(digest)
					continue
				log.critical(
					'The "%s" binary needed by digest "%s" was not found. '
					'It needs to be in your system path.',
					hash_map.hash_map[digest].cmd, digest)

		# Now reload the config with our updated value.
		conf_values['digests'] = ' '.join(digests)

	if "hash_function" in conf_values:
		if conf_values["hash_function"] not in HASH_DEFINITIONS:
			# Bug fix: the message has two %s placeholders but only one
			# argument was supplied; pass the offending hash_function too.
			log.critical(
				'%s is not a valid hash_function entry\n'
				'Valid hash_function entries:\n'
				'%s', conf_values["hash_function"], HASH_DEFINITIONS.keys())
		try:
			process.find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd)
		except process.CommandNotFound:
			log.critical(
				'The "%s" binary needed by hash_function "%s" was not found. '
				'It needs to be in your system path.',
				hash_map.hash_map[conf_values['hash_function']].cmd,
				conf_values['hash_function'])

	addlargs = {}

	if myspecfile:
		spec = catalyst.config.SpecParser(myspecfile)
		addlargs.update(spec.get_values())

	if mycmdline:
		try:
			cmdline = catalyst.config.ConfigParser()
			cmdline.parse_lines(mycmdline)
			addlargs.update(cmdline.get_values())
		except CatalystError:
			log.critical('Could not parse commandline')

	if "target" not in addlargs:
		raise CatalystError("Required value \"target\" not specified.")

	if os.getuid() != 0:
		# catalyst cannot be run as a normal user due to chroots, mounts, etc
		log.critical('This script requires root privileges to operate')

	# everything is setup, so the build is a go
	# Initialized defensively: presumably log.critical() exits the process
	# (TODO confirm), but if it ever returns, ``success`` would otherwise be
	# unbound after a KeyboardInterrupt.
	success = False
	try:
		success = build_target(addlargs)
	except KeyboardInterrupt:
		log.critical('Catalyst build aborted due to user interrupt (Ctrl-C)')
	if not success:
		sys.exit(2)
	sys.exit(0)