Code example #1
    def inspect(self):

        pkg = self.package

        if self.setup_path and not self.egg_path:

            log.info(style_note('Building Python egg-info'))
            res = call_setup_py(self.setup_path, ['egg_info'],
                                env=pkg.fresh_environ(),
                                indent=True,
                                verbosity=1)
            if res:
                raise RuntimeError('Could not build Python package')

            self.egg_path = find_in_tree(pkg.build_path, '*.egg-info', 'dir')
            if not self.egg_path:
                log.warning('Could not find newly created *.egg-info')

        if self.egg_path:
            requires_path = os.path.join(self.egg_path, 'requires.txt')
            if os.path.exists(requires_path):
                for line in open(requires_path):
                    line = line.strip()
                    if not line:
                        continue
                    if line.startswith('['):
                        break
                    name = re.split(r'\W', line)[0].lower()
                    log.debug('%s depends on %s' % (pkg.name, name))
                    pkg.dependencies.append(
                        Package(name=name, url='pypi:%s' % name))
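The requires.txt that egg_info produces is a flat list of requirement lines, with optional [extra] section headers that apply only to extras; the loop above stops at the first such header. A minimal standalone sketch of the same parsing (the helper name is illustrative):

import re

def iter_requires_txt(path):
    # Yield the bare distribution name of each top-level requirement,
    # stopping at the first "[extra]" section header.
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            if line.startswith('['):
                break
            yield re.split(r'\W', line)[0].lower()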
Code example #2
File: environment.py  Project: immersionroom/vee
    def create_if_not_exists(self):

        python = os.path.join(self.path, 'bin', 'python')
        if not os.path.exists(python):
            makedirs(self.path)
            log.info(style_note('Creating Python virtualenv', self.path))

            if hasattr(virtualenv, 'cli_run'):
                # New API (in which there isn't really any API)
                virtualenv.cli_run(
                    ['--no-pip', '--no-wheel', '--no-setuptools', self.path])
            else:
                # Old API
                virtualenv.create_environment(self.path,
                                              no_setuptools=True,
                                              no_pip=True)

        if not os.path.exists(python + '-config'):
            version = get_default_python().version
            names = (
                'python{}.{}-config'.format(*version),
                'python{}-config'.format(*version),
                'python-config',
            )
            prefix = getattr(sys, 'real_prefix', sys.prefix)
            for name in names:
                old_path = os.path.join(prefix, 'bin', name)
                if os.path.exists(old_path):
                    for name in names:
                        new_path = os.path.join(self.path, 'bin', name)
                        self.rewrite_shebang_or_link(old_path, new_path)
                    break
            else:
                log.warning('Could not find python-config')
Code example #3
File: git.py  Project: immersionroom/vee
    def check_ff_safety(self, rev='HEAD', ignore_permissions=True):

        # Check the status of the work tree and index.
        status_ok = True
        for idx, tree, name in self.status(
                ignore_permissions=ignore_permissions):
            if idx or tree:
                log.error('uncommitted changes:')
                self.git('status')
                status_ok = False
                break

        # Make sure we haven't forked.
        ahead, behind = self.distance(self.head, rev)
        if ahead and behind:
            log.error('you and the repo have forked')
            status_ok = False
        elif ahead:
            log.warning(
                'you are %s commits ahead of the remote repo; please `vee push`'
                % ahead)
            status_ok = False
        elif behind:
            log.info('You are %d commits behind.' % behind)

        return status_ok
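The distance() call above boils down to a single git invocation: git rev-list --left-right --count A...B prints how many commits are reachable only from each side. A hedged sketch of such a helper (the helper name and default refs are illustrative; VEE's own git wrapper may differ):

import subprocess

def distance(repo_dir, local='HEAD', remote='origin/master'):
    # --left-right --count prints "<only-in-local>\t<only-in-remote>".
    out = subprocess.check_output(
        ['git', '-C', repo_dir, 'rev-list', '--left-right', '--count',
         '%s...%s' % (local, remote)])
    ahead, behind = out.split()
    return int(ahead), int(behind)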
Code example #4
File: libs.py  Project: immersionroom/vee
def relocate_pkgconfig(root):

    # Do trivial rewrites of pkgconfig files.
    pkg_config = os.path.join(root, 'lib', 'pkgconfig')
    if os.path.exists(pkg_config):
        for name in os.listdir(pkg_config):
            if not name.endswith('.pc'):
                continue
            path = os.path.join(pkg_config, name)
            log.info(path)
            lines = list(open(path))
            for i, line in enumerate(lines):
                if re.match(r'^prefix=([^\$]+)\s*$', line):
                    lines[i] = 'prefix=%s\n' % root
                    break
            else:
                with log.indent():
                    log.warning('No obvious prefix to replace')
                continue
            # As silly as this seems, *.pc files we have seen have their
            # write flag removed, but we still own them (since we just installed
            # them). Quickest way to fix: delete them.
            if not os.access(path, os.W_OK):
                os.unlink(path)
            with open(path, 'w') as fh:
                fh.writelines(lines)
Code example #5
File: make.py  Project: westernx/vee
    def factory(cls, step, pkg):
        
        if step not in ('build', 'install'):
            return

        makefile = find_in_tree(pkg.build_path, 'Makefile')
        configure = find_in_tree(pkg.build_path, 'configure') if step == 'build' else None
        configure_ac = find_in_tree(pkg.build_path, 'configure.ac') if step == 'build' else None

        # We generally provide 'install' via get_next, but if you specify   
        # --build-sh it will come looking here, and so we must redo that
        # logic.
        if step == 'install':
            if makefile:
                # HACK: I was too lazy to refactor the logic for --make-install,
                # so I just use this API.
                return cls(pkg, (None, None, makefile)).get_next(step)
            else:
                return

        # Warn about both autoconf conflict states.
        if configure_ac and not pkg.autoconf:
            log.warning('autoconf detected, but --autoconf flag is not set on package')
        if configure and pkg.autoconf:
            log.warning('--autoconf flag is set on package but ./configure was found')

        # Only return with configure.ac iff the user set --autoconf
        if (configure_ac and pkg.autoconf) or configure or makefile:
            return cls(pkg, (configure_ac, configure, makefile))
Code example #6
File: environment.py  Project: westernx/vee
    def create_if_not_exists(self):

        python = os.path.join(self.path, 'bin', 'python')
        if not os.path.exists(python):
            makedirs(self.path)
            print style('Creating Python virtualenv', 'blue',
                        bold=True), style(self.path, bold=True)
            virtualenv.create_environment(self.path,
                                          no_setuptools=True,
                                          no_pip=True)

        if not os.path.exists(python + '-config'):
            names = (
                'python%d.%d-config' % sys.version_info[:2],
                'python%d-config' % sys.version_info[0],
                'python-config',
            )
            prefix = getattr(sys, 'real_prefix', sys.prefix)
            for name in names:
                old_path = os.path.join(prefix, 'bin', name)
                if os.path.exists(old_path):
                    for name in names:
                        new_path = os.path.join(self.path, 'bin', name)
                        self.rewrite_shebang_or_link(old_path, new_path)
                    break
            else:
                log.warning('Could not find python-config')
Code example #7
    def install(self):

        pkg = self.package

        if pkg.pseudo_homebrew:
            homebrew = Homebrew(home=pkg.home)
            version = pkg.revision.split('+')[0]
            pkg.install_path = os.path.join(homebrew.cellar, pkg.name, version)
            log.info(style_note('Re-installing into Homebrew', 'as %s/%s' % (pkg.name, version)))

        pkg._assert_paths(install=True)

        if pkg.make_install:
            log.warning('--make-install specified, but no Makefile found.')

        if os.path.exists(pkg.install_path):
            log.warning('Removing existing install', pkg.install_path)
            shutil.rmtree(pkg.install_path)

        if pkg.hard_link:
            log.info(style_note('Installing via hard-link', 'to ' + pkg.install_path))
            linktree(pkg.build_path_to_install, pkg.install_path_from_build, symlinks=True)
        else:
            log.info(style_note('Installing via copy', 'to ' + pkg.install_path))
            shutil.copytree(pkg.build_path_to_install, pkg.install_path_from_build, symlinks=True)
Code example #8
def rescan(args):

    home = args.assert_home()
    con = home.db.connect()

    args.force = True

    pairs = []

    if args.names:
        for name in args.names:
            row = con.execute(
                'SELECT path FROM development_packages WHERE name = ? OR name = ?',
                [name, os.path.basename(os.path.abspath(name))]).fetchone()
            if not row:
                log.warning('No dev package named %s' % name)
                continue
            path = row[0]
            if not os.path.exists(path):
                log.warning('Dev package %s no longer exists at %s' %
                            (name, path))
                continue
            pairs.append((name, path))
    else:
        pairs = list(
            con.execute('SELECT name, path FROM development_packages'))

    for name, path in pairs:
        args.name = name
        args.path = path
        init(args, do_add=True)
Code example #9
        def callback_target():
            
            try:
                fh = os.fdopen(prfd)
            except:
                os.close(prfd)
                raise

            while True:
                arg_count = fh.readline().strip()
                if not arg_count:
                    break
                arg_count = int(arg_count)
                args = []
                for arg_i in range(arg_count):
                    arg_len = int(fh.readline())
                    args.append(fh.read(arg_len))
                name = args[0]
                if name in callbacks:
                    try:
                        res = callbacks[name](*args[1:])
                    except Exception as e:
                        log.exception('exception in callback %s: %s' % (name, e))
                        res = None
                else:
                    log.warning('no callback %s' % name)
                    res = None
                if res is None:
                    os.write(pwfd, '0\n')
                else:
                    res = str(res)
                    os.write(pwfd, '%s\n' % len(res))
                    os.write(pwfd, res)
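The loop above implements a small length-prefixed protocol over a pipe pair: the peer sends an argument-count line, then each argument as a length line followed by that many bytes (the callback name first); the reply is "0\n" for a None result, or the result length followed by the result bytes. A hedged sketch of the sending side (descriptor and function names are illustrative):

import os

def send_callback(req_wfd, res_rfd, name, *args):
    # Write the request: count, then "<len>\n<data>" per argument.
    items = [str(name)] + [str(a) for a in args]
    os.write(req_wfd, ('%d\n' % len(items)).encode())
    for item in items:
        os.write(req_wfd, ('%d\n%s' % (len(item), item)).encode())
    # Read the reply: "0\n" means None, otherwise "<len>\n" plus payload.
    res = os.fdopen(res_rfd)
    length = int(res.readline())
    return res.read(length) if length else None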
Code example #10
File: requirements.py  Project: westernx/vee
    def parse_file(self, source):

        if source == '-':
            source = sys.stdin
        elif isinstance(source, basestring):
            source = open(source, 'r')

        line_iter = iter(source)
        for line in line_iter:

            line = line.rstrip()
            while line.endswith('\\'):
                line = line[:-1] + next(line_iter).rstrip()

            m = re.match(r'^(\s*)([^#]*?)(\s*#.*)?$', line)
            before, spec, after = m.groups()
            before = before or ''
            after = after or ''

            if not spec:
                self.append((before, '', after))
                continue

            # Note: This will freak out with colons in comments.
            # TODO: Pull parsing from PyHAML.
            m = re.match(r'^%\s*(if|elif|else|endif)\s*(.*?):?\s*$', spec)
            if m:
                type_, expr = m.groups()
                self.append((before, Control(type_, expr), after))
                continue

            m = re.match(r'^(\w+)=(\S.*)$', spec)
            if m:
                name, value = m.groups()
                self._cumulative_environ[name] = value
                self.append((before, Envvar(name, value), after))
                continue

            m = re.match(r'^([\w-]+): (\S.*)$', spec)
            if m:
                header = Header(*m.groups())
                self.headers[header.name] = header
                self.append((before, header, after))
                continue

            try:
                pkg = Package(spec, home=self.home)
            except RequirementParseError as e:
                log.warning('parse error: %s' % e)
                self.append(('', '', '# RequirementParseError: %s' % e.args))
                self.append(('', '', '# ' + line.strip()))
                continue
            for k, v in self._cumulative_environ.iteritems():
                pkg.base_environ.setdefault(k, v)
            self.append((before, pkg, after))

        self._guess_names()
Code example #11
    def shared_libraries(self, rescan=False):
        self._assert_paths(install=True)
        if not self.installed:
            raise RuntimeError('cannot find libraries if not installed')
        if not self.id:
            # I'm not sure if this is a big deal, but I want to see when
            # it is happening.
            log.warning('Finding shared libraries before package is in database.')
        return libs.get_installed_shared_libraries(self.home.db.connect(), self.id_or_persist(), self.install_path, rescan)
Code example #12
File: make.py  Project: westernx/vee
    def get_next(self, step):
        if step != 'install':
            return
        if self.makefile_path:
            if self.package.make_install:
                return self
            else:
                log.warning('Skipping `make install` and installing full package.\n'
                    'Usually you will want to specify one of:\n'
                    '    --make-install\n'
                    '    --build-subdir PATH\n'
                    '    --install-subdir PATH'
                )
Code example #13
    def _install_wheel(self, pkg):

        pkg._assert_paths(install=True)

        if pkg.package_path.endswith('.whl'):
            log.info(
                style_note("Found Python Wheel",
                           os.path.basename(self.dist_info_dir)))
        else:
            log.info(
                style_note("Found dist-info",
                           os.path.basename(self.dist_info_dir)))
            log.warning("Bare dist-info does not appear to be a wheel.")

        wheel_dir, dist_info_name = os.path.split(self.dist_info_dir)
        wheel_name = os.path.splitext(dist_info_name)[0]

        # Let's just take advantage of pip!
        # The only reason we're reaching into pip like this is that we would
        # rather do just this part than have it go through the full process
        # with the *.whl file. If this breaks, feel
        # free to do something like:
        #     pip install --force-reinstall --prefix {pkg.install_path} --no-deps {pkg.package_path}
        # along with:
        #     --no-warn-script-location
        #     --disable-pip-version-check

        # We delay the import just in case the bootstrap is borked.
        from pip._internal.operations.install.wheel import install_wheel
        from pip._internal.locations import get_scheme

        # We may need to trick pip into installing into another version's directories.
        scheme = get_scheme(self.name, prefix=pkg.install_path)
        version = get_default_python().version
        src_python = '{}python{}.{}{}'.format(os.path.sep, sys.version_info[0],
                                              sys.version_info[1], os.path.sep)
        dst_python = '{}python{}.{}{}'.format(os.path.sep, version[0],
                                              version[1], os.path.sep)
        if src_python != dst_python:
            for k in 'platlib', 'purelib', 'headers', 'scripts', 'data':
                setattr(scheme, k,
                        getattr(scheme, k).replace(src_python, dst_python))

        req = DummyPipRequirement()
        req.name = wheel_name
        install_wheel(pkg.name, pkg.package_path, scheme,
                      '<VEE dummy request>')
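As the comment above suggests, the same effect can usually be had by shelling out to pip instead of importing pip._internal; a hedged sketch of that fallback (the function name is illustrative):

import subprocess
import sys

def pip_install_wheel(wheel_path, prefix):
    # Let the pip CLI unpack the wheel into the package's install prefix.
    subprocess.check_call([
        sys.executable, '-m', 'pip', 'install',
        '--force-reinstall', '--no-deps',
        '--prefix', prefix,
        '--no-warn-script-location',
        '--disable-pip-version-check',
        wheel_path,
    ])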
Code example #14
    def _install_setup(self, pkg):

        pkg._assert_paths(install=True)

        site_packages = get_default_python().rel_site_packages
        install_site_packages = os.path.join(pkg.install_path, site_packages)

        # Setup the PYTHONPATH to point to the "install" directory.
        env = pkg.fresh_environ()
        env['PYTHONPATH'] = join_env_path(install_site_packages,
                                          env.get('PYTHONPATH'))

        if os.path.exists(pkg.install_path):
            log.warning('Removing existing install: ' + pkg.install_path)
            shutil.rmtree(pkg.install_path)
        os.makedirs(install_site_packages)

        log.info(
            style_note('Installing Python package',
                       'to ' + install_site_packages))

        cmd = [
            'install',
            '--root',
            pkg.install_path,  # Better than prefix
            '--prefix',
            '.',

            # At one point we forced everything into `lib`, so we don't get a
            # `lib64`. Virtualenv symlinked them together anyways. But then we
            # switched to using pip's internals to unpack wheels, and it would
            # place stuff into both `lib` and `lib64`. So we don't really
            # know where we stand on this anymore.
            '--install-lib',
            site_packages,
            '--single-version-externally-managed',
        ]
        if not pkg.defer_setup_build:
            cmd.append('--skip-build')

        res = call_setup_py(self.setup_path,
                            cmd,
                            env=env,
                            indent=True,
                            verbosity=1)
        if res:
            raise RuntimeError('Could not install Python package')
Code example #15
def list_(args):
    home = args.assert_home()
    rows = list(home.db.execute('SELECT * FROM repositories'))
    if not rows:
        log.warning('No repositories.')
        return
    max_len = max(len(row['name']) for row in rows)
    for row in rows:
        repo = EnvironmentRepo(row, home=home)
        if repo.exists:
            log.info(
                style_note(
                    repo.name,
                    '%s/%s' % (repo.remote_name, repo.branch_name),
                    repo.remotes().get(repo.remote_name, '') +
                    (' --default' if row['is_default'] else ''),
                ))
Code example #16
    def update(self, force=False):

        log.info(style_note('Updating repo', self.name))

        self.clone_if_not_exists()

        if self.remote_name not in self.remotes():
            log.warning('"%s" does not have remote "%s"' % (self.name, self.remote_name))
            return True

        rev = self.fetch()

        if not force and not self.check_ff_safety(rev):
            log.error('Cannot fast-forward; skipping.')
            return False

        self.checkout(force=force)
        return True
Code example #17
    def install(self):

        if not self.setup_path:
            return super(PythonBuilder, self).install()

        pkg = self.package
        pkg._assert_paths(install=True)

        install_site_packages = os.path.join(pkg.install_path, site_packages)

        # Setup the PYTHONPATH to point to the "install" directory.
        env = pkg.fresh_environ()
        env['PYTHONPATH'] = '%s:%s' % (install_site_packages,
                                       env.get('PYTHONPATH', ''))

        if os.path.exists(pkg.install_path):
            log.warning('Removing existing install', pkg.install_path)
            shutil.rmtree(pkg.install_path)
        os.makedirs(install_site_packages)

        log.info(
            style_note('Installing Python package',
                       'to ' + install_site_packages))

        cmd = [
            'install',
            '--root',
            pkg.install_path,  # Better than prefix
            '--prefix',
            '.',
            '--install-lib',
            site_packages,  # So that we don't get lib64; virtualenv symlinks them together anyways.
            '--single-version-externally-managed',
        ]
        if not pkg.defer_setup_build:
            cmd.append('--skip-build')

        res = call_setup_py(self.setup_path,
                            cmd,
                            env=env,
                            indent=True,
                            verbosity=1)
        if res:
            raise RuntimeError('Could not install Python package')
Code example #18
    def persist_in_db(self, con=None):
        self._set_names(package=True, build=True, install=True)
        if not self.installed:
            log.warning('%s does not appear to be installed to %s' % (self.name, self.install_path))
            raise ValueError('cannot record requirement that is not installed')
        con = con or self.home.db.connect()
        with con:
            exists = self.id is not None
            res = super(Package, self).persist_in_db(con=con)
            if exists:
                con.execute('DELETE FROM package_dependencies WHERE depender_id = ?', [self.id])
            for dep in self.dependencies:
                dep_id = dep.id_or_persist(con=con)
                log.debug('Recorded %s -> %s dependency as %d' % (
                    self.name, dep.name, dep_id
                ))
                con.execute('INSERT INTO package_dependencies (depender_id, dependee_id) VALUES (?, ?)', [
                    self.id, dep_id
                ])
            return res
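The DELETE and INSERT above imply a simple join table from depender to dependee packages. A minimal schema sketch consistent with those queries; the real VEE schema may define more columns and constraints:

con.execute('''
    CREATE TABLE IF NOT EXISTS package_dependencies (
        id INTEGER PRIMARY KEY,
        depender_id INTEGER NOT NULL REFERENCES packages(id),
        dependee_id INTEGER NOT NULL REFERENCES packages(id)
    )
''')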
Code example #19
File: develop.py  Project: immersionroom/vee
def rescan(args):

    home = args.assert_home()
    con = home.db.connect()

    args.force = True

    repos = []

    if args.names:
        for name in args.names:
            dev_repo = home.find_development_package(name)
            if not dev_repo:
                log.warning("No dev package: {}".format(name))
                continue
            repos.append(dev_repo)
    else:
        repos = list(home.iter_development_packages())

    for repo in repos:
        args.name = os.path.basename(repo.work_tree)
        args.path = repo.work_tree
        init(args, do_add=True)
Code example #20
    def restore_from_row(self, row, ignore=None):

        try:
            if self.id and self.id != row['id']:
                log.warning('Restoring from a mismatched ID; %s %d != %d' %
                            (self.__tablename__, self.id, row['id']))
            self.id = row['id']
        except KeyError:
            pass

        for col in self.__columns__:

            try:
                val = row[col.name]
            except KeyError:
                continue

            if ignore and col.name in ignore:
                continue

            if col._restore:
                col._restore(self, val)
            else:
                self.__dict__[col.name] = val
Code example #21
    def parse_file(self, source, filename=None, alt_open=None, _depth=0):

        open_ = alt_open or open

        if source == '-':
            filename = filename or '<stdin>'
            source = sys.stdin
        elif isinstance(source, str):
            filename = filename or source
            source = open_(source)

        self.filename = self.filename or filename

        def append(x):
            self._append(ManifestItem(x, prefix, suffix, filename, line_i + 1))

        line_iter = iter(source)
        for line_i, line in enumerate(line_iter):

            line = line.rstrip()
            while line.endswith('\\'):
                line = line[:-1] + next(line_iter).rstrip()

            m = re.match(r'^(\s*)([^#]*?)(\s*#.*)?$', line)
            prefix, spec, suffix = m.groups()

            if not spec:
                append(None)
                continue

            # Note: This will freak out with colons in comments.
            # TODO: Pull parsing from PyHAML.
            m = re.match(r'^%\s*(if|elif|else|endif)\s*(.*?):?\s*$', spec)
            if m:
                type_, expr = m.groups()
                append(Control(type_, expr))
                continue

            m = re.match(r'^%\s*(set|eval|expr)\s+(.+?)\s*$', spec)
            if m:
                type_, source = m.groups()
                append(Expression(source, type_))
                continue

            m = re.match(r'^%\s*include\s+(.+?)\s*$', spec)
            if m:
                raw_path = m.group(1)
                path = os.path.normpath(raw_path).strip('/')
                if raw_path != path:
                    raise ValueError("Malformed include path.", raw_path)
                if self.filename:
                    path = os.path.join(os.path.dirname(self.filename), path)
                other = Manifest(repo=self.repo, home=self.home)
                other.parse_file(path, alt_open=alt_open, _depth=_depth + 1)
                append(Include(raw_path, other))
                continue

            m = re.match(r'^(\w+)=(\S.*)$', spec)
            if m:
                name, value = m.groups()
                self._cumulative_environ[name] = value
                append(Envvar(name, value))
                continue

            m = re.match(r'^([\w-]+): (\S.*)$', spec)
            if m:
                header = Header(*m.groups())
                self.headers[header.name] = header
                append(header)
                continue

            try:
                pkg = Package(spec, context=self, home=self.home)
            except RequirementParseError as e:
                log.warning('parse error: %s' % e)
                self._append('', '', '# RequirementParseError: %s' % e.args)
                self._append('', '', '# ' + line.strip())
                continue
            for k, v in self._cumulative_environ.items():
                pkg.base_environ.setdefault(k, v)
            append(pkg)
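Taken together, the branches above accept blank and comment lines, "% if"/"% elif"/"% else"/"% endif" control lines, "% set"/"% eval"/"% expr" expressions, "% include" directives, NAME=value environment variables, "Header: value" headers, and package requirement lines. A hedged illustration of a manifest this parser would accept (the URL and package flags are made up; only the Version header is referenced elsewhere in this code):

Version: 1.0
CC=clang

% include common/manifest.txt

% if platform == 'darwin':
https://example.org/dist/libfoo-1.2.tar.gz --name=libfoo
% endif

pypi:six --version=1.16.0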
Code example #22
    def __init__(self,
                 args=None,
                 *,
                 home=None,
                 set=None,
                 dev=False,
                 context=None,
                 parent=None,
                 source=None,
                 **kwargs):

        super(Package, self).__init__()

        source = source or parent

        # Must be early due to some properties using this.
        self.home = home = home or (source.home if source else None)
        if not home:
            raise ValueError("Package requires home")

        self.context = context = context or (source.context
                                             if source else None)
        if not context and not dev:
            raise ValueError(
                "Package requires context (Manifest) when not dev")

        if args and kwargs:
            raise ValueError('specify either args OR kwargs')

        if isinstance(args, self.__class__):
            kwargs = args.to_kwargs()
            args = None
        elif isinstance(args, dict):
            kwargs = args
            args = None

        if args:

            if isinstance(args, six.string_types):
                args = shlex.split(args)

            if isinstance(args, (list, tuple)):
                try:
                    requirement_parser.parse_args(args, namespace=self)
                except RequirementParseError as e:
                    raise RequirementParseError("%s in %s" % (e.args[0], args))
            elif isinstance(args, argparse.Namespace):
                for action in requirement_parser._actions:
                    name = action.dest
                    setattr(self, name, getattr(args, name))
            else:
                raise TypeError(
                    "args must be one of (str, list, tuple, dict); got {}".
                    format(args.__class__))

        else:
            for action in requirement_parser._actions:
                name = action.dest

                # Version is a bit special, and should not have a default applied
                # here, otherwise to_kwargs will clear it out.
                if name in ('version', ):
                    try:
                        value = kwargs.pop(name)
                    except KeyError:
                        continue
                else:
                    value = kwargs.pop(name, action.default)

                setattr(self, name, value)

            if kwargs:
                raise ValueError("too many kwargs: {}".format(list(kwargs)))

        assert self.url

        # Assert we have a name.
        if self.name:
            if self.name.lower() != self.name:
                log.warning("package name {!r} was not lowercase".format(
                    self.name))
                self.name = self.name.lower()
        else:
            self.name = guess_name(self.url)

        # TODO: Deprecate these.
        self.dependencies = []
        self.set = set

        # Variant relationships.
        self.parent = parent
        self._children = None
        self._child_is_self = None

        # Make sure to make copies of anything that is mutable.
        self.base_environ = self.base_environ.copy() if self.base_environ else {}
        self.environ = self.environ.copy() if self.environ else {}
        self.config = self.config[:] if self.config else []

        # Initialize other state not covered by the argument parser.
        # TODO: Should this come from the parent?
        self.link_id = None
        self.package_name = self.build_name = None
        self.package_path = self.build_path = self.install_path = None

        # Share some state with the parent.
        if parent:
            self.meta = parent.meta  # Directly shared.
            self.pipeline = parent.pipeline.copy(self)
        else:
            self.meta = context.load_meta(self.name) if context else None
            self.url = self.get_meta('url') or self.url
            self.version = self.get_meta('version') or self.version
            self._init_pipeline(dev=dev)
Code example #23
    def resolve_existing(self, env=None, weak=False):
        """Check against the database to see if this was already installed."""

        if self.id is not None:
            raise ValueError('requirement already in database')

        cur = self.home.db.cursor()

        # Dependencies are deferred.
        deferred = self.url.startswith('deferred:')
        if deferred:
            deferred_id = int(self.url.split(':')[1])
            cur.execute('SELECT * from packages WHERE id = ?', [deferred_id])

        else:

            clauses = ['install_path IS NOT NULL']
            values = []
            if not weak and self.url:
                clauses.append('url = ?')
                values.append(self.url)
            for name in ('name', 'etag', 'install_name'):
                if getattr(self, name):
                    clauses.append('%s = ?' % name)
                    values.append(getattr(self, name))
            clause = ' AND '.join(clauses)

            # log.debug('SELECT FROM packages WHERE %s' % ' AND '.join('%s = %r' % (c.replace(' = ?', ''), v) for c, v in zip(clauses[1:], values)), verbosity=2)

            if env:
                values.append(env.id_or_persist())
                cur.execute(
                    '''
                    SELECT packages.*, links.id as link_id FROM packages
                    LEFT OUTER JOIN links ON packages.id = links.package_id
                    WHERE %s AND links.environment_id = ?
                    ORDER BY links.created_at DESC, packages.created_at DESC
                ''' % clause, values)
            else:
                cur.execute(
                    '''
                    SELECT * FROM packages
                    WHERE %s
                    ORDER BY packages.created_at DESC
                ''' % clause, values)

        for row in cur:

            # Make sure it has enough provisions.
            provides = Provision(row['provides'])
            if any(
                    provides.get(key, None) != value
                    for key, value in self.provides.items()):
                log.debug(
                    'Found %s (%d) whose provisions %s do not satisfy %s' % (
                        self.name or row['name'],
                        row['id'],
                        row['provides'],
                        self.provides,
                    ),
                    verbosity=2)
                continue

            # Make sure it has enough requirements.
            requires = RequirementSet(row['requires'])
            reqs_ok = True
            for name in self.requires:
                try:
                    reqs = requires[name]
                except KeyError:
                    reqs_ok = False
                    continue
                if any(
                        reqs.get(key, None) != value
                        for key, value in self.requires.items()):
                    reqs_ok = False
                    continue
            if not reqs_ok:
                log.debug(
                    'Found %s (%d) whose requirements %s do not satisfy %s' % (
                        self.name or row['name'],
                        row['id'],
                        row['requires'],
                        self.requires,
                    ),
                    verbosity=2)
                continue

            if not os.path.exists(row['install_path']):
                log.warning(
                    'Found %s (%d) does not exist at %s' %
                    (self.name or row['name'], row['id'], row['install_path']))
                continue
            break
        else:

            if deferred:
                raise ValueError(
                    'deferred package %d no longer exists; consider `vee gc`' %
                    deferred_id)
            return

        log.debug('Found %s (%d%s%s) at %s' % (
            self.name or row['name'],
            row['id'],
            ' weakly' if weak else '',
            ' in env %d' % env.id if env else '',
            row['install_path'],
        ))

        self.restore_from_row(row)
        self.link_id = row.get('link_id')

        if deferred:
            self._init_pipeline()

        self._load_dependencies(cur)

        return True
Code example #24
    def _install_one(self, names, name, link_env, reinstall, relink, no_deps):

        try:
            pkg = self[name]
        except KeyError:
            # print(', '.join(sorted(self.keys())))
            raise

        reinstall_this = name in reinstall
        relink_this = name in relink

        if name not in self._extracted:
            try:
                # Between every step, take a look to see if we now have
                # enough information to tell that it is already installed.
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('fetch')
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('extract')
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('inspect')
                pkg.assert_uninstalled(uninstall=reinstall_this)
            except AlreadyInstalled:
                self._installed.add(name)
            finally:
                self._extracted.add(name)

        # Loop around for dependencies. We insert dependencies, and the
        # package itself, back into the names to check. If we get back to
        # a name that we have already deferred in this manner, we continue
        # anyways, since that means there is a dependency cycle. We assume
        # that dependency order is resolved in the requirements file.
        deferred = False
        deps_installed = True
        insert_i = 0
        for i, dep in ([] if no_deps else enumerate(pkg.dependencies)):

            # Since resolution is rather loose in here (only by name, not URL)
            # we want to replace dependencies with their concrete variant to
            # ease recording that into the database.
            dep = self.resolve(dep, weak=True)
            pkg.dependencies[i] = dep
            self._parent_names.setdefault(dep.name, pkg.name)

            # We must check if the dependency errored, otherwise we will still
            # end up in an infinite loop.
            if dep.name in self._errored:
                log.warning('Skipping due to error in %s' % dep.name)
                self._errored.add(pkg.name)
                return

            if dep.name not in self._installed:
                key = (name, dep.name)

                if key not in self._deferred:
                    log.debug('%s needs %s; deferring install' %
                              (name, dep.name))
                    self._deferred.add(key)
                    deferred = True
                else:
                    log.debug('%s needs %s, but install was already deferred' %
                              (name, dep.name))

                deps_installed = False
                names.insert(insert_i, dep.name)
                insert_i += 1

        if deferred:
            names.insert(insert_i, name)
            return

        pre_build_deps = pkg.dependencies[:]

        if name not in self._installed:
            try:
                pkg.pipeline.run_to('build')
                pkg.pipeline.run_to('install')
                pkg.pipeline.run_to('relocate')
            except AlreadyInstalled:
                pass
            pkg.pipeline.run_to('optlink')
            self._installed.add(name)

        # We need to build/install Homebrew packages before we can decide
        # which of their optional dependencies will be used. The relocation
        # process can also determine other dependencies. We need to run
        # these new ones through the pipe too.
        if pkg.dependencies != pre_build_deps:
            log.debug('%s has changed dependencies after build/install' % name)
            names.insert(insert_i, name)
            return

        if pkg.virtual:
            return

        if name not in self._persisted:

            # We need to wait to persist until all dependencies are
            # installed.
            if not deps_installed:
                log.debug('%s cannot persist without all dependencies' %
                          (name, ))
                names.insert(insert_i, name)
                return

            pkg.persist_in_db()
            pkg.shared_libraries()  # TODO: Move this earlier?
            self._persisted.add(name)

        if link_env and name not in self._linked:
            try:
                pkg.link(link_env, force=relink_this)
            except AlreadyLinked:
                pass
            self._linked.add(name)
Code example #25
    def install(self,
                names=None,
                link_env=None,
                reinstall=False,
                relink=False,
                no_deps=False):

        # I'd love to split this method into an "install" and "link" step, but
        # then we'd need to reimplement the dependency resolution. That would
        # be a good idea to do anyways, but... meh.

        if isinstance(names, str):
            names = [names]
        names = list(names if names else self.keys())

        for name in names:
            if name not in self:
                raise KeyError(name)

        if not isinstance(reinstall, set):
            reinstall = set(
                names if no_deps else self.keys()) if reinstall else set()
        if not isinstance(relink, set):
            relink = set(
                names if no_deps else self.keys()) if relink else set()

        while names:
            name = names.pop(0)

            self._parent_names.setdefault(name, None)

            parent_chain = []
            tip = name
            while tip and tip not in parent_chain:
                parent_chain.append(tip)
                tip = self._parent_names.get(tip)
            parent_chain = parent_chain[1:]

            print(
                '==>', style(name, 'blue'),
                style('(%s)' % ' < '.join(parent_chain), faint=True)
                if parent_chain else '')

            with log.indent():

                # Avoid infinite error loops.
                if name in self._errored:
                    log.warning('Skipping due to previous error.')
                    continue

                try:
                    self._install_one(names, name, link_env, reinstall, relink,
                                      no_deps)
                except PipelineError as e:
                    self._errored.add(name)
                    log.error(str(e))
                    continue
                except Exception as e:
                    self._errored.add(name)
                    print_cli_exc(e, verbose=True)
                    log.exception('Exception while processing %s' % name)
                    continue

        if self._errored:
            log.warning('There were errors in: %s' %
                        ', '.join(sorted(self._errored)))
Code example #26
    def inspect(self, pkg):

        if self.setup_path:

            stdout = call_setup_py(self.setup_path, ['egg_info'],
                                   env=pkg.fresh_environ(),
                                   stdout=True).decode()
            m = re.search(r'writing requirements to (.+?)\n', stdout)
            if not m:
                log.debug("No requirements")
                return

            requirements_path = os.path.join(os.path.dirname(self.setup_path),
                                             m.group(1))
            for line in open(requirements_path):

                # Stop once we get to the "extras".
                if line.startswith('['):
                    break

                m = re.match(r'^([\w\.-]+)', line)
                if m:
                    name = m.group(1).lower()
                    log.debug('%s depends on %s' % (pkg.name, name))
                    pkg.add_dependency(name=name, url='pypi:%s' % name.lower())

        if self.dist_info_dir:

            for line in open(os.path.join(self.dist_info_dir, 'METADATA')):

                line = line.strip()
                if not line:
                    break  # We're at the end of the headers.

                key, value = line.split(': ', 1)
                key = key.lower()

                if key == 'requires-dist':

                    # Environmental markers look like `FOO; extra == 'BAR'`.
                    if ';' in value:

                        value, raw_marker = value.split(';')
                        value = value.strip()

                        # We delay the import just in case the bootstrap is borked.
                        from packaging.markers import Marker

                        marker = Marker(raw_marker)
                        if not marker.evaluate({'extra': None}):
                            continue

                    m = re.match(r'([\w-]+)(?:\s+\(([^)]+)\))?', value)
                    if not m:
                        log.warning(
                            'Could not parse requires-dist {!r}'.format(value))
                        continue

                    dep_name, version_expr = m.groups()
                    pkg.add_dependency(
                        name=dep_name,
                        url='pypi:{}'.format(dep_name),
                        version=version_expr,
                    )
Code example #27
File: pypi.py  Project: immersionroom/vee
    def fetch(self, pkg):

        meta = self._get_meta(pkg)

        all_releases = [(Version(v), rs) for v, rs in meta['releases'].items()]
        all_releases.sort(reverse=True)

        if not all_releases:
            raise ValueError('no releases of {} (any version) on the PyPI'.format(self.name))

        if pkg.version:
            version_expr = VersionExpr(pkg.version)
            matching_releases = [(v, rs) for v, rs in all_releases if version_expr.eval(v)]
            if not matching_releases:
                for v, rs in all_releases:
                    print(v)
                raise ValueError('no releases of {} {} on the PyPI'.format(self.name, pkg.version))

        else:
            matching_releases = all_releases

        supported_tags = get_supported_tags()

        usable_releases = []
        for version, releases in matching_releases:

            for release in releases:

                if release['packagetype'] == 'sdist':
                    usable_releases.append((version, 0, release))
                    continue

                elif release['packagetype'] == 'bdist_wheel':
                    m = re.match(r'^(.+)-([^-]+)-([^-]+)-([^-]+)-([^-]+)\.whl$', release['filename'])
                    if not m:
                        log.warning("Could not parse wheel filename: {}".format(release['filename']))
                        continue
                    
                    name, version_tag, python_tag, abi_tag, platform_tags = m.groups()

                    # Platform tags can have multiple separated by dots.
                    for platform_tag in platform_tags.split('.'):
                        tags = (python_tag, abi_tag, platform_tag)
                        if tags in supported_tags:
                            break
                    else:
                        continue

                    usable_releases.append((version, 1, release))

        if not usable_releases:
            raise ValueError('no usable release of %s %s on the PyPI' % (self.name, pkg.version if pkg.version else '(any version)'))
        usable_releases.sort(key=lambda x: x[:2])

        version, _, release = usable_releases[-1]

        pkg.version = str(version)
        
        if release.get('md5_digest'):
            pkg.checksum = 'md5:%s' % release['md5_digest']

        pkg.package_name = os.path.join(self.name, os.path.basename(release['url']))
        pkg._assert_paths(package=True)

        if os.path.exists(pkg.package_path):
            log.info(style_note('Already downloaded', release['url']))
            return
        
        log.info(style_note('Downloading', release['url']))
        download(release['url'], pkg.package_path)
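The wheel-filename regex in the loop above splits a name such as numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl into distribution name, version, and python/ABI/platform tags; the platform field may pack several dot-separated tags, which is why the code iterates over platform_tags.split('.'). A quick illustration:

import re

filename = 'numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl'
m = re.match(r'^(.+)-([^-]+)-([^-]+)-([^-]+)-([^-]+)\.whl$', filename)
name, version_tag, python_tag, abi_tag, platform_tags = m.groups()
# name='numpy', version_tag='1.24.4', python_tag='cp39', abi_tag='cp39'
for platform_tag in platform_tags.split('.'):
    print((python_tag, abi_tag, platform_tag))
# ('cp39', 'cp39', 'manylinux_2_17_x86_64')
# ('cp39', 'cp39', 'manylinux2014_x86_64')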
Code example #28
File: libs.py  Project: immersionroom/vee
def _relocate_darwin_library(lib_path, con, flags, include, exclude, dry_run,
                             target_cache):

    auto = 'auto' in flags
    lib_id, lib_deps = get_dependencies(lib_path)

    id_versions = set(
        name_variants(os.path.basename(lib_id),
                      version_only=True)) if lib_id else set()
    lib_versions = set(
        name_variants(os.path.basename(lib_path), version_only=True))

    cmd = ['install_name_tool']

    if lib_id != lib_path:
        log.info('id %s' % (lib_path), verbosity=1)
        cmd.extend(('-id', lib_path))

    lib_def, lib_undef = get_symbols(lib_path)

    for dep_i, dep_path in enumerate(lib_deps):

        if dep_path == lib_id:
            log.warning('The ID is included?! %s' % lib_path)
            cmd.extend(('-change', dep_path, lib_path))
            continue

        # If the dependency is similarly named to the library itself, then we
        # assume it is its own dependency. Which I don't understand...
        dep_versions = set(
            name_variants(os.path.basename(dep_path), version_only=True))
        if dep_versions.intersection(id_versions) or dep_versions.intersection(
                lib_versions):
            log.warning('Library depends on itself?! %s' % dep_path)
            cmd.extend(('-change', dep_path, lib_path))
            continue

        do_exclude = any(dep_path.startswith(x) for x in exclude)
        if not do_exclude and os.path.exists(dep_path):
            log.debug('skipping %s' % dep_path)
            continue

        dep_name = os.path.basename(dep_path)

        targets = []

        for variant in name_variants(dep_name):
            if variant in target_cache:
                targets.extend(target_cache[variant])
            if auto:
                cur = con.execute(
                    'SELECT path FROM shared_libraries WHERE name = ? ORDER BY created_at DESC',
                    [variant])
                new_targets = target_cache.setdefault(variant, [])
                new_targets.extend([row[0] for row in cur])
                targets.extend(new_targets)

        # Go searching for the "best" relocation target.
        # The one with the most defined symbols missing from the lib wins
        # (essentially; it is more complex than that below). We also, dubiously,
        # accept libraries which provide no matching symbols as long as they
        # don't introduce any conflicts. There are a TON of these in FFmpeg.
        best_score = -1
        best_target = None
        seen_targets = set()
        for target in targets:
            if target in seen_targets:
                continue
            seen_targets.add(target)

            if not os.path.exists(target):
                continue

            tar_def, tar_undef = get_symbols(target)

            pros = len(tar_def.intersection(lib_undef))
            shared = len(tar_def.intersection(lib_def))
            cons = len(lib_undef.intersection(lib_def))
            log.debug('+%d ~%d -%d %s' % (pros, shared, cons, target),
                      verbosity=2)
            if pros - shared - cons > best_score:
                best_score = pros - shared - cons
                best_target = (pros, shared, cons, target)

        if best_target is None:
            log.warning('No relocation targets for %s' % dep_path)
            continue
        if best_score < 0:
            log.warning('No positive relocation targets for %s' % dep_path)
            continue

        if best_target[1] or best_target[2]:
            log.warning('Best target has %s collisions for %s' %
                        (best_target[1] + best_target[2], dep_path))

        target = best_target[3]

        log.info('change %s -> %s' % (dep_name, target), verbosity=1)

        cmd.extend(('-change', dep_path, target))

    if len(cmd) > 1 and not dry_run:

        cmd.append(lib_path)

        s = os.stat(lib_path)
        if not s.st_mode & stat.S_IWUSR:
            os.chmod(lib_path, s.st_mode | stat.S_IWUSR)
            call(cmd)
            os.chmod(lib_path, s.st_mode)
        else:
            call(cmd)
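After install_name_tool rewrites the ID and the -change entries, the result can be checked with otool -L, which lists the load commands the dynamic linker will resolve. A small hedged sketch of such a verification helper (the function name is illustrative):

import subprocess

def get_load_paths(lib_path):
    # otool -L prints the library itself on the first line, then one
    # indented "<path> (compatibility version ...)" line per dependency.
    out = subprocess.check_output(['otool', '-L', lib_path]).decode()
    return [line.strip().split(' (')[0]
            for line in out.splitlines()[1:] if line.strip()]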
Code example #29
File: gc.py  Project: westernx/vee
def gc(args):

    home = args.assert_home()
    con = home.db.connect()

    with con:

        repo_ids = {}
        for row in con.execute('SELECT id, name from repositories'):
            repo_ids[row['name']] = row['id']

        envs_by_id = {}

        log.info(style_note('Cleaning environments'))
        for row in con.execute(
                'SELECT id, name, path, repository_id from environments ORDER BY created_at ASC'
        ):

            id_, name, path, repo_id = row

            if not os.path.exists(path):
                log.info('environment does not exist at %s; deleting' % (path))
                if not args.dry_run:
                    delete_environment(con, id_)
                continue

            # Track for later.
            envs_by_id.setdefault(repo_id, []).append((id_, name, path))

            # The rest is making sure the repo_id and commit are correct.
            if repo_id:
                continue

            m = re.match(r'(\w+)/commits/([0-9a-f]{7,8}(?:-dirty)?)$', name)
            if not m:
                log.warning(
                    '%s (%d) does not appear to be managed by git; skipping' %
                    (name, id_))
                continue

            repo_name, commit_name = m.groups()
            repo_id = repo_ids.get(repo_name)
            if not repo_id:
                log.warning('repo %s does not exist for %s (%d); skipping' %
                            (repo_name, name, id_))
                continue

            log.info('Fixing repo relationship for %s (%d)' % (name, id_))
            if not args.dry_run:
                con.execute(
                    'UPDATE environments SET repository_id = ?, repository_commit = ? WHERE id = ?',
                    [repo_id, commit_name, id_])

        if args.prune_environments:
            log.info(style_note('Pruning old environments'))
            for repo_id, envs in sorted(envs_by_id.iteritems()):
                for id_, name, path in envs[:-args.keep_latest]:
                    log.info('Deleting %s (%d)' % (name, id_))
                    if not args.dry_run:
                        shutil.rmtree(path)
                        delete_environment(con, id_)

        log.info(style_note('Cleaning installed packages'))
        package_ids = []
        install_paths_to_id = {}
        for row in con.execute(
                'SELECT id, name, install_path, build_path from packages ORDER by created_at DESC'
        ):

            id_, name, install_path, build_path = row
            log.debug('%s %s %s' % (id_, name, install_path))

            if not os.path.exists(install_path):
                log.info('%s no longer exists at %s; deleting' %
                         (name, install_path))
                if not args.dry_run:
                    delete_package(con, id_)
                    continue

            real_id = install_paths_to_id.get(install_path)
            if real_id:
                log.info('%s %d is a duplicate of %s; deleting' %
                         (name, id_, real_id))
                # TODO: update any links or package_dependencies which point to this.
                if not args.dry_run:
                    delete_package(con, id_)
                continue
            install_paths_to_id[install_path] = id_

            if args.prune_orphaned_packages:
                row = con.execute(
                    'SELECT count(1) FROM links WHERE package_id = ?',
                    [id_]).fetchone()
                if not row[0]:
                    log.info('%s (%d) is not linked; deleting' % (name, id_))
                    if not args.dry_run:
                        if build_path and os.path.exists(build_path):
                            shutil.rmtree(build_path)
                        shutil.rmtree(install_path)
                        delete_package(con, id_)
Code example #30
    def upgrade(self,
                dirty=False,
                subset=None,
                reinstall=False,
                relink=False,
                no_deps=False,
                force_branch_link=True):

        self.clone_if_not_exists()

        try:
            head = self.head
        except CalledProcessError:
            log.warning(style_warning('no commits in repository'))
            head = None

        try:
            remote_head = self.rev_parse('%s/%s' %
                                         (self.remote_name, self.branch_name))
        except ValueError:
            log.warning(
                style_warning('tracked %s/%s does not exist in self' %
                              (self.remote_name, self.branch_name)))
            remote_head = None

        if remote_head and head != remote_head:
            log.warning(
                style_warning('%s repo not checked out to %s/%s' %
                              (self.name, self.remote_name, self.branch_name)))

        dirty = bool(list(self.status()))
        if not dirty and self.is_dirty():
            log.error('%s repo is dirty; force with --dirty' % self.name)
            return False

        env = self.get_environment()

        req_set = self.load_requirements()
        pkg_set = PackageSet(env=env, home=self.home)

        # Register the whole set, so that dependencies are pulled from here instead
        # of weakly resolved from installed packages.
        # TODO: This blanket reinstalls things, even if no_deps is set.
        pkg_set.resolve_set(req_set, check_existing=not reinstall)

        # Install and/or link.
        pkg_set.install(subset or None,
                        link_env=env,
                        reinstall=reinstall,
                        relink=relink,
                        no_deps=no_deps)

        if pkg_set._errored and not force_branch_link:
            log.warning(
                style_warning(
                    "Not creating branch or version links; force with --force-branch-link"
                ))
            return False

        # Create a symlink by branch.
        path_by_branch = self.home._abs_path('environments', self.name,
                                             self.branch_name)
        if os.path.lexists(path_by_branch):
            os.unlink(path_by_branch)
        makedirs(os.path.dirname(path_by_branch))
        os.symlink(env.path, path_by_branch)

        # Create a symlink by version.
        version = req_set.headers.get('Version')
        if version:
            path_by_version = self.home._abs_path(
                'environments', self.name, 'versions',
                version.value + ('-dirty' if dirty else ''))
            if os.path.lexists(path_by_version):
                os.unlink(path_by_version)
            makedirs(os.path.dirname(path_by_version))
            os.symlink(env.path, path_by_version)

        return True