Example #1
def relocate(root, con, spec=None, dry_run=False, target_cache=None):

    target_cache = {} if target_cache is None else target_cache

    flags, include, exclude = _parse_spec(spec, root)

    if 'linux' in flags and not sys.platform.startswith('linux'):
        return
    if 'darwin' in flags and sys.platform != 'darwin':
        return

    if not (include or 'auto' in flags):
        raise ValueError('no libraries to include')

    # Find everything in include on OS X, since we need to actually find the
    # individual dependencies.
    if sys.platform == 'darwin':
        for path in include:
            for found in find_shared_libraries(path):
                log.debug('found %s' % found)
                for name in name_variants(os.path.basename(found)):
                    target_cache.setdefault(name, []).append(found)

    for lib_path in find_shared_libraries(root):
        log.info(lib_path)
        with log.indent():
            if sys.platform == 'darwin':
                _relocate_darwin_library(lib_path, con, flags, include,
                                         exclude, dry_run, target_cache)
            else:
                _relocate_linux_library(lib_path, include, dry_run)

    if 'pkgconfig' in flags:
        relocate_pkgconfig(root)
Example #2
    def iter_development_packages(self, exists=True, search=True):

        if not search:
            # We used to have a development_packages table, and searching was
            # something that had to be requested.
            log.debug(
                "iter_development_packages(..., search=False) is deprecated.")

        for root in self.dev_search_path:

            if not os.path.exists(root):
                continue

            for name in os.listdir(root):

                path = os.path.join(root, name)

                if name.endswith('.vee-dev.json'):
                    yield DevPackage.from_tag(path, home=self)
                    continue

                # Not used yet.
                sub_path = os.path.join(path, '.vee-dev.json')
                if os.path.exists(sub_path):
                    yield DevPackage.from_tag(sub_path, home=self)
Example #3
File: libs.py Project: westernx/vee
def get_installed_shared_libraries(con, package_id, install_path, rescan=False):

    with con:

        # Determine if we should scan.
        do_scan = rescan
        if not rescan:
            row = con.execute('SELECT scanned_for_libraries FROM packages WHERE id = ?', [package_id]).fetchone()
            do_scan = not row[0]

        # Return existing results if we should not scan.
        if not do_scan:
            cur = con.execute('SELECT path FROM shared_libraries WHERE package_id = ?', [package_id])
            return [row[0] for row in cur]

        # Blow out any existing results, and then scan.
        cur = con.execute('DELETE FROM shared_libraries WHERE package_id = ?', [package_id])

        res = []
        for lib_path in find_shared_libraries(install_path):
            log.debug('Found shared library %s' % lib_path)
            res.append(lib_path)
            con.execute('''INSERT INTO shared_libraries (package_id, name, path) VALUES (?, ?, ?)''', [
                package_id, os.path.basename(lib_path), lib_path,
            ])
        
        con.execute('UPDATE packages SET scanned_for_libraries = 1 WHERE id = ?', [package_id])

        return res
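
The scan-and-cache pattern above is easy to exercise in isolation. Below is a minimal sketch of the same idea against a throwaway in-memory SQLite schema; the table layout and the scan_disk() helper are invented stand-ins, not vee's actual schema or API.

# Minimal sketch of the "scan once, then serve cached rows" pattern above.
# The schema and scan_disk() are stand-ins, not vee's real code.
import sqlite3

con = sqlite3.connect(':memory:')
con.executescript('''
    CREATE TABLE packages (id INTEGER PRIMARY KEY, scanned_for_libraries INTEGER DEFAULT 0);
    CREATE TABLE shared_libraries (package_id INTEGER, name TEXT, path TEXT);
    INSERT INTO packages (id) VALUES (1);
''')

def scan_disk():
    # Stand-in for find_shared_libraries(install_path).
    return ['/opt/pkg/lib/libfoo.dylib', '/opt/pkg/lib/libbar.dylib']

def get_libraries(con, package_id, rescan=False):
    with con:
        row = con.execute('SELECT scanned_for_libraries FROM packages WHERE id = ?',
                          [package_id]).fetchone()
        if row[0] and not rescan:
            cur = con.execute('SELECT path FROM shared_libraries WHERE package_id = ?',
                              [package_id])
            return [r[0] for r in cur]
        con.execute('DELETE FROM shared_libraries WHERE package_id = ?', [package_id])
        paths = scan_disk()
        for path in paths:
            con.execute('INSERT INTO shared_libraries (package_id, name, path) VALUES (?, ?, ?)',
                        [package_id, path.rsplit('/', 1)[-1], path])
        con.execute('UPDATE packages SET scanned_for_libraries = 1 WHERE id = ?', [package_id])
        return paths

print(get_libraries(con, 1))  # first call scans the disk
print(get_libraries(con, 1))  # second call is served from the cached rows
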
Example #4
    def persist_in_db(self, con=None, force=False):

        if not self.is_dirty and not force:
            return self.id

        data = {}
        for col in self.__columns__:
            try:
                if col._persist:
                    data[col.name] = col._persist(self)
                elif col._getter:
                    data[col.name] = col._getter(self)
                else:
                    data[col.name] = self.__dict__[col.name]
            except KeyError:
                pass

        con = con or self._connect()
        if self.id:
            con.update(self.__tablename__, data, {'id': self.id})
        else:
            self.id = con.insert(self.__tablename__, data)
            log.debug('%s added to %s with ID %d' %
                      (self.__class__.__name__, self.__tablename__, self.id))
        self.is_dirty = False

        return self.id
Example #5
    def inspect(self):

        pkg = self.package

        if self.setup_path and not self.egg_path:

            log.info(style_note('Building Python egg-info'))
            res = call_setup_py(self.setup_path, ['egg_info'],
                                env=pkg.fresh_environ(),
                                indent=True,
                                verbosity=1)
            if res:
                raise RuntimeError('Could not build Python package')

            self.egg_path = find_in_tree(pkg.build_path, '*.egg-info', 'dir')
            if not self.egg_path:
                log.warning('Could not find newly created *.egg-info')

        if self.egg_path:
            requires_path = os.path.join(self.egg_path, 'requires.txt')
            if os.path.exists(requires_path):
                for line in open(requires_path, 'rb'):
                    line = line.strip()
                    if not line:
                        continue
                    if line.startswith('['):
                        break
                    name = re.split(r'\W', line)[0].lower()
                    log.debug('%s depends on %s' % (pkg.name, name))
                    pkg.dependencies.append(
                        Package(name=name, url='pypi:%s' % name))
Example #6
File: home.py Project: westernx/vee
    def get_env_repo(self, name=None):

        name = name or self.default_repo_name

        if name:
            row = self.db.execute(
                'SELECT * FROM repositories WHERE name = ? LIMIT 1',
                [name]).fetchone()
            if not row:
                raise ValueError('%r repo does not exist' % name)

        else:
            # Grab the default repo if possible, otherwise make sure there is
            # only one.
            rows = self.db.execute(
                'SELECT * FROM repositories ORDER BY is_default DESC LIMIT 2'
            ).fetchall()
            if not rows:
                raise ValueError('no repositories exist')
            elif rows[0]['is_default']:
                row = rows[0]
            elif len(rows) == 1:
                row = rows[0]
            else:
                raise ValueError('multiple repositories with no default')

        env_repo = EnvironmentRepo(row, home=self)
        if not env_repo.exists:
            log.debug('Looking for env_repo: %s' % env_repo.work_tree)
            raise ValueError('%r repo does not exist' % env_repo.name)

        return env_repo
Example #7
def edit(args):

    home = args.assert_home()
    repo = home.get_repo(args.repo)

    cmd = []
    cmd.extend(shlex.split(os.environ.get('EDITOR', 'vim')))
    cmd.append(os.path.join(repo.work_tree, 'manifest.txt'))

    log.debug(cmd, verbosity=1)

    os.execvp(cmd[0], cmd)
Example #8
File: edit.py Project: westernx/vee
def edit(args):

    home = args.assert_home()
    env_repo = home.get_env_repo(args.repo)

    cmd = []
    cmd.extend(shlex.split(os.environ['EDITOR']))
    cmd.append(os.path.join(env_repo.work_tree, 'requirements.txt'))

    log.debug(cmd, verbosity=1)

    os.execvp(cmd[0], cmd)
Example #9
 def insert(self, table, data, on_conflict=None):
     pairs = sorted(data.iteritems())
     query = 'INSERT %s INTO %s (%s) VALUES (%s)' % (
         'OR ' + on_conflict if on_conflict else '',
         escape_identifier(table),
         ','.join(escape_identifier(k) for k, v in pairs),
         ','.join('?' for _ in pairs),
     )
     params = [v for k, v in pairs]
     log.debug('%s %r' % (query, params))
     self.execute(query, params)
     return self.lastrowid
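
For reference, a call like insert('packages', {...}, on_conflict='REPLACE') builds a fully parameterized statement. The sketch below reproduces only the string-building step; the escape_identifier here simply double-quotes names and is an invented stand-in for vee's helper, which may behave differently.

# Rough illustration of the statement the builder above produces.
def escape_identifier(name):
    # Invented stand-in: double-quote the identifier for SQLite.
    return '"%s"' % name.replace('"', '""')

data = {'name': 'libpng', 'path': '/opt/vee/installs/libpng'}
on_conflict = 'REPLACE'

pairs = sorted(data.items())
query = 'INSERT %s INTO %s (%s) VALUES (%s)' % (
    'OR ' + on_conflict if on_conflict else '',
    escape_identifier('packages'),
    ','.join(escape_identifier(k) for k, v in pairs),
    ','.join('?' for _ in pairs),
)
params = [v for k, v in pairs]

print(query)   # INSERT OR REPLACE INTO "packages" ("name","path") VALUES (?,?)
print(params)  # ['libpng', '/opt/vee/installs/libpng']
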
Example #10
def vee(args, environ=None, check=True, stdout=False):
    full_environ = os.environ.copy()
    full_environ.update(environ or {})
    full_environ.update(_environ_diff)
    log.debug('$ vee ' + ' '.join(args), name='vee.tests')
    if stdout:
        cmd = ['vee']
        cmd.extend(args)
        return subprocess.check_output(cmd, env=full_environ)
    res = _main(args, environ=full_environ)
    if check and res:
        raise ValueError('return code %d' % res)
    return res
Example #11
def call(cmd, **kwargs):

    # Log the call.
    kwargs.pop('silent', None) # B/C.
    VEE = os.environ.get('VEE')
    cmd_collapsed = [x.replace(VEE, '$VEE') if VEE else x for x in cmd]
    log.debug(
        '$ ' + ' '.join(cmd_collapsed),
        verbosity=2,
        _frame=kwargs.pop('_frame', 0) + 3
    )

    check = kwargs.pop('check', True)
    
    verbosity = kwargs.pop('verbosity', 0)
    indent = kwargs.pop('indent', False)
    if indent:
        indent = log.indent()
        indent.__enter__()

    pty = kwargs.pop('pty', None)
    stdout = _CallOutput(kwargs.pop('stdout', None), 'stdout', verbosity, pty=pty)
    stderr = _CallOutput(kwargs.pop('stderr', None), 'stderr', verbosity, pty=pty)

    proc = subprocess.Popen(cmd, stdout=stdout.slave_fd, stderr=stderr.slave_fd, bufsize=0, **kwargs)
    stdout.start(proc)
    stderr.start(proc)

    proc.wait()
    stdout.join()
    stderr.join()

    if indent:
        indent.__exit__(None, None, None)

    if (check or stdout.return_buffer or stderr.return_buffer) and proc.returncode:
        raise subprocess.CalledProcessError(proc.returncode, cmd)

    if stdout.return_buffer and stderr.return_buffer:
        return ''.join(stdout.buffer), ''.join(stderr.buffer)
    if stdout.return_buffer:
        return ''.join(stdout.buffer)
    if stderr.return_buffer:
        return ''.join(stderr.buffer)

    return proc.returncode
Example #12
    def rewrite_shebang(self, old_path, new_path):

        # Only care if it is at all executable.
        stat = os.stat(old_path)
        if not (stat.st_mode & 0o111):
            return

        # If it starts with a Python shebang, rewrite it.
        with open(old_path, 'rb') as old_fh:
            old_shebang = old_fh.readline()
            m = re.match(r'#!(|\S+/)([^\s/]+)', old_shebang)
            if not m:
                return

            new_bin = os.path.join(self.path, 'bin', m.group(2))
            if not os.path.exists(new_bin):
                return

            new_shebang = '#!%s%s' % (new_bin, old_shebang[m.end(2):])
            log.info('Rewriting shebang of %s' % old_path, verbosity=1)
            log.debug('New shebang: %s' % new_shebang.strip(), verbosity=1)

            self._assert_real_dir(os.path.dirname(new_path))

            # Due to the way _assert_real_dir works, we may have already
            # created a symlink in the location of the new_path which points to
            # the old_path. If we don't delete it first, then we will be
            # reading and writing the same file at the same time, and will only
            # get the shebang + 1024 bytes (the buffer size on my machine).
            if os.path.lexists(new_path):
                os.unlink(new_path)

            with open(new_path, 'wb') as new_fh:
                new_fh.write(new_shebang)
                new_fh.writelines(old_fh)
            try:
                shutil.copystat(old_path, new_path)
            except OSError as e:
                # These often come up when you are not the owner
                # of the file.
                log.exception('Could not copystat to %s' % new_path)
                if e.errno != errno.EPERM:
                    raise

            return True
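
The shebang regex above is terse; this standalone check (with made-up shebang lines) shows what the two groups capture and what is left over as interpreter arguments.

# Standalone look at the shebang regex used above; the example lines are made up.
import re

for shebang in ('#!/usr/bin/python2.7 -E\n',
                '#!/usr/local/bin/python\n',
                '#!python\n',
                '#!/bin/sh\n'):
    m = re.match(r'#!(|\S+/)([^\s/]+)', shebang)
    # group(1) is the directory part (possibly empty), group(2) the interpreter
    # name; old_shebang[m.end(2):] keeps any trailing arguments.
    print((m.group(1), m.group(2), shebang[m.end(2):].rstrip()))
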
Example #13
    def environ_diff(self):
        if self._environ_diff is None:

            self._environ_diff = {}
            for e in (self.base_environ, self.environ):
                for k, v in e.items():
                    self._environ_diff[k] = self.render_template(v, name=k)

            # Just for debugging...
            for k, v in sorted(self._environ_diff.items()):
                old_v = os.environ.get(k)
                if old_v is not None:
                    v = v.replace(old_v, '@')
                v = v.replace(self.home.root, '$VEE')
                log.debug('%s %s=%s' % (style('setenv', 'blue'), k, v),
                          verbosity=2)

        return self._environ_diff
Example #14
    def fetch(self):

        pkg = self.package
        meta = self._meta()

        all_releases = [(Version(v), rs)
                        for v, rs in meta['releases'].iteritems()]
        all_releases.sort(reverse=True)

        if pkg.revision:
            expr = VersionExpr(pkg.revision)
            matching_releases = [(v, rs) for v, rs in all_releases
                                 if expr.eval(v)]
            log.debug(
                '%s matched %s' %
                (expr, ','.join(str(v)
                                for v, _ in matching_releases) or 'none'))
        else:
            matching_releases = all_releases

        for version, releases in matching_releases:
            release = next(
                (r for r in releases if r['packagetype'] == 'sdist'), None)
            if release:
                break
        else:
            raise ValueError('no sdist for %s %s on PyPI' %
                             (self.name, expr if pkg.revision else '(any)'))

        pkg.revision = str(version)

        if release.get('md5_digest'):
            pkg.checksum = 'md5:%s' % release['md5_digest']

        pkg.package_name = os.path.join(self.name,
                                        os.path.basename(release['url']))
        pkg._assert_paths(package=True)

        if os.path.exists(pkg.package_path):
            log.info(style_note('Already downloaded', release['url']))
            return
        log.info(style_note('Downloading', release['url']))
        download(release['url'], pkg.package_path)
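
The selection above relies on vee's own Version and VersionExpr types. As a rough equivalent, the same "sort newest-first, take the first release with an sdist" logic can be sketched with the packaging library and invented release metadata.

# Sketch of the release-selection logic using packaging instead of vee's own
# Version/VersionExpr; the metadata below mimics PyPI's JSON shape.
from packaging.version import Version
from packaging.specifiers import SpecifierSet

releases = {
    '1.0.0': [{'packagetype': 'bdist_wheel'}],
    '1.1.0': [{'packagetype': 'sdist', 'url': 'https://example.org/pkg-1.1.0.tar.gz'}],
    '2.0.0': [{'packagetype': 'bdist_wheel'}],
}

all_releases = sorted(((Version(v), rs) for v, rs in releases.items()), reverse=True)
spec = SpecifierSet('<2.0')  # stand-in for pkg.revision / VersionExpr

for version, rs in all_releases:
    if version not in spec:
        continue
    release = next((r for r in rs if r['packagetype'] == 'sdist'), None)
    if release:
        break
else:
    raise ValueError('no matching sdist')

print((str(version), release['url']))  # ('1.1.0', 'https://example.org/pkg-1.1.0.tar.gz')
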
Example #15
 def persist_in_db(self, con=None):
     self._set_names(package=True, build=True, install=True)
     if not self.installed:
         log.warning('%s does not appear to be installed to %s' % (self.name, self.install_path))
         raise ValueError('cannot record requirement that is not installed')
     con = con or self.home.db.connect()
     with con:
         exists = self.id is not None
         res = super(Package, self).persist_in_db(con=con)
         if exists:
             con.execute('DELETE FROM package_dependencies WHERE depender_id = ?', [self.id])
         for dep in self.dependencies:
             dep_id = dep.id_or_persist(con=con)
             log.debug('Recorded %s -> %s dependency as %d' % (
                 self.name, dep.name, dep_id
             ))
             con.execute('INSERT INTO package_dependencies (depender_id, dependee_id) VALUES (?, ?)', [
                 self.id, dep_id
             ])
         return res
Example #16
    def load(self, step_name):

        try:
            return self.steps[step_name]
        except KeyError:
            pass

        # See if any previous steps provide it.
        step_i = self._step_index[step_name]
        for i in xrange(step_i - 1, -1, -1):
            prev_name = self._step_names[i]
            prev_step = self.steps[prev_name]
            step = prev_step.get_next(step_name)
            if step:
                log.debug('%s (%s) provided successor %s (%s) for %s' % (
                    prev_step.name, prev_name, step.name, step_name, self._package
                ), verbosity=2)
                self.steps[step_name] = step
                return step

        # Load the step classes.
        if not _step_classes:
            for ep in pkg_resources.iter_entry_points('vee_pipeline_steps'):
                cls = ep.load()
                cls.name = ep.name
                _step_classes.append(cls)
                _step_classes_by_name[ep.name] = cls
            _step_classes.sort(key=lambda cls: getattr(cls, 'factory_priority', 1), reverse=True)
        
        # Find something that self-identifies it provides this step.
        for cls in _step_classes:
            step = cls.factory(step_name, self._package)
            if step:
                log.debug('%s factory built %s for %s' % (step.name, step_name, self._package), verbosity=2)
                self.steps[step_name] = step
                return step

        raise ValueError('Cannot load %s step for %s' % (step_name, self._package.freeze()))
Example #17
    def resolve_existing(self, env=None, weak=False):
        """Check against the database to see if this was already installed."""

        if self.id is not None:
            raise ValueError('requirement already in database')

        cur = self.home.db.cursor()

        # Dependencies are deferred.
        deferred = self.url.startswith('deferred:')
        if deferred:
            deferred_id = int(self.url.split(':')[1])
            cur.execute('SELECT * from packages WHERE id = ?', [deferred_id])

        else:

            clauses = ['install_path IS NOT NULL']
            values = []
            if not weak and self.url:
                clauses.append('url = ?')
                values.append(self.url)
            for name in ('name', 'etag', 'install_name'):
                if getattr(self, name):
                    clauses.append('%s = ?' % name)
                    values.append(getattr(self, name))
            clause = ' AND '.join(clauses)

            # log.debug('SELECT FROM packages WHERE %s' % ' AND '.join('%s = %r' % (c.replace(' = ?', ''), v) for c, v in zip(clauses[1:], values)), verbosity=2)

            if env:
                values.append(env.id_or_persist())
                cur.execute(
                    '''
                    SELECT packages.*, links.id as link_id FROM packages
                    LEFT OUTER JOIN links ON packages.id = links.package_id
                    WHERE %s AND links.environment_id = ?
                    ORDER BY links.created_at DESC, packages.created_at DESC
                ''' % clause, values)
            else:
                cur.execute(
                    '''
                    SELECT * FROM packages
                    WHERE %s
                    ORDER BY packages.created_at DESC
                ''' % clause, values)

        for row in cur:

            # Make sure it has enough provisions.
            provides = Provision(row['provides'])
            if any(
                    provides.get(key, None) != value
                    for key, value in self.provides.items()):
                log.debug(
                    'Found %s (%d) whose provisions %s do not satisfy %s' % (
                        self.name or row['name'],
                        row['id'],
                        row['provides'],
                        self.provides,
                    ),
                    verbosity=2)
                continue

            # Make sure it has enough requirements.
            requires = RequirementSet(row['requires'])
            reqs_ok = True
            for name in self.requires:
                try:
                    reqs = requires[name]
                except KeyError:
                    reqs_ok = False
                    continue
                if any(
                        reqs.get(key, None) != value
                        for key, value in self.requires.items()):
                    reqs_ok = False
                    continue
            if not reqs_ok:
                log.debug(
                    'Found %s (%d) whose requirements %s do not satisfy %s' % (
                        self.name or row['name'],
                        row['id'],
                        row['requires'],
                        self.requires,
                    ),
                    verbosity=2)
                continue

            if not os.path.exists(row['install_path']):
                log.warning(
                    'Found %s (%d) does not exist at %s' %
                    (self.name or row['name'], row['id'], row['install_path']))
                continue
            break
        else:

            if deferred:
                raise ValueError(
                    'deferred package %d no longer exists; consider `vee gc`' %
                    deferred_id)
            return

        log.debug('Found %s (%d%s%s) at %s' % (
            self.name or row['name'],
            row['id'],
            ' weakly' if weak else '',
            ' in env %d' % env.id if env else '',
            row['install_path'],
        ))

        self.restore_from_row(row)
        self.link_id = row.get('link_id')

        if deferred:
            self._init_pipeline()

        self._load_dependencies(cur)

        return True
Example #18
    def _install_one(self, names, name, link_env, reinstall, relink, no_deps):

        try:
            pkg = self[name]
        except KeyError:
            # print(', '.join(sorted(self.keys())))
            raise

        reinstall_this = name in reinstall
        relink_this = name in relink

        if name not in self._extracted:
            try:
                # Between every step, take a look to see if we now have
                # enough information to tell that it is already installed.
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('fetch')
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('extract')
                pkg.assert_uninstalled(uninstall=reinstall_this)
                pkg.pipeline.run_to('inspect')
                pkg.assert_uninstalled(uninstall=reinstall_this)
            except AlreadyInstalled:
                self._installed.add(name)
            finally:
                self._extracted.add(name)

        # Loop around for dependencies. We insert dependencies, and the
        # package itself, back into the names to check. If we get back to
        # a name that we have already deferred in this manner, we continue
        # anyway, since that means there is a dependency cycle. We assume
        # that dependency order is resolved in the requirements file.
        deferred = False
        deps_installed = True
        insert_i = 0
        for i, dep in ([] if no_deps else enumerate(pkg.dependencies)):

            # Since resolution is rather loose in here (only by name, not URL)
            # we want to replace dependencies with their concrete variant to
            # ease recording that into the database.
            dep = self.resolve(dep, weak=True)
            pkg.dependencies[i] = dep
            self._parent_names.setdefault(dep.name, pkg.name)

            # We must check if the dependency errored, otherwise we will still
            # end up in an infinite loop.
            if dep.name in self._errored:
                log.warning('Skipping due to error in %s' % dep.name)
                self._errored.add(pkg.name)
                return

            if dep.name not in self._installed:
                key = (name, dep.name)

                if key not in self._deferred:
                    log.debug('%s needs %s; deferring install' %
                              (name, dep.name))
                    self._deferred.add(key)
                    deferred = True
                else:
                    log.debug('%s needs %s, but install was already deferred' %
                              (name, dep.name))

                deps_installed = False
                names.insert(insert_i, dep.name)
                insert_i += 1

        if deferred:
            names.insert(insert_i, name)
            return

        pre_build_deps = pkg.dependencies[:]

        if name not in self._installed:
            try:
                pkg.pipeline.run_to('build')
                pkg.pipeline.run_to('install')
                pkg.pipeline.run_to('relocate')
            except AlreadyInstalled:
                pass
            pkg.pipeline.run_to('optlink')
            self._installed.add(name)

        # We need to build/install Homebrew packages before we can decide
        # which of their optional dependencies will be used. The relocation
        # process can also determine other dependencies. We need to run
        # these new ones through the pipe too.
        if pkg.dependencies != pre_build_deps:
            log.debug('%s has changed dependencies after build/install' % name)
            names.insert(insert_i, name)
            return

        if pkg.virtual:
            return

        if name not in self._persisted:

            # We need to wait to persist until all dependencies are
            # installed.
            if not deps_installed:
                log.debug('%s cannot persist without all dependencies' %
                          (name, ))
                names.insert(insert_i, name)
                return

            pkg.persist_in_db()
            pkg.shared_libraries()  # TODO: Move this earlier?
            self._persisted.add(name)

        if link_env and name not in self._linked:
            try:
                pkg.link(link_env, force=relink_this)
            except AlreadyLinked:
                pass
            self._linked.add(name)
Example #19
def call(cmd, **kwargs):

    # Log the call.
    kwargs.pop('silent', None) # B/C.
    VEE = os.environ.get('VEE')
    cmd_collapsed = [x.replace(VEE, '$VEE') if VEE else x for x in cmd]
    log.debug(
        '$ ' + ' '.join(cmd_collapsed),
        verbosity=2,
        _frame=kwargs.pop('_frame', 0) + 3
    )

    check = kwargs.pop('check', True)
    decode = kwargs.pop('decode', False)

    verbosity = kwargs.pop('verbosity', 0)
    indent = kwargs.pop('indent', False)
    if indent:
        indent = log.indent()
        indent.__enter__()

    pty = kwargs.pop('pty', None)
    stdout = _CallOutput(kwargs.pop('stdout', None), 'stdout', verbosity, pty=pty)
    stderr = _CallOutput(kwargs.pop('stderr', None), 'stderr', verbosity, pty=pty)

    if kwargs.pop('vee_in_env', False):

        vee_src = os.path.abspath(os.path.join(__file__, '..', '..'))

        env = kwargs.get('env', os.environ).copy()
        env['PYTHONPATH'] = join_env_path(
            vee_src,
            env.get('PYTHONPATH')
        )
        env['PATH'] = join_env_path(os.path.join(vee_src, 'bin'), env.get('PATH'))
        kwargs['env'] = env

    proc = subprocess.Popen(cmd, stdout=stdout.slave_fd, stderr=stderr.slave_fd, bufsize=0, **kwargs)
    stdout.start(proc)
    stderr.start(proc)

    proc.wait()
    stdout.join()
    stderr.join()

    if indent:
        indent.__exit__(None, None, None)

    if (check or stdout.return_buffer or stderr.return_buffer) and proc.returncode:
        raise subprocess.CalledProcessError(proc.returncode, cmd)

    # Decode if requested.
    if stdout.return_buffer:
        stdout_res = b''.join(stdout.buffer)
        if decode:
            stdout_res = stdout_res.decode()
    if stderr.return_buffer:
        stderr_res = b''.join(stderr.buffer)
        if decode:
            stderr_res = stderr_res.decode()

    if stdout.return_buffer and stderr.return_buffer:
        return stdout_res, stderr_res
    if stdout.return_buffer:
        return stdout_res
    if stderr.return_buffer:
        return stderr_res

    return proc.returncode
Example #20
    def inspect(self, pkg):

        if self.setup_path:

            stdout = call_setup_py(self.setup_path, ['egg_info'],
                                   env=pkg.fresh_environ(),
                                   stdout=True).decode()
            m = re.search(r'writing requirements to (.+?)\n', stdout)
            if not m:
                log.debug("No requirements")
                return

            requirements_path = os.path.join(os.path.dirname(self.setup_path),
                                             m.group(1))
            for line in open(requirements_path):

                # Stop once we get to the "extras".
                if line.startswith('['):
                    break

                m = re.match(r'^([\w\.-]+)', line)
                if m:
                    name = m.group(1).lower()
                    log.debug('%s depends on %s' % (pkg.name, name))
                    pkg.add_dependency(name=name, url='pypi:%s' % name.lower())

        if self.dist_info_dir:

            for line in open(os.path.join(self.dist_info_dir, 'METADATA')):

                line = line.strip()
                if not line:
                    break  # We're at the end of the headers.

                key, value = line.split(': ', 1)
                key = key.lower()

                if key == 'requires-dist':

                    # Environmental markers look like `FOO; extra == 'BAR'`.
                    if ';' in value:

                        value, raw_marker = value.split(';')
                        value = value.strip()

                        # We delay the import just in case the bootstrap is borked.
                        from packaging.markers import Marker

                        marker = Marker(raw_marker)
                        if not marker.evaluate({'extra': None}):
                            continue

                    m = re.match(r'([\w-]+)(?:\s+\(([^)]+)\))?', value)
                    if not m:
                        log.warning(
                            'Could not parse requires-dist {!r}'.format(value))
                        continue

                    dep_name, version_expr = m.groups()
                    pkg.add_dependency(
                        name=dep_name,
                        url='pypi:{}'.format(dep_name),
                        version=version_expr,
                    )
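
The Requires-Dist branch above combines an environment-marker check with a name/version regex. A standalone run over invented METADATA lines shows how the two interact; the Marker call mirrors the one in the code above.

# Standalone run of the Requires-Dist parsing above, on invented METADATA lines.
import re
from packaging.markers import Marker

lines = [
    "Requires-Dist: six (>=1.10)",
    "Requires-Dist: pytest ; extra == 'test'",
    "Requires-Dist: colorama ; sys_platform == 'win32'",
]

for line in lines:
    key, value = line.split(': ', 1)
    if key.lower() != 'requires-dist':
        continue
    if ';' in value:
        value, raw_marker = value.split(';')
        value = value.strip()
        if not Marker(raw_marker).evaluate({'extra': None}):
            continue  # extras and non-matching platforms are skipped
    m = re.match(r'([\w-]+)(?:\s+\(([^)]+)\))?', value)
    if m:
        print(m.groups())
# ('six', '>=1.10')
# the colorama line only prints when running on win32
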
Example #21
def _relocate_darwin_library(lib_path, con, flags, include, exclude, dry_run,
                             target_cache):

    auto = 'auto' in flags
    lib_id, lib_deps = get_dependencies(lib_path)

    id_versions = set(
        name_variants(os.path.basename(lib_id),
                      version_only=True)) if lib_id else set()
    lib_versions = set(
        name_variants(os.path.basename(lib_path), version_only=True))

    cmd = ['install_name_tool']

    if lib_id != lib_path:
        log.info('id %s' % (lib_path), verbosity=1)
        cmd.extend(('-id', lib_path))

    lib_def, lib_undef = get_symbols(lib_path)

    for dep_i, dep_path in enumerate(lib_deps):

        if dep_path == lib_id:
            log.warning('The ID is included?! %s' % lib_path)
            cmd.extend(('-change', dep_path, lib_path))
            continue

        # If the dependency is similarly named to the library itself, then we
        # assume it is its own dependency. Which I don't understand...
        dep_versions = set(
            name_variants(os.path.basename(dep_path), version_only=True))
        if dep_versions.intersection(id_versions) or dep_versions.intersection(
                lib_versions):
            log.warning('Library depends on itself?! %s' % dep_path)
            cmd.extend(('-change', dep_path, lib_path))
            continue

        do_exclude = any(dep_path.startswith(x) for x in exclude)
        if not do_exclude and os.path.exists(dep_path):
            log.debug('skipping %s' % dep_path)
            continue

        dep_name = os.path.basename(dep_path)

        targets = []

        for variant in name_variants(dep_name):
            if variant in target_cache:
                targets.extend(target_cache[variant])
            if auto:
                cur = con.execute(
                    'SELECT path FROM shared_libraries WHERE name = ? ORDER BY created_at DESC',
                    [variant])
                new_targets = target_cache.setdefault(variant, [])
                new_targets.extend([row[0] for row in cur])
                targets.extend(new_targets)

        # Go searching for the "best" relocation target.
        # The one with the most defined symbols missing from the lib wins
        # (essentially; it is more complex than that below). We also, dubiously,
        # accept libraries which provide no matching symbols as long as they
        # don't introduce any conflicts. There are a TON of these in FFmpeg.
        best_score = -1
        best_target = None
        seen_targets = set()
        for target in targets:
            if target in seen_targets:
                continue
            seen_targets.add(target)

            if not os.path.exists(target):
                continue

            tar_def, tar_undef = get_symbols(target)

            pros = len(tar_def.intersection(lib_undef))
            shared = len(tar_def.intersection(lib_def))
            cons = len(lib_undef.intersection(lib_def))
            log.debug('+%d ~%d -%d %s' % (pros, shared, cons, target),
                      verbosity=2)
            if pros - shared - cons > best_score:
                best_score = pros - shared - cons
                best_target = (pros, shared, cons, target)

        if best_target is None:
            log.warning('No relocation targets for %s' % dep_path)
            continue
        if best_score < 0:
            log.warning('No positive relocation targets for %s' % dep_path)
            continue

        if best_target[1] or best_target[2]:
            log.warning('Best target has %s collisions for %s' %
                        (best_target[1] + best_target[2], dep_path))

        target = best_target[3]

        log.info('change %s -> %s' % (dep_name, target), verbosity=1)

        cmd.extend(('-change', dep_path, target))

    if len(cmd) > 1 and not dry_run:

        cmd.append(lib_path)

        s = os.stat(lib_path)
        if not s.st_mode & stat.S_IWUSR:
            os.chmod(lib_path, s.st_mode | stat.S_IWUSR)
            call(cmd)
            os.chmod(lib_path, s.st_mode)
        else:
            call(cmd)
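
The scoring heuristic described in the comment above boils down to set arithmetic over defined and undefined symbols. A toy example with invented symbol tables shows how one candidate wins; the scoring mirrors the expression in the loop above.

# Toy version of the symbol scoring above; symbol names and paths are invented.
lib_def = {'png_read', 'png_write'}
lib_undef = {'inflate', 'deflate'}  # symbols the library still needs

candidates = {
    '/opt/zlib/lib/libz.dylib': ({'inflate', 'deflate'}, set()),
    '/opt/libpng/lib/libpng.dylib': ({'png_read'}, {'inflate'}),
}

best_score, best_target = -1, None
for target, (tar_def, tar_undef) in sorted(candidates.items()):
    pros = len(tar_def & lib_undef)   # symbols the candidate would satisfy
    shared = len(tar_def & lib_def)   # symbols both already define (conflicts)
    cons = len(lib_undef & lib_def)   # as written in the code above; 0 here
    score = pros - shared - cons
    if score > best_score:
        best_score, best_target = score, target

print((best_target, best_score))      # ('/opt/zlib/lib/libz.dylib', 2)
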
Example #22
File: add.py Project: westernx/vee
def add(args):

    home = args.assert_home()
    env_repo = home.get_env_repo(args.repo)
    req_set = env_repo.load_requirements()
    pkg_set = PackageSet(home=home)

    baked_any = None

    if args.update:
        baked_any = False
        for req in req_set.iter_packages():
            pkg = pkg_set.resolve(req, check_existing=False)
            if pkg.fetch_type != 'git':
                continue
            print style_note('Fetching', str(req))
            pkg.repo.fetch('origin',
                           'master')  # TODO: track these another way?
            if pkg.repo.check_ff_safety('origin/master'):
                pkg.repo.checkout('origin/master')
                head = pkg.repo.head[:8]
                if head != req.revision:
                    req.revision = pkg.repo.head[:8]
                    print style_note('Updated', str(req))
                    baked_any = True

    if args.bake_installed:
        baked_any = False

        for req in req_set.iter_packages():

            pkg = pkg_set.resolve(req)
            if pkg.fetch_type != 'git':
                continue
            repo = pkg.pipeline.steps['fetch'].repo

            if req.name and req.name == guess_name(req.url):
                print style_note('Unset redundant name', req.name)
                req.name = None
                baked_any = True

            if pkg.installed and req.revision != repo.head[:8]:
                req.revision = repo.head[:8]
                baked_any = True
                print style_note('Pinned', req.name, req.revision)

    if args.checksum:
        baked_any = False

        for req in req_set.iter_packages():
            pkg = pkg_set.resolve(req)
            if pkg.checksum:
                continue
            if not pkg.package_path or not os.path.isfile(pkg.package_path):
                continue
            req.checksum = checksum_file(pkg.package_path)
            print style_note('Checksummed', pkg.name, req.checksum)
            baked_any = True

    if baked_any is not None:
        if baked_any:
            env_repo.dump_requirements(req_set)
        else:
            print style_note('No changes.')
        return

    row = home.get_development_record(os.path.abspath(args.package))

    if not row:
        raise ValueError('No development package %r' % args.package)

    dev_repo = GitRepo(row['path'])

    # Get the normalized origin.
    dev_remote_urls = set()
    for url in dev_repo.remotes().itervalues():
        url = normalize_git_url(url, prefer='scp') or url
        log.debug('adding dev remote url: %s' % url)
        dev_remote_urls.add(url)
    if not dev_remote_urls:
        print style_error('No git remotes for %s' % row['path'])
        return 1

    for req in req_set.iter_packages(eval_control=False):

        # We only deal with git packages.
        pkg = pkg_set.resolve(req, check_existing=False)
        if pkg.fetch_type != 'git':
            continue

        req_url = normalize_git_url(req.url, prefer='scp')
        log.debug('does match package url?: %s' % req_url)
        if req_url in dev_remote_urls:
            if req.revision == dev_repo.head[:8]:
                print style_note('No change to', str(req))
            else:
                req.revision = dev_repo.head[:8]
                print style_note('Updated', str(req))
            break

    else:
        if not args.init:
            print '{error}: No required package {name}; would match one of:'.format(
                error=style('Error', 'red'),
                name=style(args.package, bold=True))
            for url in sorted(dev_remote_urls):
                print '    {}'.format(url)
            print 'Use {} to setup: git+{} --revision {}'.format(
                style('vee add --init %s' % args.package, 'green'),
                dev_repo.remotes()['origin'], dev_repo.head[:8])
            return 1

        req = Package(
            url=normalize_git_url(dev_repo.remotes()['origin'], prefix=True),
            revision=dev_repo.head[:8],
            home=home,
        )
        req_set.append(('', req, ''))

    env_repo.dump_requirements(req_set)
Example #23
File: gc.py Project: westernx/vee
def gc(args):

    home = args.assert_home()
    con = home.db.connect()

    with con:

        repo_ids = {}
        for row in con.execute('SELECT id, name from repositories'):
            repo_ids[row['name']] = row['id']

        envs_by_id = {}

        log.info(style_note('Cleaning environments'))
        for row in con.execute(
                'SELECT id, name, path, repository_id from environments ORDER BY created_at ASC'
        ):

            id_, name, path, repo_id = row

            if not os.path.exists(path):
                log.info('environment does not exist at %s; deleting' % (path))
                if not args.dry_run:
                    delete_environment(con, id_)
                continue

            # Track for later.
            envs_by_id.setdefault(repo_id, []).append((id_, name, path))

            # The rest is making sure the repo_id and commit are correct.
            if repo_id:
                continue

            m = re.match(r'(\w+)/commits/([0-9a-f]{7,8}(?:-dirty)?)$', name)
            if not m:
                log.warning(
                    '%s (%d) does not appear to be managed by git; skipping' %
                    (name, id_))
                continue

            repo_name, commit_name = m.groups()
            repo_id = repo_ids.get(repo_name)
            if not repo_id:
                log.warning('repo %s does not exist for %s (%d); skipping' %
                            (repo_name, name, id_))
                continue

            log.info('Fixing repo relationship for %s (%d)' % (name, id_))
            if not args.dry_run:
                con.execute(
                    'UPDATE environments SET repository_id = ?, repository_commit = ? WHERE id = ?',
                    [repo_id, commit_name, id_])

        if args.prune_environments:
            log.info(style_note('Pruning old environments'))
            for repo_id, envs in sorted(envs_by_id.iteritems()):
                for id_, name, path in envs[:-args.keep_latest]:
                    log.info('Deleting %s (%d)' % (name, id_))
                    if not args.dry_run:
                        shutil.rmtree(path)
                        delete_environment(con, id_)

        log.info(style_note('Cleaning installed packages'))
        package_ids = []
        install_paths_to_id = {}
        for row in con.execute(
                'SELECT id, name, install_path, build_path from packages ORDER by created_at DESC'
        ):

            id_, name, install_path, build_path = row
            log.debug('%s %s %s' % (id_, name, install_path))

            if not os.path.exists(install_path):
                log.info('%s no longer exists at %s; deleting' %
                         (name, install_path))
                if not args.dry_run:
                    delete_package(con, id_)
                continue

            real_id = install_paths_to_id.get(install_path)
            if real_id:
                log.info('%s %d is a duplicate of %s; deleting' %
                         (name, id_, real_id))
                # TODO: update any links or package_dependencies which point to this.
                if not args.dry_run:
                    delete_package(con, id_)
                continue
            install_paths_to_id[install_path] = id_

            if args.prune_orphaned_packages:
                row = con.execute(
                    'SELECT count(1) FROM links WHERE package_id = ?',
                    [id_]).fetchone()
                if not row[0]:
                    log.info('%s (%d) is not linked; deleting' % (name, id_))
                    if not args.dry_run:
                        if build_path and os.path.exists(build_path):
                            shutil.rmtree(build_path)
                        shutil.rmtree(install_path)
                        delete_package(con, id_)
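
The environment-name regex in gc() encodes the <repo>/commits/<short-sha> naming convention; a quick standalone check with invented names makes the two capture groups explicit.

# Quick check of the environment-name pattern used in gc(); names are invented.
import re

pattern = re.compile(r'(\w+)/commits/([0-9a-f]{7,8}(?:-dirty)?)$')
for name in ('primary/commits/1a2b3c4d',
             'primary/commits/1a2b3c4d-dirty',
             'primary/branches/master'):
    m = pattern.match(name)
    print((name, m.groups() if m else None))
# ('primary/commits/1a2b3c4d', ('primary', '1a2b3c4d'))
# ('primary/commits/1a2b3c4d-dirty', ('primary', '1a2b3c4d-dirty'))
# ('primary/branches/master', None)
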