def uninstall(self, sequence: Optional[str] = None, dev: bool = False, **kwargs: Any) -> None:
    '''Uninstall package and dependencies.

    :param sequence: requirement string identifying the package; when
        omitted, every lock recorded for the ``dev`` group is uninstalled
    :param dev: operate on the development dependency group
    :param kwargs: forwarded to ``_uninstall_package``
    '''
    # TODO: compare removed dependencies with remaining
    # Fix: initialize up front — previously `dependencies` was left unbound
    # (NameError) when `sequence` was given but locate() failed or the
    # package was not a tracked dependency.
    dependencies = []
    if sequence:
        # TODO: json/rpc does not include run_requires
        # package = self.locator.locate(sequence)
        package = locate(sequence)
        if package and self.__source_tree.is_dependency(package.name, dev):
            self.__source_tree.remove_dependency(package.name, dev)
            dependencies = [package] + self.get_dependencies(package)
    else:
        for lock in self.__lockfile.get_locks(dev):
            # TODO: need better load from lockfile
            dependencies.append(locate(lock['name']))
    if dependencies:  # idiomatic truthiness check instead of `!= []`
        with ThreadPoolExecutor(max_workers=8) as executor:
            jobs = [
                executor.submit(self._uninstall_package, dependency, dev, **kwargs)
                for dependency in dependencies
            ]
            for future in as_completed(jobs):
                result = future.result()
                if result:
                    print('result', result)
    self.save()
def install(
    self,
    sequence: Optional[str] = None,
    dev: bool = False,
    python: Optional[str] = None,
    platform: Optional[str] = None,
    optional: bool = False,
    prerelease: bool = False,
    **kwargs: Any,
) -> None:
    '''Install package and dependencies.

    :param sequence: requirement string identifying the package; when
        omitted, every lock recorded for the ``dev`` group is installed
    :param dev: operate on the development dependency group
    :param python: target python (currently unused here; kept for callers)
    :param platform: target platform (currently unused here; kept for callers)
    :param optional: install optional dependencies (currently unused here)
    :param prerelease: allow pre-releases (currently unused here)
    :param kwargs: forwarded to ``_perform_install`` (plus ``temp_dir``)
    '''
    # TODO: selectable distribution path
    # self.distribution_path.clear_cache()
    self.distribution_path.create_pypackages()
    if sequence:
        # TODO: json/rpc does not include run_requires
        # package = self.locator.locate(sequence)
        package = locate(sequence)
        # NOTE(review): locate() may return None for an unresolvable
        # requirement — confirm add_dependency tolerates that.
        self.__source_tree.add_dependency(package, dev)
        dependencies = [package] + self.get_dependencies(package)
    else:
        dependencies = []
        for lock in self.__lockfile.get_locks(dev):
            # TODO: need better load from lockfile
            dependencies.append(locate(lock['name']))
    if dependencies:  # idiomatic truthiness check instead of `!= []`
        with TemporaryDirectory() as temp_dir:
            # share one scratch directory across all install workers
            kwargs['temp_dir'] = temp_dir
            with ThreadPoolExecutor(max_workers=8) as executor:
                jobs = [
                    executor.submit(self._perform_install, dependency, dev, **kwargs)
                    for dependency in dependencies
                ]
                for future in as_completed(jobs):
                    result = future.result()
                    if result:
                        # map the worker's result back to the requested
                        # distribution by key (first match)
                        installed = next(
                            x for x in dependencies if x.key == result.key
                        )
                        print(installed)
    self.save()
def dependencies(project):
    """Get the dependencies for a project.

    Returns a list of ``{'name': ..., 'version': ...}`` dicts with
    canonicalized names/versions, or ``None`` when the project itself
    cannot be located.
    """
    log = logging.getLogger('ciu')
    log.info('Locating dependencies for {}'.format(project['name']))
    located = locate(project['name'], prereleases=True)
    if not located:
        log.warning('{0} not found'.format(project['name']))
        return None
    deps = []
    for dep in located.run_requires:
        d = locate(dep)
        if d is None:
            # Fix: locate() returns None for an unresolvable requirement;
            # previously this crashed with AttributeError on d.name.
            log.warning('{0} not found'.format(dep))
            continue
        deps.append({
            'name': packaging.utils.canonicalize_name(d.name),
            'version': packaging.utils.canonicalize_version(d.version)
        })
    return deps
def get_dependencies(package: Distribution) -> List[Distribution]:
    '''Get package dependencies, depth-first.

    :param package: distribution whose ``run_requires`` are resolved
    :return: transitively resolved dependencies in depth-first order

    Fixes two defects in the previous version: a dependency cycle caused
    unbounded recursion, and ``locate()`` returning ``None`` for an
    unresolvable requirement crashed the recursive call.
    '''
    dependencies: List[Distribution] = []
    seen = set()  # names already resolved — breaks dependency cycles

    def _walk(dist: Distribution) -> None:
        # one-line purpose: append dist's unresolved requirements depth-first
        for sequence in dist.run_requires:
            dependency = locate(sequence)
            if dependency is None or dependency.name in seen:
                continue
            seen.add(dependency.name)
            dependencies.append(dependency)
            _walk(dependency)

    _walk(package)
    return dependencies
def test_prereleases(self):
    """locate() only finds the pre-release when asked; the finder reports it unsatisfied otherwise."""
    locator = AggregatingLocator(
        JSONLocator(),
        SimpleScrapingLocator('https://pypi.python.org/simple/', timeout=3.0),
        scheme='legacy',
    )
    REQT = 'SQLAlchemy (>0.5.8, < 0.6)'
    finder = DependencyFinder(locator)
    # Only a pre-release satisfies this pin, so plain locate() finds nothing.
    self.assertIsNone(locate(REQT))
    located = locate(REQT, True)
    self.assertIsNotNone(located)
    self.assertEqual(located.name_and_version, 'SQLAlchemy (0.6beta3)')
    dummy = make_dist('dummy', '0.1')
    dummy.metadata['Requires-Dist'] = [REQT]
    # With prereleases enabled the requirement resolves cleanly.
    dists, problems = finder.find(dummy, prereleases=True)
    self.assertFalse(problems)
    names = sorted(d.name_and_version for d in dists)
    self.assertEqual(names[0], 'SQLAlchemy (0.6beta3)')
    # Without the flag the pin cannot be satisfied and is reported.
    dists, problems = finder.find(dummy)
    self.assertEqual(dists, {dummy})
    self.assertEqual(len(problems), 1)
    self.assertEqual(problems.pop(), ('unsatisfied', REQT))
def upgrade(self, sequence: Optional[str], force: bool = False) -> None:
    '''Upgrade/downgrade package and dependencies.'''
    if not sequence:
        # TODO: iterate and update all locks
        # self.distribution_path.upgrade_all()
        print('need process to uninstall and update packages')
        return
    # NOTE(review): locate() may return None for an unresolvable
    # requirement — confirm callers always pass a resolvable spec.
    package = locate(sequence)
    if self.distribution_path.is_installed(package.name):
        self.distribution_path.upgrade(package, force)
        # TODO: refresh the lockfile entry for the upgraded package
def projects_from_metadata(metadata):
    """Extract the project dependencies from a metadata spec.

    :param metadata: iterable of metadata documents (strings)
    :return: list of ``{'name': ..., 'version': ...}`` dicts covering each
        project and its resolved run-time requirements
    """
    projects = []
    for data in metadata:
        meta = distlib.metadata.Metadata(fileobj=io.StringIO(data))
        projects.append({
            'name': packaging.utils.canonicalize_name(meta.name),
            'version': packaging.utils.canonicalize_version(meta.version)
        })
        for dep in meta.run_requires:
            d = locate(dep)
            if d is None:
                # Fix: locate() returns None for an unresolvable
                # requirement; previously this crashed on d.name.
                continue
            projects.append({
                'name': packaging.utils.canonicalize_name(d.name),
                'version': packaging.utils.canonicalize_version(d.version)
            })
    return projects
def test_nonexistent(self):
    # See Issue #58 — locating an unknown project must not raise; it
    # either returns None or a Distribution.
    result = locate("foobarbazbishboshboo")
    if result is not None:
        self.assertIsInstance(result, Distribution)
# type: ignore
import os

from distlib.locators import locate

from proman_packaging.config import Config
from proman_packaging.source_tree import LockManager

# Fixture files live next to this test module.
lock_file = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), 'proman-lock.json'
)
lock_config = Config(filepath=lock_file, writable=True)

pyproject_file = os.path.join(os.path.dirname(__file__), 'pyproject.toml')
pyproject_config = Config(filepath=pyproject_file, writable=True)

# NOTE(review): these resolve over the network at import time via distlib.
package = locate('urllib3==1.20.0')
update = locate('urllib3==1.25.0')


def test_no_lock(fs):
    """A lockfile with no matching entry yields an empty record."""
    fs.add_real_file(lock_file, False)
    manager = LockManager(lock_config)
    assert manager.get_lock(package) == {}


def test_add_lock(fs):
    """A freshly added lock is retrievable by package name."""
    fs.add_real_file(lock_file, False)
    manager = LockManager(lock_config)
    manager.add_lock(package)
    entry = manager.get_lock(package.name)
    assert package.name == entry['name']
def test_nonexistent(self):
    # See Issue #58 — an unknown project name yields None or a
    # Distribution, never an exception.
    found = locate('foobarbazbishboshboo')
    self.assertTrue(isinstance(found, Distribution) or found is None)