Example #1
    def loads(self, content: str) -> RootDependency:
        doc = yaml_load(content)

        # make root
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()),
        )
        if 'name' in doc:
            root.raw_name = doc['name']
            root.package.name = doc['name']
        root.repo = CondaRepo(channels=doc.get('channels', []))

        # make dependencies
        for req in doc.get('dependencies', []):
            parsed = root.repo.parse_req(req)
            if parsed['name'] == 'python':
                if parsed.get('version', '*') not in ('*', ''):
                    spec = '.'.join((parsed['version'].split('.') + ['*', '*'])[:3])
                    root.python = RangeSpecifier(spec)
                continue
            root.attach_dependencies(DependencyMaker.from_params(
                raw_name=parsed['name'],
                constraint=parsed.get('version', '*'),
                source=root,
                repo=root.repo,
            ))
        return root
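
The python entry above is normalized before it becomes a RangeSpecifier: a short conda spec such as 3.7 is padded with wildcards up to three components. A minimal standalone sketch of that padding step (pad_python_spec is a hypothetical name used only for illustration):

def pad_python_spec(version: str) -> str:
    # hypothetical helper: keep at most three version components, padding missing ones with '*'
    return '.'.join((version.split('.') + ['*', '*'])[:3])

assert pad_python_spec('3') == '3.*.*'
assert pad_python_spec('3.7') == '3.7.*'
assert pad_python_spec('3.7.1') == '3.7.1'
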
Example #2
    def load(self, path) -> RootDependency:
        if isinstance(path, str):
            path = Path(path)
        if path.is_file():
            return self.loads(content=path.read_text(encoding='utf-8'))
        root = RootDependency(package=PackageRoot(path=path))

        # get modules
        modules = set()
        for package in root.package.packages:
            for module in package:
                content = module.read_text(encoding='utf-8')
                modules.update(self._get_modules(content=content))

        # attach modules
        local_modules = {package.module for package in root.package.packages}
        for module in sorted(modules):
            if module in local_modules:
                continue
            if Path(*module.split('.')).exists():
                continue
            root.attach_dependencies(
                DependencyMaker.from_params(
                    source=root,
                    raw_name=module,
                    constraint='*',
                ))
        return root
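
The _get_modules helper is not shown in this snippet; one plausible way to collect imported top-level package names from a module's source is the standard ast module. A sketch under that assumption (get_imported_modules is a hypothetical stand-in, not the project's actual helper):

import ast

def get_imported_modules(content: str) -> set:
    # hypothetical helper: collect top-level package names from `import x` and `from x import y`
    modules = set()
    for node in ast.walk(ast.parse(content)):
        if isinstance(node, ast.Import):
            modules.update(alias.name.split('.')[0] for alias in node.names)
        elif isinstance(node, ast.ImportFrom) and node.module and node.level == 0:
            modules.add(node.module.split('.')[0])
    return modules

assert get_imported_modules('import requests\nfrom pathlib import Path') == {'requests', 'pathlib'}
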
Example #3
    def loads(self, content) -> RootDependency:
        doc = tomlkit.parse(content)
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()), )
        root.python = RangeSpecifier(
            doc.get('metadata', {}).get('python-versions', '*'))

        # get repositories
        root.repo = RepositoriesRegistry()
        if doc.get('source'):
            for source in doc['source']:
                root.repo.add_repo(url=source['url'], name=source['name'])
        root.repo.attach_config()

        envs = defaultdict(set)
        for extra, deps in doc.get('extras', {}).items():
            for dep in deps:
                envs[dep].add(extra)
        for content in doc.get('package', []):
            # category can be "dev" or "main"
            envs[content['name']].add(content['category'])

        deps = []
        for content in doc.get('package', []):
            deps.extend(
                self._make_deps(
                    root=root,
                    content=content,
                    envs=envs[content['name']],
                    repo=root.repo,
                ))
        root.attach_dependencies(deps)
        return root
Example #4
    def loads(self, content) -> RootDependency:
        doc = json.loads(content, object_pairs_hook=OrderedDict)
        deps = []
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()), )

        repo = RepositoriesRegistry()
        for repo_info in doc.get('_meta', {}).get('sources', []):
            repo.add_repo(name=repo_info['name'], url=repo_info['url'])
        repo.attach_config()

        python = doc.get('_meta', {}).get('requires',
                                          {}).get('python_version', '')
        if python not in {'', '*'}:
            root.python = RangeSpecifier('==' + python)

        for section, is_dev in [('default', False), ('develop', True)]:
            for name, content in doc.get(section, {}).items():
                subdeps = self._make_deps(root, name, content)
                # set repo
                if 'index' in content:
                    dep_repo = repo.make(name=content['index'])
                else:
                    dep_repo = repo
                for dep in subdeps:
                    if isinstance(dep.repo, WarehouseBaseRepo):
                        dep.repo = dep_repo
                # set envs
                for dep in subdeps:
                    dep.envs = {'dev'} if is_dev else {'main'}
                deps.extend(subdeps)
        root.attach_dependencies(deps)
        return root
Example #5
    def loads(self, content: str) -> RootDependency:
        doc = tomlkit.parse(content)
        deps = []
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()), )

        repo = RepositoriesRegistry()
        if 'source' in doc:
            for repo_info in doc['source']:
                repo.add_repo(name=repo_info['name'], url=repo_info['url'])
        repo.attach_config()

        python = doc.get('requires', {}).get('python_version', '')
        if python not in {'', '*'}:
            root.python = RangeSpecifier('==' + python)

        for section, is_dev in [('packages', False), ('dev-packages', True)]:
            for name, content in doc.get(section, {}).items():
                subdeps = self._make_deps(root, name, content)
                if isinstance(content, dict) and 'index' in content:
                    dep_repo = repo.make(name=content['index'])
                    for dep in subdeps:
                        if isinstance(dep.repo, WarehouseBaseRepo):
                            dep.repo = dep_repo

                for dep in subdeps:
                    # Pipfile doesn't support any other envs
                    dep.envs = {'dev'} if is_dev else {'main'}
                deps.extend(subdeps)
        root.attach_dependencies(deps)
        return root
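
For reference, a hypothetical minimal Pipfile in the shape this loader iterates over: [[source]] entries feed the RepositoriesRegistry, [requires] sets the python constraint, and [packages]/[dev-packages] map to the main/dev envs. A sketch of parsing such a file with tomlkit (the file content is invented for illustration):

import textwrap
import tomlkit

# hypothetical Pipfile content, invented for illustration
PIPFILE = textwrap.dedent('''
    [[source]]
    name = "pypi"
    url = "https://pypi.org/simple"

    [requires]
    python_version = "3.7"

    [packages]
    requests = "*"

    [dev-packages]
    pytest = {version = ">=5.0", index = "pypi"}
''')

doc = tomlkit.parse(PIPFILE)
for section, env in (('packages', 'main'), ('dev-packages', 'dev')):
    for name, content in doc.get(section, {}).items():
        print(env, name, content)
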
Example #6
    def load_dir(self, path) -> RootDependency:
        if not (path / 'METADATA').exists():
            raise FileNotFoundError('cannot find METADATA: {}'.format(
                str(path)))
        converter = EggInfoConverter()

        # dependency_links.txt
        urls = dict()
        if (path / 'dependency_links.txt').exists():
            with (path / 'dependency_links.txt').open('r') as stream:
                content = stream.read()
            urls = converter.parse_dependency_links(content)

        # METADATA
        with (path / 'METADATA').open('r') as stream:
            content = stream.read()
        root = converter.parse_info(content, urls=urls)

        # entry_points.txt
        if (path / 'entry_points.txt').exists():
            with (path / 'entry_points.txt').open('r') as stream:
                content = stream.read()
            root = converter.parse_entrypoints(content, root=root)

        root.package = PackageRoot(path=path.parent, name=root.name)
        return root
Example #7
    def __call__(self) -> bool:
        old_version = None
        root = None
        loader = None
        project_path = Path(self.config['project'])
        package = PackageRoot(path=project_path)

        if 'from' in self.config:
            # get project metainfo
            loader = CONVERTERS[self.config['from']['format']]
            loader = loader.copy(project_path=Path(self.config['project']))
            root = loader.load(path=self.config['from']['path'])
            if root.version != '0.0.0':
                package = root.package
                old_version = root.version
            else:
                self.logger.warning('cannot get version from `from` file')
        else:
            self.logger.warning('`from` file is not specified')

        if old_version is None and package.metainfo:
            old_version = package.metainfo.version

        if old_version is None:
            if self.args.name == 'init':
                old_version = ''
            else:
                self.logger.error('cannot find old project version')
                return False

        # make new version
        new_version = bump_version(
            version=old_version,
            rule=self.args.name,
            scheme=self.config['versioning'],
        )
        self.logger.info('generated new version', extra=dict(
            old=old_version,
            new=new_version,
        ))

        # update version in project files
        paths = []
        for path in self._bump_project(project=package, old=old_version, new=new_version):
            paths.append(path)
            self.logger.info('file bumped', extra=dict(path=str(path)))

        # update version in project metadata
        updated = self._update_metadata(root=root, loader=loader, new_version=new_version)
        if updated:
            paths.append(Path(self.config['from']['path']))

        # set git tag
        tagged = True
        if self.config.get('tag') is not None:
            tagged = self._add_git_tag(paths=paths, new_version=new_version, template=self.config['tag'])

        return tagged
Example #8
    def loads(self, content: str) -> RootDependency:
        doc = parse(content)
        deps = []
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()), )
        for req in doc['build-system']['requires']:
            req = Requirement(req)
            deps.extend(DependencyMaker.from_requirement(source=root, req=req))
        root.attach_dependencies(deps)
        return root
Example #9
    def load(self, path) -> RootDependency:
        if isinstance(path, str):
            path = Path(path)
        path = self._make_source_path_absolute(path)
        self._resolve_path = path.parent

        root = RootDependency(
            package=PackageRoot(path=self.project_path or path.parent), )

        finder = PackageFinder(
            find_links=[],
            index_urls=[],
            session=PipSession(),
        )
        # https://github.com/pypa/pip/blob/master/src/pip/_internal/req/constructors.py
        with chdir(self.resolve_path or path.parent):
            reqs = parse_requirements(
                filename=str(path),
                session=PipSession(),
                finder=finder,
            )

            deps = []
            for req in reqs:
                # https://github.com/pypa/pip/blob/master/src/pip/_internal/req/req_install.py
                if req.req is None:
                    req.req = SimpleNamespace(
                        name=req.link.url.split('/')[-1],
                        specifier='*',
                        marker=None,
                        extras=None,
                    )
                deps.extend(
                    DependencyMaker.from_requirement(
                        source=root,
                        req=req.req,
                        url=req.link and req.link.url,
                        editable=req.editable,
                    ))

        # update repository
        if finder.index_urls or finder.find_links:
            repo = RepositoriesRegistry()
            for url in chain(finder.index_urls, finder.find_links):
                repo.add_repo(url=url)
            repo.attach_config()
            for dep in deps:
                if isinstance(dep.repo, WarehouseBaseRepo):
                    dep.repo = repo

        root.attach_dependencies(deps)
        return root
Example #10
    def load_dir(self, *paths) -> RootDependency:
        # drop duplicates
        paths = list({str(path): path for path in paths}.values())
        if not paths:
            raise FileNotFoundError('cannot find egg-info')
        # more than one egg-info can exist, so prefer the least nested one
        if len(paths) > 1:
            min_parts = min(len(path.parts) for path in paths)
            paths = [path for path in paths if len(path.parts) == min_parts]
            if len(paths) > 1:
                raise FileExistsError('too many egg-info', paths)
        path = paths[0]

        # sometimes *.egg-info is a single file that contains only the PKG-INFO content
        if not (path / 'PKG-INFO').exists():
            with path.open('r') as stream:
                content = stream.read()
            return self.parse_info(content)

        # dependency_links.txt
        urls = dict()
        if (path / 'dependency_links.txt').exists():
            with (path / 'dependency_links.txt').open('r') as stream:
                content = stream.read()
            urls = self.parse_dependency_links(content)

        # pkg-info
        with (path / 'PKG-INFO').open('r') as stream:
            content = stream.read()
        root = self.parse_info(content, urls=urls)

        # requires.txt
        if not root.dependencies and (path / 'requires.txt').exists():
            with (path / 'requires.txt').open('r') as stream:
                content = stream.read()
            root = self.parse_requires(content, root=root, urls=urls)

        # entry_points.txt
        if (path / 'entry_points.txt').exists():
            with (path / 'entry_points.txt').open('r') as stream:
                content = stream.read()
            root = self.parse_entrypoints(content, root=root)

        # readme and package files
        root.readme = Readme.discover(path=path)
        root.package = PackageRoot(
            path=self.project_path or path.parent,
            name=root.name,
        )
        return root
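
The selection logic at the top of this method can be read in isolation: duplicates are dropped by their string form and, if several candidates remain, only the least nested ones are kept. A standalone sketch of that behaviour (pick_egg_info is a hypothetical name for illustration):

from pathlib import Path

def pick_egg_info(paths):
    # hypothetical helper: drop duplicates while keeping the original Path objects
    paths = list({str(path): path for path in paths}.values())
    if not paths:
        raise FileNotFoundError('cannot find egg-info')
    # prefer the least nested candidate; remaining ambiguity is an error
    min_parts = min(len(path.parts) for path in paths)
    paths = [path for path in paths if len(path.parts) == min_parts]
    if len(paths) > 1:
        raise FileExistsError('too many egg-info', paths)
    return paths[0]

assert pick_egg_info([Path('pkg.egg-info'), Path('build/pkg.egg-info')]) == Path('pkg.egg-info')
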
Example #11
    def __call__(self) -> bool:
        # get license object
        name = ' '.join(self.args.name).strip()
        if not name:
            name = 'MIT'
        license = licenses.get_by_id(name)
        if license is None:
            license = licenses.get_by_name(name)
        if license is None:
            self.logger.error('cannot find license with given name')
            return False

        # author name from --owner
        author = self.config.get('owner')

        # get author from `from`
        if not author and 'from' in self.config:
            loader = CONVERTERS[self.config['from']['format']]
            loader = loader.copy(project_path=Path(self.config['project']))
            root = loader.load(self.config['from']['path'])
            if root.authors:
                author = root.authors[0]

        # author from project config file
        if not author:
            metainfo = PackageRoot(Path(self.config['project'])).metainfo
            if metainfo and metainfo.authors:
                author = metainfo.authors[0]

        # author from getuser().title
        if not author:
            author = getuser().title()

        # generate license text
        text = license.make_text(copyright='{year} {name}'.format(
            year=datetime.now().year,
            name=author,
        ))
        (Path(self.config['project']) / 'LICENSE').write_text(text)
        self.logger.info('license generated', extra=dict(license=license.name))
        return True
Example #12
    def _extract_modules(self, dep, archive_path: Path,
                         output_path: Path) -> bool:
        # tell shutil that a wheel can be unpacked as a zip archive
        if 'wheel' not in shutil._UNPACK_FORMATS:  # type: ignore
            shutil.register_unpack_format(
                name='wheel',
                extensions=['.whl'],
                function=shutil._unpack_zipfile,  # type: ignore
            )

        with TemporaryDirectory(
                suffix=dep.name) as package_path:  # type: Path # type: ignore
            package_path = Path(package_path)
            shutil.unpack_archive(str(archive_path), str(package_path))
            if len(list(package_path.iterdir())) == 1:
                package_path = next(package_path.iterdir())

            # find modules
            root = PackageRoot(name=dep.name, path=package_path)
            if not root.packages:
                self.logger.error('cannot find modules',
                                  extra=dict(
                                      dependency=dep.name,
                                      version=dep.group.best_release.version,
                                  ))
                return False

            # copy modules
            module_path = root.packages[0].path
            module_name = root.packages[0].module
            self.logger.info('copying module...',
                             extra=dict(
                                 path=str(
                                     module_path.relative_to(package_path)),
                                 dependency=dep.name,
                             ))
            shutil.copytree(
                src=str(module_path),
                dst=str(output_path.joinpath(*module_name.split('.'))),
            )
            return True
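
The registration above leans on shutil's private helpers (_UNPACK_FORMATS, _unpack_zipfile). A sketch of the same idea built only on the public standard library, since a wheel is a plain zip archive:

import shutil
import zipfile

def _unpack_wheel(filename, extract_dir):
    # a wheel is just a zip archive, so zipfile can extract it directly
    with zipfile.ZipFile(filename) as archive:
        archive.extractall(extract_dir)

if 'wheel' not in {name for name, _, _ in shutil.get_unpack_formats()}:
    shutil.register_unpack_format(
        name='wheel',
        extensions=['.whl'],
        function=_unpack_wheel,
    )

# after registration, shutil.unpack_archive() accepts .whl paths as usual
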
Example #13
def test_patch_imports(temp_path: Path):
    (temp_path / 'project').mkdir()
    (temp_path / 'project' / '__init__.py').write_text('import requests\nimport django')
    (temp_path / 'project' / 'vendor' / 'requests').mkdir(parents=True)
    (temp_path / 'project' / 'vendor' / 'requests' / '__init__.py').touch()

    config = Config()
    config.attach(dict(project=str(temp_path)))
    package = PackageRoot(name='project', path=temp_path)
    root = RootDependency(raw_name='project', package=package)
    resolver = Resolver(
        graph=Graph(root),
        mutator=Mutator(),
    )
    command = VendorImportCommand(argv=[], config=config)
    command._patch_imports(
        resolver=resolver,
        output_path=temp_path / 'project' / 'vendor',
    )

    expected = 'import project.vendor.requests as requests\nimport django'
    assert (temp_path / 'project' / '__init__.py').read_text() == expected
Example #14
    def loads(self, content: str) -> RootDependency:
        doc = tomlkit.parse(content)
        section = doc['tool']['flit']['metadata']
        root = RootDependency(
            raw_name=section.get('dist-name') or section['module'],
            python=RangeSpecifier(section.get('requires-python')),
            classifiers=section.get('classifiers', tuple()),
            license=section.get('license', ''),
            package=PackageRoot(path=Path('.').resolve(), name=section['module']),
        )

        if 'keywords' in section:
            if isinstance(section['keywords'], str):
                if ',' in section['keywords']:
                    root.keywords = tuple(section['keywords'].split(','))
                else:
                    root.keywords = tuple(section['keywords'].split())
            else:
                root.keywords = tuple(section['keywords'])

        # description
        if 'description-file' in section:
            root.readme = Readme(path=Path(section['description-file']))

        # entrypoints
        entrypoints = []
        path = Path(section.get('entry-points-file', 'entry_points.txt'))
        if path.exists():
            with path.open('r', encoding='utf-8') as stream:
                tmp_root = EggInfoConverter().parse_entrypoints(content=stream.read())
                entrypoints = list(tmp_root.entrypoints)
        for group, subentrypoints in doc['tool']['flit'].get('entrypoints', {}).items():
            for name, entrypoint in subentrypoints.items():
                entrypoints.append(EntryPoint(name=name, path=entrypoint, group=group))
        for name, entrypoint in doc['tool']['flit'].get('scripts', {}).items():
            entrypoints.append(EntryPoint(name=name, path=entrypoint))
        root.entrypoints = tuple(entrypoints)

        # authors
        authors = []
        if 'author' in section:
            authors.append(Author(
                name=section['author'],
                mail=section['author-email'],
            ))
        if 'maintainer' in section:
            authors.append(Author(
                name=section['maintainer'],
                mail=section['maintainer-email'],
            ))
        root.authors = tuple(authors)

        # links
        if 'home-page' in section:
            root.links['homepage'] = section['home-page']
        if 'urls' in section:
            root.links.update(section['urls'])

        # requirements
        for req in section.get('requires', []):
            root.attach_dependencies(DependencyMaker.from_requirement(
                source=root,
                req=Requirement(req),
            ))
        for req in section.get('dev-requires', []):
            root.attach_dependencies(DependencyMaker.from_requirement(
                source=root,
                req=Requirement(req),
                envs={'dev'},
            ))

        # extras
        for extra, reqs in section.get('requires-extra', {}).items():
            for req in reqs:
                req = Requirement(req)
                root.attach_dependencies(DependencyMaker.from_requirement(
                    source=root,
                    req=req,
                    envs={'main', extra},
                ))

        return root
Example #15
    def load(self, path) -> RootDependency:
        if isinstance(path, str):
            path = Path(path)
        path = self._make_source_path_absolute(path)
        self._resolve_path = path.parent

        data = read_setup(path=path, error_handler=logger.debug)
        root = RootDependency(
            raw_name=data['name'],
            version=data.get('version', '0.0.0'),
            package=PackageRoot(
                path=self.project_path or Path(),
                name=data['name'],
            ),

            description=data.get('description'),
            license=data.get('license'),

            keywords=tuple(data.get('keywords', ())),
            classifiers=tuple(data.get('classifiers', ())),
            platforms=tuple(data.get('platforms', ())),

            python=RangeSpecifier(data.get('python_requires')),
            readme=Readme.from_code(path=path),
        )

        # links
        fields = (
            (HOMEPAGE_FIELD, 'url'),
            (DOWNLOAD_FIELD, 'download_url'),
        )
        for key, name in fields:
            link = data.get(name)
            if link:
                root.links[key] = link

        # authors
        for name in ('author', 'maintainer'):
            author = data.get(name)
            if author:
                root.authors += (
                    Author(name=author, mail=data.get(name + '_email')),
                )

        # entrypoints
        entrypoints = []
        for group, content in data.get('entry_points', {}).items():
            for entrypoint in content:
                entrypoints.append(EntryPoint.parse(text=entrypoint, group=group))
        root.entrypoints = tuple(entrypoints)

        # dependency_links
        urls = dict()
        for url in data.get('dependency_links', ()):
            parsed = parse_link(url)
            name = parsed.name.split('-')[0]
            urls[name] = url

        # dependencies
        for req in data.get('install_requires', ()):
            req = Requirement(req)
            root.attach_dependencies(DependencyMaker.from_requirement(
                source=root,
                req=req,
                url=urls.get(req.name),
            ))

        # extras
        for extra, reqs in data.get('extras_require', {}).items():
            extra, marker = self._split_extra_and_marker(extra)
            envs = {extra} if extra == 'dev' else {'main', extra}
            for req in reqs:
                req = Requirement(req)
                root.attach_dependencies(DependencyMaker.from_requirement(
                    source=root,
                    req=req,
                    marker=marker,
                    envs=envs,
                ))

        return root
Example #16
    def loads(self, content) -> RootDependency:
        doc = tomlkit.parse(content)
        if 'tool' not in doc:
            doc['tool'] = {'poetry': tomlkit.table()}
        elif 'poetry' not in doc['tool']:
            doc['tool']['poetry'] = tomlkit.table()
        section = doc['tool']['poetry']
        root = RootDependency(
            package=PackageRoot(path=self.project_path or Path()), )

        # read metainfo
        if 'name' in section:
            root.raw_name = section['name']
        for field in self._metafields:
            if field in section:
                value = section[field]
                if isinstance(value, list):
                    value = tuple(value)
                setattr(root, field, value)
        if 'authors' in section:
            root.authors = tuple(
                Author.parse(str(author)) for author in section['authors'])
        if 'readme' in section:
            path = Path(section['readme'])
            if path.exists():
                root.readme = Readme(path=path)

        # read links
        for field in ('homepage', 'repository', 'documentation'):
            if field in section:
                root.links[field] = section[field]

        # read entrypoints
        root.entrypoints = []
        for name, content in section.get('scripts', {}).items():
            if isinstance(content, str):
                entrypoint = EntryPoint(name=name, path=content)
            else:
                entrypoint = EntryPoint(
                    name=name,
                    path=content['callable'],
                    extras=content['extras'],
                )
            root.entrypoints.append(entrypoint)
        for group_name, group_content in section.get('plugins', {}).items():
            for name, path in sorted(group_content.items()):
                root.entrypoints.append(
                    EntryPoint(name=name, path=path, group=group_name))
        root.entrypoints = tuple(root.entrypoints)

        # update repository
        root.repo = RepositoriesRegistry()
        if section.get('source'):
            for source in section['source']:
                root.repo.add_repo(url=source['url'], name=source['name'])
        root.repo.attach_config()

        # get envs for deps
        envs = defaultdict(set)
        for extra, deps in section.get('extras', {}).items():
            for dep in deps:
                envs[dep].add(extra)
        for dep in section.get('dependencies', {}):
            envs[dep].add('main')
        for dep in section.get('dev-dependencies', {}):
            envs[dep].add('dev')

        # read dependencies
        deps = []
        for section_name in ('dependencies', 'dev-dependencies'):
            for name, content in section.get(section_name, {}).items():
                if name == 'python' and section_name == 'dependencies':
                    root.python = RangeSpecifier(content)
                    continue
                deps.extend(
                    self._make_deps(
                        root=root,
                        name=name,
                        content=content,
                        envs=envs.get(name),
                    ))

        root.attach_dependencies(deps)
        return root
Example #17
@attr.s()
class RootDependency:
    raw_name = attr.ib(default='root')
    dependencies = attr.ib(factory=list, repr=False)

    # additional info strings
    version = attr.ib(default='0.0.0', repr=False)  # Version
    description = attr.ib(default='', repr=False)  # Summary
    license = attr.ib(default='', repr=False)  # License

    # additional info lists
    links = attr.ib(factory=dict, repr=False)  # Home-page, Download-URL
    authors = attr.ib(default=tuple(), repr=False)  # Author, Author-email
    keywords = attr.ib(default=tuple(), repr=False)  # Keywords
    classifiers = attr.ib(type=tuple, default=tuple(),
                          repr=False)  # Classifier
    platforms = attr.ib(default=tuple(), repr=False)  # Platform
    entrypoints = attr.ib(default=tuple(), repr=False)  # entry_points

    # additional info objects
    package = attr.ib(default=PackageRoot(Path('.').resolve()),
                      repr=False)  # packages, package_data
    python = attr.ib(default=RangeSpecifier(), repr=False)  # Requires-Python
    readme = attr.ib(default=None, repr=False)  # Description

    repo = None
    applied = False
    locked = False
    compat = True
    used = True
    constraint = None

    @cached_property
    def name(self) -> str:
        return canonicalize_name(self.raw_name)

    @cached_property
    def all_releases(self) -> Tuple[RootRelease]:
        release = RootRelease(
            raw_name=self.raw_name,
            dependencies=self.dependencies,
            version=self.version,
        )
        return (release, )

    @cached_property
    def group(self) -> Group:
        return Group(number=0, releases=self.all_releases)

    @property
    def groups(self) -> Tuple[Group]:
        return (self.group, )

    @property
    def python_compat(self):
        return True

    def attach_dependencies(self, dependencies):
        self.dependencies.extend(dependencies)

    def unlock(self):
        raise NotImplementedError

    def merge(self, dep):
        raise NotImplementedError

    def unapply(self, name: str):
        raise NotImplementedError

    def copy(self):
        return type(self)(**attr.asdict(self, recurse=False))

    @classmethod
    def get_metainfo(cls, other, *others):
        """Merge metainfo, but not dependencies
        """
        merged = attr.asdict(other, recurse=False)
        infos = [attr.asdict(other, recurse=False) for other in others]
        for key, value in merged.items():
            if value:
                continue
            values = (info[key] for info in infos if info[key])
            with suppress(StopIteration):
                merged[key] = next(values)
        root = cls(**merged)

        # get some metainfo from package
        if root.raw_name == 'root' and root.package.packages:
            root.raw_name = root.package.packages[0].module
        info = root.package.metainfo
        if root.version == '0.0.0' and info.version:
            root.version = info.version
        if not root.description and info.summary:
            root.description = info.summary
        if not root.license and info.license:
            root.license = info.license
        if not root.authors and info.authors:
            root.authors = tuple(
                Author.parse(author) for author in info.authors)

        return root

    def __str__(self):
        return self.name

    def __repr__(self):
        return '{cls}({name})'.format(cls=self.__class__.__name__,
                                      name=self.name)
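
get_metainfo fills empty fields of the first object from the later ones without touching dependencies. A simplified standalone sketch of that merge over plain dicts (merge_metainfo is a hypothetical name, not part of the class above):

from contextlib import suppress

def merge_metainfo(*infos: dict) -> dict:
    # hypothetical helper: take fields from the first dict, fill its empty values from the rest
    merged = dict(infos[0])
    for key, value in merged.items():
        if value:
            continue
        candidates = (info[key] for info in infos[1:] if info.get(key))
        with suppress(StopIteration):
            merged[key] = next(candidates)
    return merged

assert merge_metainfo(
    {'name': 'demo', 'license': '', 'authors': ()},
    {'name': 'other', 'license': 'MIT', 'authors': ('Jane Doe',)},
) == {'name': 'demo', 'license': 'MIT', 'authors': ('Jane Doe',)}
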
Example #18
    def load(self, path) -> RootDependency:
        if isinstance(path, str):
            path = Path(path)
        path = self._make_source_path_absolute(path)
        self._resolve_path = path.parent

        info = self._execute(path=path)
        if info is None:
            with chdir(path.parent):
                info = run_setup(path.name)

        root = RootDependency(
            raw_name=self._get(info, 'name'),
            version=self._get(info, 'version') or '0.0.0',
            package=PackageRoot(
                path=self.project_path or Path(),
                name=self._get(info, 'name') or None,
            ),

            description=self._get(info, 'description'),
            license=self._get(info, 'license'),

            keywords=tuple(self._get_list(info, 'keywords')),
            classifiers=tuple(self._get_list(info, 'classifiers')),
            platforms=tuple(self._get_list(info, 'platforms')),

            python=RangeSpecifier(self._get(info, 'python_requires')),
            readme=Readme.from_code(path=path),
        )

        # links
        for key, name in (('home', 'url'), ('download', 'download_url')):
            link = self._get(info, name)
            if link:
                root.links[key] = link

        # authors
        for name in ('author', 'maintainer'):
            author = self._get(info, name)
            if author:
                root.authors += (
                    Author(name=author, mail=self._get(info, name + '_email')),
                )

        # entrypoints
        entrypoints = []
        for group, content in (getattr(info, 'entry_points', {}) or {}).items():
            for entrypoint in content:
                entrypoints.append(EntryPoint.parse(text=entrypoint, group=group))
        root.entrypoints = tuple(entrypoints)

        # dependency_links
        urls = dict()
        for url in self._get_list(info, 'dependency_links'):
            parsed = parse_link(url)
            name = parsed.name.split('-')[0]
            urls[name] = url

        # dependencies
        for req in self._get_list(info, 'install_requires'):
            req = Requirement(req)
            root.attach_dependencies(DependencyMaker.from_requirement(
                source=root,
                req=req,
                url=urls.get(req.name),
            ))

        # extras
        for extra, reqs in getattr(info, 'extras_require', {}).items():
            extra, marker = self._split_extra_and_marker(extra)
            envs = {extra} if extra == 'dev' else {'main', extra}
            for req in reqs:
                req = Requirement(req)
                root.attach_dependencies(DependencyMaker.from_requirement(
                    source=root,
                    req=req,
                    marker=marker,
                    envs=envs,
                ))

        return root
Example #19
    def load(self,
             path: Union[Path, str] = None,
             paths: Iterable[Union[Path, str]] = None,
             names: Iterable[str] = None) -> RootDependency:
        if names:
            names = {
                canonicalize_name(name).replace('-', '_')
                for name in names
            }

        if paths is None:
            if path is not None:
                paths = [path]
            else:
                paths = sys.path

        root = RootDependency(raw_name='installed')
        parsers = [
            (EggInfoConverter(), '*.egg-info'),
            (WheelConverter(), '*.dist-info'),
        ]
        all_deps = dict()
        for path in paths:
            if isinstance(path, str):
                path = Path(path)
            if 'dist-packages' in path.parts:
                continue

            if path.suffix == '.egg':
                name = canonicalize_name(
                    path.with_suffix('').name.split('-')[0])
                if names is not None and name not in names:
                    continue

                # read *.egg dir
                egg_path = path / 'EGG-INFO'
                if not egg_path.exists():
                    continue
                subroot = EggInfoConverter().load_dir(egg_path)
                subroot.package = PackageRoot(path=path, name=subroot.name)
                if not subroot.package.packages:  # we cannot read a single *.py file yet
                    continue
                deps = DependencyMaker.from_root(dep=subroot, root=root)
                for dep in deps:
                    if dep.name in self._blacklist:
                        continue
                    if dep.name in all_deps:
                        all_deps[dep.name] |= dep
                    else:
                        all_deps[dep.name] = dep
                continue

            # read site-packages / dist-packages
            for converter, pattern in parsers:
                for info_path in path.glob(pattern):
                    name = canonicalize_name(
                        info_path.with_suffix('').name.split('-')[0])
                    if names is not None and name not in names:
                        continue
                    subroot = converter.load_dir(info_path)
                    deps = DependencyMaker.from_root(dep=subroot, root=root)
                    for dep in deps:
                        if dep.name in self._blacklist:
                            continue
                        if dep.name in all_deps:
                            all_deps[dep.name] |= dep
                        else:
                            all_deps[dep.name] = dep
        root.attach_dependencies(all_deps.values())
        return root
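
The directory scan above can be tried in isolation: every entry on sys.path is globbed for *.egg-info and *.dist-info directories, and the distribution name is the part of the directory name before the first dash. A small sketch of that discovery step (find_metadata_dirs is a hypothetical name for illustration):

import sys
from pathlib import Path

def find_metadata_dirs(paths=None):
    # hypothetical helper: yield (name, path) for every *.egg-info / *.dist-info entry found
    for raw in (paths or sys.path):
        path = Path(raw)
        if not path.is_dir():
            continue
        for pattern in ('*.egg-info', '*.dist-info'):
            for info_path in path.glob(pattern):
                name = info_path.with_suffix('').name.split('-')[0]
                yield name, info_path

# show a few distributions visible to the current interpreter
for name, info_path in sorted(find_metadata_dirs())[:5]:
    print(name, info_path)
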
Example #20
    def __call__(self) -> bool:
        old_version = None
        root = None
        package = PackageRoot(path=Path(self.config['project']))

        if 'from' in self.config:
            # get project metainfo
            loader = CONVERTERS[self.config['from']['format']]
            root = loader.load(path=self.config['from']['path'])
            if root.version != '0.0.0':
                package = root.package
                old_version = root.version
            else:
                self.logger.warning('cannot get version from `from` file')
        else:
            self.logger.warning('`from` file is not specified')

        if old_version is None:
            old_version = get_version_from_project(project=package)

        if old_version is None:
            if self.args.name == 'init':
                old_version = ''
            else:
                self.logger.error('cannot find old project version')
                return False

        # make new version
        new_version = bump_version(
            version=old_version,
            rule=self.args.name,
            scheme=self.config['versioning'],
        )
        self.logger.info('generated new version',
                         extra=dict(
                             old=old_version,
                             new=new_version,
                         ))

        # update version in project files
        paths = []
        for path in bump_project(project=package,
                                 old=old_version,
                                 new=new_version):
            paths.append(path)
            self.logger.info('file bumped', extra=dict(path=str(path)))

        # update version in project metadata
        if root is not None and root.version != '0.0.0':
            # so far we can regenerate metadata only for the poetry format
            if self.config['from']['format'] == 'poetry':
                paths.append(Path(self.config['from']['path']))
                root.version = new_version
                loader.dump(
                    project=root,
                    path=self.config['from']['path'],
                    reqs=[
                        Requirement(dep=dep, lock=loader.lock)
                        for dep in root.dependencies
                    ],
                )
            else:
                path = Path(self.config['from']['path'])
                with path.open('r', encoding='utf8') as stream:
                    content = stream.read()
                new_content = content.replace(str(root.version),
                                              str(new_version))
                if new_content == content:
                    self.logger.warning('cannot bump version in metadata file')
                else:
                    with path.open('w', encoding='utf8') as stream:
                        stream.write(new_content)

        # set git tag
        if self.config.get('tag'):
            project = Path(self.config['project'])
            if (project / '.git').exists():
                self.logger.info('commit and tag')
                ok = git_commit(
                    message='bump version to {}'.format(str(new_version)),
                    paths=paths,
                    project=project,
                )
                if not ok:
                    self.logger.error('cannot commit files')
                    return False
                ok = git_tag(
                    name='v.' + str(new_version),
                    project=project,
                )
                if not ok:
                    self.logger.error('cannot add tag into git repo')
                    return False

                self.logger.info(
                    'tag created, do not forget to push it: git push --tags')

        return True