Example #1
 def combine(self, existing):
     ""  # suppress inherited doc
     self.previous = existing.previous + [existing]
     existing.read()
     self._layer_refs.update(existing._layer_refs)
     self.read()
     new_pkgs = set()
     for line in self.lines:
         try:
             req = Requirement.parse(line)
             new_pkgs.add(req.name)
         except ValueError:
             pass  # ignore comments, blank lines, etc
     existing_lines = []
     for line in existing.lines:
         try:
             req = Requirement.parse(line)
             # new explicit reqs will override existing ones
             if req.name not in new_pkgs:
                 existing_lines.append(line)
             else:
                 existing_lines.append(
                     '# {}  # overridden by {}'.format(line, self.layer.url))
         except ValueError:
             existing_lines.append(line)  # ignore comments, blank lines, &c
     self.lines = existing_lines + self.lines
     return self
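For context, here is a minimal sketch of the API all of these examples lean on, assuming the requirements-parser package (requirements.requirement.Requirement); the attribute names shown are from that library's documented interface, not from the example above.

from requirements.requirement import Requirement

req = Requirement.parse('Django>=2.2,<4.0')
print(req.name)   # Django
print(req.specs)  # list of (operator, version) tuples, e.g. ('>=', '2.2')

# VCS lines parse too: the name comes from the #egg= fragment and the
# original URL (prefix included) is kept on req.uri.
vcs = Requirement.parse('git+https://github.com/org/repo.git#egg=mypkg')
print(vcs.name, vcs.vcs, vcs.uri)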
Example #2
def build_changelog(change: File) -> List[str]:
    lines = ['# Changelogs']
    patch = next(whatthepatch.parse_patch(change.patch))
    oldversions: Dict[str, str] = {}
    newversions: Dict[str, str] = {}
    for p in patch.changes:
        if len(p) > 1:
            if p[1] is None:
                req = Requirement.parse(p[2])
                oldversions[req.name] = req.specs[0][1]
            elif p[0] is None:
                req = Requirement.parse(p[2])
                newversions[req.name] = req.specs[0][1]
        else:
            lines.append(f'> Warning: {p[0]} is pinned to a specific version.')
    for package in newversions:
        # fall back to '0' when a package is newly added and has no old version
        old = packaging.version.parse(oldversions.get(package, '0'))
        new = packaging.version.parse(newversions[package])
        changes = changelogs.get(package)
        logged = False
        for version_string in changes.keys():
            v = packaging.version.parse(version_string)
            if old < v <= new:
                lines.append(f'## {package} {version_string}')
                lines.append(changes[version_string])
                logged = True
        if not logged:
            lines.append(f'## {package} {new}')
            lines.append('No release notes found.')
    return lines
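A minimal, self-contained sketch of the diff handling above, assuming the whatthepatch and requirements-parser packages: each change is indexable as (old line number, new line number, text), so a removed requirement line has change[1] is None and an added one has change[0] is None. The sample diff below is made up for illustration.

import whatthepatch
from requirements.requirement import Requirement

diff_text = """\
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,2 @@
-requests==2.25.0
+requests==2.26.0
 six==1.16.0
"""

oldversions, newversions = {}, {}
for change in next(whatthepatch.parse_patch(diff_text)).changes:
    old_no, new_no, text = change[0], change[1], change[2]
    if not text or text.startswith('#'):
        continue
    req = Requirement.parse(text)
    if new_no is None:        # line only present in the old file
        oldversions[req.name] = req.specs[0][1]
    elif old_no is None:      # line only present in the new file
        newversions[req.name] = req.specs[0][1]

print(oldversions, newversions)  # {'requests': '2.25.0'} {'requests': '2.26.0'}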
Example #3
    def _validate_requirements(self):
        """
        Make sure the format used in the requirements list is valid.

        :raises ValueError: if format is not valid
        """
        for line in self.requirements:
            Requirement.parse(line)
Example #4
 def validate_excludes(self, value):
     """Validates the excludes"""
     for pkg in value:
         try:
             Requirement.parse(pkg)
         except ValueError as ve:
             raise serializers.ValidationError(
                 _("excludes specifier {} is invalid. {}".format(pkg, ve)))
     return value
Example #5
def get_edx_owned_requirements(repo, requirements_files):
    """
    Given a GitHub repo object and a list of requirements files, get all
    requirements that are owned by edX, plus any dependencies that are not
    hosted on PyPI.
    """

    for file_path in requirements_files:

        # Get the file.
        content = repo.get_contents(file_path)
        text = content.decoded_content.decode('utf-8')
    
        # Go line by line because the requirements library won't work
        # if any of the lines fail to parse.
        for line in text.splitlines():
            try:
                req = Requirement.parse(line)
            except ValueError as e:
                #msg = "ERROR: {line}, {exception}"
                #print(msg.format(line=line, exception=e))
                continue
    
            pypi_data = get_pypi_data(req.name)
    
            if not pypi_data:
                print(req.uri.lstrip("git+").rstrip(".git"))
            elif 'info' in pypi_data:
                author = pypi_data['info']['author']
                home_page = pypi_data['info']['home_page']
    
                if author.lower() == "edx":
                    print(home_page.rstrip(".git"))
Example #6
def parse(reqstr):
    try:
        # Python 2.x compatibility
        if not isinstance(reqstr, basestring):
            reqstr = reqstr.read()
    except NameError:
        # Python 3.x only
        if not isinstance(reqstr, str):
            reqstr = reqstr.read()

    for line in reqstr.splitlines():
        line = line.strip()
        if not line or line.startswith('#'):
            # skip blank lines and comments (lines starting with #)
            continue
        elif line.startswith('-r') or line.startswith('--requirement'):
            yield line
        elif line.startswith('--'):
            yield line
        elif line.startswith('-f') or line.startswith('--find-links') or \
                line.startswith('-i') or line.startswith('--index-url') or \
                line.startswith('--extra-index-url') or \
                line.startswith('--no-index'):
            warnings.warn('Private repos not supported. Skipping.')
            continue
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            warnings.warn('Unused option --always-unzip. Skipping.')
            continue
        else:
            yield Requirement.parse(line)
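A hypothetical call to the generator above (the snippet itself assumes warnings and requirements.requirement.Requirement are already imported): option lines such as -r/-- are yielded back as raw strings, everything else comes out as parsed Requirement objects.

reqs_text = """\
# pinned dependencies
-r base.txt
Django>=2.2
requests==2.26.0
"""

for item in parse(reqs_text):
    if isinstance(item, str):
        print('option line :', item)
    else:
        print('requirement :', item.name, item.specs)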
Example #7
def get_missing_packages(requirements_file: str = None,
                         special_reqs: List[str] = []) -> List[Requirement]:
    needed = [Requirement.parse_line(sr) for sr in special_reqs]
    if requirements_file is not None:
        with open(requirements_file, 'r') as fd:
            needed += [r for r in requirements.parse(fd) if r.specifier]

    installed = get_installed_packages()
    needed = [
        r for r in needed if pkg_resources.safe_name(r.name) not in installed
    ]
    return needed
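Two smaller pieces the function above relies on, shown in isolation (a sketch assuming the requirements-parser package and setuptools' pkg_resources): requirements.parse accepts a file object and yields Requirement objects, and safe_name normalizes names for the installed-package comparison.

import io

import pkg_resources
import requirements

buf = io.StringIO("numpy>=1.21\npillow==9.0.1\n")
print([(r.name, r.specs) for r in requirements.parse(buf)])

# safe_name collapses runs of characters other than letters, digits and dots
# into a single dash, e.g. typing_extensions -> typing-extensions
print(pkg_resources.safe_name('typing_extensions'))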
Example #8
def test_deps_consistency():

    IGNORE = [
        "flake8", "isort", "black", "mypy", "pydocstyle", "importlib_metadata",
        "tensorflow-cpu"
    ]
    # Collect the deps from all requirements.txt
    REQ_FILES = [
        "requirements.txt", "requirements-pt.txt", "tests/requirements.txt",
        "docs/requirements.txt"
    ]
    folder = Path(__file__).parent.parent.parent.absolute()
    req_deps = {}
    for file in REQ_FILES:
        with open(folder.joinpath(file), "r") as f:
            _deps = [(req.name, req.specs) for req in requirements.parse(f)]

        for _dep in _deps:
            lib, specs = _dep
            assert req_deps.get(lib, specs) == specs, f"conflicting deps for {lib}"
            req_deps[lib] = specs

    # Collect the one from setup.py
    setup_deps = {}
    with open(folder.joinpath("setup.py"), "r") as f:
        setup = f.readlines()
    lines = setup[setup.index("_deps = [\n") + 1:]
    lines = [_dep.strip() for _dep in lines[:lines.index("]\n")]]
    lines = [_dep.split('"')[1] for _dep in lines if _dep.startswith('"')]
    _reqs = [Requirement.parse(_line) for _line in lines]
    _deps = [(req.name, req.specs) for req in _reqs]
    for _dep in _deps:
        lib, specs = _dep
        assert setup_deps.get(lib) is None, f"conflicting deps for {lib}"
        setup_deps[lib] = specs

    # Remove ignores
    for k in IGNORE:
        if isinstance(req_deps.get(k), list):
            del req_deps[k]
        if isinstance(setup_deps.get(k), list):
            del setup_deps[k]

    # Compare them
    assert len(req_deps) == len(setup_deps)
    for k, v in setup_deps.items():
        assert isinstance(req_deps.get(k), list)
        assert req_deps[k] == v, \
            f"Mismatch on dependency {k}: {v} from setup.py, {req_deps[k]} from requirements.txt"
Example #9
def find_requirement_in_list(package_name: str,
                             raw_old_requirements: List[str]) -> Optional[int]:
    match_line_num = None
    for old_requirement_num, old_requirement in enumerate(
            raw_old_requirements):
        requirement_str = old_requirement.strip()
        if requirement_str.startswith('#'):
            continue
        try:
            parsed_requirement = Requirement.parse(
                requirement_str) if requirement_str else None
        except ValueError:
            # happens on weird chars in requirements, like `parsed_requirement = 'A  # \x850'`
            continue
        if parsed_requirement and parsed_requirement.name == package_name:
            match_line_num = old_requirement_num
    return match_line_num
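A hypothetical call to the helper above: it returns the zero-based index of the last non-comment line whose parsed name matches the package, or None when nothing matches.

raw_lines = ['# base requirements', 'requests==2.25.0', 'six==1.16.0']
print(find_requirement_in_list('six', raw_lines))     # 2
print(find_requirement_in_list('pytest', raw_lines))  # None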
Example #10
def get_edx_owned_requirements(repo, requirements_files):
    """
    Given a GitHub repo object and a list of requirements files, get all
    requirements that are owned by edX, plus any dependencies that are not
    hosted on PyPI.
    """

    for file_path in requirements_files:

        # Get the file.
        content = repo.get_contents(file_path)
        text = content.decoded_content.decode('utf-8')

        # Go line by line because the requirements library won't work
        # if any of the lines fail to parse.
        for line in text.splitlines():
            try:
                req = Requirement.parse(line)
            except ValueError as e:
                #msg = "ERROR: {line}, {exception}"
                #print(msg.format(line=line, exception=e))
                continue

            pypi_data = get_pypi_data(req.name)

            if not pypi_data:
                print(req.uri.lstrip("git+").rstrip(".git"))
            elif 'info' in pypi_data:
                author = pypi_data['info']['author']
                home_page = pypi_data['info']['home_page']

                if author.lower() == "edx":
                    if home_page.endswith('.git'):
                        print(home_page[:-4])
                    else:
                        print(home_page)
Example #11
def sort_requirements(fn=None):
    """
    Prints to stdout the current pip-requirements.txt sorted by dependency.
    """

    ignore_packages = set(['setuptools'])

    package_names_dep = set()
    package_names_req = set()
    package_name_to_version = {}
    package_name_to_original = {}

    fn = fn or 'roles/all/pip-requirements.txt'

    i = 0
    for line in open(fn).readlines():
        i += 1
        try:
            line = line.strip()
            parent = Requirement.parse_line(line)
            print(parent.specs, parent.__dict__)
            package_name, package_version = line.split('==')
            if package_name in ignore_packages:
                continue
            package_name_to_original[package_name.lower()] = package_name
            package_names_req.add(package_name.lower())
            package_name_to_version[package_name.lower()] = package_version
        except Exception as e:
            print('Error on line %i.' % i, file=sys.stderr)
            raise

    package_to_deps = defaultdict(set)  # {package:set(dependencies)}

    depends_fn = get_dependencies_fn()
    reader = csv.DictReader(open(depends_fn))
    for line in reader:
        print(line, file=sys.stderr)
        package_name = line['package_name'].lower()
        if package_name in ignore_packages:
            continue
        dependency_name = line['dependency_name'].lower()
        if dependency_name in ignore_packages:
            continue
        package_names_dep.add(package_name)
        package_names_dep.add(dependency_name)
        package_to_deps[package_name].add(dependency_name)

    reqs_missing_deps = set(map(str.lower, package_names_req)).difference(
        set(map(str.lower, package_names_dep)))
    print('reqs_missing_deps:', reqs_missing_deps, file=sys.stderr)

    deps_missing_reqs = set(map(str.lower, package_names_dep)).difference(
        set(map(str.lower, package_names_req)))
    print('deps_missing_reqs:', deps_missing_reqs, file=sys.stderr)

    #     def sort_by_dep(a_name, b_name):
    #         if a_name in package_to_deps[b_name]:
    #             # b depends on a, so a should come first
    #             return -1
    #         elif b_name in package_to_deps[a_name]:
    #             # a depends on b, so a should come first
    #             return +1
    #         #else:
    #         #    return cmp(a_name, b_name)
    #         return 0

    for package_name in package_names_req:
        # touch the defaultdict so packages with no dependencies still get a node
        package_to_deps[package_name]

    all_names = common.topological_sort(package_to_deps)
    for name in all_names:
        if name not in package_name_to_original:
            # dependency-only package that is not pinned in the requirements file
            continue
        print('%s==%s' %
              (package_name_to_original[name], package_name_to_version[name]))
Example #12
 def safe_parse(req_str):
     try:
         return next(parse(req_str))
     except Exception:
         # parsing failed; fall back to a bare Requirement wrapping the raw line
         return Requirement(req_str)
Example #13
def update_dependency_cache(name=None, output=None):
    """
    Reads all pip package dependencies and writes them as CSV to the given
    output file, for later use when organizing pip-requirements.txt.
    """

    common.set_show(0)

    try:
        shutil.rmtree('./.env/build')
    except OSError:
        pass

    env.pip_path_versioned = env.pip_path % env

    #depends_fn = get_dependencies_fn(output)
    fout = open(output, 'w')
    #fout = sys.stdout
    writer = csv.DictWriter(fout, PIP_DEPENDS_HEADERS)
    writer.writerow(dict(zip(PIP_DEPENDS_HEADERS, PIP_DEPENDS_HEADERS)))

    package_to_fqv = {}
    for dep in pip_to_deps():
        #print dep
        assert dep.name not in package_to_fqv, 'Package %s specified multiple times!' % dep.name
        package_to_fqv[dep.name] = str(dep)

    #dep_tree = defaultdict(set) # {package:set([deps])}
    reqs = list(iter_pip_requirements())
    total = len(reqs)
    i = 0
    for line in reqs:
        i += 1

        if name and name not in line:
            continue

        print('line %s: %i %i %.02f%%' %
              (line, i, total, i / float(total) * 100),
              file=sys.stderr)

        env.pip_package = line
        env.pip_download_dir = tempfile.mkdtemp()
        cmd = env.pip_depend_command % env
        #with hide('output', 'running', 'warnings'):
        ret = local_or_dryrun(cmd, capture=True)
        print('ret:', ret)
        matches = PIP_DEP_PATTERN.findall(ret)  # [(child,parent)]
        print('matches:', matches)

        for child, parent in matches:
            try:
                child_line = child.strip()
                #print 'child_line:',child_line
                #child = Requirement(child_line)
                child_name = PIP_REQ_NAME_PATTERN.findall(child_line)[0]
                child_specs = PIP_REQ_SPEC_PATTERN.findall(child_line)
                #print 'child:',child_name,child_specs
                parent = Requirement.parse_line(parent.strip().split('->')[0])
                #print 'parent:',parent.__dict__
                #                 print('parent.specs:',parent.specs,bool(parent.specs)
                assert not parent.specs \
                or (parent.specs and parent.specs[0][0] in ('==', '>=', '<=', '!=', '<', '>')), \
                    'Invalid parent: %s (%s)' % (parent, parent.specs)

                #                 if parent.specs and parent.specs[0][0] == '==':
                #                     parent.specs[0] = list(parent.specs[0])
                #                     parent.specs[0][0] = '>='
                parent_version = ''
                if parent.specs:
                    parent_version = parent.specs[0][1]

                writer.writerow(
                    dict(
                        package_name=parent.name,
                        package_version=parent_version,
                        dependency_name=child_name,
                        dependency_specs=';'.join(
                            [''.join(_) for _ in child_specs]),
                    ))
                fout.flush()
            except Exception as e:
                print('Error: %s' % e, file=sys.stderr)
                print(e, file=sys.stderr)
                traceback.print_exc(file=sys.stderr)
                raise
Example #14
def get_reqs(requirement_1, requirement_2):
    return (Requirement.parse(requirement_1),
            Requirement.parse(requirement_2))
Example #15
    def from_requirement_str(cls, text):
        from requirements.requirement import Requirement

        req = Requirement.parse(text)
        return cls(package=req.name, specs=req.specs)
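Hypothetical usage, with PackageSpec standing in for whatever class this classmethod is defined on (the class itself is not shown, so the attribute names below are assumptions):

spec = PackageSpec.from_requirement_str('numpy>=1.21')
print(spec.package, spec.specs)  # numpy [('>=', '1.21')]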
Example #16
    def load_requirements(self, requirements):
        # create new environment file
        conda_env = dict()
        conda_env['channels'] = self.extra_channels
        reqs = []
        if isinstance(requirements['pip'], six.string_types):
            requirements['pip'] = requirements['pip'].split('\n')
        if isinstance(requirements.get('conda'), six.string_types):
            requirements['conda'] = requirements['conda'].split('\n')
        has_torch = False
        has_matplotlib = False
        try:
            cuda_version = int(self.session.config.get('agent.cuda_version',
                                                       0))
        except:
            cuda_version = 0

        # notice 'conda' entry with empty string is a valid conda requirements list, it means pip only
        # this should happen if experiment was executed on non-conda machine or old trains client
        conda_supported_req = requirements['pip'] if requirements.get(
            'conda', None) is None else requirements['conda']
        conda_supported_req_names = []
        for r in conda_supported_req:
            try:
                marker = list(parse(r))
            except:
                marker = None
            if not marker:
                continue

            m = MarkerRequirement(marker[0])
            conda_supported_req_names.append(m.name.lower())
            if m.req.name.lower() == 'matplotlib':
                has_matplotlib = True
            elif m.req.name.lower().startswith('torch'):
                has_torch = True

            if m.req.name.lower() in ('torch', 'pytorch'):
                has_torch = True
                m.req.name = 'pytorch'

            if m.req.name.lower() in ('tensorflow_gpu', 'tensorflow-gpu',
                                      'tensorflow'):
                has_torch = True
                m.req.name = 'tensorflow-gpu' if cuda_version > 0 else 'tensorflow'

            reqs.append(m)

        pip_requirements = []
        # if we have a conda list, the rest should be installed with pip,
        if requirements.get('conda', None) is not None:
            for r in requirements['pip']:
                try:
                    marker = list(parse(r))
                except:
                    marker = None
                if not marker:
                    continue

                m = MarkerRequirement(marker[0])
                m_name = m.name.lower()
                if m_name in conda_supported_req_names:
                    # this package is in the conda list,
                    # make sure that if we changed version and we match it in conda
                    conda_supported_req_names.remove(m_name)
                    for cr in reqs:
                        if m_name == cr.name.lower():
                            # match versions
                            cr.specs = m.specs
                            break
                else:
                    # not in conda, it is a pip package
                    pip_requirements.append(m)
                    if m_name == 'matplotlib':
                        has_matplotlib = True

            # remove any leftover conda packages (they were removed from the pip list)
            if conda_supported_req_names:
                reqs = [
                    r for r in reqs
                    if r.name.lower() not in conda_supported_req_names
                ]

        # Conda requirements Hacks:
        if has_matplotlib:
            reqs.append(MarkerRequirement(Requirement.parse('graphviz')))
            reqs.append(MarkerRequirement(
                Requirement.parse('python-graphviz')))
            reqs.append(MarkerRequirement(Requirement.parse('kiwisolver')))
        if has_torch and cuda_version == 0:
            reqs.append(MarkerRequirement(Requirement.parse('cpuonly')))

        # conform conda packages (version/name)
        for r in reqs:
            # remove .post from version numbers, it fails ~= version, and change == to ~=
            if r.specs and r.specs[0]:
                r.specs = [(r.specs[0][0].replace('==', '~='),
                            r.specs[0][1].split('.post')[0])]
            # conda always likes "-" not "_"
            r.req.name = r.req.name.replace('_', '-')

        while reqs:
            # notice, we give conda more freedom in version selection, to help it choose best combination
            conda_env['dependencies'] = [r.tostr() for r in reqs]
            with self.temp_file("conda_env",
                                yaml.dump(conda_env),
                                suffix=".yml") as name:
                print('Conda: Trying to install requirements:\n{}'.format(
                    conda_env['dependencies']))
                result = self._run_command(
                    ("env", "update", "-p", self.path, "--file", name))
            # check if we need to remove specific packages
            bad_req = self._parse_conda_result_bad_packges(result)
            if not bad_req:
                break

            solved = False
            for bad_r in bad_req:
                name = bad_r.split('[')[0].split('=')[0].split('~')[0].split(
                    '<')[0].split('>')[0]
                # look for name in requirements
                for r in reqs:
                    if r.name.lower() == name.lower():
                        pip_requirements.append(r)
                        reqs.remove(r)
                        solved = True
                        break

            # we couldn't remove even one package,
            # nothing we can do but try pip
            if not solved:
                pip_requirements.extend(reqs)
                break

        if pip_requirements:
            try:
                pip_req_str = [r.tostr() for r in pip_requirements]
                print(
                    'Conda: Installing requirements: step 2 - using pip:\n{}'.
                    format(pip_req_str))
                self.pip.load_requirements('\n'.join(pip_req_str))
            except Exception as e:
                print(e)
                raise e

        self.requirements_manager.post_install()
        return True
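In isolation, the spec-conforming step near the end of load_requirements amounts to the following (a minimal sketch, assuming the requirements-parser Requirement): a == pin becomes ~= and any .postN suffix is dropped so conda's solver will accept the spec.

from requirements.requirement import Requirement

r = Requirement.parse('pandas==1.3.0.post1')
op, version = r.specs[0]
print(op.replace('==', '~='), version.split('.post')[0])  # ~= 1.3.0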