Example #1
    def reverse_conflicts(self):
        """
        Returns a list of packages with conflicts against this package.
        """
        pkgs = Package.objects.normal().filter(conflicts__name=self.pkgname)
        if not self.arch.agnostic:
            # make sure we match architectures if possible
            pkgs = pkgs.filter(arch__in=self.applicable_arches())

        alpm = AlpmAPI()
        if not alpm.available:
            return pkgs

        # If we can use ALPM, we can filter out items that don't actually
        # conflict due to the version specification.
        pkgs = pkgs.prefetch_related('conflicts')
        new_pkgs = []
        for package in pkgs:
            for conflict in package.conflicts.all():
                if conflict.name != self.pkgname:
                    continue
                if not conflict.comparison or not conflict.version \
                        or alpm.compare_versions(self.full_version,
                        conflict.comparison, conflict.version):
                    new_pkgs.append(package)
        return new_pkgs
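An unversioned conflict entry always passes the filter above; only entries that carry both a comparison operator and a version are checked through ALPM. The check uses the same compare_versions() calls exercised by the AlpmAPI tests later in this section (Example #15); a small illustration, reusing the version strings from those tests:

    from packages.alpm import AlpmAPI

    alpm = AlpmAPI()
    if alpm.available:
        # a "pkg<=2.0" conflict matches a package whose version is 1.0 ...
        assert alpm.compare_versions("1.0", "<=", "2.0")
        # ... but a "pkg>2.0" conflict would not
        assert not alpm.compare_versions("1.0", ">", "2.0")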
Example #3
    def get_providers(self):
        '''Return providers of this related package. Does *not* include exact
        matches as it checks the Provision names only; use get_best_satisfier()
        instead for exact matches.'''
        pkgs = Package.objects.normal().filter(
            provides__name=self.name).order_by().distinct()
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)

        # If we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements.
        alpm = AlpmAPI()
        if alpm.available and self.comparison and self.version:
            pkgs = pkgs.prefetch_related('provides')
            new_pkgs = []
            for package in pkgs:
                for provide in package.provides.all():
                    if provide.name != self.name:
                        continue
                    if alpm.compare_versions(provide.version, self.comparison,
                                             self.version):
                        new_pkgs.append(package)
            pkgs = new_pkgs

        # Sort providers by preference. We sort those in same staging/testing
        # combination first, followed by others. We sort by a (staging,
        # testing) match tuple that will be (True, True) in the best case.
        key_func = lambda x: (x.repo.staging == self.pkg.repo.staging,
                              x.repo.testing == self.pkg.repo.testing)
        return sorted(pkgs, key=key_func, reverse=True)
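Sorting the boolean match tuple in reverse places exact staging/testing matches first: (True, True) sorts above (True, False) and (False, True), which in turn sort above (False, False). A plain-tuple illustration of that ordering:

    matches = [(False, True), (True, False), (False, False), (True, True)]
    assert sorted(matches, reverse=True) == [
        (True, True), (True, False), (False, True), (False, False)]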
Example #4
    def get_providers(self):
        '''Return providers of this related package. Does *not* include exact
        matches as it checks the Provision names only; use get_best_satisfier()
        instead for exact matches.'''
        pkgs = Package.objects.normal().filter(
                provides__name=self.name).order_by().distinct()
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)

        # If we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements.
        alpm = AlpmAPI()
        if alpm.available and self.comparison and self.version:
            pkgs = pkgs.prefetch_related('provides')
            new_pkgs = []
            for package in pkgs:
                for provide in package.provides.all():
                    if provide.name != self.name:
                        continue
                    if alpm.compare_versions(provide.version,
                            self.comparison, self.version):
                        new_pkgs.append(package)
            pkgs = new_pkgs

        # Sort providers by preference. We sort those in same staging/testing
        # combination first, followed by others. We sort by a (staging,
        # testing) match tuple that will be (True, True) in the best case.
        key_func = lambda x: (x.repo.staging == self.pkg.repo.staging,
                x.repo.testing == self.pkg.repo.testing)
        return sorted(pkgs, key=key_func, reverse=True)
Example #5
    def get_best_satisfier(self):
        '''Find a satisfier for this related package that best matches the
        given criteria. It will not search provisions, but will find packages
        by exact name, matching repo characteristics if possible.'''
        pkgs = Package.objects.normal().filter(pkgname=self.name)
        # TODO: this may in fact be faster: select only the fields we know will
        # actually get used, saving us some bandwidth and hopefully query
        # construction time. However, reality hasn't quite proved it out yet.
        #pkgs = Package.objects.select_related('repo', 'arch').only(
        #        'id', 'pkgname', 'epoch', 'pkgver', 'pkgrel',
        #        'repo__id', 'repo__name', 'repo__testing', 'repo__staging',
        #        'arch__id', 'arch__name').filter(pkgname=self.name)
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)
        # if we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements
        if self.comparison and self.version:
            alpm = AlpmAPI()
            pkgs = [
                pkg for pkg in pkgs
                if not alpm.available or alpm.compare_versions(
                    pkg.full_version, self.comparison, self.version)
            ]
        if len(pkgs) == 0:
            # couldn't find a package in the DB
            # it should be a virtual depend (or a removed package)
            return None
        if len(pkgs) == 1:
            return pkgs[0]
        # more than one package, see if we can't shrink it down
        # grab the first though in case we fail
        pkg = pkgs[0]
        # prevents yet more DB queries, these lists should be short;
        # after each grab the best available in case we remove all entries
        pkgs = [p for p in pkgs if p.repo.staging == self.pkg.repo.staging]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        pkgs = [p for p in pkgs if p.repo.testing == self.pkg.repo.testing]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        return pkg
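The narrowing at the end keeps the first candidate as a fallback, then prefers one from the same staging repo and, within those, one from the same testing repo, so emptying a filter never loses the match entirely. A toy sketch of that preference order; Repo and Candidate here are hypothetical stand-ins for illustration, not archweb models:

    from collections import namedtuple

    Repo = namedtuple('Repo', 'staging testing')
    Candidate = namedtuple('Candidate', 'name repo')

    target = Repo(staging=False, testing=True)
    candidates = [Candidate('a', Repo(True, True)),
                  Candidate('b', Repo(False, True))]

    best = candidates[0]   # fallback in case the filters empty the list
    same_staging = [c for c in candidates if c.repo.staging == target.staging]
    if same_staging:
        best = same_staging[0]
    same_testing = [c for c in same_staging if c.repo.testing == target.testing]
    if same_testing:
        best = same_testing[0]
    assert best.name == 'b'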
Example #6
    def get_providers(self):
        '''Return providers of this related package. Does *not* include exact
        matches as it checks the Provision names only; use get_best_satisfier()
        instead for exact matches.'''
        pkgs = Package.objects.normal().filter(
            provides__name=self.name).order_by().distinct()
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)

        # If we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements.
        alpm = AlpmAPI()
        if alpm.available and self.comparison and self.version:
            pkgs = pkgs.prefetch_related('provides')
            new_pkgs = []
            for package in pkgs:
                for provide in package.provides.all():
                    if provide.name != self.name:
                        continue
                    if alpm.compare_versions(provide.version, self.comparison,
                                             self.version):
                        new_pkgs.append(package)
            pkgs = new_pkgs

        # Logic here is to filter out packages that are in multiple repos if
        # they are not requested. For example, if testing is False, only show a
        # testing package if it doesn't exist in a non-testing repo.
        filtered = {}
        for package in pkgs:
            if package.pkgname not in filtered or \
                    package.repo.staging == self.pkg.repo.staging:
                filtered[package.pkgname] = package
        pkgs = filtered.values()

        filtered = {}
        for package in pkgs:
            if package.pkgname not in filtered or \
                    package.repo.testing == self.pkg.repo.testing:
                filtered[package.pkgname] = package
        pkgs = filtered.values()

        return pkgs
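Each dictionary pass keeps one provider per package name: the first one seen, unless a later one matches the requested staging (then testing) flag, in which case it overwrites the earlier entry. A tiny illustration with plain dicts as hypothetical stand-ins for the provider packages:

    want_staging = False
    providers = [{'pkgname': 'foo', 'staging': True},
                 {'pkgname': 'foo', 'staging': False}]

    filtered = {}
    for p in providers:
        if p['pkgname'] not in filtered or p['staging'] == want_staging:
            filtered[p['pkgname']] = p
    # the non-staging provider wins because staging was not requested
    assert filtered['foo']['staging'] is False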
Example #7
    def get_providers(self):
        '''Return providers of this related package. Does *not* include exact
        matches as it checks the Provision names only; use get_best_satisfier()
        instead for exact matches.'''
        pkgs = Package.objects.normal().filter(
                provides__name=self.name).order_by().distinct()
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)

        # If we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements.
        alpm = AlpmAPI()
        if alpm.available and self.comparison and self.version:
            pkgs = pkgs.prefetch_related('provides')
            new_pkgs = []
            for package in pkgs:
                for provide in package.provides.all():
                    if provide.name != self.name:
                        continue
                    if alpm.compare_versions(provide.version,
                            self.comparison, self.version):
                        new_pkgs.append(package)
            pkgs = new_pkgs

        # Logic here is to filter out packages that are in multiple repos if
        # they are not requested. For example, if testing is False, only show a
        # testing package if it doesn't exist in a non-testing repo.
        filtered = {}
        for package in pkgs:
            if package.pkgname not in filtered or \
                    package.repo.staging == self.pkg.repo.staging:
                filtered[package.pkgname] = package
        pkgs = filtered.values()

        filtered = {}
        for package in pkgs:
            if package.pkgname not in filtered or \
                    package.repo.testing == self.pkg.repo.testing:
                filtered[package.pkgname] = package
        pkgs = filtered.values()

        return pkgs
Example #8
    def get_best_satisfier(self):
        '''Find a satisfier for this related package that best matches the
        given criteria. It will not search provisions, but will find packages
        by exact name, matching repo characteristics if possible.'''
        pkgs = Package.objects.normal().filter(pkgname=self.name)
        # TODO: this may in fact be faster: select only the fields we know will
        # actually get used, saving us some bandwidth and hopefully query
        # construction time. However, reality hasn't quite proved it out yet.
        #pkgs = Package.objects.select_related('repo', 'arch').only(
        #        'id', 'pkgname', 'epoch', 'pkgver', 'pkgrel',
        #        'repo__id', 'repo__name', 'repo__testing', 'repo__staging',
        #        'arch__id', 'arch__name').filter(pkgname=self.name)
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)
        # if we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements
        if self.comparison and self.version:
            alpm = AlpmAPI()
            pkgs = [pkg for pkg in pkgs if not alpm.available or
                    alpm.compare_versions(pkg.full_version, self.comparison,
                        self.version)]
        if len(pkgs) == 0:
            # couldn't find a package in the DB
            # it should be a virtual depend (or a removed package)
            return None
        if len(pkgs) == 1:
            return pkgs[0]
        # more than one package, see if we can't shrink it down
        # grab the first though in case we fail
        pkg = pkgs[0]
        # prevents yet more DB queries, these lists should be short;
        # after each grab the best available in case we remove all entries
        pkgs = [p for p in pkgs if p.repo.staging == self.pkg.repo.staging]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        pkgs = [p for p in pkgs if p.repo.testing == self.pkg.repo.testing]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        return pkg
Example #9
def test_behavior_when_unavailable():
    mock_alpm = AlpmAPI()
    mock_alpm.available = False

    assert mock_alpm.version() is None
    assert mock_alpm.vercmp("1.0", "1.0") is None
    assert mock_alpm.compare_versions("1.0", "=", "1.0") is None
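Assigning to the attribute works here, but the same check can also be written with pytest's monkeypatch fixture so the flag is restored automatically after the test; a sketch, assuming available is an ordinary attribute on the instance or its class:

    from packages.alpm import AlpmAPI

    def test_behavior_when_unavailable(monkeypatch):
        alpm = AlpmAPI()
        # force the "libalpm unavailable" code path for this test only
        monkeypatch.setattr(alpm, "available", False)

        assert alpm.version() is None
        assert alpm.vercmp("1.0", "1.0") is None
        assert alpm.compare_versions("1.0", "=", "1.0") is None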
Example #10
    def get_best_satisfier(self):
        '''Find a satisfier for this related package that best matches the
        given criteria. It will not search provisions, but will find packages
        by exact name, matching repo characteristics if possible.'''
        pkgs = Package.objects.normal().filter(pkgname=self.name)
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)
        # if we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements
        if self.comparison and self.version:
            alpm = AlpmAPI()
            pkgs = [
                pkg for pkg in pkgs
                if not alpm.available or alpm.compare_versions(
                    pkg.full_version, self.comparison, self.version)
            ]
        if len(pkgs) == 0:
            # couldn't find a package in the DB
            # it should be a virtual depend (or a removed package)
            return None
        if len(pkgs) == 1:
            return pkgs[0]
        # more than one package, see if we can't shrink it down
        # grab the first though in case we fail
        pkg = pkgs[0]
        # prevents yet more DB queries, these lists should be short;
        # after each grab the best available in case we remove all entries
        pkgs = [p for p in pkgs if p.repo.staging == self.pkg.repo.staging]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        pkgs = [p for p in pkgs if p.repo.testing == self.pkg.repo.testing]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        return pkg
Example #11
    def get_best_satisfier(self):
        '''Find a satisfier for this related package that best matches the
        given criteria. It will not search provisions, but will find packages
        by exact name, matching repo characteristics if possible.'''
        pkgs = Package.objects.normal().filter(pkgname=self.name)
        if not self.pkg.arch.agnostic:
            # make sure we match architectures if possible
            arches = self.pkg.applicable_arches()
            pkgs = pkgs.filter(arch__in=arches)
        # if we have a comparison operation, make sure the packages we grab
        # actually satisfy the requirements
        if self.comparison and self.version:
            alpm = AlpmAPI()
            pkgs = [pkg for pkg in pkgs if not alpm.available or
                    alpm.compare_versions(pkg.full_version, self.comparison,
                        self.version)]
        if len(pkgs) == 0:
            # couldn't find a package in the DB
            # it should be a virtual depend (or a removed package)
            return None
        if len(pkgs) == 1:
            return pkgs[0]
        # more than one package, see if we can't shrink it down
        # grab the first though in case we fail
        pkg = pkgs[0]
        # prevents yet more DB queries, these lists should be short;
        # after each grab the best available in case we remove all entries
        pkgs = [p for p in pkgs if p.repo.staging == self.pkg.repo.staging]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        pkgs = [p for p in pkgs if p.repo.testing == self.pkg.repo.testing]
        if len(pkgs) > 0:
            pkg = pkgs[0]

        return pkg
Example #12
    def get_requiredby(self):
        """
        Returns a list of dependency objects for packages that require this
        one. An attempt is made to keep this list slim by including the
        requiring package from the same testing/staging category as this
        package when that check makes sense.
        """
        from packages.models import Depend
        sorttype = '''(CASE deptype
        WHEN 'D' THEN 0
        WHEN 'O' THEN 1
        WHEN 'M' THEN 2
        WHEN 'C' THEN 3
        ELSE 1000 END)'''
        name_clause = '''packages_depend.name IN (
        SELECT %s UNION ALL
        SELECT z.name FROM packages_provision z WHERE z.pkg_id = %s
        )'''
        requiredby = Depend.objects.select_related('pkg',
                'pkg__arch', 'pkg__repo').extra(
                select={'sorttype': sorttype},
                where=[name_clause], params=[self.pkgname, self.id]).order_by(
                'sorttype', 'pkg__pkgname',
                'pkg__arch__name', 'pkg__repo__name')
        if not self.arch.agnostic:
            # make sure we match architectures if possible
            requiredby = requiredby.filter(
                    pkg__arch__in=self.applicable_arches())

        # if we can use ALPM, ensure our returned Depend objects abide by the
        # version comparison operators they may specify
        alpm = AlpmAPI()
        if alpm.available:
            provides = self.provides.all()
            new_rqd = []
            for dep in requiredby:
                if not dep.comparison or not dep.version:
                    # no comparison/version, so always let it through
                    new_rqd.append(dep)
                elif self.pkgname == dep.name:
                    # depends on this package, so check it directly
                    if alpm.compare_versions(self.full_version,
                            dep.comparison, dep.version):
                        new_rqd.append(dep)
                else:
                    # it must be a provision of ours at this point
                    for provide in (p for p in provides if p.name == dep.name):
                        if alpm.compare_versions(provide.version,
                                dep.comparison, dep.version):
                            new_rqd.append(dep)
                            break
            requiredby = new_rqd

        # sort out duplicate packages; this happens if something has a double
        # versioned depend such as a kernel module
        requiredby = [list(vals)[0] for _, vals in
                groupby(requiredby, lambda x: x.pkg.id)]
        if len(requiredby) == 0:
            return requiredby

        # do we have duplicate pkgbase values for non-primary depends?
        # if so, filter it down to base packages only
        def grouper(depend):
            p = depend.pkg
            return (depend.deptype, p.pkgbase, p.repo.testing, p.repo.staging)

        filtered = []
        for (typ, pkgbase, _, _), dep_pkgs in groupby(requiredby, grouper):
            dep_pkgs = list(dep_pkgs)
            if typ == 'D' or len(dep_pkgs) == 1:
                filtered.extend(dep_pkgs)
            else:
                filtered.append(DependStandin(dep_pkgs))

        # find another package by this name in a different testing or staging
        # repo; if we can't, we can short-circuit some checks
        repo_q = (Q(repo__testing=(not self.repo.testing)) |
                Q(repo__staging=(not self.repo.staging)))
        if not Package.objects.filter(
                repo_q, pkgname=self.pkgname, arch=self.arch
                ).exclude(id=self.id).exists():
            # there isn't one? short circuit, all required by entries are fine
            return filtered

        trimmed = []
        # for each unique package name, try to screen our package list down to
        # those packages in the same testing and staging category (yes or no)
        # iff there is a package in the same testing and staging category.
        for _, dep_pkgs in groupby(filtered, lambda x: x.pkg.pkgname):
            dep_pkgs = list(dep_pkgs)
            dep = dep_pkgs[0]
            if len(dep_pkgs) > 1:
                dep_pkgs = [d for d in dep_pkgs
                        if d.pkg.repo.testing == self.repo.testing and
                        d.pkg.repo.staging == self.repo.staging]
                if len(dep_pkgs) > 0:
                    dep = dep_pkgs[0]
            trimmed.append(dep)

        return trimmed
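The deduplication above relies on itertools.groupby, which only merges consecutive runs of equal keys, so it depends on the ordering applied to the queryset to keep duplicate entries next to each other. A quick reminder of that behaviour:

    from itertools import groupby

    # groupby collapses only adjacent equal keys; unsorted input keeps repeats
    assert [key for key, _ in groupby([1, 1, 2, 1])] == [1, 2, 1]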
Example #13
    def get_requiredby(self):
        """
        Returns a list of dependency objects for packages that require this
        one. An attempt is made to keep this list slim by including the
        requiring package from the same testing/staging category as this
        package when that check makes sense.
        """
        from packages.models import Depend
        sorttype = '''(CASE deptype
        WHEN 'D' THEN 0
        WHEN 'O' THEN 1
        WHEN 'M' THEN 2
        WHEN 'C' THEN 3
        ELSE 1000 END)'''
        name_clause = '''packages_depend.name IN (
        SELECT %s UNION ALL
        SELECT z.name FROM packages_provision z WHERE z.pkg_id = %s
        )'''
        requiredby = Depend.objects.select_related('pkg',
                'pkg__arch', 'pkg__repo').extra(
                select={'sorttype': sorttype},
                where=[name_clause], params=[self.pkgname, self.id]).order_by(
                'sorttype', 'pkg__pkgname',
                'pkg__arch__name', 'pkg__repo__name')
        if not self.arch.agnostic:
            # make sure we match architectures if possible
            requiredby = requiredby.filter(
                    pkg__arch__in=self.applicable_arches())

        # if we can use ALPM, ensure our returned Depend objects abide by the
        # version comparison operators they may specify
        alpm = AlpmAPI()
        if alpm.available:
            provides = self.provides.all()
            new_rqd = []
            for dep in requiredby:
                if not dep.comparison or not dep.version:
                    # no comparison/version, so always let it through
                    new_rqd.append(dep)
                elif self.pkgname == dep.name:
                    # depends on this package, so check it directly
                    if alpm.compare_versions(self.full_version,
                            dep.comparison, dep.version):
                        new_rqd.append(dep)
                else:
                    # it must be a provision of ours at this point
                    for provide in (p for p in provides if p.name == dep.name):
                        if alpm.compare_versions(provide.version,
                                dep.comparison, dep.version):
                            new_rqd.append(dep)
                            break
            requiredby = new_rqd

        # sort out duplicate packages; this happens if something has a double
        # versioned depend such as a kernel module
        requiredby = [list(vals)[0] for _, vals in
                groupby(requiredby, lambda x: x.pkg.id)]
        if not requiredby:
            return requiredby

        # do we have duplicate pkgbase values for non-primary depends?
        # if so, filter it down to base packages only
        def grouper(depend):
            p = depend.pkg
            return (depend.deptype, p.pkgbase, p.repo.testing, p.repo.staging)

        filtered = []
        for (typ, _, _, _), dep_pkgs in groupby(requiredby, grouper):
            dep_pkgs = list(dep_pkgs)
            if typ == 'D' or len(dep_pkgs) == 1:
                filtered.extend(dep_pkgs)
            else:
                filtered.append(DependStandin(dep_pkgs))

        # find another package by this name in a different testing or staging
        # repo; if we can't, we can short-circuit some checks
        repo_q = (Q(repo__testing=(not self.repo.testing)) |
                Q(repo__staging=(not self.repo.staging)))
        if not Package.objects.filter(
                repo_q, pkgname=self.pkgname, arch=self.arch
                ).exclude(id=self.id).exists():
            # there isn't one? short circuit, all required by entries are fine
            return filtered

        trimmed = []
        # for each unique package name, try to screen our package list down to
        # those packages in the same testing and staging category (yes or no)
        # iff there is a package in the same testing and staging category.
        for _, dep_pkgs in groupby(filtered, lambda x: x.pkg.pkgname):
            dep_pkgs = list(dep_pkgs)
            dep = dep_pkgs[0]
            if len(dep_pkgs) > 1:
                dep_pkgs = [d for d in dep_pkgs
                        if d.pkg.repo.testing == self.repo.testing and
                        d.pkg.repo.staging == self.repo.staging]
                if len(dep_pkgs) > 0:
                    dep = dep_pkgs[0]
            trimmed.append(dep)

        return trimmed
Example #14
    def get_requiredby(self):
        """
        Returns a list of package objects. An attempt will be made to keep this
        list slim by including the corresponding package in the same testing
        category as this package if that check makes sense.
        """
        from packages.models import Depend
        provides = self.provides.all()
        provide_names = set(provide.name for provide in provides)
        provide_names.add(self.pkgname)
        requiredby = Depend.objects.select_related('pkg',
                'pkg__arch', 'pkg__repo').filter(
                name__in=provide_names).order_by(
                'pkg__pkgname', 'pkg__arch__name', 'pkg__repo__name')
        if not self.arch.agnostic:
            # make sure we match architectures if possible
            requiredby = requiredby.filter(
                    pkg__arch__in=self.applicable_arches())

        # if we can use ALPM, ensure our returned Depend objects abide by the
        # version comparison operators they may specify
        alpm = AlpmAPI()
        if alpm.available:
            new_rqd = []
            for dep in requiredby:
                if not dep.comparison or not dep.version:
                    # no comparison/version, so always let it through
                    new_rqd.append(dep)
                elif self.pkgname == dep.name:
                    # depends on this package, so check it directly
                    if alpm.compare_versions(self.full_version,
                            dep.comparison, dep.version):
                        new_rqd.append(dep)
                else:
                    # it must be a provision of ours at this point
                    for provide in (p for p in provides if p.name == dep.name):
                        if alpm.compare_versions(provide.version,
                                dep.comparison, dep.version):
                            new_rqd.append(dep)
                            break
            requiredby = new_rqd

        # sort out duplicate packages; this happens if something has a double
        # versioned depend such as a kernel module
        requiredby = [list(vals)[0] for _, vals in
                groupby(requiredby, lambda x: x.pkg.id)]
        if len(requiredby) == 0:
            return requiredby

        # find another package by this name in a different testing or staging
        # repo; if we can't, we can short-circuit some checks
        repo_q = (Q(repo__testing=(not self.repo.testing)) |
                Q(repo__staging=(not self.repo.staging)))
        if not Package.objects.filter(
                repo_q, pkgname=self.pkgname, arch=self.arch
                ).exclude(id=self.id).exists():
            # there isn't one? short circuit, all required by entries are fine
            return requiredby

        trimmed = []
        # for each unique package name, try to screen our package list down to
        # those packages in the same testing and staging category (yes or no)
        # iff there is a package in the same testing and staging category.
        for _, dep_pkgs in groupby(requiredby, lambda x: x.pkg.pkgname):
            dep_pkgs = list(dep_pkgs)
            dep = dep_pkgs[0]
            if len(dep_pkgs) > 1:
                dep_pkgs = [d for d in dep_pkgs
                        if d.pkg.repo.testing == self.repo.testing and
                        d.pkg.repo.staging == self.repo.staging]
                if len(dep_pkgs) > 0:
                    dep = dep_pkgs[0]
            trimmed.append(dep)
        return trimmed
Example #15
from packages.alpm import AlpmAPI

import pytest

alpm = AlpmAPI()


@pytest.mark.skipif(not alpm.available, reason="ALPM is unavailable")
def test_version():
    version = alpm.version()
    assert version
    version = version.split(b'.')
    # the version string has three dot-separated components, e.g. '7.0.2'
    assert len(version) == 3


@pytest.mark.skipif(not alpm.available, reason="ALPM is unavailable")
def test_vercmp():
    assert alpm.vercmp("1.0", "1.0") == 0
    assert alpm.vercmp("1.1", "1.0") == 1


@pytest.mark.skipif(not alpm.available, reason="ALPM is unavailable")
def test_compare_versions():
    assert alpm.compare_versions("1.0", "<=", "2.0")
    assert alpm.compare_versions("1.0", "<", "2.0")
    assert not alpm.compare_versions("1.0", ">=", "2.0")
    assert not alpm.compare_versions("1.0", ">", "2.0")
    assert alpm.compare_versions("1:1.0", ">", "2.0")
    assert not alpm.compare_versions("1.0.2", ">=", "2.1.0")
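Taken together, these assertions describe compare_versions() as applying the comparison operator to the sign of vercmp(). A pure-Python stand-in that satisfies the same assertions given any vercmp-style function; this is only a sketch of the operator mapping, not the archweb implementation:

    import operator

    OPERATORS = {'=': operator.eq, '<': operator.lt, '<=': operator.le,
                 '>': operator.gt, '>=': operator.ge}

    def compare_versions_sketch(ver_a, comparison, ver_b, vercmp):
        """Return True if `ver_a <comparison> ver_b` according to vercmp()."""
        return OPERATORS[comparison](vercmp(ver_a, ver_b), 0)

For instance, compare_versions_sketch('1.0', '<=', '2.0', alpm.vercmp) is True whenever ALPM is available, matching the first assertion above.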
Example #16
    def get_requiredby(self):
        """
        Returns a list of package objects. An attempt will be made to keep this
        list slim by including the corresponding package in the same testing
        category as this package if that check makes sense.
        """
        from packages.models import Depend
        provides = self.provides.all()
        provide_names = set(provide.name for provide in provides)
        provide_names.add(self.pkgname)
        requiredby = Depend.objects.select_related(
            'pkg', 'pkg__arch',
            'pkg__repo').filter(name__in=provide_names).order_by(
                'pkg__pkgname', 'pkg__arch__name', 'pkg__repo__name')
        if not self.arch.agnostic:
            # make sure we match architectures if possible
            requiredby = requiredby.filter(
                pkg__arch__in=self.applicable_arches())

        # if we can use ALPM, ensure our returned Depend objects abide by the
        # version comparison operators they may specify
        alpm = AlpmAPI()
        if alpm.available:
            new_rqd = []
            for dep in requiredby:
                if not dep.comparison or not dep.version:
                    # no comparison/version, so always let it through
                    new_rqd.append(dep)
                elif self.pkgname == dep.name:
                    # depends on this package, so check it directly
                    if alpm.compare_versions(self.full_version, dep.comparison,
                                             dep.version):
                        new_rqd.append(dep)
                else:
                    # it must be a provision of ours at this point
                    for provide in (p for p in provides if p.name == dep.name):
                        if alpm.compare_versions(provide.version,
                                                 dep.comparison, dep.version):
                            new_rqd.append(dep)
                            break
            requiredby = new_rqd

        # sort out duplicate packages; this happens if something has a double
        # versioned depend such as a kernel module
        requiredby = [
            list(vals)[0]
            for _, vals in groupby(requiredby, lambda x: x.pkg.id)
        ]
        if len(requiredby) == 0:
            return requiredby

        # find another package by this name in a different testing or staging
        # repo; if we can't, we can short-circuit some checks
        repo_q = (Q(repo__testing=(not self.repo.testing))
                  | Q(repo__staging=(not self.repo.staging)))
        if not Package.objects.filter(
                repo_q, pkgname=self.pkgname,
                arch=self.arch).exclude(id=self.id).exists():
            # there isn't one? short circuit, all required by entries are fine
            return requiredby

        trimmed = []
        # for each unique package name, try to screen our package list down to
        # those packages in the same testing and staging category (yes or no)
        # iff there is a package in the same testing and staging category.
        for _, dep_pkgs in groupby(requiredby, lambda x: x.pkg.pkgname):
            dep_pkgs = list(dep_pkgs)
            dep = dep_pkgs[0]
            if len(dep_pkgs) > 1:
                dep_pkgs = [
                    d for d in dep_pkgs
                    if d.pkg.repo.testing == self.repo.testing
                    and d.pkg.repo.staging == self.repo.staging
                ]
                if len(dep_pkgs) > 0:
                    dep = dep_pkgs[0]
            trimmed.append(dep)
        return trimmed