Ejemplo n.º 1
0
    def __init__(self, input=None):
        """Initialize from a filename (string) or an open file object.

        If *input* is a string it is treated as a path and parsed via
        self.load(); otherwise it is assumed to be file-like and is
        wrapped in a TagFile directly.
        """
        # isinstance() is preferred over a type() equality check (PEP 8)
        # and also accepts str subclasses; behaviour is otherwise
        # unchanged (Python 2: types.StringType is str).
        if isinstance(input, types.StringType):
            self._filename = input
            self.load(input)
        else:
            self.tagfile = TagFile(input)
 def get_cruft(self):
     """Count dpkg status entries stuck in "purge ok not-installed".

     Returns a single-element list holding a DpkgStatusCruft summary,
     or an empty list when nothing was found.
     """
     n_cruft = 0
     # Use a with-block so the status file is closed again; the
     # original leaked the handle returned by open().
     with open(self.status) as fp:
         tagf = TagFile(fp)
         while tagf.step():
             statusline = tagf.section.get("Status")
             (want, flag, status) = statusline.split()
             if want == "purge" and flag == "ok" and status == "not-installed":
                 n_cruft += 1
     logging.debug("DpkgStatusPlugin found %s cruft items" % n_cruft)
     if n_cruft:
         return [DpkgStatusCruft(n_cruft)]
     return []  # pragma: no cover
 def get_cruft(self):
     """Count packages left in the "purge ok not-installed" state.

     Returns [DpkgStatusCruft(n)] when n entries were found, else [].
     """
     n_cruft = 0
     # Context manager closes the status file (previously leaked).
     with open(self.status) as fp:
         tagf = TagFile(fp)
         while tagf.step():
             statusline = tagf.section.get("Status")
             (want, flag, status) = statusline.split()
             if want == "purge" and flag == "ok" and status == "not-installed":
                 n_cruft += 1
     logging.debug("DpkgStatusPlugin found %s cruft items" % n_cruft)
     if n_cruft:
         return [DpkgStatusCruft(n_cruft)]
     return [] # pragma: no cover
Ejemplo n.º 4
0
    def load(self, input):
        """
        Load control file
        """
        # NOTE: the file object is intentionally left open; TagFile
        # reads from it lazily while step() is called later on.
        plain_file = open(input)
        tagfile = TagFile(plain_file)

        # Loop to skip PGP signature
        # Sections whose first key begins with '-' (the
        # "-----BEGIN PGP SIGNED MESSAGE-----" pseudo-field) are skipped
        # until the first real control section is reached.
        # NOTE(review): section.keys()[0] relies on Python 2's
        # list-returning keys(); under Python 3 this would need
        # list(...) — confirm the target interpreter.
        while tagfile.step():
            if tagfile.section.keys()[0][0] != '-':
                break

        self.tagfile = tagfile
Ejemplo n.º 5
0
def _parse_tagfile(filename, function):
    """Parse a (possibly gzip/bzip2 compressed) tagfile into a dict.

    *function* is called with each parsed section and must return a
    (key, value) pair; all pairs are collected into the returned dict.
    """
    pdict = {}
    # endswith() is clearer than manual slice comparison.
    if filename.endswith('.gz'):
        tagfile = gunzip(filename)
    elif filename.endswith('.bz2'):
        tagfile = bunzip(filename)
    else:
        # open() replaces the Python 2 builtin file(), which was
        # removed in Python 3; behaviour is identical.
        tagfile = open(filename)
    parser = TagFile(tagfile)
    # Old python-apt API spelling: Step()/Section (capitalised).
    while parser.Step():
        k, v = function(parser.Section)
        pdict[k] = v
    return pdict
Ejemplo n.º 6
0
    def load(self, input):
        """
        Load control file
        """
        # The handle stays open on purpose: TagFile consumes it lazily
        # as the caller steps through sections.
        plain_file = open(input)
        tagfile = TagFile(plain_file)
        
        # Loop to skip PGP signature
        # A leading '-' in the first key marks a PGP armor pseudo-section;
        # keep stepping until a real control section appears.
        # NOTE(review): keys()[0] assumes Python 2 semantics where
        # keys() returns a list — confirm before running on Python 3.
        while tagfile.step():
            if tagfile.section.keys()[0][0] != '-':
                break

        self.tagfile = tagfile
 def get_cruft(self):
     """Scan the dpkg status file and report purged leftovers."""
     found = 0
     with open(self.status) as status_file:
         parser = TagFile(status_file)
         while parser.step():
             # A "purge ok not-installed" triple marks a cruft entry.
             (want, flag, status) = parser.section.get('Status').split()
             if (want == 'purge' and flag == 'ok'
                     and status == 'not-installed'):
                 found += 1
     logging.debug('DpkgStatusPlugin found {} cruft items'.format(found))
     return [DpkgStatusCruft(found)] if found else []
Ejemplo n.º 8
0
 def __init__(self, input=None):
     """Accept either a path (string) or an already-open file object."""
     # isinstance() instead of type() equality (PEP 8); behaviour is
     # unchanged for Python 2's str type (types.StringType).
     if isinstance(input, types.StringType):
         self._filename = input
         self.load(input)
     else:
         self.tagfile = TagFile(input)
Ejemplo n.º 9
0
    def _build_installed_pkgs_cache(self, dist, component):
        """Fill self._installedPkgs with the newest seen version of each
        binary package, keyed by "<name>_<arch>".

        Reads the Packages.gz index of every supported architecture.
        """
        for arch in self._supportedArchs:
            source_path = self._archivePath + "/dists/%s/%s/binary-%s/Packages.gz" % (dist, component, arch)
            # with-block closes the index again (the original leaked it).
            with gzip.open(source_path, 'rb') as f:
                tagf = TagFile(f)
                for section in tagf:
                    # make sure we have the right arch (closes bug in installed-detection)
                    if section['Architecture'] != arch:
                        continue

                    pkgversion = section['Version']
                    pkgname = section['Package']
                    pkgsource = section.get('Source', '')
                    # if source has different version, we cheat and set the binary pkg version
                    # to the source package version
                    if "(" in pkgsource:
                        m = re.search(r"\((.*)\)", pkgsource)
                        s = m.group(1).strip()
                        if s != "":
                            pkgversion = s
                    pkid = "%s_%s" % (pkgname, arch)
                    if pkid in self._installedPkgs:
                        # Keep the cached entry unless it is older than
                        # the version we just read.
                        regVersion = self._installedPkgs[pkid]
                        compare = version_compare(regVersion, pkgversion)
                        if compare >= 0:
                            continue
                    self._installedPkgs[pkid] = pkgversion
Ejemplo n.º 10
0
    def get_corresponding_source_packages(self, pkg_lst=None):
        """Map installed binary packages to their (source, version)
        pairs, including sources referenced via Built-Using."""
        if pkg_lst is None:
            pkg_lst = {p.name for p in self.cache if p.is_installed}

        sources = set()

        with TagFile('/var/lib/dpkg/status') as status:
            for section in status:
                name = section['Package']
                if name not in pkg_lst:
                    continue

                pkg_data = self.cache[name].installed or self.cache[name].candidate
                sources.add((pkg_data.source_name, pkg_data.source_version))

                if "Built-Using" not in section:
                    continue

                # Each Built-Using entry looks like "src (= version)".
                for entry in section["Built-Using"].split(', '):
                    src_name, src_version = entry.split(' ', 1)
                    sources.add((src_name, src_version.strip('(= )')))

        return list(sources)
Ejemplo n.º 11
0
    def _get_packages_for(self, suite, component):
        """Read the source package records for *suite*/*component*.

        Returns a list of PackageInfo objects; when self.extra_suite is
        set, packages from that suite are appended as well.
        """
        if self.useMOMCache:
            source_path = self._archive_path + "/dists/%s-%s/%s/source/Sources" % (
                self._distroName, suite, component)
        else:
            aroot = self._archive_path
            if suite.startswith("buildq"):
                aroot = self._bqueue_path
            source_path = aroot + "/%s/dists/%s/%s/source/Sources.gz" % (
                self._distroName, suite, component)
        packageList = []
        # NOTE(review): the MOM-cache branch points at an uncompressed
        # "Sources" file yet it is still opened with gzip.open, exactly
        # as before — confirm whether that branch is ever taken.
        # The with-block closes the index (the original leaked it).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                archs = section['Architecture']
                pkgversion = section['Version']
                pkgname = section['Package']
                directory = section['Directory']
                dsc = find_dsc(section['Files'])
                pkg = PackageInfo(pkgname, pkgversion, suite, component, archs,
                                  directory, dsc)
                pkg.maintainer = section['Maintainer']
                pkg.comaintainers = section.get('Uploaders', '')
                pkg.homepage = section.get('Homepage', None)

                if section.get('Extra-Source-Only', 'no') == 'yes':
                    pkg.extra_source_only = True

                packageList.append(pkg)

        if self.extra_suite:
            packageList.extend(
                self._get_packages_for(self.extra_suite, component))

        return packageList
Ejemplo n.º 12
0
    def _getArchiveSourcePackageInfo(self, dist, component):
        """Collect PackageInfo entries for all buildable sources in
        *dist*/*component* (Extra-Source-Only packages are skipped)."""
        source_path = self._archivePath + "/dists/%s/%s/source/Sources.gz" % (
            dist, component)
        packageList = []
        # with-block closes the Sources index (previously leaked).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                # don't even try to build source-only packages
                if section.get('Extra-Source-Only', 'no') == 'yes':
                    continue

                # (The Architecture/Binary fields were read but never
                # used here; the unused locals were removed.)
                pkgname = section['Package']
                pkgversion = section['Version']

                pkg = PackageInfo(pkgname, pkgversion, dist, component)

                pkg.info = (
                    "Maintainer: <i>%s</i>\n<br>Co-Maintainers: <i>%s</i><br>\nVCS-Browser: %s"
                    % (section['Maintainer'], section.get(
                        'Uploaders', 'Nobody'), section.get('Vcs-Browser', '#')))

                packageList.append(pkg)

        return packageList
Ejemplo n.º 13
0
 def get_removed_sources(self):
     """Parse the RFC822 removals feed and return a PackageRemovalItem
     for every removed source package (NVIT removals are ignored)."""
     resultsList = []
     # TemporaryFile is closed automatically by the with-block
     # (the original never closed it).
     with tempfile.TemporaryFile() as f:
         f.write(self._removalsRFC822)
         f.seek(0)
         tagf = TagFile(f)
         for section in tagf:
             suite = section.get('Suite', '').strip()
             sources_raw = section.get('Sources', '')
             # check if we have a source removal - the only thing of interest for us, at time
             if sources_raw == '' or suite == '':
                 continue
             source_ids = [x.strip() for x in sources_raw.split('\n')]
             reason = section['Reason']
             # NVITs at Debian are no reason to delete anything from Tanglu ;-)
             if "NVIT" in reason:
                 continue
             for source_id in source_ids:
                 # Only "<name>_<version>" shaped ids carry a version.
                 if "_" not in source_id:
                     continue
                 version = self._get_version_from_pkid(source_id)
                 source = self._get_pkgname_from_pkid(source_id)
                 pkgrm = PackageRemovalItem(suite, source, version, reason)
                 resultsList.append(pkgrm)
     return resultsList
Ejemplo n.º 14
0
 def get_cruft(self):
     """Report dpkg entries left in the "purge ok not-installed" state."""
     count = 0
     with open(self.status) as status_fp:
         parser = TagFile(status_fp)
         while parser.step():
             # Status is a three-word field: want / flag / status.
             (want, flag, status) = parser.section.get('Status').split()
             if (want == 'purge' and
                     flag == 'ok' and
                     status == 'not-installed'):
                 count += 1
     logging.debug('DpkgStatusPlugin found {} cruft items'.format(count))
     return [DpkgStatusCruft(count)] if count else []
Ejemplo n.º 15
0
def get_dsc_size(fname):
    """Return the size of the .dsc file plus all files it lists."""
    total = os.path.getsize(fname)
    for section in TagFile(fname):
        if 'Files' not in section:
            continue
        # Each line is "<md5> <size> <name>"; field 1 is the size.
        for entry in section['Files'].split('\n'):
            total += int(entry.strip().split(' ')[1])

    return total
Ejemplo n.º 16
0
    def get_packages_for(self, dist, component):
        """Build PackageInfo objects for all active source packages in
        *dist*/*component*, including build metadata and detection of
        already-built (installed) architectures."""
        # create a cache of all installed packages on the different architectures
        self._build_installed_pkgs_cache(dist, component)
        source_path = self._archivePath + "/dists/%s/%s/source/Sources.gz" % (dist, component)
        packageList = []
        # Close the Sources index again after parsing (was leaked before).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                # don't even try to build source-only packages
                if section.get('Extra-Source-Only', 'no') == 'yes':
                    continue

                pkgname = section['Package']
                if pkgname not in self._activePackages:
                    continue
                archs_str = section['Architecture']
                binaries = section['Binary']
                pkgversion = section['Version']

                if ' ' in archs_str:
                    archs = archs_str.split(' ')
                else:
                    archs = [archs_str]
                # remove duplicate archs from list
                # this is very important, because we otherwise will add duplicate build requests in Jenkins
                archs = list(set(archs))

                pkg = PackageInfo(pkgname, pkgversion, dist, component, archs)

                pkg.info = ("Package: %s\nBinary Packages: %s\nMaintainer: %s\nCo-Maintainers: %s\nVCS-Browser: %s" %
                            (pkgname, binaries, section['Maintainer'], section.get('Uploaders', 'Nobody'), section.get('Vcs-Browser', 'None set')))

                # values needed for build-dependency solving
                pkg.build_depends = section.get('Build-Depends', '')
                pkg.build_conflicts = section.get('Build-Conflicts', '')
                pkg.archs_str = archs_str

                # we check if one of the arch-binaries exists. if it does, we consider the package built for this architecture
                # FIXME: This does not work well for binNMUed packages! Implement a possible solution later.
                # (at time, a version-check prevents packages from being built twice)
                if "," in binaries:
                    binaryPkgs = binaries.split(', ')
                else:
                    binaryPkgs = [binaries]
                for binaryName in binaryPkgs:
                    self._set_pkg_installed_for_arch(section["Directory"], pkg, binaryName)

                packageList.append(pkg)

        return packageList
Ejemplo n.º 17
0
def get_dsc_size(fname):
    """Return the size of the .dsc plus the sizes of all files it lists.

    Returns 0 when the virtapt bindings could not be imported.
    """
    if not virtapt_imported:
        return 0

    tf = TagFile(fname)

    sz = os.path.getsize(fname)
    for sect in tf:
        # dict.has_key() was removed in Python 3; the `in` operator is
        # equivalent (and matches the sibling implementation above).
        if 'Files' in sect:
            files = sect['Files'].split('\n')
            files = [f.strip().split(' ') for f in files]
            for f in files:
                # field layout: "<checksum> <size> <name>"
                sz += int(f[1])

    return sz
Ejemplo n.º 18
0
    def _get_package_list(self, suite, component, is_build_queue=False):
        """Read the Sources.gz index for *suite*/*component* and return
        PackageInfo entries; the suite's build queue (if any) is
        included recursively."""
        if is_build_queue:
            source_path = self._bqueue_path + "/dists/%s/%s/source/Sources.gz" % (
                suite, component)
        else:
            source_path = self._archive_path + "/dists/%s/%s/source/Sources.gz" % (
                suite, component)
        packageList = []
        # with-block closes the index file (previously leaked).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                # don't even try to build source-only packages.
                # (This continue also made the original's second
                # Extra-Source-Only check unreachable, so it was dropped.)
                if section.get('Extra-Source-Only', 'no') == 'yes':
                    continue

                pkgname = section['Package']
                pkgversion = section['Version']
                archs = list(set(section['Architecture'].split(None)))
                directory = section['Directory']
                dsc = find_dsc(section['Files'])

                pkg = PackageInfo(pkgname, pkgversion, suite, component, archs,
                                  directory, dsc)

                # values needed for build-dependency solving
                pkg.build_depends = section.get('Build-Depends', '')
                pkg.build_conflicts = section.get('Build-Conflicts', '')

                pkg.maintainer = section['Maintainer']
                pkg.comaintainers = section.get('Uploaders', '')

                packageList.append(pkg)

        bqueue = self._conf.get_build_queue(suite)
        if bqueue:
            packageList.extend(
                self._get_package_list(bqueue, component, is_build_queue=True))

        return packageList
Ejemplo n.º 19
0
    def _add_binaries_to_dict(self,
                              pkg_dict,
                              suite,
                              component,
                              arch,
                              udeb=False):
        """Attach binary package entries (name, arch, filename) to the
        matching source packages in *pkg_dict*.

        Only binaries whose version matches the cached source version
        are recorded. Returns the updated pkg_dict.
        """
        aroot = self._archive_path
        if suite.startswith("buildq"):
            aroot = self._bqueue_path
        if udeb:
            source_path = aroot + "/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (
                suite, component, arch)
        else:
            source_path = aroot + "/dists/%s/%s/binary-%s/Packages.gz" % (
                suite, component, arch)
        # Close the Packages index after parsing (previously leaked).
        with gzip.open(source_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                # make sure we have the right arch (closes bug in installed-detection)
                if section['Architecture'] != arch:
                    continue

                pkgversion = section['Version']
                pkgname = section['Package']
                pkgsource = section.get('Source', pkgname)
                # if source has different version, we cheat and set the binary pkg version
                # to the source package version
                if "(" in pkgsource:
                    m = re.search(r"^(.*)\((.*)\)$", pkgsource)
                    pkgsource = m.group(1).strip()
                    pkgversion = m.group(2).strip()

                pkg = pkg_dict.get(pkgsource, None)

                if pkg is not None and pkg.version == pkgversion:
                    if arch not in pkg.installed_archs:
                        pkg.installed_archs += [arch]
                    pkg.binaries += [(pkgname, arch, section['Filename'])]
                    pkg_dict[pkgsource] = pkg

        return pkg_dict
Ejemplo n.º 20
0
    def _get_packages_for(self, suite, component):
        """Load the cached amd64 Packages index for *suite*/*component*
        and return the packages as a dict (via package_list_to_dict)."""
        index_path = os.path.join(self.get_cache_path(), suite, component,
                                  "Packages-amd64.gz")

        package_list = []
        # Close the index file again (the original leaked it).
        with gzip.open(index_path, 'rb') as f:
            tagf = TagFile(f)
            for section in tagf:
                pkgversion = section.get('Version')
                if not pkgversion:
                    # NOTE(review): the entry is still appended with a
                    # None version, exactly as before — confirm intended.
                    print("Debian: Bad package data found!")
                pkgname = section['Package']
                pkg = PackageInfo(pkgname, pkgversion,
                                  self._get_upstream_version(pkgversion), suite,
                                  component)
                pkg.url = "https://packages.debian.org/%s/%s" % (suite, pkgname)

                package_list.append(pkg)
        packages_dict = package_list_to_dict(package_list)

        return packages_dict
Ejemplo n.º 21
0
    def binary_packages(self, suite, component, arch):
        '''
        Get a list of binary package information for the given repository suite,
        component and architecture.
        '''

        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent
        assert type(arch) is ArchiveArchitecture

        # Locate the Packages.xz index for this component/architecture.
        rel_path = os.path.join(component.name,
                                'binary-{}'.format(arch.name),
                                'Packages.xz')
        index_fname = self.index_file(suite.name, rel_path)
        if not index_fname:
            return []

        with TagFile(index_fname) as tf:
            return self._read_binary_packages_from_tf(
                tf, index_fname, suite, component, arch, DebType.DEB)
Ejemplo n.º 22
0
    def installer_packages(self, suite, component, arch):
        '''
        Get a list of binary installer packages for the given repository suite, component
        and architecture.
        These binary packages are typically udebs used by the debian-installer, and should not
        be installed on an user's system.
        '''

        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent
        assert type(arch) is ArchiveArchitecture

        # udeb indices live under the extra "debian-installer" directory.
        rel_path = os.path.join(component.name,
                                'debian-installer',
                                'binary-{}'.format(arch.name),
                                'Packages.xz')
        index_fname = self.index_file(suite.name, rel_path)
        if not index_fname:
            return []

        with TagFile(index_fname) as tf:
            return self._read_binary_packages_from_tf(
                tf, index_fname, suite, component, arch, DebType.UDEB)
Ejemplo n.º 23
0
    def _create_faux_packages(self, session, mi_wspace: str,
                              suites_source: List[ArchiveSuite],
                              suite_target: ArchiveSuite):
        '''
        If we have a partial source and target suite, we need to let Britney know about the
        parent packages somehow.
        At the moment, we simply abuse the FauxPackages system for that.
        '''

        # we don't support more than one source suite for this feature at the moment
        if len(suites_source) > 1:
            log.info(
                'Not auto-generating faux packages: Multiple suites set as sources.'
            )
            return

        suite_source = suites_source[0]

        # Faux packages are only needed when both suites are partial
        # (i.e. have parents); otherwise bail out early.
        if suite_source.parent and suite_target.parent:
            log.info(
                'Creating faux-packages to aid resolving of partial suites.')
        else:
            log.info(
                'No auto-generating faux packages: No source and target suite parents, generation is unnecessary.'
            )
            return

        existing_pkg_arch_set = set()
        log.debug(
            'Creating index of valid packages that do not need a faux package.'
        )

        # we need repository information to only generate faux packages if a package doesn't exist
        # in our source suite(s) already
        repo = self._get_local_repo(session)

        # Build a set of "<arch>:<pkgname>" strings covering every binary
        # and source package already present in the source suites.
        for suite in suites_source:
            esuite = session.query(ArchiveSuite) \
                            .options(joinedload(ArchiveSuite.components)) \
                            .options(joinedload(ArchiveSuite.architectures)) \
                            .filter(ArchiveSuite.id == suite.id).one()
            session.expunge(
                esuite
            )  # we don't want packages accidentally added to the database here
            for component in esuite.components:
                for arch in esuite.architectures:
                    aname = arch.name
                    for bpkg in repo.binary_packages(esuite, component, arch):
                        existing_pkg_arch_set.add(aname + ':' + bpkg.name)
                    for spkg in repo.source_packages(esuite, component):
                        existing_pkg_arch_set.add(aname + ':' + spkg.name)

        archive_root_dir = self._lconf.archive_root_dir
        fauxpkg_fname = os.path.join(mi_wspace, 'input', 'faux-packages')

        log.debug('Generating faux packages list')
        fauxpkg_data = {}
        # Walk every Packages.xz index (regular and debian-installer) of
        # the target suite's parent, emitting a faux entry for every
        # package that is not already known in the source suites.
        for component in suite_target.parent.components:

            for installer_dir in ['', 'debian-installer']:
                for arch in suite_target.parent.architectures:
                    pfile = os.path.join(archive_root_dir, 'dists',
                                         suite_target.parent.name,
                                         component.name, installer_dir,
                                         'binary-{}'.format(arch.name),
                                         'Packages.xz')
                    if not os.path.isfile(pfile):
                        continue

                    log.debug('Reading data for faux packages list: {}'.format(
                        pfile))

                    with TagFile(pfile) as tf:
                        for e in tf:
                            pkgname = e['Package']
                            pkgversion = e['Version']
                            pkgarch = e['Architecture']

                            # De-duplicate on name-version-arch.
                            pkid = '{}-{}-{}'.format(pkgname, pkgversion,
                                                     pkgarch)
                            if pkid in fauxpkg_data:
                                continue
                            pkgname_arch = pkgarch + ':' + pkgname
                            if pkgname_arch in existing_pkg_arch_set:
                                continue
                            provides = e.get('Provides', '')

                            # Assemble the faux package stanza in the
                            # format Britney's FauxPackages file expects.
                            data = 'Package: {}\nVersion: {}'.format(
                                pkgname, pkgversion)
                            if pkgarch and pkgarch != 'all':
                                data = data + '\nArchitecture: {}'.format(
                                    pkgarch)
                            if provides:
                                data = data + '\nProvides: {}'.format(provides)
                            if component.name != 'main':
                                data = data + '\nComponent: {}'.format(
                                    component.name)

                            fauxpkg_data[pkid] = data

                            # FIXME: We shouldn't have to special-case this :any case,
                            # rather Britney should do the right thing and recognize this
                            # notation for faux-packages. But until that is fixed
                            # properly and since a dependency on python3:any is so common, we
                            # will work around this issue
                            if pkgname == 'python3':
                                pkid = '{}-{}-{}'.format(
                                    'python3:any', pkgversion, pkgarch)
                                if pkid in fauxpkg_data:
                                    continue
                                fauxpkg_data[pkid] = data.replace(
                                    'Package: python3\n',
                                    'Package: python3:any\n')

        # Write all collected stanzas, blank-line separated.
        with open(fauxpkg_fname, 'w') as f:
            for segment in fauxpkg_data.values():
                f.write(segment + '\n\n')
Ejemplo n.º 24
0
 def readReleaseFile(self, filename):
     """Read a Release file, return as a keyword/value dict."""
     # open() replaces the removed Python 2 builtin file(); the
     # with-block closes the handle (previously leaked). list() forces
     # the lazy TagFile to be fully read while the file is still open.
     with open(filename) as release_file:
         sections = list(TagFile(release_file))
     self.assertEqual(1, len(sections))
     return dict(sections[0])
Ejemplo n.º 25
0
class DebianControlFile(object):
    """
    This class holds all the information from a debian control file.
    It also provides some methods to operate with that information.

    NOTE(review): this is Python 2 code (commands module, unicode(),
    print statement, types.StringType) — it will not run unmodified on
    Python 3.
    """
    class FileInfo:
        # Simple record for one entry of the Files: field.
        def __init__(self, md5sum, size, name):
            self.size = size
            self.md5sum = md5sum
            self.name = name

    class FileNotFoundError(Exception):
        """ 
        A file operation was requested an a listed file was not found
        """
        def __init__(self, filename):
            self.filename = filename
        def __str__(self):
            return repr(self.filename)

    class MD5Error(Exception):
        """
        The MD5 checksum verification failed during a file copy/move
        operation.
        """
        def __init__(self, expected_md5, found_md5, name):
            self.expected_md5 = expected_md5
            self.found_md5 = found_md5
            self.name = name
        def __str__(self):
            # NOTE(review): self.value1/2/3 are never assigned anywhere
            # in this class — this repr() call would raise
            # AttributeError if ever reached. Likely meant to use
            # expected_md5/found_md5/name.
            return repr(self.value1, self.value2, self.value3)


    def __init__(self, input=None):
        # A string is treated as a filename; anything else is assumed
        # to be an open file object and wrapped in TagFile directly.
        if type(input) == types.StringType:
            self._filename = input
            self.load(input)
        else:
            self.tagfile = TagFile(input)

    def load(self, input):
        """
        Load control file
        """
        # File is intentionally left open: TagFile reads it lazily.
        plain_file = open(input)
        tagfile = TagFile(plain_file)

        # Loop to skip PGP signature
        # Sections whose first key starts with '-' belong to the PGP
        # armor; skip until the first real control section.
        while tagfile.step():
            if tagfile.section.keys()[0][0] != '-':
                break

        self.tagfile = tagfile

    def step(self):
        """ Advance to next section """
        return self.tagfile.step()

    def files_list(self):
        # Parse the Files: field into FileInfo records
        # ("<md5> <size> ... <name>" per line), or None when absent.
        if not self['Files']:
            return None
        files = self['Files'].split('\n')
        file_info_list = []
        for file in files:
            file_parts = file.strip(" ").split(" ")
            file_info = self.FileInfo(file_parts[0], file_parts[1], \
                file_parts[len(file_parts)-1])
            file_info_list.append(file_info)
        return file_info_list

    def version(self):
        """ 
        Returns the package version after removing the epoch part
        """
        version = self.tagfile.section['Version']
        # partition() leaves epoch empty when there is no ':'; in that
        # case the full version ends up in `epoch` and is returned.
        epoch, sep, version = version.partition(":")
        return version or epoch

    def upstream_version(self):
        """ 
        Returns the upstream version contained on the Version field
        """          
        return self.version().rsplit("-", 1)[0]  

    def verify_gpg(self, keyring, verbose=False):
        """Verifies the file GPG signature using the specified keyring
        file.
        
        @param keyring: they keyring to be used for verification
        @return: the signature author or None
        """
        gpg_cmd = "LANGUAGE=en_US.UTF-8 LANG=en_US.UTF-8 " \
              "gpg --no-options --no-default-keyring " \
            "--keyring %s --verify --logger-fd=1 %s" \
            % (keyring, self._filename)
    
        sign_author = None
        # Python 2 commands module; shells out to gpg and scrapes the
        # "Good signature from" line for the author.
        (rc, output) = commands.getstatusoutput(gpg_cmd)
        output = unicode(output, 'utf-8')    
        if verbose:
            print output
        output_lines = output.split("\n")        
        if rc==0:
            for line in output_lines:        
                if line.startswith("gpg: Good signature from"):
                    dummy, sign_author, dummy = line.split('"')    
        return sign_author

    def verify_md5sum(self, source_dir=None):
        """
        Verify the MD5 checksum for all the files
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        for file in self.files_list():
            full_filename = "%s/%s" % (source_dir, file.name)
            if not os.path.exists(full_filename):
                return (file.md5sum, "FILE_NOT_FOUND", file.name)
            else:
                # Shells out to md5sum rather than using hashlib.
                md5sum = commands.getoutput("md5sum %s" % full_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    return (file.md5sum, found_md5, file.name)
        return None    

    def copy(self, destination_dir=None, source_dir=None, md5check=True):
        """
        Copies the files listed on the control file
        The control file is also copied at the end
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        if not os.path.isdir(destination_dir):
            raise Exception
            return

        # The control file itself is appended (with no checksum) so it
        # is copied after all listed files.
        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            source_filename = "%s/%s" % (source_dir, file.name)
            target_filename = "%s/%s" % (destination_dir, file.name)
            if not os.path.exists(source_filename):
                raise self.FileNotFoundError(source_filename)
                return
            shutil.copy2(source_filename, target_filename)
            if md5check and file.md5sum:
                md5sum = commands.getoutput("md5sum %s" % target_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    raise self.MD5Error(file.md5sum, found_md5, file.name)
                    return        
        return None

    def move(self, destination_dir=None, source_dir=None, md5check=True):
        """
        Moves the files listed on the control file
        The control file is also moved at the end
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        if not os.path.isdir(destination_dir):
            raise Exception
            return
    
        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            source_filename = "%s/%s" % (source_dir, file.name)
            target_filename = "%s/%s" % (destination_dir, file.name)
            if not os.path.exists(source_filename):
                raise self.FileNotFoundError(source_filename)
                return
            # Overwrite any pre-existing target before moving.
            if os.path.exists(target_filename):
                os.unlink(target_filename)
            shutil.move(source_filename, target_filename)
            if md5check and file.md5sum:
                md5sum = commands.getoutput("md5sum %s" % target_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    raise self.MD5Error(file.md5sum, found_md5, file.name)
                    return
                    
        return None

    def remove(self, source_dir=None):
        """
        Removes all files listed and the control file itself
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        
        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            full_filename = "%s/%s" % (source_dir, file.name)
            if os.path.exists(full_filename):
                os.unlink(full_filename)
    
        
    def __getitem__(self, item):
        # Dict-style access to the current section; missing fields
        # yield None instead of raising KeyError.
        try:
            item = self.tagfile.section[item]
        except KeyError:
            item = None
        return item

    def __str__(self):
        return str(self.tagfile.section)
Ejemplo n.º 26
0
class DebianControlFile(object):
    """
    This class holds all the information from a debian control file.
    It also provides some methods to operate with that information.
    """
    class FileInfo:
        def __init__(self, md5sum, size, name):
            self.size = size
            self.md5sum = md5sum
            self.name = name

    class FileNotFoundError(Exception):
        """ 
        A file operation was requested an a listed file was not found
        """
        def __init__(self, filename):
            self.filename = filename

        def __str__(self):
            return repr(self.filename)

    class MD5Error(Exception):
        """
        The MD5 checksum verification failed during a file copy/move
        operation.
        """
        def __init__(self, expected_md5, found_md5, name):
            self.expected_md5 = expected_md5
            self.found_md5 = found_md5
            self.name = name

        def __str__(self):
            return repr(self.value1, self.value2, self.value3)

    def __init__(self, input=None):

        if type(input) == types.StringType:
            self._filename = input
            self.load(input)
        else:
            self.tagfile = TagFile(input)

    def load(self, input):
        """
        Load control file
        """
        plain_file = open(input)
        tagfile = TagFile(plain_file)

        # Loop to skip PGP signature
        while tagfile.step():
            if tagfile.section.keys()[0][0] != '-':
                break

        self.tagfile = tagfile

    def step(self):
        """ Advance to next section """
        return self.tagfile.step()

    def files_list(self):
        if not self['Files']:
            return None
        files = self['Files'].split('\n')
        file_info_list = []
        for file in files:
            file_parts = file.strip(" ").split(" ")
            file_info = self.FileInfo(file_parts[0], file_parts[1], \
                file_parts[len(file_parts)-1])
            file_info_list.append(file_info)
        return file_info_list

    def version(self):
        """ 
        Returns the package version after removing the epoch part
        """
        version = self.tagfile.section['Version']
        epoch, sep, version = version.partition(":")
        return version or epoch

    def upstream_version(self):
        """ 
        Returns the upstream version contained on the Version field
        """
        return self.version().rsplit("-", 1)[0]

    def verify_gpg(self, keyring, verbose=False):
        """Verifies the file GPG signature using the specified keyring
        file.
        
        @param keyring: they keyring to be used for verification
        @return: the signature author or None
        """
        gpg_cmd = "LANGUAGE=en_US.UTF-8 LANG=en_US.UTF-8 " \
              "gpg --no-options --no-default-keyring " \
            "--keyring %s --verify --logger-fd=1 %s" \
            % (keyring, self._filename)

        sign_author = None
        (rc, output) = commands.getstatusoutput(gpg_cmd)
        output = unicode(output, 'utf-8')
        if verbose:
            print output
        output_lines = output.split("\n")
        if rc == 0:
            for line in output_lines:
                if line.startswith("gpg: Good signature from"):
                    dummy, sign_author, dummy = line.split('"')
        return sign_author

    def verify_md5sum(self, source_dir=None):
        """
        Verify the MD5 checksum for all the files
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        for file in self.files_list():
            full_filename = "%s/%s" % (source_dir, file.name)
            if not os.path.exists(full_filename):
                return (file.md5sum, "FILE_NOT_FOUND", file.name)
            else:
                md5sum = commands.getoutput("md5sum %s" % full_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    return (file.md5sum, found_md5, file.name)
        return None

    def copy(self, destination_dir=None, source_dir=None, md5check=True):
        """
        Copies the files listed on the control file
        The control file is also copied at the end
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        if not os.path.isdir(destination_dir):
            raise Exception
            return

        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            source_filename = "%s/%s" % (source_dir, file.name)
            target_filename = "%s/%s" % (destination_dir, file.name)
            if not os.path.exists(source_filename):
                raise self.FileNotFoundError(source_filename)
                return
            shutil.copy2(source_filename, target_filename)
            if md5check and file.md5sum:
                md5sum = commands.getoutput("md5sum %s" % target_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    raise self.MD5Error(file.md5sum, found_md5, file.name)
                    return
        return None

    def move(self, destination_dir=None, source_dir=None, md5check=True):
        """
        Moves the files listed on the control file
        The control file is also moved at the end
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)
        if not os.path.isdir(destination_dir):
            raise Exception
            return

        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            source_filename = "%s/%s" % (source_dir, file.name)
            target_filename = "%s/%s" % (destination_dir, file.name)
            if not os.path.exists(source_filename):
                raise self.FileNotFoundError(source_filename)
                return
            if os.path.exists(target_filename):
                os.unlink(target_filename)
            shutil.move(source_filename, target_filename)
            if md5check and file.md5sum:
                md5sum = commands.getoutput("md5sum %s" % target_filename)
                (found_md5, dummy) = md5sum.split()
                if found_md5 != file.md5sum:
                    raise self.MD5Error(file.md5sum, found_md5, file.name)
                    return

        return None

    def remove(self, source_dir=None):
        """
        Removes all files listed and the control file itself
        Returns:
            None: on success
            (expected_md5, found_md5, filename): on failure
        """
        source_dir = source_dir or os.path.dirname(self._filename)

        file_list = self.files_list()
        file_list.append(self.FileInfo(None, None, \
            os.path.basename(self._filename)))
        for file in file_list:
            full_filename = "%s/%s" % (source_dir, file.name)
            if os.path.exists(full_filename):
                os.unlink(full_filename)

    def __getitem__(self, item):
        try:
            item = self.tagfile.section[item]
        except KeyError:
            item = None
        return item

    def __str__(self):
        return str(self.tagfile.section)
Ejemplo n.º 27
0
    def source_packages(self, suite, component):
        '''
        Return a list of all source packages in the given suite and
        component, parsed from the component's Sources.xz index.

        :param suite: the ArchiveSuite to read from
        :param component: the ArchiveComponent to read from
        :raises Exception: when a Sources block lacks a Package or
            Version field.
        '''
        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent

        index_fname = self.index_file(suite.name, os.path.join(component.name, 'source', 'Sources.xz'))
        if not index_fname:
            return []

        pkgs = []
        with TagFile(index_fname) as tf:
            for e in tf:
                # Use .get() so a *missing* field triggers the explicit
                # error below instead of an opaque KeyError; the original
                # unreachable `break` after the raise was removed.
                pkgname = e.get('Package')
                pkgversion = e.get('Version')
                if not pkgname or not pkgversion:
                    raise Exception('Found invalid block (no Package and Version fields) in Sources file "{}".'.format(index_fname))

                pkg = SourcePackage()
                pkg.repo = self._repo_entity
                pkg.name = pkgname
                pkg.component = component
                if suite not in pkg.suites:
                    pkg.suites.append(suite)

                pkg.version = pkgversion
                pkg.architectures = split_strip(e['Architecture'], ' ')
                pkg.standards_version = e.get('Standards-Version', '0~notset')
                pkg.format_version = e['Format']

                pkg.vcs_browser = e.get('Vcs-Browser')
                pkg.homepage = e.get('Homepage')
                pkg.maintainer = e['Maintainer']
                pkg.uploaders = split_strip(e.get('Uploaders', ''), ',')  # FIXME: Careful! Splitting just by comma isn't enough! We need to parse this properly.

                pkg.build_depends = split_strip(e.get('Build-Depends', ''), ',')
                pkg.directory = e['Directory']

                pkg.files = parse_checksums_list(e.get('Checksums-Sha256'), pkg.directory)

                # Derive the binary package list: prefer the structured
                # Package-List field, fall back to the legacy Binary field.
                binaries = []
                raw_pkg_list = e.get('Package-List', None)
                if not raw_pkg_list:
                    for bpname in e.get('Binary', '').split(','):
                        if not bpname:
                            continue
                        bpname = bpname.strip()
                        pi = PackageInfo()
                        pi.deb_type = DebType.DEB
                        pi.name = bpname
                        pi.ver = pkg.version
                        binaries.append(pi)
                else:
                    binaries = parse_package_list_str(raw_pkg_list, pkg.version)
                pkg.binaries = binaries

                # do some issue-reporting
                if not pkg.files and pkg.format_version != '1.0':
                    log.warning('Source package {}/{} seems to have no files (in {}).'.format(pkg.name, pkg.version, self.location))

                # add package to results set
                pkg.update_uuid()
                pkgs.append(pkg)

        return pkgs
Ejemplo n.º 28
0
    def batch_rebuild_packages(self,
                               component,
                               bad_depends,
                               build_note,
                               dry_run=True):
        """
        Schedule a rebuild for every source package in the given
        component whose i386 binaries depend on `bad_depends`.

        :param component: archive component to scan (e.g. "main")
        :param bad_depends: binary package name a dependency must match
        :param build_note: note attached to the triggered rebuilds
        :param dry_run: when True, only print what would be rebuilt
        :return: True on success / dry-run, False if any rebuild trigger
            failed, None when nothing matched.
        """
        source_path = self._archivePath + "/%s/dists/%s/%s/binary-i386/Packages.gz" % (
            "tanglu", self._suite, component)
        f = gzip.open(source_path, 'rb')
        tagf = TagFile(f)
        rebuildSources = []
        bad_depends = bad_depends.strip()
        for section in tagf:
            pkgname = section['Package']
            source_pkg = section.get('Source', '')
            if source_pkg == '':
                source_pkg = pkgname
            # strip a "(version)" annotation from the Source field
            if "(" in source_pkg:
                source_pkg = source_pkg.split("(")[0].strip()
            if source_pkg in rebuildSources:
                continue  # we already handled a rebuild for that
            # old binary packages are not interesting for us
            if source_pkg in self._pkgs_tanglu:
                compare = version_compare(
                    self._pkgs_tanglu[source_pkg].version, section['Version'])
                if compare > 0:
                    continue

            depends = section.get('Depends', '')
            if depends == '':
                continue
            # we ignore pre-depends: Pre-depending stuff is much safer with a manual rebuild
            for dep in depends.split(','):
                dep = dep.strip()
                if not dep.startswith(bad_depends):
                    continue
                # BUG FIX: the original used `continue` after appending, so
                # a package whose Depends matched twice (e.g.
                # "foo, foo (>= 1)") was scheduled twice. `break` stops
                # scanning once the package is scheduled.
                if dep == bad_depends:
                    rebuildSources.append(source_pkg)
                    break
                if '(' not in dep:
                    continue
                depid_parts = dep.split('(')
                if bad_depends == depid_parts[0].strip():
                    rebuildSources.append(source_pkg)
                    break

        print("Packages planned for rebuild:")
        if len(rebuildSources) == 0:
            print("No matching packages found.")
            return
        print("\n".join(rebuildSources))

        if dry_run:
            return True  # dry-run - nothing to do

        res = True
        for pkg in rebuildSources:
            if not self.trigger_package_rebuild(component, pkg, build_note,
                                                False):
                res = False

        print("\n".join(self._todo_cmds))
        return res