Example #1
    def move_package_files(self, filelist, resultdir, verbose=True):
        """move package files from the temporary build area to the result directory

        This defines the self.packages variable used by post-treatment.
        Some checks are performed before copying to the result directory.

        :see: dcmd command
        :todo: add more checks: sizes, checksums, etc. (e.g. _check_file)
        :todo: support other source package formats
        :todo: define an API and/or integrate tools (dput, curl, scp)?
        """
        assert isinstance(filelist,
                          list), "must be a list to be able to extend"

        def _sign_file(filename):
            if self.config.sign:
                check_debsign(self)
                try:
                    check_call(["debsign", filename], stdout=sys.stdout)
                except CalledProcessError:
                    self.logger.error("lgp cannot debsign '%s' automatically" %
                                      filename)
                    # copied_filename comes from the enclosing loop: point the
                    # user at the copy in the result directory
                    self.logger.error("You have to run manually: debsign %s" %
                                      copied_filename)

        def _check_file(filename):
            if osp.isfile(filename):
                hash1 = hashlib.md5(open(fullpath, 'rb').read()).hexdigest()
                hash2 = hashlib.md5(open(filename, 'rb').read()).hexdigest()
                if hash1 == hash2:
                    self.logger.debug("overwriting identical file '%s'" %
                                      filename)
                else:
                    msg = "these files shouldn't be different:\n"\
                          "- %s (%s)\n"\
                          "- %s (%s)"
                    self.logger.warn(msg % (fullpath, hash1, filename, hash2))
                    os.system('diff -u %s %s' % (fullpath, filename))
                    raise LGPException(
                        "bad md5 sums of source archives (tarball)")

        def _check_pristine():
            """basic check about presence of pristine tarball in source package

            Format: 1.0
            A source package in this format consists either of a .orig.tar.gz
            associated to a .diff.gz or a single .tar.gz (in that case the
            package is said to be native).

            A source package contains at least an original tarball
            (.orig.tar.ext where ext can be gz, bz2 and xz)
            """
            ext = tuple([".tar" + e for e in ('.gz', '.bz2', '.xz')])
            pristine = diff = None
            for entry in filelist:
                if not diff and entry.endswith('.diff.gz'):
                    diff = entry
                if not pristine and entry.endswith(ext):
                    pristine = entry
            if pristine is None:
                if self.is_initial_debian_revision():
                    msg = "no pristine tarball found for initial Debian"\
                          " revision (searched extensions: %s)"
                    self.logger.error(msg % (ext,))
                return
            orig = pristine.rsplit('.', 2)[0].endswith(".orig")
            if not diff and not orig:
                msg = ("native package detected. Read `man dpkg-source` "
                       "carefully if not sure")
                self.logger.warn(msg)

        while filelist:
            fullpath = filelist.pop()
            if not osp.isfile(fullpath):
                raise IOError("%s not found!" % fullpath)
            (path, filename) = osp.split(fullpath)
            copied_filename = osp.join(resultdir, osp.basename(filename))

            if filename.endswith(('.changes', '.dsc')):
                contents = deb822.Deb822(open(fullpath))
                filelist.extend([
                    osp.join(path,
                             f.split()[-1])
                    for f in contents['Files'].split('\n') if f
                ])
            #logging.debug('copying: %s -> %s ... \npending: %s' % (filename, copied_filename, filelist))

            if filename.endswith('.dsc'):
                #_check_file(copied_filename)
                _check_pristine()
                if self.config.deb_src_only:
                    self.logger.info("Debian source control file: %s" %
                                     copied_filename)
                    _sign_file(fullpath)
            if filename.endswith('.log'):
                self.logger.info("a build logfile is available: %s" %
                                 copied_filename)
            if filename.endswith('.changes'):
                self.logger.info("Debian changes file: %s" % copied_filename)
                #_check_file(copied_filename)
                _sign_file(fullpath)
            #if filename.endswith('.diff.gz'):
            #    _check_file(copied_filename)

            cp(fullpath, copied_filename)
            assert osp.exists(copied_filename)
            self.packages.append(copied_filename)
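
The loop above treats each .changes or .dsc file as an index of further files through its 'Files' field. A minimal standalone sketch of that parsing step, with a hypothetical file name:

from debian import deb822

# Read a Debian .changes file and list the files it references.
# "example.changes" is a hypothetical path.
with open("example.changes") as fh:
    changes = deb822.Deb822(fh)

for entry in changes["Files"].split("\n"):
    if entry:
        # each line reads "<md5> <size> <section> <priority> <filename>"
        print(entry.split()[-1])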
Example #2
def read_config(config_filepath):
    with open(config_filepath, "r") as config_file:
        config = deb822.Deb822(config_file)
    return config
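
Deb822 parses only the first paragraph of its input, which is enough for a single-stanza config file; for multi-stanza files such as Packages indices, iter_paragraphs is the usual tool. A minimal sketch, assuming a local Packages file:

from debian import deb822

# Deb822(...) stops at the first blank line; iter_paragraphs yields
# every stanza in a multi-paragraph file.
with open("Packages") as fh:  # hypothetical path
    for stanza in deb822.Deb822.iter_paragraphs(fh):
        print(stanza.get("Package"))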
Example #3
    def test_publish_repo(self, _repo_controller, _task_current, _restorecon, _sign):
        _task_current.request.id = 'aabb'
        worker_name = "worker01"
        _task_current.request.configure_mock(hostname=worker_name)
        os.makedirs(os.path.join(self.pulp_working_dir, worker_name))
        # Set up some files
        storage_dir = os.path.join(self.work_dir, 'storage_dir')
        publish_dir = os.path.join(self.work_dir, 'publish_dir')
        os.makedirs(storage_dir)
        units = self._units(storage_dir)

        unit_dict = dict()
        unit_counts = dict()
        for type_id in sorted(ids.SUPPORTED_TYPES):
            _l = unit_dict[type_id] = [u for u in units
                                       if u.type_id == type_id]
            unit_counts[type_id] = len(_l)

        distributor = self.Module.DebDistributor()
        repo = mock.Mock()
        repo_time = int(time.time())
        repo_id = "repo-%d-deb-level0" % repo_time
        repo.configure_mock(
            working_dir=os.path.join(self.work_dir, 'work_dir'),
            content_unit_counts=unit_counts,
            description="Repo %d description" % repo_time,
            id=repo_id)

        def mock_get_units(repo_id, model_class, *args, **kwargs):
            units = unit_dict[model_class.TYPE_ID]
            query = mock.MagicMock()
            query.count.return_value = len(units)
            query.__iter__.return_value = iter(units)
            return [query]
        _repo_controller.get_unit_model_querysets.side_effect = mock_get_units
        conduit = self._config_conduit()
        repo_config = dict(
            http=True, https=False,
            relative_url='level1/' + repo.id,
            http_publish_dir=publish_dir + '/http/repos',
            https_publish_dir=publish_dir + '/https/repos')
        if self.default_release:
            repo_config[constants.PUBLISH_DEFAULT_RELEASE_KEYWORD] = True

        signer = self.new_file(name="signer", contents="#!/bin/bash").path
        os.chmod(signer, 0o755)

        repo_config.update(gpg_cmd=signer)

        # This call is to be tested
        distributor.publish_repo(repo, conduit, config=repo_config)

        # Assert, certain things have been called
        self.assertEquals(
            [x[0][0] for x in conduit.build_success_report.call_args_list],
            [{'publish_directory': 'FINISHED', 'publish_modules': 'FINISHED',
              'generate_listing_files': 'FINISHED'}])
        self.assertEquals(
            [x[0][1][0]['num_processed']
             for x in conduit.build_success_report.call_args_list],
            [1])
        self.assertEquals(
            [len(x[0][1][0]['sub_steps'])
             for x in conduit.build_success_report.call_args_list],
            [4])

        # Make sure all three models (packages, components, releases) are retrieved
        self.assertEqual(_repo_controller.get_unit_model_querysets.call_count, 3)

        # Make sure symlinks got created
        for unit in unit_dict[ids.TYPE_ID_DEB]:
            units_components = [comp.name for comp in unit_dict[ids.TYPE_ID_DEB_COMP]
                                if unit.id in comp.packages]
            for component in units_components:
                published_path = os.path.join(
                    repo_config['http_publish_dir'],
                    repo_config['relative_url'],
                    'pool',
                    component,
                    unit.filename)
                self.assertEquals(os.readlink(published_path), unit.storage_path)
            if self.default_release:
                published_path = os.path.join(
                    repo_config['http_publish_dir'],
                    repo_config['relative_url'],
                    'pool',
                    'all',
                    unit.filename)
                self.assertEquals(os.readlink(published_path), unit.storage_path)

        # Make sure the release files exist
        release_units = unit_dict[ids.TYPE_ID_DEB_RELEASE]
        component_units = unit_dict[ids.TYPE_ID_DEB_COMP]
        # Old-style repositories do not have release units and should be published as "stable/main"
        if not release_units:
            release_units.append(models.DebRelease(codename='stable', id='stableid'))
            component_units.append(models.DebComponent(name='main', id='mainid', release='stable'))
        # Test for default/all release
        if self.default_release:
            release_units.append(models.DebRelease(codename='default', id='defaultid'))
            component_units.append(models.DebComponent(name='all', id='allid', release='default'))
        for release in release_units:
            comp_dir = os.path.join(
                repo_config['http_publish_dir'],
                repo_config['relative_url'],
                'dists',
                release.codename)
            release_file = os.path.join(comp_dir, 'Release')
            self.assertTrue(os.path.exists(release_file))
            # Make sure the components Packages files exist
            for comp in [comp.name for comp in component_units
                         if comp.release == release.codename]:
                for arch in self.Architectures:
                    self.assertTrue(os.path.exists(
                        os.path.join(comp_dir, comp, 'binary-' + arch, 'Packages')))
            # #3917: make sure Description and Label are properly set
            rel_file_contents = deb822.Deb822(sequence=open(release_file))
            self.assertEqual(repo.id, rel_file_contents['Label'])
            self.assertEqual(repo.description, rel_file_contents['Description'])

        exp = [
            mock.call(repo.id, models.DebRelease, None),
            mock.call(repo.id, models.DebComponent, None),
            mock.call(repo.id, models.DebPackage, None),
        ]
        self.assertEquals(
            exp,
            _repo_controller.get_unit_model_querysets.call_args_list)

        publish_dir = os.path.join(repo_config['http_publish_dir'],
                                   repo_config['relative_url'])
        # Make sure there is a listing file
        lfpath = os.path.join(os.path.dirname(publish_dir), 'listing')
        self.assertEquals(repo_id, open(lfpath).read())
        # Parent directory too
        lfpath = os.path.join(os.path.dirname(os.path.dirname(lfpath)),
                              'listing')
        self.assertEquals('level1', open(lfpath).read())

        for release in unit_dict[ids.TYPE_ID_DEB_RELEASE]:
            work_release_file = os.path.join(self.pulp_working_dir, worker_name,
                                             "aabb", "dists", release.codename, "Release")
            _sign.assert_any_call(work_release_file)
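
The Label and Description assertions near the end work by reading the published Release file back through deb822. The same read in isolation, with a hypothetical path:

from debian import deb822

# Parse a published dists/<codename>/Release file and inspect the
# metadata fields the test asserts on.
with open("Release") as fh:  # hypothetical path
    release = deb822.Deb822(fh)
print(release.get("Label"), release.get("Description"))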
Example #4
def load_debian_issue(f, branches):
    deb_issue = deb822.Deb822(f)
    issue = {}

    issue['description'] = deb_issue['Description']

    references = \
        [ref
         for ref in LINE_BREAK_RE.split(deb_issue['References'].strip())
         if ref] + \
        [bug_url(ref)
         for ref in LINE_BREAK_RE.split(deb_issue['Bugs'].strip())
         if ref]
    if references:
        issue['references'] = references

    # Group and join comment lines by name
    comments = {}
    for line in LINE_BREAK_RE.split(deb_issue['Notes'].strip()):
        if not line:
            continue
        match = COMMENT_RE.match(line)
        if match:
            name = 'debian/' + match.group(1)
            rest = match.group(2)
        else:
            name = 'debian'
            rest = line
        comments.setdefault(name, []).append(rest)
    if comments:
        issue['comments'] = dict(
            (name, '\n'.join(lines)) for (name, lines) in comments.items())

    def get_fixes(branch_name, branch_format, match):
        if branch_format == BranchFormat.STANDARD:
            if match.group('changerefs'):
                hashes = [
                    ref_name for ref_name in COMMA_SEP_RE.split(
                        match.group('changerefs'))
                    if kernel_sec.issue.change_is_git_hash(ref_name)
                ]
                if hashes:
                    return hashes
        else:
            assert branch_format == BranchFormat.PATCH_QUEUE
            is_debian = branch_name.startswith('debian/')
            state = match.group('state')

            if is_debian:
                if state == 'released':
                    version = match.group('version')
                    if version is None or ',' in version or '-' not in version:
                        return None
                    ref_name = 'debian/' + version.replace('~', '_')
                else:
                    assert state == 'pending'
                    ref_name = branch_name[7:]
            else:
                ref_name = 'master'

            if match.group('changerefs'):
                assert branch_format == BranchFormat.PATCH_QUEUE
                patches = COMMA_SEP_RE.split(match.group('changerefs'))
                if patches:
                    return [
                        'patch:%s:%s' % (ref_name, file_name)
                        for file_name in patches
                    ]
            elif is_debian and state == 'released':
                # Fixed in this version but without any changes listed.
                # Probably fixed by importing a newer upstream.
                return ['version:' + ref_name]

        return None

    # Branch status
    for key in deb_issue:
        # Parse the branch name and determine format of the branch
        # dependent on state
        match = BRANCH_RE.match(key)
        if not match:
            continue
        base_ver = match.group('base_ver')
        if match.group('mainline'):
            branch_format = {
                'pending': BranchFormat.STANDARD,
                'released': BranchFormat.STANDARD,
            }
            branch_name = 'mainline'
        elif not match.group('debian'):
            branch_format = {
                'pending': BranchFormat.PATCH_QUEUE,
                'released': BranchFormat.STANDARD,
            }
            branch_name = 'stable/' + base_ver
        else:
            branch_format = {
                'pending': BranchFormat.PATCH_QUEUE,
                'released': BranchFormat.PATCH_QUEUE,
            }
            branch_name = 'debian/' + match.group('debian')
        if branch_name not in branches:
            continue

        # For mainline, fixes may span multiple releases
        for match in STATUS_RE.finditer(deb_issue[key]):
            state = match.group('state')
            if state in ['pending', 'released']:
                fixes = get_fixes(branch_name, branch_format[state], match)
                if fixes:
                    issue.setdefault('fixed-by',
                                     {}).setdefault(branch_name,
                                                    []).extend(fixes)
            # However, there will be only one "ignored" entry
            if state == 'ignored' and match.group('reason'):
                issue.setdefault('ignore',
                                 {})[branch_name] = match.group('reason')

    # Fill in status for Debian stable branches fixed before the
    # Debian branch point.  These will only be explicitly marked as
    # fixed in sid, though they may have a free-form comment
    # explaining why the stable branch wasn't affected.
    if 'sid' in deb_issue:
        match = STATUS_RE.match(deb_issue['sid'])
        version = match and match.group('version')
        if match \
           and match.group('state') == 'released' \
           and version and ',' not in version:
            fixes = get_fixes('debian/sid', BranchFormat.PATCH_QUEUE, match)
            if fixes:
                for branch_name, branch in branches.items():
                    if branch_name.startswith('debian/') \
                       and branch_name not in issue.get('fixed-by', {}) \
                       and dpkg_version_cmp(
                           version, branch['debian_branch_point']) <= 0:
                        issue.setdefault('fixed-by', {})[branch_name] = fixes

    return issue
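
Lookups such as deb_issue['Description'] and deb_issue['sid'] both work because Deb822 behaves as a case-insensitive mapping that preserves the original field spelling. A minimal sketch:

from debian import deb822

# Field names are matched case-insensitively, while the original
# spelling is preserved for output.
issue = deb822.Deb822(["Description: example issue",
                       "References: https://example.org"])
assert issue["description"] == issue["Description"]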
Example #5
def parse_submission(request):
    raw_text = request.read()

    data = deb822.Deb822(raw_text)
    raw_text_gpg_stripped = data.dump()

    ## Parse GPG info #########################################################

    uid = None
    data.raw_text = raw_text
    gpg_info = data.get_gpg_info()

    for x in ('VALIDSIG', 'NO_PUBKEY'):
        try:
            uid = gpg_info[x][0]
            break
        except (KeyError, IndexError):
            pass

    if uid is None:
        raise InvalidSubmission("Could not determine GPG uid")

    ## Check whether .buildinfo already exists ################################

    def create_submission(buildinfo):
        submission = buildinfo.submissions.create(
            key=Key.objects.get_or_create(uid=uid)[0], )

        default_storage.save(
            submission.get_storage_name(),
            ContentFile(raw_text),
        )

        return submission

    ## Parse new .buildinfo ###################################################

    def get_or_create(model, field):
        try:
            return model.objects.get_or_create(name=data[field])[0]
        except KeyError:
            raise InvalidSubmission("Missing required field: {}".format(field))

    if data.get('Format') not in SUPPORTED_FORMATS:
        raise InvalidSubmission(
            "Only {} 'Format:'  versions are supported".format(
                ', '.join(sorted(SUPPORTED_FORMATS)), ))

    sha1 = hashlib.sha1(raw_text_gpg_stripped.encode('utf-8')).hexdigest()

    try:
        with transaction.atomic():
            buildinfo = Buildinfo.objects.create(
                sha1=sha1,
                source=get_or_create(Source, 'Source'),
                architecture=get_or_create(Architecture, 'Architecture'),
                version=data['version'],
                build_path=data.get('Build-Path', ''),
                build_date=parse(data.get('Build-Date', '')),
                build_origin=get_or_create(Origin, 'Build-Origin'),
                build_architecture=get_or_create(Architecture,
                                                 'Build-Architecture'),
                environment=data.get('Environment', ''),
            )
    except IntegrityError:
        # Already exists; just attach a new Submission instance
        return create_submission(Buildinfo.objects.get(sha1=sha1)), False

    default_storage.save(
        buildinfo.get_storage_name(),
        ContentFile(raw_text_gpg_stripped),
    )

    ## Parse binaries #########################################################

    try:
        binary_names = set(data['Binary'].split(' '))
    except KeyError:
        raise InvalidSubmission("Missing 'Binary' field")

    if not binary_names:
        raise InvalidSubmission("Invalid 'Binary' field")

    binaries = {}
    for x in binary_names:
        # Save instances for lookup later
        binaries[x] = buildinfo.binaries.create(
            binary=Binary.objects.get_or_create(name=x)[0], )

    ## Parse checksums ########################################################

    hashes = ('Md5', 'Sha1', 'Sha256')

    checksums = {}
    for x in hashes:
        for y in data['Checksums-%s' % x].strip().splitlines():
            checksum, size, filename = y.strip().split()

            # Check size
            try:
                size = int(size)
                if size < 0:
                    raise ValueError()
            except ValueError:
                raise InvalidSubmission(
                    "Invalid size for {}: {}".format(filename, size), )

            checksums.setdefault(filename, {
                'size': size,
                'binary': None,
            })['checksum_{}'.format(x.lower())] = checksum

            existing = checksums[filename]['size']
            if size != existing:
                raise InvalidSubmission("Mismatched file size in "
                                        "Checksums-{}: {} != {}".format(
                                            x, existing, size))

    ## Create Checksum instances ##############################################

    for k, v in sorted(checksums.items()):
        # Match with Binary instances if possible
        m = re_binary.match(k)
        if m is not None:
            v['binary'] = binaries.get(m.group('name'))

        buildinfo.checksums.create(filename=k, **v)

    ## Validate Installed-Build-Depends #######################################

    for x in data['Installed-Build-Depends'].strip().splitlines():
        m = re_installed_build_depends.match(x.strip())

        if m is None:
            raise InvalidSubmission(
                "Invalid entry in Installed-Build-Depends: {}".format(x), )

    return create_submission(buildinfo), True
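
The signature-stripping step works because dump() re-serializes only the parsed paragraph, leaving out the OpenPGP armor wrapped around a signed .buildinfo. A minimal sketch, with a hypothetical file name:

from debian import deb822

# Parsing keeps only the control paragraph, so dump() returns the
# text without the surrounding signature armor.
with open("example.buildinfo") as fh:  # hypothetical path
    data = deb822.Deb822(fh)
unsigned_text = data.dump()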
Example #6
				trusted=trusted,
			)
		)
	else:
		apt_sources.append(
			AptSource(
				tokens[0], tokens[1], tokens[2], tokens[3:],
				trusted=trusted,
			)
		)

timestamps = {}

for source in apt_sources:
	with closing(urlopen(source.release_url)) as release_file:
		release_info = deb822.Deb822(release_file)
		try:
			timestamps[source] = calendar.timegm(time.strptime(
				release_info['date'],
				'%a, %d %b %Y %H:%M:%S %Z',
			))
		except (KeyError, ValueError):
			timestamps[source] = 0

if 'SOURCE_DATE_EPOCH' in os.environ:
	reference_timestamp = int(os.environ['SOURCE_DATE_EPOCH'])
else:
	reference_timestamp = max(timestamps.values())

if args.set_name is not None:
	name = args.set_name
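
The Date field of a Release file uses RFC 2822 date formatting, which the loop above turns into a Unix timestamp. The same conversion in isolation:

import calendar
import time

from debian import deb822

# Convert a Release file's Date field to seconds since the epoch,
# as the loop above does for each APT source.
release = deb822.Deb822(["Date: Sat, 01 Jan 2022 00:00:00 UTC"])
timestamp = calendar.timegm(
    time.strptime(release["Date"], '%a, %d %b %Y %H:%M:%S %Z'))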
Example #7
def jobize(path, job):
    with open(path, 'r') as f:
        obj = deb822.Deb822(f)
    obj['X-Debile-Job'] = str(job['id'])
    # reopen in binary mode for writing; dump() encodes the paragraph itself
    with open(path, 'wb') as f:
        obj.dump(fd=f)
    return obj
Example #8
    def setUp(self):
        self.control_data = {
            'Package': u'foo',
            'Version': u'0.0.1-1',
            'Architecture': u'amd64',
            'Maintainer': u'Brett Smith <*****@*****.**>',
            'Installed-Size': u'25',
            'Section': u'database',
            'Priority': u'extra',
            'Multi-Arch': u'foreign',
            'Homepage': u'https://github.com/xbcsmith/foo',
            'Description': u'So this is the Foo of Brixton program\n'
                           ' When they kick at your front door\n How you'
                           ' gonna come?\n With your hands on your head\n'
                           ' Or on the trigger of your gun',
        }

        self.md5sum_data = {
            'MD5sum': u'5fc5c0cb24690e78d6c6a2e13753f1aa',
            'SHA256': u'd80568c932f54997713bb7832c6da6aa04992919'
            'f3d0f47afb6ba600a7586780',
            'SHA1': u'5e26ae3ebf9f7176bb7fd01c9e802ac8e223cdcc'
        }

        self.hashes_data = {
            u'usr/share/doc/foo/changelog.Debian.gz':
            u'9e2d1b5db1f1fb50621a48538d570ee8',
            u'usr/share/doc/foo/copyright':
            u'a664cb0d199e56bb5691d8ae29ca759a',
            u'usr/share/doc/foo/README.Debian':
            u'22c9f74e69fd45c5a60331586763c253'
        }

        self.files_data = sorted([x for x in self.hashes_data.keys()])

        self.md5sum_string = '''\
MD5sum: 5fc5c0cb24690e78d6c6a2e13753f1aa
SHA1: 5e26ae3ebf9f7176bb7fd01c9e802ac8e223cdcc
SHA256: d80568c932f54997713bb7832c6da6aa04992919f3d0f47afb6ba600a7586780
'''

        self.files_string = 'usr/share/doc/foo/README.Debian\n'\
                            'usr/share/doc/foo/changelog.Debian.gz\n'\
                            'usr/share/doc/foo/copyright'

        self.files_string_bad = 'usr/share/doc/foo/changelog.Debian.gz\n'\
            'usr/share/doc/foo/README.Debian\n'\
            'usr/share/doc/foo/copyright'

        # self.files_string = '\n'.join(self.files_data)
        self.attrs_data = {
            'md5sum': u'5fc5c0cb24690e78d6c6a2e13753f1aa',
            'sha1': u'5e26ae3ebf9f7176bb7fd01c9e802ac8e223cdcc',
            'sha256': u'd80568c932f54997713bb7832c6da6aa049929'
            '19f3d0f47afb6ba600a7586780',
            'name': u'foo',
            'nevra': u'foo_0.0.1-1_amd64',
        }

        self.package_data = self.control_data.copy()
        self.package_data.update(self.md5sum_data)

        self.package_obj = deb822.Deb822(self.package_data)
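
The last line shows that Deb822 also accepts a plain dict of initial key-value pairs, which is convenient for building control stanzas in tests. A minimal sketch:

from debian import deb822

# Build a control paragraph from a dict and serialize it back to
# RFC822-style text.
pkg = deb822.Deb822({'Package': u'foo', 'Version': u'0.0.1-1'})
print(pkg.dump())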
Example #9
def jobize(path, job):
    with open(path, 'r') as f:
        obj = deb822.Deb822(f)
    obj['X-Lucy-Job'] = job
    # reopen in binary mode for writing; dump() encodes the paragraph itself
    with open(path, 'wb') as f:
        obj.dump(fd=f)
    return obj
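
Both jobize variants follow the same read-modify-write pattern. A hypothetical usage, assuming this variant is importable and the path exists:

# Hypothetical usage: stamp a control file with a job identifier.
obj = jobize('/tmp/example.dsc', 'job-42')
print(obj['X-Lucy-Job'])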
Example #10
    def test_pkg(self):

        package_data = {
            'Package': u'foo',
            'Version': u'0.0.1-1',
            'Architecture': u'amd64',
            'Maintainer': u'Brett Smith <*****@*****.**>',
            'Installed-Size': u'25',
            'Section': u'database',
            'Priority': u'extra',
            'Multi-Arch': u'foreign',
            'Homepage': u'https://github.com/xbcsmith/foo',
            'Description': u'So this is the Foo of Brixton program\n'
                           ' When they kick at your front door\n How you'
                           ' gonna come?\n With your hands on your head\n'
                           ' Or on the trigger of your gun',
            'MD5sum': u'5fc5c0cb24690e78d6c6a2e13753f1aa',
            'SHA256': u'd80568c932f54997713bb7832c6da6aa04992919'
                      'f3d0f47afb6ba600a7586780',
            'SHA1': u'5e26ae3ebf9f7176bb7fd01c9e802ac8e223cdcc'
        }

        attrs_data = {
            'md5sum': u'5fc5c0cb24690e78d6c6a2e13753f1aa',
            'sha1': u'5e26ae3ebf9f7176bb7fd01c9e802ac8e223cdcc',
            'sha256': u'd80568c932f54997713bb7832c6da6aa049929'
            '19f3d0f47afb6ba600a7586780',
            'name': u'foo',
            'nevra': u'foo_0.0.1-1_amd64',
        }

        md5sums_data = {
            u'usr/share/doc/foo/changelog.Debian.gz':
            u'9e2d1b5db1f1fb50621a48538d570ee8',
            u'usr/share/doc/foo/copyright':
            u'a664cb0d199e56bb5691d8ae29ca759a',
            u'usr/share/doc/foo/README.Debian':
            u'22c9f74e69fd45c5a60331586763c253'
        }

        files_data = [x for x in md5sums_data.keys()]

        package_attrs = {'foo': attrs_data}
        package_objects = {'foo': deb822.Deb822(package_data)}
        package_files = {'foo': DebPkgFiles(files_data)}
        package_md5sums = {'foo': DebPkgMD5sums(md5sums_data)}

        files = []
        for root, _, fl in os.walk(self.pool_dir):
            for f in fl:
                if f.endswith('.deb'):
                    files.append(os.path.join(root, f))

        packages = {}

        for fpath in files:
            pkg = DebPkg.from_file(fpath)
            packages.setdefault(pkg.name, pkg)

        for name, pkg in packages.items():
            if name in package_attrs:
                for attr in package_attrs[name]:
                    self.assertEquals(package_attrs[name][attr],
                                      getattr(pkg, attr))
            if name in package_md5sums:
                self.assertEquals(package_md5sums[name], pkg.md5sums)
            if name in package_files:
                self.assertEquals(sorted([x for x in package_files[name]]),
                                  sorted([x for x in pkg.files]))
                self.assertEquals(package_files[name], pkg.files)
            if name in package_data:
                self.assertEquals(package_objects[name], pkg.package)
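
The attribute assertions rely on DebPkg exposing the checksum and naming data as plain attributes. A short usage sketch, assuming DebPkg is imported as in the test module and using a hypothetical path:

# Hypothetical usage of the DebPkg helper exercised above.
pkg = DebPkg.from_file('/tmp/pool/foo_0.0.1-1_amd64.deb')
print(pkg.name, pkg.nevra, pkg.md5sum)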