Code example #1
File: pdk_deb.py Project: djibi2/pdk
 def __init__(self, path):
     DebTagLoader.__init__(self, "pdk:///")
     self._path = path
     ws = current_workspace()
     descriptor = ws.get_component_descriptor(self._path)
     cache = ws.world.get_backed_cache(ws.cache)
     self._component = descriptor.load(cache)
     self._contents = {}
     iterator_fn = self._component.iter_raw_ordered_contents
     refs = iterator_fn((Package,), True, None, None)
     i = 0
     types = ["deb", "udeb"]
     archs = [DEBARCH, "all"]
     # index only binary refs (deb/udeb) built for this architecture
     # (or 'all'), keyed by a running integer
     for ref in refs:
         if ref.type in types and ref.arch in archs:
             self._contents[i] = ref
             i += 1
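
The loop above builds a dense, integer-keyed index of just the binary
package references (deb or udeb) that apply to the build architecture.
Below is a stand-alone sketch of that filter; FakeRef and the 'amd64'
DEBARCH value are hypothetical stand-ins for the pdk reference objects
the real loop receives.

# A minimal sketch of the indexing step, outside of pdk.
from collections import namedtuple

FakeRef = namedtuple('FakeRef', 'type arch name')   # hypothetical stand-in
DEBARCH = 'amd64'                                   # assumed build arch

def index_binary_refs(refs, debarch=DEBARCH):
    """Return {0: ref, 1: ref, ...} for deb/udeb refs on this arch."""
    contents = {}
    for ref in refs:
        if ref.type in ('deb', 'udeb') and ref.arch in (debarch, 'all'):
            contents[len(contents)] = ref
    return contents

refs = [FakeRef('deb', 'amd64', 'apache2'),
        FakeRef('deb', 'i386', 'libc6'),       # wrong arch: skipped
        FakeRef('udeb', 'all', 'anna'),
        FakeRef('dsc', 'any', 'apache2-src')]  # source ref: skipped
indexed = index_binary_refs(refs)
assert [indexed[i].name for i in sorted(indexed)] == ['apache2', 'anna']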
Code example #2
File: audit.py Project: 64studio/pdk
def audit(args):
    """\\fB%prog\\fP
.PP
Load the component,
and verify that it and its parts are well-formed.
    """
    ##specialization code starts here

    def note_problem(fact, prediction, prediction_basis,
                     evidence, evidence_basis):
        """A mismatch handler for Arbiter

        Handle the case of a prediction that doesn't pan out
        by reporting the mismatch to stdout, in a format
        suitable for: cut -d'|'
        """
        note_problem.called = True
        fields = fact.get_problem_description(prediction, prediction_basis,
                                              evidence, evidence_basis)
        result = string_together(fields, '|')
        print result

    note_problem.called = False

    my_cache = workspace.current_workspace().cache
    arbiter = Arbiter(note_problem)

    for component_name in args.args:
        def _note_blob_id(blob_id):
            """Make common predictions and warrants for blob_id.

            Predict that a blob_id will be in cache
            Warrant the blob_id will be needed by the component.
            """
            arbiter.predict(InCache(blob_id), True, component_name)

        # Get the set of packages in the component
        descriptor = ComponentDescriptor(component_name)
        component = descriptor.load(my_cache)

        # predict expected blob_ids and headers
        for package in component.iter_packages():
            _note_blob_id(package.blob_id)
            arbiter.predict(InCache(package.blob_id + '.header'),
                            True, component_name)
            for package_tuple in package.extra_files:
                blob_id = package_tuple[0]
                _note_blob_id(blob_id)

        # predict upcoming source packages
        for package in component.iter_packages():
            if package.role == 'binary':
                fact = HasSource(package.format, package.pdk.sp_name,
                                 package.pdk.sp_version.full_version)
                arbiter.predict(fact, True, component_name)

        # warrant source packages found
        for package in component.iter_packages():
            if package.role == 'source':
                fact = HasSource(package.format, package.name,
                                 package.version.full_version)
                arbiter.warrant(fact, True, component_name)

    # predict upcoming cache checksums
    # pre-warrant found inodes
    # note inode -> filename relationships for later
    found_by_inode = {}
    for blob_id in my_cache:
        inode = my_cache.get_inode(blob_id)
        if blob_id.endswith('.header'):
            arbiter.warrant(InCache(blob_id), True, 'cache')
            continue

        entry = found_by_inode.setdefault(inode, Set([]))
        entry.add(blob_id)

        # ? Won't this create repeated predictions?
        for blob_id in found_by_inode[inode]:
            arbiter.predict(ChecksumMatches(blob_id), blob_id, 'cache')

    for inode, blob_ids in found_by_inode.iteritems():
        for blob_id in blob_ids:
            arbiter.warrant(InCache(blob_id), True, 'cache')

        # warrant cache checksums
        one_id = tuple(blob_ids)[0]
        handle = open(my_cache.file_path(one_id))
        sha1_digest = sha.new()
        md5_digest = md5.new()
        read_size = 8196
        while 1:
            block = handle.read(read_size)
            if not block:
                break
            sha1_digest.update(block)
            md5_digest.update(block)
        handle.close()

        prefixes = []
        for blob_id in blob_ids:
            if blob_id.startswith('sha-1'):
                prefixes.append('sha-1')
                arbiter.warrant(ChecksumMatches(blob_id),
                                'sha-1:' + sha1_digest.hexdigest(),
                                'cache')
            elif blob_id.startswith('md5'):
                prefixes.append('md5')
                arbiter.warrant(ChecksumMatches(blob_id),
                                'md5:' + md5_digest.hexdigest(), 'cache')
            else:
                # note unknown prefixes
                arbiter.note_problem(blob_id, ('md5:', 'sha-1:'),
                                     'unknown prefix')
        prefixes.sort()
        if prefixes != ['md5', 'sha-1']:
            digests = (md5_digest.hexdigest(), sha1_digest.hexdigest())
            arbiter.note_problem(tuple(blob_ids), digests,
                                 'not hard linked properly')

    arbiter.note_leftovers()

    if note_problem.called:
        raise IntegrityFault, "Audit detected fault(s)"
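
audit() is organized around a predict/warrant protocol: the component
pass predicts facts (a blob will be in the cache, a source package will
turn up), the cache pass warrants what is actually found, mismatches are
reported through note_problem, and predictions that are never warranted
surface via note_leftovers. pdk's real Arbiter is not shown on this
page; the toy below only sketches that bookkeeping under the simplest
possible matching rule, with string facts standing in for objects like
InCache.

# A toy arbiter illustrating the predict/warrant protocol; this is an
# assumption-laden sketch, not pdk's actual Arbiter.
class ToyArbiter(object):
    def __init__(self, mismatch_handler):
        self.mismatch_handler = mismatch_handler
        self.predictions = {}   # fact -> (expected value, basis)

    def predict(self, fact, expected, basis):
        self.predictions[fact] = (expected, basis)

    def warrant(self, fact, observed, basis):
        # unpredicted evidence is simply ignored in this sketch
        if fact in self.predictions:
            expected, pbasis = self.predictions.pop(fact)
            if expected != observed:
                self.mismatch_handler(fact, expected, pbasis,
                                      observed, basis)

    def note_leftovers(self):
        # a prediction never warranted is itself a fault,
        # e.g. a blob missing from the cache
        for fact, (expected, pbasis) in self.predictions.items():
            self.mismatch_handler(fact, expected, pbasis,
                                  None, 'never warranted')

def report(fact, prediction, prediction_basis, evidence, evidence_basis):
    print('|'.join(str(field) for field in
                   (fact, prediction, prediction_basis,
                    evidence, evidence_basis)))

arbiter = ToyArbiter(report)
arbiter.predict('InCache(md5:abc)', True, 'main.xml')
arbiter.warrant('InCache(md5:abc)', True, 'cache')    # matches: silent
arbiter.predict('InCache(md5:def)', True, 'main.xml')
arbiter.note_leftovers()    # md5:def was never found: reported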
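
In the cache pass, blob ids with different checksum prefixes are
expected to be hard links to one physical file, so each inode is read
once and every block feeds both digests. The sha and md5 modules used
above are Python 2 only; here is a self-contained sketch of the same
grouping and one-pass hashing, written against hashlib (their modern
replacement) and plain filesystem paths rather than pdk's cache API.

# Group hard-linked paths by inode and hash each inode's file once.
import hashlib
import os

def group_by_inode(paths):
    """Map st_ino -> set of paths hard-linked to that inode."""
    by_inode = {}
    for path in paths:
        by_inode.setdefault(os.stat(path).st_ino, set()).add(path)
    return by_inode

def one_pass_digests(path, read_size=8192):
    """Compute md5 and sha-1 hexdigests in a single read of path."""
    md5_digest, sha1_digest = hashlib.md5(), hashlib.sha1()
    with open(path, 'rb') as handle:
        while True:
            block = handle.read(read_size)
            if not block:
                break
            md5_digest.update(block)    # the same block feeds both
            sha1_digest.update(block)
    return md5_digest.hexdigest(), sha1_digest.hexdigest()

for inode, paths in group_by_inode(['/etc/hostname']).items():
    md5_hex, sha1_hex = one_pass_digests(sorted(paths)[0])
    print((inode, 'md5:' + md5_hex, 'sha-1:' + sha1_hex))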