Code example #1
File: repo_manage.py  Project: beebeeep/cacus
def upload_package(distro, comp, files, changes, skipUpdateMeta=False, forceUpdateMeta=False):
    # files is a list of .deb, .dsc, .tar.gz and .changes files,
    # all belonging to a single package
    meta = {}
    affected_arches = set()
    for file in files:
        filename = os.path.basename(file)
        base_key = "{0}/pool/{1}".format(distro, filename)

        with open(file, 'rb') as f:  # binary mode: we are hashing raw bytes
            hashes = common.get_hashes(f)

        log.info("Uploading %s to distro '%s' component '%s'", base_key, distro, comp)
        storage_key = plugin_loader.get_plugin('storage').put(base_key, filename=file)
        #storage_key = os.path.join(common.config['repo_daemon']['storage_subdir'], storage_key)

        if file.endswith('.deb') or file.endswith('.udeb'):
            if 'debs' not in meta:
                meta['debs'] = []

            deb = _process_deb_file(file, storage_key)
            meta['debs'].append(deb)
        else:
            if 'sources' not in meta:
                meta['sources'] = []
            source, dsc = _process_source_file(file, storage_key)
            meta['sources'].append(source)
            if dsc:
                meta['dsc'] = dsc
         
        if changes:
            meta['Source'] = changes['source']
            meta['Version'] = changes['version']
        else:
            # if the changes file is not present (i.e. we are uploading a single deb file to a non-strict repo),
            # take the package name and version from the 1st (which should also be the last) deb file
            meta['Source'] = meta['debs'][0]['Package']
            meta['Version'] = meta['debs'][0]['Version']

    affected_arches.update(x['Architecture'] for x in meta.get('debs', []))  # tolerate source-only uploads
    if affected_arches:
        # critical section: updating the meta DB
        try:
            with common.RepoLock(distro, comp):
                common.db_packages.packages.find_one_and_update(
                        {'Source': meta['Source'], 'Version': meta['Version']},
                        {'$set': meta, '$addToSet': {'repos': {'distro': distro, 'component': comp}}},
                        upsert=True)
                if not skipUpdateMeta:
                    log.info("Updating '%s/%s' distro metadata for arches: %s", distro, comp, ', '.join(affected_arches))
                    update_distro_metadata(distro, [comp], affected_arches, force=forceUpdateMeta)
        except common.RepoLockTimeout as e:
            log.error("Error updating distro: %s", e)
            raise common.TemporaryError("Cannot lock distro: {0}".format(e))
    else:
        log.info("No changes made on distro %s/%s, skipping metadata update", distro, comp)
Code example #2
File: repo_daemon.py  Project: beebeeep/cacus
@gen.coroutine  # assumption: the coroutine decorator is implied by the bare yield of a future below
def stream_from_storage(self, key=None, headers=None):
    headers = headers or []  # avoid the mutable-default-argument pitfall
    self.dead = False
    stream = common.ProxyStream(self, headers=headers)
    self.set_header('Content-Type', 'application/octet-stream')
    # TODO: Last-Modified, Content-Length and other metadata _should_ be provided!
    try:
        yield self.settings['workers'].submit(plugin_loader.get_plugin('storage').get, key, stream)
    except common.NotFound:
        self.set_status(404)
        app_log.error("Key %s was not found at storage", key)
    except common.FatalError as e:
        # self.set_status(500)
        app_log.error("Got error from storage plugin: %s", e)
    self.finish()
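
For context, here is a minimal sketch of how such a handler method might be wired into a Tornado application. The handler class, route and pool size are assumptions for illustration; the only facts taken from the excerpt are that the method lives on a RequestHandler and that self.settings['workers'] is an executor whose submit() returns a future.

# Hypothetical wiring sketch -- class name, route and pool size are illustrative.
from concurrent.futures import ThreadPoolExecutor

import tornado.ioloop
import tornado.web

class StorageHandler(tornado.web.RequestHandler):
    # stream_from_storage() from the excerpt above would be defined here

    def get(self, key):
        # Tornado awaits the future returned by the coroutine
        return self.stream_from_storage(key=key)

app = tornado.web.Application(
    [(r'/storage/(.*)', StorageHandler)],
    workers=ThreadPoolExecutor(max_workers=4),  # exposed as self.settings['workers']
)
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
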
Code example #3
File: repo_manage.py  Project: beebeeep/cacus
def update_distro_metadata(distro, comps=None, arches=None, force=False):
    now = datetime.utcnow()
    if not comps:
        comps = common.db_cacus.repos.find({'distro': distro}).distinct('component')
    if not arches:
        arches = common.db_cacus.repos.find({'distro': distro}).distinct('architecture')

    if not comps or not arches:
        raise common.NotFound("Distro {} is not found or empty".format(distro))

    # update all Packages files of specified architectures in specified components
    for comp in comps:
        for arch in arches:
            md5 = hashlib.md5()
            sha1 = hashlib.sha1()
            sha256 = hashlib.sha256()

            packages = generate_packages_file(distro, comp, arch)
            size = packages.tell()
            md5.update(packages.getvalue())
            sha1.update(packages.getvalue())
            sha256.update(packages.getvalue())

            old_repo = common.db_cacus.repos.find_one({'distro': distro, 'component': comp, 'architecture': arch}, {'packages_file': 1})
            if not force and old_repo and 'packages_file' in old_repo and md5.hexdigest() in old_repo['packages_file']:
                log.warning("Packages file for %s/%s/%s not changed, skipping update", distro, comp, arch)
                continue

            # we store each Packages file under a unique filename, since we don't want to make
            # assumptions about whether our storage engine supports updating existing keys
            base_key = "{}/{}/{}/Packages_{}".format(distro, comp, arch, md5.hexdigest())
            storage_key = plugin_loader.get_plugin('storage').put(base_key, file=packages)
            #storage_key = os.path.join(common.config['repo_daemon']['storage_subdir'], storage_key)

            old_repo = common.db_cacus.repos.find_one_and_update(
                    {'distro': distro, 'component': comp, 'architecture': arch},
                    {'$set': {
                        'distro': distro,
                        'component': comp,
                        'architecture': arch,
                        'md5': binary.Binary(md5.digest()),
                        'sha1': binary.Binary(sha1.digest()),
                        'sha256': binary.Binary(sha256.digest()),
                        'size': size,
                        'packages_file': storage_key,
                        'lastupdated': now
                        }},
                    return_document=ReturnDocument.BEFORE,
                    upsert=True)
            if not force and old_repo and 'packages_file' in old_repo:
                old_key = old_repo['packages_file']
                log.debug("Removing old Packages file %s", old_key)

                try:
                    plugin_loader.get_plugin('storage').delete(old_key)
                except common.NotFound:
                    log.warning("Cannot find old Packages file")
        # now update all Sources indices for each component
        md5 = hashlib.md5()
        sha1 = hashlib.sha1()
        sha256 = hashlib.sha256()
        sources = generate_sources_file(distro, comp)
        size = sources.tell()
        md5.update(sources.getvalue())
        sha1.update(sources.getvalue())
        sha256.update(sources.getvalue())
        
        old_sources = common.db_cacus.components.find_one({'distro': distro, 'component': comp}, {'sources_file': 1})
        if not force and old_sources and md5.hexdigest() in old_sources.get('sources_file', ''):
            log.warning("Sources file for %s/%s not changed, skipping update", distro, comp)
            continue
        base_key = "{}/{}/source/Sources_{}".format(distro, comp, md5.hexdigest())
        storage_key = plugin_loader.get_plugin('storage').put(base_key, file=sources)

        old_component = common.db_cacus.components.find_one_and_update(
                {'distro': distro, 'component': comp},
                {'$set': {
                    'distro': distro,
                    'component': comp,
                    'md5': binary.Binary(md5.digest()),
                    'sha1': binary.Binary(sha1.digest()),
                    'sha256': binary.Binary(sha256.digest()),
                    'size': size,
                    'sources_file': storage_key,
                    'lastupdated': now
                    }},
                return_document=ReturnDocument.BEFORE,
                upsert=True)
        if not force and old_component and 'sources_file' in old_component:
            old_key = old_component['sources_file']
            log.debug("Removing old Sources file %s", old_key)
            try:
                plugin_loader.get_plugin('storage').delete(old_key)
            except common.NotFound:
                log.warning("Cannot find old Packages file")


    # now create Release file for whole distro (aka "distribution" for Debian folks) including all comps and arches
    packages = list(common.db_cacus.repos.find({'distro': distro}))
    sources = list(common.db_cacus.components.find({'distro': distro}))
    distro_settings = common.db_cacus.distros.find_one({'distro': distro})

    # see https://wiki.debian.org/RepositoryFormat#Architectures - packages of the 'all' architecture
    # go into the other arches' indices, so 'all' shall not be listed in Release
    arches = set(x['architecture'] for x in packages if x['architecture'] != 'all')

    release = u""
    release += u"Origin: {}\n".format(distro)
    release += u"Label: {}\n".format(distro)
    release += u"Suite: {}\n".format(distro_settings.get('suite', 'unknown'))
    release += u"Codename: {}\n".format(distro)
    release += u"Date: {}\n".format(now.strftime("%a, %d %b %Y %H:%M:%S +0000"))
    release += u"Architectures: {}\n".format(' '.join(arches))
    release += u"Components: {}\n".format(' '.join(x['component'] for x in sources))
    release += u"Description: {}\n".format(distro_settings.get('description', 'Do not forget the description'))

    release += u"MD5Sum:\n"
    release += "\n".join(
            u" {} {} {}/binary-{}/Packages".format(hexlify(file['md5']), file['size'], file['component'], file['architecture'])
            for file in packages) + u"\n"
    release += "\n".join(
            u" {} {} {}/source/Sources".format(hexlify(file['md5']), file['size'], file['component'])
            for file in sources)
    release += u"\nSHA1:\n"
    release += "\n".join(
            u" {} {} {}/binary-{}/Packages".format(hexlify(file['sha1']), file['size'], file['component'], file['architecture'])
            for file in packages) + u"\n"
    release += "\n".join(
            u" {} {} {}/source/Sources".format(hexlify(file['sha1']), file['size'], file['component'])
            for file in sources)
    release += u"\nSHA256:\n"
    release += "\n".join(
            u" {} {} {}/binary-{}/Packages".format(hexlify(file['sha256']), file['size'], file['component'], file['architecture'])
            for file in packages) + u"\n"
    release += "\n".join(
            u" {} {} {}/source/Sources".format(hexlify(file['sha256']), file['size'], file['component'])
            for file in sources)
    release += u"\n"

    ### TODO Sources file ####

    release_gpg = common.gpg_sign(release.encode('utf-8'), common.config['gpg']['signer'])

    # the Release file and its GPG signature are small enough to put directly into the metabase
    common.db_cacus.distros.find_one_and_update(
            {'distro': distro},
            {'$set': {
                'distro': distro,
                'lastupdated': now,
                'release_file': release,
                'release_gpg': release_gpg
                }},
            upsert=True)
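
For reference, each checksum stanza assembled above follows the Debian Release file format: one line per index file of the form " <digest> <size> <path relative to dists/<distro>/>". A minimal sketch with made-up index content, showing how one such Packages line is derived:

# Minimal sketch of one Release checksum line; the index body is made up.
import hashlib

packages_body = b"Package: hello\nVersion: 1.0-1\nArchitecture: amd64\n"
line = u" {} {} {}/binary-{}/Packages".format(
    hashlib.md5(packages_body).hexdigest(), len(packages_body), 'main', 'amd64')
print(line)  # prints " <md5 hexdigest> <size> main/binary-amd64/Packages"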