def init_reliquary(argv=sys.argv):
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
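# A minimal invocation sketch, assuming this function is wired up as a
# console script named "initialize_reliquary" (the entry-point name is an
# assumption) and that the ini file has an [app:main] section with a
# sqlalchemy.url setting:
#
#     $ initialize_reliquary development.ini
#
#     ; development.ini (fragment)
#     [app:main]
#     sqlalchemy.url = sqlite:///reliquary.sqlite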
def pypi_simple(req):
    channel = req.matchdict.get('channel', 'default')
    index = req.matchdict.get('index', 'default')
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return dict(lines=[])
    lines = []
    relics = DBSession.query(Relic).filter_by(index_id=indexobj.uid)
    uniqrelics = {}
    for relic in relics:
        matches = split_pypi_name(relic.name)
        # if a name couldn't be satisfactorily extracted, use the whole
        # relic name as the package name
        if not matches:
            uniqrelics[relic.name] = True
        else:
            uniqrelics[matches[0]] = True
    for relic, _ in uniqrelics.items():
        lines.append("<a href='{0}'>{0}</a><br/>".format(
            pypi_normalize_package_name(relic)))
    lines.sort()
    return dict(lines=lines)
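# For reference, PEP 503 normalization lowercases the name and collapses
# runs of '-', '_', and '.' into a single '-'. A minimal sketch of what
# pypi_normalize_package_name is assumed to do:
#
#     import re
#
#     def pypi_normalize_package_name(name):
#         return re.sub(r"[-_.]+", "-", name).lower()
#
#     # e.g. "Django_Rest.Framework" -> "django-rest-framework"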
def pypi_simple_package(req):
    channel = req.matchdict.get('channel', 'default')
    index = req.matchdict.get('index', 'default')
    package = req.matchdict.get('package', None)
    if not package:
        return Response('{"status":"error","message":"package not found"}',
                        content_type='application/json',
                        status_code=404)
    package = pypi_normalize_package_name(package)
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return dict(lines=[])
    lines = []
    relics = DBSession.query(Relic).filter_by(index_id=indexobj.uid)
    matched = []
    for relic in relics:
        rparts = split_pypi_name(relic.name)
        # skip relics whose names can't be split into package/version parts
        if not rparts:
            continue
        normname = pypi_normalize_package_name(rparts[0])
        if package == normname:
            matched.append((relic.name, normname))
    matched.sort(key=lambda x: x[1])
    for relic in matched:
        packageurl = route_url('get_relic', req,
                               channel=channel,
                               index=index,
                               relic_name=relic[0])
        lines.append("<a href='{0}' rel='internal'>{1}</a><br/>".format(
            packageurl, relic[0]))
    return dict(lines=lines)
def commonjs_registry_root(req):
    channel = req.matchdict.get('channel', 'default')
    index = req.matchdict.get('index', 'default')
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return HTTPNotFound()
    relics = DBSession.query(Relic).filter_by(index_id=indexobj.uid)
    uniqrelics = {}
    for relic in relics:
        matches = split_commonjs_name(relic.name)
        if not matches:
            relicname = relic.name
        else:
            relicname = matches[0]
        relic_url = req.route_url('commonjs_registry_package_root',
                                  channel=channel,
                                  index=index,
                                  package=relicname)
        if relicname not in uniqrelics:
            uniqrelics[relicname] = relic_url
    return Response(json.dumps(uniqrelics, sort_keys=True, indent=2),
                    content_type='application/json',
                    status_code=200)
def autoindex(req):
    channel = req.matchdict.get('channel', 'default')
    index = req.matchdict.get('index', 'default')
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return Response('{"status":"error","message":"channel/index not found"}',
                        content_type='application/json',
                        status_code=404)
    # RELIC
    relics = DBSession.query(Relic) \
        .filter_by(index_id=indexobj.uid)
    if relics.count() <= 0:
        return Response('{"status":"error","message":"channel/index not found"}',
                        content_type='application/json',
                        status_code=404)
    relicout = []
    for relic in relics:
        relic_url = req.route_url('get_relic',
                                  channel=channel,
                                  index=index,
                                  relic_name=relic.name)
        relic_mtime = time.gmtime(float(relic.mtime))
        relicout.append('<a href="{}">{}</a>{}{}'.format(
            relic_url,
            relic.name,
            time.strftime('%d-%b-%Y %H:%M', relic_mtime)
                .rjust(79 - len(relic.name), ' '),
            str(relic.size).rjust(20, ' ')))
    return dict(display_path='/autoindex/{}/{}'.format(channel, index),
                relics=relicout)
def commonjs_registry_package_root(req):
    channel = req.matchdict.get('channel', None)
    index = req.matchdict.get('index', None)
    package = req.matchdict.get('package', None)
    if not channel or not index or not package:
        return HTTPNotFound()
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return HTTPNotFound()
    # this gets all packages that start with the requested package name,
    # but it may include more than the intended package -- from here,
    # each relic name needs to be broken down into its name and version,
    # then compared with the given name
    results = DBSession.query(Relic) \
        .filter_by(index_id=indexobj.uid) \
        .filter(Relic.name.startswith(package))
    packageobjroot = dict(name=package, versions=dict())
    for relic in results:
        name, version, _ = split_commonjs_name(relic.name)
        if name.strip().lower() == package.strip().lower():
            relic_url = req.route_url('get_relic',
                                      channel=channel,
                                      index=index,
                                      relic_name=relic.name)
            packageobjroot["versions"][version] = dict(
                name=name,
                version=version,
                dist=dict(tarball=relic_url))
    return Response(json.dumps(packageobjroot, sort_keys=True, indent=2),
                    content_type='application/json',
                    status_code=200)
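# The response built above follows the CommonJS registry package-root
# document shape, roughly (values are illustrative, not from a real index):
#
#     {
#       "name": "left-pad",
#       "versions": {
#         "1.0.0": {
#           "name": "left-pad",
#           "version": "1.0.0",
#           "dist": {"tarball": "http://.../left-pad-1.0.0.tgz"}
#         }
#       }
#     }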
def home(req):
    result = DBSession.query(Channel) \
        .join(Index.channel) \
        .values(Channel.name, Index.name)
    indices = []
    for (channel, index) in result:
        indices.append(dict(channel=channel, name=index))
    indices.sort(key=lambda x: x['channel'] + x['name'])
    return dict(indices=indices)
def get_unique_architectures_set(index_id):
    arches = set()
    relics = DBSession.query(Relic).filter_by(index_id=index_id)
    for relic in relics:
        parts = split_debian_name(relic.name)
        if not parts or not parts[2]:
            continue
        arches.add(parts[2])
    return arches
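# split_debian_name is assumed to parse the conventional Debian binary
# package file name "name_version_arch.deb" into (name, version, arch),
# e.g. "htop_2.0.2-1_amd64.deb" -> ("htop", "2.0.2-1", "amd64"), so
# parts[2] above is the architecture.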
def fetch_channel_index_items(req, channelobj, route_name):
    items = []
    indices = DBSession.query(Index).filter_by(channel_id=channelobj.uid)
    for idx in indices:
        items.append(dict(
            url=req.route_url(route_name,
                              channel=channelobj.name,
                              index=idx.name),
            text=idx.name,
            cls="folder",
        ))
    items.sort(key=lambda x: x["text"])
    return items
def fetch_channel_from_name(name):
    if not name:
        return None
    try:
        channelobj = DBSession.query(Channel) \
            .filter_by(name=name) \
            .one()
    except (NoResultFound, MultipleResultsFound):
        logger.error(
            'no channel object, or more than one found for "{}"'.format(name))
        return None
    return channelobj
def fetch_index_from_name(channelobj, name):
    if not name:
        return None
    try:
        indexobj = DBSession.query(Index) \
            .filter_by(channel_id=channelobj.uid, name=name) \
            .one()
    except (NoResultFound, MultipleResultsFound):
        logger.error(
            'no index object, or more than one found for {}'.format(name))
        return None
    return indexobj
def fetch_relic_if_not_exists(req, channel, index, relic_name, upstream):
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return
    relics = DBSession.query(Relic) \
        .filter_by(index_id=indexobj.uid, name=relic_name)
    # doesn't exist locally yet
    if relics.count() <= 0:
        # get valid paths, if there are valid paths to be had
        pathcheck = validate_reliquary_location(req, channel, index,
                                                relic_name=relic_name)
        if isinstance(pathcheck, Response):
            return
        reliquary, relic_folder, relic_path = pathcheck
        # create the channel/index if it doesn't exist
        if not os.path.exists(relic_folder):
            os.makedirs(relic_folder)
        # fetch
        resp = requests.get(upstream)
        # save
        with open(relic_path, 'wb') as fout:
            fout.write(resp.content)
        # index
        with transaction.manager:
            DBSession.add(
                Relic(dirty=False,
                      index_id=indexobj.uid,
                      name=relic_name,
                      mtime=str(os.path.getmtime(relic_path)),
                      size=os.path.getsize(relic_path)))
def pregenerate_deb_indices():
    # channel + index containing relics with cached deb info
    indices = DBSession.query(Channel.name, Channel.uid,
                              Index.name, Index.uid) \
        .filter(Index.channel_id == Channel.uid) \
        .filter(Relic.index_id == Index.uid) \
        .filter(DebInfo.relic_id == Relic.uid) \
        .distinct()
    for (channel, chanid, index, indexid) in indices:
        arches = get_unique_architectures_set(indexid)
        for arch in arches:
            if not generate_debian_package_index(channel, index, arch,
                                                 force=True):
                logger.error("Failed to generate Packages for "
                             "debian/{}/dist/{}/main/binary-{}"
                             .format(channel, index, arch))
            if not generate_debian_package_index(channel, index, arch,
                                                 compression='gz',
                                                 force=True):
                logger.error("Failed to generate Packages.gz for "
                             "debian/{}/dist/{}/main/binary-{}"
                             .format(channel, index, arch))
            if not generate_debian_package_index(channel, index, arch,
                                                 compression='bz2',
                                                 force=True):
                logger.error("Failed to generate Packages.bz2 for "
                             "debian/{}/dist/{}/main/binary-{}"
                             .format(channel, index, arch))
        get_debian_release_data(indexid, force=True)
def debian_pooldistindex(req):
    channel = req.matchdict.get('channel', None)
    index = req.matchdict.get('index', None)
    indexobj = fetch_index_from_names(channel, index)
    if not indexobj:
        return HTTPNotFound()
    items = []
    relics = DBSession.query(Relic).filter_by(index_id=indexobj.uid)
    for relic in relics:
        items.append(dict(
            url=req.route_url('debian_poolpackage',
                              channel=channel,
                              index=index,
                              relic_name=relic.name),
            text=relic.name,
            cls="file",
        ))
    items.sort(key=lambda x: x["text"])
    return dict(
        page_title="Index of /{}/pool/{}/".format(channel, index),
        items=items,
        datetime_generated=time.strftime("%Y-%m-%d %H:%M:%S"),
        show_updir=True,
    )
def index_deb_info(name, path, obj, indexname):
    # relative to the repository root, which would be something
    # like /api/v1/{channel}/
    filename = "pool/{}/{}".format(indexname, name)

    # md5, sha1, sha256, and sha512 of the file
    blocksize = 65536
    md5sum_hasher = hashlib.md5()
    sha1_hasher = hashlib.sha1()
    sha256_hasher = hashlib.sha256()
    sha512_hasher = hashlib.sha512()
    with open(path, 'rb') as fin:
        buf = fin.read(blocksize)
        while len(buf) > 0:
            md5sum_hasher.update(buf)
            sha1_hasher.update(buf)
            sha256_hasher.update(buf)
            sha512_hasher.update(buf)
            buf = fin.read(blocksize)
    md5sum = md5sum_hasher.hexdigest()
    sha1 = sha1_hasher.hexdigest()
    sha256 = sha256_hasher.hexdigest()
    sha512 = sha512_hasher.hexdigest()

    # the rest is ripped out of the .deb file or generated based
    # on the information there.
    deb = debfile.DebFile(path)
    control = deb.control.debcontrol()
    # deb control is a dict-like object where key lookups are case-insensitive
    multi_arch = control.get("multi-arch", None)
    package = control.get("package", None)
    source = control.get("source", None)
    version = control.get("version", None)
    section = control.get("section", None)
    priority = control.get("priority", None)
    architecture = control.get("architecture", None)
    essential = control.get("essential", None)
    depends = control.get("depends", None)
    recommends = control.get("recommends", None)
    suggests = control.get("suggests", None)
    enhances = control.get("enhances", None)
    pre_depends = control.get("pre-depends", None)
    installed_size = control.get("installed-size", None)
    maintainer = control.get("maintainer", None)
    description = control.get("description", None)
    description_md5 = control.get("description-md5", None)
    homepage = control.get("homepage", None)
    built_using = control.get("built-using", None)

    # missing required fields are a deal breaker for including this package
    # in the index
    msg = name + " skipped for deb info: '{}' not found in control"
    if not package:
        logger.error(msg.format('Package'))
        return
    if not version:
        logger.error(msg.format('Version'))
        return
    if not architecture:
        logger.error(msg.format('Architecture'))
        return
    if not maintainer:
        logger.error(msg.format('Maintainer'))
        return
    if not description:
        logger.error(msg.format('Description'))
        return

    # if the description-md5 wasn't specified, compute it. The computed value
    # starts at the second character after the colon in the control file
    # (basically, allowing the 'Header: value' format of the text file) and
    # includes a trailing newline character. The value must be lowercase
    # hex md5.
    if not description_md5:
        if description[-1] == "\n":
            description_md5 = hashlib.md5(description.encode()).hexdigest()
        else:
            description_md5 = hashlib.md5(
                (description + "\n").encode()).hexdigest()

    kwargs = dict(
        filename=filename,
        md5sum=md5sum,
        sha1=sha1,
        sha256=sha256,
        sha512=sha512,
        multi_arch=multi_arch,
        package=package,
        source=source,
        version=version,
        section=section,
        priority=priority,
        architecture=architecture,
        essential=essential,
        depends=depends,
        recommends=recommends,
        suggests=suggests,
        enhances=enhances,
        pre_depends=pre_depends,
        installed_size=installed_size,
        maintainer=maintainer,
        description=description,
        description_md5=description_md5,
        homepage=homepage,
        built_using=built_using)

    try:
        debinfo_dbobj = DBSession.query(DebInfo) \
            .filter_by(relic_id=obj.uid) \
            .one_or_none()
        with transaction.manager:
            if debinfo_dbobj:
                logger.info("Updating deb info for " + name)
                DBSession.query(DebInfo) \
                    .filter_by(uid=debinfo_dbobj.uid) \
                    .update(kwargs)
            else:
                logger.info("Adding deb info for " + name)
                kwargs['relic_id'] = obj.uid
                DBSession.add(DebInfo(**kwargs))
    except MultipleResultsFound:
        logger.error("Apparently there's more than one debinfo object "
                     "associated with '" + obj.name + "'")
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    authn_policy = BasicAuthAuthenticationPolicy(
        groupfinder,
        realm=settings['reliquary.realm'],
        debug=settings['pyramid.debug'])
    authz_policy = ACLAuthorizationPolicy()
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    config = Configurator(
        settings=settings,
        authentication_policy=authn_policy,
        authorization_policy=authz_policy,
        root_factory=Root,
    )
    config.include('pyramid_chameleon')
    # config.add_static_view('static', 'static', cache_max_age=3600)

    # ui
    config.add_route('home', '/api/v1/', request_method='GET')

    # basic api
    config.add_route('put_relic',
                     '/api/v1/raw/{channel}/{index}/{relic_name}',
                     request_method='PUT')
    config.add_route('get_relic',
                     '/api/v1/raw/{channel}/{index}/{relic_name}',
                     request_method='GET')

    # autoindex (nginx autogenerated index page compatible)
    config.add_route('autoindex',
                     '/api/v1/autoindex/{channel}/{index}/',
                     request_method='GET')

    # python package index (PEP-503 compliant)
    # PROXY
    config.add_route('pypi_proxy_simple_package',
                     '/api/v1/python/proxy/{channel}/{index}/simple/{package}/',
                     request_method='GET')
    config.add_route('pypi_proxy_simple',
                     '/api/v1/python/proxy/{channel}/{index}/simple/',
                     request_method='GET')
    # just to keep compat with pypi.python.org package locations -- calls out
    # to upstream or to the get_relic route
    config.add_route('pypi_proxy_package',
                     '/api/v1/python/proxy/{channel}/{index}/packages/{parta}/{partb}/{hash}/{package}',
                     request_method='GET')
    # SELF-HOSTED
    config.add_route('pypi_simple_package',
                     '/api/v1/python/{channel}/{index}/simple/{package}/',
                     request_method='GET')
    config.add_route('pypi_simple',
                     '/api/v1/python/{channel}/{index}/simple/',
                     request_method='GET')

    # commonjs registry (http://wiki.commonjs.org/wiki/Packages/Registry)
    # npmjs.org is historically based on this, and npm should be compatible
    # PROXY
    # these mostly try to replicate the npmjs registry public api in function
    # npmjs registry api: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
    # set the registry used with npm-config (.npmrc files): https://docs.npmjs.com/files/npmrc
    # http://registry.npmjs.org/<name>/<version>
    config.add_route('commonjs_proxy_registry_package_version',
                     '/api/v1/commonjs/proxy/{channel}/{index}/{package}/{version}/',
                     request_method='GET')
    # http://registry.npmjs.org/<name>/
    config.add_route('commonjs_proxy_registry_package_root',
                     '/api/v1/commonjs/proxy/{channel}/{index}/{package}/',
                     request_method='GET')
    # http://registry.npmjs.org/-/all
    config.add_route('commonjs_proxy_registry_root',
                     '/api/v1/commonjs/proxy/{channel}/{index}/',
                     request_method='GET')
    # http://registry.npmjs.org/-/<package>-<version>.tgz
    config.add_route('commonjs_proxy_package',
                     '/api/v1/commonjs/proxy/package/{channel}/{index}/{package}/{version}',
                     request_method='GET')
    # SELF-HOSTED
    config.add_route('commonjs_registry_root',
                     '/api/v1/commonjs/{channel}/{index}/',
                     request_method='GET')
    config.add_route('commonjs_registry_package_root',
                     '/api/v1/commonjs/{channel}/{index}/{package}/',
                     request_method='GET')
    config.add_route('commonjs_registry_package_version',
                     '/api/v1/commonjs/{channel}/{index}/{package}/{version}/',
                     request_method='GET')

    # debian repository (https://wiki.debian.org/RepositoryFormat)
    # additional info: http://www.ibiblio.org/gferg/ldp/giles/repository/repository-2.html
    # these are the minimum required paths
    # example sources.list entry:
    #   deb http://127.0.0.1/api/v1/debian/wildcard trusty main
    config.add_route('debian_distrelease',
                     '/api/v1/debian/{channel}/dist/{index}/Release',
                     request_method='GET')
    config.add_route('debian_archrelease',
                     '/api/v1/debian/{channel}/dist/{index}/main/binary-{arch}/Release',
                     request_method='GET')
    config.add_route('debian_archpackages',
                     '/api/v1/debian/{channel}/dist/{index}/main/binary-{arch}/Packages',
                     request_method='GET')
    config.add_route('debian_archpackagesgz',
                     '/api/v1/debian/{channel}/dist/{index}/main/binary-{arch}/Packages.gz',
                     request_method='GET')
    config.add_route('debian_archpackagesbz2',
                     '/api/v1/debian/{channel}/dist/{index}/main/binary-{arch}/Packages.bz2',
                     request_method='GET')
    config.add_route('debian_poolpackage',
                     '/api/v1/debian/{channel}/pool/{index}/{relic_name}',
                     request_method='GET')
    # additional paths that could be just a directory listing of some sort
    # (like autoindex)
    config.add_route('debian_archindex',
                     '/api/v1/debian/{channel}/dist/{index}/main/binary-{arch}/',
                     request_method='GET')
    config.add_route('debian_compindex',
                     '/api/v1/debian/{channel}/dist/{index}/main/',
                     request_method='GET')
    config.add_route('debian_distindex',
                     '/api/v1/debian/{channel}/dist/{index}/',
                     request_method='GET')
    config.add_route('debian_distrootindex',
                     '/api/v1/debian/{channel}/dist/',
                     request_method='GET')
    config.add_route('debian_channelindex',
                     '/api/v1/debian/{channel}/',
                     request_method='GET')
    config.add_route('debian_pooldistindex',
                     '/api/v1/debian/{channel}/pool/{index}/',
                     request_method='GET')
    config.add_route('debian_poolrootindex',
                     '/api/v1/debian/{channel}/pool/',
                     request_method='GET')

    config.add_notfound_view(notfound, append_slash=True)
    config.scan('.views')
    return config.make_wsgi_app()
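# For reference, clients would point at the routes above roughly like this
# (host, channel, and index names are illustrative):
#
#     pip:  pip install --index-url http://127.0.0.1/api/v1/python/default/default/simple/ somepackage
#     npm:  registry=http://127.0.0.1/api/v1/commonjs/default/default/   (in .npmrc)
#     apt:  deb http://127.0.0.1/api/v1/debian/default default main      (in sources.list)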
def get_debian_release_data(index_id, force=False):
    indexobj = DBSession.query(Index).filter_by(uid=index_id).one()
    index = indexobj.name
    channel = DBSession.query(Channel).filter_by(
        uid=indexobj.channel_id).one().name
    key = "{}-{}-release".format(channel, index)

    if not force:
        try:
            cacheddata = DBSession.query(FileCache).filter_by(
                key=key).one_or_none()
            if cacheddata:
                return cacheddata.value
        except MultipleResultsFound:
            logger.error(
                "apparently there's more than one cached release file for "
                "/{}/dist/{}/Release".format(channel, index))
            return None
    else:
        with transaction.manager:
            DBSession.query(FileCache).filter_by(key=key).delete()

    lines = []

    # Suite -- indicates one of 'oldstable', 'stable', 'testing', 'unstable',
    # 'experimental' with optional suffixes such as '-updates'
    # for now, fixed to 'stable'
    # (required)
    lines.append("Suite: stable")

    # Codename -- describes the codename of the release
    # for now, fixed to 'reliquary'
    # (required)
    lines.append("Codename: reliquary")

    # Origin -- describes the origin of the release
    # for now, fixed to 'reliquary'
    # (optional)
    lines.append("Origin: reliquary")

    # Architectures -- all of the different architectures for the packages
    # being managed
    # (required)
    unique_arches = get_unique_architectures_set(index_id)
    lines.append("Architectures: {}".format(" ".join(unique_arches)))

    # Components -- this is a fixed and static value for now
    # (required)
    lines.append("Components: main")

    # Date -- the time the release file was created, in UTC
    # (required)
    utcnow = "{:%a, %b %d %Y %H:%M:%S +0000}".format(
        datetime.datetime.utcnow())
    lines.append("Date: {}".format(utcnow))

    # MD5Sum/SHA1/SHA256/SHA512 -- lists of indices with their hash and size;
    # each line contains whitespace-separated values:
    #   1. checksum in the corresponding format
    #   2. size of the file
    #   3. filename relative to the directory of the Release file
    arches = get_unique_architectures_set(index_id)
    md5sums = []
    sha1s = []
    sha256s = []
    for arch in arches:
        package = generate_debian_package_index(channel, index, arch)
        packagegz = generate_debian_package_index(channel, index, arch,
                                                  compression='gz')
        packagebz2 = generate_debian_package_index(channel, index, arch,
                                                   compression='bz2')
        release = generate_debian_arch_release_index(arch)
        # uncompressed
        if not package:
            logger.error(
                "Failed to get Packages details for debian/{}/dist/{}/[In]Release ({})"
                .format(channel, index, arch))
        else:
            md5sums.append(" {} {} {}".format(
                package[3], str(package[2]).rjust(15),
                "main/binary-{}/Packages".format(arch)))
            sha1s.append(" {} {} {}".format(
                package[4], str(package[2]).rjust(15),
                "main/binary-{}/Packages".format(arch)))
            sha256s.append(" {} {} {}".format(
                package[5], str(package[2]).rjust(15),
                "main/binary-{}/Packages".format(arch)))
        # gz
        if not packagegz:
            logger.error(
                "Failed to get Packages.gz details for debian/{}/dist/{}/[In]Release ({})"
                .format(channel, index, arch))
        else:
            md5sums.append(" {} {} {}".format(
                packagegz[3], str(packagegz[2]).rjust(15),
                "main/binary-{}/Packages.gz".format(arch)))
            sha1s.append(" {} {} {}".format(
                packagegz[4], str(packagegz[2]).rjust(15),
                "main/binary-{}/Packages.gz".format(arch)))
            sha256s.append(" {} {} {}".format(
                packagegz[5], str(packagegz[2]).rjust(15),
                "main/binary-{}/Packages.gz".format(arch)))
        # bz2
        if not packagebz2:
            logger.error(
                "Failed to get Packages.bz2 details for debian/{}/dist/{}/[In]Release ({})"
                .format(channel, index, arch))
        else:
            md5sums.append(" {} {} {}".format(
                packagebz2[3], str(packagebz2[2]).rjust(15),
                "main/binary-{}/Packages.bz2".format(arch)))
            sha1s.append(" {} {} {}".format(
                packagebz2[4], str(packagebz2[2]).rjust(15),
                "main/binary-{}/Packages.bz2".format(arch)))
            sha256s.append(" {} {} {}".format(
                packagebz2[5], str(packagebz2[2]).rjust(15),
                "main/binary-{}/Packages.bz2".format(arch)))
        # release
        if not release:
            logger.error(
                "Failed to get Release details for debian/{}/dist/{}/[In]Release ({})"
                .format(channel, index, arch))
        else:
            md5sums.append(" {} {} {}".format(
                release[3], str(release[2]).rjust(15),
                "main/binary-{}/Release".format(arch)))
            sha1s.append(" {} {} {}".format(
                release[4], str(release[2]).rjust(15),
                "main/binary-{}/Release".format(arch)))
            sha256s.append(" {} {} {}".format(
                release[5], str(release[2]).rjust(15),
                "main/binary-{}/Release".format(arch)))
    lines.append("MD5Sum:")
    lines.append("\n".join(md5sums))
    lines.append("SHA1:")
    lines.append("\n".join(sha1s))
    lines.append("SHA256:")
    lines.append("\n".join(sha256s))

    # Acquire-By-Hash -- an alternative method for clients; this is just an
    # indicator of whether or not the server supports it
    # for now, reliquary does not support this
    lines.append("Acquire-By-Hash: no")

    data = "\n".join(lines)
    bytedata = data.encode()
    with transaction.manager:
        DBSession.add(
            FileCache(
                key=key,
                value=bytedata,
                mtime=datetime.datetime.utcnow(),
                size=len(data),
                md5sum=hashlib.md5(bytedata).hexdigest(),
                sha1=hashlib.sha1(bytedata).hexdigest(),
                sha256=hashlib.sha256(bytedata).hexdigest(),
            ))
    return data
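# The generated Release file then looks roughly like this (hashes, sizes,
# and architectures are illustrative):
#
#     Suite: stable
#     Codename: reliquary
#     Origin: reliquary
#     Architectures: amd64 i386
#     Components: main
#     Date: Mon, Jan 01 2018 00:00:00 +0000
#     MD5Sum:
#      d41d8cd98f00b204e9800998ecf8427e              12 main/binary-amd64/Packages
#     SHA1:
#      ...
#     SHA256:
#      ...
#     Acquire-By-Hash: no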
def reindex():
    description = """\
    Reindex reliquary storage.
    """
    usage = "usage: %prog config_uri"
    parser = optparse.OptionParser(
        usage=usage,
        description=textwrap.dedent(description)
    )
    options, args = parser.parse_args(sys.argv[1:])
    if not len(args) > 0:
        logger.error('at least the config uri is needed')
        return 2
    config_uri = args[0]
    env = bootstrap(config_uri)
    settings, closer = env['registry'].settings, env['closer']
    try:
        # mark everything as dirty so we can delete anything that
        # is not clean by the end of the reindex
        with transaction.manager:
            DBSession.query(Channel).update({'dirty': True})
            DBSession.query(Index).update({'dirty': True})
            DBSession.query(Relic).update({'dirty': True})

        # now, walk through the reliquary and index relics
        reliquary = settings.get('reliquary.location', None)

        # ### CHANNEL
        for channel in os.listdir(reliquary):
            # make sure the directory exists
            channel_path = os.path.join(reliquary, channel)
            if not os.path.isdir(channel_path):
                continue
            # make sure the db object exists
            channel_dbobj = DBSession.query(Channel) \
                .filter_by(name=channel) \
                .first()
            if not channel_dbobj:
                channel_dbobj = Channel(dirty=False, name=channel)
                with transaction.manager:
                    DBSession.add(channel_dbobj)
            else:
                # channel has a real path and a db entry, so it should
                # not be dirty
                with transaction.manager:
                    DBSession.query(Channel) \
                        .filter_by(uid=channel_dbobj.uid) \
                        .update({'dirty': False})

            # ### INDEX
            for index in os.listdir(channel_path):
                # make sure the directory exists
                index_path = os.path.join(channel_path, index)
                if not os.path.isdir(index_path):
                    continue
                # make sure the db object exists
                index_dbobj = DBSession.query(Index) \
                    .filter_by(channel_id=channel_dbobj.uid, name=index) \
                    .first()
                if not index_dbobj:
                    index_dbobj = Index(dirty=False,
                                        name=index,
                                        channel_id=channel_dbobj.uid)
                    with transaction.manager:
                        DBSession.add(index_dbobj)
                else:
                    # index has a real path and a db entry, so it should
                    # not be dirty
                    with transaction.manager:
                        DBSession.query(Index) \
                            .filter_by(uid=index_dbobj.uid) \
                            .update({'dirty': False})

                # ### RELIC
                for relic in os.listdir(index_path):
                    relic_path = os.path.join(index_path, relic)
                    try:
                        relic_dbobj = DBSession.query(Relic) \
                            .filter_by(index_id=index_dbobj.uid,
                                       name=relic) \
                            .one_or_none()
                        relic_mtime = str(os.path.getmtime(relic_path))
                        relic_size = os.path.getsize(relic_path)
                        if relic_dbobj:
                            with transaction.manager:
                                DBSession.query(Relic) \
                                    .filter_by(uid=relic_dbobj.uid) \
                                    .update({'dirty': False,
                                             'mtime': relic_mtime,
                                             'size': relic_size})
                        else:
                            relic_dbobj = Relic(dirty=False,
                                                index_id=index_dbobj.uid,
                                                name=relic,
                                                mtime=relic_mtime,
                                                size=relic_size)
                            with transaction.manager:
                                DBSession.add(relic_dbobj)
                        # if the relic is a debian archive, there's additional
                        # info that should be pulled out to make generating
                        # a deb repo more efficient
                        if relic[-4:] == ".deb":
                            index_deb_info(relic, relic_path, relic_dbobj,
                                           index)
                    except MultipleResultsFound:
                        logger.error('index [{}/{}/{}] contains non-unique '
                                     '/channel/index/relic_name'
                                     .format(channel, index, relic))

        # delete all channels, indices, and relics that are still dirty
        with transaction.manager:
            DBSession.query(Channel) \
                .filter_by(dirty=True) \
                .delete()
            DBSession.query(Index) \
                .filter_by(dirty=True) \
                .delete()
            DBSession.query(Relic) \
                .filter_by(dirty=True) \
                .delete(synchronize_session=False)
        pregenerate_deb_indices()
    except Exception as ex:
        logger.critical("something went wrong")
        logger.critical(ex)
    finally:
        closer()
def generate_debian_package_index(channel, index, arch,
                                  compression=None, force=False):
    basekey = '{}-{}-{}-'.format(channel, index, arch)
    key = basekey
    key += compression if compression else 'none'

    if not force:
        try:
            cacheddata = DBSession.query(FileCache).filter_by(
                key=key).one_or_none()
            if cacheddata:
                return (cacheddata.value, cacheddata.mtime, cacheddata.size,
                        cacheddata.md5sum, cacheddata.sha1, cacheddata.sha256)
        except MultipleResultsFound:
            logger.error('multiple values with cache key "{}"'.format(key))
            return None
    else:
        with transaction.manager:
            DBSession.query(FileCache).filter_by(key=key).delete()

    # since the request is for a compressed file, check to see if we have a
    # cached uncompressed file first, and base our compressed one off of
    # that... mainly so the cached files remain in sync in terms of content
    if compression:
        uncompressedkey = basekey + 'none'
        try:
            cacheddata = DBSession.query(FileCache).filter_by(
                key=uncompressedkey).one_or_none()
            if cacheddata:
                bytedata = cacheddata.value
                if compression == "gz":
                    finaldata = gzip.compress(bytedata)
                elif compression == "bz2":
                    finaldata = bz2.compress(bytedata)
                else:
                    finaldata = bytedata
                mtime = datetime.datetime.utcnow()
                size = len(finaldata)
                md5sum = hashlib.md5(finaldata).hexdigest()
                sha1 = hashlib.sha1(finaldata).hexdigest()
                sha256 = hashlib.sha256(finaldata).hexdigest()
                newcacheddata = FileCache(key=key,
                                          value=finaldata,
                                          mtime=mtime,
                                          size=size,
                                          md5sum=md5sum,
                                          sha1=sha1,
                                          sha256=sha256)
                with transaction.manager:
                    DBSession.add(newcacheddata)
                return (finaldata, mtime, size, md5sum, sha1, sha256)
        except MultipleResultsFound:
            logger.error(
                'multiple values with cache key "{}"'.format(uncompressedkey))
            return None

    # generate and cache the Packages file
    lines = []
    archobjs = DBSession.query(Relic, DebInfo) \
        .filter(Relic.uid == DebInfo.relic_id) \
        .filter(DebInfo.architecture.ilike('%{0}%'.format(arch)))
    for relic, debinfo in archobjs:
        # we're possibly pulling only partial matches, so this just confirms
        # the selection choice
        arches = [a.lower().strip() for a in debinfo.architecture.split()]
        if arch not in arches:
            continue
        lines.append("Package: {}".format(debinfo.package))
        if debinfo.source:
            lines.append("Source: {}".format(debinfo.source))
        lines.append("Version: {}".format(debinfo.version))
        if debinfo.section:
            lines.append("Section: {}".format(debinfo.section))
        if debinfo.priority:
            lines.append("Priority: {}".format(debinfo.priority))
        lines.append("Architecture: {}".format(debinfo.architecture))
        if debinfo.essential:
            lines.append("Essential: {}".format(debinfo.essential))
        if debinfo.depends:
            lines.append("Depends: {}".format(debinfo.depends))
        if debinfo.recommends:
            lines.append("Recommends: {}".format(debinfo.recommends))
        if debinfo.suggests:
            lines.append("Suggests: {}".format(debinfo.suggests))
        if debinfo.enhances:
            lines.append("Enhances: {}".format(debinfo.enhances))
        if debinfo.pre_depends:
            lines.append("Pre-Depends: {}".format(debinfo.pre_depends))
        if debinfo.installed_size:
            lines.append("Installed-Size: {}".format(debinfo.installed_size))
        lines.append("Maintainer: {}".format(debinfo.maintainer))
        lines.append("Description: {}".format(debinfo.description))
        if debinfo.homepage:
            lines.append("Homepage: {}".format(debinfo.homepage))
        if debinfo.built_using:
            lines.append("Built-Using: {}".format(debinfo.built_using))
        lines.append("Filename: {}".format(debinfo.filename))
        lines.append("Size: {}".format(relic.size))
        lines.append("MD5Sum: {}".format(debinfo.md5sum))
        lines.append("SHA1: {}".format(debinfo.sha1))
        lines.append("SHA256: {}".format(debinfo.sha256))
        lines.append("SHA512: {}".format(debinfo.sha512))
        lines.append("Description-md5: {}".format(debinfo.description_md5))
        if debinfo.multi_arch:
            lines.append("Multi-Arch: {}".format(debinfo.multi_arch))
        lines.append("")
    data = "\n".join(lines)
    bytedata = data.encode()
    if compression == "gz":
        finaldata = gzip.compress(bytedata)
    elif compression == "bz2":
        finaldata = bz2.compress(bytedata)
    else:
        finaldata = bytedata
    mtime = datetime.datetime.utcnow()
    size = len(finaldata)
    md5sum = hashlib.md5(finaldata).hexdigest()
    sha1 = hashlib.sha1(finaldata).hexdigest()
    sha256 = hashlib.sha256(finaldata).hexdigest()
    newcacheddata = FileCache(key=key,
                              value=finaldata,
                              mtime=mtime,
                              size=size,
                              md5sum=md5sum,
                              sha1=sha1,
                              sha256=sha256)
    with transaction.manager:
        DBSession.add(newcacheddata)
    return (finaldata, mtime, size, md5sum, sha1, sha256)