def build(bundle_dir):
    """Build the bundle in bundle_dir: optionally stash/pull its git repo,
    load the bundle class, then run prepare, build and (optionally) install.

    Reads the module-level `args` (stash, pull, force, clean, dryrun,
    install, library), `rc` and the prt/err helpers.
    """
    from databundles.library import new_library
    from databundles.source.repository.git import GitShellService

    # Stash must happen before pull, and pull must happen before the class
    # is loaded in load_bundle, otherwise the class can't be updated by the
    # pull. And, we have to use the GitShell service directly, because the
    # new_repository route will load the bundle.
    gss = GitShellService(bundle_dir)

    if args.stash:
        prt("{} Stashing ", bundle_dir)
        gss.stash()

    if args.pull:
        prt("{} Pulling ", bundle_dir)
        gss.pull()

    # Import the bundle file from the directory
    bundle_class = load_bundle(bundle_dir)
    bundle = bundle_class(bundle_dir)

    l = new_library(rc.library(args.library))

    if l.get(bundle.identity.vid) and not args.force:
        prt("{} Bundle is already in library", bundle.identity.name)
        return
    elif bundle.is_built and not args.force and not args.clean:
        prt("{} Bundle is already built", bundle.identity.name)
        return
    else:
        if args.dryrun:
            prt("{} Would build but in dry run ", bundle.identity.name)
            return

        # NOTE(review): `repo` is not defined in this function -- presumably
        # a module-level global set by the caller; confirm.
        repo.bundle = bundle

        if args.clean:
            bundle.clean()
            # Re-create after cleaning is important for something ...
            bundle = bundle_class(bundle_dir)

        prt("{} Building ", bundle.identity.name)

        # err() is expected to abort; prepare failure stops the build.
        if not bundle.run_prepare():
            err("{} Prepare failed", bundle.identity.name)

        if not bundle.run_build():
            err("{} Build failed", bundle.identity.name)

    if args.install and not args.dryrun:
        if not bundle.run_install(force=True):
            err('{} Install failed', bundle.identity.name)
def get_library(self, name='default'):
    """Return a library for the named config, resetting (clearing out)
    its database before the test run."""
    return new_library(self.rc.library(name), reset=True)
def remote_command(args, rc, src):
    """Dispatch a `remote` subcommand to its module-level handler."""
    from databundles.library import new_library

    # Server processes take their library config from src; clients use rc.
    config = src if args.is_server else rc

    l = new_library(config.library(args.name))

    # Handlers are module-level functions named remote_<subcommand>.
    handler = globals()['remote_' + args.subcommand]
    handler(args, l, config)
def source_list(args, rc, src, names=None):
    '''List all of the source packages'''
    from collections import defaultdict
    import databundles.library as library

    source_dir = rc.sourcerepo.dir
    l = library.new_library(rc.library(args.library))

    # Bundles known to the library vs. bundles present in the source dir.
    in_library = defaultdict(dict, _library_list(l))
    in_source = defaultdict(dict, _source_list(source_dir))

    _print_bundle_list(in_source, in_library, subset_names=names)
def x_test_remote(self):
    """Smoke-test the remote/cache configuration: build a library from the
    server test config and print its remote and cache upstream chains.

    (x_ prefix: excluded from the normal test run.)
    """
    from databundles.run import RunConfig
    from databundles.library import new_library

    rc = get_runconfig((os.path.join(self.bundle_dir, 'server-test-config.yaml'), RunConfig.USER_CONFIG))

    config = rc.library('default')

    library = new_library(config)

    print library.remote
    print library.remote.last_upstream()
    print library.cache
    print library.cache.last_upstream()
def production_run(config, reloader=False):
    """Start the library server under the 'paste' production server."""
    def library_factory():
        return new_library(config, True)

    l = library_factory()
    l.database.create()

    logger.info("starting production server for library '{}' on http://{}:{}".format(l.name, l.host, l.port))

    # The plugin gets the factory so each request can build a fresh library.
    install(LibraryPlugin(library_factory))

    return run(host=l.host, port=l.port, reloader=reloader, server='paste')
def local_run(config, reloader=False):
    """Start the library server for local use, with bottle debug on."""
    global stoppable_wsgi_server_run
    stoppable_wsgi_server_run = None

    debug()

    def library_factory():
        return new_library(config, True)

    l = library_factory()
    l.database.create()

    logger.info("starting local server for library '{}' on http://{}:{}".format(l.name, l.host, l.port))

    # The plugin gets the factory so each request can build a fresh library.
    install(LibraryPlugin(library_factory))

    return run(host=l.host, port=l.port, reloader=reloader)
def local_debug_run(config):
    """Start the library server with reloading enabled, for debugging."""
    debug()

    # Fall back to defaults when the configured values are falsy.
    port = config['port'] or 7979
    host = config['host'] or 'localhost'

    logger.info("starting debug server on http://{}:{}".format(host, port))

    def library_factory():
        return new_library(config, True)

    l = library_factory()
    l.database.create()

    # The plugin gets the factory so each request can build a fresh library.
    install(LibraryPlugin(library_factory))

    return run(host=host, port=port, reloader=True, server='stoppable')
def source_sync(args, rc, src):
    '''Synchronize all of the repositories with the local library'''
    import databundles.library as library
    from databundles.identity import new_identity

    l = library.new_library(rc.library(args.library))

    for repo in rc.sourcerepo.list:
        prt('--- Sync with upstream source repository {}', repo.service.ident)

        for entry in repo.service.list():
            ident = new_identity(entry)

            # Record the upstream clone URL as a 'source' file in the library.
            l.database.add_file(
                entry['clone_url'],
                repo.service.ident,
                ident.id_,
                state='synced',
                type_='source',
                source_url=entry['clone_url'],
                data=entry)

            prt("Added {:15s} {}", ident.id_, entry['clone_url'])
def test_run(config):
    '''Run method to be called from unit tests'''
    from bottle import run, debug  # @UnresolvedImport

    debug()

    # Fall back to defaults when the configured values are falsy.
    port = config['port'] or 7979
    host = config['host'] or 'localhost'

    logger.info("starting test server on http://{}:{}".format(host, port))

    def library_factory():
        return new_library(config, True)

    l = library_factory()
    l.database.create()

    # The plugin gets the factory so each request can build a fresh library.
    install(LibraryPlugin(library_factory))

    return run(host=host, port=port, reloader=False, server='stoppable')
def source_info(args, rc, src):
    """With no term, list the source dir and registered repositories;
    with args.term (a bundle name or vname), print details for that
    source bundle."""
    if not args.term:
        prt("Source dir: {}", rc.sourcerepo.dir)
        for repo in rc.sourcerepo.list:
            prt("Repo : {}", repo.ident)
    else:
        import databundles.library as library
        from ..identity import new_identity

        l = library.new_library(rc.library(args.library))

        # Scan the synced 'source' records for a name/vname match.
        found = False
        for r in l.database.get_file_by_type('source'):
            ident = new_identity(r.data)
            if args.term == ident.name or args.term == ident.vname:
                found = r
                break

        if not found:
            err("Didn't find source for term '{}'. (Maybe need to run 'source sync')", args.term)
        else:
            from ..source.repository import new_repository

            # NOTE(review): this uses args.name while the lookup above used
            # args.term -- confirm the mismatch is intentional.
            repo = new_repository(rc.sourcerepo(args.name))

            # Relies on the loop variable `r` still being bound to the
            # matched record (same object as `found`).
            ident = new_identity(r.data)

            repo.bundle_ident = ident

            prt('Name : {}', ident.vname)
            prt('Id : {}', ident.vid)
            prt('Dir : {}', repo.bundle_dir)

            if not repo.bundle.database.exists():
                prt('Exists : Database does not exist or is empty')
            else:
                d = dict(repo.bundle.db_config.dict)
                process = d['process']
                prt('Created : {}', process.get('dbcreated', ''))
                prt('Prepared : {}', process.get('prepared', ''))
                prt('Built : {}', process.get('built', ''))
                # Only shown when a buildtime was recorded; rounded seconds.
                prt('Build time: {}', str(round(float(process['buildtime']), 2)) + 's' if process.get('buildtime', False) else '')
def source_clone(args, rc, src):
    '''Clone one or more registered source packages ( via sync ) into the source directory '''
    import databundles.library as library
    from ..dbexceptions import ConflictError
    from ..identity import new_identity

    l = library.new_library(rc.library(args.library))

    def get_by_group(group):
        # Synced 'source' records registered under this repository group.
        return [rec for rec in l.database.get_file_by_type('source')
                if rec.group == group]

    for repo in rc.sourcerepo.list:
        prt("--- Cloning sources from: {}", repo.ident)

        for f in get_by_group(repo.ident):
            try:
                ident = new_identity(f.data)
                d = repo.clone(f.path, ident.source_path, repo.dir)
                prt("Cloned {} to {}", f.path, d)
            except ConflictError as e:
                # Already cloned (or otherwise conflicting): warn and continue.
                warn("Clone failed for {}: {}".format(f.path, e.message))
def test_simple_install(self):
    """Round-trip a bundle and its partitions through the REST remote and
    verify the objects appear on the upstream web cache."""
    from databundles.cache.remote import RestRemote

    config = self.start_server()

    # Create the library so we can get the same remote config
    l = new_library(config)

    s3 = l.remote.last_upstream()

    print "Starting server with config: {}".format(config.to_dict())

    api = RestRemote(upstream=s3, **config)

    r = api.put_bundle(self.bundle)
    print r

    self.web_exists(s3, self.bundle.identity.cache_key)

    for partition in self.bundle.partitions:
        r = api.put_partition(partition)
        r = api.get(partition.identity.cache_key)
        self.web_exists(s3, partition.identity.cache_key)
        #os.remove(r)

    return

    # NOTE(review): everything below is unreachable because of the
    # unconditional return above -- presumably disabled on purpose.

    # Try variants of find.
    r = api.find(self.bundle.identity.name)
    self.assertEquals(self.bundle.identity.name, r[0].name)

    r = api.find(QueryCommand().identity(name=self.bundle.identity.name))
    self.assertEquals(self.bundle.identity.name, r[0].name)

    for partition in self.bundle.partitions:
        r = api.find((QueryCommand().partition(name=partition.identity.name)).to_dict())
        self.assertEquals(partition.identity.name, r[0].name)
def x_test_dump(self):
    """Exercise the library's dumper thread: a dump should be pending only
    after the database changes, and restore() should bring content back
    after clean().

    (x_ prefix: excluded from the normal test run. Timing-sensitive: the
    sleeps give the dumper thread time to run.)
    """
    import time
    import logging

    l = new_library(self.server_rc.library('default-remote'), reset=True)

    l.clean()

    self.start_server()

    # Nothing has changed yet, so no dump should be pending.
    l.run_dumper_thread()
    l.run_dumper_thread()
    self.assertFalse(l.database.needs_dump())

    # Adding a bundle marks the database as needing a dump.
    l.put(self.bundle)
    self.assertTrue(l.database.needs_dump())

    l.run_dumper_thread()
    time.sleep(6)
    self.assertFalse(l.database.needs_dump())

    l.run_dumper_thread()
    l.put(self.bundle)
    l.run_dumper_thread()
    time.sleep(7)
    print l.database.needs_dump()
    self.assertFalse(l.database.needs_dump())

    self.assertEquals(self.bundle.identity.name, l.get(self.bundle.identity.name).identity.name)

    # clean() removes the bundle; restore() brings it back from the dump.
    l.clean()
    self.assertEqual(None, l.get(self.bundle.identity.name))

    l.restore()
    self.assertEquals(self.bundle.identity.name, l.get(self.bundle.identity.name).identity.name)
def x_test_remote_library_partitions(self):
    """Push a bundle and its partitions to the remote, then fetch a
    partition by id from a second, purged library.

    (x_ prefix: excluded from the normal test run.)
    """
    self.start_server()

    l = self.get_library()

    r = l.put(self.bundle)

    r = l.get(self.bundle.identity.name)
    self.assertEquals(self.bundle.identity.name, r.identity.name)

    for partition in self.bundle.partitions:
        r = l.put(partition)

        # Get the partition with a name
        r = l.get(partition.identity.name)
        self.assertTrue(r is not False)
        self.assertEquals(partition.identity.name, r.partition.identity.name)
        self.assertEquals(self.bundle.identity.name, r.identity.name)

    # Copy all of the newly added files to the server.
    l.push()

    # A second, purged library must fetch the partition from the remote.
    # NOTE(review): 'b1DxuZ001' is a hard-coded partition id for the test
    # bundle -- confirm it matches the fixture.
    l2 = new_library('clean')
    l2.purge()

    r = l2.get('b1DxuZ001')
    self.assertTrue(r is not None and r is not False)

    print r

    self.assertTrue(r.partition is not None and r.partition is not False)
    self.assertEquals(r.partition.identity.id_, 'b1DxuZ001')
    self.assertTrue(os.path.exists(r.partition.database.path))