def library_server(args, l, config):
    """Run the library server.

    Dispatches on the parsed arguments: daemonized production run,
    local test run, or a direct (foreground) production run.

    Args:
        args: parsed command-line arguments; reads args.name,
            args.daemonize and args.test.
        l: library object (unused here; kept for caller compatibility).
        config: run configuration; config.library(args.name) yields the
            library configuration handed to the server.
    """
    from ..util import daemonize
    from databundles.server.main import production_run, local_run

    def run_server(args, config):
        # Entry point handed to daemonize(); also used for the direct run.
        production_run(config.library(args.name))

    if args.daemonize:
        daemonize(run_server, args, config)
    elif args.test:
        local_run(config.library(args.name))
    else:
        # Identical to the daemonized entry point; call it rather than
        # duplicating the production_run(...) invocation.
        run_server(args, config)
def run_server(args, config):
    """Start the production server for the library named by args.name.

    NOTE(review): this module-level definition is immediately shadowed by
    the run_server defined just below (different signature); it appears to
    be dead code left from a refactor — confirm and remove.
    """
    library_config = config.library(args.name)
    production_run(library_config)
def run_server(args, rc):
    """Start the production server with run-config *rc* and the library
    name taken from args.name.

    NOTE(review): shadows the run_server defined on the previous line,
    which takes a different second argument — only this version is live.
    """
    library_name = args.name
    production_run(rc, name=library_name)
def library_command(args, rc): import library l = library.get_library(name=args.name) if args.subcommand == 'init': print "Initialize Library" l.database.create() elif args.subcommand == 'server': from databundles.server.main import production_run def run_server(args, rc): production_run(rc, name = args.name) if args.daemonize: daemonize(run_server, args, rc) else: production_run(rc, name = args.name) elif args.subcommand == 'drop': print "Drop tables" l.database.drop() elif args.subcommand == 'clean': print "Clean tables" l.database.clean() elif args.subcommand == 'purge': print "Purge library" l.purge() elif args.subcommand == 'rebuild': print "Rebuild library" l.rebuild() elif args.subcommand == 'info': print "Library Info" print "Database: {}".format(l.database.dsn) print "Remote: {}".format(l.remote) print "Cache: {}".format(l.cache.cache_dir) elif args.subcommand == 'push': if args.force: state = 'all' else: state = 'new' files_ = l.database.get_file_by_state(state) if len(files_): print "-- Pushing to {}".format(l.remote) for f in files_: print "Pushing: {}".format(f.path) l.push(f) elif args.subcommand == 'files': files_ = l.database.get_file_by_state(args.file_state) if len(files_): print "-- Display {} files".format(args.file_state) for f in files_: print "{0:11s} {1:4s} {2}".format(f.ref,f.state,f.path) elif args.subcommand == 'find': dataset, partition = l.get_ref(args.term) if not dataset: print "{}: Not found".format(args.term) else: print "Rel Path : ",dataset.identity.cache_key print "Abs Path : ",l.cache.exists(dataset.identity.cache_key) print "Dataset : ",dataset.id, dataset.name print "Partition : ",(partition.id, partition.name )if partition else '' print "D Is Local: ",l.cache.exists(dataset.identity.cache_key) is not False print "P Is Local: ",(l.cache.exists(partition.identity.cache_key) is not False) if partition else '' elif args.subcommand == 'get': # This will fetch the data, but the return values aren't quite right r = l.get(args.term) 
if not r: print "{}: Not found".format(args.term) else: print "Rel Path : ",r.bundle.identity.cache_key print "Abs Path : ",l.cache.exists(r.bundle.identity.cache_key) print "Dataset : ",r.bundle.identity.id_, r.bundle.identity.name if r.partition: print "Partition : ",r.partition.identity.id_, r.partition.name else: print "Partition : " print "D Is Local: ",l.cache.exists(r.bundle.identity.cache_key) is not False print "P Is Local: ",(l.cache.exists(r.partition.identity.cache_key) is not False) if r.partition else '' if r and args.open: if r.partition: abs_path = os.path.join(l.cache.cache_dir, r.partition.identity.cache_key) else: abs_path = os.path.join(l.cache.cache_dir, r.bundle.identity.cache_key) print "\nOpening: {}\n".format(abs_path) os.execlp('sqlite3','sqlite3',abs_path ) elif args.subcommand == 'listremote': print 'List Remote' datasets = l.api.list() for id_, data in datasets.items(): print "{0:11s} {1:4s} {2}".format(id_,'remote',data['name']) else: print "Unknown subcommand" print args