def setup():
    def getvar(name, required=True):
        val = os.getenv(name)
        if required and val is None:
            raise Exception("found no environment variable %s" % name)
        return val

    dbname = getvar("PHAROS_DBNAME")
    dbuser = getvar("PHAROS_DBUSER")
    dbpass = getvar("PHAROS_DBPASS")
    web.config.db_parameters = dict(dbn='postgres', db=dbname, user=dbuser, pw=dbpass)
    web.db._hasPooling = False
    web.config.db_printing = False
    web.load()
    tdb.setup()

    logfile = getvar("PHAROS_LOGFILE", False)
    if logfile:
        tdb.logger.set_logfile(open(logfile, "a"))
        sys.stderr.write("logging to %s\n" % logfile)

    global source_name, source_path
    source_dir = getvar("PHAROS_SOURCE_DIR")
    source_name = sys.argv[1]
    source_path = "%s/%s" % (source_dir, source_name)

    global edition_prefix, author_prefix
    edition_prefix = getvar("PHAROS_EDITION_PREFIX", False) or ""
    author_prefix = getvar("PHAROS_AUTHOR_PREFIX", False) or ""

    setup_names()

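# Hypothetical usage sketch for setup() above: the values below are illustrative, and
# only the PHAROS_* variable names and the argv[1] convention come from the function
# itself. setup() reads all of its configuration from the environment, so the required
# variables must be set before it is called:
import os
import sys

os.environ["PHAROS_DBNAME"] = "pharos"
os.environ["PHAROS_DBUSER"] = "pharos"
os.environ["PHAROS_DBPASS"] = "secret"
os.environ["PHAROS_SOURCE_DIR"] = "/var/lib/pharos/sources"
sys.argv[1:] = ["marc_records"]  # setup() takes the source name from sys.argv[1]

setup()
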
def setup(): web.config.db_parameters = dict(dbn="postgres", # db="pharos", db="pharos", user="******", pw="pharos") web.load()
def dump(filename):
    """Dump the specified pages and their dependencies."""
    web.load()
    site = db.get_site(config.site)
    visited = {}

    def visit_all(pages):
        for p in pages:
            if isinstance(p, tdb.Thing):
                visit(p)
            elif isinstance(p, list):
                visit_all(p)

    def visit(p):
        if p.id in visited:
            return
        visited[p.id] = p
        visit(p.type)
        visit_all(p.d.values())

    pages = [db.get_version(site, p.strip()) for p in open(filename).readlines()]
    visit_all(pages)

    for p in visited.values():
        data = dict(p.d)
        data['__type__'] = p.type
        data['__name__'] = p.name
        data['__parent__'] = p.parent
        print tdb.logger.format('thing', p.id, data),

def get_infobase():
    """Creates infobase object."""
    from infogami.infobase import infobase, dbstore, cache

    web.config.db_printing = True
    web.load()

    # hack to make cache work for local infobase connections
    cache.loadhook()
    web.ctx.ip = '127.0.0.1'

    store = dbstore.DBStore(schema.get_schema())
    ib = infobase.Infobase(store, infobase.config.secret_key)

    if config.writelog:
        ib.add_event_listener(Logger(config.writelog))

    ol = ib.get('openlibrary.org')

    if ol and config.booklog:
        global booklogger
        booklogger = Logger(config.booklog)
        ol.add_trigger('/type/edition', write_booklog)
        ol.add_trigger('/type/author', write_booklog2)

    if ol and config.http_listeners:
        ol.add_event_listener(None, http_notify)

    return ib

def setup_database(dbname): if dbname == "sqlite": web.config.db_parameters = dict(dbn=dbname, db="webpy.db") else: web.config.db_parameters = dict(dbn=dbname, db="webpy", user="******", pw="tiger") # web.config.db_printing=True web.load()
def setup(): web.config.db_parameters = dict(dbn="postgres", # host='apollonius.us.archive.org', host='localhost', db="pharos", user="******", pw="pharos") web.load()
def main():
    web.config.db_parameters = dict(dbn='postgres', db='infobase_data4', host='pharosdb', user='******', pw='')
    web.config.db_printing = True
    web.load()

    fbooks = open("books.txt", "w")
    fauthors = open("authors.txt", "w")
    books(fbooks, fauthors)
    fbooks.close()
    fauthors.close()

def __init__(self, *args):
    FtpServer.FlatFileSystem.__init__(self, *args)
    self.file_store = Store.Transaction(Config.file_store)
    self.cache = Store.Transaction(Config.cache_store)
    web.load()
    web.ctx.store = self.file_store
    web.ctx.cache = self.cache
    web.ctx.attachments = Store.Transaction(Config.attachment_store)
    web.ctx.printmode = False
    web.ctx.home = Config.canonical_base_url

def setup_database(dbname):
    if dbname == 'sqlite':
        web.config.db_parameters = dict(dbn=dbname, db='webpy.db')
    else:
        web.config.db_parameters = dict(dbn=dbname, db='webpy', user='******', pw='tiger')
    # web.config.db_printing=True
    web.load()

def setup_ol():
    infogami.config.db_printing = False
    infogami.config.db_parameters = web.config.db_parameters = ol.config.db_parameters
    ol.config.infobase_server = None
    web.load()
    server._infobase = ol.get_infobase()
    ol.db = server._infobase.store.db
    ol.db.printing = False
    infogami._setup()

def run_server():
    """Run Infobase server."""
    web.config.db_parameters = config.db_parameters
    web.load()

    from infogami.infobase import server
    server._infobase = get_infobase()

    if '--create' in sys.argv:
        server._infobase.create('openlibrary.org')
    else:
        server.run()

def main(suite=None): user = os.getenv("USER") web.config.db_parameters = dict(dbn="postgres", db="infogami_test", user=user, pw="") web.load() delegate.app.request("/") delegate._load() if not suite: main_module = __import__("__main__") suite = module_suite(main_module, sys.argv[1:] or None) result = runTests(suite) sys.exit(not result.wasSuccessful())
def run_standalone():
    """Run OL in standalone mode.

    No separate infobase server is required.
    """
    infogami.config.db_parameters = web.config.db_parameters = config.db_parameters
    config.infobase_server = None
    web.load()

    from infogami.infobase import server
    server._infobase = get_infobase()

    if '--create' in sys.argv:
        server._infobase.create('openlibrary.org')
    else:
        infogami.run()

def playback():
    web.load()
    reader = LogReader(RsyncLogFile("wiki-beta::pharos/log", "log"))

    # skip the log till the latest entry in the database
    timestamp = web.query('SELECT last_modified FROM thing ORDER BY last_modified DESC LIMIT 1')[0].last_modified
    reader.skip_till(timestamp)

    playback = LogPlayback(Infobase())

    while True:
        for entry in reader:
            print reader.logfile.tell(), entry.timestamp
            playback.playback(entry)
        time.sleep(60)

def main(suite=None):
    user = os.getenv('USER')
    web.config.db_parameters = dict(dbn='postgres', db='infogami_test', user=user, pw='')
    web.load()

    delegate.app.request('/')
    delegate._load()

    if not suite:
        main_module = __import__('__main__')
        suite = module_suite(main_module, sys.argv[1:] or None)

    result = runTests(suite)
    sys.exit(not result.wasSuccessful())

def playback():
    web.load()
    reader = LogReader(RsyncLogFile("wiki-beta::pharos/log", "log"))

    # skip the log till the latest entry in the database
    timestamp = web.query(
        'SELECT last_modified FROM thing ORDER BY last_modified DESC LIMIT 1'
    )[0].last_modified
    reader.skip_till(timestamp)

    playback = LogPlayback(Infobase())

    while True:
        for entry in reader:
            print reader.logfile.tell(), entry.timestamp
            playback.playback(entry)
        time.sleep(60)

                    )
                    append(thing_id, key, _value, datatype, i)
            else:
                _value, datatype = self.prepare_datum(
                    value, result, "%s/%s" % (path, key)
                )
                if key == 'key':
                    datatype = 1
                append(thing_id, key, _value, datatype, None)
        return (thing_id, DATATYPE_REFERENCE)
    elif isinstance(query, string_types):
        return (query, TYPES['/type/string'])
    elif isinstance(query, bool):
        # bool must be tested before int: bool is a subclass of int, so the
        # int branch would otherwise swallow True/False values.
        return (int(query), TYPES['/type/boolean'])
    elif isinstance(query, int):
        return (query, TYPES['/type/int'])
    elif isinstance(query, float):
        return (query, TYPES['/type/float'])
    else:
        raise Exception('%s: invalid value: %s' % (path, repr(query)))


if __name__ == "__main__":
    web.config.db_parameters = dict(
        dbn='postgres', host='pharosdb', db='infobase_data2', user='******', pw=''
    )
    web.config.db_printing = True
    web.load()
    site = Infobase().get_site('infogami.org')
    BulkUpload(site)

from __future__ import print_function

from catalog.read_rc import read_rc
import web, sys

rc = read_rc()
web.config.db_parameters = dict(dbn='postgres', db=rc['db'], user=rc['user'],
                                pw=rc['pw'], host=rc['host'])
web.load()

iter = web.select('version', what='machine_comment', where="machine_comment like 'ia:%%'")
for row in iter:
    print(row.machine_comment[3:])

def createsite(sitename, admin_password):
    """Creates a new site. Takes two arguments: sitename and admin_password."""
    web.load()
    infobase.Infobase().create_site(sitename, admin_password)

def load(filename):
    """Load a dump from the given file into the database."""
    from infogami.plugins.wikitemplates import code
    code.validation_enabled = False

    pages = {}
    for _, id, data in tdb.logger.parse(filename):
        type = data.pop('__type__')
        name = data.pop('__name__')
        pages[int(id)] = web.storage(id=int(id), name=name, type=type, d=data)

    web.load()
    site = db.get_site(config.site)
    mapping = {}

    def flat(items):
        """Makes a nested list flat.

        >>> x = flat([1, [2, 3, [4, 5], 6], 7])
        >>> list(x)
        [1, 2, 3, 4, 5, 6, 7]
        """
        for item in items:
            if isinstance(item, list):
                for x in flat(item):
                    yield x
            else:
                yield item

    def get_dependencies(page):
        d = [pages[v.id] for v in flat(page.d.values()) if isinstance(v, tdb.Thing)]
        if page.type.id != page.id:
            t = pages[page.type.id]
            d = [t] + d
        return d

    def remap(v):
        if isinstance(v, tdb.Thing):
            return tdb.withID(mapping[v.id], lazy=True)
        elif isinstance(v, list):
            return [remap(x) for x in v]
        else:
            return v

    def new_version(page):
        print "new_version", page.name
        d = dict([(k, remap(v)) for k, v in page.d.items()])
        try:
            p = tdb.withName(page.name, site)
            p.setdata(d)
            # careful about type/type
            if page.type.id != page.id:
                p.type = remap(page.type)
        except tdb.NotFound:
            p = tdb.new(page.name, site, remap(page.type), d)
        p.save()
        return p.id

    def load_page(page):
        if page.id in mapping:
            return
        for p in get_dependencies(page):
            load_page(p)
        mapping[page.id] = new_version(page)

    web.transact()
    for p in pages.values():
        load_page(p)
    web.commit()

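# Hypothetical round trip between dump() and load() above (the file names are
# illustrative): dump() reads page names from its input file and prints one
# tdb.logger-formatted record per visited page to stdout, and load() replays a
# file of such records into the database, remapping old thing ids to new ones.
import sys

sys.stdout = open("dump.log", "w")   # capture the records printed by dump()
dump("page_names.txt")               # one page name per line
sys.stdout = sys.__stdout__

load("dump.log")
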
def createsite(sitename, admin_password):
    """Creates a new site. Takes two arguments: sitename and admin_password."""
    web.load()
    import infobase
    infobase.Infobase().create_site(sitename, admin_password)

def setUp(self):
    web.config.db_parameters = self.parameters
    # web.config.db_printing = True
    web.load()
    web.delete("test", where="1=1")

def __init__(self, app):
    web.load()
    self.load()
    self.app = app

def sources():
    web.config.db_parameters = dict(dbn='postgres', db='ol_merge', user=rc['user'],
                                    pw=rc['pw'], host=rc['host'])
    web.config.db_printing = False
    web.load()
    return ((i.id, i.archive_id, i.name) for i in web.select('marc_source'))