def get_site(sitename):
    """Return the site *sitename* from the process-wide Infobase singleton.

    The module-level ``_infobase`` instance is built lazily on first call,
    backed by a DBStore using ``dbstore.default_schema`` (or a fresh Schema
    when no default is registered).
    """
    global _infobase
    if _infobase:
        return _infobase.get(sitename)
    store = dbstore.DBStore(dbstore.default_schema or dbstore.Schema())
    _infobase = infobase.Infobase(store, config.secret_key)
    return _infobase.get(sitename)
def get_infobase(rc):
    """Build a new Infobase instance from the runtime config dict *rc*.

    Configures web.py's DB parameters from ``infogami.config``, declares the
    Open Library table groups and key sequences, and returns an Infobase
    using ``rc['secret_key']`` as its secret.
    """
    import web

    from infogami.infobase import infobase, dbstore

    web.config.db_parameters = infogami.config.db_parameters
    web.config.db_printing = False

    schema = dbstore.Schema()
    table_groups = [
        ('type', '/type/type'),
        ('type', '/type/property'),
        ('type', '/type/backreference'),
        ('user', '/type/user'),
        ('user', '/type/usergroup'),
        ('user', '/type/permission'),
        ('edition', '/type/edition'),
        ('author', '/type/author'),
        ('scan', '/type/scan_location'),
        ('scan', '/type/scan_record'),
    ]
    for group, type_key in table_groups:
        schema.add_table_group(group, type_key)

    # Key sequences for editions ("M" suffix) and authors ("A" suffix).
    schema.add_seq('/type/edition', '/b/OL%dM')
    schema.add_seq('/type/author', '/a/OL%dA')

    store = dbstore.DBStore(schema)
    return infobase.Infobase(store, rc['secret_key'])
def site():
    # TODO: this does not clear data between tests. Make this work in scope=class
    """Create and return a fresh 'test' site against the infobase_test DB."""
    web.config.db_parameters = dict(
        host='postgres',
        dbn='postgres',
        db='infobase_test',
        user=os.getenv('USER'),
        pw='',
    )
    store = dbstore.DBStore(dbstore.Schema())
    store.db.printing = False
    return infobase.Infobase(store, 'secret').create('test')
def setUp(self):
    """Prepare a test Infobase and a 'test' site against infobase_test.

    A DB transaction is opened and kept on ``self.t``; presumably it is
    rolled back in tearDown so tests do not persist data — TODO confirm.
    """
    web.config.db_parameters = dict(
        dbn='postgres',
        db='infobase_test',
        user=os.getenv('USER'),
        pw='',
    )
    store = dbstore.DBStore(dbstore.Schema())
    self.t = store.db.transaction()
    store.db.printing = False
    self.ib = infobase.Infobase(store, 'secret')
    self.site = self.ib.create('test')
def createsite():
    """Create the site named by ``infogami.config.site`` in a new infobase."""
    import web

    from infogami.infobase import dbstore, infobase, config, server

    web.config.db_parameters = infogami.config.db_parameters
    web.config.db_printing = True
    web.ctx.ip = '127.0.0.1'

    # NOTE(review): requesting '/' appears to initialize the server app's
    # web.ctx / DB state before the site is created — verify against server.
    server.app.request('/')

    store = dbstore.DBStore(dbstore.Schema())
    ib = infobase.Infobase(store, config.secret_key)
    ib.create(infogami.config.site)
def recreate_database():
    """Drop and create the infobase_test database.

    This function is memoized to recreate the db only once per test session.
    (The memoization itself is not visible in this block — presumably applied
    by a decorator or the caller; confirm at the definition site.)

    Raises:
        AssertionError: if the shell command to recreate the DB fails.
    """
    # A bare `assert os.system(...) == 0` is stripped under `python -O`,
    # which would silently ignore a failed createdb. Check explicitly and
    # raise the same exception type so callers see no behavior change.
    status = os.system('dropdb infobase_test; createdb infobase_test')
    if status != 0:
        raise AssertionError(
            'failed to recreate infobase_test database (exit status %d)' % status
        )

    db = web.database(**db_parameters)
    schema = dbstore.default_schema or dbstore.Schema()
    # Apply the full infobase schema to the freshly created database.
    db.query(str(schema.sql()))
def get_schema():
    """Return the Open Library infobase schema.

    Declares all OL table groups and key sequences, then monkey-patches
    ``schema.sql`` so the generated DDL also includes custom postgres
    helpers (the ``get_olid`` function and an index over it).
    """
    schema = dbstore.Schema()

    # Table groups with default datatypes.
    for group, type_key in [
        ('type', '/type/type'),
        ('type', '/type/property'),
        ('type', '/type/backreference'),
        ('user', '/type/user'),
        ('user', '/type/usergroup'),
        ('user', '/type/permission'),
    ]:
        schema.add_table_group(group, type_key)

    # Table groups that index these datatypes explicitly.
    datatypes = ["str", "int", "ref", "boolean"]
    for group, type_key in [
        ('edition', '/type/edition'),
        ('author', '/type/author'),
        ('scan', '/type/scan_location'),
        ('scan', '/type/scan_record'),
        ('work', '/type/work'),
        ('publisher', '/type/publisher'),
        ('subject', '/type/subject'),
    ]:
        schema.add_table_group(group, type_key, datatypes)

    # Key sequences per type.
    for type_key, pattern in [
        ('/type/edition', '/books/OL%dM'),
        ('/type/author', '/authors/OL%dA'),
        ('/type/work', '/works/OL%dW'),
        ('/type/publisher', '/publishers/OL%dP'),
    ]:
        schema.add_seq(type_key, pattern)

    _sql = schema.sql

    # custom postgres functions required by OL.
    more_sql = """
    CREATE OR REPLACE FUNCTION get_olid(text) RETURNS text AS $$
        select regexp_replace($1, '.*(OL[0-9]+[A-Z])', E'\\1') where $1 ~ '^/.*/OL[0-9]+[A-Z]$';
    $$ LANGUAGE SQL IMMUTABLE;

    CREATE INDEX thing_olid_idx ON thing(get_olid(key));
    """

    # monkey patch schema.sql to include the custom functions
    schema.sql = lambda: web.safestr(_sql()) + more_sql
    return schema
def get_schema():
    """Return the Open Library infobase schema.

    Declares all OL table groups and key sequences, then monkey-patches
    ``schema.sql`` so the generated DDL also includes custom postgres
    helpers and the auxiliary tables used by OL (stats, waitingloan,
    import_batch, import_item).
    """
    schema = dbstore.Schema()

    # Table groups with default datatypes.
    for group, type_key in [
        ('type', '/type/type'),
        ('type', '/type/property'),
        ('type', '/type/backreference'),
        ('user', '/type/user'),
        ('user', '/type/usergroup'),
        ('user', '/type/permission'),
    ]:
        schema.add_table_group(group, type_key)

    # Table groups that index these datatypes explicitly.
    datatypes = ["str", "int", "ref", "boolean"]
    for group, type_key in [
        ('edition', '/type/edition'),
        ('author', '/type/author'),
        ('scan', '/type/scan_location'),
        ('scan', '/type/scan_record'),
        ('work', '/type/work'),
        ('publisher', '/type/publisher'),
        ('subject', '/type/subject'),
    ]:
        schema.add_table_group(group, type_key, datatypes)

    # Key sequences per type.
    for type_key, pattern in [
        ('/type/edition', '/books/OL%dM'),
        ('/type/author', '/authors/OL%dA'),
        ('/type/work', '/works/OL%dW'),
        ('/type/publisher', '/publishers/OL%dP'),
    ]:
        schema.add_seq(type_key, pattern)

    _sql = schema.sql

    # custom postgres functions required by OL.
    more_sql = """
    CREATE OR REPLACE FUNCTION get_olid(text) RETURNS text AS $$
        select regexp_replace($1, '.*(OL[0-9]+[A-Z])', E'\\1') where $1 ~ '^/.*/OL[0-9]+[A-Z]$';
    $$ LANGUAGE SQL IMMUTABLE;

    CREATE INDEX thing_olid_idx ON thing(get_olid(key));

    CREATE TABLE stats (
        id serial primary key,
        key text unique,
        type text,
        created timestamp without time zone,
        updated timestamp without time zone,
        json text
    );
    CREATE INDEX stats_type_idx ON stats(type);
    CREATE INDEX stats_created_idx ON stats(created);
    CREATE INDEX stats_updated_idx ON stats(updated);

    CREATE TABLE waitingloan (
        id serial primary key,
        book_key text,
        user_key text,
        status text default 'waiting',
        position integer,
        wl_size integer,
        since timestamp without time zone default (current_timestamp at time zone 'utc'),
        last_update timestamp without time zone default (current_timestamp at time zone 'utc'),
        expiry timestamp without time zone,
        available_email_sent boolean default 'f',
        UNIQUE (book_key, user_key)
    );
    CREATE INDEX waitingloan_user_key_idx ON waitingloan(user_key);
    CREATE INDEX waitingloan_status_idx ON waitingloan(status);

    CREATE TABLE import_batch (
        id serial primary key,
        name text,
        submitter text,
        submit_time timestamp without time zone default (current_timestamp at time zone 'utc')
    );
    CREATE INDEX import_batch_name ON import_batch(name);
    CREATE INDEX import_batch_submitter_idx ON import_batch(submitter);
    CREATE INDEX import_batch_submit_time_idx ON import_batch(submit_time);

    CREATE TABLE import_item (
        id serial primary key,
        batch_id integer references import_batch,
        added_time timestamp without time zone default (current_timestamp at time zone 'utc'),
        import_time timestamp without time zone,
        status text default 'pending',
        error text,
        ia_id text,
        data text UNIQUE,
        ol_key text,
        comments text,
        UNIQUE (batch_id, ia_id)
    );
    CREATE INDEX import_item_batch_id ON import_item(batch_id);
    CREATE INDEX import_item_import_time ON import_item(import_time);
    CREATE INDEX import_item_status ON import_item(status);
    CREATE INDEX import_item_ia_id ON import_item(ia_id);
    CREATE INDEX import_item_data ON import_item(data);
    """

    # monkey patch schema.sql to include the custom functions
    schema.sql = lambda: web.safestr(_sql()) + more_sql
    return schema