def init_plugin():
    """Initialize infobase plugin.

    Wires up event listeners/triggers from runtime config and registers
    the extra HTTP endpoints on the infobase server app.
    """
    from infogami.infobase import common, dbstore, server, logger

    dbstore.default_schema = schema.get_schema()

    # Route unhandled exceptions into the configured error log.
    if config.get('errorlog'):
        common.record_exception = lambda: save_error(config.errorlog, 'infobase')

    ol = server.get_site('openlibrary.org')
    ib = server._infobase

    if config.get('writelog'):
        ib.add_event_listener(logger.Logger(config.writelog))

    ib.add_event_listener(invalidate_most_recent_change)

    # ol is None until the site exists; triggers require a site object.
    if ol:
        if config.get('http_listeners'):
            ol.add_trigger(None, http_notify)

        if config.get('booklog'):
            global booklogger
            booklogger = logger.Logger(config.booklog)
            ol.add_trigger('/type/edition', write_booklog)
            ol.add_trigger('/type/author', write_booklog2)

    # hook to add count functionality
    server.app.add_mapping("/([^/]*)/count_editions_by_author",
                           __name__ + ".count_editions_by_author")
    server.app.add_mapping("/([^/]*)/count_editions_by_work",
                           __name__ + ".count_editions_by_work")
    server.app.add_mapping("/([^/]*)/count_edits_by_user",
                           __name__ + ".count_edits_by_user")
    server.app.add_mapping("/([^/]*)/most_recent", __name__ + ".most_recent")
    server.app.add_mapping("/([^/]*)/clear_cache", __name__ + ".clear_cache")
    # BUG FIX: raw string so \d is a regex digit class, not an invalid
    # Python string escape (SyntaxWarning/DeprecationWarning on Python 3).
    server.app.add_mapping(r"/([^/]*)/stats/(\d\d\d\d-\d\d-\d\d)",
                           __name__ + ".stats")
def init_plugin():
    """Initialize infobase plugin.

    Wires up event listeners/triggers from runtime config and registers
    the extra HTTP endpoints on the infobase server app.
    """
    from infogami.infobase import common, dbstore, server, logger

    dbstore.default_schema = schema.get_schema()

    # Route unhandled exceptions into the configured error log.
    if config.get('errorlog'):
        common.record_exception = lambda: save_error(config.errorlog, 'infobase')

    ol = server.get_site('openlibrary.org')
    ib = server._infobase

    if config.get('writelog'):
        ib.add_event_listener(logger.Logger(config.writelog))

    ib.add_event_listener(invalidate_most_recent_change)

    if ol:
        # install custom indexer
        # XXX-Anand: this might create some trouble. Commenting out.
        # ol.store.indexer = Indexer()
        if config.get('http_listeners'):
            ol.add_trigger(None, http_notify)

    # hook to add count functionality
    server.app.add_mapping("/([^/]*)/count_editions_by_author",
                           __name__ + ".count_editions_by_author")
    server.app.add_mapping("/([^/]*)/count_editions_by_work",
                           __name__ + ".count_editions_by_work")
    server.app.add_mapping("/([^/]*)/count_edits_by_user",
                           __name__ + ".count_edits_by_user")
    server.app.add_mapping("/([^/]*)/most_recent", __name__ + ".most_recent")
    server.app.add_mapping("/([^/]*)/clear_cache", __name__ + ".clear_cache")
    # BUG FIX: raw string so \d is a regex digit class, not an invalid
    # Python string escape (SyntaxWarning/DeprecationWarning on Python 3).
    server.app.add_mapping(r"/([^/]*)/stats/(\d\d\d\d-\d\d-\d\d)",
                           __name__ + ".stats")
    server.app.add_mapping("/([^/]*)/has_user", __name__ + ".has_user")
    server.app.add_mapping("/([^/]*)/olid_to_key", __name__ + ".olid_to_key")
def g(self, *a, **kw):
    """Wrapper around *f*: serializes the result to JSON, converts errors
    into infobase responses, and manages the auth-token cookie and the
    optional X-STATS timing header.
    """
    started = time.time()
    web.ctx.setdefault("headers", [])

    if not web.ctx.get('infobase_localmode'):
        # Remote mode: pick the auth token up from the request cookies.
        cookies = web.cookies(infobase_auth_token=None)
        web.ctx.infobase_auth_token = cookies.infobase_auth_token

    try:
        d = f(self, *a, **kw)
    except common.InfobaseException as exc:
        if web.ctx.get('infobase_localmode'):
            raise
        process_exception(exc)
    except Exception as exc:
        logger.error(
            "Error in processing request %s %s",
            web.ctx.get("method", "-"),
            web.ctx.get("path", "-"),
            exc_info=True,
        )
        common.record_exception()
        # call web.internalerror to send email when web.internalerror is set to web.emailerrors
        process_exception(
            common.InfobaseException(error="internal_error", message=str(exc)))
        if web.ctx.get('infobase_localmode'):
            raise common.InfobaseException(message=str(exc))
        else:
            process_exception(exc)

    # use default=str to deal with TypeError: datetime is not JSON serializable
    if isinstance(d, JSON):
        result = d.json_data
    else:
        result = json.dumps(d, default=str)

    elapsed = time.time() - started
    querytime = web.ctx.pop('querytime', 0.0)
    queries = web.ctx.pop('queries', 0)
    if config.get("enabled_stats"):
        web.header(
            "X-STATS",
            "tt: %0.3f, tq: %0.3f, nq: %d" % (elapsed, querytime, queries),
        )

    if web.ctx.get('infobase_localmode'):
        return result

    # set auth-token as cookie for remote connection.
    if web.ctx.get('infobase_auth_token'):
        web.setcookie('infobase_auth_token', web.ctx.infobase_auth_token)
    return result
def bootstrap(site, admin_password):
    """Creates system types and objects for a newly created site."""
    import cache
    cache.loadhook()

    import web
    web.ctx.infobase_bootstrap = True

    query = make_query()
    site.save_many(query)

    from infogami.infobase import config
    import secrets
    import string

    def random_password(length=20):
        # BUG FIX: string.letters was removed in Python 3 — use ascii_letters.
        # secrets.choice (not random.choice) since this is a credential.
        chars = string.ascii_letters + string.digits
        return "".join(secrets.choice(chars) for i in range(length))

    # Account Bot is not created till now. Set account_bot to None in
    # config until he is created.
    account_bot = config.get("account_bot")
    config.account_bot = None

    a = site.get_account_manager()
    a.register(username="******",
               email="*****@*****.**",
               password=admin_password,
               data=dict(displayname="Administrator"),
               _activate=True)
    a.update_user_details("admin", verified=True)

    if account_bot:
        username = account_bot.split("/")[-1]
        a.register(username=username,
                   email="*****@*****.**",
                   password=random_password(),
                   data=dict(displayname=username),
                   _activate=True)
        a.update_user_details(username, verified=True)

    # add admin user to admin usergroup
    import account
    q = [
        usergroup('/usergroup/admin', 'Group of admin users.', [{
            "key": account.get_user_root() + "admin"
        }])
    ]
    site.save_many(q)

    config.account_bot = account_bot
    web.ctx.infobase_bootstrap = False
def update_config(runtime_config):
    """Apply *runtime_config* to the config module, then import plugins,
    set database parameters, build the global cache, and run each
    plugin's ``init_plugin`` hook (when it defines one).
    """
    # update config
    for name, value in runtime_config.items():
        setattr(config, name, value)

    # import plugins
    loaded_plugins = []
    for modname in config.get('plugins') or []:
        loaded_plugins.append(__import__(modname, None, None, ["x"]))
        logger.info("loading plugin %s", modname)

    web.config.db_parameters = parse_db_parameters(config.db_parameters)

    # initialize cache
    cache_params = config.get('cache', {'type': 'none'})
    cache.global_cache = cache.create_cache(**cache_params)

    # init plugins
    for plugin in loaded_plugins:
        init = getattr(plugin, 'init_plugin', None)
        if init:
            init()
def f():
    """Create the user document and account record for a new registration.

    Runs with permission checks disabled; closes over key, data, ip,
    email, enc_password, username, timestamp and self from the enclosing
    scope.
    """
    web.ctx.disable_permission_check = True

    d = web.storage({"key": key, "type": {"key": "/type/user"}})
    d.update(data)
    self.site.save(key, d, timestamp=timestamp, author=d,
                   comment="Created new account")

    q = make_query(d)
    # Attribute the bulk save to the configured account bot, when any.
    account_bot = config.get('account_bot')
    account_bot = account_bot and web.storage({
        "key": account_bot,
        "type": {
            "key": "/type/user"
        }
    })
    self.site.save_many(
        q,
        ip=ip,
        timestamp=timestamp,
        author=account_bot,
        action='register',
        comment="Setup new account",
    )
    self.site.store.register(key, email, enc_password)
    self.update_user_details(username, verified=True, active=True)

    # Add account doc to store
    olddoc = self.site.store.store.get("account/" + username) or {}
    doc = {
        "_key": "account/" + username,
        "_rev": olddoc.get("_rev"),
        "type": "account",
        # BUG FIX: olddoc['registered_on'] raised KeyError whenever no
        # previous account doc existed (olddoc defaults to {} above);
        # fall back to the activation timestamp in that case.
        "registered_on": olddoc.get("registered_on") or timestamp.isoformat(),
        "activated_on": timestamp.isoformat(),
        "last_login": timestamp.isoformat(),
    }
    self.site.store.store.put("account/" + username, doc)
def bootstrap(site, admin_password):
    """Creates system types and objects for a newly created site."""
    import cache
    cache.loadhook()

    import web
    web.ctx.infobase_bootstrap = True

    query = make_query()
    site.save_many(query)

    from infogami.infobase import config
    import secrets
    import string

    def random_password(length=20):
        # BUG FIX: string.letters was removed in Python 3 — use ascii_letters.
        # secrets.choice (not random.choice) since this is a credential.
        chars = string.ascii_letters + string.digits
        return "".join(secrets.choice(chars) for i in range(length))

    # Account Bot is not created till now. Set account_bot to None in
    # config until he is created.
    account_bot = config.get("account_bot")
    config.account_bot = None

    a = site.get_account_manager()
    a.register(username="******",
               email="*****@*****.**",
               password=admin_password,
               data=dict(displayname="Administrator"),
               _activate=True)
    a.update_user_details("admin", verified=True)

    if account_bot:
        username = account_bot.split("/")[-1]
        a.register(username=username,
                   email="*****@*****.**",
                   password=random_password(),
                   data=dict(displayname=username),
                   _activate=True)
        a.update_user_details(username, verified=True)

    # add admin user to admin usergroup
    import account
    q = [usergroup('/usergroup/admin', 'Group of admin users.',
                   [{"key": account.get_user_root() + "admin"}])]
    site.save_many(q)

    config.account_bot = account_bot
    web.ctx.infobase_bootstrap = False
def get_memcache():
    """Returns memcache client created from infobase configuration.

    Returns None when memcache caching is not (fully) configured.
    """
    # Renamed local from `cache` to avoid shadowing the cache module.
    _cache = config.get("cache", {})
    # Also guard on 'servers' (matching get_memcache_client) so a partial
    # config returns None instead of raising KeyError.
    if _cache.get("type") == "memcache" and "servers" in _cache:
        return olmemcache.Client(_cache['servers'])
def versions(self, query):
    """Run a version query against thing/version/transaction and return
    the matching transaction rows, each annotated with the author key.
    """
    what = 'thing.key, version.revision, transaction.*'
    where = 'version.thing_id = thing.id AND version.transaction_id = transaction.id'

    if config.get('use_machine_comment'):
        what += ", version.machine_comment"

    def get_id(key):
        # Resolve a key to its thing id; StopIteration signals "no such
        # object" and is caught below to short-circuit with no results.
        meta = self.get_metadata(key)
        if meta:
            return meta.id
        raise StopIteration

    for c in query.conditions:
        # NOTE: `value` must keep this exact name — web.reparam() below
        # binds $value out of locals().
        key, value = c.key, c.value
        assert key in [
            'key', 'type', 'author', 'ip', 'comment', 'created', 'bot',
            'revision'
        ]
        try:
            if key == 'key':
                key = 'thing_id'
                value = get_id(value)
            elif key == 'revision':
                key = 'version.revision'
            elif key == 'type':
                key = 'thing.type'
                value = get_id(value)
            elif key == 'author':
                key = 'transaction.author_id'
                value = get_id(value)
            else:
                # 'bot' column is not enabled
                if key == 'bot' and not config.use_bot_column:
                    # Emulate the bot flag by author-id membership.
                    bots = get_bot_users(self.db)
                    if value == True or str(value).lower() == "true":
                        where += web.reparam(
                            " AND transaction.author_id IN $bots",
                            {"bots": bots})
                    else:
                        where += web.reparam(
                            " AND (transaction.author_id NOT IN $bots OR transaction.author_id IS NULL)",
                            {"bots": bots})
                    continue
                else:
                    key = 'transaction.' + key
        except StopIteration:
            # StopIteration is raised when a non-existing object is referred in the query
            return []
        where += web.reparam(' AND %s=$value' % key, locals())

    sort = query.sort
    if sort and sort.startswith('-'):
        sort = sort[1:] + ' desc'
    sort = 'transaction.' + sort

    txn = self.db.transaction()
    if config.query_timeout:
        self.db.query(
            "SELECT set_config('statement_timeout', $query_timeout, false)",
            dict(query_timeout=config.query_timeout))

    rows = self.db.select(['thing', 'version', 'transaction'],
                          what=what,
                          where=where,
                          offset=query.offset,
                          limit=query.limit,
                          order=sort).list()

    # Batch-resolve author keys before committing the transaction.
    author_ids = list(set(row.author_id for row in rows if row.author_id))
    authors = self.get_metadata_list_from_ids(author_ids)
    txn.commit()

    for row in rows:
        row.author = row.author_id and authors[row.author_id].key
    return rows
def get_memcache_client(self):
    """Return an olmemcache.Client when memcache is fully configured,
    otherwise None.
    """
    params = config.get("cache", {})
    # Guard clause: anything other than a memcache config with servers
    # yields no client.
    if params.get("type") != "memcache" or "servers" not in params:
        return None
    return olmemcache.Client(params["servers"])
def get_user_root():
    """Return the configured user root, normalized to end in exactly one '/'."""
    root = config.get("user_root", "/user")
    return "%s/" % root.rstrip("/")