def initialize_tagging():
    """Install the tagging root objects in the database.

    Documentation of a one-time setup step; run once per database:

        opendb qon
        >>> db.root.tags_db = qon.TagsDB()
    """
    db = get_database()
    # Create each tagging root only if it is not already present.
    for root_key, class_name in (("tags_db", "TagsDB"),
                                 ("tagged_item_db", "TaggedItemDB")):
        if not db.root.has_key(root_key):
            db.init_root(root_key, "qon.tags_db", class_name)
def iterate_id(max=10**9): db = get_database() count = 0 types = {} for obj in db.iterate(): t = type(obj) types[t] = types.get(t, 0) + 1 count += 1 if count == max: break bytype = [] for t, c in types.iteritems(): bytype.append((c, t)) bytype.sort() bytype.reverse() for c, t in bytype: print '%8d %s' % (c, t)
def iterate_touch(max=10**9): db = get_database() count = 0 types = {} for obj in db.iterate(): t = type(obj) types[t] = types.get(t, 0) + 1 # force unghost - if you don't do this, # setting _p_changed has no effect if hasattr(obj, 'foo'): pass obj._p_changed = 1 assert obj._p_changed if count % 1000 == 0: transaction_commit(None, 'Touch') print 'Touched %d objects' % count count += 1 if count == max: break transaction_commit(None, 'Touch') bytype = [] for t, c in types.iteritems(): bytype.append((c, t)) bytype.sort() bytype.reverse() for c, t in bytype: print '%8d %s' % (c, t)
def process_request (self, request, env):
    """Handle one request via quixote's Publisher, wrapped with either
    hotshot profiling (when '__profile' appears in the query string) or
    request timing + cache instrumentation logging.

    Returns whatever Publisher.process_request returns.
    NOTE(review): presumably a method of a Publisher subclass — the
    enclosing class is not visible here.
    """
    from quixote.publish import Publisher
    url = request.get_url()
    method = request.get_method()
    try:
        has_profile_request = (request.environ['QUERY_STRING'].find('__profile') != -1)
    except:
        # No QUERY_STRING (or no environ) -> no profiling requested.
        has_profile_request = False
    # if has_profile_request or (('/edit' in url) and (method=='POST')) or ('.xml' in url):
    # if has_profile_request or (('/edit' in url) and (method=='POST')):
    if has_profile_request:
        import sys, os, hotshot, hotshot.stats
        import cStringIO
        # Profile the request into a temp file, then load/format stats.
        file_name = os.tempnam('/var/tmp', 'scgi.prof.')
        prof = hotshot.Profile(file_name)
        result = prof.runcall(Publisher.process_request, self, request, env)
        prof.close()
        stats = hotshot.stats.load(file_name).strip_dirs().sort_stats("cumulative")
        os.unlink(file_name)
        # print_stats writes to sys.stdout; temporarily redirect it
        # into a StringIO so the report can be mailed.
        stats_io = cStringIO.StringIO()
        save_stdout = sys.stdout
        sys.stdout = stats_io
        stats.print_stats(100)
        sys.stdout = save_stdout
        from qon.util import sendmail
        sendmail("Profile Output: %s %s" % (method, url), stats_io.getvalue(), ['*****@*****.**'])
        stats_io.close()
        return result
    else:
        # for recording cache activity: snapshot counters before the call
        pre_accesses = get_database().storage._cache.fc._n_accesses
        pre_adds = get_database().storage._cache.fc._n_adds
        pre_added_bytes = get_database().storage._cache.fc._n_added_bytes
        pre_evicts = get_database().storage._cache.fc._n_evicts
        pre_evicted_bytes = get_database().storage._cache.fc._n_evicted_bytes
        # for timing each request
        start = datetime.utcnow()
        # DO IT
        result = Publisher.process_request(self, request, env)
        # get elapsed time
        td = datetime.utcnow() - start
        time_in_ms = td.seconds*1000 + td.microseconds/1000
        # for recording basic cache activity: deltas over the request
        total_added_bytes = get_database().storage._cache.fc._n_added_bytes
        total_evicted_bytes = get_database().storage._cache.fc._n_evicted_bytes
        accesses = get_database().storage._cache.fc._n_accesses - pre_accesses
        adds = get_database().storage._cache.fc._n_adds - pre_adds
        added_bytes = total_added_bytes - pre_added_bytes
        evicts = get_database().storage._cache.fc._n_evicts - pre_evicts
        evicted_bytes = total_evicted_bytes - pre_evicted_bytes
        # log slow requests to a file (and for now, any edits)
        # if (time_in_ms > local.LOG_TIMING_MIN_MS) or (('/edit' in url) and (method=='POST')) or (random.randint(0,99)==0):
        # if (time_in_ms > local.LOG_TIMING_MIN_MS) or (('/edit' in url) and (method=='POST')):
        if (time_in_ms > local.LOG_TIMING_MIN_MS):
            if local.CACHE_INSTRUMENTATION:
                # report detailed cache stats
                detailed_cache_stats = get_database().storage._cache.fc.get_formatted_cache_stats()
                qon.log.timing_info('%s\t%s\t%d ms\t(%d ac; %d a, %d ab, %d tab; %d e, %d eb, %d teb\n%s' \
                    % (method, url, time_in_ms, accesses, adds, added_bytes, total_added_bytes,
                       evicts, evicted_bytes, total_evicted_bytes, detailed_cache_stats))
            else:
                # just report basic cache stats
                qon.log.timing_info('%s\t%s\t%d ms\t(%d ac; %d a, %d ab, %d tab; %d e, %d eb, %d teb)' \
                    % (method, url, time_in_ms, accesses, adds, added_bytes, total_added_bytes,
                       evicts, evicted_bytes, total_evicted_bytes))
        # record histogram of times for reporting on admin page
        record_time(url, get_user(), time_in_ms)
        if local.CACHE_INSTRUMENTATION:
            # clear out lists to ready for next call
            detailed_cache_stats = get_database().storage._cache.fc.clear_oid_lists()
        return result
def upgrade_misc_db():
    """Create the 'misc_db' root object if this database lacks one."""
    from qon.base import get_root, get_database
    if get_root().has_key('misc_db'):
        return
    get_database().init_root('misc_db', 'qon.user_db', 'MiscDB')
def pack_database():
    """Pack the database."""
    get_database().pack()