def write_items_SQL_A(basename, nr):
    """Write nr items through a single shared wikStore_sqlite instance."""
    # one store object, reused for every write
    store = wikStore_sqlite(NAME + '.db')
    for idx in range(nr):
        item = WikklyItem('%s-%d' % (basename, idx),
                          '%s item #%d (A)' % (basename, idx))
        store.saveitem(item)
def write_items_SQL_B(basename, nr):
    """Write nr items, opening a fresh wikStore_sqlite for each write."""
    for idx in range(nr):
        # brand-new store object per write
        target = wikStore_sqlite(NAME + '.db')
        target.saveitem(
            WikklyItem('%s-%d' % (basename, idx),
                       '%s item #%d (B)' % (basename, idx)))
def testSQLite(self):
    """Run the search-word tests against a freshly created sqlite store."""
    from wikklytext.store import wikStore_sqlite
    import os
    dbname = 'testsqlite.db'
    # start from a clean database file
    if os.path.isfile(dbname):
        os.unlink(dbname)
    self.doSearchWords(wikStore_sqlite(dbname))
def testRWSQLite(self):
    """Run the read/write store tests against a freshly created sqlite store."""
    from wikklytext.store import wikStore_sqlite
    import os
    dbname = 'testsqlite.db'
    # start from a clean database file
    if os.path.isfile(dbname):
        os.unlink(dbname)
    self.doStoreTests(wikStore_sqlite(dbname), u'WikklyText')
def testSQLite_B(self):
    """Concurrent writers, one store object per write (variant B).

    Spawns the writer processes against a clean database, then verifies
    that every expected item name is present in the resulting store.
    """
    dbfile = NAME + '.db'
    # start from a clean database file
    if os.path.isfile(dbfile):
        os.unlink(dbfile)
    run_processes_SQL_B()
    store = wikStore_sqlite(dbfile)
    names = store.names()
    # assertEqual reports both values on failure, unlike the deprecated
    # failIf(len(names) != ...) it replaces (same pass/fail condition).
    self.assertEqual(len(names), NR_PROCESSES * ITEMS_PER_WRITER)
def testSQLite_A(self):
    """Concurrent writers, one shared store object per writer (variant A).

    Spawns the writer processes against a clean database, then verifies
    that every expected item name is present in the resulting store.
    """
    dbfile = NAME + '.db'
    # start from a clean database file
    if os.path.isfile(dbfile):
        os.unlink(dbfile)
    run_processes_SQL_A()
    store = wikStore_sqlite(dbfile)
    names = store.names()
    # assertEqual reports both values on failure, unlike the deprecated
    # failIf(len(names) != ...) it replaces (same pass/fail condition).
    self.assertEqual(len(names), NR_PROCESSES * ITEMS_PER_WRITER)
def run(self):
    """Worker-thread main loop: serve store commands from StoreCmdQueue.

    Each command is a tuple ('opname', reply_queue, *args); results (or a
    StoreWorkerError wrapping a traceback dump) are posted back on the
    reply queue.  The thread exits only on a 'quit' command.
    """
    while True:
        # Reply queue for the command being processed.  Kept as an
        # outer-scope name so the except-handler can tell whether the
        # failure happened before or after the command was unpacked
        # (the original code would UnboundLocalError and kill the
        # thread if the exception fired before Q was assigned).
        Q = None
        try:
            # wait for next command
            cmd = StoreCmdQueue.get()
            op = cmd[0]
            if op == 'quit':
                # close all stores explicitly before exiting
                for name in list(self.storemap.keys()):
                    del self.storemap[name]
                return
            elif op == 'open_store':
                Q, kind, pathname = cmd[1:]
                if kind == 'text':
                    store = wikStore_files(pathname)
                elif kind == 'tiddlywiki':
                    store = wikStore_tw(pathname)
                elif kind == 'sqlite':
                    store = wikStore_sqlite(pathname)
                else:
                    # previously an unknown kind fell through and either
                    # NameError'd or silently reused a stale 'store'
                    raise ValueError('unknown store kind %r' % (kind,))
                sid = makeGUID('%s:%s' % (kind, pathname))
                self.storemap[sid] = store
                Q.put(sid)
            elif op == 'info':
                Q, storeID = cmd[1:]
                Q.put(self.storemap[storeID].info())
            elif op == 'getpath':
                Q, storeID = cmd[1:]
                Q.put(self.storemap[storeID].getpath())
            elif op == 'names':
                Q, storeID = cmd[1:]
                Q.put(self.storemap[storeID].names())
            elif op == 'getitem':
                Q, storeID, name = cmd[1:]
                Q.put(self.storemap[storeID].getitem(name))
            elif op == 'getall':
                Q, storeID = cmd[1:]
                Q.put(self.storemap[storeID].getall())
            elif op == 'saveitem':
                Q, storeID, item, oldname = cmd[1:]
                self.storemap[storeID].saveitem(item, oldname)
                Q.put(0)
            elif op == 'delete':
                Q, storeID, item = cmd[1:]
                self.storemap[storeID].delete(item)
                Q.put(0)
            elif op == 'search':
                Q, storeID, query = cmd[1:]
                Q.put(self.storemap[storeID].search(query))
        # catch ALL unhandled exceptions and pass to caller;
        # keep thread alive though in case caller wants to continue
        # (this gives a single exit point).  'except Exception as'
        # replaces the Python-2-only 'except Exception, exc' form;
        # the bound exception was never used.
        except Exception:
            # I'm not sure that exceptions can be passed between threads
            # so for now just pass the text dump of it.  If the failure
            # happened before a reply queue was unpacked there is no
            # caller to notify, so stay alive and wait for the next command.
            if Q is not None:
                Q.put(StoreWorkerError(cgitb.text(sys.exc_info())))
def write_items_SQL_B(basename, nr):
    """Write nr items, constructing a new wikStore_sqlite per write."""
    for n in range(nr):
        entry = WikklyItem('%s-%d' % (basename, n),
                           '%s item #%d (B)' % (basename, n))
        # fresh store object for this single write
        wikStore_sqlite(NAME + '.db').saveitem(entry)
def write_items_SQL_A(basename, nr):
    """Write nr items through one wikStore_sqlite shared across all writes."""
    # single store object for the whole batch
    db = wikStore_sqlite(NAME + '.db')
    for n in range(nr):
        db.saveitem(WikklyItem('%s-%d' % (basename, n),
                               '%s item #%d (A)' % (basename, n)))