class durusCommit(object):
    "A class to simply commit a given data with date tuple as key"

    def __init__(self, folder, dbFileName):
        # Database file lives at <folder>/<dbFileName>.durus
        self.dbFileName = folder + '/' + dbFileName + '.durus'
        self.con = Connection(FileStorage(self.dbFileName))

    def getRoot(self):
        """Return the root persistent mapping of the database."""
        return self.con.get_root()

    def connectionCommit(self):
        """Commit any pending changes on the connection."""
        self.con.commit()

    def makeChapterCommit(self, currentdate, book, chapterDict):
        """Store chapterDict under root[book][dateKey] and commit.

        currentdate may be a ('YYYY', 'MM', 'DD') tuple or a
        datetime.date; any other type leaves the key as the empty
        tuple (matching the original behaviour).
        """
        dateKey = ()
        if isinstance(currentdate, tuple):
            dateKey = currentdate
        elif isinstance(currentdate, datetime.date):
            # A tuple can never also be a date, so elif is equivalent.
            dateKey = tuple(currentdate.strftime('%Y-%m-%d').split('-'))
        root = self.con.get_root()
        if book not in root:  # fix: idiomatic membership test
            root[book] = PersistentDict()
            self.con.commit()
        root[book][dateKey] = chapterDict
        self.con.commit()
def check_oid_reuse(self):
    """After a pack, an oid relinquished by one client is reissued to a
    new object, and stale connections see the change only on reload."""
    # Requires ShelfStorage oid reuse pack semantics
    s1 = ClientStorage(address=self.address)
    s1.oid_pool_size = 1
    c1 = Connection(s1)
    r1 = c1.get_root()
    s2 = ClientStorage(address=self.address)
    s2.oid_pool_size = 1
    c2 = Connection(s2)
    r2 = c2.get_root()
    r1['a'] = PersistentDict()
    r1['b'] = PersistentDict()
    c1.commit()
    c2.abort()
    a_oid = r1['a']._p_oid
    assert 'a' in r1 and 'b' in r1 and len(r1['b']) == 0
    assert 'a' in r2 and 'b' in r2 and len(r2['b']) == 0
    del r2['a'] # remove only reference to a
    c2.commit()
    c2.pack() # force relinquished oid back into availability
    sleep(0.5) # Give time for pack to complete
    c2.abort()
    # c2 can no longer resolve the packed-away oid...
    assert c2.get(a_oid) is None
    c1.abort()
    # ...but c1 still holds a ghost for it.
    assert c1.get(a_oid)._p_is_ghost()
    r2['b']['new'] = Persistent()
    r2['b']['new'].bogus = 1
    c2.commit()
    # The new object received the reused oid.
    assert c2.get(a_oid) is r2['b']['new']
    c1.abort()
    # c1's stale ghost still reports the old class until a real load.
    assert c1.get(a_oid).__class__ == PersistentDict
    r1['b']['new'].bogus
    assert c1.get(a_oid).__class__ == Persistent
    s1.close()
def _scenario(self):
    """Create two connections on the shared storage, weaken c1's cached
    reference to root['A'], and return (c1, c2)."""
    first = Connection(self._get_storage())
    second = Connection(self._get_storage())
    root = first.get_root()
    root['A'] = Persistent()
    root['B'] = Persistent()
    root['A'].a = 1
    first.commit()
    second.abort()
    # Imagine c1 has been running for a while, and cache management,
    # for example, has caused the cache reference to be weak.
    first.cache.recent_objects.discard(root['A'])
    return first, second
def _scenario(self):
    """Build two connections over one storage; c1's cached copy of
    root['A'] is discarded to simulate long-running cache pressure."""
    conn_a = Connection(self._get_storage())
    conn_b = Connection(self._get_storage())
    top = conn_a.get_root()
    top['A'] = Persistent()
    top['B'] = Persistent()
    top['A'].a = 1
    conn_a.commit()
    conn_b.abort()
    # Imagine c1 has been running for a while, and cache management,
    # for example, has caused the cache reference to be weak.
    conn_a.cache.recent_objects.discard(top['A'])
    return conn_a, conn_b
def create_durus_publisher():
    """Open (creating if needed) the demo Durus database and return a
    Publisher whose session manager is persisted in it."""
    global connection
    filename = os.path.join(tempfile.gettempdir(), 'quixote-demo.durus')
    print('Opening %r as a Durus database.' % filename)
    connection = Connection(FileStorage(filename))
    root = connection.get_root()
    manager = root.get('session_manager', None)
    if manager is None:
        # First run: create and commit a fresh session manager.
        manager = PersistentSessionManager()
        connection.get_root()['session_manager'] = manager
        connection.commit()
    return Publisher(RootDirectory(),
                     session_manager=manager,
                     display_exceptions='plain')
def create_durus_publisher():
    """Open (creating if needed) the demo Durus database and return a
    Publisher whose session manager is persisted in it."""
    global connection
    filename = os.path.join(tempfile.gettempdir(), 'quixote-demo.durus')
    # Fix: parenthesized print works both as a Python 2 print statement
    # and a Python 3 function call (matches the sibling py3 version).
    print('Opening %r as a Durus database.' % filename)
    connection = Connection(FileStorage(filename))
    root = connection.get_root()
    session_manager = root.get('session_manager', None)
    if session_manager is None:
        # First run: create and commit a fresh session manager.
        session_manager = PersistentSessionManager()
        connection.get_root()['session_manager'] = session_manager
        connection.commit()
    return Publisher(RootDirectory(),
                     session_manager=session_manager,
                     display_exceptions='plain')
class TestBackend:
    """Dict-like test backend stored in a Durus FileStorage.

    NOTE(review): __getitem__ looks keys up as str(key) while
    __setitem__ stores them as given, and first()/iteritems() index by
    integer -- callers appear to mix string and integer keys; confirm
    before relying on either convention.
    """

    def __init__(self, filename, mode):
        self.mode = mode
        if mode in ("w", "r"):
            # Fix: the "w" and "r" branches duplicated identical
            # storage setup; open once for both.
            self.storage = FileStorage(filename)
            self.connection = Connection(self.storage)
            self.test_db_items = self.connection.get_root()
        if mode == "r":
            self.next_rec_num = 0  # Initialise next record counter
            self.num_records = len(self.test_db_items)

    def __setitem__(self, key, value):
        self.test_db_items[key] = value

    def __getitem__(self, key):
        return self.test_db_items[str(key)]

    def __len__(self):
        return len(self.test_db_items)

    def first(self):
        # Record stored under the integer key 0.
        return self.test_db_items[0]

    def iteritems(self):
        """Yield record values sequentially ("r" mode only).

        Despite the name, this yields values, not (key, value) pairs.
        """
        while self.next_rec_num < self.num_records:
            value = self.test_db_items[self.next_rec_num]
            self.next_rec_num += 1
            yield value

    def close(self):
        self.connection.commit()
        self.storage.close()

    def getTestDBItems(self):
        return self.test_db_items.values()
def check_oid_reuse_with_invalidation(self):
    """Repeatedly rebind root['x'] from fresh connections, packing
    after the second commit, to exercise oid reuse invalidation."""
    for step in range(3):
        conn = Connection(ClientStorage(address=self.address))
        conn.get_root()['x'] = Persistent()
        conn.commit()
        if step == 1:
            # Pack between the second and third cycle.
            conn.pack()
            sleep(1)  # Make sure pack finishes.
def end_protocol_error(self):
    # A malformed commit reply ('?' status byte) from the server must
    # surface as ProtocolError rather than being misinterpreted.
    s1 = ClientStorage(address=self.address)
    c1 = Connection(s1)
    r1 = c1.get_root()
    # Replace the live socket with a fake that replies with junk.
    s1.s = FakeSocket('\0\0\0\0?')
    r1._p_note_change()
    raises(ProtocolError, c1.commit)
def run_trials():
    """Spawn simulation trials on a thread pool, gather the per-trial
    locus-frequency histories, and store them in the Durus results
    database keyed by the current epoch second.

    Python 2 module: uses the print statement and raw_input().
    """
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)
    # f presumably launches one trial and returns a job id -- confirm.
    jids = pool.map(f, [gens]*numTrials)
    print "Done spawning trials. Retrieving results..."
    results = pool.map(cloud_result, jids)
    firstLocusFreqsHists = zeros((numTrials, gens+1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials, gens+1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."
    raw_input()  # manual checkpoint before writing to disk
    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        # Key each run by its epoch-second timestamp.
        conn.get_root()[str(int(floor(time.time())))] = (
            firstLocusFreqsHists, lastLocusFreqsHists)
        conn.commit()
    pool.close()
    pool.join()
def render_results(timestamp=None):
    """Plot the locus-frequency histories stored in the Durus results
    database; defaults to the most recent run.

    Python 2 module: uses the print statement.
    """
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        db = conn.get_root()
        if not timestamp:
            # Keys are epoch-second strings; the maximum is the newest.
            timestamp = sorted(db.keys())[-1]
        firstLocusFreqsHists, lastLocusFreqsHists = db[timestamp]
    print "Done deserializing results. Plotting..."
    x = [(2, 'First', firstLocusFreqsHists, "effective"),
         (3, 'Last', lastLocusFreqsHists, "non-effective")]
    for i, pos, freqsHists, filename in x :
        # Only the first 801 generations are plotted.
        freqsHists = freqsHists[:,:801]
        f = figure(i)
        hold(False)
        plot(transpose(freqsHists), color='grey')
        hold(True)
        maxGens = freqsHists.shape[1]-1
        # Dashed guides at the 5% and 95% frequency levels.
        plot([0, maxGens], [.05,.05], 'k--')
        plot([0, maxGens], [.95,.95], 'k--')
        axis([0, maxGens, 0, 1])
        xlabel('Generation')
        ylabel('1-Frequency of the '+pos+' Locus')
        f.canvas.draw()
        f.show()
        savefig(filename+'.png', format='png', dpi=200)
def run_trials():
    """Spawn simulation trials on a thread pool, gather the per-trial
    locus-frequency histories, and store them in the Durus results
    database keyed by the current epoch second.

    Python 2 module: uses the print statement and raw_input().
    """
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)
    # f presumably launches one trial and returns a job id -- confirm.
    jids = pool.map(f, [gens] * numTrials)
    print "Done spawning trials. Retrieving results..."
    results = pool.map(cloud_result, jids)
    firstLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."
    raw_input()  # manual checkpoint before writing to disk
    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        # Key each run by its epoch-second timestamp.
        conn.get_root()[str(int(floor(time.time())))] = (
            firstLocusFreqsHists, lastLocusFreqsHists)
        conn.commit()
    pool.close()
    pool.join()
def render_results(timestamp=None):
    """Plot the locus-frequency histories stored in the Durus results
    database; defaults to the most recent run.

    Python 2 module: uses the print statement.
    """
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        db = conn.get_root()
        if not timestamp:
            # Keys are epoch-second strings; the maximum is the newest.
            timestamp = sorted(db.keys())[-1]
        firstLocusFreqsHists, lastLocusFreqsHists = db[timestamp]
    print "Done deserializing results. Plotting..."
    x = [(2, 'First', firstLocusFreqsHists, "effective"),
         (3, 'Last', lastLocusFreqsHists, "non-effective")]
    for i, pos, freqsHists, filename in x:
        # Only the first 801 generations are plotted.
        freqsHists = freqsHists[:, :801]
        f = figure(i)
        hold(False)
        plot(transpose(freqsHists), color='grey')
        hold(True)
        maxGens = freqsHists.shape[1] - 1
        # Dashed guides at the 5% and 95% frequency levels.
        plot([0, maxGens], [.05, .05], 'k--')
        plot([0, maxGens], [.95, .95], 'k--')
        axis([0, maxGens, 0, 1])
        xlabel('Generation')
        ylabel('1-Frequency of the ' + pos + ' Locus')
        f.canvas.draw()
        f.show()
        savefig(filename + '.png', format='png', dpi=200)
def check_storage_tools(self):
    """Exercise the storage inspection helpers, then verify the
    abstract Storage base class refuses direct use."""
    conn = Connection(self._get_storage())
    root = conn.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    conn.commit()
    storage = conn.get_storage()
    # Root (oid 0) refers to both new objects (oids 1 and 2).
    expected_index = {int8_to_str(1): [int8_to_str(0)],
                      int8_to_str(2): [int8_to_str(0)]}
    assert get_reference_index(storage) == expected_index
    expected_census = {as_bytes('PersistentDict'): 1,
                       as_bytes('Persistent'): 2}
    assert get_census(storage) == expected_census
    referrers = list(gen_referring_oid_record(storage, int8_to_str(1)))
    assert referrers == [(int8_to_str(0), storage.load(int8_to_str(0)))]
    class Fake(object):
        pass
    fake = Fake()
    fake.__class__ = Storage
    raises(RuntimeError, fake.__init__)
    raises(NotImplementedError, fake.load, None)
    raises(NotImplementedError, fake.begin)
    raises(NotImplementedError, fake.store, None, None)
    raises(NotImplementedError, fake.end)
    raises(NotImplementedError, fake.sync)
    gen = fake.gen_oid_record()
    raises(NotImplementedError, next, gen)
def interactive_client(file, address, cache_size, readonly, repair, startup):
    """Open an interactive Python console connected to a Durus database
    (a local file if ``file`` is given, otherwise a storage server at
    ``address``), optionally exec-ing a startup script first.
    """
    if file:
        storage = FileStorage(file, readonly=readonly, repair=repair)
        description = file
    else:
        socket_address = SocketAddress.new(address)
        wait_for_server(address=socket_address)
        storage = ClientStorage(address=socket_address)
        description = socket_address
    connection = Connection(storage, cache_size=cache_size)
    # Build a real module so the console behaves like a normal REPL.
    console_module = ModuleType('__console__')
    sys.modules['__console__'] = console_module
    namespace = {'connection': connection,
                 'root': connection.get_root(),
                 'get': connection.get,
                 'sys': sys,
                 'os': os,
                 'int8_to_str': int8_to_str,
                 'str_to_int8': str_to_int8,
                 'pp': pprint}
    vars(console_module).update(namespace)
    configure_readline(
        vars(console_module), os.path.expanduser("~/.durushistory"))
    console = InteractiveConsole(vars(console_module))
    if startup:
        # Compile and exec the startup file inside the console
        # namespace (portable replacement for Python 2's execfile).
        src = '''with open('{fn}', 'rb') as _:
    _ = compile(_.read(), '{fn}', 'exec')
exec(globals().pop('_'))
'''.format(fn = os.path.expanduser(startup)).rstrip()
        console.runsource(src, '-stub-', 'exec')
    # 'help' shadows the builtin here; it is only a banner string.
    help = (' connection -> the Connection\n'
            ' root -> the root instance')
    console.interact('Durus %s\n%s' % (description, help))
def interactive_client(file, address, cache_size, readonly, repair,
                       startup, storage_class=None):
    """Open an interactive Python console connected to a Durus database
    (a local file if ``file`` is given, otherwise a storage server at
    ``address``), optionally exec-ing a startup script first.
    """
    if file:
        storage = get_storage(file, storage_class=storage_class,
                              readonly=readonly, repair=repair)
        description = file
    else:
        socket_address = SocketAddress.new(address)
        wait_for_server(address=socket_address)
        storage = ClientStorage(address=socket_address)
        description = socket_address
    connection = Connection(storage, cache_size=cache_size)
    # Build a real module so the console behaves like a normal REPL.
    console_module = ModuleType('__console__')
    sys.modules['__console__'] = console_module
    namespace = {'connection': connection,
                 'root': connection.get_root(),
                 'get': connection.get,
                 'sys': sys,
                 'os': os,
                 'int8_to_str': int8_to_str,
                 'str_to_int8': str_to_int8,
                 'pp': pprint}
    vars(console_module).update(namespace)
    configure_readline(
        vars(console_module), os.path.expanduser("~/.durushistory"))
    console = InteractiveConsole(vars(console_module))
    if startup:
        # Fix: execfile() does not exist on Python 3. Compile and exec
        # the startup file inside the console namespace instead
        # (same pattern as the sibling version of this function).
        src = '''with open('{fn}', 'rb') as _:
    _ = compile(_.read(), '{fn}', 'exec')
exec(globals().pop('_'))
'''.format(fn=os.path.expanduser(startup)).rstrip()
        console.runsource(src, '-stub-', 'exec')
    # 'help' shadows the builtin here; it is only a banner string.
    help = (' connection -> the Connection\n'
            ' root -> the root instance')
    console.interact('Durus %s\n%s' % (description, help))
class Session(object):
    """
    Representation of the game state.
    """
    # Attribute names routed into the Durus root mapping instead of the
    # instance __dict__ (see __setattr__ / __getattribute__ below).
    _persistent_attributes = ('scheduler', 'started', 'lastroom',
        'universe', 'characters', 'player', 'debugging')

    # default values
    scheduler = None    # Scheduler instance
    started = False     # Is game started yet? (I.e. have player turns/actions begun)
    lastroom = None     # Used to determine auto-placement of items
    universe = None     # Top level container object (provides storage for entire game state)
    characters = ()     # List of character agents (references into universe)
    player = ()         # List of player character agents (normally only 1 in PUB)
    debugging = False   # Debugging mode is for use during game development

    def __init__(self, storagefile="default.sav"):
        # Note: self.storage is a Durus Connection, not a Storage.
        self.storage = Connection(FileStorage(storagefile))
        self.root = self.storage.get_root()
        self.running = False

    def __setattr__(self, name, value):
        # Persistent attributes are written to the database root;
        # everything else behaves like a normal attribute.
        if name in self._persistent_attributes:
            self.root[name] = value
        else:
            object.__setattr__(self, name, value)

    def __getattribute__(self, name):
        persistent_attributes = object.__getattribute__(
            self, '_persistent_attributes')
        if name in persistent_attributes:
            try:
                return self.root[name]
            except KeyError:
                # Never stored: fall back to the class-level default.
                return getattr(self.__class__, name)
        else:
            return object.__getattribute__(self, name)

    def new_game(self):
        """
        Start up a new game (clear the storage instance).
        """
        # NOTE(review): started is set True here (not the False
        # default), and characters/player are reset to None although
        # their class defaults are () -- confirm both are intentional.
        self.scheduler = None
        self.started = True
        self.lastroom = None
        self.universe = None
        self.characters = None
        self.player = None
        self.debugging = False
        self.commit()
        self.pack()

    def commit(self):
        self.storage.commit()

    def abort(self):
        self.storage.abort()

    def pack(self):
        self.storage.pack()
def check_storage_tools(self):
    """Check get_reference_index / get_census /
    gen_referring_oid_record on a two-object database, then the
    abstract Storage base class's error behaviour."""
    connection = Connection(self._get_storage())
    top = connection.get_root()
    top['a'] = Persistent()
    top['b'] = Persistent()
    connection.commit()
    st = connection.get_storage()
    # Both new objects (oids 1, 2) are referenced only by root (oid 0).
    assert get_reference_index(st) == {
        int8_to_str(1): [int8_to_str(0)],
        int8_to_str(2): [int8_to_str(0)],
    }
    assert get_census(st) == {
        as_bytes('PersistentDict'): 1,
        as_bytes('Persistent'): 2,
    }
    back_refs = list(gen_referring_oid_record(st, int8_to_str(1)))
    assert back_refs == [(int8_to_str(0), st.load(int8_to_str(0)))]
    # A bare Storage must refuse construction and every operation.
    class Fake(object):
        pass
    bogus = Fake()
    bogus.__class__ = Storage
    raises(RuntimeError, bogus.__init__)
    raises(NotImplementedError, bogus.load, None)
    raises(NotImplementedError, bogus.begin)
    raises(NotImplementedError, bogus.store, None, None)
    raises(NotImplementedError, bogus.end)
    raises(NotImplementedError, bogus.sync)
    pending = bogus.gen_oid_record()
    raises(NotImplementedError, next, pending)
def check_write_conflict(self):
    """A server reply carrying STATUS_INVALID during commit must be
    raised as WriteConflictError."""
    storage = ClientStorage(address=self.address)
    conn = Connection(storage)
    root = conn.get_root()
    # Swap in a fake socket whose reply marks the commit invalid.
    storage.s = FakeSocket('\0\0\0\0', STATUS_INVALID)
    root._p_note_change()
    raises(WriteConflictError, conn.commit)
def get_config():
    """Return the persistent configuration object stored under
    "_pconfig", creating and committing it on first use."""
    connection = Connection(FileStorage("/var/tmp/test.durus"))
    root = connection.get_root() # connection set as shown above.
    # Fix: dict.has_key() was removed in Python 3; ``in`` works on
    # both Python 2 and 3.
    if "_pconfig" not in root:
        cf = GenericConfiguration()
        root["_pconfig"] = cf
        root["_pconfig"]["default"] = SECTION()
        connection.commit()
    return root["_pconfig"]
def get_config():
    """Return the persistent configuration object stored under
    "_pconfig", creating and committing it on first use."""
    connection = Connection(FileStorage("/var/tmp/test.durus"))
    root = connection.get_root() # connection set as shown above.
    # Fix: dict.has_key() was removed in Python 3; ``in`` works on
    # both Python 2 and 3.
    if "_pconfig" not in root:
        cf = GenericConfiguration()
        root["_pconfig"] = cf
        root["_pconfig"]["default"] = SECTION()
        connection.commit()
    return root["_pconfig"]
def open_document(db_path):
    """Open the Durus database at db_path, seeding an empty Document
    and version marker on first use, and return the document."""
    connection = Connection(FileStorage(db_path))
    root = connection.get_root()
    if 'doc' not in root:
        # Fresh database: create the document and stamp its version.
        root['doc'] = Document()
        root['version'] = updates.current_version
        connection.commit()
    updates.do_updates(connection)
    handler = DocumentHandler(connection)
    return handler.doc
def main(old_file, new_file):
    """Copy the root state of the database in old_file into a fresh
    database at new_file, then commit and pack it.

    Fix: the body previously read sys.argv[1] / sys.argv[2] directly,
    ignoring the old_file / new_file parameters it had just validated.
    """
    if old_file.startswith('-'):
        usage()
    if new_file.startswith('-'):
        usage()
    assert not exists(new_file)
    connection = Connection(old_file)
    tmpfile = TemporaryFile()
    print("pickling from " + old_file)
    # Snapshot the root's state outside the storage so it can be
    # replayed into the new database.
    dump(connection.get_root().__getstate__(), tmpfile, 2)
    connection = None  # release the source database
    tmpfile.seek(0)
    connection2 = Connection(new_file)
    print("unpickling")
    connection2.get_root().__setstate__(load(tmpfile))
    connection2.get_root()._p_note_change()
    print("commit to " + new_file)
    connection2.commit()
    print("pack")
    connection2.pack()
def main(old_file, new_file):
    """Copy the root state of the database in old_file into a fresh
    database at new_file, then commit and pack it.

    Fix: the body previously read sys.argv[1] / sys.argv[2] directly,
    ignoring the old_file / new_file parameters it had just validated.
    """
    if old_file.startswith('-'):
        usage()
    if new_file.startswith('-'):
        usage()
    assert not exists(new_file)
    connection = Connection(old_file)
    tmpfile = TemporaryFile()
    print("pickling from " + old_file)
    # Snapshot the root's state outside the storage so it can be
    # replayed into the new database.
    dump(connection.get_root().__getstate__(), tmpfile, 2)
    connection = None  # release the source database
    tmpfile.seek(0)
    connection2 = Connection(new_file)
    print("unpickling")
    connection2.get_root().__setstate__(load(tmpfile))
    connection2.get_root()._p_note_change()
    print("commit to " + new_file)
    connection2.commit()
    print("pack")
    connection2.pack()
class _Projects(object):
    """
    Class for managing project and autocompletes for each project
    """
    def __init__(self):
        self._conn = Connection(FileStorage(PROJECTS_DATA_PATH))
        self._data = self._conn.get_root()
        if not len(self._data.keys()):
            # Fresh storage: seed a default project.
            self._data["Default"] = PersistentDict(
                autocomplete=PersistentDict())
            self.sync()

    def get(self):
        """Return projects list"""
        return self._data.keys()

    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(
            autocomplete=PersistentDict())
        self.sync()

    def remove(self, name):
        """Remove project"""
        # NOTE(review): unlike add(), this does not call self.sync();
        # the deletion is only committed by a later sync()/__del__.
        del self._data[unicode(name)]

    def getAutocomleteList(self, name, appendix=None):
        """
        Return autocomplete list for project, if appendix autocompletes
        was specified merge it with project autocompletes.
        """
        # Fix: mutable default argument replaced by a None sentinel.
        if appendix is None:
            appendix = {}
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        # Most-used first; ties broken by name (descending). Fix: the
        # Python-2-only tuple-unpacking lambda is replaced by a
        # portable key function with identical ordering.
        sortedDict = sorted(autocompDict.items(),
                            key=lambda item: (item[1], item[0]),
                            reverse=True)
        return [a[0] for a in sortedDict]

    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()

    def sync(self):
        self._conn.commit()

    def __del__(self):
        self.sync()
class _Settings(object):
    """Settings singleton object"""

    _appName = "MacTimeLog"

    # Fallback values used when a key is absent from the persistent
    # store.
    _defaultSettings = {
        "dateFormat": "%m-%d-%Y %H:%M",
        "timeFormat": "%H:%M",
        "logDateTimeFormat": "at %H:%M",
        "workEndTime": "06:00",
        "workDayLength": 3600*8,
        "timerInterval": 1,
        "showWorkTill": False,
        "showDateTime": False,
        "logPath": "%s/%s" % (settingsFolder(_appName), "log.txt"),
        "projectsDataPath": "%s/%s" % (settingsFolder(_appName), "projects"),
        "slackingDataPath": "%s/%s" % (settingsFolder(_appName), "slacking"),
        "logEditCommand": "open -a TextEdit \"%s\"",
        "projectSeparator": "::",
        "selectedProject": "Default",
        "startPlaceholder": "__start__",
        "showNotification": False,
        "notificationTime": 40,
        "notificationRepeatTime": 10,
        "soundOnNotification": False,
        "showHelpMessageOnStart": True
    }

    _globalSettings = {}

    def __init__(self):
        self._settingsFile = "%s/%s" % (settingsFolder(self._appName),
                                        "settings")
        self._conn = Connection(FileStorage(self._settingsFile))
        self._globalSettings = self._conn.get_root()

    def get(self, key):
        """Return setting value by key: the persisted value if present,
        otherwise the built-in default, otherwise None."""
        if key in self._globalSettings:
            return self._globalSettings[key]
        if key in self._defaultSettings:
            return self._defaultSettings[key]
        # Fix: previously fell through a bare ``else: pass``; make the
        # None result for unknown keys explicit.
        return None

    def set(self, key, value):
        """Set setting value by key"""
        self._globalSettings[key] = value

    def sync(self):
        self._conn.commit()

    def __del__(self):
        self.sync()
class Store():
    """Thin client wrapper around a Durus storage server."""

    def __init__(self, host="127.0.0.1", port=2972):
        self.address = host, port
        self.conn = Connection(ClientStorage(self.address))
        self.root = self.conn.get_root()

    def get_objects(self, key):
        """Return the collection stored under key, or None if absent."""
        return self.root.get(key)

    def new_objects(self, key):
        """Create an empty BTree under a non-empty string key."""
        # Fix: isinstance() instead of a type() equality comparison.
        assert key and isinstance(key, str)
        self.root[key] = BTree()
def main():
    """Command-line stress driver: connect to a Durus server, seed the
    database if needed, then loop, randomly aborting, verifying, and
    mutating until --max-loops iterations (forever by default)."""
    parser = OptionParser()
    parser.set_description('Stress test a Durus Server')
    parser.add_option('--port', dest='port', default=DEFAULT_PORT,
        type='int',
        help='Port to listen on. (default=%s)' % DEFAULT_PORT)
    parser.add_option('--host', dest='host', default=DEFAULT_HOST,
        help='Host to listen on. (default=%s)' % DEFAULT_HOST)
    parser.add_option('--cache_size', dest="cache_size", default=4000,
        type="int",
        help="Size of client cache (default=4000)")
    parser.add_option('--max-loops', dest='loops', default=None,
        type='int',
        help='Maximum number of loops before exiting.')
    (options, args) = parser.parse_args()
    from durus.logger import logger
    logger.setLevel(5)
    storage = ClientStorage(host=options.host, port=options.port)
    connection = Connection(storage, cache_size=options.cache_size)
    try:
        # Seed the database on first run, then sanity-check it all.
        if 'obj' not in connection.get_root():
            init_db(connection)
            verify_db(connection, all=True)
            connection.commit()
    except ConflictError:
        connection.abort()
    n = options.loops
    while n is None or n > 0:
        if n is not None:
            n -= 1
        try:
            if hasattr(sys, 'gettotalrefcount'):
                # Debug builds only: watch for reference leaks.
                sys.stdout.write('refs = %s\n' % sys.gettotalrefcount())
            if randbool():
                connection.abort()
            verify_db(connection)
            mutate_db(connection)
            connection.commit()
            maybe_sleep()
        except ConflictError:
            # Another client won the race; drop our changes and retry.
            sys.stdout.write('conflict\n')
            connection.abort()
            maybe_sleep()
def check_touch_every_reference(self):
    """touch_every_reference must mark instances of the named class,
    and their referrers, as unsaved -- but not untouched objects."""
    conn = Connection(self._get_storage())
    root = conn.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    from durus.persistent_list import PersistentList
    root['b'].c = PersistentList()
    conn.commit()
    touch_every_reference(conn, 'PersistentList')
    # The list and the object holding it are dirty; root is not.
    assert root['b']._p_is_unsaved()
    assert root['b'].c._p_is_unsaved()
    assert not root._p_is_unsaved()
    # root, 'a', 'b', and the list: four cached objects in all.
    assert len(list(conn.get_cache())) == 4
def check_touch_every_reference(self):
    """Touching every PersistentList reference dirties the list and its
    holder while leaving unrelated objects clean."""
    connection = Connection(self._get_storage())
    top = connection.get_root()
    top['a'] = Persistent()
    top['b'] = Persistent()
    from durus.persistent_list import PersistentList
    top['b'].c = PersistentList()
    connection.commit()
    touch_every_reference(connection, 'PersistentList')
    assert top['b']._p_is_unsaved()
    assert top['b'].c._p_is_unsaved()
    assert not top._p_is_unsaved()
    # Cache holds exactly: root, 'a', 'b', and the list.
    cached = list(connection.get_cache())
    assert len(cached) == 4
class _Projects(object):
    """
    Class for managing project and autocompletes for each project
    """
    def __init__(self):
        self._conn = Connection(FileStorage(Settings.get("projectsDataPath")))
        self._data = self._conn.get_root()
        if not len(self._data.keys()):
            # Fresh storage: seed a default project.
            self._data["Default"] = PersistentDict(
                autocomplete=PersistentDict())
            self.sync()

    def get(self):
        """Return projects list"""
        return self._data.keys()

    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(
            autocomplete=PersistentDict())
        self.sync()

    def remove(self, name):
        """Remove project"""
        # NOTE(review): unlike add(), this does not call self.sync();
        # the deletion is only committed by a later sync()/__del__.
        del self._data[unicode(name)]

    def getAutocomleteList(self, name, appendix=None):
        """
        Return autocomplete list for project, if appendix autocompletes
        was specified merge it with project autocompletes.
        """
        # Fix: mutable default argument replaced by a None sentinel.
        if appendix is None:
            appendix = {}
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        # Most-used first; ties broken by name (descending). Fix: the
        # Python-2-only tuple-unpacking lambda is replaced by a
        # portable key function with identical ordering.
        sortedDict = sorted(autocompDict.items(),
                            key=lambda item: (item[1], item[0]),
                            reverse=True)
        return [a[0] for a in sortedDict]

    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()

    def sync(self):
        self._conn.commit()

    def __del__(self):
        self.sync()
def check_fine_conflict(self):
    """Conflict detection is per-object and access-based: another
    connection's change only conflicts once this connection has
    actually loaded the changed object's state."""
    c1 = Connection(self._get_storage())
    c2 = Connection(self._get_storage())
    c1.get_root()['A'] = Persistent()
    c1.get_root()['A'].a = 1
    c1.get_root()['B'] = Persistent()
    c1.commit()
    c2.abort()
    # c1 has A loaded.
    assert not c1.get_root()['A']._p_is_ghost()
    c1.get_root()['B'].b = 1
    c2.get_root()['A'].a = 2
    c2.commit()
    # Even though A has been changed by c2,
    # c1 has not accessed an attribute of A since
    # the last c1.commit(), so we don't want a ConflictError.
    c1.commit()
    assert c1.get_root()['A']._p_is_ghost()
    c1.get_root()['A'].a # accessed!
    c1.get_root()['B'].b = 1
    c2.get_root()['A'].a = 2
    c2.commit()
    # This time c1 read A after c2's change, so the commit conflicts.
    raises(WriteConflictError, c1.commit)
def check_fine_conflict(self):
    """Conflict detection is per-object and access-based: another
    connection's change only conflicts once this connection has
    actually loaded the changed object's state."""
    c1 = Connection(self._get_storage())
    c2 = Connection(self._get_storage())
    c1.get_root()['A'] = Persistent()
    c1.get_root()['A'].a = 1
    c1.get_root()['B'] = Persistent()
    c1.commit()
    c2.abort()
    # c1 has A loaded.
    assert not c1.get_root()['A']._p_is_ghost()
    c1.get_root()['B'].b = 1
    c2.get_root()['A'].a = 2
    c2.commit()
    # Even though A has been changed by c2,
    # c1 has not accessed an attribute of A since
    # the last c1.commit(), so we don't want a ConflictError.
    c1.commit()
    assert c1.get_root()['A']._p_is_ghost()
    c1.get_root()['A'].a # accessed!
    c1.get_root()['B'].b = 1
    c2.get_root()['A'].a = 2
    c2.commit()
    # This time c1 read A after c2's change, so the commit conflicts.
    raises(WriteConflictError, c1.commit)
def lowlevelops(self):
    """The _getattribute / _setattribute / _delattribute /
    _hasattribute helpers bypass the persistence machinery: using them
    must not cause a ghost's state to be loaded."""
    from durus.persistent import _getattribute, _setattribute
    from durus.persistent import _delattribute, _hasattribute
    storage = TempFileStorage()
    connection = Connection(storage)
    root = connection.get_root()
    root._p_set_status_ghost()
    assert not _hasattribute(root, 'data')
    root._p_set_status_ghost()
    raises(AttributeError, _getattribute, root, 'data')
    # Still a ghost: no state load was triggered by the raw access.
    assert root._p_is_ghost()
    _setattribute(root, 'data', 'bogus')
    assert root._p_is_ghost()
    _delattribute(root, 'data')
    assert root._p_is_ghost()
def lowlevelops(self):
    """The _getattribute / _setattribute / _delattribute /
    _hasattribute helpers bypass the persistence machinery: using them
    must not cause a ghost's state to be loaded."""
    from durus.persistent import _getattribute, _setattribute
    from durus.persistent import _delattribute, _hasattribute
    storage = TempFileStorage()
    connection = Connection(storage)
    root = connection.get_root()
    root._p_set_status_ghost()
    assert not _hasattribute(root, 'data')
    root._p_set_status_ghost()
    raises(AttributeError, _getattribute, root, 'data')
    # Still a ghost: no state load was triggered by the raw access.
    assert root._p_is_ghost()
    _setattribute(root, 'data', 'bogus')
    assert root._p_is_ghost()
    _delattribute(root, 'data')
    assert root._p_is_ghost()
class _SlackingAutocompletes(object): def __init__(self): self._conn = Connection(FileStorage(SLACKING_DATA_PATH)) self._data = self._conn.get_root() def get(self): """Return slacking autocomplete list""" return self._data def add(self, name): """Add slacking autocomplete""" if name in self._data: self._data[name] += 1 else: self._data[name] = 1 self._conn.commit()
class DurusStore(SyncStore):
    '''Class for Durus object database frontend.'''

    # URI scheme prefix understood by this store.
    init = 'durus://'

    def __init__(self, engine, **kw):
        super(DurusStore, self).__init__(engine, **kw)
        # self._engine is presumably the file path parsed from the
        # engine URI by the SyncStore base -- confirm against SyncStore.
        self._db = FileStorage(self._engine)
        self._connection = Connection(self._db)
        # sync() commits pending changes to the underlying storage.
        self.sync = self._connection.commit
        self._store = self._connection.get_root()

    def close(self):
        '''Closes all open storage and connections.'''
        self.sync()
        self._db.close()
        super(DurusStore, self).close()
class _SlackingAutocompletes(object): def __init__(self): self._conn = Connection(FileStorage(SLACKING_DATA_PATH)) self._data = self._conn.get_root() def get(self): """Return slacking autocomplete list""" return self._data def add(self, name): """Add slacking autocomplete""" if name in self._data: self._data[name] += 1 else: self._data[name] = 1 self._conn.commit()
def main():
    """Command-line stress driver: connect to a Durus server, seed the
    database if needed, then loop, randomly aborting, verifying, and
    mutating until --max-loops iterations (forever by default)."""
    parser = OptionParser()
    parser.set_description("Stress test a Durus Server")
    parser.add_option(
        "--port", dest="port", default=DEFAULT_PORT, type="int",
        help="Port to listen on. (default=%s)" % DEFAULT_PORT
    )
    parser.add_option(
        "--host", dest="host", default=DEFAULT_HOST,
        help="Host to listen on. (default=%s)" % DEFAULT_HOST
    )
    parser.add_option(
        "--cache_size", dest="cache_size", default=4000, type="int",
        help="Size of client cache (default=4000)"
    )
    parser.add_option(
        "--max-loops", dest="loops", default=None, type="int",
        help="Maximum number of loops before exiting."
    )
    (options, args) = parser.parse_args()
    from durus.logger import logger
    logger.setLevel(5)
    storage = ClientStorage(host=options.host, port=options.port)
    connection = Connection(storage, cache_size=options.cache_size)
    try:
        # Seed the database on first run, then sanity-check it all.
        if "obj" not in connection.get_root():
            init_db(connection)
            verify_db(connection, all=True)
            connection.commit()
    except ConflictError:
        connection.abort()
    n = options.loops
    while n is None or n > 0:
        if n is not None:
            n -= 1
        try:
            if hasattr(sys, "gettotalrefcount"):
                # Debug builds only: watch for reference leaks.
                sys.stdout.write("refs = %s\n" % sys.gettotalrefcount())
            if randbool():
                connection.abort()
            verify_db(connection)
            mutate_db(connection)
            connection.commit()
            maybe_sleep()
        except ConflictError:
            # Another client won the race; drop our changes and retry.
            sys.stdout.write("conflict\n")
            connection.abort()
            maybe_sleep()
def check_more(self):
    """Walk the root through every persistence status and verify that
    abort() returns a modified object to the ghost state."""
    storage = TempFileStorage()
    conn = Connection(storage)
    root = conn.get_root()
    assert not root._p_is_ghost()
    root['a'] = 1
    assert root._p_is_unsaved()
    del root['a']
    conn.abort()
    # Abort ghosts the modified root; its state is gone until reload.
    assert root._p_is_ghost()
    raises(AttributeError, getattr, root, 'a')
    # Cycle manually through the remaining status transitions.
    root._p_set_status_saved()
    assert root._p_is_saved()
    root._p_set_status_unsaved()
    assert root._p_is_unsaved()
    root._p_set_status_ghost()
    assert root._p_is_ghost()
    root._p_set_status_unsaved()
class DurusStore(SyncStore):
    '''Class for Durus object database frontend.'''

    # URI scheme prefix understood by this store.
    init = 'durus://'

    def __init__(self, engine, **kw):
        super(DurusStore, self).__init__(engine, **kw)
        # self._engine is presumably the file path parsed from the
        # engine URI by the SyncStore base -- confirm against SyncStore.
        self._db = FileStorage(self._engine)
        self._connection = Connection(self._db)
        # sync() commits pending changes to the underlying storage.
        self.sync = self._connection.commit
        self._store = self._connection.get_root()

    def close(self):
        '''Closes all open storage and connections.'''
        self.sync()
        self._db.close()
        super(DurusStore, self).close()
def check_more(self):
    """Exercise the root object's full status lifecycle: unsaved on
    modification, ghosted by abort, then each manual transition."""
    connection = Connection(TempFileStorage())
    top = connection.get_root()
    assert not top._p_is_ghost()
    top['a'] = 1
    assert top._p_is_unsaved()
    del top['a']
    connection.abort()
    # The aborted root is a ghost with no loaded attributes.
    assert top._p_is_ghost()
    raises(AttributeError, getattr, top, 'a')
    top._p_set_status_saved()
    assert top._p_is_saved()
    top._p_set_status_unsaved()
    assert top._p_is_unsaved()
    top._p_set_status_ghost()
    assert top._p_is_ghost()
    top._p_set_status_unsaved()
class DurusTest(UTest):

    def _pre(self):
        # Fresh in-memory database for each test method.
        self.connection = Connection(MemoryStorage())

    def _post(self):
        del self.connection

    def a(self):
        """Filling a BTree to the point of a bnode split adds the new
        nodes to the connection cache."""
        bt = self.connection.get_root()['bt'] = BTree()
        t = bt.root.minimum_degree
        # Only the root dict is cached so far.
        assert self.connection.get_cache_count() == 1
        # Fill the root bnode to capacity (2t - 1 keys).
        for x in range(2 * t - 1):
            bt.add(x)
        self.connection.commit()
        assert self.connection.get_cache_count() == 3
        # One more key forces a split: two more objects cached.
        bt.add(2 * t - 1)
        self.connection.commit()
        assert self.connection.get_cache_count() == 5
        bt.note_change_of_bnode_containing_key(1)
def b(self):
    """After packing away one deleted object, its oid is handed out
    again before any fresh oid (ShelfStorage reuse semantics)."""
    f = File(prefix='shelftest')
    name = f.get_name()
    f.close()
    s = FileStorage(name)
    c = Connection(s)
    r = c.get_root()
    for x in range(10):
        r["a%s" % x] = Persistent()
    c.commit()
    deleted_oid = r['a9']._p_oid
    del r['a9']
    c.commit()
    c.pack()
    c.abort()
    # Pack leaves 10 records: root plus the 9 surviving persistents.
    assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 10
    new_oid = s.new_oid()
    assert new_oid == deleted_oid
    # After the reused oid is exhausted, allocation continues at 11.
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(11)
def b(self):
    """After packing away one deleted object, its oid is handed out
    again before any fresh oid (ShelfStorage reuse semantics)."""
    f = File(prefix='shelftest')
    name = f.get_name()
    f.close()
    s = FileStorage(name)
    c = Connection(s)
    r = c.get_root()
    for x in range(10):
        r["a%s" % x] = Persistent()
    c.commit()
    deleted_oid = r['a9']._p_oid
    del r['a9']
    c.commit()
    c.pack()
    c.abort()
    # Pack leaves 10 records: root plus the 9 surviving persistents.
    assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 10
    new_oid = s.new_oid()
    assert new_oid == deleted_oid
    # After the reused oid is exhausted, allocation continues at 11.
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(11)
class DurusTest(UTest):

    def _pre(self):
        # Fresh in-memory database for each test method.
        self.connection = Connection(MemoryStorage())

    def _post(self):
        del self.connection

    def a(self):
        """Filling a BTree to the point of a bnode split adds the new
        nodes to the connection cache."""
        bt = self.connection.get_root()['bt'] = BTree()
        t = bt.root.minimum_degree
        # Only the root dict is cached so far.
        assert self.connection.get_cache_count() == 1
        # Fill the root bnode to capacity (2t - 1 keys).
        for x in range(2 * t - 1):
            bt.add(x)
        self.connection.commit()
        assert self.connection.get_cache_count() == 3
        # One more key forces a split: two more objects cached.
        bt.add(2 * t - 1)
        self.connection.commit()
        assert self.connection.get_cache_count() == 5
        bt.note_change_of_bnode_containing_key(1)
def a(self):
    """Packed-away objects become unreadable ghosts, and their oids are
    reissued newest-first before any fresh oid."""
    f = File(prefix='shelftest')
    name = f.get_name()
    f.close()
    s = FileStorage(name)
    c = Connection(s)
    r = c.get_root()
    for x in range(10):
        r["a%s" % x] = Persistent()
    c.commit()
    deleted_oids = [
        r['a0']._p_oid, r['a2']._p_oid, r['a7']._p_oid, r['a8']._p_oid
    ]
    del r['a0']
    del r['a2']
    del r['a7']
    del r['a8']
    c.commit()
    c.pack()
    c.abort()
    assert c.get(deleted_oids[0])._p_is_ghost()
    assert c.get(deleted_oids[1])._p_is_ghost()
    # Loading a packed-away object's state fails.
    raises(KeyError, getattr, c.get(deleted_oids[0]), 'a')
    # Root plus the 6 surviving persistents remain.
    assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 7
    c.commit()
    c.pack()
    # Relinquished oids come back newest-first...
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-1], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-2], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-3], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-4], (new_oid, deleted_oids)
    # ...followed by fresh oids.
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(11), repr(new_oid)
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(12), repr(new_oid)
def c(self):
    # After the root dict is cleared and the file packed, oid allocation
    # starts over from 1 (oid 0 is the root itself).
    tmp = File(prefix='shelftest')
    db_path = tmp.get_name()
    tmp.close()
    storage = FileStorage(db_path)
    conn = Connection(storage)
    root = conn.get_root()
    for index in range(10):
        root["a%s" % index] = Persistent()
    conn.commit()
    deleted_oid = root['a9']._p_oid
    del root['a9']
    conn.commit()
    conn.pack()
    conn.abort()
    root.clear()
    conn.commit()
    conn.pack()
    conn.abort()
    for expected in (1, 2):
        allocated = storage.new_oid()
        assert allocated == int8_to_str(expected), repr(allocated)
def c(self):
    # NOTE(review): this is an exact duplicate of the preceding test `c`;
    # confirm whether both copies are intentional.
    # After the root dict is cleared and the file packed, oid allocation
    # starts over from 1 (oid 0 is the root itself).
    f = File(prefix='shelftest')
    name = f.get_name()
    f.close()
    s = FileStorage(name)
    c = Connection(s)
    r = c.get_root()
    for x in range(10):
        r["a%s" % x] = Persistent()
    c.commit()
    # NOTE(review): deleted_oid is computed but never used afterwards.
    deleted_oid = r['a9']._p_oid
    del r['a9']
    c.commit()
    c.pack()
    c.abort()
    r.clear()
    c.commit()
    c.pack()
    c.abort()
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(1), repr(new_oid)
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(2), repr(new_oid)
def a(self):
    # Mirrors the KeyError variant of this test elsewhere in the file, but
    # here loading a packed-away ghost raises ReadConflictError instead --
    # presumably a difference in the storage flavor under test; confirm.
    f = File(prefix='shelftest')
    name = f.get_name()
    f.close()
    s = FileStorage(name)
    c = Connection(s)
    r = c.get_root()
    for x in range(10):
        r["a%s" % x] = Persistent()
    c.commit()
    # Remember the oids of the objects about to be removed.
    deleted_oids = [
        r['a0']._p_oid, r['a2']._p_oid, r['a7']._p_oid, r['a8']._p_oid]
    del r['a0']
    del r['a2']
    del r['a7']
    del r['a8']
    c.commit()
    c.pack()
    c.abort()
    # The removed objects survive in the cache only as ghosts.
    assert c.get(deleted_oids[0])._p_is_ghost()
    assert c.get(deleted_oids[1])._p_is_ghost()
    # Loading a ghost whose record was packed away fails.
    raises(ReadConflictError, getattr, c.get(deleted_oids[0]), 'a')
    # Root plus the six survivors: seven records remain.
    assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 7
    c.commit()
    c.pack()
    # Freed oids are re-issued newest-first...
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-1], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-2], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-3], (new_oid, deleted_oids)
    new_oid = s.new_oid()
    assert new_oid == deleted_oids[-4], (new_oid, deleted_oids)
    # ...then fresh allocation resumes past the highest oid ever used.
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(11), repr(new_oid)
    new_oid = s.new_oid()
    assert new_oid == int8_to_str(12), repr(new_oid)
class DurusFile(object): def __init__(self, file, new): self.__file = file if new: if os.path.exists(self.__file): os.remove(self.__file) self.__connection = Connection(FileStorage(self.__file)) self.__root = self.__connection.get_root() def close(self): self.__connection.get_storage().close() def getBudget(self): if self.__root.has_key("baseversion") and \ globalVars.baseversion == self.__root["baseversion"]: return self.__root["budget"] else: print _("Incorrent Base version") return None def setBudget(self, budget): self.__root["budget"] = budget self.__root["baseversion"] = globalVars.baseversion self.__connection.commit()
def showExperimentTimeStamps():
    """Return the keys (experiment timestamps) stored in soda_results.durus."""
    with closing(FileStorage("soda_results.durus")) as storage:
        connection = Connection(storage)
        return connection.get_root().keys()
class PersistentListTest(UTest):
    """Unit tests for PersistentList: list semantics plus Durus
    change-tracking (_p_is_unsaved) on a memory-backed connection."""

    def _pre(self):
        # Fresh in-memory database and root for every test.
        self.connection = Connection(MemoryStorage())
        self.root = self.connection.get_root()

    def no_arbitrary_attributes(self):
        # PersistentList rejects attributes outside its declared slots.
        p = PersistentList()
        raises(AttributeError, setattr, p, 'bogus', 1)

    def nonzero(self):
        # Empty list is falsy; mutation after commit marks it unsaved.
        p = PersistentList()
        assert not p
        self.root['a'] = p
        self.connection.commit()
        p.append(1)
        assert p
        assert p._p_is_unsaved()

    def iter(self):
        p = PersistentList()
        assert list(p) == []
        p.extend([2, 3, 4])
        assert list(p) == [2, 3, 4]

    def insert_again(self):
        # Repeated assignment to the same index keeps the last value.
        p = PersistentList([5, 6, 7])
        p[1] = 2
        p[1] = 3
        assert p[1] == 3

    def contains(self):
        # Constructible from any iterable (here a generator expression).
        p = PersistentList(x for x in interval(5))
        assert 2 in p
        assert -1 not in p

    def cmp(self):
        # Rich comparisons work against PersistentList and plain list alike.
        p = PersistentList(interval(10))
        p2 = PersistentList(interval(10))
        assert p == p2
        assert p == list(p2)
        assert p <= p2
        assert p >= p2
        assert not p < p2
        assert not p > p2
        p.append(3)
        assert p != p2

    def delete(self):
        # Item deletion marks the committed list as unsaved again.
        p = PersistentList(x for x in interval(10))
        self.root['x'] = p
        self.connection.commit()
        del p[1]
        assert p._p_is_unsaved()

    def pop(self):
        p = PersistentList(x for x in interval(10))
        p.pop()
        assert 9 not in p

    def slice(self):
        # Slice assignment accepts lists, PersistentLists, and tuples.
        p = PersistentList(x for x in interval(10))
        p[:] = [2, 3]
        assert len(p) == 2
        assert p[-1:] == [3]
        p[1:] = PersistentList(interval(2))
        assert p == [2, 0, 1], p.data
        p[:] = (3, 4)
        assert p == [3, 4]
        del p[:1]
        assert p == [4]

    def sort(self):
        # reverse() works in place; sorted() returns a plain list.
        p = PersistentList(x for x in interval(10))
        p.reverse()
        assert p == list(reversed(interval(10)))
        p = sorted(p)
        assert p == interval(10)

    def arith(self):
        # Concatenation and repetition against lists, tuples, and peers.
        # (interval() presumably returns a plain list -- it is compared
        # and concatenated as one throughout.)
        p = PersistentList(interval(3))
        p2 = PersistentList(interval(3))
        assert p + p2 == interval(3) + interval(3)
        assert interval(3) + p2 == interval(3) + interval(3)
        assert tuple(interval(3)) + p2 == interval(3) + interval(3)
        assert p + interval(3) == interval(3) + interval(3)
        assert p + tuple(interval(3)) == interval(3) + interval(3)
        assert p * 2 == interval(3) + interval(3)
        p += p2
        assert p == interval(3) + interval(3)
        p2 += interval(3)
        assert p == interval(3) + interval(3)
        p = PersistentList(interval(3))
        p *= 2
        assert p == interval(3) + interval(3)

    def other(self):
        # insert / count / index / remove / extend round-trip.
        p = PersistentList()
        p.insert(0, 2)
        assert p == [2]
        assert p.count(0) == 0
        assert p.count(2) == 1
        assert p.index(2) == 0
        p.remove(2)
        p.extend(PersistentList(interval(3)))
        assert p == interval(3)
class CUsers(Persistent):
    """Registry of users and their feed subscriptions, persisted via Durus.

    `data` maps user jids to CUser objects; `feeds` maps feed urls to
    CFeed objects. Both live in the Durus root and are committed through
    save().
    """

    def __init__(self):
        # Durus file storage; containers are created on first run.
        self.conndurus = Connection(FileStorage(CONFIG['durus_file']))
        root = self.conndurus.get_root()
        if not root.get('users'):
            root['users'] = PersistentDict()  # {user jid: CUser}
        if not root.get('feeds'):
            root['feeds'] = CFeeds()
        self.data = root['users']
        self.feeds = root['feeds']
        self.save()

    def save(self):
        # Commit all pending changes to the Durus storage.
        self.conndurus.commit()

    def __getitem__(self, key):
        return self.data.get(key)

    def __len__(self):
        return len(self.data)

    def add_feed(self, jid, feed=None):
        """Add an user if not exists and subscribe the feed url, if not
        exists. Returns True on success.

        BUG FIX: the original returned the unassigned local `oku` when
        `feed` was falsy, raising NameError; the user-only path now
        reports success explicitly.
        """
        fn = True  # first notification?
        if not self.data.get(jid):
            self.data[jid] = CUser(jid)
        # Reordered so .get() is never called with a falsy feed key.
        if feed and not self.feeds.get(feed):
            self.feeds[feed] = CFeed(feed)
            fn = False
        oku = True  # default: adding/ensuring the user succeeded
        okf = True
        if feed:
            oku = self.data[jid].subs_feed(self.feeds[feed], fn)
            okf = self.feeds[feed].add_user(self.data[jid])
        self.save()
        return oku and okf

    def del_feed(self, jid, feed):
        """Delete an user subscription; True only when both sides succeed."""
        tempfeed = self.feeds.get(feed)
        tempuser = self.data.get(jid)
        if tempuser:
            oku = tempuser.unsubs_feed(tempfeed)
        else:
            oku = False
        if tempfeed:
            okf = tempfeed.del_user(tempuser)
        else:
            okf = False
        self.save()
        return oku and okf

    def notification_method(self, jid):
        """Return 'how the user will receive the notifications'."""
        tempuser = self.data.get(jid)
        if not tempuser:
            return "-"
        hl = tempuser.getConfig('useheadline')
        # Headlines are the default when the option is unset or "on".
        if not hl or hl == "on":
            return "by headlines"
        return "by chat message"

    def notification_when(self, jid):
        """Return 'when the user wants to receive notifications'."""
        tempuser = self.data.get(jid)
        if not tempuser:
            return "-"
        oa = tempuser.getConfig('onlyavailable')
        # "always" is the default when the option is unset or "off".
        if not oa or oa == "off":
            return "always"
        return "available only, or ready for chat"

    def len_feeds(self, jid):
        # Subscription count as a string; "0" for unknown users.
        tempuser = self.data.get(jid)
        if tempuser:
            return str(len(tempuser))
        return "0"

    def setup(self, jid, action, mode):
        # Create the user on demand, then forward the config change.
        tempuser = self.data.get(jid)
        if not tempuser:
            tempuser = CUser(jid)
            self.data[jid] = tempuser
        tempuser.setup(action, mode)
        return True

    def get(self, key):
        return self.data.get(key)

    def keys(self):
        return self.data.keys()

    def values(self):
        return self.data.values()
def check_alternative_root(self):
    # A custom root_class yields a root of that type; reopening the same
    # storage with root_class=None must also succeed.
    conn_a = Connection(self._get_storage(), root_class=Persistent)
    the_root = conn_a.get_root()
    assert isinstance(the_root, Persistent)
    conn_b = Connection(conn_a.storage, root_class=None)