def pack_storage_main():
    """Command-line entry point: pack a Durus storage (file or server)."""
    parser = OptionParser()
    parser.set_description("Packs a Durus storage.")
    parser.add_option(
        '--file', dest="file", default=None,
        help="If this is not given, the storage is through a Durus server.")
    parser.add_option(
        '--port', dest="port", default=DEFAULT_PORT, type="int",
        help="Port the server is on. (default=%s)" % DEFAULT_PORT)
    parser.add_option(
        '--host', dest="host", default=DEFAULT_HOST,
        help="Host of the server. (default=%s)" % DEFAULT_HOST)
    options = parser.parse_args()[0]
    if options.file is None:
        # No file given: pack through a running Durus server.
        wait_for_server(options.host, options.port)
        storage = ClientStorage(host=options.host, port=options.port)
    else:
        storage = FileStorage(options.file)
    Connection(storage).pack()
def interactive_client(file, address, cache_size, readonly, repair, startup):
    """Open an interactive Python console on a Durus storage.

    file: path to a FileStorage; when falsy, connect to a server at address.
    startup: optional path of a script exec'd into the console namespace.
    """
    if file:
        storage = FileStorage(file, readonly=readonly, repair=repair)
        description = file
    else:
        # Client mode: wait until the server is reachable, then connect.
        socket_address = SocketAddress.new(address)
        wait_for_server(address=socket_address)
        storage = ClientStorage(address=socket_address)
        description = socket_address
    connection = Connection(storage, cache_size=cache_size)
    # Register a real module as the console namespace so pickling and
    # introspection inside the console behave like a normal module.
    console_module = ModuleType('__console__')
    sys.modules['__console__'] = console_module
    namespace = {'connection': connection,
                 'root': connection.get_root(),
                 'get': connection.get,
                 'sys': sys,
                 'os': os,
                 'int8_to_str': int8_to_str,
                 'str_to_int8': str_to_int8,
                 'pp': pprint}
    vars(console_module).update(namespace)
    configure_readline(
        vars(console_module), os.path.expanduser("~/.durushistory"))
    console = InteractiveConsole(vars(console_module))
    if startup:
        # Python-3-safe replacement for execfile: open, compile and exec
        # the startup file inside the console namespace.
        src = '''with open('{fn}', 'rb') as _:
    _ = compile(_.read(), '{fn}', 'exec')
    exec(globals().pop('_'))
'''.format(fn = os.path.expanduser(startup)).rstrip()
        console.runsource(src, '-stub-', 'exec')
    help = (' connection -> the Connection\n'
            ' root -> the root instance')
    console.interact('Durus %s\n%s' % (description, help))
def end_protocol_error(self):
    """A malformed server reply during commit must raise ProtocolError."""
    storage = ClientStorage(address=self.address)
    connection = Connection(storage)
    root = connection.get_root()
    # Swap the live socket for one yielding a truncated/garbage response.
    storage.s = FakeSocket('\0\0\0\0?')
    root._p_note_change()
    raises(ProtocolError, connection.commit)
def run_trials():
    """Spawn simulation trials, gather results and persist them in durus.

    NOTE: Python 2 code (print statements, raw_input).
    """
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)
    # One job per trial; each runs `gens` generations.
    jids = pool.map(f, [gens]*numTrials)
    print "Done spawning trials. Retrieving results..."
    results = pool.map(cloud_result, jids)
    # One row per trial, one column per generation (plus the initial state).
    firstLocusFreqsHists = zeros((numTrials, gens+1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials, gens+1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."
    raw_input()
    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result
    # Persist both matrices under the current unix timestamp as key.
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        conn.get_root()[str(int(floor(time.time())))] = (
            firstLocusFreqsHists, lastLocusFreqsHists)
        conn.commit()
    pool.close()
    pool.join()
def test_delete(self):
    """Deleting a key must mark a committed PersistentDict unsaved again."""
    conn = Connection(MemoryStorage())
    mapping = PersistentDict((key, True) for key in range(10))
    conn.root['x'] = mapping
    conn.commit()
    # After the commit the dict is saved; a deletion dirties it.
    del mapping[1]
    assert mapping._p_is_unsaved()
def run_trials():
    """Spawn simulation trials, gather results and persist them in durus.

    NOTE: Python 2 code (print statements, raw_input).
    """
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)
    # One job per trial; each runs `gens` generations.
    jids = pool.map(f, [gens] * numTrials)
    print "Done spawning trials. Retrieving results..."
    results = pool.map(cloud_result, jids)
    # One row per trial, one column per generation (plus the initial state).
    firstLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."
    raw_input()
    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result
    # Persist both matrices under the current unix timestamp as key.
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        conn.get_root()[str(int(floor(time.time())))] = (
            firstLocusFreqsHists, lastLocusFreqsHists)
        conn.commit()
    pool.close()
    pool.join()
def render_results(timestamp=None):
    """Load a stored (first, last) locus-frequency pair and plot both.

    timestamp: key into the durus root; defaults to the most recent run.
    NOTE: Python 2 code (print statement).
    """
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        db = conn.get_root()
        if not timestamp:
            # Keys are stringified unix timestamps, so max sorts last.
            timestamp = sorted(db.keys())[-1]
        firstLocusFreqsHists, lastLocusFreqsHists = db[timestamp]
        print "Done deserializing results. Plotting..."
        x = [(2, 'First', firstLocusFreqsHists, "effective"),
             (3, 'Last', lastLocusFreqsHists, "non-effective")]
        for i, pos, freqsHists, filename in x :
            # Plot only the first 801 generations.
            freqsHists = freqsHists[:, :801]
            f = figure(i)
            hold(False)
            plot(transpose(freqsHists), color='grey')
            hold(True)
            maxGens = freqsHists.shape[1] - 1
            # Dashed reference lines at 5% and 95% frequency.
            plot([0, maxGens], [.05, .05], 'k--')
            plot([0, maxGens], [.95, .95], 'k--')
            axis([0, maxGens, 0, 1])
            xlabel('Generation')
            ylabel('1-Frequency of the ' + pos + ' Locus')
            f.canvas.draw()
            f.show()
            savefig(filename + '.png', format='png', dpi=200)
def interactive_client(file, host, port, cache_size, readonly, repair, startup):
    """Open an interactive console on a Durus storage (file or server).

    NOTE: Python 2 era code (uses execfile and the p64/u64 helpers).
    """
    if file:
        storage = FileStorage(file, readonly=readonly, repair=repair)
        description = file
    else:
        # Client mode: block until the server accepts connections.
        wait_for_server(host, port)
        storage = ClientStorage(host=host, port=port)
        description = "%s:%s" % (host, port)
    connection = Connection(storage, cache_size=cache_size)
    # Convenience bindings exposed to the interactive session.
    namespace = {'connection': connection,
                 'root': connection.get(0),
                 'get': connection.get,
                 'sys': sys,
                 'os': os,
                 'p64': p64,
                 'u64': u64,
                 'pp': pprint}
    configure_readline(namespace, os.path.expanduser("~/.durushistory"))
    console = InteractiveConsole(namespace)
    if startup:
        # NOTE: execfile exists only in Python 2.
        console.runsource('execfile("%s")' % os.path.expanduser(startup))
    help = (' connection -> the connection\n'
            ' root -> get(0)\n'
            ' get(oid) -> get an object\n'
            ' pp(object) -> pretty-print')
    console.interact('Durus (%s)\n%s' % (description, help))
def render_results(timestamp=None):
    """Load a stored (first, last) locus-frequency pair and plot both.

    timestamp: key into the durus root; defaults to the most recent run.
    NOTE: Python 2 code (print statement).
    """
    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        db = conn.get_root()
        if not timestamp:
            # Keys are stringified unix timestamps, so max sorts last.
            timestamp = sorted(db.keys())[-1]
        firstLocusFreqsHists, lastLocusFreqsHists = db[timestamp]
        print "Done deserializing results. Plotting..."
        x = [(2, 'First', firstLocusFreqsHists, "effective"),
             (3, 'Last', lastLocusFreqsHists, "non-effective")]
        for i, pos, freqsHists, filename in x:
            # Plot only the first 801 generations.
            freqsHists = freqsHists[:, :801]
            f = figure(i)
            hold(False)
            plot(transpose(freqsHists), color='grey')
            hold(True)
            maxGens = freqsHists.shape[1] - 1
            # Dashed reference lines at 5% and 95% frequency.
            plot([0, maxGens], [.05, .05], 'k--')
            plot([0, maxGens], [.95, .95], 'k--')
            axis([0, maxGens, 0, 1])
            xlabel('Generation')
            ylabel('1-Frequency of the ' + pos + ' Locus')
            f.canvas.draw()
            f.show()
            savefig(filename + '.png', format='png', dpi=200)
def check_write_conflict(self):
    """A STATUS_INVALID reply from the server must raise WriteConflictError."""
    storage = ClientStorage(address=self.address)
    connection = Connection(storage)
    root = connection.get_root()
    # Fake socket whose canned reply reports the commit as invalid.
    storage.s = FakeSocket('\0\0\0\0', STATUS_INVALID)
    root._p_note_change()
    raises(WriteConflictError, connection.commit)
def __init__(self, file, new):
    """Open the Durus database at *file*, recreating it when *new* is true."""
    self.__file = file
    if new and os.path.exists(self.__file):
        # Start from a clean database file.
        os.remove(self.__file)
    self.__connection = Connection(FileStorage(self.__file))
    self.__root = self.__connection.get_root()
def interactive_client(file, address, cache_size, readonly, repair, startup,
                       storage_class=None):
    """Open an interactive Python console on a Durus storage.

    file: path of a storage file (opened via get_storage); when falsy,
    connect as a client to a server at *address*.
    startup: optional path of a script exec'd into the console namespace.
    """
    if file:
        storage = get_storage(file, storage_class=storage_class,
                              readonly=readonly, repair=repair)
        description = file
    else:
        # Client mode: wait until the server is reachable, then connect.
        socket_address = SocketAddress.new(address)
        wait_for_server(address=socket_address)
        storage = ClientStorage(address=socket_address)
        description = socket_address
    connection = Connection(storage, cache_size=cache_size)
    # Register a real module as the console namespace so pickling and
    # introspection inside the console behave like a normal module.
    console_module = ModuleType('__console__')
    sys.modules['__console__'] = console_module
    namespace = {'connection': connection,
                 'root': connection.get_root(),
                 'get': connection.get,
                 'sys': sys,
                 'os': os,
                 'int8_to_str': int8_to_str,
                 'str_to_int8': str_to_int8,
                 'pp': pprint}
    vars(console_module).update(namespace)
    configure_readline(
        vars(console_module), os.path.expanduser("~/.durushistory"))
    console = InteractiveConsole(vars(console_module))
    if startup:
        # BUG FIX: execfile() was removed in Python 3.  Use the same
        # open/compile/exec stub as the py3-compatible variant of this
        # function elsewhere in the codebase.
        src = '''with open('{fn}', 'rb') as _:
    _ = compile(_.read(), '{fn}', 'exec')
    exec(globals().pop('_'))
'''.format(fn=os.path.expanduser(startup)).rstrip()
        console.runsource(src, '-stub-', 'exec')
    help = (' connection -> the Connection\n'
            ' root -> the root instance')
    console.interact('Durus %s\n%s' % (description, help))
def delete(self):
    """Deleting a key must mark a committed PersistentDict unsaved again."""
    conn = Connection(MemoryStorage())
    mapping = PersistentDict((key, True) for key in range(10))
    conn.root['x'] = mapping
    conn.commit()
    # After the commit the dict is saved; a deletion dirties it.
    del mapping[1]
    assert mapping._p_is_unsaved()
def __init__(self):
    """Open the projects database, seeding a "Default" project on first run."""
    self._conn = Connection(FileStorage(PROJECTS_DATA_PATH))
    self._data = self._conn.get_root()
    if not self._data.keys():
        # Fresh database: create the default project with an empty
        # autocomplete map and persist it right away.
        self._data["Default"] = PersistentDict(autocomplete=PersistentDict())
        self.sync()
def get_config():
    """Return the persistent "_pconfig" configuration, creating it on demand.

    Opens /var/tmp/test.durus, installs a GenericConfiguration with a
    "default" SECTION the first time, and returns the stored config.
    """
    connection = Connection(FileStorage("/var/tmp/test.durus"))
    root = connection.get_root()
    # BUG FIX: dict.has_key() was removed in Python 3; the `in` operator
    # is the portable spelling with identical behavior.
    if "_pconfig" not in root:
        cf = GenericConfiguration()
        root["_pconfig"] = cf
        root["_pconfig"]["default"] = SECTION()
        connection.commit()
    return root["_pconfig"]
def open_document(db_path):
    """Open (creating if necessary) the Document stored at *db_path*.

    Runs pending schema updates before handing the document back.
    """
    connection = Connection(FileStorage(db_path))
    root = connection.get_root()
    if 'doc' not in root:
        # Brand-new database: install an empty document at the current
        # schema version.
        root['doc'] = Document()
        root['version'] = updates.current_version
        connection.commit()
    updates.do_updates(connection)
    return DocumentHandler(connection).doc
def check_storage_tools(self):
    """Exercise the storage introspection helpers and Storage's abstract API."""
    connection = Connection(self._get_storage())
    root = connection.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    connection.commit()
    # Both new objects (oids 1 and 2) are referenced only by the root (oid 0).
    index = get_reference_index(connection.get_storage())
    assert index == {
        int8_to_str(1): [int8_to_str(0)],
        int8_to_str(2): [int8_to_str(0)]
    }
    # One PersistentDict (the root) and two Persistent instances.
    census = get_census(connection.get_storage())
    assert census == {
        as_bytes('PersistentDict'): 1,
        as_bytes('Persistent'): 2
    }
    references = list(
        gen_referring_oid_record(connection.get_storage(), int8_to_str(1)))
    assert references == [(int8_to_str(0),
                           connection.get_storage().load(int8_to_str(0)))]
    # The abstract Storage base class must refuse direct use.
    class Fake(object):
        pass
    s = Fake()
    s.__class__ = Storage
    raises(RuntimeError, s.__init__)
    raises(NotImplementedError, s.load, None)
    raises(NotImplementedError, s.begin)
    raises(NotImplementedError, s.store, None, None)
    raises(NotImplementedError, s.end)
    raises(NotImplementedError, s.sync)
    g = s.gen_oid_record()
    raises(NotImplementedError, next, g)
class _Projects(object):
    """
    Class for managing project and autocompletes
    for each project
    """
    # NOTE: Python 2 code (unicode builtin, tuple-parameter lambda).

    def __init__(self):
        # Open the durus projects database; seed a "Default" project the
        # first time the database is created.
        self._conn = Connection(FileStorage(PROJECTS_DATA_PATH))
        self._data = self._conn.get_root()
        if not len(self._data.keys()):
            self._data["Default"] = PersistentDict(
                autocomplete=PersistentDict())
            self.sync()

    def get(self):
        """Return projects list"""
        return self._data.keys()

    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(
            autocomplete=PersistentDict())
        self.sync()

    def remove(self, name):
        """Remove project"""
        # NOTE(review): no sync() here -- the deletion is only persisted by
        # a later sync()/__del__; confirm this is intended.
        del self._data[unicode(name)]

    def getAutocomleteList(self, name, appendix={}):
        """
        Return autocomplete list for project, if appendix
        autocompletes was specified merge it with
        project autocompletes.
        """
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        # Sort by usage count descending (count first in the key tuple).
        sortedDict = sorted(autocompDict.items(),
                            key=lambda (k, v): (v, k),
                            reverse=True)
        return [a[0] for a in sortedDict]

    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        # Bump the task's usage counter, creating it on first use.
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()

    def sync(self):
        # Commit pending changes to the durus storage.
        self._conn.commit()

    def __del__(self):
        self.sync()
def __init__(self):
    """Open the durus storage and make sure both root containers exist."""
    self.conndurus = Connection(FileStorage(CONFIG['durus_file']))
    db = self.conndurus.get_root()
    # Create missing (or falsy/empty) top-level containers on first run.
    if not db.get('users'):
        db['users'] = PersistentDict()  # {user jid: CUser}
    if not db.get('feeds'):
        db['feeds'] = CFeeds()
    self.data = db['users']
    self.feeds = db['feeds']
    self.save()
class _Settings(object):
    """Settings singleton object.

    Stored values live in a durus database; _defaultSettings supplies the
    fallback for keys that were never stored.
    """

    _appName = "MacTimeLog"

    # Built-in fallbacks used when a key has never been stored.
    _defaultSettings = {
        "dateFormat": "%m-%d-%Y %H:%M",
        "timeFormat": "%H:%M",
        "logDateTimeFormat": "at %H:%M",
        "workEndTime": "06:00",
        "workDayLength": 3600 * 8,
        "timerInterval": 1,
        "showWorkTill": False,
        "showDateTime": False,
        "logPath": "%s/%s" % (settingsFolder(_appName), "log.txt"),
        "projectsDataPath": "%s/%s" % (settingsFolder(_appName), "projects"),
        "slackingDataPath": "%s/%s" % (settingsFolder(_appName), "slacking"),
        "logEditCommand": "open -a TextEdit \"%s\"",
        "projectSeparator": "::",
        "selectedProject": "Default",
        "startPlaceholder": "__start__",
        "showNotification": False,
        "notificationTime": 40,
        "notificationRepeatTime": 10,
        "soundOnNotification": False,
        "showHelpMessageOnStart": True
    }

    _globalSettings = {}

    def __init__(self):
        self._settingsFile = "%s/%s" % (settingsFolder(self._appName),
                                        "settings")
        self._conn = Connection(FileStorage(self._settingsFile))
        # The durus root mapping is the live settings store.
        self._globalSettings = self._conn.get_root()

    def get(self, key):
        """Return setting value by key: stored value, else default, else None."""
        if key in self._globalSettings:
            return self._globalSettings[key]
        if key in self._defaultSettings:
            return self._defaultSettings[key]
        # Previously fell through a bare `else: pass`; make the unknown-key
        # result explicit.
        return None

    def set(self, key, value):
        """Set setting value by key"""
        self._globalSettings[key] = value

    def sync(self):
        # Persist pending changes to the durus storage.
        self._conn.commit()

    def __del__(self):
        self.sync()
def check_touch_every_reference(self):
    """touch_every_reference must dirty matching objects and their holders."""
    connection = Connection(self._get_storage())
    root = connection.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    from durus.persistent_list import PersistentList
    root['b'].c = PersistentList()
    connection.commit()
    # Dirty every object referring to a PersistentList.
    touch_every_reference(connection, 'PersistentList')
    # The list and the object holding it are unsaved ...
    assert root['b']._p_is_unsaved()
    assert root['b'].c._p_is_unsaved()
    # ... but the root, which holds no PersistentList directly, is not.
    assert not root._p_is_unsaved()
    assert len(list(connection.get_cache())) == 4
class _Projects(object):
    """
    Class for managing project and autocompletes
    for each project
    """
    # NOTE: Python 2 code (unicode builtin, tuple-parameter lambda).

    def __init__(self):
        # Open the durus projects database (path comes from Settings);
        # seed a "Default" project the first time it is created.
        self._conn = Connection(FileStorage(Settings.get("projectsDataPath")))
        self._data = self._conn.get_root()
        if not len(self._data.keys()):
            self._data["Default"] = PersistentDict(autocomplete=PersistentDict())
            self.sync()

    def get(self):
        """Return projects list"""
        return self._data.keys()

    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(autocomplete=PersistentDict())
        self.sync()

    def remove(self, name):
        """Remove project"""
        # NOTE(review): no sync() here -- the deletion is only persisted by
        # a later sync()/__del__; confirm this is intended.
        del self._data[unicode(name)]

    def getAutocomleteList(self, name, appendix={}):
        """
        Return autocomplete list for project, if appendix
        autocompletes was specified merge it with
        project autocompletes.
        """
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        # Sort by usage count descending (count first in the key tuple).
        sortedDict = sorted(autocompDict.items(),
                            key=lambda (k, v):(v, k),
                            reverse=True)
        return [a[0] for a in sortedDict]

    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        # Bump the task's usage counter, creating it on first use.
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()

    def sync(self):
        # Commit pending changes to the durus storage.
        self._conn.commit()

    def __del__(self):
        self.sync()
class TestBackend:
    """Minimal durus-backed key/value store used by tests.

    mode "w" opens for writing, "r" for reading; both use the same
    FileStorage/Connection pair.
    """

    def __init__(self, filename, mode):
        self.mode = mode
        # Both modes opened the storage identically, so the duplicated
        # branches were merged.
        self.storage = FileStorage(filename)
        self.connection = Connection(self.storage)
        self.test_db_items = self.connection.get_root()
        # BUG FIX: previously these were only set in "r" mode, so calling
        # iteritems() after a "w" open raised AttributeError.
        self.next_rec_num = 0  # Initialise next record counter
        self.num_records = len(self.test_db_items)

    def __setitem__(self, key, value):
        self.test_db_items[key] = value

    def __getitem__(self, key):
        # Lookups use the string form of the key.
        return self.test_db_items[str(key)]

    def __len__(self):
        return len(self.test_db_items)

    def first(self):
        return self.test_db_items[0]

    def iteritems(self):
        # Resumable generator over records [next_rec_num, num_records).
        while self.next_rec_num < self.num_records:
            value = self.test_db_items[self.next_rec_num]
            self.next_rec_num += 1
            yield value

    def close(self):
        # Persist everything, then release the file.
        self.connection.commit()
        self.storage.close()

    def getTestDBItems(self):
        return self.test_db_items.values()
class durusCommit(object):
    "A class to simply commit a given data with date tuple as key"

    def __init__(self, folder, dbFileName):
        # Full path of the .durus database file.
        self.dbFileName = folder + '/' + dbFileName + '.durus'
        self.con = Connection(FileStorage(self.dbFileName))

    def getRoot(self):
        """Return the root persistent mapping."""
        return self.con.get_root()

    def connectionCommit(self):
        """Commit any pending changes."""
        self.con.commit()

    def makeChapterCommit(self, currentdate, book, chapterDict):
        """Store *chapterDict* under root[book][date-key].

        *currentdate* may be a (year, month, day) string tuple or a
        datetime.date; any other type leaves the key as an empty tuple.
        """
        dateKey = ()
        if isinstance(currentdate, tuple):
            dateKey = currentdate
        if isinstance(currentdate, datetime.date):
            dateKey = tuple(currentdate.strftime('%Y-%m-%d').split('-'))
        root = self.con.get_root()
        if book not in root:
            # First chapter for this book: create its mapping eagerly.
            root[book] = PersistentDict()
            self.con.commit()
        root[book][dateKey] = chapterDict
        self.con.commit()
def check_storage_tools(self):
    """Exercise the storage introspection helpers and Storage's abstract API."""
    connection = Connection(self._get_storage())
    root = connection.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    connection.commit()
    # Both new objects (oids 1 and 2) are referenced only by the root (oid 0).
    index = get_reference_index(connection.get_storage())
    assert index == {
        int8_to_str(1): [int8_to_str(0)],
        int8_to_str(2): [int8_to_str(0)]}
    # One PersistentDict (the root) and two Persistent instances.
    census = get_census(connection.get_storage())
    assert census == {as_bytes('PersistentDict'):1,
                      as_bytes('Persistent'):2}
    references = list(gen_referring_oid_record(connection.get_storage(),
                                               int8_to_str(1)))
    assert references == [
        (int8_to_str(0), connection.get_storage().load(int8_to_str(0)))]
    # The abstract Storage base class must refuse direct use.
    class Fake(object):
        pass
    s = Fake()
    s.__class__ = Storage
    raises(RuntimeError, s.__init__)
    raises(NotImplementedError, s.load, None)
    raises(NotImplementedError, s.begin)
    raises(NotImplementedError, s.store, None, None)
    raises(NotImplementedError, s.end)
    raises(NotImplementedError, s.sync)
    g = s.gen_oid_record()
    raises(NotImplementedError, next, g)
def lowlevelops(self):
    """The raw attribute helpers must not wake (load) a ghost object."""
    from durus.persistent import _getattribute, _setattribute
    from durus.persistent import _delattribute, _hasattribute
    storage = TempFileStorage()
    connection = Connection(storage)
    root = connection.get_root()
    root._p_set_status_ghost()
    # _hasattribute bypasses the loading machinery.
    assert not _hasattribute(root, 'data')
    root._p_set_status_ghost()
    # _getattribute raises rather than loading the object's state.
    raises(AttributeError, _getattribute, root, 'data')
    assert root._p_is_ghost()
    # Raw set/delete also leave the ghost status untouched.
    _setattribute(root, 'data', 'bogus')
    assert root._p_is_ghost()
    _delattribute(root, 'data')
    assert root._p_is_ghost()
def __init__(self):
    """Open the projects database, seeding a "Default" project on first run."""
    self._conn = Connection(FileStorage(Settings.get("projectsDataPath")))
    self._data = self._conn.get_root()
    if not self._data.keys():
        # Fresh database: install the default project and persist it.
        self._data["Default"] = PersistentDict(autocomplete=PersistentDict())
        self.sync()
def __init__(self, filename, mode):
    """Open the durus test database.

    mode: "w" to write, "r" to read; both open the storage identically.
    """
    self.mode = mode
    # Both modes opened the storage the same way; merge the duplicated
    # branches.
    self.storage = FileStorage(filename)
    self.connection = Connection(self.storage)
    self.test_db_items = self.connection.get_root()
    # BUG FIX: always initialise the record cursor -- previously it was
    # set only in "r" mode, so iterating after a "w" open raised
    # AttributeError.
    self.next_rec_num = 0  # Initialise next record counter
    self.num_records = len(self.test_db_items)
def __init__(self):
    """Open the projects database, seeding a "Default" project on first run."""
    self._conn = Connection(FileStorage(PROJECTS_DATA_PATH))
    self._data = self._conn.get_root()
    if not self._data.keys():
        # Fresh database: install the default project and persist it.
        self._data["Default"] = PersistentDict(autocomplete=PersistentDict())
        self.sync()
class _SlackingAutocompletes(object):
    """Persistent usage counters backing slacking-task autocompletion."""

    def __init__(self):
        self._conn = Connection(FileStorage(SLACKING_DATA_PATH))
        self._data = self._conn.get_root()

    def get(self):
        """Return slacking autocomplete list"""
        return self._data

    def add(self, name):
        """Add slacking autocomplete"""
        # Bump the usage counter (starting at 1) and persist immediately.
        self._data[name] = self._data.get(name, 0) + 1
        self._conn.commit()
def check_connection(self):
    """Exercise the basic Connection life-cycle: get, commit, abort, cache."""
    self.conn = conn = Connection(self._get_storage())
    self.root = root = conn.get_root()
    # The freshly fetched root is loaded, not a ghost.
    assert root._p_is_ghost() == False
    # oid 0 is the root, whether given as an int8 string or a plain int.
    assert root is conn.get(int8_to_str(0))
    assert root is conn.get(0)
    assert conn is root._p_connection
    # Nothing is stored at oid 1 yet.
    assert conn.get(int8_to_str(1)) == None
    conn.abort()
    conn.commit()
    assert root._p_is_ghost() == False
    root['a'] = Persistent()
    # Adding an object dirties both the root and the new object.
    assert root._p_is_unsaved() == True
    assert root['a']._p_is_unsaved() == True
    root['a'].f = 2
    # Only already-stored objects appear in conn.changed.
    assert list(conn.changed.values()) == [root]
    conn.commit()
    assert root._p_is_saved()
    assert list(conn.changed.values()) == []
    root['a'] = Persistent()
    assert list(conn.changed.values()) == [root]
    root['b'] = Persistent()
    root['a'].a = 'a'
    root['b'].b = 'b'
    conn.commit()
    root['a'].a = 'a'
    root['b'].b = 'b'
    conn.abort()
    # Shrinking the cache and touching an aborted object must still work.
    conn.shrink_cache()
    root['b'].b = 'b'
    del conn
def copy(self):
    """PersistentDict.copy must return an independent, equal dictionary."""
    connection = Connection(MemoryStorage())
    original = PersistentDict((key, True) for key in range(10))
    duplicate = original.copy()
    assert original == duplicate
    # Mutating the original must not leak into the copy.
    original[1] = 34
    assert original != duplicate
class Session(object):
    """
    Representation of the game state.
    """
    # Attribute names transparently persisted in the durus root instead of
    # the instance __dict__ (see __setattr__/__getattribute__).
    _persistent_attributes = ('scheduler', 'started', 'lastroom', 'universe',
                              'characters', 'player', 'debugging')

    # default values
    scheduler = None   # Scheduler instance
    started = False    # Is game started yet? (I.e. have player turns/actions begun)
    lastroom = None    # Used to determine auto-placement of items
    universe = None    # Top level container object (provides storage for entire game state)
    characters = ()    # List of character agents (references into universe)
    player = ()        # List of player character agents (normally only 1 in PUB)
    debugging = False  # Debugging mode is for use during game development

    def __init__(self, storagefile="default.sav"):
        self.storage = Connection(FileStorage(storagefile))
        self.root = self.storage.get_root()
        self.running = False

    def __setattr__(self, name, value):
        # Persistent attributes go into the durus root; everything else is
        # a normal instance attribute.
        if name in self._persistent_attributes:
            self.root[name] = value
        else:
            object.__setattr__(self, name, value)

    def __getattribute__(self, name):
        # Persistent attributes are read from the durus root, falling back
        # to the class-level default when never stored.
        persistent_attributes = object.__getattribute__(
            self, '_persistent_attributes')
        if name in persistent_attributes:
            try:
                return self.root[name]
            except KeyError:
                return getattr(self.__class__, name)
        else:
            return object.__getattribute__(self, name)

    def new_game(self):
        """
        Start up a new game (clear the storage instance).
        """
        self.scheduler = None
        self.started = True
        self.lastroom = None
        self.universe = None
        self.characters = None
        self.player = None
        self.debugging = False
        self.commit()
        self.pack()

    def commit(self):
        # Flush pending changes to storage.
        self.storage.commit()

    def abort(self):
        # Discard uncommitted changes.
        self.storage.abort()

    def pack(self):
        # Compact the underlying storage file.
        self.storage.pack()
def check_more(self):
    """Root status transitions across mutation, abort and manual overrides."""
    storage = TempFileStorage()
    connection = Connection(storage)
    root=connection.get_root()
    assert not root._p_is_ghost()
    root['a'] = 1
    assert root._p_is_unsaved()
    del root['a']
    connection.abort()
    # Abort turns the dirty root back into a ghost ...
    assert root._p_is_ghost()
    # ... whose attributes are gone until it is reloaded.
    raises(AttributeError, getattr, root, 'a')
    # The status setters can drive the object through every state.
    root._p_set_status_saved()
    assert root._p_is_saved()
    root._p_set_status_unsaved()
    assert root._p_is_unsaved()
    root._p_set_status_ghost()
    assert root._p_is_ghost()
    root._p_set_status_unsaved()
def pack_storage_main():
    """Command-line entry point: pack a Durus storage (file or server)."""
    parser = ArgumentParser("Packs a Durus storage.")
    parser.add_argument(
        '--file', dest="file", default=None,
        help="If this is not given, the storage is through a Durus server.")
    parser.add_argument(
        '--port', dest="port", default=DEFAULT_PORT, type=int,
        help="Port the server is on. (default=%s)" % DEFAULT_PORT)
    parser.add_argument(
        '--host', dest="host", default=DEFAULT_HOST,
        help="Host of the server. (default=%s)" % DEFAULT_HOST)
    args = parser.parse_args()
    if args.file is not None:
        storage = FileStorage(args.file)
    else:
        # No file given: pack through a running Durus server.
        wait_for_server(args.host, args.port)
        storage = ClientStorage(host=args.host, port=args.port)
    Connection(storage).pack()
def b(self):
    """Packing must let the storage reuse the oid of a deleted object."""
    tmp = File(prefix='shelftest')
    name = tmp.get_name()
    tmp.close()
    storage = FileStorage(name)
    conn = Connection(storage)
    root = conn.get_root()
    for idx in range(10):
        root["a%s" % idx] = Persistent()
    conn.commit()
    deleted_oid = root['a9']._p_oid
    del root['a9']
    conn.commit()
    conn.pack()
    conn.abort()
    # Root plus the nine surviving objects remain after the pack.
    assert len([repr(oid) for oid, record in storage.gen_oid_record()]) == 10
    # The first fresh oid is the one the pack freed ...
    assert storage.new_oid() == deleted_oid
    # ... and the next continues the regular sequence.
    assert storage.new_oid() == int8_to_str(11)
def pack_storage_main():
    """Command-line entry point: pack a Durus storage (file or server)."""
    parser = OptionParser()
    parser.set_description("Packs a Durus storage.")
    parser.add_option(
        '--file', dest="file", default=None,
        help="If this is not given, the storage is through a Durus server.")
    parser.add_option(
        '--port', dest="port", default=DEFAULT_PORT, type="int",
        help="Port the server is on. (default=%s)" % DEFAULT_PORT)
    parser.add_option(
        '--host', dest="host", default=DEFAULT_HOST,
        help="Host of the server. (default=%s)" % DEFAULT_HOST)
    options = parser.parse_args()[0]
    if options.file is None:
        # No file given: pack through a running Durus server.
        wait_for_server(options.host, options.port)
        storage = ClientStorage(host=options.host, port=options.port)
    else:
        storage = get_storage(options.file)
    Connection(storage).pack()
class Store():
    """Thin client wrapper around a Durus server's root mapping."""

    def __init__(self, host="127.0.0.1", port=2972):
        self.address = host, port
        self.conn = Connection(ClientStorage(self.address))
        self.root = self.conn.get_root()

    def get_objects(self, key):
        """Return the container stored under *key*, or None when absent."""
        return self.root.get(key)

    def new_objects(self, key):
        """Create an empty BTree under *key*.

        Raises AssertionError if *key* is falsy or not a string.
        """
        # FIX: isinstance is the correct type test -- type(key) == str
        # rejects str subclasses and is non-idiomatic.
        assert key and isinstance(key, str)
        self.root[key] = BTree()
def open(self):
    """Open the underlying storage based on initial arguments."""
    if not self.is_open:
        # Find or create storage.
        if self.client is not None:
            # Reuse the already-supplied xdserver client.
            self.storage = self.client.storage(self.database)
        elif None not in (self.host, self.port):
            # Both host and port given: connect a fresh client first.
            self.client = xdserver.client.Client(self.host, self.port)
            self.storage = self.client.storage(self.database)
        # Connect to storage.
        # NOTE(review): if neither branch above matched, self.storage keeps
        # whatever value it already had -- confirm callers always supply
        # either a client or host+port.
        self.conn = Connection(
            self.storage, cache_size=self.cache_size)
        self.is_open = True
class DurusFile(object): def __init__(self, file, new): self.__file = file if new: if os.path.exists(self.__file): os.remove(self.__file) self.__connection = Connection(FileStorage(self.__file)) self.__root = self.__connection.get_root() def close(self): self.__connection.get_storage().close() def getBudget(self): if self.__root.has_key("baseversion") and \ globalVars.baseversion == self.__root["baseversion"]: return self.__root["budget"] else: print _("Incorrent Base version") return None def setBudget(self, budget): self.__root["budget"] = budget self.__root["baseversion"] = globalVars.baseversion self.__connection.commit()
def create_durus_publisher():
    """Build a Quixote Publisher backed by a Durus-persisted session manager."""
    global connection
    filename = os.path.join(tempfile.gettempdir(), 'quixote-demo.durus')
    print('Opening %r as a Durus database.' % filename)
    connection = Connection(FileStorage(filename))
    db = connection.get_root()
    session_manager = db.get('session_manager', None)
    if session_manager is None:
        # First run: create and persist a fresh session manager.
        session_manager = PersistentSessionManager()
        db['session_manager'] = session_manager
        connection.commit()
    return Publisher(RootDirectory(),
                     session_manager=session_manager,
                     display_exceptions='plain')
def main():
    """Convert an existing FileStorage file into a py3k-compatible copy.

    Usage: python <script> <existing_file> <new_file>
    """
    from shutil import copyfile
    from os.path import exists

    def usage():
        # Print the usage text and abort.
        sys.stdout.write("Usage: python %s <existing_file> <new_file>\n" %
                         sys.argv[0])
        sys.stdout.write(" Creates a new py3k-compatible file ")
        sys.stdout.write("from an existing FileStorage file.\n")
        raise SystemExit
    if len(sys.argv) != 3:
        usage()
    infile = sys.argv[1]
    outfile = sys.argv[2]
    if not exists(infile):
        usage()
    if exists(outfile):
        if input('overwrite %r? [y/N] ' % outfile).strip().lower() != 'y':
            raise SystemExit
    copyfile(infile, outfile)
    # monkey patch pickler class, must be done before importing durus stuff
    patch_pickler()
    from durus.__main__ import get_storage_class
    from durus.connection import Connection
    storage_class = get_storage_class(outfile)
    storage = storage_class(outfile)
    connection = Connection(storage)
    print("Converting %s for use with py3k." % outfile)
    # Touch every object so it gets re-pickled with the patched pickler.
    # BUG FIX: the loop index was referenced after the loop without being
    # initialised, which raised NameError on an empty database.
    count = 0
    for count, obj in enumerate(connection.get_crawler()):
        obj._p_note_change()
        if count > 0 and count % 10000 == 0:
            # Commit in batches to bound memory use; report progress.
            print(count)
            connection.commit()
    print(count)
    connection.commit()
    connection.pack()
class DurusStore(SyncStore):
    '''Class for Durus object database frontend.'''

    # URI scheme prefix recognised for this backend.
    init = 'durus://'

    def __init__(self, engine, **kw):
        super(DurusStore, self).__init__(engine, **kw)
        # The engine path names the FileStorage backing file.
        self._db = FileStorage(self._engine)
        self._connection = Connection(self._db)
        # Syncing this store is committing the durus connection.
        self.sync = self._connection.commit
        self._store = self._connection.get_root()

    def close(self):
        '''Closes all open storage and connections.'''
        self.sync()
        self._db.close()
        super(DurusStore, self).close()
def __init__(self, axisLabels, statisticLabels, forcedCategoricals,
             startingXattribute, startingYattribute):
    """Wipe stale database files and build a fresh Data.db root."""
    # Remove leftovers from a previous run, including durus lock/temp files.
    for stale in ('Data.db', 'Data.db.lock', 'Data.db.tmp', 'Data.db.prepack'):
        if os.path.exists(stale):
            os.remove(stale)
    self.dataConnection = Connection(FileStorage("Data.db"))
    self.data = self.dataConnection.get_root()
    # possible key:value pairs (this is a little bizarre, but I can only
    # have one database (as objects reference each other), and for
    # performance I want to keep all variant objects and axes on the root
    # level:
    self.data['variant keys'] = set()
    self.axisLabels = axisLabels
    self.forcedCategoricals = forcedCategoricals
    self.statisticLabels = statisticLabels
    self.allAxes = self.defaultAxisOrder()
class DurusTest(UTest):
    """Cache accounting for a BTree stored in a durus connection."""

    def _pre(self):
        self.connection = Connection(MemoryStorage())

    def _post(self):
        del self.connection

    def a(self):
        bt = self.connection.get_root()['bt'] = BTree()
        t = bt.root.minimum_degree
        # Only one object cached so far.
        assert self.connection.get_cache_count() == 1
        # Fill the root bnode to capacity (2t - 1 keys).
        for x in range(2 * t - 1):
            bt.add(x)
        self.connection.commit()
        # Cache grows to 3 -- presumably root dict, BTree and its bnode.
        assert self.connection.get_cache_count() == 3
        # One more key forces a bnode split, adding two cached objects.
        bt.add(2 * t - 1)
        self.connection.commit()
        assert self.connection.get_cache_count() == 5
        bt.note_change_of_bnode_containing_key(1)
def check_shrink(self):
    """Commits and pack must work with a cache smaller than the live set."""
    storage = self._get_storage()
    # Deliberately tiny cache (3 objects) so commits overflow it.
    self.conn = conn = Connection(storage, cache_size=3)
    self.root = root = conn.get_root()
    root['a'] = Persistent()
    root['b'] = Persistent()
    root['c'] = Persistent()
    assert self.root._p_is_unsaved()
    conn.commit()
    root['a'].a = 1
    conn.commit()
    # Dirty several objects and add more than the cache can hold.
    root['b'].b = 1
    root['c'].c = 1
    root['d'] = Persistent()
    root['e'] = Persistent()
    root['f'] = Persistent()
    conn.commit()
    root['f'].f = 1
    root['g'] = Persistent()
    conn.commit()
    conn.pack()
def __init__(self, filename):
    """Open a durus Connection backed by the FileStorage at *filename*."""
    self._connection = Connection(FileStorage(filename))
def showExperimentTimeStamps():
    """Return the timestamp keys of every stored experiment run."""
    with closing(FileStorage("soda_results.durus")) as durus:
        return Connection(durus).get_root().keys()
class CUsers(Persistent):
    """Persistent registry of users (by jid) and their feed subscriptions."""

    def __init__(self):
        # durus file storage
        self.conndurus = Connection(FileStorage(CONFIG['durus_file']))
        root = self.conndurus.get_root()
        # Create the top-level containers on first run.
        if not root.get('users'):
            root['users'] = PersistentDict()  # {user jid: CUser}
        if not root.get('feeds'):
            root['feeds'] = CFeeds()
        self.data = root['users']
        self.feeds = root['feeds']
        self.save()

    def save(self):
        # Commit pending changes to durus.
        self.conndurus.commit()

    def __getitem__(self, key):
        return self.data.get(key)

    def __len__(self):
        return len(self.data)

    def add_feed(self, jid, feed=None):
        """Add an user if not exists and subscribe the feed url,
        if not exists.
        """
        fn = True  # first notification?
        if not self.data.get(jid):
            self.data[jid] = CUser(jid)
        if not self.feeds.get(feed) and feed:
            self.feeds[feed] = CFeed(feed)
            fn = False
        if feed:
            oku = self.data[jid].subs_feed(self.feeds[feed], fn)
            okf = self.feeds[feed].add_user(self.data[jid])
        self.save()
        # NOTE(review): when feed is None, `oku` looks unbound here --
        # confirm callers never omit the feed argument.
        if feed:
            return oku and okf
        else:
            return oku

    def del_feed(self, jid, feed):
        """Delete an user subscription."""
        tempfeed = self.feeds.get(feed)
        tempuser = self.data.get(jid)
        if tempuser:
            oku = self.data[jid].unsubs_feed(tempfeed)
        else:
            oku = False
        if tempfeed:
            okf = self.feeds[feed].del_user(tempuser)
        else:
            okf = False
        self.save()
        return oku and okf

    def notification_method(self, jid):
        """Return 'how the user will receive the notifications'"""
        tempuser = self.data.get(jid)
        if tempuser:
            hl = tempuser.getConfig('useheadline')
            if not hl or hl == "on":
                return "by headlines"
            return "by chat message"
        else:
            return "-"

    def notification_when(self, jid):
        """Return 'when the user wants to receive notifications'"""
        tempuser = self.data.get(jid)
        if tempuser:
            oa = tempuser.getConfig('onlyavailable')
            if not oa or oa == "off":
                return "always"
            return "available only, or ready for chat"
        else:
            return "-"

    def len_feeds(self, jid):
        # Number of feeds for the user, as a string; "0" for unknown users.
        tempuser = self.data.get(jid)
        if tempuser:
            return str(len(tempuser))
        else:
            return "0"

    def setup(self, jid, action, mode):
        # Lazily create the user record, then apply the config change.
        tempuser = self.data.get(jid)
        if not tempuser:
            tempuser = CUser(jid)
            self.data[jid] = tempuser
        tempuser.setup(action, mode)
        return True

    def get(self, key):
        return self.data.get(key)

    def keys(self):
        return self.data.keys()

    def values(self):
        return self.data.values()
def main():
    """Stress test a Durus server: randomly mutate/verify until max loops."""
    parser = OptionParser()
    parser.set_description('Stress test a Durus Server')
    parser.add_option('--port', dest='port', default=DEFAULT_PORT,
                      type='int',
                      help='Port to listen on. (default=%s)' % DEFAULT_PORT)
    parser.add_option('--host', dest='host', default=DEFAULT_HOST,
                      help='Host to listen on. (default=%s)' % DEFAULT_HOST)
    parser.add_option('--cache_size', dest="cache_size", default=4000,
                      type="int",
                      help="Size of client cache (default=4000)")
    parser.add_option('--max-loops', dest='loops', default=None,
                      type='int',
                      help='Maximum number of loops before exiting.')
    (options, args) = parser.parse_args()
    from durus.logger import logger
    logger.setLevel(5)
    storage = ClientStorage(host=options.host, port=options.port)
    connection = Connection(storage, cache_size=options.cache_size)
    try:
        # Initialise the database on first run, then sanity-check it all.
        if 'obj' not in connection.get_root():
            init_db(connection)
            verify_db(connection, all=True)
            connection.commit()
    except ConflictError:
        connection.abort()
    # None means loop forever; otherwise count down to zero.
    n = options.loops
    while n is None or n > 0:
        if n is not None:
            n -= 1
        try:
            if hasattr(sys, 'gettotalrefcount'):
                # Debug builds only: track total refcount for leak hunting.
                sys.stdout.write('refs = %s\n' % sys.gettotalrefcount())
            if randbool():
                connection.abort()
            verify_db(connection)
            mutate_db(connection)
            connection.commit()
            maybe_sleep()
        except ConflictError:
            # Another client won the commit race; drop our changes, retry.
            sys.stdout.write('conflict\n')
            connection.abort()
            maybe_sleep()
def __init__(self, host, port):
    """Connect to the Durus server at host:port and keep the Connection."""
    self._connection = Connection(ClientStorage(host=host, port=port))