Example #1
def run_trials():
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)

    jids = pool.map(f, [gens] * numTrials)
    print "Done spawning trials. Retrieving results..."

    results = pool.map(cloud_result, jids)
    firstLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials, gens + 1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."

    raw_input()

    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result

    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        conn.get_root()[str(int(floor(time.time())))] = (firstLocusFreqsHists,
                                                         lastLocusFreqsHists)
        conn.commit()

    pool.close()
    pool.join()
Example #2
def run_trials():
    numTrials = 3000
    gens = 1000
    from multiprocessing.pool import ThreadPool as Pool
    pool = Pool(50)

    jids = pool.map(f,[gens]*numTrials)
    print "Done spawning trials. Retrieving results..."

    results = pool.map(cloud_result, jids)
    firstLocusFreqsHists = zeros((numTrials,gens+1), dtype='float')
    lastLocusFreqsHists = zeros((numTrials,gens+1), dtype='float')
    print "Done retrieving results. Press Enter to serialize..."

    raw_input()

    for i, result in enumerate(results):
        firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result

    with closing(FileStorage("soda_results.durus")) as durus:
        conn = Connection(durus)
        conn.get_root()[str(int(floor(time.time())))] = (firstLocusFreqsHists, lastLocusFreqsHists)
        conn.commit()

    pool.close()
    pool.join()
Example #3
class Session(object):
    """
    Representation of the game state.
    """
    _persistent_attributes = ('scheduler', 'started', 'lastroom', 'universe',
                              'characters', 'player', 'debugging')
    # default values
    scheduler = None  # Scheduler instance
    started = False  # Is game started yet? (I.e. have player turns/actions begun)
    lastroom = None  # Used to determine auto-placement of items
    universe = None  # Top level container object (provides storage for entire game state)
    characters = ()  # List of character agents (references into universe)
    player = ()  # List of player character agents (normally only 1 in PUB)
    debugging = False  # Debugging mode is for use during game development

    def __init__(self, storagefile="default.sav"):
        self.storage = Connection(FileStorage(storagefile))
        self.root = self.storage.get_root()

        self.running = False

    def __setattr__(self, name, value):
        if name in self._persistent_attributes:
            self.root[name] = value
        else:
            object.__setattr__(self, name, value)

    def __getattribute__(self, name):
        persistent_attributes = object.__getattribute__(
            self, '_persistent_attributes')
        if name in persistent_attributes:
            try:
                return self.root[name]
            except KeyError:
                return getattr(self.__class__, name)
        else:
            return object.__getattribute__(self, name)

    def new_game(self):
        """
        Start up a new game (clear the storage instance).
        """
        self.scheduler = None
        self.started = True
        self.lastroom = None
        self.universe = None
        self.characters = None
        self.player = None
        self.debugging = False
        self.commit()
        self.pack()

    def commit(self):
        self.storage.commit()

    def abort(self):
        self.storage.abort()

    def pack(self):
        self.storage.pack()
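
The Session class above funnels every name listed in _persistent_attributes through the Durus root mapping, so ordinary attribute access reads and writes persistent state. A minimal, hypothetical usage sketch (the temp-file path and the attribute values are placeholders, not part of the original code):

import os
import tempfile

# Hypothetical demo path; any writable location works for a FileStorage file.
path = os.path.join(tempfile.gettempdir(), "pub-session-demo.sav")
session = Session(storagefile=path)

session.started = True             # __setattr__ stores this in session.root['started']
assert session.root['started'] is True
assert session.started is True     # __getattribute__ reads it back from the root
assert session.debugging is False  # unset keys fall back to the class-level default

session.commit()                   # persists the root mapping via the Durus connection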
Example #4
 def delete(self):
     connection = Connection(MemoryStorage())
     pd = PersistentDict((x, True) for x in range(10))
     connection.root['x'] = pd
     connection.commit()
     del pd[1]
     assert pd._p_is_unsaved()
Example #5
 def check_storage_tools(self):
     connection = Connection(self._get_storage())
     root = connection.get_root()
     root['a'] = Persistent()
     root['b'] = Persistent()
     connection.commit()
     index = get_reference_index(connection.get_storage())
     assert index == {
         int8_to_str(1): [int8_to_str(0)], int8_to_str(2): [int8_to_str(0)]}
     census = get_census(connection.get_storage())
     assert census == {as_bytes('PersistentDict'):1, as_bytes('Persistent'):2}
     references = list(gen_referring_oid_record(connection.get_storage(),
                                                int8_to_str(1)))
     assert references == [
         (int8_to_str(0), connection.get_storage().load(int8_to_str(0)))]
     class Fake(object):
         pass
     s = Fake()
     s.__class__ = Storage
     raises(RuntimeError, s.__init__)
     raises(NotImplementedError, s.load, None)
     raises(NotImplementedError, s.begin)
     raises(NotImplementedError, s.store, None, None)
     raises(NotImplementedError, s.end)
     raises(NotImplementedError, s.sync)
     g = s.gen_oid_record()
     raises(NotImplementedError, next, g)
Example #6
    def check_storage_tools(self):
        connection = Connection(self._get_storage())
        root = connection.get_root()
        root['a'] = Persistent()
        root['b'] = Persistent()
        connection.commit()
        index = get_reference_index(connection.get_storage())
        assert index == {
            int8_to_str(1): [int8_to_str(0)],
            int8_to_str(2): [int8_to_str(0)]
        }
        census = get_census(connection.get_storage())
        assert census == {
            as_bytes('PersistentDict'): 1,
            as_bytes('Persistent'): 2
        }
        references = list(
            gen_referring_oid_record(connection.get_storage(), int8_to_str(1)))
        assert references == [(int8_to_str(0),
                               connection.get_storage().load(int8_to_str(0)))]

        class Fake(object):
            pass

        s = Fake()
        s.__class__ = Storage
        raises(RuntimeError, s.__init__)
        raises(NotImplementedError, s.load, None)
        raises(NotImplementedError, s.begin)
        raises(NotImplementedError, s.store, None, None)
        raises(NotImplementedError, s.end)
        raises(NotImplementedError, s.sync)
        g = s.gen_oid_record()
        raises(NotImplementedError, next, g)
Example #7
 def test_delete(self):
     connection = Connection(MemoryStorage())
     pd = PersistentDict((x, True) for x in range(10))
     connection.root['x'] = pd
     connection.commit()
     del pd[1]
     assert pd._p_is_unsaved()
Example #8
def get_config():
    connection = Connection(FileStorage("/var/tmp/test.durus"))
    root = connection.get_root()  # connection set as shown above.
    if not root.has_key("_pconfig"):
        cf = GenericConfiguration()
        root["_pconfig"] = cf
        root["_pconfig"]["default"] = SECTION()
        connection.commit()
    return root["_pconfig"]
Example #9
def get_config():
    connection = Connection(FileStorage("/var/tmp/test.durus"))
    root = connection.get_root()  # connection set as shown above.
    if not root.has_key("_pconfig"):
        cf = GenericConfiguration()
        root["_pconfig"] = cf
        root["_pconfig"]["default"] = SECTION()
        connection.commit()
    return root["_pconfig"]
Example #10
def open_document(db_path):
    conn = Connection(FileStorage(db_path))
    db_root = conn.get_root()
    if 'doc' not in db_root:
        db_root['doc'] = Document()
        db_root['version'] = updates.current_version
        conn.commit()
    updates.do_updates(conn)
    h = DocumentHandler(conn)
    return h.doc
Example #11
class _Projects(object):
    """
    Class for managing projects and the autocompletes
    for each project.
    """
    def __init__(self):
        self._conn = Connection(FileStorage(PROJECTS_DATA_PATH))
        self._data = self._conn.get_root()

        if not len(self._data.keys()):
            self._data["Default"] = PersistentDict(
                autocomplete=PersistentDict())
            self.sync()

    def get(self):
        """Return projects list"""
        return self._data.keys()

    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(
            autocomplete=PersistentDict())
        self.sync()

    def remove(self, name):
        """Remove project"""
        del self._data[unicode(name)]

    def getAutocomleteList(self, name, appendix={}):
        """
        Return the autocomplete list for a project; if appendix
        autocompletes are given, merge them with the project autocompletes.
        """
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        sortedDict = sorted(autocompDict.items(),
                            key=lambda (k, v): (v, k),
                            reverse=True)
        return [a[0] for a in sortedDict]

    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()

    def sync(self):
        self._conn.commit()

    def __del__(self):
        self.sync()
Example #12
class _Settings(object):
    """Settings singleton object"""
    
    _appName = "MacTimeLog"
    
    _defaultSettings = {
        "dateFormat": "%m-%d-%Y %H:%M",
        "timeFormat": "%H:%M",
        "logDateTimeFormat": "at %H:%M",
        "workEndTime": "06:00",
        "workDayLength": 3600*8,
        "timerInterval": 1,
        "showWorkTill": False,
        "showDateTime": False,
        "logPath": "%s/%s" % (settingsFolder(_appName), "log.txt"),
        "projectsDataPath": "%s/%s" % (settingsFolder(_appName), "projects"),
        "slackingDataPath": "%s/%s" % (settingsFolder(_appName), "slacking"),
        "logEditCommand": "open -a TextEdit \"%s\"",
        "projectSeparator": "::",
        "selectedProject": "Default",
        "startPlaceholder": "__start__",
        "showNotification": False,
        "notificationTime": 40,
        "notificationRepeatTime": 10,
        "soundOnNotification": False,
        "showHelpMessageOnStart": True
    }

    _globalSettings = {}
    
    def __init__(self):
        self._settingsFile = "%s/%s" % (settingsFolder(self._appName), "settings")
        self._conn = Connection(FileStorage(self._settingsFile))
        self._globalSettings = self._conn.get_root()
        
    def get(self, key):
        """Return setting value by key"""
        if key in self._globalSettings:
            return self._globalSettings[key]
        elif key in self._defaultSettings:
            return self._defaultSettings[key]
        else:
            pass
        
    def set(self, key, value):
        """Set setting value by key"""
        self._globalSettings[key] = value
        
    def sync(self):
        self._conn.commit()

    def __del__(self):
        self.sync()
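
The _Settings object above layers the Durus root over a dictionary of defaults: get() prefers the stored value and falls back to _defaultSettings, set() writes into the root, and sync() commits. A short, hypothetical usage sketch (the keys are taken from _defaultSettings above):

settings = _Settings()

fmt = settings.get("timeFormat")      # falls back to the default "%H:%M" until set
settings.set("timeFormat", "%H.%M")   # written into the Durus root mapping
settings.sync()                       # commit() on the underlying connection
assert settings.get("timeFormat") == "%H.%M"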
Example #13
def main():
    parser = OptionParser()
    parser.set_description('Stress test a Durus Server')
    parser.add_option('--port',
                      dest='port',
                      default=DEFAULT_PORT,
                      type='int',
                      help='Port to listen on. (default=%s)' % DEFAULT_PORT)
    parser.add_option('--host',
                      dest='host',
                      default=DEFAULT_HOST,
                      help='Host to listen on. (default=%s)' % DEFAULT_HOST)
    parser.add_option('--cache_size',
                      dest="cache_size",
                      default=4000,
                      type="int",
                      help="Size of client cache (default=4000)")
    parser.add_option('--max-loops',
                      dest='loops',
                      default=None,
                      type='int',
                      help='Maximum number of loops before exiting.')

    (options, args) = parser.parse_args()
    from durus.logger import logger
    logger.setLevel(5)
    storage = ClientStorage(host=options.host, port=options.port)
    connection = Connection(storage, cache_size=options.cache_size)
    try:
        if 'obj' not in connection.get_root():
            init_db(connection)
            verify_db(connection, all=True)
            connection.commit()
    except ConflictError:
        connection.abort()
    n = options.loops
    while n is None or n > 0:
        if n is not None:
            n -= 1
        try:
            if hasattr(sys, 'gettotalrefcount'):
                sys.stdout.write('refs = %s\n' % sys.gettotalrefcount())
            if randbool():
                connection.abort()
            verify_db(connection)
            mutate_db(connection)
            connection.commit()
            maybe_sleep()
        except ConflictError:
            sys.stdout.write('conflict\n')
            connection.abort()
            maybe_sleep()
Example #14
 def _scenario(self):
     c1 = Connection(self._get_storage())
     c2 = Connection(self._get_storage())
     c1.get_root()['A'] = Persistent()
     c1.get_root()['B'] = Persistent()
     c1.get_root()['A'].a = 1
     c1.commit()
     c2.abort()
     c1.cache.recent_objects.discard(c1.get_root()['A'])
     # Imagine c1 has been running for a while, and
     # cache management, for example, has caused the
     # cache reference to be weak.
     return c1, c2
Example #15
def repickle_storage(storage):
    """(storage: FileStorage)
    Force every object to be loaded and re-pickled.
    This also packs, so that all of the old pickles are removed.
    """
    connection = Connection(storage)
    for j, oid in enumerate(storage.index):
        obj = connection.get(oid)
        obj._p_note_change()
        if j and j % 10000 == 0:
            connection.commit()
    connection.commit()
    storage.pack()
Example #16
 def check_touch_every_reference(self):
     connection = Connection(self._get_storage())
     root = connection.get_root()
     root['a'] = Persistent()
     root['b'] = Persistent()
     from durus.persistent_list import PersistentList
     root['b'].c = PersistentList()
     connection.commit()
     touch_every_reference(connection, 'PersistentList')
     assert root['b']._p_is_unsaved()
     assert root['b'].c._p_is_unsaved()
     assert not root._p_is_unsaved()
     assert len(list(connection.get_cache())) == 4
Example #17
 def check_touch_every_reference(self):
     connection = Connection(self._get_storage())
     root = connection.get_root()
     root['a'] = Persistent()
     root['b'] = Persistent()
     from durus.persistent_list import PersistentList
     root['b'].c = PersistentList()
     connection.commit()
     touch_every_reference(connection, 'PersistentList')
     assert root['b']._p_is_unsaved()
     assert root['b'].c._p_is_unsaved()
     assert not root._p_is_unsaved()
     assert len(list(connection.get_cache())) == 4
Example #18
 def _scenario(self):
     c1 = Connection(self._get_storage())
     c2 = Connection(self._get_storage())
     c1.get_root()['A'] = Persistent()
     c1.get_root()['B'] = Persistent()
     c1.get_root()['A'].a = 1
     c1.commit()
     c2.abort()
     c1.cache.recent_objects.discard(c1.get_root()['A'])
     # Imagine c1 has been running for a while, and
     # cache management, for example, has caused the
     # cache reference to be weak.
     return c1, c2
Example #19
class _Projects(object):
    """
    Class for managing projects and the autocompletes
    for each project.
    """
    
    def __init__(self):
        self._conn = Connection(FileStorage(Settings.get("projectsDataPath")))
        self._data = self._conn.get_root()

        if not len(self._data.keys()):
            self._data["Default"] = PersistentDict(autocomplete=PersistentDict())
            self.sync()
    
    def get(self):
        """Return projects list"""
        return self._data.keys()
        
    def add(self, name):
        """Add new project"""
        self._data[unicode(name)] = PersistentDict(autocomplete=PersistentDict())
        self.sync()
        
    def remove(self, name):
        """Remove project"""
        del self._data[unicode(name)]
        
    def getAutocomleteList(self, name, appendix={}):
        """
        Return the autocomplete list for a project; if appendix
        autocompletes are given, merge them with the project autocompletes.
        """
        autocompDict = self._data[unicode(name)]["autocomplete"]
        autocompDict.update(appendix)
        sortedDict = sorted(autocompDict.items(), key=lambda (k, v):(v, k), reverse=True)
        return [a[0] for a in sortedDict]
        
    def addAutocomplete(self, projectName, taskName):
        """Add autocompleted task for peoject"""
        prj = self._data[projectName]
        if taskName not in prj["autocomplete"]:
            prj["autocomplete"][taskName] = 1
        else:
            prj["autocomplete"][taskName] += 1
        self.sync()
 
    def sync(self):
        self._conn.commit()
        
    def __del__(self):
        self.sync()
Example #20
class TestBackend:

  def __init__(self, filename, mode):

    self.mode = mode

    if mode == "w":
      self.storage = FileStorage(filename)
      self.connection = Connection(self.storage)
      self.test_db_items = self.connection.get_root()

    elif mode == "r":
      self.storage = FileStorage(filename)
      self.connection = Connection(self.storage)
      self.test_db_items = self.connection.get_root()

      self.next_rec_num = 0   # Initialise next record counter
      self.num_records = len(self.test_db_items)

  def __setitem__(self, key, value):

    self.test_db_items[key] = value

  def __getitem__(self, key):

    return self.test_db_items[str(key)]

  def __len__(self):

    return len(self.test_db_items)

  def first(self):

    return self.test_db_items[0]

  def iteritems(self):

    while(self.next_rec_num < self.num_records):
      value = self.test_db_items[self.next_rec_num]
  
      self.next_rec_num += 1

      yield value

  def close(self):
    self.connection.commit()
    self.storage.close()

  def getTestDBItems(self):
    return self.test_db_items.values()
Example #21
 def create_durus_publisher():
     global connection
     filename = os.path.join(tempfile.gettempdir(), 'quixote-demo.durus')
     print 'Opening %r as a Durus database.' % filename
     connection = Connection(FileStorage(filename))
     root = connection.get_root()
     session_manager = root.get('session_manager', None)
     if session_manager is None:
         session_manager = PersistentSessionManager()
         connection.get_root()['session_manager'] = session_manager
         connection.commit()
     return Publisher(RootDirectory(),
                      session_manager=session_manager,
                      display_exceptions='plain')
Example #22
 def create_durus_publisher():
     global connection
     filename = os.path.join(tempfile.gettempdir(), 'quixote-demo.durus')
     print('Opening %r as a Durus database.' % filename)
     connection = Connection(FileStorage(filename))
     root = connection.get_root()
     session_manager = root.get('session_manager', None)
     if session_manager is None:
         session_manager = PersistentSessionManager()
         connection.get_root()['session_manager'] = session_manager
         connection.commit()
     return Publisher(RootDirectory(),
                      session_manager=session_manager,
                      display_exceptions='plain')
Example #23
class _SlackingAutocompletes(object):
    def __init__(self):
        self._conn = Connection(FileStorage(SLACKING_DATA_PATH))
        self._data = self._conn.get_root()

    def get(self):
        """Return slacking autocomplete list"""
        return self._data

    def add(self, name):
        """Add slacking autocomplete"""
        if name in self._data:
            self._data[name] += 1
        else:
            self._data[name] = 1
        self._conn.commit()
Example #24
def main():
    parser = OptionParser()
    parser.set_description("Stress test a Durus Server")
    parser.add_option(
        "--port", dest="port", default=DEFAULT_PORT, type="int", help="Port to listen on. (default=%s)" % DEFAULT_PORT
    )
    parser.add_option(
        "--host", dest="host", default=DEFAULT_HOST, help="Host to listen on. (default=%s)" % DEFAULT_HOST
    )
    parser.add_option(
        "--cache_size", dest="cache_size", default=4000, type="int", help="Size of client cache (default=4000)"
    )
    parser.add_option(
        "--max-loops", dest="loops", default=None, type="int", help="Maximum number of loops before exiting."
    )

    (options, args) = parser.parse_args()
    from durus.logger import logger

    logger.setLevel(5)
    storage = ClientStorage(host=options.host, port=options.port)
    connection = Connection(storage, cache_size=options.cache_size)
    try:
        if "obj" not in connection.get_root():
            init_db(connection)
            verify_db(connection, all=True)
            connection.commit()
    except ConflictError:
        connection.abort()
    n = options.loops
    while n is None or n > 0:
        if n is not None:
            n -= 1
        try:
            if hasattr(sys, "gettotalrefcount"):
                sys.stdout.write("refs = %s\n" % sys.gettotalrefcount())
            if randbool():
                connection.abort()
            verify_db(connection)
            mutate_db(connection)
            connection.commit()
            maybe_sleep()
        except ConflictError:
            sys.stdout.write("conflict\n")
            connection.abort()
            maybe_sleep()
Example #25
 def check_conflict(self):
     b = Connection(self._get_storage())
     c = Connection(self._get_storage())
     rootb = b.get(int8_to_str(0))
     rootb['b'] = Persistent()
     rootc = c.get(int8_to_str(0))
     rootc['c'] = Persistent()
     c.commit()
     raises(ConflictError, b.commit)
     raises(KeyError, rootb.__getitem__, 'c')
     transaction_serial = b.transaction_serial
     b.abort()
     assert b.get_transaction_serial() > transaction_serial
     assert rootb._p_is_ghost()
     rootc['d'] = Persistent()
     c.commit()
     rootb['d']
Example #26
class _SlackingAutocompletes(object):

    def __init__(self):
        self._conn = Connection(FileStorage(SLACKING_DATA_PATH))
        self._data = self._conn.get_root()

    def get(self):
        """Return slacking autocomplete list"""
        return self._data

    def add(self, name):
        """Add slacking autocomplete"""
        if name in self._data:
            self._data[name] += 1
        else:
            self._data[name] = 1
        self._conn.commit()
Example #27
 def check_conflict(self):
     b = Connection(self._get_storage())
     c = Connection(self._get_storage())
     rootb = b.get(int8_to_str(0))
     rootb['b'] = Persistent()
     rootc = c.get(int8_to_str(0))
     rootc['c'] = Persistent()
     c.commit()
     raises(ConflictError, b.commit)
     raises(KeyError, rootb.__getitem__, 'c')
     transaction_serial = b.transaction_serial
     b.abort()
     assert b.get_transaction_serial() > transaction_serial
     assert rootb._p_is_ghost()
     rootc['d'] = Persistent()
     c.commit()
     rootb['d']
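
The check_conflict tests above show the core behaviour: committing against data that another connection has already changed raises ConflictError, and abort() discards the stale state. Application code (see the stress-test main() in Examples #13 and #24) typically wraps this in a retry loop; a hedged sketch of that pattern, where commit_with_retry and mutate are illustrative names rather than Durus API:

from durus.error import ConflictError

def commit_with_retry(connection, mutate, attempts=3):
    # mutate is any callable that modifies persistent objects reachable
    # from connection.get_root(); retry a few times on write conflicts.
    for _ in range(attempts):
        try:
            mutate(connection.get_root())
            connection.commit()
            return True
        except ConflictError:
            connection.abort()  # drop stale state; objects reload on next access
    return False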
Example #28
class DurusTest(UTest):
    def _pre(self):
        self.connection = Connection(MemoryStorage())

    def _post(self):
        del self.connection

    def a(self):
        bt = self.connection.get_root()['bt'] = BTree()
        t = bt.root.minimum_degree
        assert self.connection.get_cache_count() == 1
        for x in range(2 * t - 1):
            bt.add(x)
        self.connection.commit()
        assert self.connection.get_cache_count() == 3
        bt.add(2 * t - 1)
        self.connection.commit()
        assert self.connection.get_cache_count() == 5
        bt.note_change_of_bnode_containing_key(1)
Example #29
class DurusTest(UTest):

    def _pre(self):
        self.connection = Connection(MemoryStorage())

    def _post(self):
        del self.connection

    def a(self):
        bt = self.connection.get_root()['bt'] = BTree()
        t = bt.root.minimum_degree
        assert self.connection.get_cache_count() == 1
        for x in range(2 * t - 1):
            bt.add(x)
        self.connection.commit()
        assert self.connection.get_cache_count() == 3
        bt.add(2 * t - 1)
        self.connection.commit()
        assert self.connection.get_cache_count() == 5
        bt.note_change_of_bnode_containing_key(1)
Example #30
def main(old_file, new_file):
    if old_file.startswith('-'):
        usage()
    if new_file.startswith('-'):
        usage()
    assert not exists(new_file)
    connection = Connection(sys.argv[1])
    tmpfile = TemporaryFile()
    print("pickling from " + old_file)
    dump(connection.get_root().__getstate__(), tmpfile, 2)
    connection = None
    tmpfile.seek(0)
    connection2 = Connection(sys.argv[2])
    print("unpickling")
    connection2.get_root().__setstate__(load(tmpfile))
    connection2.get_root()._p_note_change()
    print("commit to " + new_file)
    connection2.commit()
    print("pack")
    connection2.pack()
Example #31
 def b(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oid = r['a9']._p_oid
     del r['a9']
     c.commit()
     c.pack()
     c.abort()
     assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 10
     new_oid = s.new_oid()
     assert new_oid == deleted_oid
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(11)
Example #32
 def b(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oid = r['a9']._p_oid
     del r['a9']
     c.commit()
     c.pack()
     c.abort()
     assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 10
     new_oid = s.new_oid()
     assert new_oid == deleted_oid
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(11)
Example #33
def main(old_file, new_file):
    if old_file.startswith('-'):
        usage()
    if new_file.startswith('-'):
        usage()
    assert not exists(new_file)
    connection = Connection(sys.argv[1])
    tmpfile = TemporaryFile()
    print("pickling from " + old_file)
    dump(connection.get_root().__getstate__(), tmpfile, 2)
    connection = None
    tmpfile.seek(0)
    connection2 = Connection(sys.argv[2])
    print("unpickling")
    connection2.get_root().__setstate__(load(tmpfile))
    connection2.get_root()._p_note_change()
    print("commit to " + new_file)
    connection2.commit()
    print("pack")
    connection2.pack()
Example #34
def main():
    from shutil import copyfile
    from os.path import exists

    def usage():
        sys.stdout.write("Usage: python %s <existing_file> <new_file>\n" %
                         sys.argv[0])
        sys.stdout.write("  Creates a new py3k-compatible file ")
        sys.stdout.write("from an existing FileStorage file.\n")
        raise SystemExit

    if len(sys.argv) != 3:
        usage()
    infile = sys.argv[1]
    outfile = sys.argv[2]
    if not exists(infile):
        usage()
    if exists(outfile):
        if input('overwrite %r? [y/N] ' % outfile).strip().lower() != 'y':
            raise SystemExit
    copyfile(infile, outfile)

    # monkey patch pickler class, must be done before importing durus stuff
    patch_pickler()

    from durus.__main__ import get_storage_class
    from durus.connection import Connection

    storage_class = get_storage_class(outfile)
    storage = storage_class(outfile)
    connection = Connection(storage)
    print("Converting %s for use with py3k." % outfile)
    for j, x in enumerate(connection.get_crawler()):
        x._p_note_change()
        if j > 0 and j % 10000 == 0:
            print(j)
            connection.commit()
    print(j)
    connection.commit()
    connection.pack()
Example #35
 def check_oid_reuse(self):
     # Requires ShelfStorage oid reuse pack semantics
     s1 = ClientStorage(address=self.address)
     s1.oid_pool_size = 1
     c1 = Connection(s1)
     r1 = c1.get_root()
     s2 = ClientStorage(address=self.address)
     s2.oid_pool_size = 1
     c2 = Connection(s2)
     r2 = c2.get_root()
     r1['a'] = PersistentDict()
     r1['b'] = PersistentDict()
     c1.commit()
     c2.abort()
     a_oid = r1['a']._p_oid
     assert 'a' in r1 and 'b' in r1 and len(r1['b']) == 0
     assert 'a' in r2 and 'b' in r2 and len(r2['b']) == 0
     del r2['a'] # remove only reference to a
     c2.commit()
     c2.pack() # force relinquished oid back into availability
     sleep(0.5) # Give time for pack to complete
     c2.abort()
     assert c2.get(a_oid) is None
     c1.abort()
     assert c1.get(a_oid)._p_is_ghost()
     r2['b']['new'] = Persistent()
     r2['b']['new'].bogus = 1
     c2.commit()
     assert c2.get(a_oid) is r2['b']['new']
     c1.abort()
     assert c1.get(a_oid).__class__ == PersistentDict
     r1['b']['new'].bogus
     assert c1.get(a_oid).__class__ == Persistent
     s1.close()
Example #36
class durusCommit(object):
    "A class to simply commit a given data with date tuple as key"

    def __init__(self, folder, dbFileName):
        self.dbFileName = folder + '/' + dbFileName + '.durus'
        ##print 'Durus file name: ', self.dbFileName
        self.con = Connection(FileStorage(self.dbFileName))

    def getRoot(self):
        return self.con.get_root()

    def connectionCommit(self):
        self.con.commit()

    def makeChapterCommit(self, currentdate, book, chapterDict):
        dateKey = ()
        #print 'Book name: ',book
        if isinstance(currentdate, tuple):
            dateKey = currentdate
        if isinstance(currentdate, datetime.date):
            dateKey = tuple(currentdate.strftime('%Y-%m-%d').split('-'))
        #print 'Datekey : ',dateKey

        root = self.con.get_root()
        if not book in root:
            root[book] = PersistentDict()
            self.con.commit()
        root[book][dateKey] = chapterDict
        self.con.commit()
Example #37
def main():
    from shutil import copyfile
    from os.path import exists

    def usage():
        sys.stdout.write(
            "Usage: python %s <existing_file> <new_file>\n" % sys.argv[0])
        sys.stdout.write("  Creates a new py3k-compatible file ")
        sys.stdout.write("from an existing FileStorage file.\n")
        raise SystemExit

    if len(sys.argv) != 3:
        usage()
    infile = sys.argv[1]
    outfile = sys.argv[2]
    if not exists(infile):
        usage()
    if exists(outfile):
        if input('overwrite %r? [y/N] ' % outfile).strip().lower() != 'y':
            raise SystemExit
    copyfile(infile, outfile)

    # monkey patch pickler class, must be done before importing durus stuff
    patch_pickler()

    from durus.__main__ import get_storage_class
    from durus.connection import Connection

    storage_class = get_storage_class(outfile)
    storage = storage_class(outfile)
    connection = Connection(storage)
    print ("Converting %s for use with py3k." % outfile)
    for j, x in enumerate(connection.get_crawler()):
        x._p_note_change()
        if j > 0 and j % 10000 == 0:
            print(j)
            connection.commit()
    print(j)
    connection.commit()
    connection.pack()
Example #38
class DurusFile(object):
    def __init__(self, file, new):
        self.__file = file
        if new:
            if os.path.exists(self.__file):
                os.remove(self.__file)
        self.__connection = Connection(FileStorage(self.__file))
        self.__root = self.__connection.get_root()

    def close(self):
        self.__connection.get_storage().close()

    def getBudget(self):
        if self.__root.has_key("baseversion") and \
           globalVars.baseversion == self.__root["baseversion"]:
            return self.__root["budget"]
        else:
            print _("Incorrent Base version")
            return None

    def setBudget(self, budget):
        self.__root["budget"] = budget
        self.__root["baseversion"] = globalVars.baseversion
        self.__connection.commit()
Example #39
 def check_oid_reuse_with_invalidation(self):
     connection = Connection(ClientStorage(address=self.address))
     root = connection.get_root()
     root['x'] = Persistent()
     connection.commit()
     connection = Connection(ClientStorage(address=self.address))
     root = connection.get_root()
     root['x'] = Persistent()
     connection.commit()
     connection.pack()
     sleep(1) # Make sure pack finishes.
     connection = Connection(ClientStorage(address=self.address))
     root = connection.get_root()
     root['x'] = Persistent()
     connection.commit()
Example #40
 def check_fine_conflict(self):
     c1 = Connection(self._get_storage())
     c2 = Connection(self._get_storage())
     c1.get_root()['A'] = Persistent()
     c1.get_root()['A'].a = 1
     c1.get_root()['B'] = Persistent()
     c1.commit()
     c2.abort()
     # c1 has A loaded.
     assert not c1.get_root()['A']._p_is_ghost()
     c1.get_root()['B'].b = 1
     c2.get_root()['A'].a = 2
     c2.commit()
     # Even though A has been changed by c2,
     # c1 has not accessed an attribute of A since
     # the last c1.commit(), so we don't want a ConflictError.
     c1.commit()
     assert c1.get_root()['A']._p_is_ghost()
     c1.get_root()['A'].a # accessed!
     c1.get_root()['B'].b = 1
     c2.get_root()['A'].a = 2
     c2.commit()
     raises(WriteConflictError, c1.commit)
Example #41
 def check_fine_conflict(self):
     c1 = Connection(self._get_storage())
     c2 = Connection(self._get_storage())
     c1.get_root()['A'] = Persistent()
     c1.get_root()['A'].a = 1
     c1.get_root()['B'] = Persistent()
     c1.commit()
     c2.abort()
     # c1 has A loaded.
     assert not c1.get_root()['A']._p_is_ghost()
     c1.get_root()['B'].b = 1
     c2.get_root()['A'].a = 2
     c2.commit()
     # Even though A has been changed by c2,
     # c1 has not accessed an attribute of A since
     # the last c1.commit(), so we don't want a ConflictError.
     c1.commit()
     assert c1.get_root()['A']._p_is_ghost()
     c1.get_root()['A'].a  # accessed!
     c1.get_root()['B'].b = 1
     c2.get_root()['A'].a = 2
     c2.commit()
     raises(WriteConflictError, c1.commit)
Example #42
 def a(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oids = [
         r['a0']._p_oid, r['a2']._p_oid, r['a7']._p_oid, r['a8']._p_oid
     ]
     del r['a0']
     del r['a2']
     del r['a7']
     del r['a8']
     c.commit()
     c.pack()
     c.abort()
     assert c.get(deleted_oids[0])._p_is_ghost()
     assert c.get(deleted_oids[1])._p_is_ghost()
     raises(KeyError, getattr, c.get(deleted_oids[0]), 'a')
     assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 7
     c.commit()
     c.pack()
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-1], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-2], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-3], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-4], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(11), repr(new_oid)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(12), repr(new_oid)
Example #43
 def c(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oid = r['a9']._p_oid
     del r['a9']
     c.commit()
     c.pack()
     c.abort()
     r.clear()
     c.commit()
     c.pack()
     c.abort()
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(1), repr(new_oid)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(2), repr(new_oid)
Example #44
 def c(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oid = r['a9']._p_oid
     del r['a9']
     c.commit()
     c.pack()
     c.abort()
     r.clear()
     c.commit()
     c.pack()
     c.abort()
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(1), repr(new_oid)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(2), repr(new_oid)
Example #45
 def a(self):
     f = File(prefix='shelftest')
     name = f.get_name()
     f.close()
     s = FileStorage(name)
     c = Connection(s)
     r = c.get_root()
     for x in range(10):
         r["a%s" % x] = Persistent()
         c.commit()
     deleted_oids = [
         r['a0']._p_oid, r['a2']._p_oid, r['a7']._p_oid, r['a8']._p_oid]
     del r['a0']
     del r['a2']
     del r['a7']
     del r['a8']
     c.commit()
     c.pack()
     c.abort()
     assert c.get(deleted_oids[0])._p_is_ghost()
     assert c.get(deleted_oids[1])._p_is_ghost()
     raises(ReadConflictError, getattr, c.get(deleted_oids[0]), 'a')
     assert len([repr(oid) for oid, record in s.gen_oid_record()]) == 7
     c.commit()
     c.pack()
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-1], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-2], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-3], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == deleted_oids[-4], (new_oid, deleted_oids)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(11), repr(new_oid)
     new_oid = s.new_oid()
     assert new_oid == int8_to_str(12), repr(new_oid)
Example #46
class tempVariantData:
    TICK_INTERVAL=10000
    COMMIT_FREQ=1000
    COMMIT=0
    def __init__(self, axisLabels, statisticLabels, forcedCategoricals, startingXattribute, startingYattribute):
        for fileToClear in ['Data.db','Data.db.lock','Data.db.tmp','Data.db.prepack']:
            if os.path.exists(fileToClear):
                os.remove(fileToClear)
        
        self.dataConnection = Connection(FileStorage("Data.db"))
        self.data = self.dataConnection.get_root()
        # Possible key:value pairs. This is a little bizarre, but I can only have one
        # database (the objects reference each other), and for performance I want to
        # keep all variant objects and axes at the root level:
        
        self.data['variant keys'] = set()
        
        self.axisLabels = axisLabels
        self.forcedCategoricals = forcedCategoricals
        self.statisticLabels = statisticLabels
        
        self.allAxes = self.defaultAxisOrder()
    
    def addVariant(self, variantObject):
        if variantObject.attributes.has_key('RSID'):
            variantObject.name = variantObject.attributes['RSID']
        if variantObject.name in self.data['variant keys']:
            self.data[variantObject.name].repair(variantObject)
        else:
            assert variantObject.name != 'variant keys'
            self.data['variant keys'].add(variantObject.name)
            self.data[variantObject.name] = variantObject
        
        tempVariantData.COMMIT += 1
        if tempVariantData.COMMIT >= tempVariantData.COMMIT_FREQ:
            tempVariantData.COMMIT = 0
            self.dataConnection.commit()
    
    def performGroupCalculations(self, groupDict, statisticDict, callback, tickInterval):
        from dataModels.setupData import statistic
        
        currentLine = 0
        nextTick = tickInterval
        
        targetAlleleGroups = {}
        for s in statisticDict.itervalues():
            if s.statisticType == statistic.ALLELE_FREQUENCY:
                if s.parameters.has_key('alleleGroup'):
                    index = s.parameters['alleleMode']
                    if index >= 1:
                        index -= 1  # they'll specify 1 as the most frequent, but we're in 0-based computer land; -1 is still the same though
                    targetAlleleGroups[s.parameters['alleleGroup']] = s.parameters['alleleMode']
                else:
                    targetAlleleGroups['vcf override'] = s.parameters['alleleMode']
        if len(targetAlleleGroups) == 0:    # nothing to calculate
            return
        
        for key in self.data.iterkeys():
            if key == 'variant keys':
                continue
            variantObject = self.data[key]
            currentLine += 1
            if currentLine >= nextTick:
                nextTick += tickInterval
                self.dataConnection.commit()
                if callback():  # abort?
                    return "ABORTED"
            
            if variantObject == None or variantObject.poisoned:
                continue
            
            # First find all the target alleles
            targetAlleles = {}
            for group,mode in targetAlleleGroups.iteritems():
                if group == 'vcf override':
                    alleles = variantObject.alleles
                else:
                    # First see if we can find a major allele with the people in basisGroup
                    alleleCounts = countingDict()
                    for i in groupDict[group].samples:
                        if variantObject.genotypes.has_key(i):
                            allele1 = variantObject.genotypes[i].allele1
                            allele2 = variantObject.genotypes[i].allele2
                            if allele1.text != None:
                                alleleCounts[allele1] += 1
                            if allele2.text != None:
                                alleleCounts[allele2] += 1
                    alleles = [x[0] for x in sorted(alleleCounts.iteritems(), key=lambda x: x[1])]
                
                if mode >= len(alleles) or mode < -len(alleles):
                    targetAlleles[group] = None
                else:
                    targetAlleles[group] = variantObject.alleles[mode]
            
            for statisticID,s in statisticDict.iteritems():
                targetAllele = targetAlleles[s.parameters.get('alleleGroup','vcf override')]
                if s.statisticType == statistic.ALLELE_FREQUENCY:
                    if targetAllele == None:
                        variantObject.setAttribute(statisticID,"Masked")    # the original group didn't have the allele, so we're masked
                        continue
                    
                    allCount = 0
                    targetCount = 0
                    
                    for i in groupDict[s.parameters['group']].samples:
                        if variantObject.genotypes.has_key(i):
                            allele1 = variantObject.genotypes[i].allele1
                            allele2 = variantObject.genotypes[i].allele2
                            if allele1 != None:
                                allCount += 1
                                if allele1 == targetAllele:
                                    targetCount += 1
                            if allele2 != None:
                                allCount += 1
                                if allele2 == targetAllele:
                                    targetCount += 1
                    if allCount == 0:
                        variantObject.setAttribute(statisticID,None)    # We had no data for this variant, so this thing is undefined
                    else:
                        variantObject.setAttribute(statisticID,float(targetCount)/allCount)
            self.data[variantObject.name].attributes = variantObject.attributes   # a way to force durus to acknowledge the change
            self.data._p_note_change()
            self.dataConnection.commit()
        self.dataConnection.commit()
    
    def estimateTicks(self):
        return len(self.allAxes)*len(self.data['variant keys'])/tempVariantData.TICK_INTERVAL
    
    def getFirstAxes(self):
        temp = self.defaultAxisOrder()
        if len(temp) < 2:
            raise ValueError("You should have at least two data attributes (columns in your .csv file or INFO fields in your .vcf file)\n" +
                             "to use this program (otherwise you probably should be using LibreOffice Calc or IGV to explore your data instead).")
        return (temp[0],temp[1])
    
    def defaultAxisOrder(self):
        # gives priority to axes (each subgroup is sorted alphabetically):
        # program-generated allele frequencies are first
        # numeric other values are next
        # categorical other values are last
        result = []
        for a in sorted(self.statisticLabels):
            result.append(a)
        for a in sorted(self.axisLabels):
            if not self.data.has_key(a):
                result.append(a)    # if we haven't built the axes yet, just add them in order; we'll worry about numeric, etc next time
            elif self.data[a].hasNumeric() and a not in self.statisticLabels:
                result.append(a)
        
        for a in sorted(self.axisLabels):
            if self.data.has_key(a) and not self.data[a].hasNumeric() and a not in self.statisticLabels:
                result.append(a)
        result.append('Genome Position')
        return result
    
    def dumpVcfFile(self, path, callback):
        self.dataConnection.commit()
        
        outfile = open(path,'w')
        outfile.write('##fileformat=VCFv4.0\n')
        outfile.write('##FILTER=<ID=s50,Description="Less than 50% of samples are fully called">\n')
        outfile.write('##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n')
        for a in self.allAxes:
            if a.startswith('FILTER') or a.startswith('QUAL') or a == 'Genome Position':
                continue
            outfile.write('##INFO=<ID=%s,Number=1,Type=Float,Description="%s">\n' % (a,a))
        outfile.write('#CHROM  POS     ID      REF     ALT     QUAL    FILTER  INFO    FORMAT\n')
        i = 0
        nextTick = tempVariantData.TICK_INTERVAL
        for k,v in self.data.iteritems():
            if k == 'variant keys':
                continue
            outfile.write(variantFile.composeVcfLine(v,{}) + "\n")
            i += 1
            if i > nextTick:
                nextTick += tempVariantData.TICK_INTERVAL
                if callback():
                    outfile.close()
                    return False
        outfile.close()
        return True
        
Example #47
class Session(object):
    """
    Representation of the game state.
    """    
    _persistent_attributes = (
        'scheduler',
        'started',
        'lastroom',
        'universe',
        'characters',
        'player',
        'debugging')
    # default values
    scheduler   = None      # Scheduler instance
    started     = False     # Is game started yet? (I.e. have player turns/actions begun)
    lastroom    = None      # Used to determine auto-placement of items
    universe    = None      # Top level container object (provides storage for entire game state)
    characters  = ()        # List of character agents (references into universe)
    player      = ()        # List of player character agents (normally only 1 in PUB)
    debugging   = False     # Debugging mode is for use during game development
    
    def __init__(self, storagefile="default.sav"):
        self.storage = Connection(FileStorage(storagefile))
        self.root    = self.storage.get_root()

        self.running = False

    def __setattr__(self, name, value):
        if name in self._persistent_attributes:
            self.root[name] = value
        else:
            object.__setattr__(self, name, value)

    def __getattribute__(self, name):
        persistent_attributes = object.__getattribute__(self, '_persistent_attributes')
        if name in persistent_attributes:
            try:
                return self.root[name]
            except KeyError:
                return getattr(self.__class__, name)
        else:
            return object.__getattribute__(self, name)

    def new_game(self):
        """
        Start up a new game (clear the storage instance).
        """
        self.scheduler  = None
        self.started    = True
        self.lastroom   = None
        self.universe   = None
        self.characters = None
        self.player     = None
        self.debugging  = False
        self.commit()
        self.pack()
        
    def commit(self):
        self.storage.commit()

    def abort(self):
        self.storage.abort()

    def pack(self):
        self.storage.pack()
Example #48
eliza = NamedObject('a bespectacled old lady', 'Eliza',
                    set(['old', 'lady', 'woman']), startroom)
eliza.addDelegate(ChattyDelegate(eliza))
startroom.add(eliza)

if os.access("mudlib.durus", os.F_OK):
    os.remove("mudlib.durus")

try:
    connection = Connection(FileStorage("mudlib.durus"))

    root = connection.get_root()

    # pylint: disable-msg= E1101,W0212
    #pylint doesn't know about our metaclass hackery, and complains about the
    #use of the leading underscore variables.
    root['startroom'] = startroom
    root['all_rooms'] = Room._instances
    root['all_objects'] = MUDObject._instances
    root['targettable_objects_by_name'] = NamedObject._name_registry
    root['ticker'] = Ticker(0.1)
    # pylint: enable-msg= E1101,W0212

    connection.commit()
except:
    connection.abort()
    #if os.access("mudlib.durus", os.F_OK):
    #    os.remove("mudlib.durus")
    raise
Example #49
class PersistentListTest(UTest):
    def _pre(self):
        self.connection = Connection(MemoryStorage())
        self.root = self.connection.get_root()

    def no_arbitrary_attributes(self):
        p = PersistentList()
        raises(AttributeError, setattr, p, 'bogus', 1)

    def nonzero(self):
        p = PersistentList()
        assert not p
        self.root['a'] = p
        self.connection.commit()
        p.append(1)
        assert p
        assert p._p_is_unsaved()

    def iter(self):
        p = PersistentList()
        assert list(p) == []
        p.extend([2, 3, 4])
        assert list(p) == [2, 3, 4]

    def insert_again(self):
        p = PersistentList([5, 6, 7])
        p[1] = 2
        p[1] = 3
        assert p[1] == 3

    def contains(self):
        p = PersistentList(x for x in interval(5))
        assert 2 in p
        assert -1 not in p

    def cmp(self):
        p = PersistentList(interval(10))
        p2 = PersistentList(interval(10))
        assert p == p2
        assert p == list(p2)
        assert p <= p2
        assert p >= p2
        assert not p < p2
        assert not p > p2
        p.append(3)
        assert p != p2

    def delete(self):
        p = PersistentList(x for x in interval(10))
        self.root['x'] = p
        self.connection.commit()
        del p[1]
        assert p._p_is_unsaved()

    def pop(self):
        p = PersistentList(x for x in interval(10))
        p.pop()
        assert 9 not in p

    def slice(self):
        p = PersistentList(x for x in interval(10))
        p[:] = [2, 3]
        assert len(p) == 2
        assert p[-1:] == [3]
        p[1:] = PersistentList(interval(2))
        assert p == [2, 0, 1], p.data
        p[:] = (3, 4)
        assert p == [3, 4]
        del p[:1]
        assert p == [4]

    def sort(self):
        p = PersistentList(x for x in interval(10))
        p.reverse()
        assert p == list(reversed(interval(10)))
        p = sorted(p)
        assert p == interval(10)

    def arith(self):
        p = PersistentList(interval(3))
        p2 = PersistentList(interval(3))
        assert p + p2 == interval(3) + interval(3)
        assert interval(3) + p2 == interval(3) + interval(3)
        assert tuple(interval(3)) + p2 == interval(3) + interval(3)
        assert p + interval(3) == interval(3) + interval(3)
        assert p + tuple(interval(3)) == interval(3) + interval(3)
        assert p * 2 == interval(3) + interval(3)
        p += p2
        assert p == interval(3) + interval(3)
        p2 += interval(3)
        assert p == interval(3) + interval(3)
        p = PersistentList(interval(3))
        p *= 2
        assert p == interval(3) + interval(3)

    def other(self):
        p = PersistentList()
        p.insert(0, 2)
        assert p == [2]
        assert p.count(0) == 0
        assert p.count(2) == 1
        assert p.index(2) == 0
        p.remove(2)
        p.extend(PersistentList(interval(3)))
        assert p == interval(3)
Example #50
class CUsers(Persistent):
    def __init__(self):
        # durus file storage
        self.conndurus = Connection(FileStorage(CONFIG['durus_file']))
        root = self.conndurus.get_root()
        
        if not root.get('users'):
            root['users'] = PersistentDict() # {user jid: CUser}
        if not root.get('feeds'):
            root['feeds'] = CFeeds()
        self.data = root['users']
        self.feeds = root['feeds']
        self.save()
        
        
    def save(self):
        self.conndurus.commit()
        
        
    def __getitem__(self, key):
        return self.data.get(key)
        
        
    def __len__(self):
        return len(self.data)
    
    
    def add_feed(self, jid, feed=None):
        """Add an user if not exists and subscribe the feed url, if not exists.
        """
        
        fn = True  # first notification?
        oku = okf = False  # defaults so the return values exist when no feed is given

        if not self.data.get(jid):
            self.data[jid] = CUser(jid)
        if not self.feeds.get(feed) and feed:
            self.feeds[feed] = CFeed(feed)
            fn = False

        if feed:
            oku = self.data[jid].subs_feed(self.feeds[feed], fn)
            okf = self.feeds[feed].add_user(self.data[jid])
            
        self.save()
        
        if feed:
            return oku and okf
        else:
            return oku
        
    
    def del_feed(self, jid, feed):
        """Delete an user subscription."""

        tempfeed = self.feeds.get(feed)
        tempuser = self.data.get(jid)
        
        if tempuser: oku = self.data[jid].unsubs_feed(tempfeed)
        else: oku = False
        if tempfeed: okf = self.feeds[feed].del_user(tempuser)
        else: okf = False
            
        self.save()
        
        return oku and okf
        
        
    def notification_method(self, jid):
        """Return 'how the user will receive the notifications'"""
        tempuser = self.data.get(jid)
        if tempuser:
            hl = tempuser.getConfig('useheadline')
            if not hl or hl == "on":
                return "by headlines"
            return "by chat message"
        else:
            return "-"
        
    def notification_when(self, jid):
        """Return 'when the user wants to receive notifications'"""
        tempuser = self.data.get(jid)
        if tempuser:
            oa = tempuser.getConfig('onlyavailable')
            if not oa or oa == "off":
                return "always"
            return "available only, or ready for chat"
        else:
            return "-"
        
    def len_feeds(self, jid):
        tempuser = self.data.get(jid)
        if tempuser:
            return str(len(tempuser))
        else:
            return "0"
            
            
    def setup(self, jid, action, mode):
        tempuser = self.data.get(jid)
        if not tempuser:
            tempuser = CUser(jid)
            self.data[jid] = tempuser
        
        tempuser.setup(action, mode)
        return True
        
    
    def get(self, key):
        return self.data.get(key)

    def keys(self):
        return self.data.keys()

    def values(self):
        return self.data.values()
Example #51
class TestPersistentList(object):

    def setup(self):
        self.connection = Connection(MemoryStorage())
        self.root = self.connection.get_root()

    def test_no_arbitrary_attributes(self):
        p = PersistentList()
        raises(AttributeError, setattr, p, 'bogus', 1)

    def test_nonzero(self):
        p = PersistentList()
        assert not p
        self.root['a'] = p
        self.connection.commit()
        p.append(1)
        assert p
        assert p._p_is_unsaved()

    def test_iter(self):
        p = PersistentList()
        assert list(p) == []
        p.extend([2,3,4])
        assert list(p) == [2,3,4]

    def test_insert_again(self):
        p = PersistentList([5,6,7])
        p[1] = 2
        p[1] = 3
        assert p[1] == 3

    def test_contains(self):
        p = PersistentList(x for x in interval(5))
        assert 2 in p
        assert -1 not in p

    def test_cmp(self):
        p = PersistentList(interval(10))
        p2 = PersistentList(interval(10))
        assert p == p2
        assert p == list(p2)
        assert p <= p2
        assert p >= p2
        assert not p < p2
        assert not p > p2
        p.append(3)
        assert p != p2

    def test_delete(self):
        p = PersistentList(x for x in interval(10))
        self.root['x'] = p
        self.connection.commit()
        del p[1]
        assert p._p_is_unsaved()

    def test_pop(self):
        p = PersistentList(x for x in interval(10))
        p.pop()
        assert 9 not in p

    def test_slice(self):
        p = PersistentList(x for x in interval(10))
        p[:] = [2,3]
        assert len(p) == 2
        assert p[-1:] == [3]
        p[1:] = PersistentList(interval(2))
        assert p == [2,0,1], p.data
        p[:] = (3,4)
        assert p == [3,4]
        del p[:1]
        assert p == [4]

    def test_sort(self):
        p = PersistentList(x for x in interval(10))
        p.reverse()
        assert p == list(reversed(interval(10)))
        p = sorted(p)
        assert p == interval(10)

    def test_arith(self):
        p = PersistentList(interval(3))
        p2 = PersistentList(interval(3))
        assert p + p2 == interval(3) + interval(3)
        assert interval(3) + p2 == interval(3) + interval(3)
        assert tuple(interval(3)) + p2 == interval(3) + interval(3)
        assert p + interval(3) == interval(3) + interval(3)
        assert p + tuple(interval(3)) == interval(3) + interval(3)
        assert p * 2 == interval(3) + interval(3)
        p += p2
        assert p == interval(3) + interval(3)
        p2 += interval(3)
        assert p == interval(3) + interval(3)
        p = PersistentList(interval(3))
        p *= 2
        assert p == interval(3) + interval(3)

    def test_other(self):
        p = PersistentList()
        p.insert(0, 2)
        assert p == [2]
        assert p.count(0) == 0
        assert p.count(2) == 1
        assert p.index(2) == 0
        p.remove(2)
        p.extend(PersistentList(interval(3)))
        assert p == interval(3)
Example #52
    from durus.connection import Connection
    from durus.file_storage import FileStorage
    from shutil import copyfile

    def usage():
        sys.stdout.write("Usage: python %s <existing_file> <new_file>\n" %
                         sys.argv[0])
        sys.stdout.write("  Creates a new py3k-compatible file ")
        sys.stdout.write("from an existing FileStorage file.\n")
        raise SystemExit

    if len(sys.argv) != 3:
        usage()
    from os.path import exists
    if not exists(sys.argv[1]):
        usage()
    if exists(sys.argv[2]):
        usage()
    copyfile(sys.argv[1], sys.argv[2])
    storage = FileStorage(sys.argv[2])
    connection = Connection(storage)
    print("Converting %s for use with py3k." % sys.argv[2])
    for j, x in enumerate(connection.get_crawler()):
        x._p_note_change()
        if j > 0 and j % 10000 == 0:
            print(j)
            connection.commit()
    print(j)
    connection.commit()
    connection.pack()