def __init__(self, key, proxy):
    PersistentMapping.__init__(self)
    self._p_oid = hash(key)
    self._p_jar = self  # we are our own data manager
    self._p_key = key
    self._p_proxy = proxy
    self._p_joined = False

def change_pools_autonaming_scheme(root, registry):  # pragma: no cover
    """Change pool autonaming scheme."""
    prefixes = _get_autonaming_prefixes(registry)
    catalogs = find_service(root, 'catalogs')
    pools = _search_for_interfaces(catalogs, (IPool, IFolder))
    count = len(pools)
    for index, pool in enumerate(pools):
        logger.info('Migrating {0} of {1}: {2}'.format(index + 1, count, pool))
        if not pool:
            continue
        if hasattr(pool, '_autoname_last'):
            pool._autoname_lasts = PersistentMapping()
            for prefix in prefixes:
                pool._autoname_lasts[prefix] = Length(pool._autoname_last + 1)
            del pool._autoname_last
        elif not hasattr(pool, '_autoname_lasts'):
            pool._autoname_lasts = PersistentMapping()
            for prefix in prefixes:
                pool._autoname_lasts[prefix] = Length()
        if hasattr(pool, '_autoname_lasts'):
            # convert int to Length
            for prefix in pool._autoname_lasts.keys():
                if isinstance(pool._autoname_lasts[prefix], int):
                    # a plain int has no .value attribute, so wrap it directly
                    pool._autoname_lasts[prefix] \
                        = Length(pool._autoname_lasts[prefix])
                elif isinstance(pool._autoname_lasts[prefix].value, Length):
                    # reset a Length accidentally nested inside a Length
                    pool._autoname_lasts[prefix] = Length(1)
            # convert dict to PersistentMapping
            if not isinstance(pool._autoname_lasts, PersistentMapping):
                pool._autoname_lasts = PersistentMapping(pool._autoname_lasts)

def __init__(self, name='', dsn='', echo=False, use_pool=True, pool_size=25,
             pool_recycle=-1, echo_pool=False, encoding='utf-8',
             convert_unicode=False, twophase=True, **kwargs):
    # pylint: disable=too-many-arguments
    self.name = name
    self.dsn = dsn
    self.echo = echo
    self.use_pool = use_pool
    self.pool_size = pool_size
    self.pool_recycle = pool_recycle
    self.echo_pool = echo_pool
    self.encoding = encoding
    self.convert_unicode = convert_unicode
    self.twophase = twophase
    self.kw = PersistentMapping()  # pylint: disable=invalid-name
    self.kw.update(kwargs)

def checkPackKeepNewObjects(self):
    # Packing should not remove objects created or modified after
    # the pack time, even if they are unreferenced.
    db = DB(self._storage)
    try:
        # add some data to be packed
        c = db.open()
        extra1 = PersistentMapping()
        c.add(extra1)
        extra2 = PersistentMapping()
        c.add(extra2)
        transaction.commit()

        # Choose the pack time
        now = packtime = time.time()
        while packtime <= now:
            time.sleep(0.1)
            packtime = time.time()
        while packtime == time.time():
            time.sleep(0.1)

        extra2.foo = "bar"
        extra3 = PersistentMapping()
        c.add(extra3)
        transaction.commit()

        self._storage.pack(packtime, referencesf)

        # extra1 should have been garbage collected
        self.assertRaises(KeyError,
                          self._storage.load, extra1._p_oid, "")
        # extra2 and extra3 should both still exist
        self._storage.load(extra2._p_oid, "")
        self._storage.load(extra3._p_oid, "")
    finally:
        db.close()

def _create_initial_state(self):
    # Given a set of referencing objects present at the beginning
    # of the pre-pack:
    #
    #      0      1    2    3
    # T1: root -> A -> B -> C
    #
    # If a new transaction is committed such that the graph becomes:
    #
    #      0      1
    # T2: root -> A
    #         \-> B -> D -> C
    #             2    4    3
    #
    # That is, C is no longer referenced from B but from a new object
    # D, and B is referenced not from A but from the root.
    txm = transaction.TransactionManager(explicit=True)
    conn = self.main_db.open(txm)
    txm.begin()
    A = conn.root.A = PersistentMapping()  # OID 0x1
    B = A['B'] = PersistentMapping()       # OID 0x2
    C = B['C'] = PersistentMapping()       # OID 0x3
    txm.commit()
    oids = {
        'A': A._p_oid,
        'B': B._p_oid,
        'C': C._p_oid,
    }
    conn.close()
    return oids

def __init__(self):
    self.filename = None
    self.package = None
    self.start_time = None
    self.end_time = None
    self._entities = PersistentMapping()
    """contains all entities in the form of (simplified):
    {
        class_name: (
            [entity, ...],
            Binary tree of all entities (key: primary_identifier)
        )
    }
    """
    self._events = OOBTree.TreeSet()
    """contains all events"""
    self._event_index = PersistentMapping()
    """contains indices for events in the form of
    {
        class_name: {
            key: Binary Tree {
                attribute_value: set(event, ...)
            }
        }
    }
    """
    self.plugins = {}
    self.hashes = {}

def _adaptWFDataForItemsAndMeetings(self):
    """We use PM default WFs, no more meetingcommunes(item)_workflow...
       Adapt:
       - workflow_history for items and meetings;
       - takenOverByInfos for items."""
    logger.info('Updating WF history of items and meetings to use new WF id...')
    wfTool = api.portal.get_tool('portal_workflow')
    catalog = api.portal.get_tool('portal_catalog')
    for cfg in self.tool.objectValues('MeetingConfig'):
        # this will especially call the part where we duplicate the WF
        # and apply WFAdaptations
        cfg.registerPortalTypes()
        itemWFId = cfg.getItemWorkflow()
        for brain in catalog(portal_type=(cfg.getItemTypeName(),
                                          cfg.getMeetingTypeName())):
            itemOrMeeting = brain.getObject()
            itemOrMeetingWFId = wfTool.getWorkflowsFor(itemOrMeeting)[0].getId()
            if itemOrMeetingWFId not in itemOrMeeting.workflow_history:
                wf_history_key = self._get_wh_key(itemOrMeeting)
                itemOrMeeting.workflow_history[itemOrMeetingWFId] = \
                    tuple(itemOrMeeting.workflow_history[wf_history_key])
                del itemOrMeeting.workflow_history[wf_history_key]
                # do this so change is persisted
                itemOrMeeting.workflow_history = itemOrMeeting.workflow_history
            else:
                # already migrated
                break
            if itemOrMeeting.__class__.__name__ == 'MeetingItem':
                takenOverByInfos = itemOrMeeting.takenOverByInfos.copy()
                newTakenOverByInfos = PersistentMapping()
                for k, v in takenOverByInfos.items():
                    wf_name, state = k.split('__wfstate__')
                    newTakenOverByInfos['{0}__wfstate__{1}'.format(
                        itemWFId, state)] = v
                if sorted(newTakenOverByInfos.keys()) != \
                        sorted(takenOverByInfos.keys()):
                    itemOrMeeting.takenOverByInfos = newTakenOverByInfos
    logger.info('Done.')

def __init__(self):
    """ """
    self.rankedNouns = PersistentMapping()
    self.rankedNPs = PersistentMapping()
    self.extractor = getUtility(ITermExtractor)
    self.friendlyTypes = PersistentList()

def __init__(self):
    PersistentMapping.__init__(self)
    self.blogs = IOBTree()
    self.categories = Categories()
    self.tags = Tags()

def checkPackOldUnreferenced(self):
    db = DB(self._storage)
    try:
        c1 = db.open()
        r1 = c1.root()
        r1['A'] = PersistentMapping()
        A_B = PersistentMapping()
        r1['A']['B'] = A_B
        transaction.get().note(u'add A then add B to A')
        transaction.commit()

        del r1['A']['B']
        transaction.get().note(u'remove B from A')
        transaction.commit()

        r1['A']['C'] = ''
        transaction.get().note(u'add C (non-persistent) to A')
        transaction.commit()

        packtime = c1._storage.lastTransactionInt()
        self._storage.pack(packtime, referencesf)

        # B should be gone, since nothing refers to it.
        with self.assertRaises(KeyError):
            __traceback_info__ = bytes8_to_int64(A_B._p_oid)
            self._storage.load(A_B._p_oid)
    finally:
        db.close()

def setVar(self, key, value):
    try:
        self.datadict[key] = value
    except AttributeError:
        self.datadict = PersistentMapping()
        self.datadict[key] = value
    self._v_modified = True

def tileCreated(tile, event):
    # avoid attribute acquisition
    context = aq_base(event.newParent)
    tile_id = event.newName
    if not context:
        return
    managerId = getManagerId(tile)
    new_tile = PersistentMapping()
    new_tile['tile_id'] = tile_id
    new_tile['tile_hidden'] = False
    new_tile['tile_style'] = ''
    try:
        tile_type = re.search('@@(.*?)/', tile.url).group(1)
    except AttributeError:
        tile_type = ''
    if tile_type:
        new_tile['tile_type'] = tile_type
    # store the tiles order in a persistent object attribute.
    if not getattr(context, 'tiles_list', {}):
        context.tiles_list = PersistentMapping()
    if managerId not in context.tiles_list:
        context.tiles_list[managerId] = PersistentList()
    context.tiles_list[managerId].append(new_tile)

def checkPackOldUnreferenced(self):
    db = DB(self._storage)
    try:
        c1 = db.open()
        r1 = c1.root()
        r1['A'] = PersistentMapping()
        B = PersistentMapping()
        r1['A']['B'] = B
        transaction.get().note('add A then add B to A')
        transaction.commit()

        del r1['A']['B']
        transaction.get().note('remove B from A')
        transaction.commit()

        r1['A']['C'] = ''
        transaction.get().note('add C to A')
        transaction.commit()

        now = packtime = time.time()
        while packtime <= now:
            packtime = time.time()
        self._storage.pack(packtime, referencesf)

        # B should be gone, since nothing refers to it.
        self.assertRaises(KeyError,
                          self._storage.load, B._p_oid, '')
    finally:
        db.close()

def create_root(storage, oid=z64, check_new=True):
    """
    Creates public or private root in storage.
    Root has the type PersistentMapping.

    :param storage: ZODB storage to create the root in
    :param str oid: Object id to give to the root (z64 is global root)
    :param bool check_new: If True, do nothing if the root exists
    """
    if check_new:
        try:
            storage.load(oid, '')
            return
        except KeyError:
            pass
    # Create the database's root in the storage if it doesn't exist
    from persistent.mapping import PersistentMapping
    root = PersistentMapping()
    # Manually create a pickle for the root to put in the storage.
    # The pickle must be in the special ZODB format.
    file = BytesIO()
    p = Pickler(file, _protocol)
    p.dump((root.__class__, None))
    p.dump(root.__getstate__())
    t = transaction.Transaction()
    t.description = 'initial database creation'
    storage.tpc_begin(t)
    storage.store(oid, None, file.getvalue(), '', t)
    storage.tpc_vote(t)
    storage.tpc_finish(t)

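# A hedged usage sketch for create_root above: it assumes a writable
# FileStorage and that z64 (the 8-byte zero OID of the global root) is
# imported from ZODB.utils; the 'Data.fs' path is illustrative only.
from ZODB.FileStorage import FileStorage
from ZODB.utils import z64

storage = FileStorage('Data.fs')
create_root(storage, oid=z64)  # no-op if the global root already exists
storage.close()
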
def _populate_root_and_mapping(self):
    """
    Creates the following structure in ``self._storage``::

        root.myobj1 = PersistentMapping()
        root.myobj1.key = PersistentMapping()
        root.myobj = 3

    Does this over several transactions. Returns the tid of the last
    time the root changed, the tid of ``root.myobj1`` (which is later
    than the root TID and which is current), and the database opened
    on the storage.
    """
    tx1 = transaction.TransactionManager()
    storage1 = self._storage
    db1 = self._closing(DB(storage1))
    c1 = db1.open(tx1)
    root = c1.root
    root().myobj1 = root.myobj1 = mapping = PersistentMapping()
    root().myobj = root.myobj = 1
    tx1.commit()
    c1._storage._cache.clear(load_persistent=False)

    c1._storage.poll_invalidations()
    root().myobj = root.myobj = 2
    tx1.commit()
    c1._storage._cache.clear(load_persistent=False)

    c1._storage.poll_invalidations()
    root().myobj = root.myobj = 3
    tx1.commit()
    root_tid = self.assert_oid_known(ROOT_OID, c1)
    c1._storage._cache.clear(load_persistent=False)

    # Now, mutate an object that's not the root
    # so that we get a new transaction after the root was
    # modified. This transaction will be included in
    # a persistent cache.
    c1._storage.poll_invalidations()
    root().myobj1.key = root.myobj1.key = PersistentMapping()
    mapping_oid = mapping._p_oid
    mapping_oid_int = bytes8_to_int64(mapping_oid)
    tx1.commit()
    mapping_tid = self.assert_oid_known(mapping_oid_int, c1)

    # self.assert_checkpoints(c1, (root_tid, root_tid))
    self.assert_oid_current(mapping_oid_int, c1)

    # the root is not in a delta
    self.assert_oid_not_known(ROOT_OID, c1)
    # Nor is it in the cache, because the Connection's
    # object cache still had the root and we were never
    # asked.
    self.assert_oid_not_cached(ROOT_OID, c1)
    # So let's get it in the cache with its current TID.
    c1._storage.load(z64)
    self.assert_cached_exact(ROOT_OID, root_tid, c1)

    c1.close()
    return root_tid, mapping_tid, db1

def __init__(self, content_id, data=None, **kwargs):
    if not data:
        data = {}
    PersistentMapping.__init__(self)
    Base.__init__(self, content_id, data=data)
    self._order = []

def db_setup_tickets(self, root):
    # Tickets
    root.ticket_pools = PersistentMapping()
    # Payments
    root.payments = PersistentMapping()
    # Queue
    root.queue = PersistentList()
    root.active = PersistentMapping()

def test_set_acl_set_resource_dirty():
    """Regression test."""
    from persistent.mapping import PersistentMapping
    from . import set_acl
    resource = PersistentMapping()
    resource._p_jar = Mock()  # make _p_changed property work
    set_acl(resource, [('Deny', 'role:creator', 'edit')])
    assert resource._p_changed is True

def __init__(self, request, id):
    base.Session.__init__(self, request, id)
    self.user = None
    minfo = info.HelperMaKaCInfo.getMaKaCInfoInstance()
    self.datadict = PersistentMapping()
    self._lang = minfo.getLang()
    self.datadict["ActiveTimezone"] = "LOCAL"

def __init__(self):
    self._setObject('IdCookers', IdCookerFolder('IdCookers', ''))
    DuplicatesCriteriaManager.__init__(self)
    # Add the local reference types registry
    self._reference_types = PersistentMapping()
    # Populate it initially with those types declared in config.py
    for ref_type in REFERENCE_TYPES:
        self._reference_types[ref_type] = None

def __init__(self, name):
    self.name = name
    # Player name -> Player
    self.players = PersistentMapping()
    # List of all matches for this game
    self.matches = PersistentList()
    # Whether to use average instead of sum-of-skill for this game
    self.use_average_team_skill = True

def test_persistent_mapping(self):
    value = PersistentMapping({'foo': 'bar'})
    self.assertEquals({u'foo': u'bar'}, json_compatible(value))
    self.assertEquals('{"foo": "bar"}',
                      json.dumps(json_compatible(value)))
    self.assertIsInstance(json_compatible(value.keys()[0]), unicode,
                          'Dict keys should be converted recursively.')
    self.assertIsInstance(json_compatible(value.values()[0]), unicode,
                          'Dict values should be converted recursively.')

def test_set_acl_set_resource_dirty():
    from . import set_acl
    from pyramid.security import Deny
    from persistent.mapping import PersistentMapping
    resource = PersistentMapping()
    resource._p_jar = Mock()  # make _p_changed property work
    set_acl(resource, [(Deny, 'role:creator', 'edit_comment')])
    assert resource._p_changed is True

def test_non_ascii_zoid(self):
    root = self.root
    for i in range(200):
        self.conn.add(PersistentMapping())
    root.x = PersistentMapping()
    self.commit()
    _ = self.load()
    _ = self.load()

def bootstrap(zodb_root):
    if 'my_zodb' not in zodb_root:
        root = Root('firstpyramid')
        root['users'] = PersistentMapping()
        root['images'] = PersistentMapping()
        zodb_root['my_zodb'] = root
        transaction.commit()
    return zodb_root['my_zodb']

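# A minimal sketch of wiring bootstrap() into a Pyramid root factory,
# assuming the pyramid_zodbconn add-on (get_connection is its documented
# API); Root and the 'my_zodb' key come from the snippet above.
from pyramid_zodbconn import get_connection

def root_factory(request):
    conn = get_connection(request)
    return bootstrap(conn.root())
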
def getVar(self, key):
    try:
        if self.datadict:
            pass
    except AttributeError:
        self.datadict = PersistentMapping()
        return None
    return self.datadict.get(key, None)

def __init__(self, crumb_name, owner, id):
    PersistentMapping.__init__(self)
    self.item_number = 0
    self.timestamp = datetime.now()
    self.id = id
    self.owner = owner
    # Name used in breadcrumbs
    self.crumb_name = crumb_name

def removeVar(self, key):
    try:
        if self.datadict:
            pass
    except AttributeError:
        self.datadict = PersistentMapping()
        return None
    if key in self.datadict:
        del self.datadict[key]

def __init__(self, id, title, config):
    PersistentMapping.__init__(self)
    self.title = title
    self.id = id
    self.set_config(config)
    # Import here to avoid an import dependency loop
    from ulearnhub.models.services import ServicesContainer
    self.services = ServicesContainer(self)

def __init__(self, filename='data/metadatad.db'):
    storage = FileStorage(filename)
    self.db = DB(storage)
    self.connection = self.db.open()
    self.root = self.connection.root()
    self.files = self.root.setdefault(File.table, PersistentMapping())
    self.tracks = self.root.setdefault(Track.table, PersistentMapping())
    self.artists = self.root.setdefault(Artist.table, PersistentMapping())
    self.roots = self.root.setdefault(Root.table, PersistentMapping())

def __init__(self,
             firstname='',
             lastname='',
             email='',
             phone='',
             extension='',
             fax='',
             department='',
             position='',
             organization='',
             location='',
             country='',
             websites=None,
             languages='',
             office='',
             room_no='',
             biography='',
             date_format=None,
             data=None,
             home_path=None,
             preferred_communities=None,
             ):
    super(Profile, self).__init__(data)
    self.firstname = firstname
    self.lastname = lastname
    self.email = email
    self.phone = phone
    self.fax = fax
    self.extension = extension
    self.department = department
    self.position = position
    self.organization = organization
    self.location = location
    if country not in countries.as_dict:
        country = 'XX'
    self.country = country
    if websites is not None:
        self.websites = websites
    self.languages = languages
    self.office = office
    self.room_no = room_no
    self.biography = biography
    if date_format not in cultures.as_dict:
        date_format = None
    self.date_format = date_format
    self.home_path = home_path
    self._alert_prefs = PersistentMapping()
    self._pending_alerts = Accumulator()
    self.categories = PersistentMapping()
    self.password_reset_key = None
    self.password_reset_time = None
    self.preferred_communities = preferred_communities
    self.last_login_time = None
    self.password_expiration_date = None
    self.last_passwords = None
    self.active_device = None

def __init__(self, id=None):
    Persistent.__init__(self)
    if id is not None:
        self.ID = id
    else:
        self.ID = uuid.uuid4()
    self.Name = ""
    self.Attributes = PersistentMapping()
    self.TitleAttr = None
    self.Graphics = None

def migration_infos(context):
    path = '/'.join(context.getPhysicalPath())
    purl = getToolByName(context, 'portal_url')
    pobj = purl.getPortalObject()
    annotations = IAnnotations(pobj)
    if PRODUCT not in annotations:
        annotations[PRODUCT] = PersistentMapping()
    if path not in annotations[PRODUCT]:
        annotations[PRODUCT][path] = PersistentMapping()
    return annotations[PRODUCT][path]

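# Hedged usage sketch: migration_infos() hands back a per-path
# PersistentMapping, so an upgrade step can record what it already did;
# the 'fields_migrated' key is purely illustrative.
infos = migration_infos(context)
if not infos.get('fields_migrated'):
    # ... run the actual migration here ...
    infos['fields_migrated'] = True  # persisted via the portal annotations
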
def __init__(self, name, parent, uid, gid, mode, mtime_ns):
    FileMeta.__init__(self, name, parent, uid, gid, mode, mtime_ns)
    self.adds = PersistentMapping()
    self.removes = PersistentMapping()
    # name => ((to_uid, to_gid, to_mode), (from_uid, from_gid, from_mode))
    self.meta_changes = PersistentMapping()
    self.subdirs = PersistentMapping()
    self.state = PatchedDirectory.EXISTING

def populate(self):
    transaction.begin()
    conn = self._db.open()
    root = conn.root()
    root['test'] = pm = PersistentMapping()
    for n in range(100):
        pm[n] = PersistentMapping({0: 100 - n})
    transaction.get().note('created test data')
    transaction.commit()
    conn.close()

def checkBackwardTimeTravelWithRevertWhenStale(self):
    # If revert_when_stale is true, when the database
    # connection is stale (such as through failover to an
    # asynchronous slave that is not fully up to date), the poller
    # should notice that backward time travel has occurred and
    # invalidate all objects that have changed in the interval.
    self._storage = self.make_storage(revert_when_stale=True)

    import os
    import shutil
    import tempfile
    from ZODB.FileStorage import FileStorage

    db = DB(self._storage)
    try:
        transaction.begin()
        c = db.open()
        r = c.root()
        r['alpha'] = PersistentMapping()
        transaction.commit()

        # To simulate failover to an out of date async slave, take
        # a snapshot of the database at this point, change some
        # object, then restore the database to its earlier state.
        d = tempfile.mkdtemp()
        try:
            transaction.begin()
            fs = FileStorage(os.path.join(d, 'Data.fs'))
            fs.copyTransactionsFrom(c._storage)

            r['beta'] = PersistentMapping()
            transaction.commit()
            self.assertTrue('beta' in r)

            c._storage.zap_all(reset_oid=False, slow=True)
            c._storage.copyTransactionsFrom(fs)
            fs.close()
        finally:
            shutil.rmtree(d)

        # r should still be in the cache.
        self.assertTrue('beta' in r)

        # Now sync, which will call poll_invalidations().
        c.sync()

        # r should have been invalidated
        self.assertEqual(r._p_changed, None)

        # r should be reverted to its earlier state.
        self.assertFalse('beta' in r)
    finally:
        db.close()

def __init__(self): self.name = "New Method" self.short_name = "New Method" self.description = "" self.__name__ = Coding().generateUniqueCode(short=True, withdash=False) self.settings = PersistentMapping() self.enabled = False self.public = True self.deadlined = False self.transaction_properties = PersistentMapping() self.groups = PersistentList()
def __init__(self, log):
    # self.start_time = log['start_time']
    # self.players = log['players']
    # self.units = log['units']
    # self.grid = log['grid']
    # self['init_locs'] = log['init_locs']
    PersistentMapping.__init__(self,
                               init_locs=log['init_locs'],
                               start_time=log['start_time'],
                               units=log['units'],
                               grid=log['grid'],
                               owners=log['owners'])

def _data(self):
    """Return dictionary to store data."""
    sheets_data = getattr(self.context, '_sheets', None)
    if sheets_data is None:
        sheets_data = PersistentMapping()
        setattr(self.context, '_sheets', sheets_data)
    data = sheets_data.get(self._data_key, None)
    if data is None:
        data = PersistentMapping()
        sheets_data[self._data_key] = data
    return data

def __setitem__(self, key, value):
    if key in self.data:
        raise TypeError("Can't update key in AppendOnlyDict!")
    if isinstance(value, (dict, list)):
        raise TypeError("Can't add non-persistent mutable subobjects!")
    if not PY3:
        from types import InstanceType
        if type(value) is InstanceType:
            if not isinstance(value, Persistent):
                raise TypeError(
                    "Can't add non-persistent mutable subobjects!")
    PersistentMapping.__setitem__(self, key, value)

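# Illustrative behavior of the guard above, assuming AppendOnlyDict is the
# PersistentMapping subclass this __setitem__ belongs to.
d = AppendOnlyDict()
d['a'] = 1           # first write to a key is allowed
try:
    d['a'] = 2       # rewriting an existing key is rejected
except TypeError:
    pass
try:
    d['b'] = {}      # plain dicts/lists are rejected as mutable subobjects
except TypeError:
    pass
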
def record_change(self, context, _userid):
    if self.new:
        key = self.make_key(context)
        data = self.data
        context['gallery'][key] = PersistentMapping(data)
        context['gallery'][key].order = self.order
    else:
        key = self.key
        if self.delete:
            del context['gallery'][key]
        else:
            context['gallery'][key].order = self.order

def __init__(self, settings):
    self.users = PersistentMapping()
    self.groups = PersistentMapping()
    self.activation = PersistentMapping()
    admin, pwd = settings.get('pycms.admin_user', "admin:admin").split(":")
    self.users['admin'] = User(admin, "Administrator", "", pwd)
    self.groups['admin'] = Group('admin', users=PersistentList(['admin']))
    self.groups['viewers'] = Group('viewers')
    self.groups['editors'] = Group('editors')

def persist(data):
    if isinstance(data, dict):
        data = PersistentMapping(data)
        for key, value in data.items():
            data[key] = persist(value)
    elif isinstance(data, list):
        return PersistentList(map(persist, data))
    else:
        # Usually we get basestrings or integers here, so do nothing.
        pass
    return data

def make_persistent(data):
    if isinstance(data, dict):
        new = PersistentMapping()
        for key, value in data.items():
            new[make_persistent(key)] = make_persistent(value)
        return new
    elif isinstance(data, list):
        new = PersistentList()
        for value in data:
            new.append(make_persistent(value))
        return new
    else:
        return data

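# A short demonstration of make_persistent (the same idea applies to the
# persist() variant above), assuming the imports the snippets rely on
# (PersistentMapping, PersistentList).
data = {'tags': ['a', 'b'], 'meta': {'count': 1}}
pdata = make_persistent(data)
assert isinstance(pdata, PersistentMapping)
assert isinstance(pdata['tags'], PersistentList)
assert isinstance(pdata['meta'], PersistentMapping)
assert pdata['meta']['count'] == 1  # leaf values pass through unchanged
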
def populate(self, db_factory):
    self._guarantee_min_random_data(self.objects_per_txn)
    db = db_factory()
    conn = db.open()
    root = conn.root()

    # clear the database
    root['speedtest'] = None
    # We explicitly leave the `speedtest_min` value around
    # so that it can survive packs.
    transaction.commit()
    db.pack()

    # Make sure the minimum objects are present
    if self.min_object_count:
        # not all storages support __len__ to return the size of the
        # database. FileStorage, RelStorage and ClientStorage do.
        db_count = max(len(db.storage), len(conn._storage))
        needed = max(self.min_object_count - db_count, 0)
        if needed:
            logger.debug("Adding %d objects to a DB of size %d",
                         needed, db_count)
            # We append the new objects to a list. This makes sure that
            # we don't *erase* some objects we were counting on.
            l = root.get('speedtest_min')
            if l is None:
                l = root['speedtest_min'] = PersistentList()

            # If `needed` is large, this could result in a single
            # very large transaction. Do we need to think about
            # splitting it up?
            m = PersistentMapping()
            m.update(self.data_to_store(needed))
            l.append(m)
            transaction.commit()
            logger.debug("Added %d objects to a DB of size %d",
                         len(m), db_count)
        else:
            logger.debug("Database is already of size %s", db_count)

    # put a tree in the database
    root['speedtest'] = t = self.MappingType()
    for i in range(self.concurrency):
        t[i] = self.MappingType()
    transaction.commit()
    conn.close()
    db.close()
    logger.debug('Populated storage.')

def __init__(self, context):
    self.ideal_wrapper = getUtility(IMollieIdeal)
    annotations = IAnnotations(context)
    self._metadata = annotations.get(IDEAL_PAYMENT_ANNOTATION_KEY, None)
    if self._metadata is None:
        self._metadata = PersistentMapping()
        annotations[IDEAL_PAYMENT_ANNOTATION_KEY] = self._metadata

def __setitem__(self, name, index):
    """ Add an object which implements
    ``repoze.catalog.interfaces.ICatalogIndex`` to the catalog.
    No other type of object may be added to a catalog."""
    if not ICatalogIndex.providedBy(index):
        raise ValueError('%s does not provide ICatalogIndex' % index)
    return PersistentMapping.__setitem__(self, name, index)

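# Hedged sketch of the contract enforced above, assuming repoze.catalog's
# stock index classes; 'title' is an illustrative attribute name.
from repoze.catalog.indexes.field import CatalogFieldIndex

catalog['title'] = CatalogFieldIndex('title')  # provides ICatalogIndex
try:
    catalog['bad'] = {}  # not an ICatalogIndex, so rejected
except ValueError:
    pass
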
def __init__(
    self,
    id,
    title=u"",
    input_type="text",
    python_type="ustring",
    extra_js=u"",
    extra_css=u"",
    mandatory=False,
    options=None,  # avoid a shared mutable default argument
    options_expression="",
    visibility_expression="",
    include_in_filter_options=False,
):
    self.id = str(id)
    self.title = title
    self.input_type = input_type
    self.python_type = python_type
    self.attributes = PersistentMapping()
    self.extra_css = extra_css
    self.extra_js = extra_js
    self.mandatory = mandatory
    self.options = options if options is not None else []
    self.options_expression = options_expression
    self.disabled = False
    self.visibility_expression = visibility_expression
    self.include_in_filter_options = include_in_filter_options

def upgrade_carousel_tiles_custom_url(context):
    # Get covers
    covers = context.portal_catalog(portal_type='collective.cover.content')
    logger.info('About to update %s covers' % len(covers))
    for cover in covers:
        obj = cover.getObject()
        tile_ids = obj.list_tiles(types=[
            u'collective.cover.carousel',
            u'collective.cover.list'
        ])
        for tile_id in tile_ids:
            tile = obj.get_tile(tile_id)
            old_data = ITileDataManager(tile).get()
            uuids = old_data['uuids']

            if isinstance(uuids, PersistentMapping):
                # This tile is fixed, carry on
                logger.info(
                    'Tile %s at %s was already updated'
                    % (tile_id, cover.getPath())
                )
                continue

            if not uuids:
                # This tile did not have data, so ignore
                logger.info(
                    'Tile %s at %s did not have any data'
                    % (tile_id, cover.getPath())
                )
                continue

            new_data = PersistentMapping()
            order = 0
            for uuid in uuids:
                if uuid not in new_data.keys():
                    entry = PersistentMapping()
                    entry[u'order'] = unicode(order)
                    new_data[uuid] = entry
                    order += 1
            old_data['uuids'] = new_data
            ITileDataManager(tile).set(old_data)
            logger.info(
                'Tile %s at %s updated' % (tile_id, cover.getPath())
            )
    logger.info('Done')

def __init__(self):
    self.subject = None
    self.signature = None
    self.comment_edited_text = None
    self.comment_deleted_text = None
    self.subscription_comment_added_text = None
    self.subscription_comment_edited_text = None
    self.thread_moved_text = None
    self._salutations = PersistentMapping()

class MollieIdealMultiplePayments(object):
    implements(IMollieIdealMultiplePayments)
    adapts(IAttributeAnnotatable)

    def __init__(self, context):
        self.ideal_wrapper = getUtility(IMollieIdeal)
        annotations = IAnnotations(context)
        self._metadata = annotations.get(
            IDEAL_MULTIPLE_PAYMENTS_ANNOTATION_KEY, None)
        if self._metadata is None:
            self._metadata = PersistentMapping()
            annotations[IDEAL_MULTIPLE_PAYMENTS_ANNOTATION_KEY] = \
                self._metadata

    # Methods

    def get_banks(self):
        return self.ideal_wrapper.get_banks()

    def get_payment_url(self, partner_id, bank_id, amount, message,
                        report_url, return_url, profile_key=None):
        transaction_id, url = self.ideal_wrapper.request_payment(
            partner_id, bank_id, amount, message,
            report_url, return_url, profile_key)
        self._metadata[transaction_id] = {
            'partner_id': partner_id,
            'profile_key': profile_key,
            'amount': amount,
            'last_update': DateTime(),
            'currency': None,
            'status': None,
            'paid': None,
            'consumer': {},
            'last_status': None,
        }
        return transaction_id, url

    def get_transaction(self, transaction_id):
        transaction = self._metadata.get(transaction_id)
        if transaction is None:
            raise UnknownTransactionError
        return transaction

    def get_payment_status(self, transaction_id):
        transaction = self.get_transaction(transaction_id)
        order_info = self.ideal_wrapper.check_payment(
            transaction['partner_id'], transaction_id)
        if order_info['status'] != 'CheckedBefore':
            # Only store the main info the first time.
            transaction['currency'] = order_info['currency']
            transaction['paid'] = order_info['paid']
            transaction['consumer'] = order_info.get('consumer')
            transaction['status'] = order_info['status']
        transaction['last_status'] = order_info['status']
        transaction['last_update'] = DateTime()
        return transaction['last_status']

def __init__(
    self,
    firstname="",
    lastname="",
    email="",
    phone="",
    extension="",
    fax="",
    department="",
    position="",
    organization="",
    location="",
    country="",
    websites=None,
    languages="",
    office="",
    room_no="",
    biography="",
    data=None,
    home_path=None,
    preferred_communities=None,
    dob=None,
    gender="",
):
    super(Profile, self).__init__(data)
    self.firstname = firstname
    self.lastname = lastname
    self.email = email
    self.phone = phone
    self.fax = fax
    self.extension = extension
    self.department = department
    self.position = position
    self.organization = organization
    self.location = location
    if country not in countries.as_dict:
        country = "XX"
    self.country = country
    self.websites = websites or ()
    self.languages = languages
    self.office = office
    self.room_no = room_no
    self.biography = biography
    self.home_path = home_path
    self._alert_prefs = PersistentMapping()
    self._pending_alerts = PersistentList()
    self.categories = PersistentMapping()
    self.password_reset_key = None
    self.password_reset_time = None
    self.preferred_communities = preferred_communities
    self.last_login_time = None
    # states are
    # 1. inactive - user has become inactive rather than deleted
    #    from the system.
    # 2. active - registered with an invite email which creates
    #    the profile
    self.security_state = "active"
    self.dob = dob
    self.gender = gender