def get_non_persistent_object(self, state, obj):
    if '_py_constant' in state:
        klass = self.simple_resolve(state['_py_constant'])
        return klass

    # this method must NOT change the passed in state dict
    state = dict(state)

    if '_py_type' in state:
        # Handle the simplified case.
        klass = self.simple_resolve(state.pop('_py_type'))
        sub_obj = copy_reg._reconstructor(klass, object, None)
    elif interfaces.ATTR_NAME_PY_TYPE in state:
        # Another simple case for persistent objects that do not want
        # their own document.
        klass = self.simple_resolve(state.pop(interfaces.ATTR_NAME_PY_TYPE))
        sub_obj = copy_reg.__newobj__(klass)
    else:
        factory = self.simple_resolve(state.pop('_py_factory'))
        factory_args = self.get_object(state.pop('_py_factory_args'), obj)
        sub_obj = factory(*factory_args)

    if len(state):
        sub_obj_state = self.get_object(state, sub_obj)
        if hasattr(sub_obj, '__setstate__'):
            sub_obj.__setstate__(dict(sub_obj_state))
        else:
            sub_obj.__dict__.update(sub_obj_state)

    if isinstance(sub_obj, persistent.Persistent):
        # This is a persistent sub-object -- mark it as such. Otherwise
        # we risk storing this object in its own table next time.
        setattr(sub_obj, interfaces.ATTR_NAME_SUB_OBJECT, True)

    if getattr(sub_obj, interfaces.ATTR_NAME_SUB_OBJECT, False):
        setattr(sub_obj, interfaces.ATTR_NAME_DOC_OBJECT, obj)
        sub_obj._p_jar = self._jar
        sub_obj._p_oid = 1  # set fake oid (needed for updating _p_state, _p_changed)

    return sub_obj

def get_non_persistent_object(self, state, obj):
    if '_py_constant' in state:
        return self.simple_resolve(state['_py_constant'])

    # this method must NOT change the passed in state dict
    state = dict(state)

    if '_py_type' in state:
        # Handle the simplified case.
        klass = self.simple_resolve(state.pop('_py_type'))
        sub_obj = copyreg._reconstructor(klass, object, None)
        self._set_object_state(state, sub_obj, obj)
    elif interfaces.PY_TYPE_ATTR_NAME in state:
        # Another simple case for persistent objects that do not want
        # their own document.
        klass = self.simple_resolve(state.pop(
            interfaces.PY_TYPE_ATTR_NAME))
        sub_obj = copyreg.__newobj__(klass)
        self._set_object_state(state, sub_obj, obj)
    else:
        factory = self.simple_resolve(state.pop('_py_factory'))
        factory_args = self.get_object(state.pop('_py_factory_args'), obj)
        sub_obj = factory(*factory_args)
        # if there is anything left over in `state`, set it below
        # otherwise setting a {} state seems to clean out the object
        # but this is such an edge case of an edge case....
        if state:
            self._set_object_state(state, sub_obj, obj)

    if getattr(sub_obj, interfaces.SUB_OBJECT_ATTR_NAME, False):
        setattr(sub_obj, interfaces.DOC_OBJECT_ATTR_NAME, obj)
        sub_obj._p_jar = self._jar

    return sub_obj

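# Illustrative sketch, not part of the original code: the three state-dict
# shapes that get_non_persistent_object() above dispatches on. The dotted
# names and attribute values below are hypothetical.
state_with_type = {
    '_py_type': 'mypackage.shapes.Point',       # resolved via simple_resolve()
    'x': 1, 'y': 2,                             # leftover keys become attribute state
}
state_with_factory = {
    '_py_factory': 'mypackage.shapes.make_point',   # factory callable to resolve
    '_py_factory_args': [1, 2],                     # deserialized, then passed as *args
}
state_with_constant = {
    '_py_constant': 'mypackage.shapes.ORIGIN',      # resolved and returned unchanged
}
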
def set_ghost_state(self, obj, doc=None):
    __traceback_info__ = (obj, doc)
    # Look up the object state by coll_name and oid.
    if doc is None:
        coll = self._jar.get_collection(
            obj._p_oid.database, obj._p_oid.collection)
        doc = coll.find_one({'_id': obj._p_oid.id})
    # Check that we really have a state doc now.
    if doc is None:
        raise ImportError(obj._p_oid)
    # Create a copy of the doc, so that we can modify it.
    state_doc = copy.deepcopy(doc)
    # Remove unwanted attributes.
    state_doc.pop('_id')
    state_doc.pop('_py_persistent_type', None)
    # Allow the conflict handler to modify the object or state document
    # before it is set on the object.
    self._jar.conflict_handler.on_before_set_state(obj, state_doc)
    # Now convert the document to a proper Python state dict.
    state = dict(self.get_object(state_doc, obj))
    # Now store the original state. It is assumed that the state dict is
    # not modified later.
    # Make sure that we never set the original state multiple times, even
    # if reassigning the state within the same transaction. Otherwise we
    # can never fully undo a transaction.
    if obj._p_oid not in self._jar._original_states:
        self._jar._original_states[obj._p_oid] = doc
        # Sometimes this method is called to update the object state
        # before storage. Only update the latest states when the object is
        # originally loaded.
        self._jar._latest_states[obj._p_oid] = doc
    # Set the state.
    obj.__setstate__(state)

def set_ghost_state(self, obj, doc=None):
    __traceback_info__ = (obj, doc)
    # Look up the object state by coll_name and oid.
    if doc is None:
        coll = self._jar.get_collection(
            obj._p_oid.database, obj._p_oid.collection)
        doc = coll.find_one({'_id': obj._p_oid.id})
    # Check that we really have a state doc now.
    if doc is None:
        raise ImportError(obj._p_oid)
    # Create a copy of the doc, so that we can modify it.
    state_doc = copy.deepcopy(doc)
    # Remove unwanted attributes.
    state_doc.pop('_id')
    state_doc.pop('_py_persistent_type', None)
    # Allow the conflict handler to modify the object or state document
    # before it is set on the object.
    self._jar.conflict_handler.on_before_set_state(obj, state_doc)
    # Now convert the document to a proper Python state dict.
    state = dict(self.get_object(state_doc, obj))
    # Now store the original state. It is assumed that the state dict is
    # not modified later.
    # Make sure that we never set the original state multiple times, even
    # if reassigning the state within the same transaction. Otherwise we
    # can never fully undo a transaction.
    if obj._p_oid not in self._jar._original_states:
        self._jar._original_states[obj._p_oid] = doc
        # Sometimes this method is called to update the object state
        # before storage. Only update the latest states when the object is
        # originally loaded.
        self._jar._latest_states[obj._p_oid] = doc
    # Set the state.
    obj.__setstate__(state)

def set_ghost_state(self, obj, doc=None):
    __traceback_info__ = (obj, doc)
    # Check whether the object state was stored on the object itself.
    if doc is None:
        doc = getattr(obj, interfaces.STATE_ATTR_NAME, None)
    # Look up the object state by table_name and oid.
    if doc is None:
        doc = self._jar._get_doc_by_dbref(obj._p_oid)
    # Check that we really have a state doc now.
    if doc is None:
        raise ImportError(obj._p_oid)
    # Remove unwanted attributes.
    doc.pop(interfaces.PY_TYPE_ATTR_NAME, None)
    # Now convert the document to a proper Python state dict.
    state = dict(self.get_object(doc, obj))
    if obj._p_oid not in self._jar._latest_states:
        # Sometimes this method is called to update the object state
        # before storage. Only update the latest states when the object is
        # originally loaded.
        self._jar._latest_states[obj._p_oid] = doc
    # Set the state.
    obj.__setstate__(state)
    # Run the custom load functions.
    if interfaces.IPersistentSerializationHooks.providedBy(obj):
        obj._pj_after_load_hook(self._jar._conn)

def get_state(self, obj, seen=None):
    seen = seen or []
    if isinstance(obj, interfaces.MONGO_NATIVE_TYPES):
        # If we have a native type, we'll just use it as the state.
        return obj
    if isinstance(obj, str):
        # In Python 2, strings can be ASCII, encoded unicode or binary
        # data. Unfortunately, BSON cannot handle that. So, if we have a
        # string that cannot be UTF-8 decoded (luckily ASCII is a valid
        # subset of UTF-8), then we use the BSON binary type.
        try:
            obj.decode('utf-8')
            return obj
        except UnicodeError:
            return bson.binary.Binary(obj)

    # Some objects might not naturally serialize well and create a very
    # ugly Mongo entry. Thus, we allow custom serializers to be
    # registered, which can encode/decode different types of objects.
    for serializer in SERIALIZERS:
        if serializer.can_write(obj):
            return serializer.write(obj)

    if isinstance(obj, (type, types.ClassType)):
        # We frequently store class and function paths as meta-data, so we
        # need to be able to properly encode those.
        return {'_py_type': 'type',
                'path': get_dotted_name(obj)}

    if isinstance(obj, (tuple, list, PersistentList)):
        # Make sure that all values within a list are serialized
        # correctly. Also convert any sequence-type to a simple list.
        return [self.get_state(value, seen) for value in obj]

    if isinstance(obj, (dict, PersistentDict)):
        # Same as for sequences, make sure that the contained values are
        # properly serialized.
        # Note: A big constraint in Mongo is that keys must be strings!
        has_non_string_key = False
        data = []
        for key, value in obj.items():
            data.append((key, self.get_state(value, seen)))
            has_non_string_key |= not isinstance(key, basestring)
        if not has_non_string_key:
            # The easy case: all keys are strings:
            return dict(data)
        else:
            # We first need to reduce the keys and then produce a data
            # structure.
            data = [(self.get_state(key), value) for key, value in data]
            return {'dict_data': data}

    if isinstance(obj, persistent.Persistent):
        # Only create a persistent reference, if the object does not want
        # to be a sub-document.
        if not getattr(obj, '_p_mongo_sub_object', False):
            return self.get_persistent_state(obj, seen)
        # This persistent object is a sub-document, so it is treated like
        # a non-persistent object.

    return self.get_non_persistent_state(obj, seen)

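# Hedged illustration, not part of the original code: the dict handling in
# get_state() above. A dict whose keys are all strings passes through as-is,
# while a dict with a non-string key (illegal in BSON) is rewritten into the
# 'dict_data' form with the keys themselves reduced to serializable states.
python_value = {1: 'one'}
expected_state = {'dict_data': [(1, 'one')]}
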
def back_link(self):
    url = self.request.form.get('referer')
    if not url:
        addview = aq_parent(aq_inner(self.context))
        context = aq_parent(aq_inner(addview))
        url = str(component.getMultiAdapter(
            (context, self.request),
            name=u"absolute_url")) + '/@@manage-portlets'
    return dict(url=url, label=_(u"Back to portlets"))

def get_object(self, state, obj):
    if isinstance(state, bson.objectid.ObjectId):
        # The object id is special. Preserve it.
        return state
    if isinstance(state, bson.binary.Binary):
        # Binary data in Python 2 is presented as a string. We will
        # convert back to binary when serializing again.
        return str(state)
    if isinstance(state, bson.dbref.DBRef):
        # Load a persistent object. Using the get_ghost() method, so that
        # caching is properly applied.
        return self.get_ghost(state)
    if isinstance(state, dict) and state.get('_py_type') == 'type':
        # Convert a simple object reference, mostly classes.
        return self.simple_resolve(state['path'])

    # Give the custom serializers a chance to weigh in.
    for serializer in SERIALIZERS:
        if serializer.can_read(state):
            return serializer.read(state)

    if isinstance(state, dict) and (
            '_py_factory' in state
            or '_py_constant' in state
            or '_py_type' in state
            or '_py_persistent_type' in state):
        # Load a non-persistent object.
        return self.get_non_persistent_object(state, obj)

    if isinstance(state, (tuple, list)):
        # All lists are converted to persistent lists, so that their state
        # changes are noticed. Also make sure that all value states are
        # converted to objects.
        sub_obj = [self.get_object(value, obj) for value in state]
        if self.preferPersistent:
            sub_obj = PersistentList(sub_obj)
            sub_obj._p_mongo_doc_object = obj
            sub_obj._p_jar = self._jar
        return sub_obj

    if isinstance(state, dict):
        # All dictionaries are converted to persistent dictionaries, so
        # that state changes are detected. Also convert all value states
        # to objects.
        # Handle non-string key dicts.
        if 'dict_data' in state:
            items = state['dict_data']
        else:
            items = state.items()
        sub_obj = dict(
            [(self.get_object(name, obj), self.get_object(value, obj))
             for name, value in items])
        if self.preferPersistent:
            sub_obj = PersistentDict(sub_obj)
            sub_obj._p_mongo_doc_object = obj
            sub_obj._p_jar = self._jar
        return sub_obj

    return state

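# Hedged illustration, not part of the original code: the inverse mapping in
# get_object() above. The 'dict_data' form is unpacked back into a real dict;
# with preferPersistent enabled the result is additionally wrapped in a
# PersistentDict bound to the containing document object and the jar.
stored_state = {'dict_data': [(1, 'one')]}
# get_object(stored_state, doc_obj) is expected to yield {1: 'one'}
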
def handleAdd(self, action):
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return
    params = urlencode(dict([('composer.widgets.%s' % key, value)
                             for key, value in data.items()]))
    subscribe_url = '%s/subscribe.html?%s' % (
        self.context.absolute_url(), params)
    self.request.response.redirect(subscribe_url)
    return

def __repr__(self):
    def dict_format(data):
        return pprint.pformat(dict(data)).replace('\n', '')

    data = dict(channel=self.channel)
    for attr in ('composer_data', 'collector_data', 'metadata'):
        data[attr] = dict_format(getattr(self, attr))
    fmt_str = ("<SimpleSubscription to %(channel)r with composerdata: "
               "%(composer_data)s, collectordata: %(collector_data)s, "
               "and metadata: %(metadata)s>")
    return fmt_str % data

def add_subscription(
        self, channel, secret, composerd, collectord, metadata):
    subscription = self.subscription_factory(
        channel, secret, composerd, collectord, metadata)
    data = ISubscriptionCatalogData(subscription)
    contained_name = u'%s-%s' % (data.key, data.format)
    if contained_name in self:
        raise ValueError(_("There's already a subscription for ${name}",
                           mapping=dict(name=contained_name)))
    self[contained_name] = subscription
    return self[contained_name]

def __init__(self, data=None, **kwargs):
    # We optimize the case where data is not a dict. The original
    # implementation always created an empty dict, which it then
    # updated. This turned out to be expensive.
    if data is None:
        self.data = {}
    elif isinstance(data, dict):
        self.data = data.copy()
    else:
        self.data = dict(data)
    if len(kwargs):
        self.update(kwargs)

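# Minimal usage sketch, not part of the original code, assuming the class is a
# UserDict-style mapping here called ExampleDict (a hypothetical name); it
# exercises the three branches above.
d1 = ExampleDict()                   # data is None: start with a fresh empty dict
d2 = ExampleDict({'a': 1})           # data is a dict: take a shallow copy
d3 = ExampleDict([('a', 1)], b=2)    # other iterable: dict(data), then apply kwargs
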
def add_subscription(self, channel, secret, composerd, collectord,
                     metadata):
    subscription = self.subscription_factory(channel, secret, composerd,
                                             collectord, metadata)
    data = ISubscriptionCatalogData(subscription)
    contained_name = u'%s-%s' % (data.key, data.format)
    if contained_name in self:
        raise ValueError(
            _("There's already a subscription for ${name}",
              mapping=dict(name=contained_name)))
    self[contained_name] = subscription
    return self[contained_name]

def tool_added(tool, event):
    # Add children
    factories = dict(
        channels=ChannelContainer,
        collectors=collective.dancing.collector.CollectorContainer)
    existing = tool.objectIds()
    for name, factory in factories.items():
        if name not in existing:
            tool[name] = factory(name)

    # Create and register salt
    salt = getattr(aq_base(tool), 'salt', Salt())
    tool.salt = salt
    sm = component.getSiteManager(tool)
    sm.registerUtility(salt, collective.singing.interfaces.ISalt)

def tool_added(tool, event):
    # Add children
    factories = dict(
        channels=ChannelContainer,
        collectors=collective.dancing.collector.CollectorContainer)
    existing = tool.objectIds()
    for name, factory in factories.items():
        if name not in existing:
            tool[name] = factory(name)

    # Create and register salt
    salt = getattr(aq_base(tool), 'salt', Salt())
    tool.salt = salt
    sm = zope.component.getSiteManager(tool)
    sm.registerUtility(salt, collective.singing.interfaces.ISalt)

def set_ghost_state(self, obj, doc=None):
    # Check whether the object state was stored on the object itself.
    if doc is None:
        doc = getattr(obj, interfaces.ATTR_NAME_STATE, None)
    # Look up the object state by table_name and oid.
    if doc is None:
        doc = self._jar._get_doc_by_dbref(obj._p_oid)
    # Check that we really have a state doc now.
    if doc is None:
        raise ImportError(obj._p_oid)
    # Remove unwanted attributes.
    pytype = doc.pop(interfaces.ATTR_NAME_PY_TYPE)
    # Now convert the document to a proper Python state dict.
    state = dict(self.get_object(doc, obj))
    # Sometimes this method is called to update the object state
    # before storage.
    doc[interfaces.ATTR_NAME_PY_TYPE] = pytype
    # Set the state.
    obj.__setstate__(state)
    # Run the custom load functions.
    if interfaces.IPersistentSerializationHooks.providedBy(obj):
        obj._pj_after_load_hook(self._jar._conn)

def __init__(self, channel, data):
    self._channel = channel
    # generate a random secret number as a dummy
    secret = "".join([random.choice(string.ascii_letters + string.digits)
                      for i in range(50)])

    collector_data = {}
    selected_collectors = []
    if not isinstance(data["topics"], (list, tuple)):
        data["topics"] = []
    for collector_title in data["topics"]:
        try:
            collector = self.find_topic(collector_title)
        except AttributeError:
            collector = None
        if collector is not None:
            selected_collectors.append(collector)
    if not selected_collectors:
        # If selected_collectors is empty, it is important to keep it
        # effectively empty, even though some channels don't have optional
        # sections. In a channel with optional sections a missing
        # selected_collectors means the subscriber will get everything,
        # while an empty selected_collectors means they will get nothing.
        # We want the latter.
        selected_collectors = [dummy_collector]
    collector_data["selected_collectors"] = set(selected_collectors)

    # make sure the email is unicode too
    subscription_email = unicode(data["email"].strip())
    # "confirm_url" is not needed here
    composer_data = dict(email=subscription_email)
    if isinstance(data["unsubscribe_url"], basestring) and \
            data["unsubscribe_url"]:
        composer_data["unsubscribe_url"] = data["unsubscribe_url"]
    if isinstance(data["my_subscriptions_url"], basestring) and \
            data["my_subscriptions_url"]:
        composer_data["my_subscriptions_url"] = data["my_subscriptions_url"]
    if isinstance(data["subscriber_data"], dict):
        composer_data.update(data["subscriber_data"])

    # default the pending value to False
    metadata = dict(format="html", pending=False)
    if isinstance(data["format"], basestring) and data["format"]:
        metadata["format"] = data["format"]
    if data["subscription_date"]:
        metadata["date"] = data["subscription_date"]

    super(SubscriptionFromDictionary, self).__init__(
        channel, secret, composer_data, collector_data, metadata)

    # S&D expects to have a persistent store during send to store the
    # cue, for example. It stores it in self.metadata. We will instead
    # replace it with a store in the channel object itself.
    if subscription_email not in self._channel.subscriptions_metadata:
        self._channel.subscriptions_metadata[subscription_email] = \
            persistent.dict.PersistentDict()
        # We only set the metadata the first time from the subscriber
        # list; we don't want to keep creating commits on sends.
        self._channel.subscriptions_metadata[subscription_email].update(
            metadata)
    self.metadata = \
        self._channel.subscriptions_metadata[subscription_email]

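# Hedged sketch, not part of the original code: the shape of the `data`
# mapping the constructor above reads. All concrete values here are
# hypothetical.
example_data = {
    'email': 'subscriber@example.com',
    'topics': ['News'],                          # resolved via self.find_topic()
    'format': 'html',                            # falls back to "html" when empty
    'unsubscribe_url': 'http://example.com/unsubscribe',
    'my_subscriptions_url': 'http://example.com/my-subscriptions',
    'subscriber_data': {'first_name': 'Jane'},   # merged into composer_data
    'subscription_date': None,                   # stored as metadata['date'] when set
}
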
def dict_format(data):
    return pprint.pformat(dict(data)).replace('\n', '')

def get_state(self, obj, pobj=None):
    objectType = type(obj)
    # in_seen = seen
    # if seen is None:
    #     seen = set()
    __traceback_info__ = obj, objectType, pobj
    if objectType in interfaces.PJ_NATIVE_TYPES:
        # If we have a native type, we'll just use it as the state.
        return obj
    if type(obj) == bytes:
        return {
            '_py_type': 'BINARY',
            'data': base64.b64encode(obj).decode('ascii')
        }
    if type(obj) == str:
        return obj

    # Some objects might not naturally serialize well and create a very
    # ugly JSONB entry. Thus, we allow custom serializers to be
    # registered, which can encode/decode different types of objects.
    for serializer in SERIALIZERS:
        if serializer.can_write(obj):
            return serializer.write(obj)

    if objectType == datetime.date:
        return {
            '_py_type': 'datetime.date',
            'value': obj.strftime(FMT_DATE)
        }
    if objectType == datetime.time:
        return {
            '_py_type': 'datetime.time',
            'value': obj.strftime(FMT_TIME)
        }
    if objectType == datetime.datetime:
        return {
            '_py_type': 'datetime.datetime',
            'value': obj.strftime(FMT_DATETIME)
        }

    if isinstance(obj, type):
        # We frequently store class and function paths as meta-data, so we
        # need to be able to properly encode those.
        return {'_py_type': 'type',
                'path': get_dotted_name(obj)}

    # We need to make sure that the object's jar and doc-object are
    # set. This is important for the case when a sub-object was just
    # added.
    if getattr(obj, interfaces.SUB_OBJECT_ATTR_NAME, False):
        if obj._p_jar is None:
            if pobj is not None and \
                    getattr(pobj, '_p_jar', None) is not None:
                obj._p_jar = pobj._p_jar
            setattr(obj, interfaces.DOC_OBJECT_ATTR_NAME, pobj)

    if isinstance(obj, (tuple, list, PersistentList)):
        # Make sure that all values within a list are serialized
        # correctly. Also convert any sequence-type to a simple list.
        return [self.get_state(value, pobj) for value in obj]

    if isinstance(obj, (dict, PersistentDict)):
        # Same as for sequences, make sure that the contained values are
        # properly serialized.
        # Note: see comments at the definition of DICT_NON_STRING_KEY_MARKER
        has_non_compliant_key = False
        data = []
        for key, value in obj.items():
            data.append((key, self.get_state(value, pobj)))
            if (not isinstance(key, str) or  # non-string
                    # a key with our special marker
                    key == DICT_NON_STRING_KEY_MARKER):
                has_non_compliant_key = True
        if not has_non_compliant_key:
            # The easy case: all keys are compliant:
            return dict(data)
        else:
            # We first need to reduce the keys and then produce a data
            # structure.
            data = [(self.get_state(key, pobj), value)
                    for key, value in data]
            return {DICT_NON_STRING_KEY_MARKER: data}

    if isinstance(obj, persistent.Persistent):
        # Only create a persistent reference, if the object does not want
        # to be a sub-document.
        if not getattr(obj, interfaces.SUB_OBJECT_ATTR_NAME, False):
            return self.get_persistent_state(obj)
        # This persistent object is a sub-document, so it is treated like
        # a non-persistent object.

    try:
        res = self.get_non_persistent_state(obj)
    except RuntimeError as re:
        # let it run into a RuntimeError...
        # it's hard to catch a non-persistent - non-persistent circular
        # reference while NOT catching a
        # >>> anobj = object()
        # >>> alist = [anobj, anobj]
        if re.args[0].startswith('maximum recursion depth exceeded'):
            raise interfaces.CircularReferenceError(obj)
        else:
            raise
    return res

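# Hedged illustration, not part of the original code: the bytes handling in
# get_state() above, which base64-encodes binary data into a small marker dict.
import base64
payload = b'\x00\x01binary'
expected_state = {
    '_py_type': 'BINARY',
    'data': base64.b64encode(payload).decode('ascii'),
}
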
def get_state(self, obj, pobj=None, seen=None):
    seen = seen or []
    objectType = type(obj)
    if objectType in interfaces.PJ_NATIVE_TYPES:
        # If we have a native type, we'll just use it as the state.
        return obj
    if isinstance(obj, str):
        # In Python 2, strings can be ASCII, encoded unicode or binary
        # data. Unfortunately, BSON cannot handle that. So, if we have a
        # string that cannot be UTF-8 decoded (luckily ASCII is a valid
        # subset of UTF-8), then we use the BSON binary type.
        try:
            obj.decode('utf-8')
            return obj
        except UnicodeError:
            return {'_py_type': 'BINARY', 'data': obj.encode('base64')}

    # Some objects might not naturally serialize well and create a very
    # ugly JSONB entry. Thus, we allow custom serializers to be
    # registered, which can encode/decode different types of objects.
    for serializer in SERIALIZERS:
        if serializer.can_write(obj):
            return serializer.write(obj)

    if objectType == datetime.date:
        return {'_py_type': 'datetime.date',
                'value': obj.strftime(FMT_DATE)}
    if objectType == datetime.time:
        return {'_py_type': 'datetime.time',
                'value': obj.strftime(FMT_TIME)}
    if objectType == datetime.datetime:
        return {'_py_type': 'datetime.datetime',
                'value': obj.strftime(FMT_DATETIME)}

    if isinstance(obj, (type, types.ClassType)):
        # We frequently store class and function paths as meta-data, so we
        # need to be able to properly encode those.
        return {'_py_type': 'type',
                'path': get_dotted_name(obj)}

    # We need to make sure that the object's jar and doc-object are
    # set. This is important for the case when a sub-object was just
    # added.
    if getattr(obj, interfaces.SUB_OBJECT_ATTR_NAME, False):
        if obj._p_jar is None:
            if pobj is not None and \
                    getattr(pobj, '_p_jar', None) is not None:
                obj._p_jar = pobj._p_jar
            setattr(obj, interfaces.DOC_OBJECT_ATTR_NAME, pobj)

    if isinstance(obj, (tuple, list, PersistentList)):
        # Make sure that all values within a list are serialized
        # correctly. Also convert any sequence-type to a simple list.
        return [self.get_state(value, pobj, seen) for value in obj]

    if isinstance(obj, (dict, PersistentDict)):
        # Same as for sequences, make sure that the contained values are
        # properly serialized.
        # Note: A big constraint in JSONB is that keys must be strings!
        has_non_string_key = False
        data = []
        for key, value in obj.items():
            data.append((key, self.get_state(value, pobj, seen)))
            has_non_string_key |= not isinstance(key, basestring)
            if (not isinstance(key, basestring) or '\0' in key):
                has_non_string_key = True
        if not has_non_string_key:
            # The easy case: all keys are strings:
            return dict(data)
        else:
            # We first need to reduce the keys and then produce a data
            # structure.
            data = [(self.get_state(key, pobj), value)
                    for key, value in data]
            return {'dict_data': data}

    if isinstance(obj, persistent.Persistent):
        # Only create a persistent reference, if the object does not want
        # to be a sub-document.
        if not getattr(obj, interfaces.SUB_OBJECT_ATTR_NAME, False):
            return self.get_persistent_state(obj, seen)
        # This persistent object is a sub-document, so it is treated like
        # a non-persistent object.

    return self.get_non_persistent_state(obj, seen)

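# Hedged illustration, not part of the original code: the datetime branches in
# get_state() above. FMT_DATE/FMT_TIME/FMT_DATETIME are the module's own
# format strings and are not reproduced here.
import datetime
moment = datetime.datetime(2014, 1, 1, 12, 30, 0)
expected_state = {'_py_type': 'datetime.datetime',
                  'value': moment.strftime(FMT_DATETIME)}
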
def get_object(self, state, obj):
    # stateIsDict and state_py_type: optimization to avoid X lookups
    # the code was:
    # if isinstance(state, dict) and state.get('_py_type') == 'DBREF':
    # this method gets called a gazillion times, so being fast is crucial
    stateIsDict = isinstance(state, dict)
    if stateIsDict:
        state_py_type = state.get('_py_type')
        if state_py_type == 'BINARY':
            # Binary data in Python 2 is presented as a string. We will
            # convert back to binary when serializing again.
            return state['data'].decode('base64')
        if state_py_type == 'DBREF':
            # Load a persistent object. Using the _jar.load() method to
            # make sure we're loading from the right database and caching
            # is properly applied.
            dbref = DBRef(state['table'], state['id'], state['database'])
            return self._jar.load(dbref)
        if state_py_type == 'type':
            # Convert a simple object reference, mostly classes.
            return self.simple_resolve(state['path'])
        if state_py_type == 'datetime.date':
            return datetime.datetime.strptime(
                state['value'], FMT_DATE).date()
        if state_py_type == 'datetime.time':
            return datetime.datetime.strptime(
                state['value'], FMT_TIME).time()
        if state_py_type == 'datetime.datetime':
            return datetime.datetime.strptime(
                state['value'], FMT_DATETIME)

    # Give the custom serializers a chance to weigh in.
    for serializer in SERIALIZERS:
        if serializer.can_read(state):
            return serializer.read(state)

    if stateIsDict and (
            '_py_factory' in state
            or '_py_constant' in state
            or '_py_type' in state
            or interfaces.PY_TYPE_ATTR_NAME in state):
        # Load a non-persistent object.
        return self.get_non_persistent_object(state, obj)

    if isinstance(state, (tuple, list)):
        # All lists are converted to persistent lists, so that their state
        # changes are noticed. Also make sure that all value states are
        # converted to objects.
        sub_obj = [self.get_object(value, obj) for value in state]
        if self.preferPersistent:
            sub_obj = PersistentList(sub_obj)
            setattr(sub_obj, interfaces.DOC_OBJECT_ATTR_NAME, obj)
            sub_obj._p_jar = self._jar
        return sub_obj

    if stateIsDict:
        # All dictionaries are converted to persistent dictionaries, so
        # that state changes are detected. Also convert all value states
        # to objects.
        # Handle non-string key dicts.
        if 'dict_data' in state:
            items = state['dict_data']
        else:
            items = state.items()
        sub_obj = dict(
            [(self.get_object(name, obj), self.get_object(value, obj))
             for name, value in items])
        if self.preferPersistent:
            sub_obj = PersistentDict(sub_obj)
            setattr(sub_obj, interfaces.DOC_OBJECT_ATTR_NAME, obj)
            sub_obj._p_jar = self._jar
        return sub_obj

    return state

def __init__(self, channel, data):
    self._channel = channel
    # generate a random secret number as a dummy
    secret = ''.join([random.choice(
        string.ascii_letters + string.digits) for i in range(50)])

    collector_data = {}
    selected_collectors = []
    if not isinstance(data["topics"], (list, tuple)):
        data["topics"] = []
    for collector_title in data["topics"]:
        try:
            collector = self.find_topic(collector_title)
        except AttributeError:
            collector = None
        if collector is not None:
            selected_collectors.append(collector)
    if not selected_collectors:
        # If selected_collectors is empty, it is important to keep it
        # effectively empty, even though some channels don't have optional
        # sections. In a channel with optional sections a missing
        # selected_collectors means the subscriber will get everything,
        # while an empty selected_collectors means they will get nothing.
        # We want the latter.
        selected_collectors = [dummy_collector]
    collector_data["selected_collectors"] = set(selected_collectors)

    # make sure the email is unicode too
    subscription_email = unicode(data["email"].strip())
    # "confirm_url" is not needed here
    composer_data = dict(email=subscription_email)
    if isinstance(data["unsubscribe_url"], basestring) and \
            data["unsubscribe_url"]:
        composer_data["unsubscribe_url"] = data["unsubscribe_url"]
    if isinstance(data["my_subscriptions_url"], basestring) and \
            data["my_subscriptions_url"]:
        composer_data["my_subscriptions_url"] = data["my_subscriptions_url"]
    if isinstance(data["subscriber_data"], dict):
        composer_data.update(data["subscriber_data"])

    # default the pending value to False
    metadata = dict(format="html", pending=False)
    if isinstance(data["format"], basestring) and \
            data["format"]:
        metadata["format"] = data["format"]
    if data["subscription_date"]:
        metadata["date"] = data["subscription_date"]

    super(SubscriptionFromDictionary, self).__init__(
        channel, secret, composer_data, collector_data, metadata
    )

    # S&D expects to have a persistent store during send to store the
    # cue, for example. It stores it in self.metadata. We will instead
    # replace it with a store in the channel object itself.
    if subscription_email not in self._channel.subscriptions_metadata:
        self._channel.subscriptions_metadata[subscription_email] = \
            persistent.dict.PersistentDict()
        # We only set the metadata the first time from the subscriber
        # list; we don't want to keep creating commits on sends.
        self._channel.subscriptions_metadata[subscription_email].update(
            metadata)
    self.metadata = \
        self._channel.subscriptions_metadata[subscription_email]

def get_object(self, state, obj):
    # stateIsDict and state_py_type: optimization to avoid X lookups
    # the code was:
    # if isinstance(state, dict) and state.get('_py_type') == 'DBREF':
    # this method gets called a gazillion times, so being fast is crucial
    stateIsDict = isinstance(state, dict)
    if stateIsDict:
        state_py_type = state.get('_py_type')
        if state_py_type == 'BINARY':
            # Binary data in Python 2 is presented as a string. We will
            # convert back to binary when serializing again.
            return base64.b64decode(state['data'])
        if state_py_type == 'DBREF':
            # Load a persistent object. Using the _jar.load() method to
            # make sure we're loading from the right database and caching
            # is properly applied.
            dbref = DBRef(state['table'], state['id'], state['database'])
            return self._jar.load(dbref)
        if state_py_type == 'type':
            # Convert a simple object reference, mostly classes.
            return self.simple_resolve(state['path'])
        if state_py_type == 'datetime.date':
            return datetime.datetime.strptime(
                state['value'], FMT_DATE).date()
        if state_py_type == 'datetime.time':
            try:
                return datetime.datetime.strptime(
                    state['value'], FMT_TIME).time()
            except ValueError:
                # BBB: We originally did not track sub-seconds.
                warnings.warn(
                    "Data in old time format found. Support for the "
                    "old format will be removed in pjpersist 2.0.",
                    DeprecationWarning)
                return datetime.datetime.strptime(
                    state['value'], FMT_TIME_BBB).time()
        if state_py_type == 'datetime.datetime':
            try:
                return datetime.datetime.strptime(
                    state['value'], FMT_DATETIME)
            except ValueError:
                # BBB: We originally did not track sub-seconds.
                warnings.warn(
                    "Data in old date/time format found. Support for the "
                    "old format will be removed in pjpersist 2.0.",
                    DeprecationWarning)
                return datetime.datetime.strptime(
                    state['value'], FMT_DATETIME_BBB)

    # Give the custom serializers a chance to weigh in.
    for serializer in SERIALIZERS:
        if serializer.can_read(state):
            return serializer.read(state)

    if stateIsDict and (
            '_py_factory' in state
            or '_py_constant' in state
            or '_py_type' in state
            or interfaces.PY_TYPE_ATTR_NAME in state):
        # Load a non-persistent object.
        return self.get_non_persistent_object(state, obj)

    if isinstance(state, (tuple, list)):
        # All lists are converted to persistent lists, so that their state
        # changes are noticed. Also make sure that all value states are
        # converted to objects.
        sub_obj = [self.get_object(value, obj) for value in state]
        if self.preferPersistent:
            sub_obj = PersistentList(sub_obj)
            setattr(sub_obj, interfaces.DOC_OBJECT_ATTR_NAME, obj)
            sub_obj._p_jar = self._jar
        return sub_obj

    if stateIsDict:
        # All dictionaries are converted to persistent dictionaries, so
        # that state changes are detected. Also convert all value states
        # to objects.
        # Handle non-string key dicts.
        # Note: see comments at the definition of DICT_NON_STRING_KEY_MARKER
        if DICT_NON_STRING_KEY_MARKER in state:
            items = state[DICT_NON_STRING_KEY_MARKER]
        else:
            items = state.items()
        sub_obj = dict(
            [(self.get_object(name, obj), self.get_object(value, obj))
             for name, value in items])
        if self.preferPersistent:
            sub_obj = PersistentDict(sub_obj)
            setattr(sub_obj, interfaces.DOC_OBJECT_ATTR_NAME, obj)
            sub_obj._p_jar = self._jar
        return sub_obj

    return state

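# Hedged illustration, not part of the original code: the BINARY branch in
# get_object() above, the inverse of the base64 encoding done by get_state().
import base64
state = {'_py_type': 'BINARY',
         'data': base64.b64encode(b'\x00\x01binary').decode('ascii')}
# get_object(state, doc_obj) is expected to return b'\x00\x01binary'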