def _notify_version_created(ob, dirty, last_version):
    # sanity check !+ comparing with *is* fails
    assert last_version.head == ob
    if dirty:
        # ok, a version was created, notify of new version creation
        version_created = bungeni.core.interfaces.VersionCreatedEvent(last_version)
        event.notify(version_created)
def publish(self):
    """Publish each queued object and empty the queue."""
    _return = []
    while self.context:
        try:
            item = self.context.pull()
            transaction.commit()
        except ConflictError:
            # queue concurrency exception... expected
            if logger.getEffectiveLevel() == logging.DEBUG:
                logger.exception(
                    "ConflictError while publishing queue, "
                    "transaction aborted. This error is an expected "
                    "runtime condition and does not necessarily "
                    "indicate an application issue")
            transaction.abort()
            # If we plan on reusing this database session we must
            # create a new transaction.
            self.connection.newTransaction()
            continue  # skip to next loop
        # TODO: Add tests for re-queuing on publishing errors
        try:
            IPublisher(item).publish()
            _return.append(item)
        except RecoverablePublishingError:
            if item:
                # add item back into queue for publishing exceptions
                self.enqueue(item)
            logger.exception(
                "A recoverable publishing error has occurred "
                "for queued item %s. The item will be "
                "added back in the publishing queue.")
    notify(PublisherQueuePublishedEvent(self))
    return _return
def setUp(test):
    functional.FunctionalTestSetup().setUp()
    newInteraction()

    root = functional.getRootFolder()
    setSite(root)
    sm = root.getSiteManager()

    # IIntIds
    root['ids'] = IntIds()
    sm.registerUtility(root['ids'], IIntIds)
    root['ids'].register(root)

    # catalog
    root['catalog'] = Catalog()
    sm.registerUtility(root['catalog'], ICatalog)

    # space
    space = ContentSpace(title=u'Space')
    event.notify(ObjectCreatedEvent(space))
    root['space'] = space

    # people
    people = PersonalSpaceManager(title=u'People')
    event.notify(ObjectCreatedEvent(people))
    root['people'] = people
    sm.registerUtility(root['people'], IPersonalSpaceManager)

    endInteraction()
def createAndAdd(self, data):
    domain_model = self.domain_model
    # create the object, inspect data for constructor args
    try:
        ob = createInstance(domain_model, data)
    except TypeError:
        log.error("Failure: createInstance(%s, %s)", domain_model, data)
        probing.log_exc(sys.exc_info(), log_handler=log.error)
        ob = domain_model()
    # apply any context values
    self.finishConstruction(ob)
    # apply extra form values
    formlib.form.applyChanges(ob, self.form_fields, data, self.adapters)
    # set the object in container context, causing autosetting of
    # constrained values e.g. one2many attributes, by triggering call to
    # _ManagedContainer.constraints.setConstrainedValues()
    self.context[""] = ob
    # flush so we have database id
    Session().flush()
    # !+DataError reload form and display this error?
    # fire an object created event
    notify(ObjectCreatedEvent(ob))  # !+ would set doc_id (if session not flushed) !!
    # signal to add form machinery to go to next url
    self._finished_add = True
    # retrieve the object with location and security information
    oid = self.get_oid(ob)
    return self.context[oid]
def __call__(self, name, content_type, data):
    ctr = getToolByName(self.context, 'content_type_registry')
    type_ = ctr.findTypeName(name.lower(), '', '') or 'File'

    # XXX: quick fix for german umlauts
    name = name.decode("utf8")
    normalizer = getUtility(IFileNameNormalizer)
    chooser = INameChooser(self.context)

    # otherwise I get ZPublisher.Conflict ConflictErrors
    # when uploading multiple files
    upload_lock.acquire()

    # this should fix #8
    newid = chooser.chooseName(normalizer.normalize(name),
                               self.context.aq_parent)
    try:
        transaction.begin()
        obj = ploneutils._createObjectByType(type_, self.context, newid)
        mutator = obj.getPrimaryField().getMutator(obj)
        mutator(data, content_type=content_type)
        obj.setTitle(name)
        obj.reindexObject()

        notify(ObjectInitializedEvent(obj))
        notify(ObjectModifiedEvent(obj))

        transaction.commit()
    finally:
        upload_lock.release()

    return obj
def create_testobject(parent, ptype, **kwargs):
    createContent(ptype)
    obj = createContent(ptype, **kwargs)
    notify(ObjectCreatedEvent(obj))
    obj = addContentToContainer(parent, obj, checkConstraints=False)
    notify(ObjectAddedEvent(obj))
    return obj
def _do_save(self, data):
    formlib.form.applyChanges(self.context, self.form_fields, data)
    # !+EVENT_DRIVEN_CACHE_INVALIDATION(mr, mar-2011) no modify event
    # invalidate caches for this domain object type
    notify(ObjectModifiedEvent(self.context))
    #cascade_modifications(self.context)
    invalidate_caches_for(self.context.__class__.__name__, "edit")
def handle_apply(self, action):
    data, errors = self.extractData()
    if errors:
        return
    portal = getSite()

    # create a new rand
    expires = datetime.now() + timedelta(days=2)
    data = {'email': data['email']}
    rand = IFridgeManager(self.context).add_entry(data, expires)

    # send mail to user
    mail_to = data['email']
    url = u"%s/%s/%s" % (self.context.absolute_url(), FORM_ID, rand)
    link = u'<a href="%s">%s</a>' % (url, url)
    message = translate(
        msgid=u'request_registration_mail_text',
        domain=DOMAIN,
        mapping={'link': link,
                 'expires': expires.strftime('%d.%m.%Y %H:%M')},
        context=self.request,
        default=u'Finish your registration here ${link} by ${expires}.',
    )
    mail_subject = translate(
        msgid="request_registration_mail_subject",
        domain=DOMAIN,
        context=self.request,
        default=u'Registration',
    )
    notify(MailEvent(message, mail_to, subject=mail_subject))
    IStatusMessage(self.request).add(
        _(u'request_registration_success_notification'), type='warn')
    return self._redirect()
def retract(self, principal=None):
    if principal is None:
        principal = getPrincipal()

    if not self.isRetractable(principal):
        raise DraftException('Cannot retract content.')

    container = queryMultiAdapter((principal, self), IDraftContainer)
    if container is None:
        raise DraftException('Cannot find draft container.')

    content = self.context
    origName = content.__name__
    oldContainer = content.__parent__

    newName = INameChooser(container).chooseName(u'', content)
    container[newName] = removeAllProxies(content)
    del removeAllProxies(oldContainer)[origName]

    draft = container[newName]
    event.notify(ObjectRetractedEvent(content, draft))
    return draft
def create_application(factory, container, name):
    """Creates an application and triggers the events from
    the application lifecycle.
    """
    # Check the factory.
    assert IApplication.implementedBy(factory)
    # Check the availability of the name in the container.
    if name in container:
        raise KeyError(name)
    # Instantiate the application.
    application = factory()
    # Trigger the creation event.
    notify(ObjectCreatedEvent(application))
    # Persist the application. This may raise a KeyError.
    container[name] = application
    # Trigger the initialization event.
    notify(ApplicationInitializedEvent(application))
    return application
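# Usage sketch (assumption, not from the source): how create_application()
# above might be called. "MyApplication" and "site" are hypothetical names;
# any factory implementing IApplication and any mapping-like container works.
def add_default_application(site):
    try:
        app = create_application(MyApplication, site, "app")
    except KeyError:
        # the name was already taken in the container
        app = site["app"]
    return app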
def test_document_fusion(self):
    # data source and model are in the same content
    alsoProvides(self.portal.REQUEST, ICollectiveDocumentfusionLayer)
    content = api.content.create(
        self.portal, type='letter',
        title=u"En réponse...",
        file=NamedFile(data=open(TEST_LETTER_ODT).read(),
                       filename=u'letter.odt',
                       contentType='application/vnd.oasis.opendocument.text'),
        sender_name="Thomas Desvenain",
        sender_address="57 Quai du Pré Long",
        recipient_name="Vincent Fretin",
        date=datetime.date(2012, 12, 23))

    notify(ObjectModifiedEvent(content))
    generated_stream = content.unrestrictedTraverse('@@getdocumentfusion')()
    self.assertTrue(generated_stream)
    self.assertEqual(self.portal.REQUEST.response['content-type'],
                     'application/pdf')
    generated_path = tempfile.mktemp(suffix='letter.pdf')
    generated_file = open(generated_path, 'w')
    generated_file.write(generated_stream.read())
    generated_file.close()

    txt_path = tempfile.mktemp(suffix='letter.pdf')
    subprocess.call(['pdftotext', generated_path, txt_path])
    txt = open(txt_path).read()
    self.assertIn('Vincent Fretin', txt)
    self.assertIn('57 Quai du Pré Long', txt)
    self.assertIn('2012', txt)
    self.assertIn(u'EN RÉPONSE...', txt)
    os.remove(txt_path)
    os.remove(generated_path)
def handleSave(self, action):
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return

    typeName = self.tileType.__name__

    # Traverse to a new tile in the context, with no data
    tile = self.context.restrictedTraverse('@@%s/%s' % (typeName, self.tileId,))

    dataManager = ITileDataManager(tile)

    # We need to check first for existing content in order not to lose
    # fields that weren't sent with the form.
    old_data = dataManager.get()
    for item in data:
        if data[item] is not None:
            old_data[item] = data[item]
    dataManager.set(old_data)

    # Look up the URL - we need to do this after we've set the data to
    # correctly account for transient tiles
    tileURL = absoluteURL(tile, self.request)
    notify(ObjectModifiedEvent(tile))

    # Get the tile URL, possibly with encoded data
    IStatusMessage(self.request).addStatusMessage(_(u"Tile saved",),
                                                  type=u'info')

    self.request.response.redirect(tileURL)
def testOnItemCreation(self):
    """Test notification on item creation."""
    portal = self.portal
    ntool = getToolByName(portal, NTOOL_ID)
    changeProperty = lambda key, value: \
        ntool.manage_changeProperties(**{key: value})
    wtool = getToolByName(portal, 'portal_workflow')
    mh = portal.MailHost
    self.login('manager')

    ## Set correct rules so that 3 mails should be sent.
    changeProperty('item_creation_notification_enabled', True)
    changeProperty('on_item_creation_users', ['* :: *'])
    changeProperty('on_item_creation_mail_template',
                   ['* :: string:creation_mail_notification'])
    portal.invokeFactory('Document', 'document')
    ## See 'events/events.txt' for further details about this
    ## manually fired event.
    event.notify(ObjectInitializedEvent(portal['document']))
    self.failUnlessSent(1)
    portal.manage_delObjects(['document'])
    mh.clearSentList()

    ## Set workflow initial state to 'publish', thus showing the
    ## new item to every user.
    wtool.simple_publication_workflow.initial_state = 'published'
    portal.invokeFactory('Document', 'document')
    event.notify(ObjectInitializedEvent(portal['document']))
    self.failUnlessSent(3)
    portal.manage_delObjects(['document'])
    mh.clearSentList()

    ## Disable notification
    changeProperty('item_creation_notification_enabled', False)
    portal.invokeFactory('Document', 'document')
    event.notify(ObjectInitializedEvent(portal['document']))
    self.failUnlessSent(0)
    portal.manage_delObjects(['document'])
    mh.clearSentList()

    ## Enable notification but set the notified users list to []
    changeProperty('item_creation_notification_enabled', True)
    ntool.manage_changeProperties(on_item_creation_users='* :: python: []')
    portal.invokeFactory('Document', 'document')
    event.notify(ObjectInitializedEvent(portal['document']))
    self.failUnlessSent(0)
    portal.manage_delObjects(['document'])
    mh.clearSentList()

    ## Set the notified users list to "everybody" but ask for a
    ## missing mail template
    changeProperty('on_item_creation_users', ['* :: *'])
    changeProperty('on_item_creation_mail_template',
                   ['* :: string:does_not_exist'])
    portal.invokeFactory('Document', 'document')
    event.notify(ObjectInitializedEvent(portal['document']))
    self.failUnlessSent(0)
    portal.manage_delObjects(['document'])
    mh.clearSentList()
def test_limits_recently_touched_items(self, browser):
    self.login(self.regular_user, browser=browser)
    user_id = self.regular_user.getId()
    self._clear_recently_touched_log(user_id)

    # Touch a couple of documents (more than the current limit)
    docs = [self.document, self.private_document, self.expired_document,
            self.subdocument, self.taskdocument]
    with freeze(datetime(2018, 4, 30)) as freezer:
        for doc in docs:
            freezer.forward(minutes=1)
            notify(ObjectTouchedEvent(doc))

    api.portal.set_registry_record(
        'limit', 3, IRecentlyTouchedSettings)

    url = '%s/@recently-touched/%s' % (
        self.portal.absolute_url(), self.regular_user.getId())
    browser.open(url, method='GET', headers={'Accept': 'application/json'})

    # Even though the storage contains more logged touched entries, the
    # API endpoint should truncate them to the currently defined limit.
    self.assertEqual(200, browser.status_code)
    recently_touched_list = browser.json['recently_touched']
    self.assertEqual(3, len(recently_touched_list))
def test_checked_out_docs_arent_listed_twice(self, browser):
    self.login(self.regular_user, browser=browser)
    self._clear_recently_touched_log(self.regular_user.getId())

    with freeze(datetime(2018, 4, 30)):
        manager = queryMultiAdapter(
            (self.document, self.request), ICheckinCheckoutManager)
        manager.checkout()
        notify(ObjectTouchedEvent(self.document))

    url = '%s/@recently-touched/%s' % (
        self.portal.absolute_url(), self.regular_user.getId())
    browser.open(url, method='GET', headers={'Accept': 'application/json'})

    # If a document is both in the log for recently touched objects as
    # well as checked out, it must only be listed once, in the
    # checked out documents section.
    self.assertEqual(200, browser.status_code)
    self.assertEquals(
        {'checked_out': [{
            'icon_class': 'icon-docx is-checked-out-by-current-user',
            'last_touched': '2018-04-30T00:00:00+02:00',
            'target_url': self.document.absolute_url(),
            'title': u'Vertr\xe4gsentwurf'}],
         'recently_touched': []},
        browser.json)
def createIssue(self, tracker, title='An issue', details='Something is wrong',
                release='(UNASSIGNED)', area='ui', issueType='bug',
                severity='Medium', targetRelease='(UNASSIGNED)', steps='',
                attachment=None, contactEmail='*****@*****.**',
                watchers=(), tags=(), responsibleManager='(UNASSIGNED)'):
    """Create an issue in the given tracker, and perform workflow and
    rename-after-creation initialisation"""
    newId = tracker.generateUniqueId('PoiIssue')
    tracker.invokeFactory('PoiIssue', newId)
    issue = getattr(tracker, newId)
    issue.setTitle(title)
    issue.setRelease(release)
    issue.setArea(area)
    issue.setIssueType(issueType)
    issue.setSeverity(severity)
    issue.setTargetRelease(targetRelease)
    issue.setDetails(details)
    issue.setSteps(steps, mimetype='text/x-web-intelligent')
    issue.setAttachment(attachment)
    issue.setWatchers(watchers)
    # This also adds to the watchers:
    issue.setContactEmail(contactEmail)
    issue.setSubject(tags)
    issue.setResponsibleManager(responsibleManager)
    issue._renameAfterCreation()
    issue.reindexObject()
    notify(ObjectInitializedEvent(issue))
    return issue
def createResponse(self, issue, text='Response text', issueTransition='',
                   newSeverity=None, newTargetRelease=None,
                   newResponsibleManager=None, attachment=None):
    """Create a response to the given issue, and perform workflow and
    rename-after-creation initialisation"""
    from Products.Poi.browser.response import Create
    request = issue.REQUEST
    request.form['response'] = text
    request.form['transition'] = issueTransition
    if newSeverity is not None:
        request.form['severity'] = newSeverity
    if newTargetRelease is not None:
        request.form['targetRelease'] = newTargetRelease
    if newResponsibleManager is not None:
        request.form['responsibleManager'] = newResponsibleManager
    if attachment is not None:
        request.form['attachment'] = attachment
    create_view = Create(issue, request)
    # A response is created by calling this view:
    create_view()
    container = IResponseContainer(issue)
    id = str(len(container) - 1)
    response = container[id]
    # In tests we need to fire this event manually:
    notify(ObjectModifiedEvent(response))
    return response
def test_request_not_annotatable(self):
    context = FauxContext()
    request = FauxRequest()
    setRequest(request)

    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)

    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurgePaths(object):
        implements(IPurgePaths)
        adapts(FauxContext)

        def __init__(self, context):
            self.context = context

        def getRelativePaths(self):
            return ['/foo', '/bar']

        def getAbsolutePaths(self):
            return []

    provideAdapter(FauxPurgePaths, name="test1")

    try:
        notify(Purge(context))
    except:
        self.fail()
def update(self):
    context = self.context
    request = self.request

    if 'form.save' in request:
        roles = {}
        for role, setting in request.form.items():
            if setting == '1':
                roles[role] = Allow
            elif setting == '2':
                roles[role] = Deny
            else:
                roles[role] = Unset

        context.roles = roles
        event.notify(ObjectModifiedEvent(context))
        IStatusMessage(request).add(_('Roles have been changed.'))

    roles = []
    for rid, setting in self.context.roles.items():
        role = getUtility(IPublicRole, rid)
        roles.append((
            role.title, rid,
            {'id': rid,
             'title': role.title,
             'setting': self._settings[setting]}))

    roles.sort()
    self.roles = [info for t, i, info in roles]
def test_request_not_annotatable(self):
    request = FauxRequest()

    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)

    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurger(object):
        implements(IPurger)

        def __init__(self):
            self.purged = []

        def purgeAsync(self, url, httpVerb='PURGE'):
            self.purged.append(url)

    purger = FauxPurger()
    provideUtility(purger)

    notify(EndRequestEvent(None, request))

    self.assertEquals([], purger.purged)
def test_purge(self):
    request = FauxRequest()
    alsoProvides(request, IAttributeAnnotatable)
    IAnnotations(request)['zojax.cachepurging.urls'] = set(['/foo', '/bar'])

    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)

    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurger(object):
        implements(IPurger)

        def __init__(self):
            self.purged = []

        def purgeAsync(self, url, httpVerb='PURGE'):
            self.purged.append(url)

    purger = FauxPurger()
    provideUtility(purger)

    notify(EndRequestEvent(None, request))

    self.assertEquals(['http://localhost:1234/foo',
                       'http://localhost:1234/bar'],
                      purger.purged)
def __call__(self):
    if not isCachePurgingEnabled():
        return 'Caching not enabled'

    notify(Purge(self.context))
    return 'Queued'
def _importNode(self, node):
    """Import the object from the DOM node.
    """
    for child in node.childNodes:
        # Properties
        if child.nodeName == 'property':
            name = child.getAttribute('name')
            purge = child.getAttribute('purge')
            purge = self._convertToBoolean(purge)

            elements = []
            field = self.context.getField(name)
            for element in child.childNodes:
                if element.nodeName != 'element':
                    continue
                elements.append(element.getAttribute('value'))

            if elements:
                if not purge:
                    value = elements
                    oldValue = field.getAccessor(self.context)()
                    value.extend(x for x in oldValue if x not in value)
                else:
                    value = []
            else:
                value = self._getNodeText(child)
                value = value.decode('utf-8')
                value = value if not purge else u''

            field.getMutator(self.context)(value)

    notify(ObjectModifiedEvent(self.context))
    self.context.reindexObject()
def applyChanges(self, data):
    changes = self.schema.setSchemaData(data)

    if changes:
        event.notify(ObjectModifiedEvent(
            self.context, Attributes(IContentSchema, *changes)))
        return {IContentSchema: changes}
    else:
        return {}
def fix_relations():
    relations_catalog = getUtility(ICatalog)
    intids = getUtility(IIntIds)
    relations = list(relations_catalog.findRelations())
    for relation in relations:
        from_object = intids.getObject(relation.from_id)
        from_attribute = relation.from_attribute
        to_id = relation.to_id

        attr = getattr(from_object, from_attribute, None)
        attr_is_list = isinstance(attr, list)

        # remove the broken relation
        if attr_is_list:
            setattr(from_object, from_attribute,
                    [x for x in attr if x is not relation])
        else:
            setattr(from_object, from_attribute, None)

        # let the catalog remove the old relation
        notify(ObjectModifiedEvent(from_object))

        attr = getattr(from_object, from_attribute, None)

        # create a new relation
        new_relation = RelationValue(to_id)
        if attr_is_list:
            attr.append(new_relation)
        else:
            setattr(from_object, from_attribute, new_relation)

        # let the catalog know about this new relation
        notify(ObjectModifiedEvent(from_object))
def setUp(self):
    """ """
    portal = makerequest(self.layer['portal'])
    self.request = portal.REQUEST
    alsoProvides(self.request, IPloneintranetDocconvClientLayer)
    setRoles(portal, TEST_USER_ID, ('Manager',))
    gsettings = GlobalSettings(portal)
    self.storage_dir = mkdtemp()
    gsettings.storage_location = self.storage_dir

    # temporarily disable event handler so that we can test objects without
    # previews
    from ploneintranet.docconv.client import handlers
    _update_preview_images = handlers._update_preview_images
    handlers._update_preview_images = lambda obj, event: None

    self.workspace = api.content.create(
        type='Folder',
        title=u"Docconv Workspace",
        container=portal)

    ff = open(os.path.join(os.path.dirname(__file__), TEST_FILENAME), 'r')
    self.filedata = ff.read()
    ff.close()
    self.testfile = api.content.create(
        type='File',
        id='test-file',
        title=u"Test File",
        file=NamedBlobFile(data=self.filedata, filename=TEST_FILENAME),
        container=self.workspace)

    handlers._update_preview_images = _update_preview_images

    event.notify(BeforeTraverseEvent(portal, portal.REQUEST))
def _create_file(self, item, files, title, description, rights):
    namechooser = INameChooser(self.context)
    content_type = item.headers.get('Content-Type')
    filename = safe_unicode(item.filename)
    data = item.read()
    id_name = ''

    title = title and title[0] or filename
    id_name = namechooser.chooseName(title, self.context)

    if content_type in IMAGE_MIMETYPES:
        portal_type = 'Image'
        wrapped_data = NamedBlobImage(data=data, filename=filename)
    else:
        portal_type = 'File'
        wrapped_data = NamedBlobFile(data=data, filename=filename)

    self.context.invokeFactory(portal_type,
                               id=id_name,
                               title=title,
                               description=description[0],
                               rights=rights[0])
    newfile = self.context[id_name]

    if portal_type == 'File':
        if IATFile.providedBy(newfile):
            newfile.setFile(data, filename=filename)
        else:
            newfile.file = wrapped_data
    elif portal_type == 'Image':
        if IATImage.providedBy(newfile):
            newfile.setImage(data, filename=filename)
        else:
            newfile.image = wrapped_data

    newfile.reindexObject()
    notify(ObjectModifiedEvent(newfile))
    return newfile
def manage_addFile(self, id, file="", title="", precondition="",
                   content_type="", REQUEST=None):
    """Add a new File object.

    Creates a new File object 'id' with the contents of 'file'"""

    id = str(id)
    title = str(title)
    content_type = str(content_type)
    precondition = str(precondition)

    id, title = cookId(id, title, file)

    self = self.this()

    # First, we create the file without data:
    self._setObject(id, File(id, title, "", content_type, precondition))

    newFile = self._getOb(id)

    # Now we "upload" the data.  By doing this in two steps, we
    # can use a database trick to make the upload more efficient.
    if file:
        newFile.manage_upload(file)
    if content_type:
        newFile.content_type = content_type

    notify(ObjectCreatedEvent(newFile))

    if REQUEST is not None:
        REQUEST["RESPONSE"].redirect(self.absolute_url() + "/manage_main")
def moveProcess(self, uid, targetUid):
    obj = self._getObject(uid)
    target = self._getObject(targetUid)
    brainsCollection = []
    # reindex all the devices and processes underneath this guy and the target
    for org in (obj.getPrimaryParent().getPrimaryParent(), target):
        catalog = ICatalogTool(org)
        brainsCollection.append(catalog.search(OSProcess))

    if isinstance(obj, OSProcessClass):
        source = obj.osProcessOrganizer()
        source.moveOSProcessClasses(targetUid, obj.id)
        newObj = getattr(target.osProcessClasses, obj.id)
    elif isinstance(obj, OSProcessOrganizer):
        source = aq_parent(obj)
        source.moveOrganizer(targetUid, (obj.id,))
        newObj = getattr(target, obj.id)
    else:
        raise Exception('Illegal type %s' % obj.__class__.__name__)

    # fire the object moved event for the process instances (will update catalog)
    for brains in brainsCollection:
        objs = imap(unbrain, brains)
        for item in objs:
            notify(ObjectMovedEvent(item, item.os(), item.id, item.os(), item.id))

    return newObj.getPrimaryPath()
def test_enabled(self):
    context = FauxContext()
    request = FauxRequest()
    alsoProvides(request, IAttributeAnnotatable)
    setRequest(request)

    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)

    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurgePaths(object):
        implements(IPurgePaths)
        adapts(FauxContext)

        def __init__(self, context):
            self.context = context

        def getRelativePaths(self):
            return ['/foo', '/bar']

        def getAbsolutePaths(self):
            return []

    provideAdapter(FauxPurgePaths, name="test1")

    notify(Purge(context))

    self.assertEquals({'zojax.cachepurging.urls': set(['/foo', '/bar'])},
                      dict(IAnnotations(request)))
def fire_login_events(self, first_login, user):
    if first_login:
        notify(UserInitialLoginInEvent(user))
    else:
        notify(UserLoggedInEvent(user))
def __call__(self):
    """ Add a learner to a classlist """
    learner_code = self.request.get('learner_code', '')
    learner_name = self.request.get('learner_name', '')
    learner_gender = self.request.get('learner_gender', '')
    learner_lang_id = self.request.get('learner_lang_id', '')
    learner_lang = self.request.get('learner_lang', '')

    if learner_name == '' or learner_code == '':
        status = 'error'  # class in the template
        status_msg = self.context.translate(_('error'))  # content string
        msg = self.context.translate(_("Field cannot be empty"))
        return json.dumps({
            'status': status,
            'status_msg': status_msg,
            'msg': msg
        })

    # validate that student code is unique
    status = ''
    catalog = getToolByName(self.context, 'portal_catalog')
    result = catalog(id=learner_code)
    if len(result) != 0:
        status = 'error'  # class in the template
        status_msg = self.context.translate(_('error'))  # content string
        msg = self.context.translate(_("Student code not unique"))
        return json.dumps({
            'status': status,
            'status_msg': status_msg,
            'msg': msg
        })

    classlist = self.context
    classlist.invokeFactory('upfront.classlist.content.learner',
                            learner_code,
                            title=learner_name)
    new_learner = classlist._getOb(learner_code)
    new_learner.code = learner_code
    new_learner.name = learner_name
    new_learner.gender = learner_gender
    new_learner.home_language = RelationValue(int(learner_lang_id))
    notify(ObjectModifiedEvent(new_learner))

    learner_id = new_learner.id
    learner_editurl = '%s/edit' % new_learner.absolute_url()

    # success
    status = 'info'  # class in the template
    status_msg = self.context.translate(_('info'))  # content string
    msg = self.context.translate(_("New learner added"))
    return json.dumps({
        'learner_id': learner_id,
        'learner_code': learner_code,
        'learner_name': learner_name,
        'learner_editurl': learner_editurl,
        'learner_gender': learner_gender,
        'learner_lang': learner_lang,
        'status': status,
        'status_msg': status_msg,
        'msg': msg
    })
def fireTransition(self, transition_id, comment=None, side_effect=None,
                   check_security=True):
    state = self.state(self.context)
    # this raises InvalidTransitionError if id is invalid for current state
    transition = self.wf.getTransition(state.getState(), transition_id)
    # check whether we may execute this workflow transition
    try:
        interaction = getInteraction()
    except NoInteraction:
        checkPermission = nullCheckPermission
    else:
        if check_security:
            checkPermission = interaction.checkPermission
        else:
            checkPermission = nullCheckPermission
    if not checkPermission(transition.permission, self.context):
        raise Unauthorized(self.context,
                           'transition: %s' % transition_id,
                           transition.permission)
    # now make sure transition can still work in this context
    if not transition.condition(self, self.context):
        # XXX should we include state info here? if so, what?
        raise ConditionFailedError
    # perform action, return any result as new version
    result = transition.action(self, self.context)
    if result is not None:
        if transition.source is None:
            self.state(result).initialize()
        # stamp it with version
        state = self.state(result)
        state.setId(self.state(self.context).getId())
        # execute any side effect:
        if side_effect is not None:
            side_effect(result)
        event = WorkflowVersionTransitionEvent(
            result, self.context, transition.source,
            transition.destination, transition, comment)
    else:
        if transition.source is None:
            self.state(self.context).initialize()
        # execute any side effect
        if side_effect is not None:
            side_effect(self.context)
        event = WorkflowTransitionEvent(
            self.context, transition.source,
            transition.destination, transition, comment)
    # change state of context or new object
    state.setState(transition.destination)
    notify(event)
    # send modified event for original or new object
    if result is None:
        notify(ObjectModifiedEvent(self.context))
    else:
        notify(ObjectModifiedEvent(result))
    return result
def test_update_operator(self):
    item = self.portal['memberfolder1']['568066794']
    event.notify(
        ObjectAddedEvent(item, self.portal['memberfolder1'], '568066794'))
    sorg = self.portal['orgnizationfolder1']['government1']
    self.assertEqual(sorg.operator, "*****@*****.**")
def startup():
    from App.PersistentExtra import patchPersistent
    import Globals  # to set / fetch data
    patchPersistent()

    global app

    # Import products
    OFS.Application.import_products()

    configuration = getConfiguration()

    # Open the database
    dbtab = configuration.dbtab
    try:
        # Try to use custom storage
        try:
            m = imp.find_module('custom_zodb', [configuration.testinghome])
        except:
            m = imp.find_module('custom_zodb', [configuration.instancehome])
    except Exception:
        # if there is no custom_zodb, use the config file specified databases
        DB = dbtab.getDatabase('/', is_root=1)
    else:
        m = imp.load_module('Zope2.custom_zodb', m[0], m[1], m[2])
        sys.modules['Zope2.custom_zodb'] = m

        # Get the database and join it to the dbtab multidatabase
        # FIXME: this uses internal datastructures of dbtab
        databases = getattr(dbtab, 'databases', {})
        if hasattr(m, 'DB'):
            DB = m.DB
            databases.update(getattr(DB, 'databases', {}))
            DB.databases = databases
        else:
            DB = ZODB.DB(m.Storage, databases=databases)

    notify(DatabaseOpened(DB))

    Globals.BobobaseName = DB.getName()

    if DB.getActivityMonitor() is None:
        from ZODB.ActivityMonitor import ActivityMonitor
        DB.setActivityMonitor(ActivityMonitor())

    Globals.DB = DB
    Zope2.DB = DB

    # Hook for providing multiple transaction object manager undo support:
    Globals.UndoManager = DB

    Globals.opened.append(DB)

    import ClassFactory
    DB.classFactory = ClassFactory.ClassFactory

    # "Log on" as system user
    newSecurityManager(None, AccessControl.User.system)

    # Set up the CA
    load_zcml()

    # Set up the "app" object that automagically opens
    # connections
    app = App.ZApplication.ZApplicationWrapper(
        DB, 'Application', OFS.Application.Application, ())
    Zope2.bobo_application = app

    # Initialize the app object
    application = app()
    OFS.Application.initialize(application)
    if Globals.DevelopmentMode:
        # Set up auto-refresh.
        from App.RefreshFuncs import setupAutoRefresh
        setupAutoRefresh(application._p_jar)
    application._p_jar.close()

    # "Log off" as system user
    noSecurityManager()

    global startup_time
    startup_time = asctime()

    notify(DatabaseOpenedWithRoot(DB))

    Zope2.zpublisher_transactions_manager = TransactionsManager()
    Zope2.zpublisher_exception_hook = zpublisher_exception_hook
    Zope2.zpublisher_validated_hook = validated_hook
    Zope2.__bobo_before__ = noSecurityManager
def CopyToClient(survey, preview=False):
    """Copy the survey to the online client part of the site.

    :param survey: the survey to copy
    :param bool preview: indicates if this is a preview or a normal
        publication
    :rtype: :py:class:`euphorie.content.survey.Survey`

    The public area is hardcoded to be a container with id ``client``
    within the site root.

    The ''id'' and ''title'' of the survey group will be used for the
    published survey. If another object with the same ''id'' already
    exists it will be removed first. Any missing country and sector
    folders are created if needed.

    If this is a preview (as indicated by the ``preview`` parameter) the
    id of the survey will be set to ``preview``, guaranteeing that an
    existing published survey will not be replaced. This also means a
    sector can only have one preview online.

    This method assumes the current user has permissions to create
    content in the online client. This is normally done by using the
    :py:func:`PublishToClient` function which switches the current user
    for the copy operation.

    Returns the new public survey instance.
    """
    # This is based on OFS.CopyContainer.manage_clone, modified to
    # use the sector id and title, skip security checks and remove
    # an existing object with the same id.
    client = getPortal(survey).client

    source = aq_inner(survey)
    surveygroup = aq_parent(source)
    sector = aq_parent(surveygroup)
    country = aq_parent(sector)
    from euphorie.content.sector import ISector

    assert ISector.providedBy(sector)

    if country.id not in client:
        client.invokeFactory(
            "euphorie.clientcountry",
            country.id,
            title=country.title,
            country_type=country.country_type,
        )
    cl_country = client[country.id]

    if sector.id not in cl_country:
        cl_country.invokeFactory("euphorie.clientsector", sector.id)
    target = cl_country[sector.id]
    target.title = sector.title
    target.logo = sector.logo
    # Clear any scaled logos
    AnnotationStorage(target).storage.clear()

    copy = source._getCopy(target)
    if preview:
        copy.id = "preview"
    else:
        copy.id = surveygroup.id
    copy.title = surveygroup.title
    copy.obsolete = surveygroup.obsolete
    copy.evaluation_algorithm = surveygroup.evaluation_algorithm
    copy.version = source.id
    copy.published = datetime.datetime.now()
    copy.preview = preview

    if copy.id in target:
        # We must suppress events to prevent the can-not-delete-published-
        # content check from blocking us.
        # XXX: We need however the ObjectWillBeRemovedEvent event to be called
        # otherwise the removed objects are not uncatalogged.
        to_delete = target._getOb(copy.id)
        notify(ObjectWillBeRemovedEvent(to_delete, target, copy.id))
        target._delObject(copy.id, suppress_events=True)

    target._setObject(copy.id, copy, suppress_events=True)

    copy = target[copy.id]
    copy._postCopy(target, op=0)

    notify(ObjectPublishedEvent(source))
    return copy
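# Usage sketch (assumption, not from the source): CopyToClient() above is
# normally reached via PublishToClient, which switches to a user allowed to
# write into the client area; called directly for a survey it looks like this.
preview_copy = CopyToClient(survey, preview=True)    # replaces only an existing "preview" copy
public_copy = CopyToClient(survey, preview=False)    # replaces the published survey for the group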
w.notifyBefore(ob, action)
notify(ActionWillBeInvokedEvent(ob, w, action))
try:
    res = func(*args, **kw)
except ObjectDeleted, ex:
    res = ex.getResult()
    reindex = 0
except ObjectMoved, ex:
    res = ex.getResult()
    ob = ex.getNewObject()
except:
    exc = sys.exc_info()
    try:
        for w in wfs:
            w.notifyException(ob, action, exc)
            notify(ActionRaisedExceptionEvent(ob, w, action, exc))
        raise exc[0], exc[1], exc[2]
    finally:
        exc = None
for w in wfs:
    w.notifySuccess(ob, action, res)
    notify(ActionSucceededEvent(ob, w, action, res))
if reindex:
    self._reindexWorkflowVariables(ob)
return res

security.declarePrivate('_recursiveUpdateRoleMappings')
def _recursiveUpdateRoleMappings(self, ob, wfs):
    """ Update roles-permission mappings recursively, and reindex special index.
def endRequest(self, request, ob):
    endInteraction()
    notify(EndRequestEvent(ob, request))
def get_cloned_session(self):
    sql_session = Session
    old_session = self.session
    new_session = sql_clone(
        old_session,
        skip={
            "id",
            "created",
            "modified",
            "last_modifier_id",
            "company",
            "published",
            "group_id",
            "archived",
        },
        session=sql_session,
    )
    lang = getattr(self.request, "LANGUAGE", "en")
    new_session.title = "{}: {}".format(
        translate(_("prefix_cloned_title", default="COPY"),
                  target_language=lang),
        new_session.title,
    )

    account = self.webhelpers.get_current_account()
    new_session.group = account.group
    new_session.modified = new_session.created = datetime.now()
    new_session.account = account

    if old_session.company:
        new_session.company = sql_clone(old_session.company,
                                        skip={"id", "session"},
                                        session=sql_session)

    risk_module_skipped_attributes = {
        "id",
        "session",
        "sql_module_id",
        "parent_id",
        "session_id",
        "sql_risk_id",
        "risk_id",
    }
    module_mapping = {}

    old_modules = sql_session.query(Module).filter(
        SurveyTreeItem.session == old_session)
    for old_module in old_modules:
        new_module = sql_clone(old_module,
                               skip=risk_module_skipped_attributes,
                               session=sql_session)
        module_mapping[old_module.id] = new_module
        new_module.session = new_session

    old_risks = sql_session.query(Risk).filter(
        SurveyTreeItem.session == old_session)
    for old_risk in old_risks:
        new_risk = sql_clone(old_risk,
                             skip=risk_module_skipped_attributes,
                             session=sql_session)
        new_risk.parent_id = module_mapping[old_risk.parent_id].id
        new_risk.session = new_session
        for old_plan in old_risk.action_plans:
            new_plan = sql_clone(old_plan,
                                 skip={"id", "risk_id"},
                                 session=sql_session)
            new_plan.risk = new_risk

    notify(ObjectModifiedEvent(new_session))
    return new_session
def __call__(self, value):
    obj = AutopublishSpecification(value)
    notify(ObjectCreatedEvent(obj))
    return obj
def fire_login_event(self, member):
    user = member.getUser()
    if self.first_login:
        event.notify(UserInitialLoginInEvent(user))
    else:
        event.notify(UserLoggedInEvent(user))
def callTraversalHooks(self, request, ob):
    # Call __before_publishing_traverse__ hooks
    notify(BeforeTraverseEvent(ob, request))
    # This is also a handy place to try and authenticate.
    self._maybePlacefullyAuthenticate(request, ob)
def setupGroups(context):
    """create emc management groups and management users"""
    from emc.memberArea.events import BackMemberCreatedEvent

    group = api.group.create(
        groupname='System Administrators',
        title='System Administrators',
        description='EMC System Administrators',
        roles=['SysAdmin', ],
    )
    group = api.group.create(
        groupname='Secure Staffs',
        title='Secure Staffs',
        description='EMC Secure Staffs',
        roles=['SecStaff', ],
    )
    group = api.group.create(
        groupname='Secure Auditors',
        title='Secure Auditors',
        description='EMC Secure Auditors',
        roles=['SecAuditor', ],
    )

    properties = dict(fullname=u'李四'.encode('utf-8'))
    demo = api.user.create(
        username='******',
        email='*****@*****.**',
        password='******',
        properties=properties
    )
    if demo is not None:
        event.notify(BackMemberCreatedEvent(demo))

    properties = dict(fullname=u'系统管理员'.encode('utf-8'))
    demo = api.user.create(
        username='******',
        email='*****@*****.**',
        password='******',
        properties=properties
    )
    if demo is not None:
        event.notify(BackMemberCreatedEvent(demo))

    properties = dict(fullname=u'安全管理员'.encode('utf-8'))
    demo = api.user.create(
        username='******',
        email='*****@*****.**',
        password='******',
        properties=properties
    )
    if demo is not None:
        event.notify(BackMemberCreatedEvent(demo))

    properties = dict(fullname=u'安全审计员'.encode('utf-8'))
    demo = api.user.create(
        username='******',
        email='*****@*****.**',
        password='******',
        properties=properties
    )
    if demo is not None:
        event.notify(BackMemberCreatedEvent(demo))

    api.group.add_user(groupname='System Administrators', username='******')
    api.group.add_user(groupname='Secure Staffs', username='******')
    api.group.add_user(groupname='Secure Auditors', username='******')
def notify_modified(self):
    notify(ObjectModifiedEvent(self.context.session))
def assertFullyTranslatedPages(self):
    a = createContentInContainer(
        self.portal['en'], 'Document', title=u"Test document")
    a_ca = api.translate(a, 'ca')
    a_ca.setTitle(u"Test Document (CA)")
    a_es = api.translate(a, 'es')
    a_es.setTitle(u"Test Document (ES)")
    wftool = getToolByName(self.portal, 'portal_workflow')
    wftool.doActionFor(a, 'publish')
    wftool.doActionFor(a_ca, 'publish')
    wftool.doActionFor(a_es, 'publish')
    notify(ObjectModifiedEvent(a))
    notify(ObjectModifiedEvent(a_ca))
    notify(ObjectModifiedEvent(a_es))

    selector_viewlet = \
        LanguageSelectorViewlet(a, self.request, None, None)
    selector_viewlet.update()
    selector_viewlet_languages = selector_viewlet.languages()
    self.assertEqual(selector_viewlet_languages, [{
        'code': u'en',
        u'flag': u'/++resource++country-flags/gb.gif',
        u'name': u'English',
        u'native': u'English',
        'url': SELECTOR_VIEW_TEMPLATE % {
            'url': self.portal_url,
            'tg': ITG(a),
            'lang': 'en',
            'query': '?set_language=en'
        },
        'selected': True,
        'translated': True,
    }, {
        'code': u'ca',
        u'flag': u'/++resource++language-flags/ca.gif',
        u'name': u'Catalan',
        u'native': u'Catal\xe0',
        'url': SELECTOR_VIEW_TEMPLATE % {
            'url': self.portal_url,
            'tg': ITG(a),
            'lang': 'ca',
            'query': '?set_language=ca'
        },
        'selected': False,
        'translated': True,
    }, {
        'code': u'es',
        u'flag': u'/++resource++country-flags/es.gif',
        u'name': u'Spanish',
        u'native': u'Espa\xf1ol',
        'url': SELECTOR_VIEW_TEMPLATE % {
            'url': self.portal_url,
            'tg': ITG(a),
            'lang': 'es',
            'query': '?set_language=es'
        },
        'selected': False,
        'translated': True,
    }])

    transaction.commit()
    self.browser.open(selector_viewlet_languages[0]['url'])
    self.assertEqual(self.browser.url,
                     a.absolute_url() + '?set_language=en')
    self.assertRegexpMatches(self.browser.contents, r"You\s*are here")
    self.browser.open(selector_viewlet_languages[1]['url'])
    self.assertEqual(self.browser.url,
                     a_ca.absolute_url() + '?set_language=ca')
    self.assertIn(u'lang="ca"'.encode("utf-8"), self.browser.contents)
    self.browser.open(selector_viewlet_languages[2]['url'])
    self.assertEqual(self.browser.url,
                     a_es.absolute_url() + '?set_language=es')
    self.assertIn(u'lang="es"'.encode("utf-8"), self.browser.contents)
class DeleteForm(PageForm):
    """Delete-form for Bungeni content.

    Confirmation
        The user is presented with a confirmation form which details the
        items that are going to be deleted.

    Subobjects
        Recursively, a permission check is carried out for each item that
        is going to be deleted. If a permission check fails, an error
        message is displayed to the user.

    Will redirect back to the container on success.
    """
    # evoque
    template = z3evoque.PageViewTemplateFile("delete.html")

    # zpt
    # !+form_template(mr, jul-2010) this is unused here, but needed by
    # some adapter of this "object delete" view
    #form_template = NamedTemplate("alchemist.form")
    #template = ViewPageTemplateFile("templates/delete.pt")

    _next_url = None
    form_fields = formlib.form.Fields()

    def _can_delete_item(self, action):
        return True

    def nextURL(self):
        return self._next_url

    def update(self):
        self.subobjects = self.get_subobjects()
        super(DeleteForm, self).update()

    def get_subobjects(self):
        return ()

    def delete_subobjects(self):
        return 0

    @formlib.form.action(_(u"Delete"), condition=_can_delete_item)
    def handle_delete(self, action, data):
        count = self.delete_subobjects()
        container = self.context.__parent__
        trusted = removeSecurityProxy(self.context)
        session = Session()
        session.delete(trusted)
        count += 1

        try:
            session.commit()
        except IntegrityError, e:
            # this should not happen in production; it's a critical
            # error, because the transaction might have failed in the
            # second phase of the commit
            session.rollback()
            logging.critical(e)
            self.status = _(u"Could not delete item due to "
                            "database integrity error")
            return self.render()
        session.close()

        # invalidate caches for this domain object type
        invalidate_caches_for(self.context.__class__.__name__, "delete")

        #TODO: check that it is removed from the index!
        notify(ObjectRemovedEvent(
            self.context, oldParent=container, oldName=self.context.__name__))

        # we have to switch our context here otherwise the deleted object will
        # be merged into the session again and reappear magically
        self.context = container
        next_url = self.nextURL()
        if next_url is None:
            next_url = url.absoluteURL(container, self.request) + \
                "/?portal_status_message=%d items deleted" % count

        self.request.response.redirect(next_url)
def test_multiple_widgets(self):
    from zope.event import notify
    from Products.Archetypes.Widget import RelatedItemsWidget
    from zope.interface import implements
    from zope.lifecycleevent import ObjectCreatedEvent
    from plone.uuid.interfaces import IUUID
    from plone.uuid.interfaces import IAttributeUUID

    class ExampleContent(object):
        implements(IAttributeUUID)

    obj1 = ExampleContent()
    obj2 = ExampleContent()
    notify(ObjectCreatedEvent(obj1))
    notify(ObjectCreatedEvent(obj2))

    with mock.patch('plone.app.widgets.utils.getUtility') as mock_method:
        registry = Mock()
        registry.get.return_value = ['SomeType']
        mock_method.return_value = registry

        self.context.fieldvalue = lambda: obj1
        field1 = ReferenceField(
            'fieldname1',
            relationship="A",
            multiValued=False,
            widget=RelatedItemsWidget(),
        )
        field1.accessor = "fieldvalue"
        self.assertEqual(
            {
                'name': 'fieldname1',
                'value': '{}'.format(IUUID(obj1)),
                'pattern': 'relateditems',
                'pattern_options': {
                    'folderTypes': ['SomeType'],
                    'homeText': u'Home',
                    'separator': ';',
                    'orderable': True,
                    'searchAllText': u'Entire site',
                    'searchText': u'Search',
                    'maximumSelectionSize': 1,
                    'vocabularyUrl': '/@@getVocabulary?name='
                                     'plone.app.vocabularies.Catalog'
                                     '&field=fieldname1',
                },
            },
            field1.widget._base_args(self.context, field1, self.request),
        )

        field2 = ReferenceField(
            'fieldname2',
            relationship="A",
            multiValued=True,
            widget=RelatedItemsWidget(),
        )
        field2.accessor = "fieldvalue"
        self.context.fieldvalue = lambda: [obj1, obj2]
        self.assertEqual(
            {
                'name': 'fieldname2',
                'value': '{};{}'.format(IUUID(obj1), IUUID(obj2)),
                'pattern': 'relateditems',
                'pattern_options': {
                    'folderTypes': ['SomeType'],
                    'homeText': u'Home',
                    'separator': ';',
                    'orderable': True,
                    'searchAllText': u'Entire site',
                    'searchText': u'Search',
                    'maximumSelectionSize': -1,
                    'vocabularyUrl': '/@@getVocabulary?name='
                                     'plone.app.vocabularies.Catalog'
                                     '&field=fieldname2',
                },
            },
            field2.widget._base_args(self.context, field2, self.request),
        )
def render(self):
    method = self.request.get('REQUEST_METHOD', 'GET')
    # import pdb
    # pdb.set_trace()
    if (method != 'POST'):
        return self.request.response.redirect(self.context.absolute_url())

    if self.request.form.get('form.button.Cancel'):
        return self.request.response.redirect(self.context.absolute_url())

    searchview = self.searchview()
    # datadic receives the front-end ajax post data
    datadic = self.request.form
    start = int(datadic['start'])  # batch search start position
    size = int(datadic['size'])    # batch search size
    sortcolumn = datadic['sortcolumn']
    sortdirection = datadic['sortdirection']
    keyword = (datadic['searchabletext']).strip()

    # origquery = searchview.getPathQuery()
    origquery = {}
    # default reverse, that is desc
    origquery['sort_on'] = sortcolumn
    # sql db sort_order: asc, desc
    origquery['sort_order'] = sortdirection
    # fuzzy search
    if keyword != "":
        origquery['SearchableText'] = '%' + keyword + '%'
    else:
        origquery['SearchableText'] = ""

    # origquery provides the batch search
    origquery['size'] = size
    origquery['start'] = start
    # totalquery searches everything; size = 0 returns the number of records
    totalquery = origquery.copy()
    totalquery['size'] = 0

    totalnum = searchview.search_multicondition(totalquery)
    origquery.update({"size": totalnum})
    resultDicLists = searchview.search_multicondition(origquery)
    del origquery
    del totalquery
    if totalnum == 0:
        return

    # fire a log event
    user = api.user.get_current()
    ip = get_ip(self.request)
    if user is None:
        return
    des = "从用户日志表导出了%s条日志" % totalnum
    loginEvent = NormalUserloginEvent(
        userid=getfullname_orid(user),
        datetime=datetime.datetime.now().strftime(fmt),
        ip=ip,
        type=0,
        description=des,
        result=1)
    if loginEvent.available():
        if loginEvent.is_normal_user():
            event.notify(loginEvent)
        else:
            des = "从管理员日志表导出了%s条日志" % totalnum
            loginEvent = AddloginEvent(
                adminid=getfullname_orid(user),
                userid="",
                datetime=datetime.datetime.now().strftime(fmt),
                ip=ip,
                type=0,
                description=des,
                result=1)
            event.notify(loginEvent)

    return self.exportData(resultDicLists)
def create(self, context, request):
    """/@@API/create: Create new object.

    Required parameters:

        - obj_type = portal_type of new object.
        - obj_path = path of new object, from plone site root.
            - Not required for obj_type=AnalysisRequest

    Optionally:

        - obj_id = ID of new object.

    All other parameters in the request are matched against the object's
    Schema.  If a matching field is found in the schema, then the value is
    taken from the request and sent to the field's mutator.

    Reference fields may have their target value(s) specified with a
    delimited string query syntax, containing the portal_catalog search:

        <FieldName>=index1:value1|index2:value2

    eg to set the Client of a batch:

        ...@@API/update?obj_path=<path>...
        ...&Client=title:<client_title>&...

    And, to set a multi-valued reference, these both work:

        ...@@API/update?obj_path=<path>...
        ...&InheritedObjects:list=title:AR1...
        ...&InheritedObjects:list=title:AR2...

        ...@@API/update?obj_path=<path>...
        ...&InheritedObjects[]=title:AR1...
        ...&InheritedObjects[]=title:AR2...

    The Analysis_Specification parameter is special, it mimics the format
    of the python dictionaries, and only service Keyword can be used to
    reference services.  Even if the keyword is not actively required, it
    must be supplied:

        <service_keyword>:min:max:error tolerance

    The function returns a dictionary as a json string:

    {
        runtime: Function running time.
        error: true or string(message) if error. false if no error.
        success: true or string(message) if success. false if no success.
    }

    >>> portal = layer['portal']
    >>> portal_url = portal.absolute_url()
    >>> from plone.app.testing import SITE_OWNER_NAME
    >>> from plone.app.testing import SITE_OWNER_PASSWORD

    Simple AR creation, no obj_path parameter is required:

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create", "&".join([
    ...     "obj_type=AnalysisRequest",
    ...     "Client=portal_type:Client|id:client-1",
    ...     "SampleType=portal_type:SampleType|title:Apple Pulp",
    ...     "Contact=portal_type:Contact|getFullname:Rita Mohale",
    ...     "Services:list=portal_type:AnalysisService|title:Calcium",
    ...     "Services:list=portal_type:AnalysisService|title:Copper",
    ...     "Services:list=portal_type:AnalysisService|title:Magnesium",
    ...     "SamplingDate=2013-09-29",
    ...     "Specification=portal_type:AnalysisSpec|title:Apple Pulp",
    ... ]))
    >>> browser.contents
    '{..."success": true...}'

    If some parameters are specified and are not located as existing fields
    or properties of the created instance, the create should fail:

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create?", "&".join([
    ...     "obj_type=Batch",
    ...     "obj_path=/batches",
    ...     "title=Test",
    ...     "Thing=Fish"
    ... ]))
    >>> browser.contents
    '{...The following request fields were not used: ...Thing...}'

    Now we test that the AR create also fails if some fields are spelled wrong:

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create", "&".join([
    ...     "obj_type=AnalysisRequest",
    ...     "thing=Fish",
    ...     "Client=portal_type:Client|id:client-1",
    ...     "SampleType=portal_type:SampleType|title:Apple Pulp",
    ...     "Contact=portal_type:Contact|getFullname:Rita Mohale",
    ...     "Services:list=portal_type:AnalysisService|title:Calcium",
    ...     "Services:list=portal_type:AnalysisService|title:Copper",
    ...     "Services:list=portal_type:AnalysisService|title:Magnesium",
    ...     "SamplingDate=2013-09-29"
    ... ]))
    >>> browser.contents
    '{...The following request fields were not used: ...thing...}'
    """
    savepoint = transaction.savepoint()
    self.context = context
    self.request = request
    self.unused = [x for x in self.request.form.keys()]
    self.used("form.submitted")
    self.used("__ac_name")
    self.used("__ac_password")
    # always require obj_type
    self.require("obj_type")
    obj_type = self.request['obj_type']
    self.used("obj_type")
    # AnalysisRequest shortcut: creates Sample, Partition, AR, Analyses.
    if obj_type == "AnalysisRequest":
        try:
            return self._create_ar(context, request)
        except:
            savepoint.rollback()
            raise
    # Other object types require explicit path as their parent
    self.require("obj_path")
    obj_path = self.request['obj_path']
    if not obj_path.startswith("/"):
        obj_path = "/" + obj_path
    self.used("obj_path")
    site_path = request['PATH_INFO'].replace("/@@API/create", "")
    parent = context.restrictedTraverse(str(site_path + obj_path))
    # normal permissions still apply for this user
    if not getSecurityManager().checkPermission(AccessJSONAPI, parent):
        msg = "You don't have the '{0}' permission on {1}".format(
            AccessJSONAPI, parent.absolute_url())
        raise Unauthorized(msg)

    obj_id = request.get("obj_id", "")
    _renameAfterCreation = False
    if not obj_id:
        _renameAfterCreation = True
        obj_id = tmpID()
    self.used(obj_id)

    ret = {
        "url": router.url_for("create", force_external=True),
        "success": True,
        "error": False,
    }

    try:
        obj = _createObjectByType(obj_type, parent, obj_id)
        obj.unmarkCreationFlag()
        if _renameAfterCreation:
            renameAfterCreation(obj)
        ret['obj_id'] = obj.getId()
        used_fields = set_fields_from_request(obj, request)
        for field in used_fields:
            self.used(field)
        obj.reindexObject()
        obj.aq_parent.reindexObject()
        event.notify(ObjectInitializedEvent(obj))
        obj.at_post_create_script()
    except:
        savepoint.rollback()
        raise

    if self.unused:
        raise BadRequest(
            "The following request fields were not used: %s. "
            "Request aborted." % self.unused)

    return ret
def __call__(self, filename, title, description, content_type, data,
             portal_type):
    context = aq_inner(self.context)
    error = ''
    result = {}
    result['success'] = None
    newid = get_id_from_filename(filename, context)
    chooser = INameChooser(context)
    newid = chooser.chooseName(newid, context)
    # consolidation because it's different upon Plone versions
    if not title:
        # try to split filenames because we don't want
        # big titles without spaces
        title = filename.rsplit('.', 1)[0]\
            .replace('_', ' ')\
            .replace('-', ' ')

    if newid in context:
        # only here for flashupload method since a check_id is done
        # in standard uploader - see also XXX in quick_upload.py
        raise NameError('Object id %s already exists' % newid)
    else:
        upload_lock.acquire()
        try:
            transaction.begin()
            try:
                context.invokeFactory(type_name=portal_type,
                                      id=newid,
                                      title=title,
                                      description=description)
            except Unauthorized:
                error = u'serverErrorNoPermission'
            except ValueError:
                error = u'serverErrorDisallowedType'
            except Exception as e:
                error = u'serverError'
                logger.exception(e)

            if error:
                if error == u'serverError':
                    logger.info(
                        "An error happens with setId from filename, "
                        "the file has been created with a bad id, "
                        "can't find %s", newid)
            else:
                obj = getattr(context, newid)
                if obj:
                    error = IQuickUploadFileSetter(obj).set(
                        data, filename, content_type)
                    obj._at_rename_after_creation = False
                    try:
                        # Archetypes
                        obj.processForm()
                    except AttributeError:
                        # Dexterity
                        notify(ObjectModifiedEvent(obj))
                    else:
                        del obj._at_rename_after_creation

            # TODO : rollback if there has been an error
            transaction.commit()
        finally:
            upload_lock.release()

    result['error'] = error
    if not error:
        result['success'] = obj
    return result
def migrate_object(self, obj):
    self.migrate_class(obj, TemplateDossier)
    notify(ObjectModifiedEvent(obj))
def _process(self, mlist, msg, msgdata):
    """See `TerminalChainBase`."""
    # Start by decorating the message with a header that contains a list
    # of all the rules that matched.  These metadata could be None or an
    # empty list.
    rule_hits = msgdata.get('rule_hits')
    if rule_hits:
        msg['X-Mailman-Rule-Hits'] = SEMISPACE.join(rule_hits)
    rule_misses = msgdata.get('rule_misses')
    if rule_misses:
        msg['X-Mailman-Rule-Misses'] = SEMISPACE.join(rule_misses)
    reasons = format_reasons(msgdata.get('moderation_reasons', ['n/a']))
    # Hold the message by adding it to the list's request database.
    request_id = hold_message(mlist, msg, msgdata, SEMISPACE.join(reasons))
    # Calculate a confirmation token to send to the author of the
    # message.
    pendable = HeldMessagePendable(id=request_id)
    token = getUtility(IPendings).add(pendable)
    # Get the language to send the response in.  If the sender is a
    # member, then send it in the member's language, otherwise send it in
    # the mailing list's preferred language.
    member = mlist.members.get_member(msg.sender)
    language = (member.preferred_language
                if member else mlist.preferred_language)
    # A substitution dictionary for the email templates.
    charset = mlist.preferred_language.charset
    original_subject = msg.get('subject')
    if original_subject is None:
        original_subject = _('(no subject)')
    else:
        # This must be encoded to the mailing list's preferred charset,
        # ignoring incompatible characters, otherwise when creating the
        # notification messages, we could get a Unicode error.
        oneline_subject = oneline(original_subject, in_unicode=True)
        bytes_subject = oneline_subject.encode(charset, 'replace')
        original_subject = bytes_subject.decode(charset)
    substitutions = dict(
        subject=original_subject,
        sender_email=msg.sender,
        reasons=_compose_reasons(msgdata),
        # For backward compatibility.
        sender=msg.sender,
        )
    # At this point the message is held, but now we have to craft at least
    # two responses.  The first will go to the original author of the
    # message and it will contain the token allowing them to approve or
    # discard the message.  The second one will go to the moderators of
    # the mailing list, if the list is so configured.
    #
    # Start by possibly sending a response to the message author.  There
    # are several reasons why we might not go through with this.  If the
    # message was gated from NNTP, the author may not even know about this
    # list, so don't spam them.  If the author specifically requested that
    # acknowledgments not be sent, or if the message was bulk email, then
    # we do not send the response.  It's also possible that either the
    # mailing list, or the author (if they are a member) have been
    # configured to not send such responses.
    if (not msgdata.get('fromusenet')
            and can_acknowledge(msg)
            and mlist.respond_to_post_requests
            and autorespond_to_sender(mlist, msg.sender, language)):
        # We can respond to the sender with a message indicating their
        # posting was held.
        subject = _(
            'Your message to $mlist.fqdn_listname awaits moderator approval')
        send_language_code = msgdata.get('lang', language.code)
        template = getUtility(ITemplateLoader).get(
            'list:user:notice:hold', mlist,
            language=send_language_code)
        text = wrap(expand(template, mlist, dict(
            language=send_language_code,
            **substitutions)))
        adminaddr = mlist.bounces_address
        nmsg = UserNotification(
            msg.sender, adminaddr, subject, text,
            getUtility(ILanguageManager)[send_language_code])
        nmsg.send(mlist)
    # Now the message for the list moderators.  This one should appear to
    # come from <list>-owner since we really don't need to do bounce
    # processing on it.
    if mlist.admin_immed_notify:
        # Now let's temporarily set the language context to that which the
        # administrators are expecting.
        with _.using(mlist.preferred_language.code):
            language = mlist.preferred_language
            charset = language.charset
            substitutions['subject'] = original_subject
            # We need to regenerate or re-translate a few values in the
            # substitution dictionary.
            substitutions['reasons'] = _compose_reasons(msgdata, 55)
            # Craft the admin notification message and deliver it.
            subject = _(
                '$mlist.fqdn_listname post from $msg.sender requires '
                'approval')
            nmsg = UserNotification(mlist.owner_address,
                                    mlist.owner_address,
                                    subject, lang=language)
            nmsg.set_type('multipart/mixed')
            template = getUtility(ITemplateLoader).get(
                'list:admin:action:post', mlist)
            text = MIMEText(expand(template, mlist, substitutions),
                            _charset=charset)
            dmsg = MIMEText(wrap(_("""\
If you reply to this message, keeping the Subject: header intact, Mailman will
discard the held message.  Do this if the message is spam.  If you reply to
this message and include an Approved: header with the list password in it, the
message will be approved for posting to the list.  The Approved: header can
also appear in the first line of the body of the reply.""")),
                            _charset=language.charset)
            dmsg['Subject'] = 'confirm ' + token
            dmsg['From'] = mlist.request_address
            dmsg['Date'] = formatdate(localtime=True)
            dmsg['Message-ID'] = make_msgid()
            nmsg.attach(text)
            nmsg.attach(MIMEMessage(msg))
            nmsg.attach(MIMEMessage(dmsg))
            nmsg.send(mlist)
    # Log the held message.  Log messages are not translated, so recast
    # the reasons in English.
    with _.using('en'):
        reasons = format_reasons(
            msgdata.get('moderation_reasons', ['N/A']))
        log.info('HOLD: %s post from %s held, message-id=%s: %s',
                 mlist.fqdn_listname, msg.sender,
                 msg.get('message-id', 'n/a'), SEMISPACE.join(reasons))
    notify(HoldEvent(mlist, msg, msgdata, self))
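# The notices above are built from templates containing $-placeholders
# ($mlist.fqdn_listname, $subject, ...) that expand() fills in from the
# substitutions dict.  A minimal sketch of that substitution style, using
# only the stdlib string.Template rather than Mailman's own expand()
# helper; the template text and values below are invented for illustration.
from string import Template

example_template = Template(
    'Your message "$subject" to $listname awaits moderator approval.')
print(example_template.safe_substitute(
    subject='Meeting agenda', listname='test@example.com'))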
def update_inherit(self, status=True, reindex=True):
    """Wrapper for the super method that also notifies a corresponding
    event. Needed for adding a journal entry after a change of the
    role inheritance.
    """
    user = api.user.get_current()
    is_administrator = user.has_role('Administrator') or user.has_role(
        'Manager')
    # Modifying local roles needs the "Sharing page: Delegate roles"
    # permission as well as "Modify portal content". However, we don't
    # want to give the "Role Manager" Role "Modify portal content",
    # so we circumvent the permission check here by temporarily assuming
    # the owner's roles. [lgraf]
    context = self.context
    portal_membership = getToolByName(context, 'portal_membership')
    block = not status
    oldblock = bool(
        getattr(aq_base(context), '__ac_local_roles_block__', False))
    if block == oldblock:
        return False
    # Store the real user.
    user = portal_membership.getAuthenticatedMember()
    # Assume the manager (owner) security context.
    old_sm = getSecurityManager()
    owner = getToolByName(
        context, 'portal_url').getPortalObject().getWrappedOwner()
    newSecurityManager(self.context, owner)
    if block and not is_administrator:
        # If the user has inherited local roles and removes inheritance,
        # locally set the roles they inherited before, to avoid a
        # definitive loss of access (refs #11945).
        # For administrators and managers we skip this fallback, because
        # access for those users is ensured by their global roles. This
        # avoids local_roles assigned to specific users, which should
        # normally not be used.
        context_roles = user.getRolesInContext(context)
        global_roles = user.getRoles()
        local_roles = [r for r in context_roles if r not in global_roles]
        if local_roles:
            assignment = SharingRoleAssignment(user.getId(), local_roles)
            RoleAssignmentManager(
                self.context).add_or_update_assignment(assignment)
    context.__ac_local_roles_block__ = True if block else None
    # Restore the old security manager.
    setSecurityManager(old_sm)
    if reindex:
        context.reindexObjectSecurity()
    if not block:
        notify(LocalRolesAcquisitionActivated(self.context))
    else:
        notify(LocalRolesAcquisitionBlocked(self.context))
    return True
def _create_ar(self, context, request):
    """Creates AnalysisRequest object, with supporting Sample, Partition
    and Analysis objects.  The client is retrieved from the obj_path
    key in the request.

    Required request parameters:

        - Contact: One client contact Fullname.  The contact must exist
          in the specified client.  The first Contact with the specified
          value in its Fullname field will be used.

        - SampleType_<index> - Must be an existing sample type.

    Optional request parameters:

        - CCContacts: A list of contact Fullnames, which will be copied
          on all messages related to this AR and its sample or results.

        - CCEmails: A list of email addresses to include as above.

        - Sample_id: Create a secondary AR with an existing sample.  If
          unspecified, a new sample is created.

        - Specification: a lookup to set Analysis specs default values
          for all analyses.

        - Analysis_Specification: specs (or overrides) per analysis,
          using a special lookup format.

            &Analysis_Specification:list=<Keyword>:min:max:error&...
    """
    wftool = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    bsc = getToolByName(context, 'bika_setup_catalog')
    pc = getToolByName(context, 'portal_catalog')
    ret = {
        "url": router.url_for("create", force_external=True),
        "success": True,
        "error": False,
    }
    SamplingWorkflowEnabled = context.bika_setup.getSamplingWorkflowEnabled()

    for field in [
            'Client', 'SampleType', 'Contact', 'SamplingDate', 'Services']:
        self.require(field)
        self.used(field)

    try:
        client = resolve_request_lookup(
            context, request, 'Client')[0].getObject()
    except IndexError:
        raise Exception("Client not found")

    # Sample_id
    if 'Sample' in request:
        try:
            sample = resolve_request_lookup(
                context, request, 'Sample')[0].getObject()
        except IndexError:
            raise Exception("Sample not found")
    else:
        # Primary AR
        sample = _createObjectByType("Sample", client, tmpID())
        sample.unmarkCreationFlag()
        fields = set_fields_from_request(sample, request)
        for field in fields:
            self.used(field)
        sample._renameAfterCreation()
        sample.setSampleID(sample.getId())
        event.notify(ObjectInitializedEvent(sample))
        sample.at_post_create_script()
        if SamplingWorkflowEnabled:
            wftool.doActionFor(sample, 'sampling_workflow')
        else:
            wftool.doActionFor(sample, 'no_sampling_workflow')
    ret['sample_id'] = sample.getId()

    parts = [{
        'services': [],
        'container': [],
        'preservation': '',
        'separate': False
    }]

    specs = self.get_specs_from_request()
    ar = _createObjectByType("AnalysisRequest", client, tmpID())
    ar.unmarkCreationFlag()
    fields = set_fields_from_request(ar, request)
    for field in fields:
        self.used(field)
    ar.setSample(sample.UID())
    ar._renameAfterCreation()
    ret['ar_id'] = ar.getId()
    brains = resolve_request_lookup(context, request, 'Services')
    service_uids = [p.UID for p in brains]
    new_analyses = ar.setAnalyses(service_uids, specs=specs)
    ar.setRequestID(ar.getId())
    ar.reindexObject()
    event.notify(ObjectInitializedEvent(ar))
    ar.at_post_create_script()

    # Create sample partitions
    parts_and_services = {}
    for _i in range(len(parts)):
        p = parts[_i]
        part_prefix = sample.getId() + "-P"
        if '%s%s' % (part_prefix, _i + 1) in sample.objectIds():
            parts[_i]['object'] = sample['%s%s' % (part_prefix, _i + 1)]
            parts_and_services['%s%s' % (part_prefix, _i + 1)] = p['services']
            part = parts[_i]['object']
        else:
            part = _createObjectByType("SamplePartition", sample, tmpID())
            parts[_i]['object'] = part
            container = None
            preservation = p['preservation']
            parts[_i]['prepreserved'] = False
            part.edit(
                Container=container,
                Preservation=preservation,
            )
            part.processForm()
            if SamplingWorkflowEnabled:
                wftool.doActionFor(part, 'sampling_workflow')
            else:
                wftool.doActionFor(part, 'no_sampling_workflow')
            parts_and_services[part.id] = p['services']

    if SamplingWorkflowEnabled:
        wftool.doActionFor(ar, 'sampling_workflow')
    else:
        wftool.doActionFor(ar, 'no_sampling_workflow')

    # Add analyses to sample partitions
    # XXX jsonapi create AR: right now, all new analyses are linked to the
    # first samplepartition
    if new_analyses:
        analyses = list(part.getAnalyses())
        analyses.extend(new_analyses)
        part.edit(Analyses=analyses)
        for analysis in new_analyses:
            analysis.setSamplePartition(part)

    # If Preservation is required for some partitions,
    # and the SamplingWorkflow is disabled, we need
    # to transition to to_be_preserved manually.
    if not SamplingWorkflowEnabled:
        to_be_preserved = []
        sample_due = []
        lowest_state = 'sample_due'
        for p in sample.objectValues('SamplePartition'):
            if p.getPreservation():
                lowest_state = 'to_be_preserved'
                to_be_preserved.append(p)
            else:
                sample_due.append(p)
        for p in to_be_preserved:
            doActionFor(p, 'to_be_preserved')
        for p in sample_due:
            doActionFor(p, 'sample_due')
        doActionFor(sample, lowest_state)
        for analysis in ar.objectValues('Analysis'):
            doActionFor(analysis, lowest_state)
        doActionFor(ar, lowest_state)

    # Receive secondary AR
    if request.get('Sample_id', ''):
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        not_receive = [
            'to_be_sampled', 'sample_due', 'sampled', 'to_be_preserved']
        sample_state = wftool.getInfoFor(sample, 'review_state')
        if sample_state not in not_receive:
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sampled')
            doActionFor(analysis, 'sample_due')
            if sample_state not in not_receive:
                doActionFor(analysis, 'receive')

    if self.unused:
        raise BadRequest(
            "The following request fields were not used: %s. "
            "Request aborted." % self.unused)

    return ret
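# A sketch of the kind of form data such a "create" request might carry,
# limited to the parameters named in the docstring and require() calls
# above.  The values are invented placeholders, and the exact lookup syntax
# expected by resolve_request_lookup() is not reproduced here; the
# Analysis_Specification entries follow the documented
# <Keyword>:min:max:error format.
example_request = {
    'Client': 'Happy Hills',          # resolved via resolve_request_lookup
    'Contact': 'Rita Mohale',         # client contact Fullname
    'SampleType': 'Water',
    'SamplingDate': '2017-01-01',
    'Services': 'Calcium,Magnesium',  # resolved to service UIDs
    'Analysis_Specification:list': ['Ca:9:11:1', 'Mg:9:11:1'],
}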
def handler_watcher(changeset: Changeset):
    """Take directory_watcher info and broadcast zope.event"""
    # This is the notify side of the zope.event subscription
    event = NewBatch(changeset=changeset)
    notify(event)
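# The subscriber side of the broadcast above: zope.event passes every
# notified object to each callable in zope.event.subscribers, so a handler
# can filter for NewBatch itself.  This is only a sketch; the handler name
# and process_changeset() are hypothetical, as is the assumption that the
# event exposes the changeset it was created with.
import zope.event

def on_new_batch(event):
    if isinstance(event, NewBatch):
        # Hypothetical reaction to the changeset carried by the event.
        process_changeset(event.changeset)

zope.event.subscribers.append(on_new_batch)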
def __call__(self, container, request):
    notify(BeforeTraverseEvent(container, request))
def __setitem__(self, key, value):
    link = getUtility(IIntIds).getObject(self.lid)
    link.shared[key] = value
    notify(ObjectModifiedEvent(link))
def transaction_pubevents(request, response, tm=transaction.manager):
    try:
        setDefaultSkin(request)
        newInteraction()
        tm.begin()
        notify(pubevents.PubStart(request))

        yield

        notify(pubevents.PubBeforeCommit(request))
        if tm.isDoomed():
            tm.abort()
        else:
            tm.commit()
        notify(pubevents.PubSuccess(request))
    except Exception as exc:
        # Normalize HTTP exceptions
        # (For example turn zope.publisher NotFound into zExceptions NotFound)
        exc_type, _ = upgradeException(exc.__class__, None)
        if not isinstance(exc, exc_type):
            exc = exc_type(str(exc))

        # Create new exc_info with the upgraded exception.
        exc_info = (exc_type, exc, sys.exc_info()[2])

        try:
            # Raise exception from app if handle-errors is False
            # (set by zope.testbrowser in some cases)
            if request.environ.get('x-wsgiorg.throw_errors', False):
                reraise(*exc_info)

            retry = False
            unauth = False
            debug_exc = getattr(response, 'debug_exceptions', False)

            # If the exception is transient and the request can be retried,
            # shortcut further processing. It makes no sense to have an
            # exception view registered for this type of exception.
            if isinstance(exc, TransientError) and request.supports_retry():
                retry = True
            else:
                # Handle exception view. Make sure an exception view that
                # blows up doesn't leave the user e.g. unable to log in.
                try:
                    exc_view_created = _exc_view_created_response(
                        exc, request, response)
                except Exception:
                    exc_view_created = False

                # _unauthorized modifies the response in-place. If this hook
                # is used, an exception view for Unauthorized has to merge
                # the state of the response and the exception instance.
                if isinstance(exc, Unauthorized):
                    unauth = True
                    exc.setRealm(response.realm)
                    response._unauthorized()
                    response.setStatus(exc.getStatus())

            # Notify subscribers that this request is failing.
            notify(pubevents.PubBeforeAbort(request, exc_info, retry))
            tm.abort()
            notify(pubevents.PubFailure(request, exc_info, retry))

            if retry or \
               (not unauth and (debug_exc or not exc_view_created)):
                reraise(*exc_info)
        finally:
            # Avoid traceback / exception reference cycle.
            del exc, exc_info
    finally:
        endInteraction()
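# One way application code can observe these publication events: the
# PubSuccess / PubFailure objects notified above implement interfaces from
# ZPublisher.interfaces, so a handler can be registered for them.  This is a
# minimal sketch assuming zope.component's event dispatcher is wired up (as
# in a normal Zope deployment); the logger name and the logging it performs
# are illustrative only.
import logging

from zope.component import adapter, provideHandler
from ZPublisher.interfaces import IPubFailure

req_log = logging.getLogger('request.failures')

@adapter(IPubFailure)
def log_failed_request(event):
    # event.exc_info carries (type, value, traceback); event.retry says
    # whether the request will be retried after the abort.
    req_log.info('Request failed (retry=%s): %r',
                 event.retry, event.exc_info[1])

provideHandler(log_failed_request)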
def clear(self):
    deleted = list(self._links.items())
    self._links.clear()
    self._lids.clear()
    for name, link in deleted:
        notify(ObjectRemovedEvent(link, self._links, name))
def update(self):
    self.context = aq_inner(self.context)
    # XXX: used to lock the object when someone is editing it
    notify(EditBegunEvent(self.context))