def reject_syndication(obj, event):
    """Handle the ``reject_syndication`` transition on a syndication proxy.

    Records the rejecting site on the source object's
    ``rejected_syndication_sites`` references, drops the proxy from the
    source's ``current_syndication_targets``, pushes the source back
    through review, and finally deletes the proxy object itself.
    """
    # Only react to our own workflow ...
    if event.workflow.id != 'syndication_proxy_workflow':
        return
    # ... and to our own transition.
    transition = event.transition
    if (transition.id if transition else None) != 'reject_syndication':
        return

    wf_tool = getToolByName(obj, 'portal_workflow')
    org_path = getNavigationRoot(obj)
    organization = obj.restrictedTraverse(org_path)
    source = utils.get_proxy_source(obj)

    # Remember which site rejected the syndication request ...
    referenceable.IReferenceable(source).addReference(
        referenceable.IReferenceable(organization),
        relationship='rejected_syndication_sites')
    # ... and forget the proxy as an active syndication target.
    referenceable.IReferenceable(source).deleteReference(
        referenceable.IReferenceable(obj),
        relationship='current_syndication_targets')

    # Go through the workflow tool's doActionFor so that
    # IAfterTransitionEvent gets fired correctly.
    sudo(wf_tool.doActionFor, source, 'review_syndication')

    # The proxy for this syndication request is no longer needed.
    aq_parent(obj).manage_delObjects([obj.getId()])
def unpublish_proxy(obj, event):
    """Retract every published proxy once its source object is unpublished."""
    proxies = referenceable.IReferenceable(obj).getRefs(
        relationship='current_syndication_targets')
    # Nothing to do when the object has no syndication proxies.
    if not proxies:
        return
    # Only act when the source is actually leaving the 'published' state.
    if event.old_state.id != 'published':
        return
    wf_tool = getToolByName(obj, 'portal_workflow')
    for proxy in proxies:
        # Retract only proxies that are currently published themselves.
        if wf_tool.getInfoFor(proxy, "review_state") == "published":
            sudo(wf_tool.doActionFor, proxy, 'retract')
def reject_move(obj, event):
    """Delete the move-request proxy after the move was rejected."""
    # Ignore events coming from other workflows ...
    if event.workflow.id != 'syndication_proxy_move_workflow':
        return
    # ... and other transitions of our workflow.
    transition = event.transition
    if (transition.id if transition else None) != 'reject_move':
        return
    # Use the workflow tool's doActionFor so that IAfterTransitionEvent
    # gets fired correctly.
    wf_tool = getToolByName(obj, 'portal_workflow')
    source = utils.get_proxy_source(obj)
    sudo(wf_tool.doActionFor, source, 'review_move')
    # Drop the proxy that represented this move request.
    aq_parent(obj).manage_delObjects([obj.getId()])
def accept_syndication(obj, event):
    """Mark the source object as syndicated and publish the proxy."""
    # Only our workflow is of interest here.
    if event.workflow.id != 'syndication_proxy_workflow':
        return
    # Both manual acceptance and auto-approval count as "accepted".
    accepted = {'accept_syndication', 'auto_approve_syndication'}
    transition = event.transition
    if (transition.id if transition else None) not in accepted:
        return

    workflow = getToolByName(obj, 'portal_workflow')
    source = utils.get_proxy_source(obj)
    # Move the source to 'syndicated' unless it is there already.
    if workflow.getInfoFor(source, 'syndication_state') != 'syndicated':
        sudo(workflow.doActionFor, source, 'review_syndication')
    # Automatically publish the proxy object.
    workflow.doActionFor(obj, 'publish')
def send_syndication_notification(obj, event):
    """When an item's syndication state changes, send a notification.

    Collects the notification addresses of the source site and of every
    target organization named in the transition's keyword arguments, then
    renders the ``digest_notification`` view for the object and mails the
    resulting HTML to those addresses.
    """
    # Needed for the MIME container below; imported locally (rather than
    # relying on the Python-2-only lazy alias email.mime.Multipart) so the
    # code works on both Python 2 and 3.
    from email.mime.multipart import MIMEMultipart

    # Bail out if this isn't our workflow
    if event.workflow.id not in {
            'syndication_source_workflow',
            'syndication_source_move_workflow'}:
        return
    # Don't send an email for the empty transition that is just meant to
    # trigger automatic transitions
    transition_id = event.transition and event.transition.id or None
    if transition_id in {'review_syndication', 'review_move'}:
        return

    portal = getToolByName(obj, 'portal_url').getPortalObject()
    catalog = getToolByName(obj, 'portal_catalog')
    mfromname = api.portal.get_registry_record('plone.email_from_name')
    mfrom = api.portal.get_registry_record('plone.email_from_address')
    source = hooks.getSite()
    organizations = (event.kwargs or {}).get('organizations', ())

    # Collect recipient addresses: the source site's own notification
    # addresses plus those of every target organization.  Collect parts in
    # a list and join once — the previous ``bccs += ', ' + addr`` string
    # concatenation produced a leading ', ' (and an empty recipient entry)
    # whenever the source site had no notification_emails.
    bcc_parts = []
    source_addr = getattr(source, 'notification_emails', '')
    if source_addr:
        bcc_parts.append(source_addr)
    for target_brain in catalog(UID=organizations):
        target_addr = getattr(
            target_brain.getObject(), 'notification_emails', None)
        if target_addr:
            bcc_parts.append(target_addr)
    # No recipients at all: nothing to send.
    if not bcc_parts:
        return
    bccs = ', '.join(bcc_parts)

    # De-duplicate recipients by address, keeping the first real name seen
    # for each address.
    bccs_by_addr = {}
    for name, addr in email.utils.getaddresses([bccs]):
        if not bccs_by_addr.get(addr):
            bccs_by_addr[addr] = name
    # .items() instead of the Python-2-only .iteritems().
    mto = u', '.join(
        email.utils.formataddr((name, addr))
        for addr, name in bccs_by_addr.items())

    mailhost = getToolByName(obj, 'MailHost')
    payload = {
        'new_state_id': event.new_state.id,
        'object_uid': api.content.get_uuid(obj),
        'old_state_id': event.old_state.id,
        'status': event.status,
        'transition_id': event.transition and event.transition.id or None,
    }
    # Bail out if this is the initial object creation transition
    if payload['transition_id'] is None and \
            payload['new_state_id'] == payload['old_state_id']:
        logger.debug(
            'Syndication notification NOT queued in %s for %s: %s',
            getNavigationRoot(obj), obj, payload
        )
        return

    nav_root_path = getNavigationRoot(obj)
    nav_root = obj.restrictedTraverse(nav_root_path)
    payload['organization_title'] = nav_root.title_or_id()
    payload['organization_uid'] = api.content.get_uuid(nav_root)
    update_payload(obj, payload)
    if '/' in payload['object_uid']:
        # Plone site — the "uid" is actually a path in this case.
        proxy = portal.unrestrictedTraverse(payload['object_uid'])
    else:
        proxy = sudo(uuidToObject, payload['object_uid'])
    update_payload(proxy, payload)

    subject = ('Pending review status for {0!r}'.format(obj))
    # Create the enclosing (outer) message
    outer = MIMEMultipart()
    # Create the HTML via the digest_notification view
    digest_notification = component.getMultiAdapter(
        (obj, obj.REQUEST), name='digest_notification')
    digest_notification.update(payload)
    html = digest_notification()
    # Create the MIME wrapper and attach it
    html_part = email.mime.text.MIMEText(
        html, _subtype='html', _charset='UTF-8')
    outer.attach(html_part)
    mailhost.send(
        outer,
        subject=subject,
        mfrom="%s <%s>" % (mfromname, mfrom),
        mto=mto)
    logger.debug('Syndication notification queued to be sent %s for %s: %s',
                 nav_root, obj, payload)
def accept_move(proxy, event): """ Move the content to the correct location based on the target organization chosen. """ # Bail out if this isn't our workflow if event.workflow.id != 'syndication_proxy_move_workflow': return # and not our transition transition_id = event.transition and event.transition.id or None if transition_id != 'move': return wft = getToolByName(proxy, 'portal_workflow') history = wft.getHistoryOf( 'syndication_source_move_workflow', utils.get_proxy_source(proxy)) # last history entry is for current transition_id # we need to get the previous one last_request_move = [ wfh for wfh in reversed(history) if wfh['action'] == 'request_move' ][0] organization = sudo(uuidToObject, last_request_move['organization']) catalog = getToolByName(proxy, 'portal_catalog') organization_path = getNavigationRoot(organization) targets = catalog( path=organization_path, object_provides=behaviors.ISyndicationTarget.__identifier__) for target in targets: target_obj = target.getObject() if getNavigationRoot(target_obj) == organization_path: break else: raise ValueError( 'Could not find organization for target of {0!r}'.format(proxy)) # Move original object into place source = utils.get_proxy_source(proxy) source_parent = aq_parent(source) paste_id = source.getId() paste = sudo(source_parent.manage_cutObjects, ids=[paste_id]) sudo(target_obj.manage_pasteObjects, paste) # Automatically publish moved object moved_obj = target_obj[paste_id] if wft.getInfoFor(moved_obj, "review_state") != "published": sudo(wft.doActionFor, moved_obj, 'publish') # Update moved object's syndication_state sudo(wft.doActionFor, moved_obj, 'review_move') # Remove the proxy for this move request aq_parent(proxy).manage_delObjects([proxy.getId()]) request = globalrequest.getRequest() if request is not None: request['redirect_to'] = '/'.join( moved_obj.getPhysicalPath()[ len(api.portal.get().getPhysicalPath()):])
def create_digest(app):
    """Build and send the daily syndication digest for every organization.

    Intended to be run as a Zope script (receives the application root).
    Drains each organization's notification queue, renders one digest HTML
    message per organization, and mails it to every user who can review
    content for that organization.  Failed deliveries are re-queued with
    the set of user ids that still need notifying.
    """
    plone_site_id, domain = arg_handler()
    upgrade_logger(__name__, logging.DEBUG)
    # org path -> list of user ids successfully mailed (for the summary log)
    email_counter = {}
    review_perm = 'Review portal content'
    app = makerequest(app, environ={'SERVER_NAME': domain})
    portal = app[plone_site_id]
    setSite(portal)
    mfromname = api.portal.get_registry_record('plone.email_from_name')
    mfrom = api.portal.get_registry_record('plone.email_from_address')
    notification_tool = getToolByName(portal, 'portal_syn_notification')
    acl_users = getToolByName(portal, 'acl_users')
    mailhost = getToolByName(portal, 'MailHost')
    all_user_ids = acl_users.getUserIds()
    # Remember the current security manager so it can be restored before
    # processing each organization (the per-user loop below switches it).
    managers = {'curr': getSecurityManager()}
    logger.info('Starting syndication digest creation.')
    for nav_root_uid, queue in notification_tool.queues.items():
        # Create view instance for each organization, so that state
        # is not preserved
        digest_notification = getMultiAdapter((portal, portal.REQUEST),
                                              name='digest_notification')
        # Create the enclosing (outer) message
        outer = MIMEMultipart()
        setSecurityManager(managers['curr'])
        organization = uuidToObject(nav_root_uid)
        org_path = '/'.join(organization.getPhysicalPath())
        subject = 'Pending review status daily digest'
        # Drain this organization's queue, feeding every payload into the
        # digest view.
        while queue:
            payload = queue.pull()
            if '/' in payload['object_uid']:
                # Plone site — the "uid" is actually a traversal path.
                proxy = portal.unrestrictedTraverse(payload['object_uid'])
            else:
                proxy = sudo(uuidToObject, payload['object_uid'])
            update_payload(proxy, payload)
            logger.debug('Adding activity for %r: %r',
                         organization, proxy or 'Proxy deleted')
            digest_notification.update(payload)
        if not digest_notification.items_by_uid:
            logger.info('No activity for: %r', organization)
            continue
        # Create the HTML
        html = digest_notification()
        # Create the MIME wrapper
        html_part = MIMEText(html, _subtype='html', _charset='UTF-8')
        # Attach part
        outer.attach(html_part)
        # If we're processing a re-queued item with failed user_id's,
        # only notify those that failed
        # NOTE(review): 'payload' here is the LAST item pulled from the
        # queue above, so only the last payload's failed_user_ids are
        # honoured — confirm this is the intended requeue granularity.
        payload.setdefault('failed_user_ids', set())
        if payload['failed_user_ids']:
            notify_users_ids = payload['failed_user_ids']
        else:
            notify_users_ids = all_user_ids
        for user_id in notify_users_ids:
            user = acl_users.getUserById(user_id)
            # Switch to the user so the permission check below reflects
            # their own rights.
            newSecurityManager(portal.REQUEST, user)
            can_review = user is not None and \
                getSecurityManager().checkPermission(review_perm,
                                                     organization)
            user_email = user is not None and user.getProperty('email')
            # Retain this check for failed_user_ids, as the user's permissions
            # might have changed
            if can_review and user_email:
                msg = 'Notifying %s: can_review=%s, email=%s'
                logger.info(msg, user, can_review, user_email)
                try:
                    mailhost.send(outer,
                                  subject=subject,
                                  mfrom="%s <%s>" % (mfromname, mfrom),
                                  mto=user_email)
                    email_counter.setdefault(org_path, []).append(user_id)
                    # Delivery succeeded: this user no longer needs a retry.
                    if user_id in payload['failed_user_ids']:
                        payload['failed_user_ids'].remove(user_id)
                except Exception:
                    # Delivery failed: record the user and re-queue the
                    # payload so a later run retries just the failures.
                    msg = 'Problem notifying user %r; re-queueing.'
                    logger.exception(msg, user)
                    payload['failed_user_ids'].add(user_id)
                    notification_tool.requeue_notification(
                        organization, payload)
                    continue
    logger.info(
        'Syndication digest creation finished: %d message(s) sent (%s).',
        sum([len(user_ids) for user_ids in email_counter.values()]),
        email_counter)
    transaction.commit()