def notifyCommentAdded(obj, event):
    """Notify the owner of an added comment."""
    log = logging.getLogger('plumi.content.subscribers')
    urltool = getToolByName(obj, 'portal_url')
    portal = urltool.getPortalObject()
    video = aq_parent(aq_parent(obj))
    videoUrl = video.absolute_url()
    creator = video.Creator()
    membr_tool = getToolByName(obj, 'portal_membership')
    member = membr_tool.getMemberById(creator)
    mTo = member.getProperty('email', None)
    log.info('notifyCommentAdded')
    if mTo:
        try:
            mFrom = portal.getProperty('email_from_address')
            mSubj = _('Comment added on: ') + video.Title().decode('utf-8')
            mMsg = 'To: %s\n' % mTo
            mMsg += 'From: %s\n' % mFrom
            mMsg += 'Content-Type: text/plain; charset=utf-8\n\n'
            mMsg += _('Hi ') + member.getProperty('fullname', creator)
            mMsg += '\n\n' + _('A comment has been added on ') + videoUrl + '\n\n'
            async = getUtility(IAsyncService)
            job = async.queueJobWithDelay(None, temp_time, sendMail,
                                          obj, mMsg, mSubj)
            log.info('notifyCommentAdded , im %s . sending email to %s from %s '
                     % (obj, mTo, mFrom))
        except Exception:
            log.error('Did not actually send email to contribution owner! '
                      'Something amiss with SecureMailHost.')
def test_documents_task_transport_selected_docs(self):
    intids = getUtility(IIntIds)
    task, documents = self._create_task(
        self.portal, with_docs=True, return_docs=True)
    target = self._create_task(self.portal)
    sql_task = task.get_sql_object()
    doc_transporter = getUtility(ITaskDocumentsTransporter)

    ids = [intids.getId(documents[0]), intids.getId(documents[3])]
    intids_mapping = doc_transporter.copy_documents_from_remote_task(
        sql_task, target, documents=ids)

    self.assertEquals(
        sorted([aa.Title for aa in target.getFolderContents()]),
        sorted(['Doc 1', 'Doc 4']))

    pair1 = intids_mapping.items()[0]
    pair2 = intids_mapping.items()[1]
    self.assertEquals(
        intids.getObject(pair1[0]).Title(),
        intids.getObject(pair1[1]).Title())
    self.assertEquals(
        intids.getObject(pair2[0]).Title(),
        intids.getObject(pair2[1]).Title())
def test_invoke_add_view(self):
    portlet1 = getUtility(
        IPortletType, name='telesur.portlets.HotThreads')
    portlet2 = getUtility(
        IPortletType, name='telesur.portlets.PopularThreads')
    mapping = self.portal.restrictedTraverse(
        '++contextportlets++plone.leftcolumn')
    for m in mapping.keys():
        del mapping[m]
    addview1 = mapping.restrictedTraverse('+/' + portlet1.addview)
    addview2 = mapping.restrictedTraverse('+/' + portlet2.addview)

    # TODO: Pass a dictionary containing dummy form inputs from the add
    # form.
    # Note: if the portlet has a NullAddForm, simply call
    # addview() instead of the next line.
    addview1.createAndAdd(data={'max_results': 5, 'forum': 'testforum'})
    addview2.createAndAdd(data={'max_results': 5,
                                'interval': 'interval',
                                'forum': 'testforum'})

    self.assertEquals(len(mapping), 2)
    self.failUnless(isinstance(mapping.values()[0],
                               hot_threads.Assignment))
    self.failUnless(isinstance(mapping.values()[1],
                               popular_threads.Assignment))
def update_links(event):
    obj = event.object
    if is_outdated(obj) or not is_publically_visible(obj):
        return
    temporary = hasattr(obj, 'meta_type') and \
        obj.meta_type == TempFolder.meta_type
    if temporary:
        # Objects that are temporary (read: portal_factory) and do not have a
        # (stable) URL (yet) do not need to be crawled: relative links will be
        # off quickly and we can't really use the UID anyway.
        return
    try:
        link_checker = getToolByName(obj, 'portal_linkchecker').aq_inner
    except AttributeError:
        return
    if not link_checker.active:
        return
    retriever = IRetriever(obj, None)
    if retriever is not None:
        sm = getSecurityManager()
        if not sm.checkPermission(ModifyPortalContent, obj):
            return
        if not IReferenceable.providedBy(obj):
            return
        async = getUtility(IAsyncService)
        tpath = '/'.join(obj.getPhysicalPath())
        job = async.queueJob(retrieve_async, obj, tpath, online=True)
        callback = job.addCallbacks(failure=job_failure_callback)
        callback  # for pep
def setupAdminClient(portal, event):
    client = queryUtility(IAdminClient)
    if client is None:
        settings = getUtility(IRegistry)
        try:
            jid = settings['jarn.xmpp.adminJID']
            jdomain = settings['jarn.xmpp.xmppDomain']
            password = settings['jarn.xmpp.adminPassword']
            pubsub_jid = settings['jarn.xmpp.pubsubJID']
        except KeyError:
            return
        client = AdminClient(jid, jdomain, password, pubsub_jid)
        gsm = getGlobalSiteManager()
        gsm.registerUtility(client, IAdminClient)

        def checkAdminClientConnected():
            if client.state != 'authenticated':
                logger.error('XMPP admin client has not been able to authenticate. '
                             'Client state is "%s". Will retry on the next request.'
                             % client.state)
                gsm.unregisterUtility(client, IAdminClient)

        zr = getUtility(IZopeReactor)
        zr.reactor.callLater(10, checkAdminClientConnected)
def handleSend(self, action):
    request = self.request
    data, errors = self.extractData()
    if errors:
        IStatusMessage(request).add(self.formErrorsMessage, 'error')
    else:
        message = data['message']
        members = self.context.members
        getPrincipal = getUtility(IAuthentication).getPrincipal
        invitations = getUtility(IInvitations)
        group = removeAllProxies(self.context)
        for pid in request.get('principal.users', ()):
            try:
                principal = getPrincipal(pid)
            except PrincipalLookupError:
                continue
            if not invitations.catalog.search(
                    group,
                    type={'any_of': ('invitation.member',)},
                    principal={'any_of': (pid,)}):
                removeSecurityProxy(members).invite(pid, message)
        IStatusMessage(request).add(_(u'Invitations have been sent.'))
        self.redirect('.')
def items(self):
    request = self.request
    catalog = getUtility(ICatalog)
    results = catalog.searchResults(
        traversablePath={'any_of': (self.context,)},
        typeType={'any_of': ('User Manuals',)},
        sort_order='reverse', sort_on='modified',
        isDraft={'any_of': (False,)})
    for item in results:
        url = absoluteURL(item, request)
        preview = getMultiAdapter((item, request), IContentPreview)
        preview.update()
        info = {
            'title': item.title,
            'description': item.description,
            'guid': '%s/' % url,
            'pubDate': rfc822.formatdate(time.mktime(
                IDCTimes(item).modified.timetuple())),
            'isPermaLink': True}
        principal = IOwnership(item).owner
        if principal is not None:
            profile = IPersonalProfile(principal)
            info['author'] = u'%s (%s)' % (profile.email, profile.title)
        yield info
def test_access_to_timeline_of_proprietary_product(self):
    # ITestTimelineProductSeries instances related to proprietary
    # products are visible only for persons with a policy grant for
    # the product.
    owner = self.factory.makePerson()
    user_with_policy_grant = self.factory.makePerson()
    product = self.factory.makeProduct(
        owner=owner, information_type=InformationType.PROPRIETARY)
    series = self.factory.makeProductSeries(product=product)
    with person_logged_in(owner):
        timeline = series.getTimeline()
        getUtility(IService, 'sharing').sharePillarInformation(
            product, user_with_policy_grant, owner,
            {InformationType.PROPRIETARY: SharingPermission.ALL})

    # Anonymous users do not have access.
    with person_logged_in(ANONYMOUS):
        for name in (
                'name', 'status', 'is_development_focus', 'uri',
                'landmarks', 'product'):
            self.assertRaises(Unauthorized, getattr, timeline, name)
    # Ordinary users do not have access.
    with person_logged_in(self.factory.makePerson()):
        for name in (
                'name', 'status', 'is_development_focus', 'uri',
                'landmarks', 'product'):
            self.assertRaises(Unauthorized, getattr, timeline, name)
    # Users with a policy grant have access.
    with person_logged_in(user_with_policy_grant):
        for name in (
                'name', 'status', 'is_development_focus', 'uri',
                'landmarks', 'product'):
            # No exception is raised when attributes of the timeline
            # are accessed.
            getattr(timeline, name)
def run(self):
    self.text_content = None
    self.new_headers = {}
    things = self.headers
    extractor = getUtility(IExtractor, name='extractor')
    ext_data = extractor.extract(self.content, things)
    ext_things = {}
    ext_things.update(things)
    ext_things.update(ext_data)
    content_extractor = getUtility(IExtractor, name='content')
    cont_data = content_extractor.extract(self.content, ext_things)
    if 'text-body' not in cont_data:
        recoll_extractor = getUtility(IExtractor, name='recoll')
        ext_things.update(cont_data)
        cont_data = recoll_extractor.extract(self.content, ext_things)
    text_p = things['text-body-presence'] = 'text-body' in cont_data
    ext_things.update(cont_data)
    if text_p:
        self.text_content = cont_data['text-body'].encode('utf-8')
        storage = getUtility(IContentStorage, name="content")
        ext_things['text-id'] = storage.hash(self.text_content)
    self.new_headers = ext_things
def __bind__(self, principal):
    clone = super(PasswordPreference, self).__bind__(principal)
    clone.user = getUtility(IAuthentication).getUser(principal.id)
    clone.ptool = getUtility(IPasswordTool)
    #clone.changer = IPasswordChanger(clone.__principal__, None)
    return clone
def setUp(self):
    self.portal = self.layer['portal']
    self.portal_url = self.portal.portal_url()
    # create EasyNewsletter instance and add some subscribers
    setRoles(self.portal, TEST_USER_ID, ['Manager'])
    login(self.portal, TEST_USER_NAME)
    self.portal._original_MailHost = self.portal.MailHost
    self.portal.MailHost = mailhost = MockMailHost('MailHost')
    sm = getSiteManager(context=self.portal)
    sm.unregisterUtility(provided=IMailHost)
    sm.registerUtility(mailhost, provided=IMailHost)
    self.mail_host = getUtility(IMailHost)
    if not IS_PLONE_5:  # BBB
        self.portal.email_from_address = "*****@*****.**"
        self.portal.email_from_name = u"Plone Master"
        self.mail_host.smtp_host = u"example.com"
        self.mail_host.smtp_port = 25
        self.mail_host.smtp_userid = u"*****@*****.**"
        self.mail_host.smtp_pass = u"Password"
    else:
        self.registry = getUtility(IRegistry)
        reg_mail = self.registry.forInterface(
            IMailSchema, prefix='plone')
        reg_mail.email_from_address = '*****@*****.**'
        reg_mail.email_from_name = u'Plone Master'
        reg_mail.smtp_host = u'example.com'
        reg_mail.smtp_port = 25
        reg_mail.smtp_userid = u'*****@*****.**'
        reg_mail.smtp_pass = u'Password'
def test_editProperties(self):
    # https://bugs.launchpad.net/zope-cmf/+bug/174246
    # PropertiesTool.editProperties fails with traceback due to
    # faulty invocation of the site's manage_changeProperties method
    props = {'email_from_name': 'Test Admin',
             'email_from_address': '*****@*****.**',
             'description': 'Test MailHost Description',
             'title': 'Test MailHost',
             'smtp_server': 'mail.example.com',
             'validate_email': True,
             'email_charset': 'iso-8859-15',
             'default_charset': 'iso-8859-1',
             'enable_permalink': True,
             }
    tool = getUtility(IPropertiesTool)
    tool.editProperties(props)

    site_prop = self.site.getProperty
    self.assertEquals(getUtility(IMailHost).smtp_host, 'mail.example.com')
    self.assertEquals(site_prop('email_from_name'), 'Test Admin')
    self.assertEquals(site_prop('email_from_address'), '*****@*****.**')
    self.assertEquals(site_prop('description'), 'Test MailHost Description')
    self.assertEquals(site_prop('title'), 'Test MailHost')
    self.assertEquals(site_prop('validate_email'), True)
    self.assertEquals(site_prop('email_charset'), 'iso-8859-15')
    self.assertEquals(site_prop('default_charset'), 'iso-8859-1')
    self.assertEquals(site_prop('enable_permalink'), True)
def update(self):
    quiz = getSite()
    int_ids = getUtility(IQreatureIntIds, context=self.context)
    ce = getUtility(ICounterExplorer, context=self.context)
    results = [r for r in quiz.values() if IQuizResult.providedBy(r)]
    self.titles = [r.title.encode('utf-8') for r in results]
    self.counters = [ce.getCounter(r, key=int_ids.getId(r))
                     for r in results]
def update(self):
    self.everyone = getUtility(IEveryoneGroup).id
    self.authgroup = getUtility(IAuthenticatedGroup).id

    manager = IPrincipalPermissionManager(removeSecurityProxy(self.context))

    if 'site.access.save' in self.request:
        val = self.request.get('site.access', None)
        if val == 'open':
            manager.grantPermissionToPrincipal(
                'zojax.AccessSite', self.everyone)
            manager.unsetPermissionForPrincipal(
                'zojax.AccessSite', self.authgroup)
        if val == 'private':
            manager.grantPermissionToPrincipal(
                'zojax.AccessSite', self.authgroup)
            manager.unsetPermissionForPrincipal(
                'zojax.AccessSite', self.everyone)

        IStatusMessage(self.request).add(
            _('Site access settings has been changed.'))

    self.everyoneAllowed = manager.getSetting(
        'zojax.AccessSite', self.everyone) is Allow
    self.authgroupAllowed = manager.getSetting(
        'zojax.AccessSite', self.authgroup) is Allow
def onZSliceValueChanged(self, evt):
    try:
        component.getUtility(ICurrentImage)
    except Exception:
        # no image loaded, so exit early
        return

    # get current z-slice index
    self.SetZIndexValue(int(round(evt.GetSliceValue())))

    # we'd like to give regular plot updates without bringing the system
    # to its knees - we'll guarantee the user a redraw at least every
    # half second
    dt = time.time() - self._last_delayed_redraw

    # schedule an update in 0.25 second
    if self._delayed_cb:
        self._delayed_cb.cancel()

    if dt > 1.0:
        # perform redraw immediately
        self.doScheduledRedraw()
    else:
        # schedule for later
        self._delayed_cb = reactor.callLater(0.25, self.doScheduledRedraw)
def _assert_run_cronscript(self, create_job):
    # The cronscript is configured: schema-lazr.conf and security.cfg.
    # The job runs correctly and the requested bug subscriptions are
    # removed.
    distro = self.factory.makeDistribution()
    grantee = self.factory.makePerson()
    owner = self.factory.makePerson()
    bug = self.factory.makeBug(
        owner=owner, target=distro,
        information_type=InformationType.USERDATA)
    with person_logged_in(owner):
        bug.subscribe(grantee, owner)
    job, job_type = create_job(distro, bug, grantee, owner)
    # Subscribing grantee has created an artifact grant so we need to
    # revoke that to test the job.
    artifact = self.factory.makeAccessArtifact(concrete=bug)
    getUtility(IAccessArtifactGrantSource).revokeByArtifact(
        [artifact], [grantee])
    transaction.commit()

    out, err, exit_code = run_script(
        "LP_DEBUG_SQL=1 cronscripts/process-job-source.py -vv %s" % (
            job_type))
    self.addDetail("stdout", Content(UTF8_TEXT, lambda: out))
    self.addDetail("stderr", Content(UTF8_TEXT, lambda: err))
    self.assertEqual(0, exit_code)
    self.assertTrue(
        'Traceback (most recent call last)' not in err)
    IStore(job.job).invalidate()
    self.assertEqual(JobStatus.COMPLETED, job.job.status)
    self.assertNotIn(
        grantee, removeSecurityProxy(bug).getDirectSubscribers())
def test_unsubscribe_pillar_artifacts_specific_info_types(self):
    # Only remove subscriptions for bugs of the specified info type.
    person_grantee = self.factory.makePerson(name='grantee')
    owner = self.factory.makePerson(name='pillarowner')
    pillar = self.factory.makeProduct(owner=owner)

    # Make bugs the person_grantee is subscribed to.
    bug1, ignored = self._make_subscribed_bug(
        person_grantee, pillar,
        information_type=InformationType.USERDATA)
    bug2, ignored = self._make_subscribed_bug(
        person_grantee, pillar,
        information_type=InformationType.PRIVATESECURITY)

    # Now run the job, removing access to userdata artifacts.
    getUtility(IRemoveArtifactSubscriptionsJobSource).create(
        pillar.owner, pillar=pillar,
        information_types=[InformationType.USERDATA])
    with block_on_job(self):
        transaction.commit()

    self.assertNotIn(
        person_grantee, removeSecurityProxy(bug1).getDirectSubscribers())
    self.assertIn(
        person_grantee, removeSecurityProxy(bug2).getDirectSubscribers())
def fix_relations():
    relations_catalog = getUtility(ICatalog)
    intids = getUtility(IIntIds)
    relations = list(relations_catalog.findRelations())
    for relation in relations:
        from_object = intids.getObject(relation.from_id)
        from_attribute = relation.from_attribute
        to_id = relation.to_id
        attr = getattr(from_object, from_attribute, None)
        attr_is_list = isinstance(attr, list)

        # remove the broken relation
        if attr_is_list:
            setattr(from_object, from_attribute,
                    [x for x in attr if x is not relation])
        else:
            setattr(from_object, from_attribute, None)
        # let the catalog remove the old relation
        notify(ObjectModifiedEvent(from_object))

        attr = getattr(from_object, from_attribute, None)
        # create a new relation
        new_relation = RelationValue(to_id)
        if attr_is_list:
            attr.append(new_relation)
        else:
            setattr(from_object, from_attribute, new_relation)
        # let the catalog know about this new relation
        notify(ObjectModifiedEvent(from_object))
def get_all_user_properties(user):
    """Return a mapping of all the defined user profile properties and
    their values.

    The properties list includes all properties defined on any profile
    extension that is currently registered. For each of these properties,
    the user object is queried to retrieve the value. This may result in
    an empty value if that property is not set, or in the value of the
    property provided by any properties PAS plugin.

    NOTE: Mapped LDAP attributes will be retrieved and returned in this
    mapping, if any.
    """
    user_properties_utility = getUtility(ICatalogFactory,
                                         name='user_properties')
    attributes = user_properties_utility.properties + METADATA_USER_ATTRS

    try:
        extender_name = api.portal.get_registry_record(
            'genweb.controlpanel.core.IGenwebCoreControlPanelSettings.user_properties_extender')
    except Exception:
        extender_name = ''

    if extender_name:
        if extender_name in [a[0] for a in getUtilitiesFor(ICatalogFactory)]:
            extended_user_properties_utility = getUtility(
                ICatalogFactory, name=extender_name)
            attributes = attributes + extended_user_properties_utility.properties

    mapping = {}
    for attr in attributes:
        value = user.getProperty(attr)
        if isinstance(value, (str, unicode)):
            mapping.update({attr: value})

    return mapping
def get_ttw_fields(obj):
    """Return the names of the fields that were added to obj through the web."""
    fti = getUtility(IDexterityFTI, name=obj.portal_type)
    full_schema = fti.lookupSchema()
    all_fields = schema.getFieldsInOrder(full_schema)

    schema_policy = getUtility(ISchemaPolicy, name=fti.schema_policy)
    original_schema = schema_policy.bases(None, None)[0]
    original_fields = schema.getFieldsInOrder(original_schema)

    new_fields = [field[0] for field in all_fields
                  if field[0] not in dict(original_fields).keys()]

    for behavior_id in fti.behaviors:
        behavior = getUtility(IBehavior, behavior_id).interface
        if behavior == IContactDetails or not IFormFieldProvider.providedBy(behavior):
            continue
        try:
            default_fieldset_fields = non_fieldset_fields(behavior)
            behavior_name = behavior_id.split('.')[-1]
            # @TODO: get generic method to get widget id
            new_fields.extend(['%s.%s' % (behavior_name, field_name)
                               for field_name in default_fieldset_fields])
        except Exception:
            pass

    return new_fields
def get_proposal(self):
    """Return the proposal to which this document belongs.

    This may return a "proposal" or a "submitted proposal".
    """
    parent = aq_parent(aq_inner(self))
    if IProposal.providedBy(parent):
        return parent

    # Find the submitted proposal when self is an excerpt document in the
    # meeting dossier.
    for relation in getUtility(ICatalog).findRelations({
            'to_id': getUtility(IIntIds).getId(aq_inner(self)),
            'from_attribute': 'excerpts'}):
        # We expect that there are 0 or 1 relations, because this document
        # cannot be the excerpt of multiple proposals.
        submitted_proposal = relation.from_object
        if api.user.has_permission('View', obj=submitted_proposal):
            return submitted_proposal

    # Find the proposal when self is an excerpt in the case dossier.
    generated_excerpts = GeneratedExcerpt.query.by_document(self).all()
    if generated_excerpts:
        proposal = generated_excerpts[0].proposal.resolve_proposal()
        if api.user.has_permission('View', obj=proposal):
            return proposal

    return None
def copy_fields(self, translation):
    fti = getUtility(IDexterityFTI, name=self.context.portal_type)
    schemas = []
    schemas.append(fti.lookupSchema())

    for behavior_schema in \
            utils.getAdditionalSchemata(self.context,
                                        self.context.portal_type):
        if behavior_schema is not None:
            schemas.append(behavior_schema)

    for schema in schemas:
        for field_name in schema:
            if field_name in EXCLUDES:
                continue
            if not ILanguageIndependentField.providedBy(schema[field_name]):
                value = getattr(schema(self.context), field_name, _marker)
                if IRelationValue.providedBy(value):
                    obj = value.to_object
                    adapter = queryAdapter(translation, ILanguage)
                    trans_obj = ITranslationManager(obj).get_translation(
                        adapter.get_language())
                    if trans_obj:
                        intids = component.getUtility(IIntIds)
                        value = RelationValue(intids.getId(trans_obj))
                if not (value == _marker):
                    # We check "not (value == _marker)" because
                    # z3c.relationfield has an __eq__
                    setattr(schema(translation), field_name, value)
def _send_token_email(self, addr_to, token, user_info):
    addr_from = "*****@*****.**"
    email_template = load_template('zpt/pwreset_token_email.zpt')
    expiration_time = datetime.utcnow() + timedelta(days=1)
    options = {
        'token_url': self.absolute_url() + "/confirm_email?token=" + token,
        'user_info': user_info,
        'context': self,
        'network_name': NETWORK_NAME,
        'expiration_time': expiration_time.strftime("%Y-%m-%d %H:%M:%S"),
    }
    print options['token_url']
    message = MIMEText(email_template(**options).encode('utf-8'),
                       _charset='utf-8')
    message['From'] = addr_from
    message['To'] = addr_to
    message['Subject'] = "%s account password recovery" % NETWORK_NAME

    try:
        mailer = getUtility(IMailDelivery, name="Mail")
        mailer.send(addr_from, [addr_to], message.as_string())
    except ComponentLookupError:
        mailer = getUtility(IMailDelivery, name="naaya-mail-delivery")
        try:
            mailer.send(addr_from, [addr_to], message.as_string())
        except AssertionError:
            mailer.send(addr_from, [addr_to], message)
def createLineItem(self, data):
    parent = self.aq_parent
    utility = getUtility(IShoppingCartUtility)
    cart = utility.get(parent, create=True)

    intids = getUtility(IIntIds)
    iid = intids.queryId(parent)
    if iid is None:
        iid = intids.register(parent)

    nitem = PayableLineItem()
    nitem.item_id = parent.UID()  # archetypes uid
    nitem.uid = iid

    # copy over information regarding the item
    nitem.name = "Supplemental Pharmacy Application"
    nitem.description = "Supplemental Pharmacy Application 2011"
    nitem.cost = float(self.price)
    nitem.quantity = 1
    nitem.product_code = nitem.item_id
    nitem.data = data

    # add to cart
    if nitem.item_id not in cart.keys():
        cart[nitem.item_id] = nitem
    cart.last_item = nitem.item_id
def restart_sparqls(context):
    """Migrate sparqls to the new arguments format (name:type query)."""
    async = getUtility(IAsyncService)
    catalog = getToolByName(context, 'portal_catalog')
    brains = catalog.searchResults(portal_type='Sparql')

    log_total = len(brains)
    log_count = 0
    restarted = 0
    for brain in brains:
        log_count += 1
        logger.info('PATH %s::%s: %s', log_count, log_total, brain.getPath())
        # added exceptions for broken sparql methods
        if brain.getPath() != '/www/SITE/data-and-maps/daviz/sds/'\
                'show-eunis-and-dbpedia-links-1' and \
           brain.getPath() != '/www/SITE/sandbox/antonio-tests/aq' and \
           brain.getPath() != '/www/SITE/sandbox/antonio-tests/aq-1' and \
           brain.getPath() != '/www/SITE/data-and-maps/daviz/eionet/data/'\
                'inspire-monitoring-and-reporting-atbe-ref-years-2011-2012':
            obj = brain.getObject()
            if obj.getRefresh_rate() != 'Once':
                obj.scheduled_at = DateTime.DateTime()
                async.queueJob(async_updateLastWorkingResults,
                               obj,
                               scheduled_at=obj.scheduled_at,
                               bookmarks_folder_added=False)
                restarted += 1
                transaction.commit()

    message = 'Restarted %s Sparqls ...' % restarted
    logger.info(message)
    return message
def validate(self, data):
    """Check that the user is not attempting to merge a person into itself."""
    dupe_person = data.get('dupe_person')
    target_person = data.get('target_person') or self.user
    if dupe_person is None:
        self.setFieldError(
            'dupe_person', 'The duplicate is not a valid person or team.')
    else:
        if dupe_person == target_person:
            self.addError(_("You can't merge ${name} into itself.",
                            mapping=dict(name=dupe_person.name)))
        dupe_person_ppas = getUtility(IArchiveSet).getPPAOwnedByPerson(
            dupe_person, statuses=[ArchiveStatus.ACTIVE,
                                   ArchiveStatus.DELETING])
        if dupe_person_ppas is not None:
            self.addError(_(
                "${name} has a PPA that must be deleted before it "
                "can be merged. It may take ten minutes to remove the "
                "deleted PPA's files.",
                mapping=dict(name=dupe_person.name)))
        all_branches = getUtility(IAllBranches)
        if not all_branches.ownedBy(dupe_person).isPrivate().is_empty():
            self.addError(
                _("${name} owns private branches that must be "
                  "deleted or transferred to another owner first.",
                  mapping=dict(name=dupe_person.name)))
        if dupe_person.isMergePending():
            self.addError(_("${name} is already queued for merging.",
                            mapping=dict(name=dupe_person.name)))
    if target_person is not None and target_person.isMergePending():
        self.addError(_("${name} is already queued for merging.",
                        mapping=dict(name=target_person.name)))
def isAvailable(self):
    if IUnauthenticatedPrincipal.providedBy(self.request.principal):
        return False

    principal = self.context.__principal__
    principalId = principal.id
    if self.request.principal.id == principalId:
        return False

    invitations = [
        invitation.object.id for invitation in
        getUtility(IInvitations).getInvitationsByPrincipal(
            principalId, ('invitation.member',))]

    for group in getUtility(ICatalog).searchResults(
            type={'any_of': ('content.group',)},
            members={'any_of': (self.request.principal.id,)}):
        if group.id in invitations:
            continue
        if principalId not in group.members and \
                checkPermission('zojax.InviteGroupMember', group):
            return True

    return False
def countResults(event):
    ce = getUtility(ICounterExplorer, context=event.result)
    int_ids = getUtility(IQreatureIntIds, context=event.result)
    res_id = int_ids.getId(event.result)
    print '************the key which goes to explorer**************'
    print res_id
    ce.incrementCounter(event.result, res_id)
def continue_action(self, action, data):
    dupeaccount = data['dupe_person']
    if dupeaccount == self.user:
        # Please, don't try to merge you into yourself.
        return

    emails = getUtility(IEmailAddressSet).getByPerson(dupeaccount)
    emails_count = emails.count()
    if emails_count > 1:
        # The dupe account has more than one email address. Must redirect
        # the user to another page to ask which of those emails (s)he
        # wants to claim.
        self.next_url = '+requestmerge-multiple?dupe=%d' % dupeaccount.id
        return

    assert emails_count == 1
    email = emails[0]
    login = getUtility(ILaunchBag).login
    logintokenset = getUtility(ILoginTokenSet)
    # Need to remove the security proxy because the dupe account may have
    # hidden email addresses.
    token = logintokenset.new(
        self.user, login, removeSecurityProxy(email).email,
        LoginTokenType.ACCOUNTMERGE)
    token.sendMergeRequestEmail()
    self.next_url = './+mergerequest-sent?dupe=%d' % dupeaccount.id
def feedtopciquestion(obj, event):
    """A followed topic has a new question."""
    intids = getUtility(IIntIds)
    intid = intids.getId(obj)
    catalog = component.getUtility(ICatalog)
    qtlist = sorted(catalog.findRelations({'from_id': intid}))
    if len(qtlist) == 0:
        return
    for q in qtlist:
        topiclist = IFollowing(q.to_object).followed
        catalog = getToolByName(obj, 'portal_catalog')
        for topic in topiclist:
            brain = catalog({'object_provides': Ifeedsfolder.__identifier__,
                             'Creator': topic,
                             'sort_on': 'sortable_title'})
            if not brain:
                break
            folder = brain[0].getObject()
            if not folder:
                break
            id = obj.getId()
            feed = catalog({'object_provides': Ifeed.__identifier__,
                            'id': id,
                            'path': dict(query='/'.join(folder.getPhysicalPath()),
                                         depth=1),
                            'sort_on': 'sortable_title'})
            # If a matching record already exists, reset its modification
            # time; otherwise create a new one.
            if len(feed) > 0:
                feed[0].getObject().type = 1
                feed[0].getObject().setModificationDate(DateTime())
            else:
                item = createContentInContainer(
                    folder, "emc.kb.feed", checkConstraints=False, id=id)
                item.type = 1
def autorespond_to_sender(mlist, sender, language=None):
    """Should Mailman automatically respond to this sender?

    :param mlist: The mailing list.
    :type mlist: `IMailingList`.
    :param sender: The sender's email address.
    :type sender: string
    :param language: Optional language.
    :type language: `ILanguage` or None
    :return: True if an automatic response should be sent, otherwise False.
        If an automatic response is not sent, a message is sent indicating
        that no more will be sent today.
    :rtype: bool
    """
    if language is None:
        language = mlist.preferred_language
    max_autoresponses_per_day = int(config.mta.max_autoresponses_per_day)
    if max_autoresponses_per_day == 0:
        # Unlimited.
        return True
    # Get an IAddress from an email address.
    user_manager = getUtility(IUserManager)
    address = user_manager.get_address(sender)
    if address is None:
        address = user_manager.create_address(sender)
    response_set = IAutoResponseSet(mlist)
    todays_count = response_set.todays_count(address, Response.hold)
    if todays_count < max_autoresponses_per_day:
        # This person has not reached their automatic response limit, so
        # it's okay to send a response.
        response_set.response_sent(address, Response.hold)
        return True
    elif todays_count == max_autoresponses_per_day:
        # The last one we sent was the last one we should send today.
        # Instead of sending an automatic response, send them the "no more
        # today" message.
        log.info('hold autoresponse limit hit: %s', sender)
        response_set.response_sent(address, Response.hold)
        # Send this notification message instead.
        template = getUtility(ITemplateLoader).get(
            'list:user:notice:no-more-today', mlist,
            language=language.code)
        text = wrap(expand(template, mlist, dict(
            language=language.code,
            count=todays_count,
            sender_email=sender,
            # For backward compatibility.
            sender=sender,
            owneremail=mlist.owner_address,
            )))
        with _.using(language.code):
            msg = UserNotification(
                sender, mlist.owner_address,
                _('Last autoresponse notification for today'),
                text, lang=language)
            msg.send(mlist)
        return False
    else:
        # We've sent them everything we're going to send them today.
        log.info('Automatic response limit discard: %s', sender)
        return False
def current_component(self):
    """See `IPackageBuild`."""
    return getUtility(IComponentSet)[default_component_dependency_name]
def globally_enabled(self):
    storage = getUtility(IRuleStorage)
    return storage.active
def _events(self):
    eventsFactory = getUtility(
        IVocabularyFactory, name='plone.contentrules.events')
    return dict([(e.value, e.token) for e in eventsFactory(self.context)])
def setUp(self):
    self.portal = self.layer['portal']
    self.request = self.layer['request']
    registry = getUtility(IRegistry)
    self.settings = registry.forInterface(
        ITypesSchema, prefix="plone")
def generateUniqueId(context):
    """Generate pretty content IDs.

    - context is used to find portal_type; in case there is no prefix
      specified for the type, the normalized portal_type is used as a
      prefix instead.
    """
    fn_normalize = getUtility(IFileNameNormalizer).normalize
    id_normalize = getUtility(IIDNormalizer).normalize
    prefixes = context.bika_setup.getPrefixes()

    year = context.bika_setup.getYearInPrefix() and \
        DateTime().strftime("%Y")[2:] or ''
    separator = '-'
    for e in prefixes:
        if 'separator' not in e:
            e['separator'] = ''
        if e['portal_type'] == context.portal_type:
            separator = e['separator']

    # Analysis Request IDs
    if context.portal_type == "AnalysisRequest":
        sample = context.getSample()
        s_prefix = fn_normalize(sample.getSampleType().getPrefix())
        sample_padding = context.bika_setup.getSampleIDPadding()
        ar_padding = context.bika_setup.getARIDPadding()
        sample_id = sample.getId()
        sample_number = sample_id.split(s_prefix)[1]
        ar_number = sample.getLastARNumber()
        ar_number = ar_number and ar_number + 1 or 1
        return fn_normalize(
            ("%s%s" + separator + "R%s") % (
                s_prefix,
                str(sample_number).zfill(sample_padding),
                str(ar_number).zfill(ar_padding)))

    # Sample Partition IDs
    if context.portal_type == "SamplePartition":
        # We do not use prefixes. There are actually codes that require the 'P'.
        # matches = [p for p in prefixes if p['portal_type'] == 'SamplePartition']
        # prefix = matches and matches[0]['prefix'] or 'samplepartition'
        # padding = int(matches and matches[0]['padding'] or '0')

        # at this time the part exists, so +1 would be 1 too many
        partnr = str(len(context.aq_parent.objectValues('SamplePartition')))
        # parent id is normalized already
        return ("%s" + separator + "P%s") % (context.aq_parent.id, partnr)

    if context.bika_setup.getExternalIDServer():
        # if using external server
        for d in prefixes:
            # Sample ID comes from SampleType
            if context.portal_type == "Sample":
                prefix = context.getSampleType().getPrefix()
                padding = context.bika_setup.getSampleIDPadding()
                new_id = str(
                    idserver_generate_id(context, "%s%s-" % (prefix, year)))
                if padding:
                    new_id = new_id.zfill(int(padding))
                return ('%s%s' + separator + '%s') % (prefix, year, new_id)
            elif d['portal_type'] == context.portal_type:
                prefix = d['prefix']
                padding = d['padding']
                new_id = str(
                    idserver_generate_id(context, "%s%s-" % (prefix, year)))
                if padding:
                    new_id = new_id.zfill(int(padding))
                return ('%s%s' + separator + '%s') % (prefix, year, new_id)

        # no prefix; use portal_type
        # year is not inserted here
        # portal_type is normalized to lowercase
        npt = id_normalize(context.portal_type)
        new_id = str(idserver_generate_id(context, npt + "-"))
        return ('%s' + separator + '%s') % (npt, new_id)
    else:
        # No external id-server.
        def next_id(prefix):
            # normalize before anything
            prefix = fn_normalize(prefix)
            plone = context.portal_url.getPortalObject()
            # grab the first catalog we are indexed in
            at = getToolByName(plone, 'archetype_tool')
            if context.portal_type in at.catalog_map:
                catalog_name = at.catalog_map[context.portal_type][0]
            else:
                catalog_name = 'portal_catalog'
            catalog = getToolByName(plone, catalog_name)

            # get all IDs that start with prefix
            # this must specifically exclude AR IDs (two -'s)
            rr = re.compile("^" + prefix + separator + "[\d+]+$")
            ids = [int(i.split(prefix + separator)[1])
                   for i in catalog.Indexes['id'].uniqueValues()
                   if rr.match(i)]
            #plone_tool = getToolByName(context, 'plone_utils')
            #if not plone_tool.isIDAutoGenerated(l.id):
            ids.sort()
            _id = ids and ids[-1] or 0
            new_id = _id + 1
            return str(new_id)

        for d in prefixes:
            if context.portal_type == "Sample":
                # Special case for Sample IDs
                prefix = fn_normalize(context.getSampleType().getPrefix())
                padding = context.bika_setup.getSampleIDPadding()
                sequence_start = context.bika_setup.getSampleIDSequenceStart()
                new_id = next_id(prefix + year)
                # If sequence_start is greater than new_id, use
                # sequence_start as new_id. (Jira LIMS-280)
                if sequence_start > int(new_id):
                    new_id = str(sequence_start)
                if padding:
                    new_id = new_id.zfill(int(padding))
                return ('%s%s' + separator + '%s') % (prefix, year, new_id)
            elif d['portal_type'] == context.portal_type:
                prefix = d['prefix']
                padding = d['padding']
                sequence_start = d.get("sequence_start", None)
                new_id = next_id(prefix + year)
                # Jira-tracker LIMS-280
                if sequence_start and int(sequence_start) > int(new_id):
                    new_id = str(sequence_start)
                if padding:
                    new_id = new_id.zfill(int(padding))
                return ('%s%s' + separator + '%s') % (prefix, year, new_id)

        # no prefix; use portal_type
        # no year inserted here
        # use "IID" normalizer, because we want portal_type to be lowercased
        prefix = id_normalize(context.portal_type)
        new_id = next_id(prefix)
        return ('%s' + separator + '%s') % (prefix, new_id)
def __call__(self):
    """Migrates all items."""
    catalog = getToolByName(self.context, 'portal_catalog')
    normalize = getUtility(IIDNormalizer).normalize
    response = ""
    stats = {}
    items = catalog(portal_type="ShopItem")
    for item in items:
        obj = item.getObject()
        stats[obj.UID()] = {'status': 'UNKNOWN', 'result': 'UNKNOWN'}
        var_conf = IVariationConfig(obj)

        # Skip broken OrderedDict items
        var_dict = var_conf.getVariationDict()
        if str(type(var_dict)) == "<class 'zc.dict.dict.OrderedDict'>":
            status = "SKIPPED: Broken OrderedDict Item '%s' at '%s'" % (
                obj.Title(), obj.absolute_url())
            response += status + "\n"
            print status
            stats[obj.UID()] = {'status': 'SKIPPED', 'result': 'SUCCESS'}
            continue

        varAttrs = var_conf.getVariationAttributes()
        num_variations = len(varAttrs)
        if num_variations == 0:
            # No migration needed
            stats[obj.UID()] = {
                'status': 'NO_MIGRATION_NEEDED',
                'result': 'SUCCESS'
            }
            continue

        # Migrate items with 2 variations
        if num_variations == 2:
            migrated = True
            # Create mapping from old to new keys
            mapping = {}
            for i, v1 in enumerate(var_conf.getVariation1Values()):
                for j, v2 in enumerate(var_conf.getVariation2Values()):
                    vkey = "%s-%s" % (normalize(v1), normalize(v2))
                    vcode = "var-%s-%s" % (i, j)
                    mapping[vkey] = vcode
            # Check if item needs to be migrated
            for key in var_dict.keys():
                if key in mapping.keys():
                    migrated = False
            if migrated:
                # Already migrated
                stats[obj.UID()] = {
                    'status': 'ALREADY_MIGRATED',
                    'result': 'SUCCESS'
                }
            else:
                # Migrate the item
                print "Migrating %s..." % obj.Title()
                for vkey in mapping.keys():
                    vcode = mapping[vkey]
                    try:
                        # Store data with new vcode
                        var_dict[vcode] = var_dict[vkey]
                        del var_dict[vkey]
                        var_conf.updateVariationConfig(var_dict)
                        transaction.commit()
                        stats[obj.UID()] = {
                            'status': 'MIGRATED',
                            'result': 'SUCCESS'
                        }
                    except KeyError:
                        status = "FAILED: Migration of item '%s' at '%s' failed!" % (
                            obj.Title(), obj.absolute_url())
                        response += status + "\n"
                        print status
                        stats[obj.UID()] = {
                            'status': 'FAILED',
                            'result': 'FAILED'
                        }
                        break
                if stats[obj.UID()]['status'] == 'MIGRATED':
                    status = "MIGRATED: Item '%s' at '%s'" % (
                        obj.Title(), obj.absolute_url())
                    response += status + "\n"
                    print status

        # Migrate items with 1 variation
        if num_variations == 1:
            migrated = True
            # Create mapping from old to new keys
            mapping = {}
            for i, v1 in enumerate(var_conf.getVariation1Values()):
                vkey = normalize(v1)
                vcode = "var-%s" % i
                mapping[vkey] = vcode
            # Check if item needs to be migrated
            for key in var_dict.keys():
                if key in mapping.keys():
                    migrated = False
            if migrated:
                # Already migrated
                stats[obj.UID()] = {
                    'status': 'ALREADY_MIGRATED',
                    'result': 'SUCCESS'
                }
            else:
                # Migrate this item
                print "Migrating %s..." % obj.Title()
                for vkey in mapping.keys():
                    vcode = mapping[vkey]
                    try:
                        # Store data with new vcode
                        var_dict[vcode] = var_dict[vkey]
                        del var_dict[vkey]
                        var_conf.updateVariationConfig(var_dict)
                        transaction.commit()
                        stats[obj.UID()] = {
                            'status': 'MIGRATED',
                            'result': 'SUCCESS'
                        }
                    except KeyError:
                        status = "FAILED: Migration of item '%s' at '%s' failed!" % (
                            obj.Title(), obj.absolute_url())
                        response += status + "\n"
                        print status
                        stats[obj.UID()] = {
                            'status': 'FAILED',
                            'result': 'FAILED'
                        }
                        break
                if stats[obj.UID()]['status'] == 'MIGRATED':
                    status = "MIGRATED: Item '%s' at '%s'" % (
                        obj.Title(), obj.absolute_url())
                    response += status + "\n"
                    print status

    total = len(items)
    migrated = len(
        [stats[k] for k in stats if stats[k]['status'] == 'MIGRATED'])
    skipped = len(
        [stats[k] for k in stats if stats[k]['status'] == 'SKIPPED'])
    failed = len(
        [stats[k] for k in stats if stats[k]['status'] == 'FAILED'])
    no_migration_needed = len([
        stats[k] for k in stats
        if stats[k]['status'] == 'NO_MIGRATION_NEEDED'])
    already = len([
        stats[k] for k in stats
        if stats[k]['status'] == 'ALREADY_MIGRATED'])
    summary = "TOTAL: %s MIGRATED: %s SKIPPED: %s "\
              "FAILED: %s NO MIGRATION NEEDED: %s ALREADY_MIGRATED: %s" % (
                  total, migrated, skipped, failed, no_migration_needed,
                  already)
    response = "%s\n\n%s" % (summary, response)
    return response
def setUp(self):
    self._manager = getUtility(IListManager)
def searchResults(self):
    if self.params.get('action') != 'search':
        return
    ala = getUtility(IALAService)
    try:
        ret = ala.searchjson(q=self.params['searchOccurrence_query'],
                             fq='rank:species')
        result = json.load(ret)
    except Exception:
        # TODO: yield result with error
        return
    if 'searchResults' not in result:
        return
    if 'results' not in result['searchResults']:
        return
    for item in result['searchResults']['results']:
        search_words = self.params['searchOccurrence_query'].lower().split()
        result = {
            'title': item['name'],
            'friendlyName': item['name'],
            'description': [],
            'actions': {},
        }
        if 'commonNameSingle' in item:
            result['title'] = '{} <i class="taxonomy">{}</i>'.format(
                item['commonNameSingle'], result['title'])
            result['friendlyName'] = '{} {}'.format(
                item['commonNameSingle'], result['friendlyName'])
        # and filter all searchwords
        resulttitle = result['title'].lower()
        if any(sw not in resulttitle for sw in search_words):
            continue
        # filter out results without occurrences
        if not item.get('occCount', 0) > 0:
            continue
        if item.get('rank'):
            result['description'].append('({})'.format(item['rank']))
        if item.get('occCount'):
            result['description'].append(
                '{} occurrences from ALA'.format(item['occCount']))
        result['description'] = ' '.join(result['description'])
        # prefer smallImage over thumbnail?
        if item.get('smallImageUrl'):
            result['thumbUrl'] = item['smallImageUrl']
        elif item.get('thumbnailUrl'):
            result['thumbUrl'] = item['thumbnailUrl']
        else:
            result['thumbUrl'] = ''
        # get actions
        if item.get('guid'):
            # TODO: uri path encode guid
            result['actions']['viz'] = (
                'https://bie-ws.ala.org.au/species/' + item['guid'])
            params = urlencode({
                'lsid': item['guid'],
                'taxon': item['name'],
                'common': item.get('commonNameSingle'),
                'import': 'Import',
            })
            # TODO: need a way to generate ajax url?
            # TODO: can I use view name/id here?
            result['actions']['alaimport'] = (
                self.context.absolute_url() +
                "/datasets_import_view?" + params)
        yield result
def _process(self, mlist, msg, msgdata):
    """See `TerminalChainBase`."""
    # Start by decorating the message with a header that contains a list
    # of all the rules that matched.  These metadata could be None or an
    # empty list.
    rule_hits = msgdata.get('rule_hits')
    if rule_hits:
        msg['X-Mailman-Rule-Hits'] = SEMISPACE.join(rule_hits)
    rule_misses = msgdata.get('rule_misses')
    if rule_misses:
        msg['X-Mailman-Rule-Misses'] = SEMISPACE.join(rule_misses)
    reasons = format_reasons(msgdata.get('moderation_reasons', ['n/a']))
    # Hold the message by adding it to the list's request database.
    request_id = hold_message(mlist, msg, msgdata, SEMISPACE.join(reasons))
    # Calculate a confirmation token to send to the author of the
    # message.
    pendable = HeldMessagePendable(id=request_id)
    token = getUtility(IPendings).add(pendable)
    # Get the language to send the response in.  If the sender is a
    # member, then send it in the member's language, otherwise send it in
    # the mailing list's preferred language.
    member = mlist.members.get_member(msg.sender)
    language = (member.preferred_language
                if member else mlist.preferred_language)
    # A substitution dictionary for the email templates.
    charset = mlist.preferred_language.charset
    original_subject = msg.get('subject')
    if original_subject is None:
        original_subject = _('(no subject)')
    else:
        # This must be encoded to the mailing list's preferred charset,
        # ignoring incompatible characters, otherwise when creating the
        # notification messages, we could get a Unicode error.
        oneline_subject = oneline(original_subject, in_unicode=True)
        bytes_subject = oneline_subject.encode(charset, 'replace')
        original_subject = bytes_subject.decode(charset)
    substitutions = dict(
        subject=original_subject,
        sender_email=msg.sender,
        reasons=_compose_reasons(msgdata),
        # For backward compatibility.
        sender=msg.sender,
        )
    # At this point the message is held, but now we have to craft at least
    # two responses.  The first will go to the original author of the
    # message and it will contain the token allowing them to approve or
    # discard the message.  The second one will go to the moderators of
    # the mailing list, if the list is so configured.
    #
    # Start by possibly sending a response to the message author.  There
    # are several reasons why we might not go through with this.  If the
    # message was gated from NNTP, the author may not even know about this
    # list, so don't spam them.  If the author specifically requested that
    # acknowledgments not be sent, or if the message was bulk email, then
    # we do not send the response.  It's also possible that either the
    # mailing list, or the author (if they are a member) have been
    # configured to not send such responses.
    if (not msgdata.get('fromusenet')
            and can_acknowledge(msg)
            and mlist.respond_to_post_requests
            and autorespond_to_sender(mlist, msg.sender, language)):
        # We can respond to the sender with a message indicating their
        # posting was held.
        subject = _(
            'Your message to $mlist.fqdn_listname awaits moderator approval')
        send_language_code = msgdata.get('lang', language.code)
        template = getUtility(ITemplateLoader).get(
            'list:user:notice:hold', mlist,
            language=send_language_code)
        text = wrap(expand(template, mlist,
                           dict(language=send_language_code,
                                **substitutions)))
        adminaddr = mlist.bounces_address
        nmsg = UserNotification(
            msg.sender, adminaddr, subject, text,
            getUtility(ILanguageManager)[send_language_code])
        nmsg.send(mlist)
    # Now the message for the list moderators.  This one should appear to
    # come from <list>-owner since we really don't need to do bounce
    # processing on it.
    if mlist.admin_immed_notify:
        # Now let's temporarily set the language context to that which the
        # administrators are expecting.
        with _.using(mlist.preferred_language.code):
            language = mlist.preferred_language
            charset = language.charset
            substitutions['subject'] = original_subject
            # We need to regenerate or re-translate a few values in the
            # substitution dictionary.
            substitutions['reasons'] = _compose_reasons(msgdata, 55)
            # craft the admin notification message and deliver it
            subject = _(
                '$mlist.fqdn_listname post from $msg.sender requires '
                'approval')
            nmsg = UserNotification(mlist.owner_address,
                                    mlist.owner_address,
                                    subject, lang=language)
            nmsg.set_type('multipart/mixed')
            template = getUtility(ITemplateLoader).get(
                'list:admin:action:post', mlist)
            text = MIMEText(expand(template, mlist, substitutions),
                            _charset=charset)
            dmsg = MIMEText(wrap(_("""\
If you reply to this message, keeping the Subject: header intact, Mailman will
discard the held message.  Do this if the message is spam.  If you reply to
this message and include an Approved: header with the list password in it, the
message will be approved for posting to the list.  The Approved: header can
also appear in the first line of the body of the reply.""")),
                            _charset=language.charset)
            dmsg['Subject'] = 'confirm ' + token
            dmsg['From'] = mlist.request_address
            dmsg['Date'] = formatdate(localtime=True)
            dmsg['Message-ID'] = make_msgid()
            nmsg.attach(text)
            nmsg.attach(MIMEMessage(msg))
            nmsg.attach(MIMEMessage(dmsg))
            nmsg.send(mlist)
    # Log the held message.  Log messages are not translated, so recast
    # the reasons in English.
    with _.using('en'):
        reasons = format_reasons(
            msgdata.get('moderation_reasons', ['N/A']))
    log.info('HOLD: %s post from %s held, message-id=%s: %s',
             mlist.fqdn_listname, msg.sender,
             msg.get('message-id', 'n/a'), SEMISPACE.join(reasons))
    notify(HoldEvent(mlist, msg, msgdata, self))
def package_names(self):
    """See `IHWDBApplication`."""
    return getUtility(IHWDriverSet).all_package_names()
def setUp(self):
    self._ant = create_list('*****@*****.**')
    self._bee = create_list('*****@*****.**')
    self._usermanager = getUtility(IUserManager)
def drivers(self, package_name=None, name=None):
    """See `IHWDBApplication`."""
    return getUtility(IHWDriverSet).search(package_name, name)
def hwInfoByBugRelatedUsers(
        self, bug_ids=None, bug_tags=None, affected_by_bug=False,
        subscribed_to_bug=False, user=None):
    """See `IHWDBApplication`."""
    return getUtility(IHWSubmissionSet).hwInfoByBugRelatedUsers(
        bug_ids, bug_tags, affected_by_bug, subscribed_to_bug, user)
def translator_count(self):
    """See `IRosettaApplication`."""
    stats = getUtility(ILaunchpadStatisticSet)
    return stats.value('translator_count')
def vendorIDs(self, bus):
    """See `IHWDBApplication`."""
    return getUtility(IHWVendorIDSet).idsForBus(bus)
def translatable_distroseriess(self):
    """See `IRosettaApplication`."""
    distroseriess = getUtility(IDistroSeriesSet)
    return distroseriess.translatables()
def devices(self, bus, vendor_id, product_id=None):
    """See `IHWDBApplication`."""
    return getUtility(IHWDeviceSet).search(bus, vendor_id, product_id)
def translation_groups(self):
    """See `IRosettaApplication`."""
    return getUtility(ITranslationGroupSet)
def pomsgid_count(self):
    """See `IRosettaApplication`."""
    stats = getUtility(ILaunchpadStatisticSet)
    return stats.value('pomsgid_count')
def languages(self):
    """See `IRosettaApplication`."""
    return getUtility(ILanguageSet)
def translatable_products(self):
    """See `IRosettaApplication`."""
    products = getUtility(IProductSet)
    return products.getTranslatables()
def shared_bug_count(self):
    return getUtility(ILaunchpadStatisticSet).value('shared_bug_count')
def statsdate(self):
    stats = getUtility(ILaunchpadStatisticSet)
    return stats.dateupdated('potemplate_count')
def bugtracker_count(self):
    return getUtility(IBugTrackerSet).count
def top_bugtrackers(self):
    return getUtility(IBugTrackerSet).getMostActiveBugTrackers(limit=5)
def bugwatch_count(self):
    return getUtility(IBugWatchSet).search().count()
def projects_with_bugs_count(self):
    return getUtility(ILaunchpadStatisticSet).value('projects_with_bugs')
def searchTasks(self, search_params):
    """See `IMaloneApplication`."""
    return getUtility(IBugTaskSet).search(search_params)
def bugtask_count(self):
    user = getUtility(ILaunchBag).user
    search_params = BugTaskSearchParams(user=user)
    return getUtility(IBugTaskSet).search(search_params).count()