def incrementing_intids(starting_number=99000):
    """In testing environments we often want to have predictable intids, but using a time based version may not work well when time is frozen. This implementation replaces the intids generator with an incrementing one. """
    original_intids = getUtility(IIntIds)
    # Mutable cell so the nested class can bump the value (Python 2 has
    # no ``nonlocal``).
    counter = {'number': starting_number}

    class IncrementingIntIds(type(original_intids)):
        def __init__(self):
            # Share state (refs, ids) with the original utility so
            # existing registrations keep working.
            self.__dict__ = original_intids.__dict__

        def _generateId(self):
            counter['number'] += 1
            intid = counter['number']
            # Skip over ids that are already taken.
            while intid in self.refs:
                intid += 1
            return intid

    # Expose the class in module globals so pickling/unpickling of
    # instances can resolve it by name.
    globals()['IncrementingIntIds'] = IncrementingIntIds
    patched_intids = IncrementingIntIds()
    getSite().getSiteManager().registerUtility(patched_intids, IIntIds)
    try:
        yield
    finally:
        # Restore the original utility and remove the temporary global.
        getSite().getSiteManager().registerUtility(original_intids, IIntIds)
        globals().pop('IncrementingIntIds')
def time_based_intids():
    """To ensure predictable IntIds in tests, this context manager patches the IntIds utility so that IntIds are created based on the current time """
    original_intids = getUtility(IIntIds)

    class TimeBasedIntIds(type(original_intids)):
        def __init__(self):
            # Share state with the original utility.
            self.__dict__ = original_intids.__dict__

        def _generateId(self):
            # Encode the current UTC time as an intid: 10HHMMSS00.
            intid = int(datetime.now(pytz.UTC).strftime('10%H%M%S00'))
            # Skip over ids that are already taken.
            while intid in self.refs:
                intid += 1
            return intid

    # Expose the class in module globals so pickling/unpickling of
    # instances can resolve it by name.
    globals()['TimeBasedIntIds'] = TimeBasedIntIds
    patched_intids = TimeBasedIntIds()
    getSite().getSiteManager().registerUtility(patched_intids, IIntIds)
    try:
        yield
    finally:
        # Restore the original utility and remove the temporary global.
        getSite().getSiteManager().registerUtility(original_intids, IIntIds)
        globals().pop('TimeBasedIntIds')
def upgrade(context):
    """Migrate translation groups from the IMultilingualStorage utility
    into a 'TranslationGroup' catalog index, then drop the utility.
    """
    # Move the Storage to catalog
    storage = getUtility(IMultilingualStorage)
    canonicals = storage.get_canonicals()
    already_added_canonicals = []
    generator = queryUtility(IUUIDGenerator)
    for canonical in canonicals.keys():
        canonical_object = canonicals[canonical]
        canonical_languages = canonical_object.get_keys()
        # Several keys can point at the same canonical object; only
        # assign a translation group once per object (tracked by id()).
        if id(canonical_object) not in already_added_canonicals:
            tg = generator()
            for canonical_language in canonical_languages:
                obj = uuidToObject(canonical_object.get_item(canonical_language))
                if obj is not None:
                    IMutableTG(obj).set(tg)
                    obj.reindexObject()
            already_added_canonicals.append(id(canonical_object))
    # Uninstall the utility
    getSite().getSiteManager().unregisterUtility(storage, IMultilingualStorage)
    del storage
    # Install the index and rebuild
    pcatalog = getToolByName(context, 'portal_catalog', None)
    if pcatalog is not None:
        indexes = pcatalog.indexes()
        if 'TranslationGroup' not in indexes:
            pcatalog.addIndex('TranslationGroup', 'FieldIndex')
            pcatalog.manage_reindexIndex(ids=['TranslationGroup'])
        else:
            pcatalog.clearFindAndRebuild()
    transaction.commit()
def write_workflow(self, specification_path, output_formatter=None):
    """Generate the workflow for ``specification_path``.

    Returns False when the specification cannot be loaded or generation
    fails; failures are reported through ``output_formatter`` when one is
    given, otherwise the exception is re-raised.
    """
    specification = self._get_specification(
        specification_path, output_formatter=output_formatter)
    if not specification:
        return False
    generator = getUtility(IWorkflowGenerator)
    try:
        generator(self._workflow_id(specification_path), specification)
    except ConflictError:
        # Never swallow ZODB conflict errors.
        raise
    except Exception, exc:
        if not output_formatter:
            raise
        # Record the full traceback in the site error log before
        # reporting a translated message to the formatter.
        getSite().error_log.raising(sys.exc_info())
        output_formatter(
            'error',
            _(u'error_while_generating_workflow',
              default=u'${id}: Error while generating'
              u' the workflow: ${msg}',
              mapping={'msg': str(exc).decode('utf-8'),
                       'id': self._workflow_id(specification_path)}))
        return False
def addToCart(self):
    """Create a line item for the current context and add it to the cart.

    When a recurring-payment conflict occurs, a status message is shown
    and the user is redirected back to the referring page (or the site
    root when no referrer is available).
    """
    # create a line item and add it to the cart
    item_factory = component.getMultiAdapter(
        (self.cart, self.context), interfaces.ILineItemFactory)
    # check quantity from request
    qty = int(self.request.get('quantity', 1))
    try:
        item_factory.create(quantity=qty)
    except interfaces.AddRecurringItemException:
        came_from = self.request.environ.get('HTTP_REFERER',
                                             getSite().absolute_url())
        # Normalized message text: the original backslash-continued
        # string embedded the source indentation in the user message.
        msg = ("Your shopping cart already has items in it. "
               "A recurring payment item may not be added until "
               "you check out or delete the existing items.")
        IStatusMessage(self.request).addStatusMessage(msg, type='error')
        self.request.response.redirect(came_from)
        return ''
    except interfaces.RecurringCartItemAdditionException:
        came_from = self.request.environ.get('HTTP_REFERER',
                                             getSite().absolute_url())
        # Bug fix: corrected the 'addtional' typo in the user message.
        msg = ("Your shopping cart already holds a recurring payment. "
               "Please purchase the current item or delete it from your "
               "cart before adding additional items.")
        IStatusMessage(self.request).addStatusMessage(msg, type='error')
        self.request.response.redirect(came_from)
        return ''
def upgrade_portal_language(context):
    """Migrate language settings from portal properties and the old
    portal_languages tool into the plone registry."""
    portal = getSite()
    registry = getUtility(IRegistry)
    # XXX: Somehow this code is executed for old migration steps as well
    # ( < Plone 4 ) and breaks because there is no registry. Looking up the
    # registry interfaces with 'check=False' will not work, because it will
    # return a settings object and then fail when we try to access the
    # attributes.
    try:
        lang_settings = registry.forInterface(ILanguageSchema, prefix='plone')
    except KeyError:
        return
    # Get old values
    # Merge default language options to registry
    portal = getUtility(ISiteRoot)
    default_lang = portal.getProperty('default_language', 'en')
    portal_properties = getToolByName(context, "portal_properties", None)
    if portal_properties is not None:
        site_properties = getattr(portal_properties, 'site_properties', None)
        if site_properties is not None:
            # site_properties wins over the portal-level property.
            if site_properties.hasProperty('default_language'):
                default_lang = site_properties.getProperty('default_language')
    lang_settings.default_language = default_lang
    if hasattr(portal, 'portal_languages'):
        portal_languages = getSite().portal_languages
        # Copy every known tool attribute onto the registry settings.
        for old, new in LANGUAGE_OPTION_MAPPING.items():
            if hasattr(portal_languages, old):
                setattr(lang_settings, new, getattr(portal_languages, old))
        # Remove the old tool
        portal.manage_delObjects('portal_languages')
def _dump_zodb_to(self, zodbDB, stack):
    """Dump the zodbDB into a data.fs by constructing a FileStorage database and copying the transactions from the DemoStorage. """
    ori_site_manager_bases = None
    if getSite():
        # The __bases__ of our local persistent component registry is
        # probably a volatile site manager. Pickling it will corrupt the
        # database.
        # Therefore we remember the stack bases and remove the __bases__
        # for the duration of the DB dump.
        ori_site_manager_bases = getSite().getSiteManager().__bases__
        self.data['site_site_manager_bases'][str(stack['path'].name)] = [
            base.__name__ for base in ori_site_manager_bases
        ]
        getSite().getSiteManager().__bases__ = ()
        transaction.commit()  # Make sure we have the latest state.
    # The transaction records in testing have no _extension set, causing
    # a RuntimeError when copied to a filestorage.
    # NOTE: relies on Python 2 ``map`` being eager (executed for side
    # effects); under Python 3 this would be a no-op.
    map(lambda record: setattr(record, '_extension', record.extension),
        zodbDB.storage.iterator())
    zodb_file = str(stack['path'].joinpath('zodb.fs'))
    blob_dir = str(stack['path'].joinpath('blobs'))
    cache_storage = FileStorage(zodb_file, create=True, blob_dir=blob_dir)
    copyTransactionsFromTo(zodbDB.storage, cache_storage)
    if ori_site_manager_bases is not None:
        # Restore the __bases__ of the local persistent component registry,
        # which've removed above.
        getSite().getSiteManager().__bases__ = ori_site_manager_bases
        transaction.commit()
def test_isolate_sitehook(self):
    """``isolate_sitehook`` must hide the current site inside the block
    and restore it on exit, even if a fake site was set inside."""
    setSite(self.layer['portal'])
    with isolate_sitehook():
        # Bug fix: the original called ``assertIsNone(None, getSite())``,
        # which always passes because ``getSite()`` was used as the *msg*
        # argument. Assert on the actual current site instead.
        self.assertIsNone(getSite())
        setSite(PloneSite('fakesite'))
    self.assertEquals(self.layer['portal'], getSite())
def pop_from_tmpstorage(obj):
    """Move the conversation temporarily stored on the portal onto ``obj``."""
    portal = getSite()
    # Copy the conversation from the portal (tmpstorage) to the object
    # and reparent it.
    IAnnotations(obj)[ANNOTATION_KEY] = IConversation(portal)
    IConversation(obj).__parent__ = obj
    # Delete the conversation on the portal (tmpstorage).
    del IAnnotations(portal)[ANNOTATION_KEY]
def __init__(self, *args, **kwargs):
    # Initialize the base renderer, then cache the latest news item and
    # the feed URLs for use in the template.
    base.Renderer.__init__(self, *args, **kwargs)
    try:
        # Latest item from the 'news/feed' collection (batch of one).
        self.news_item = getSite()["news"]["feed"].queryCatalog(
            batch=True, b_size=1)[0].getObject()
    except IndexError:
        # No news items available.
        self.news_item = None
    self.news_url = getSite()["news"].absolute_url()
    self.news_rss_url = self.news_url + "/feed/RSS"
def test_register_once_per_connection(self):
    # Only the first registration per connection may succeed; repeated
    # calls (even with another path) must be rejected.
    once = maintenance.register_once_per_connection
    self.assertTrue(once('/test', getSite(), 1))
    self.assertFalse(once('/test', getSite(), 1))
    self.assertFalse(once('/test2', getSite(), 1))
    # Exactly one clockserver was created despite three attempts.
    self.assertEqual(1, len(maintenance._clockservers))
def __init__(self, *args, **kwargs):
    # Initialize the base renderer, then cache the latest archive item
    # and the feed / iTunes URLs for use in the template.
    base.Renderer.__init__(self, *args, **kwargs)
    try:
        # Latest item from the 'archives/feed' collection (batch of one).
        self.archives_item = getSite()["archives"]["feed"].queryCatalog(
            batch=True, b_size=1)[0].getObject()
    except IndexError:
        # No archive items available.
        self.archives_item = None
    self.archives_url = getSite()["archives"].absolute_url()
    self.archives_rss_url = self.archives_url + "/feed/itunes.xml"
    # Replace the URL scheme with 'itpc:' so podcast clients subscribe.
    self.archives_itunes_url = "itpc:" + self.archives_url.split(":", 1)[1] + "/feed/itunes.xml"
def register_event_recorder(*required):
    """Register a generic event subscriber for recording certain events.

    In order to be able to use the testbrowser, the transaction must be
    able to synchronize the threads. The easiest way to do that is to
    store the infos in the database. We do that by just attaching them
    to the Plone site.
    """
    site_manager = getSite().getSiteManager()
    site_manager.registerHandler(recording_event_subscriber, list(required))
def solrSearchResults(request=None, **keywords):
    """ perform a query using solr after translating the passed in parameters with portal catalog semantics """
    search = queryUtility(ISearch)
    config = queryUtility(ISolrConnectionConfig)
    if request is None:
        # try to get a request instance, so that flares can be adapted to
        # ploneflares and urls can be converted into absolute ones etc;
        # however, in this case any arguments from the request are ignored
        request = getattr(getSite(), 'REQUEST', None)
        args = deepcopy(keywords)
    elif IHTTPRequest.providedBy(request):
        args = deepcopy(request.form)  # ignore headers and other stuff
        args.update(keywords)  # keywords take precedence
    else:
        assert isinstance(request, dict), request
        args = deepcopy(request)
        args.update(keywords)  # keywords take precedence
        # if request is a dict, we need the real request in order to
        # be able to adapt to plone flares
        request = getattr(getSite(), 'REQUEST', args)
    if 'path' in args and 'navtree' in args['path']:
        raise FallBackException  # we can't handle navtree queries yet
    use_solr = args.get('use_solr', False)  # A special key to force Solr
    if not use_solr and config.required:
        # Fall back to the regular catalog unless every required
        # parameter is present and non-empty.
        required = set(config.required).intersection(args)
        if required:
            for key in required:
                if not args[key]:
                    raise FallBackException
        else:
            raise FallBackException
    schema = search.getManager().getSchema() or {}
    params = cleanupQueryParameters(extractQueryParameters(args), schema)
    languageFilter(args)
    prepareData(args)
    mangleQuery(args, config, schema)
    query = search.buildQuery(**args)
    if query != {}:
        optimizeQueryParameters(query, params)
        __traceback_info__ = (query, params, args)
        response = search(query, **params)
    else:
        # Empty query: return an empty response instead of querying solr.
        return SolrResponse()

    def wrap(flare):
        """ wrap a flare object with a helper class """
        adapter = queryMultiAdapter((flare, request), IFlare)
        return adapter is not None and adapter or flare

    results = response.results()
    for idx, flare in enumerate(results):
        flare = wrap(flare)
        # Fill fields missing from the flare with the "missing value"
        # marker so catalog-brain-style access works.
        for missing in set(schema.stored).difference(flare):
            flare[missing] = MV
        results[idx] = wrap(flare)
    padResults(results, **params)  # pad the batch
    return response
def _load_state(self):
    """Lazily resolve and cache the portal, sender and localization
    helpers needed by this view/adapter."""
    self._loaded = True
    self.uid = getuid(self.context)
    self.portal = getSite()
    # Bug fix: removed the unused ``altportal = getSite()`` local.
    self.sender = invitation_sender(self.portal)
    self.localize = getToolByName(self.portal, 'translation_service')
    self.timefn = self.localize.ulocalized_time
    if HAS_PAE:
        self.timefn = ulocalized_time  # fixed DateTime timezone bug
def _get_catalogs(obj):
    """Return the (uid_catalog, reference_catalog) tools for ``obj``,
    falling back to the site root when acquisition from ``obj`` fails."""
    catalogs = []
    for catalog_id in (config.UID_CATALOG, config.REFERENCE_CATALOG):
        try:
            catalogs.append(getToolByName(obj, catalog_id))
        except AttributeError:
            # obj is not acquisition-wrapped; look the tool up from the site.
            catalogs.append(getToolByName(getSite(), catalog_id))
    uid_catalog, ref_catalog = catalogs
    return uid_catalog, ref_catalog
def action_join(self, action, data):
    """Handle the join form, then redirect depending on the email domain."""
    self.handle_join_success(data)
    # Modernized: ``dict.has_key`` is deprecated; use the ``in`` operator.
    if 'email' in data:
        # NOTE(review): ``endswith('ilo.org')`` also matches domains such
        # as 'silo.org' — confirm whether '@ilo.org'/'.ilo.org' was meant.
        if data['email'].endswith('ilo.org'):
            return self.request.response.redirect(
                getSite().absolute_url() + '/registration_successful')
        else:
            return self.request.response.redirect(
                getSite().absolute_url() + '/registration_success')
def translated_portal_type(self):
    """Return the translated title of this entry's portal_type, falling
    back to translating the raw type id in the 'plone' domain."""
    request = getSite().REQUEST
    portal_types = getToolByName(getSite(), 'portal_types')
    fti = portal_types.get(self.attrs['portal_type'], None)
    # Fallback translation when no FTI exists (type was removed).
    default = translate(self.attrs['portal_type'], domain='plone',
                        context=request)
    if fti:
        return translate(fti.title, domain=fti.i18n_domain,
                         default=default, context=request)
    return default
def get_obj_by_relative_path(relative_path):
    """Returns the object by a path relative to the site root.
    If no object is found, None is returned.
    Bad acquisition lookups are eliminated.
    """
    site_path = '/'.join(getSite().getPhysicalPath())
    obj_path = '/'.join((site_path, relative_path.strip('/')))
    obj = getSite().restrictedTraverse(obj_path, None)
    # Guard against acquisition: the traversed object's real physical
    # path must match the requested path exactly.
    if not obj or '/'.join(obj.getPhysicalPath()) != obj_path:
        return None
    return obj
def pathcu(self):
    """Return the physical paths of the CU activity folders."""
    site = getSite()
    relative_paths = (
        'actividades/coloquio',
        'actividades/seminarios',
        'actividades/actividades-especiales/cu',
    )
    return ['/'.join(site.unrestrictedTraverse(path).getPhysicalPath())
            for path in relative_paths]
def get_specification(workflow_id):
    """Locate and parse the workflow specification for ``workflow_id``.

    Returns None when no discovered specification file lives in a
    directory named after the workflow id.
    """
    discovery = getMultiAdapter((getSite(), getSite().REQUEST),
                                IWorkflowSpecificationDiscovery)
    parser = getUtility(IWorkflowSpecificationParser)
    # A specification belongs to the workflow whose id matches its
    # parent directory name.
    matching = (path for path in discovery.discover()
                if os.path.basename(os.path.dirname(path)) == workflow_id)
    for path in matching:
        with open(path) as specfile:
            return parser(specfile, path=path, silent=True)
    return None
def get_actor_info(self):
    """Return display info (url, portrait_url, fullname) for the actor
    recorded in ``self.attrs``."""
    membership = getToolByName(getSite(), 'portal_membership')
    portal_url = getToolByName(getSite(), 'portal_url')
    userid = self.attrs['actor']
    member = membership.getMemberById(userid)
    if not member:
        # Unknown or deleted member: fall back to the default portrait.
        return {'url': '',
                'portrait_url': portal_url() + '/defaultUser.png',
                'fullname': userid or 'N/A'}
    portrait = membership.getPersonalPortrait(userid)
    return {'url': membership.getHomeUrl(userid),
            'portrait_url': portrait and portrait.absolute_url() or '',
            'fullname': member.getProperty('fullname') or userid}
def getPhysicalPath(self):
    """Return the physical path, synthesizing one for virtual SQL items.

    this needs implementation if the object doesnt exists for real
    in portal
    """
    if not self.sql_virtual:
        # Real ZODB object: defer to the regular implementation.
        return super(SQLDexterityItem, self).getPhysicalPath()
    # Bug fix: removed the unused ``portal_url`` local (it only called
    # the portal_url tool and discarded the result).
    fti = ISQLTypeSettings(getUtility(IDexterityFTI, name=self.portal_type))
    folder = None
    parent_path = None
    # If the FTI points at a container via a relation, anchor the
    # virtual path below that container.
    if IRelationValue.providedBy(getattr(fti, 'sql_folder_id', None)):
        folder = fti.sql_folder_id.to_object
        if folder:
            parent_path = folder.getPhysicalPath()
    if not parent_path:
        # Fallback: a per-type 'data-<portal_type>' folder in the site root.
        parent_path = ('', getSite().id, 'data-' + self.portal_type,)
    return parent_path + (str(self.id),)
def test_site():
    # ``Site`` must set the site on enter and restore it on exit — also
    # when the block is left via an exception.
    site = DummySite()
    with Site(site):
        assert getSite() is site
    assert getSite() is not site
    try:
        with Site(site):
            assert getSite() is site
            raise RuntimeError('')
    except RuntimeError:
        pass
    # The site must be restored even after the exceptional exit.
    assert getSite() is not site
def _global_unprotect(self):
    """Whitelist objects that are legitimately written during a request."""
    # portal_memberdata._members cache will be written sometimes.
    if IPloneSiteRoot.providedBy(getSite()):
        unprotected_write(getToolByName(getSite(), 'portal_memberdata')._members)
    context = self.getContext()
    # always allow writes to context's annotations.
    if IAnnotatable.providedBy(context):
        annotations = IAnnotations(context)
        unprotected_write(annotations)
        if CONTEXT_ASSIGNMENT_KEY in annotations:
            # also allow writes to context portlet assignments
            unprotected_write(annotations[CONTEXT_ASSIGNMENT_KEY])
def trigger_mopage_refresh(obj, event):
    """Queue a mopage refresh task when ``obj`` or one of its ancestors
    is an event page with an enabled mopage trigger."""
    # NOTE: relies on Python 2 ``filter``/``map`` returning lists (list
    # truthiness and indexing below).
    event_pages = filter(None, map(lambda parent: IEventPage(parent, None),
                                   aq_chain(obj)))
    if not event_pages:
        # We are not within an event page.
        # We only trigger when publishing an event page
        # or a child of an event page.
        return
    triggers = filter(None,
                      map(lambda parent: IPublisherMopageTrigger(parent, None),
                          aq_chain(obj)))
    if not triggers or not triggers[0].is_enabled():
        return
    # Touch the modification date of every surrounding event page so the
    # remote service picks up the change.
    for events in event_pages:
        IMopageModificationDate(events).touch()
    # Local import: avoids a hard dependency at module import time.
    from collective.taskqueue import taskqueue
    trigger_url = triggers[0].build_trigger_url()
    callback_path = '/'.join(getSite().getPhysicalPath()
                             + ('taskqueue_events_trigger_mopage_refresh',))
    taskqueue.add(callback_path, params={'target': trigger_url})
def challenge(self, request, response, **kw):
    """ Challenge the user for credentials. Prevent redirect to login page for paths mentioned in control panel" """
    portal = getSite()
    annotations = IAnnotations(portal)
    # Paths configured in the control panel that should get a basic-auth
    # challenge instead of the cookie-login redirect.
    do_basic_auth_paths = annotations.get('rohberg.doorman.do_basic_auth_paths', [])
    if do_basic_auth_paths:
        realm = response.realm
        do_basic_auth = False
        # VIRTUAL_URL_PARTS[2] is the path below the virtual host root.
        vup = request.get("VIRTUAL_URL_PARTS", None)
        if vup:
            vup = list(vup)
            if len(vup) > 2:
                do_basic_auth = vup[2] in do_basic_auth_paths
        if do_basic_auth:
            # Answer with a plain 401 + WWW-Authenticate header.
            if realm:
                response.addHeader('WWW-Authenticate', 'basic realm="%s"' % realm)
            m = "<strong>You are not authorized to access this resource.</strong>"
            response.setBody(m, is_error=1)
            response.setStatus(401)
            # 1 signals that the challenge was handled by this plugin.
            return 1
    return 0
def __call__(self):
    """Apply ``self.action`` to every selected object and report which
    selected UIDs were not found in the catalog."""
    self.protect()
    self.errors = []
    site = getSite()
    context = aq_inner(self.context)
    selection = self.get_selection()
    self.dest = site.restrictedTraverse(
        str(self.request.form['folder'].lstrip('/')))
    self.catalog = getToolByName(context, 'portal_catalog')
    self.mtool = getToolByName(self.context, 'portal_membership')
    for brain in self.catalog(UID=selection):
        selection.remove(brain.UID)  # remove everyone so we know if we
                                     # missed any
        obj = brain.getObject()
        if self.required_obj_permission:
            if not self.mtool.checkPermission(self.required_obj_permission,
                                              obj):
                # NOTE(review): only an error message is recorded here —
                # ``self.action(obj)`` below still runs for the denied
                # object; confirm a ``continue`` is not missing.
                self.errors.append(_('Permission denied for "${title}"',
                                     mapping={'title': self.objectTitle(obj)}))
        self.action(obj)
    return self.message(selection)
def settings(self):
    """ Settings, looked up lazily and cached on the instance. """
    if self._settings is None:
        self._settings = queryAdapter(getSite(), IAlchemySettings)
    return self._settings
def traversal_stack(base, path):
    """Traverse ``path`` from ``base`` (or from the site root when the
    path is absolute) and return the list of objects visited along the
    way; returns None when traversal fails."""
    if path.startswith('/'):
        base = getSite()
        path = path[1:]
    obj = base
    stack = [obj]
    components = path.split('/')
    while components:
        child_id = unquote(components.pop(0))
        try:
            if hasattr(aq_base(obj), 'scale'):
                # Image scaling: the next component (if any) names the scale.
                if components:
                    child = obj.scale(child_id, components.pop())
                else:
                    child = obj.field(child_id).get(obj.context)
            else:
                # Do not use restrictedTraverse here; the path to the
                # image may lead over containers that lack the View
                # permission for the current user!
                # Also, if the image itself is not viewable, we rather
                # show a broken image than hide it or raise
                # unauthorized here (for the referring document).
                child = obj.unrestrictedTraverse(child_id)
        except ConflictError:
            raise
        except (AttributeError, KeyError, NotFound, ztkNotFound):
            return
        obj = child
        stack.append(obj)
    return stack
def _get_group(self, group_id):
    """Return the group object for ``group_id`` via portal_groups."""
    groups_tool = getToolByName(getSite(), "portal_groups")
    return groups_tool.getGroupById(group_id)
class getRestrictedTokenForm(form.SchemaForm):
    """Control-panel form that asks for the credentials of a valid MAX
    account, stores a restricted token for it and registers this portal
    as a context on the MAX server."""

    grok.name('getRestrictedToken')
    grok.require('cmf.ManagePortal')
    grok.template('gettokenform')
    grok.context(ISiteRoot)

    schema = ICredentials
    ignoreContext = True

    label = _(u'Get a valid token')
    description = _(u'Give the credentials of a valid account.')

    def update(self):
        # call the base class version - this is very important!
        super(getRestrictedTokenForm, self).update()
        # disable Plone's editable border
        self.request.set('disable_border', True)
        self.actions['get_token'].addClass('context')

    @button.buttonAndHandler(_(u'Get token'), name='get_token')
    def handleApply(self, action):
        data, errors = self.extractData()
        if errors:
            self.status = self.formErrorsMessage
            return
        username = data['username']
        password = data['password']
        maxclient, settings = getUtility(IMAXClient)()
        settings.max_restricted_username = username
        try:
            settings.max_restricted_token = maxclient.getToken(
                username, password)
            IStatusMessage(self.request).addStatusMessage(
                'Restricted token issued for user: {}'.format(username),
                'info')
        except AttributeError, error:
            # NOTE(review): arguments look swapped — ``error`` is passed
            # as the message and the text as the message *type*; confirm.
            IStatusMessage(self.request).addStatusMessage(
                error, 'Username or password invalid.')
        # Add context for this site MAX server with the restricted token
        portal = getSite()
        portal_permissions = dict(read='subscribed',
                                  write='subscribed',
                                  subscribe='restricted')
        # maxclient.setActor(self.maxui_settings.max_restricted_username)
        # maxclient.setToken(self.maxui_settings.max_restricted_token)
        # maxclient.addContext(portal.absolute_url(),
        #                      portal.title,
        #                      portal_permissions
        #                      )
        context_params = {
            'url': portal.absolute_url(),
            'displayName': portal.title,
            'permissions': portal_permissions
        }
        maxclient.setActor(settings.max_restricted_username)
        maxclient.setToken(settings.max_restricted_token)
        try:
            maxclient.contexts.post(**context_params)
        except:
            # NOTE(review): bare except hides the real failure; consider
            # catching the client's specific exception and logging it.
            IStatusMessage(self.request).addStatusMessage(
                'There was an error trying to create the default (portal root) URL into MAX server.',
                'error')
        # Add the restricted token to the Plone admin user
        set_user_oauth_token('admin', settings.max_restricted_token)
        # Redirect back with a status message
        self.request.response.redirect('{}/{}'.format(
            self.context.absolute_url(), '@@maxui-settings'))
def __call__(self):
    """Import setup data from an xlsx workbook — either a dataset shipped
    inside a project egg or an uploaded file — then rebuild the bika
    catalogs and redirect to the portal."""
    form = self.request.form
    portal = getSite()
    workbook = None
    if 'setupexisting' in form and 'existing' in form and form['existing']:
        # Shipped dataset selected as "<project>:<dataset>".
        fn = form['existing'].split(":")
        self.dataset_project = fn[0]
        self.dataset_name = fn[1]
        path = 'setupdata/%s/%s.xlsx' % \
            (self.dataset_name, self.dataset_name)
        filename = resource_filename(self.dataset_project, path)
        try:
            workbook = load_workbook(filename=filename)  # , use_iterators=True)
        except AttributeError:
            print ""
            print traceback.format_exc()
            print "Error while loading ", path
    elif 'setupfile' in form and 'file' in form and form['file'] and 'projectname' in form and form['projectname']:
        # Uploaded workbook: spool it to a temporary file first.
        self.dataset_project = form['projectname']
        tmp = tempfile.mktemp()
        file_content = form['file'].read()
        open(tmp, 'wb').write(file_content)
        workbook = load_workbook(filename=tmp)  # , use_iterators=True)
        self.dataset_name = 'uploaded'
    assert(workbook is not None)
    # Importers are matched by sheet name (spaces become underscores);
    # sheet-matched importers run first, all remaining ones afterwards.
    adapters = [[name, adapter] for name, adapter
                in list(getAdapters((self.context, ), ISetupDataImporter))]
    for sheetname in workbook.get_sheet_names():
        transaction.savepoint()
        ad_name = sheetname.replace(" ", "_")
        if ad_name in [a[0] for a in adapters]:
            adapter = [a[1] for a in adapters if a[0] == ad_name][0]
            adapter(self, workbook, self.dataset_project, self.dataset_name)
            adapters = [a for a in adapters if a[0] != ad_name]
    for name, adapter in adapters:
        transaction.savepoint()
        adapter(self, workbook, self.dataset_project, self.dataset_name)
    # Resolve deferred cross-references until no further progress is
    # made; bail out with an exception if the count stops shrinking.
    check = len(self.deferred)
    while len(self.deferred) > 0:
        new = self.solve_deferred()
        logger.info("solved %s of %s deferred references" % (
            check - new, check))
        if new == check:
            raise Exception("%s unsolved deferred references: %s" % (
                len(self.deferred), self.deferred))
        check = new
    logger.info("Rebuilding bika_setup_catalog")
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    bsc.clearFindAndRebuild()
    logger.info("Rebuilding bika_catalog")
    bc = getToolByName(self.context, 'bika_catalog')
    bc.clearFindAndRebuild()
    logger.info("Rebuilding bika_analysis_catalog")
    bac = getToolByName(self.context, 'bika_analysis_catalog')
    bac.clearFindAndRebuild()
    message = PMF("Changes saved.")
    self.context.plone_utils.addPortalMessage(message)
    self.request.RESPONSE.redirect(portal.absolute_url())
def get_events(context, start=None, end=None, limit=None,
               ret_mode=RET_MODE_BRAINS, expand=False,
               sort='start', sort_reverse=False, **kw):
    """Return all events as catalog brains, possibly within a given
    timeframe.

    :param context: [required] A context object.
    :type context: Content object

    :param start: Date, from which on events should be searched.
    :type start: Python datetime.

    :param end: Date, until which events should be searched.
    :type end: Python datetime

    :param limit: Number of items to be returned.
    :type limit: integer

    :param ret_mode: Return type of search results. These options are
                     available:
                         * 1 (brains): Return results as catalog brains.
                         * 2 (objects): Return results as IEvent and/or
                           IOccurrence objects.
                         * 3 (accessors): Return results as IEventAccessor
                           wrapper objects.
    :type ret_mode: integer [1|2|3]

    :param expand: Expand the results to all occurrences (within a
                   timeframe, if given). With this option set to True, the
                   resultset also includes the event's recurrence
                   occurrences and is sorted by the start date. Only
                   available in ret_mode 2 (objects) and 3 (accessors).
    :type expand: boolean

    :param sort: Catalog index id to sort on.
    :type sort: string

    :param sort_reverse: Change the order of the sorting.
    :type sort_reverse: boolean

    :returns: Portal events, matching the search criteria.
    :rtype: catalog brains, event objects or IEventAccessor object wrapper,
            depending on ret_mode.
    """
    start, end = _prepare_range(context, start, end)
    query = {}
    query['object_provides'] = IEvent.__identifier__
    query.update(start_end_query(start, end))
    if 'path' not in kw:
        # limit to the current navigation root, usually (not always) site
        portal = getSite()
        navroot = getNavigationRootObject(context, portal)
        query['path'] = '/'.join(navroot.getPhysicalPath())
    else:
        query['path'] = kw['path']
    # Sorting
    # In expand mode we sort after calculation of recurrences again. But we
    # need to leave this sorting here in place, since no sort definition could
    # lead to arbitrary results when limiting with sort_limit.
    query['sort_on'] = sort
    if sort_reverse:
        query['sort_order'] = 'reverse'
    # cannot limit before resorting or expansion, see below
    query.update(kw)
    cat = getToolByName(context, 'portal_catalog')
    result = cat(**query)
    # unfiltered catalog results are already sorted correctly on brain.start
    # filtering on start/end requires a resort, see docstring below and
    # p.a.event.tests.test_base_module.TestGetEventsDX.test_get_event_sort
    if sort in ('start', 'end'):
        result = filter_and_resort(context, result, start, end, sort,
                                   sort_reverse)
        # Limiting a start/end-sorted result set is possible here
        # and provides an important optimization BEFORE costly expansion
        if limit:
            result = result[:limit]
    if ret_mode in (RET_MODE_OBJECTS, RET_MODE_ACCESSORS):
        if expand is False:
            result = [_obj_or_acc(it.getObject(), ret_mode) for it in result]
        else:
            result = expand_events(result, ret_mode, start, end, sort,
                                   sort_reverse)
    # Limiting a non-start-sorted result set can only happen here
    if limit:
        result = result[:limit]
    return result
def get_repository_roots(self):
    """Return repository roots: first from the acquisition chain of the
    published object, otherwise from the site root's direct children
    (avoiding a catalog query for performance reasons)."""
    chain_roots = [obj for obj in aq_chain(self.published)
                   if IRepositoryRoot.providedBy(obj)]
    if chain_roots:
        return chain_roots
    return [obj for obj in getSite().objectValues()
            if IRepositoryRoot.providedBy(obj)]
def _get_current_user(self):
    """Return the id of the currently authenticated member."""
    membership_tool = getToolByName(getSite(), "portal_membership")
    return membership_tool.getAuthenticatedMember().getId()
def _get_user(self, user_id):
    """Return the member object for ``user_id``."""
    membership_tool = getToolByName(getSite(), "portal_membership")
    return membership_tool.getMemberById(user_id)
def migrateCustomAT(fields_mapping, src_type, dst_type, dry_run=False):
    """ Try to get types infos from archetype_tool, then set a migrator and pass it given values. There is a dry_run mode that allows to check the success of a migration without committing. """
    portal = getSite()
    # if the type still exists get the src_meta_type from the portal_type
    portal_types = getToolByName(portal, 'portal_types')
    fti = portal_types.get(src_type, None)
    # Check if the fti was removed or replaced by a DX-implementation
    if fti is None or IDexterityFTI.providedBy(fti):
        # Get the needed info from an instance of the type
        catalog = portal.portal_catalog
        brains = catalog(portal_type=src_type, sort_limit=1)
        if not brains:
            # no item? assume stuff
            is_folderish = False
            src_meta_type = src_type
        else:
            src_obj = brains[0].getObject()
            if IDexterityContent.providedBy(src_obj):
                logger.error('%s should not be dexterity object!',
                             src_obj.absolute_url())
            is_folderish = getattr(src_obj, 'isPrincipiaFolderish', False)
            src_meta_type = src_obj.meta_type
    else:
        # Get info from at-fti
        src_meta_type = fti.content_meta_type
        archetype_tool = getToolByName(portal, 'archetype_tool', None)
        # NOTE(review): if ``archetype_tool`` is None this raises
        # AttributeError, and when no registered type matches the
        # meta_type, ``is_folderish`` stays unbound below — confirm both
        # cases are impossible in practice.
        for info in archetype_tool.listRegisteredTypes():
            # lookup registered type in archetype_tool with meta_type
            # because several portal_types can use same meta_type
            if info.get('meta_type') == src_meta_type:
                klass = info.get('klass', None)
                is_folderish = klass.isPrincipiaFolderish
    migrator = makeCustomATMigrator(context=portal, src_type=src_type,
                                    dst_type=dst_type,
                                    fields_mapping=fields_mapping,
                                    is_folderish=is_folderish,
                                    dry_run=dry_run)
    if migrator:
        migrator.src_meta_type = src_meta_type
        migrator.dst_meta_type = ''
        walker_settings = {'portal': portal,
                           'migrator': migrator,
                           'src_portal_type': src_type,
                           'dst_portal_type': dst_type,
                           'src_meta_type': src_meta_type,
                           'dst_meta_type': '',
                           'use_savepoint': True}
        if dry_run:
            # Only migrate a single object when checking feasibility.
            walker_settings['limit'] = 1
        walker = CustomQueryWalker(**walker_settings)
        walker.go()
        walker_infos = {'errors': walker.errors,
                        'msg': walker.getOutput().splitlines(),
                        'counter': walker.counter}
        for error in walker.errors:
            logger.error(error.get('message'))
        if dry_run:
            # Roll back everything that the trial migration touched.
            transaction.abort()
        return walker_infos
def __init__(self, context, request):
    """Store the context, the request and the current portal."""
    self.context = context
    self.request = request
    self.portal = getSite()
def pretty_title_or_id(self):
    """Delegate to the module-level ``pretty_title_or_id`` helper, using
    the current site as the context."""
    site = getSite()
    return pretty_title_or_id(site, self)
def __call__(self):
    """ returns a json with candidates of duplication """
    lang = getattr(self.context, 'getLanguage', lambda: 'en')
    if lang() != 'en':
        # suggestions only work for English
        return
    settings = IEEASimilaritySettings(self.context).settings
    max_difference = float(settings.max_difference) or MAX_DIFFERENCE
    equiv_types = []
    if settings.equivalent_content_types:
        # Each configured entry is a comma separated list of portal
        # types considered equivalent to each other.
        equiv_types = [equiv_set.lower().replace(' ', '').split(',')
                       for equiv_set in settings.equivalent_content_types]
    max_suggestions = settings.number_of_suggestions or 5
    min_words = settings.min_words or 3
    catalog = getSite().portal_catalog
    candidates = OrderedDict()
    title = self.request.get('title')
    words = [word for word in simple_preprocess(title, deacc=True)
             if not settings.remove_stopwords or word not in STOPWORDS]
    all_content_types = self.request.get('all_content_types')
    portal_type = self.request.get('portal_type')
    equivs = []
    for equiv_set in equiv_types:
        if portal_type in equiv_set:
            equivs.extend(equiv_set)
    if not equivs:
        equivs = [portal_type]
    if len(words) < min_words:
        # Too few significant words for a meaningful similarity query.
        return json.dumps(candidates)
    dictionary, corpus, lsi, index = get_gensim_data()
    vec_bow = dictionary.doc2bow([stem(word) for word in words])
    vec_lsi = lsi[vec_bow]
    sims = index[vec_lsi]
    sims = sorted(enumerate(sims), key=lambda item: -item[1])
    previous_note = 0
    threshold = float(
        self.request.get('threshold', self.reference_threshold(len(words))))
    for sim in sims:
        if sim[1] < threshold or (
                previous_note - sim[1] > max_difference):
            # if the difference in similarity is big,
            # next candidates are no longer interesting
            break
        previous_note = sim[1]
        # A 32-character token (ignoring dashes) in the matched document
        # is treated as its UID.
        for word_id in corpus[sim[0]]:
            if len(dictionary[word_id[0]].replace('-', '')) == 32:
                uid = dictionary[word_id[0]]
                break
        try:
            # NameError is raised here when no UID token was found above.
            brain = catalog({'UID': [uid, uid.upper()]})[0]
        except NameError as err:
            logger.warn('Catalog UID not found: %s', err)
        except IndexError as err:
            logger.warn('Object with UID %s not found in catalog: %s',
                        uid, err)
        else:
            if all_content_types or brain.portal_type.lower() in equivs:
                try:
                    latest = brain.getObject()
                    versions = queryAdapter(latest, IGetVersions)
                    if versions is not None:
                        latest = versions.latest_version()
                    # on edit we don't want the context to be suggested
                    if latest != self.context:
                        url = '/' + latest.absolute_url(1)
                        if url not in candidates:
                            ob_to_candidate(
                                latest, candidates, str(sim[1]))
                except TypeError:
                    ob = brain.getObject()
                    if ob != self.context:
                        url = brain.getURL()
                        if url not in candidates:
                            ob_to_candidate(
                                brain.getObject(), candidates, str(sim[1]))
        if len(candidates) == max_suggestions:
            break
    return json.dumps(candidates)
def unregisterStubAdapter(factory, adapts=None, provides=None, name=u''): sm = getSite().getSiteManager() sm.unregisterAdapter(factory, required=adapts, provided=provides, name=name)
def absolutize_path(path, is_source=True): """Create path including the path of the portal root. The path must be absolute, so starting with a slash. Or it can be a full url. If is_source is true, this is an alternative url that will point to a target (unknown here). If is_source is true, path is the path of a target. An object must exist at this path, unless it is a full url. Return a 2-tuple: (absolute redirection path, an error message if something goes wrong and otherwise ''). """ portal = getSite() err = None is_external_url = False if not path: if is_source: err = _(u"You have to enter an alternative url.") else: err = _(u"You have to enter a target.") elif not path.startswith('/'): if is_source: err = _(u"Alternative url path must start with a slash.") else: # For targets, we accept external urls. # Do basic check. parsed = urlparse(path) if parsed.scheme in ('https', 'http') and parsed.netloc: is_external_url = True else: err = _(u"Target path must start with a slash.") elif '@@' in path: if is_source: err = _(u"Alternative url path must not be a view.") else: err = _(u"Target path must not be a view.") else: context_path = "/".join(portal.getPhysicalPath()) path = "{0}{1}".format(context_path, path) if not err and not is_external_url: catalog = getToolByName(portal, 'portal_catalog') if is_source: # Check whether already exists in storage storage = getUtility(IRedirectionStorage) if storage.get(path): err = _(u"The provided alternative url already exists!") else: # Check whether obj exists at source path. # A redirect would be useless then. if portal.unrestrictedTraverse(path, None) is not None: err = _(u"Cannot use a working path as alternative url.") else: # Check whether obj exists at target path result = catalog.searchResults(path={"query": path}) if len(result) == 0: err = _(u"The provided target object does not exist.") return path, err
def create_payment_text(context, order_data): payment = order_data.order.attrs['payment_method'] payment_text = safe_encode(IPaymentText(getSite()).payment_text(payment)) if payment_text.strip(): return '\n\n' + payment_text.strip() + '\n' return ''
def mtool(self): return getToolByName(getSite(), 'portal_membership')
def setup_various(context): portal = getSite() target_language, is_combined_language, locale = _get_locales_info(portal) _setup_calendar(portal, locale) _setup_visible_ids(portal, target_language, locale) use_new_view_names(portal, types_to_fix=['Plone Site'])
def _addToSourceUsers(self, login, password): acl_users = getToolByName(getSite(), 'acl_users') acl_users.source_users.doAddUser(login, password)
def settings(self): return GlobalSettings(getSite())
def _is_anonymous(self): portal = getSite() portal_membership = getToolByName(portal, "portal_membership") return portal_membership.isAnonymousUser()
def parent(self): site = getSite() return site['news-archive']
def start_end_from_mode(mode, dt=None, context=None): """Return a start and end date from a given mode string, like "today", "past" or "future". This can be used in event retrieval functions. :param mode: One of the following modes: 'all' Show all events. 'past': Show only past events with descending sorting. 'future': Show only future events (default). 'today': Show todays events. 'now': Show todays upcoming events. '7days': Show events until 7 days in future. 'day': Return all events on the given day (dt parameter required) 'week': Show a weeks events, optionally from a given date. These settings override the start and end parameters. Not implemented yet: 'month': Show this month's events. :type mode: string :param dt: Optional datetime for day mode. :type dt: Python datetime """ if not context: context = getSite() now = localized_now(context) start = end = None if mode == 'all': start = None end = None elif mode == 'past': start = None end = now elif mode == 'future': start = now end = None elif mode == 'now': start = now end = dt_end_of_day(now) elif mode == '7days': start = now end = dt_end_of_day(now + timedelta(days=6)) elif mode == 'day' or mode == 'today': if not dt: dt = now # show today start = dt_start_of_day(dt) end = dt_end_of_day(dt) elif mode == 'week': if not dt: dt = now # show this week wkd = dt.weekday() first = first_weekday() if first <= wkd: delta = wkd - first # >= 0 if first > wkd: delta = wkd + 7 - first # > 0 start = dt_start_of_day(dt - timedelta(days=delta)) end = dt_end_of_day(start + timedelta(days=6)) elif mode == 'month': if not dt: dt = now # show this month year = dt.year month = dt.month last_day = monthrange(year, month)[1] # (wkday, days) start = dt_start_of_day(datetime(year, month, 1)) end = dt_end_of_day(datetime(year, month, last_day)) return start, end
def Command(app):
    """One-off cleanup script: tally member-profile ownership and remove
    stale users that never logged in.

    NOTE(review): Python 2 code (print statements). ``users``,
    ``old_users``, ``params``, ``USAGE`` and the *_LOGIN_TIME constants
    are not defined in this view — presumably module globals; verify.
    The original indentation was lost; the whole body is assumed to run
    inside the adopted-user context manager — TODO confirm.
    """
    # Counters reported at the end of the run.
    totals = {
        'missing_mp': 0,
        'good_mp_ownership': 0,
        'fixed_mp_ownership': 0,
        'found_by_email': 0,
        'found_by_id': 0,
        'users_without_email': 0,
        'new_users': 0,
    }
    # Run everything with admin privileges.
    with api.env.adopt_user(username=params.admin_user):
        portal = getSite()
        mps_dir = portal['directory']
        profiles = get_member_profiles(mps_dir)
        # NOTE(review): this guard runs after ``portal`` has already been
        # used above, so it can never trigger usefully as written.
        if not portal:
            raise ValueError(
                "Site is not set. "
                "Please call this script with -O param. "
                "E.g. {}".format(USAGE))
        idx = 0
        tot = len(profiles['by_id'])
        # with open('users.csv', 'rb') as csvfile:
        #     spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
        #     for row in spamreader:
        #         fullname, email, last_login, description = row
        #         tmp = {
        #             'fullname': fullname,
        #             'email': email,
        #             'last_login': last_login,
        #             'description': description
        #         }
        #         old_users[email] = tmp
        # Users partitioned by whether they ever logged in.
        new_users = {
            'no_login': [],
            'login': []
        }
        idx = 0
        tot = len(users)
        # NOTE(review): portal/mps_dir/profiles are re-fetched here,
        # duplicating the lookups above.
        portal = getSite()
        mps_dir = portal['directory']
        profiles = get_member_profiles(mps_dir)
        for usr in users:
            idx += 1
            print '{idx}/{tot}'.format(idx=idx, tot=tot)
            if usr.getProperty('email') not in old_users:
                email = usr.getProperty('email')
                last_login = usr.getProperty('last_login_time')
                mp = profiles['by_email'].get(email)
                # Skip anyone who registered or logged in within the
                # last day — they are genuinely new, not stale.
                if last_login >= ONE_DAY_AGO_LOGIN_TIME or (mp and mp.creation_date >= ONE_DAY_AGO_LOGIN_TIME):
                    print 'user registered in the last day'
                    totals['new_users'] += 1
                    continue
                if last_login == DEFAULT_LOGIN_TIME:
                    # Default login time means the user never logged in.
                    print 'NEW user'
                    new_users['no_login'].append(usr)
                else:
                    print 'OLD user'
                    #new_users['login'].append(usr)
        idx = 0
        #tot = len(new_users['no_login'] + new_users['login'])
        tot = len(new_users['no_login'])
        # Delete never-logged-in users along with their member profile.
        #for usr in new_users['no_login'] + new_users['login']:
        for usr in new_users['no_login']:
            email = usr.getProperty('email')
            print '{idx}/{tot}: removing user: {email}'.format(
                idx=idx, tot=tot, email=email)
            idx += 1
            mp = profiles['by_email'].get(email)
            if mp:
                mps_dir.manage_delObjects([mp.getId()])
            api.user.delete(user=usr)
            # Commit in batches to keep the transaction small.
            if idx % 100 == 0:
                transaction.commit()
        for k in totals:
            logger.warning('Total {k}: {tot}'.format(k=k, tot=totals[k]))
def parent(self): site = getSite() return site['more-events']
def portalurl(self): return getSite().absolute_url()
def upgrade_11_to_2(setuptool): site = getSite() uninstall_mimetype_and_transforms(site) install_mimetype_and_transforms(site)
def post_install(context): site = getSite() pas = site.acl_users _addPlugin(pas)
def request(self): site = hooks.getSite() return site.REQUEST
def site(self): return hooks.getSite()
def _get_users(self): portal = getSite() portal_membership = getToolByName(portal, "portal_membership") return portal_membership.listMembers()
def bounceDepartment(self):
    """Walk every FSDPerson and re-point them at their departments,
    deleting the intermediate DepartmentalMembership relation objects.

    NOTE(review): this block was an unfinished draft with prose notes
    interleaved in the code and several syntax errors. The prose has been
    converted to comments and the syntax fixed; the remaining semantic
    questions are marked TODO(review).
    """
    site = getSite()
    directory = site.people
    people = directory.listFolderContents(
        contentFilter={"portal_type": "FSDPerson"})
    for person in people:
        userID = person.getId()
        pUID = person.UID()
        refs = person.at_references.items()
        # Fixed: the original assigned the bound method itself
        # (``self.getDepartments``) and then iterated over it.
        depts = self.getDepartments(person)
        for dept in depts:
            for ref in refs:
                if ref.relationship != "DepartmentalMembership":
                    continue
                # Fixed: the original passed an undefined name ``context``.
                rc = getToolByName(site, 'reference_catalog')
                deptrels = rc.searchResults({
                    'sourceUID': pUID,
                    'relationship': 'DepartmentalMembership',
                })
                for rel in deptrels:
                    reltid = rel.targetUID
                    dept = self.reference_catalog.lookupObject(reltid)
                    # membership content object (typo ``relboj`` fixed)
                    relobj = rel.getObject()
                    cobj = relobj.getContentObject()
                    # Snapshot of the membership data; TODO(review): the
                    # draft never persisted these values anywhere.
                    position = cobj.position
                    title = cobj.title
                    primary = cobj.primary_department
                    deptadd = cobj.dept_officeAddress
                    deptstreet = cobj.dept_streetAddress
                    deptcity = cobj.dept_city
                    deptstate = cobj.dept_state
                    deptzip = cobj.dept_zip
                    deptphone = cobj.dept_officePhone
                    quarter = cobj.quarter
                    offhours = cobj.officeHours
                    sumbio = cobj.summarybio
                    # Drop the relation object and link the person to the
                    # department directly.
                    api.content.delete(obj=relobj)
                    person.setDepartment(dept)
    # TODO(review): the draft ended with free-form notes, kept verbatim:
    # "get departmental membership and save although how is that different
    #  than the related record?"
    # "delete relationship (use the api) create a new relationship with
    #  the membership info"


def deptProcessing(self, person):
    """Collect the membership details of *person* for each department.

    NOTE(review): as in the original draft, the gathered values are bound
    to locals and then discarded — this function currently has no effect.
    """
    depts = self.getDepartments(person)
    for dept in depts:
        membship = dept.getMembershipInformation(person)
        position = membship.position
        title = membship.title
        primary = membship.primary_department
        deptadd = membship.dept_officeAddress
        deptstreet = membship.dept_streetAddress
        deptcity = membship.dept_city
        deptstate = membship.dept_state
        deptzip = membship.dept_zip
        deptphone = membship.dept_officePhone
        quarter = membship.quarter
        offhours = membship.officeHours
        sumbio = membship.summarybio
    return


def AddEditRole(self):
    """Grant editing local roles to each FSDPerson on their own folder.

    Fixed from the draft: missing ``:`` after the inner ``for``.
    """
    site = getSite()
    directory = site.people
    people = directory.listFolderContents(
        contentFilter={"portal_type": "FSDPerson"})
    for person in people:
        userId = person.getId()
        # TODO(review): ``userId`` is a string id; calling getDepartments()
        # on it looks wrong — presumably ``person.getDepartments()``.
        departments = userId.getDepartments()
        for department in departments:
            # TODO(review): ``getMembershipInformation`` is undefined in
            # this scope; presumably ``department.getMembershipInformation``.
            membership = getMembershipInformation(userId)
            roles = list()
            userfolder = directory[userId]
            # TODO(review): bound method assigned without call, as in draft.
            membershipt = userId.getDepartmentalMembership
            roles = list(userfolder.get_local_roles_for_userid(userId))
            roles.extend(['Owner', 'User Preferences Editor', u'Reviewer'])
            # eliminate duplicated roles
            roles = list(set(roles))
            userfolder.manage_setLocalRoles(userId, roles)
    return "done"


# NOTE(review): trailing residue from the draft, disabled because it is
# not valid at module level (``site``, ``context`` and ``state`` are
# undefined here; ``return`` outside a function):
# site.reference_catalog.lookupObject('ce380ef0f10a85beb864025928e1819b')
# mtool = context.portal_membership
# if not mtool.checkPermission('Relations: Manage content relations', context):
#     state.set(status='failure',
#               portal_status_message="Insufficient privileges.")
#     return state