def evolve(root): former_id = None # Create lazily, in case we don't need it profiles = find_profiles(root) search = ICatalogSearch(root) catalog = find_catalog(root) creators = catalog['creator']._fwd_index.keys() modifiers = catalog['modified_by']._fwd_index.keys() userids = set(creators) | set(modifiers) for userid in userids: if userid not in profiles: if former_id is None: former_id = make_unique_name(profiles, 'formeruser') print "Creating profile for former user content:", former_id former_profile = create_content( IProfile, firstname='Former', lastname='User' ) profiles[former_id] = former_profile count, docids, resolver = search(creator=userid) for docid in docids: doc = resolver(docid) print "Updating 'creator' for", model_path(doc) doc.creator = former_id count, docids, resolver = search(modified_by=userid) for docid in docids: doc = resolver(docid) print "Updating 'modified_by' for", model_path(doc) doc.modified_by = former_id
def init_repo(repo, context): if context.__name__ == 'TEMP': return if IFolder.providedBy(context): for child in context.values(): init_repo(repo, child) try: repo.history(context, True) return except: # Not in repo pass version = queryAdapter(context, IObjectVersion) if version is not None: print "Updating version for %s" % model_path(context) repo.archive(version) container = queryAdapter(context, IContainerVersion) if container is not None: print "Updating container version for %s" % model_path(context) user = getattr(context, 'creator', None) if user is None: user = get_setting(context, 'system_user', 'admin') repo.archive_container(container, user) context._p_deactivate() # try not to run out of memory
def evolve(root):
    """Reassign content owned by deleted users to a single inactive
    'former user' profile.

    Like the earlier former-user evolve step, but additionally puts the
    replacement profile through the ``security`` workflow and transitions
    it straight to the ``inactive`` state.
    """
    former_id = 'formeruser'
    profiles = find_profiles(root)
    search = ICatalogSearch(root)
    catalog = find_catalog(root)
    # All userids ever recorded as creator or last-modifier of any document.
    creators = catalog['creator']._fwd_index.keys()
    modifiers = catalog['modified_by']._fwd_index.keys()
    userids = set(creators) | set(modifiers)
    for userid in userids:
        if userid not in profiles:
            # Create the shared replacement profile only once.  NOTE(review):
            # if a profile literally named 'formeruser' already exists, it is
            # reused as-is without the workflow initialization below — verify
            # that is intended.
            if former_id not in profiles:
                workflow = get_workflow(IProfile, 'security')
                former_id = make_unique_name(profiles, 'formeruser')
                print "Creating profile for former user content:", former_id
                former_profile = create_content(
                    IProfile, firstname='Former', lastname='User')
                profiles[former_id] = former_profile
                workflow.initialize(former_profile)
                # Former users should not show up as active members.
                workflow.transition_to_state(former_profile, None, 'inactive')
            count, docids, resolver = search(creator=userid)
            for docid in docids:
                doc = resolver(docid)
                print "Updating 'creator' for", model_path(doc)
                doc.creator = former_id
            count, docids, resolver = search(modified_by=userid)
            for docid in docids:
                doc = resolver(docid)
                print "Updating 'modified_by' for", model_path(doc)
                doc.modified_by = former_id
def _getInfo(profile, content, ifaces=None):
    """Build the flat info dict describing *content* (authored by *profile*)
    used by the activity/content feeds.

    ``ifaces`` optionally restricts the context-interface lookup; when not
    given it defaults to the supported context interfaces of the events
    tool found for *content*.
    """
    ifaces = ifaces or find_events(content).supported_ctx_ifaces()
    # The "context" is the nearest ancestor providing one of the supported
    # interfaces (e.g. a community); absent that, all context_* fields are
    # None.
    context = find_supported_interface(content, ifaces)
    if context is None:
        context_name = context_url = context_creator = context_type = None
    else:
        context_name = context.title
        context_url = model_path(context)
        context_creator = context.creator
        context_type = get_content_type(context)
    # Top three tags on this document by count, if a tagging tool exists.
    tagger = find_tags(content)
    if tagger is not None:
        cloud = list(tagger.getCloud(items=(content.docid,)))
        tag_counts = sorted(cloud, key=lambda x: x[1], reverse=True)[:3]
        tags = [x[0] for x in tag_counts]
    else:
        tags = ()
    content_type = get_content_type(content)
    desc = getattr(content, 'description', '')
    # Old-style conditional: truncate to 80 chars with ellipsis when long.
    short = len(desc) > 80 and '%s...' % desc[:80] or desc
    # NOTE(review): comment_count is False (not 0) for non-post content —
    # downstream consumers apparently distinguish "no comments possible"
    # from "zero comments"; confirm before changing.
    if IPosts.providedBy(content):
        comment_count = len(content.get('comments', ()))
    else:
        comment_count = False
    content_creator = profile.__name__
    if IComment.providedBy(content):
        # my content filter needs to know if a comment was made inside my post
        content_creator = content.__parent__.__parent__.creator
    if hasattr(content, 'likes'):
        likes = len(content.likes)
    else:
        likes = 0
    return {'content_type': content_type.getTaggedValue('name'),
            'userid': profile.__name__,
            'context_name': context_name,
            'context_url': context_url,
            'context_creator': context_creator,
            'context_type': context_type.getTaggedValue('name') if context_type else None,
            'content_creator': content_creator,
            'url': model_path(content),
            'title': content.title,
            'description': desc,
            'short_description': short,
            'allowed': principals_allowed_by_permission(content, 'view'),
            'comment_count': comment_count,
            'tags': tags,
            #XXX
            'author': profile.title,
            'profile_url': '/profiles/%s' % profile.__name__,
            'thumbnail': '/profiles/%s/profile_thumbnail' % profile.__name__,
            'timestamp': _NOW(),
            'likes': likes}
def to_profile_inactive(ob):
    """Workflow callback: lock down *ob* (a profile) for the inactive state.

    Installs a view-only ACL (authenticated users, admins, and the profile's
    creator) terminated by NO_INHERIT, logs the ACL delta when it actually
    changed, and records the new security state on the object.

    Fix: dropped the unused ``msg = None`` local (the parallel
    ``to_profile_active`` callback returns a message; this one never did).
    """
    acl = [
        (Allow, 'system.Authenticated', ('view_only',)),
        (Allow, 'group.KarlUserAdmin', ADMINISTRATOR_PERMS + ('view_only',)),
        (Allow, 'group.KarlAdmin', ADMINISTRATOR_PERMS + ('view_only',)),
        (Allow, ob.creator, GUEST_PERMS + ('view_only',)),
        NO_INHERIT,
    ]
    added, removed = acl_diff(ob, acl)
    if added or removed:
        # Only touch __acl__ (and log) when something actually changes.
        ob.__acl__ = acl
        log.info('profile (%s) to-inactive, added: %s, removed: %s' %
                 (model_path(ob), added, removed))
    ob.security_state = 'inactive'
    log.info('profile (%s) security_state changed to %s' %
             (model_path(ob), ob.security_state))
def exercise(app): """ Simulate the first request to the application in order to prime the ZODB cache. Performing this operation during start up, it is hoped to delay user requests being handed off to this worker by mod_wsgi until after the cache is already primed. Otherwise the first, slow, cache priming request would fall to an unlucky user. """ # Need to be logged in as somebody. Use the first user we find that is a # member of some group. root, closer = get_root(app) for profile in root['profiles'].values(): user = root.users.get_by_id(profile.__name__) if user['groups']: break request = webob.Request.blank('/') request.environ['repoze.who.identity'] = user user['repoze.who.userid'] = user['id'] home, extra_path = get_user_home(root, request) path = model_path(home, *extra_path) request.path_info = path response = request.get_response(app) if response.status_int != 200: logger = get_logger() logger.warn('Status of %s when priming cache. Response body:\n%s' % (response.status, response.body))
def evolve(root): catalog = find_catalog(root) mimetype_index = catalog['mimetype'] search = ICatalogSearch(root) docid_for_addr = catalog.document_map.docid_for_address count, docids, resolver = search( interfaces=[ICommunityFile], mimetype={ 'operator': 'or', 'query': [ 'application/x-download', 'application/x-application', 'application/binary', 'application/octet-stream', ]} ) for docid in docids: doc = resolver(docid) mimetype = mimetypes.guess_type(doc.filename)[0] if mimetype is not None and mimetype != doc.mimetype: addr = model_path(doc) print "Updating mimetype for %s: %s" % (addr, mimetype) doc.mimetype = mimetype mimetype_index.reindex_doc(docid_for_addr(addr), doc)
def get_batch(context, request, search_interfaces=(IContent,), filter_func=None):
    """Return a batch of results and term sequence for a search request.

    If the user provided no terms, the returned batch will be None and the
    term sequence will be empty.

    Fix: the ``search_interfaces`` default was a mutable list shared across
    calls; replaced with an equivalent tuple (callers passing their own
    sequence are unaffected).
    """
    batch = None
    terms = ()
    kind = request.params.get("kind")
    if not kind:
        # Search form
        query, terms = make_query(context, request, search_interfaces)
        log.debug('query: %s' % query)
        # Scope the search to the current context, unless it is the root.
        context_path = model_path(context)
        if context_path and context_path != '/':
            query['path'] = {'query': context_path}
        # NOTE(review): the 'allowed' security filter is deliberately
        # disabled here (the sibling get_batch applies it) — confirm.
        #principals = effective_principals(request)
        #query['allowed'] = {'query':principals, 'operator':'or'}
        batch = get_catalog_batch_grid(context, request,
                                       filter_func=filter_func, **query)
    else:
        # LiveSearch
        text_term = request.params.get('body')
        if text_term:
            searcher = queryUtility(IGroupSearchFactory, kind)
            if searcher is None:
                # If the 'kind' we got is not known, return an error
                fmt = "The LiveSearch group %s is not known"
                raise HTTPBadRequest(fmt % kind)
            batch = searcher(context, request, text_term).get_batch()
            terms = [text_term, kind]
    return batch, terms
def get_batch(context, request):
    """Return a batch of results and a sequence of search terms.

    When the user supplied no usable terms, the batch is None and the
    term sequence is empty.
    """
    batch = None
    terms = ()
    kind = request.params.get("kind")
    if kind:
        # LiveSearch: delegate to the registered group-search factory.
        text_term = request.params.get("body")
        if text_term:
            searcher = queryUtility(IGroupSearchFactory, kind)
            if searcher is None:
                # If the 'kind' we got is not known, return an error
                fmt = "The LiveSearch group %s is not known"
                raise HTTPBadRequest(fmt % kind)
            batch = searcher(context, request, text_term).get_batch()
            terms = [text_term, kind]
    else:
        # Plain search form.
        query, terms = make_query(context, request)
        if terms:
            # Scope to the current context unless we're at the root.
            context_path = model_path(context)
            if context_path and context_path != "/":
                query["path"] = {"query": context_path}
            principals = effective_principals(request)
            query["allowed"] = {"query": principals, "operator": "or"}
            batch = get_catalog_batch_grid(context, request, **query)
    return batch, terms
def test_getMessageTarget_report_alias_w_subdomain(self):
    """A mail address on the configured list subdomain whose local part is a
    registered alias should resolve to the aliased people-directory report.
    """
    from repoze.bfg.interfaces import ISettings
    from repoze.bfg.testing import registerUtility
    from repoze.bfg.traversal import model_path
    from zope.interface import directlyProvides
    from karl.models.interfaces import IPeopleDirectory
    # Settings stub providing only the subdomain the mailin dispatcher reads.
    class DummySettings:
        system_list_subdomain = 'lists.example.com'
    registerUtility(DummySettings(), ISettings)
    # Site graph: /communities/testing plus /people/section/extant, with
    # /people marked as the people directory.
    context = self._makeRoot()
    cf = context['communities'] = self._makeContext()
    cf['testing'] = self._makeContext()
    pd = context['people'] = self._makeContext()
    directlyProvides(pd, IPeopleDirectory)
    section = pd['section'] = self._makeContext()
    extant = section['extant'] = self._makeContext()
    # Register the alias: 'testing' -> path of the extant report.
    context.list_aliases['testing'] = model_path(extant)
    mailin = self._makeOne(context)
    message = DummyMessage()
    message.to = ('*****@*****.**',)
    info = mailin.getMessageTargets(message)
    self.failIf(info.get('error'), info)
    targets = info['targets']
    self.assertEqual(len(targets), 1)
    info = targets[0]
    # Alias resolved to the report, not to a community/tool.
    self.assertEqual(info['report'], 'section+extant')
    self.assertEqual(info['community'], None)
    self.assertEqual(info['tool'], None)
    self.failIf(info.get('in_reply_to'), info)
def evolve(site): offices = site.get('offices') if offices is None: return for doc in postorder(offices): if hasattr(doc, '__custom_acl__'): continue try: ct = get_content_type(doc) except: continue if ct is None: continue wf = get_workflow(ct, 'security', doc) if wf is None: continue if wf.name != 'intranet-content': continue print 'Resetting workflow for', model_path(doc) wf.reset(doc) _reindex(offices)
def to_profile_active(ob, info):
    """Workflow callback: install the ACL for an active profile.

    Grants the profile's creator member permissions, admins administrator
    permissions, staff and the creator's community groups guest permissions,
    all with 'view_only' added; authenticated users get bare 'view_only'.
    The ACL is terminated with NO_INHERIT.  Returns a log message when the
    ACL actually changed, else None.
    """
    acl = [
        (Allow, ob.creator, MEMBER_PERMS + ('view_only',)),
    ]
    acl.append((Allow, 'group.KarlUserAdmin',
                ADMINISTRATOR_PERMS + ('view_only',)))
    acl.append((Allow, 'group.KarlAdmin',
                ADMINISTRATOR_PERMS + ('view_only',)))
    acl.append((Allow, 'group.KarlStaff', GUEST_PERMS + ('view_only',)))
    # Extend guest access to every community group the creator belongs to.
    users = find_users(ob)
    user = users.get_by_id(ob.creator)
    if user is not None:
        groups = user['groups']
        for group, role in get_community_groups(groups):
            c_group = 'group.community:%s:%s' % (group, role)
            acl.append((Allow, c_group, GUEST_PERMS + ('view_only',)))
    acl.append((Allow, 'system.Authenticated', ('view_only',)))
    acl.append(NO_INHERIT)
    msg = None
    added, removed = acl_diff(ob, acl)
    if added or removed:
        # Only write and reindex when the ACL actually changed.
        ob.__acl__ = acl
        msg = ts('to-active', model_path(ob), added, removed)
        _reindex(ob, texts=True)
        _reindex_peopledir(ob)
    return msg
def evolve(context):
    """Add a default category and a default layer to every calendar.

    The 'set_created' event handler is unsubscribed for the duration because
    it would bump the owning community's content_modified ("Last Activity")
    timestamp; created/modified are therefore stamped by hand.

    Fix: the original unconditionally created a new category (and layer)
    even when the calendar already had one, then built the layer from the
    path of the *new*, never-attached category — model_path on an object
    outside the tree.  Content is now only created when missing, and the
    layer always references the category actually stored in the calendar.
    """
    registry = getSiteManager()
    registry.adapters.unsubscribe(
        (IContent, IObjectWillBeAddedEvent), None, set_created)
    try:
        search = ICatalogSearch(context)
        default_category_name = ICalendarCategory.getTaggedValue('default_name')
        default_layer_name = ICalendarLayer.getTaggedValue('default_name')
        now = datetime.now()
        cnt, docids, resolver = search(interfaces=[ICalendar])
        for docid in docids:
            calendar = resolver(docid)
            if default_category_name not in calendar:
                default_category = create_content(ICalendarCategory, 'Default')
                default_category.created = default_category.modified = now
                calendar[default_category_name] = default_category
            # Use the category that is actually in the tree for the layer.
            default_category = calendar[default_category_name]
            if default_layer_name not in calendar:
                local_layer = create_content(
                    ICalendarLayer, "This Calendar's Events Only", 'blue',
                    [model_path(default_category)])
                local_layer.created = local_layer.modified = now
                calendar[default_layer_name] = local_layer
    finally:
        # Always restore the handler, even if the evolve blows up midway.
        registry.adapters.subscribe(
            (IContent, IObjectWillBeAddedEvent), None, set_created)
def test_event(self):
    """The subscriber registers an alias mapping onto the list's parent."""
    from repoze.bfg.traversal import model_path
    aliases, mlist = self._makeMailinglist()
    # Sanity: no aliases exist before the event fires.
    self.failIf(aliases)
    self._callFUT(mlist, None)
    expected = [('alias', model_path(mlist.__parent__))]
    self.assertEqual(aliases.items(), expected)
def clear_mailinglist_aliases(peopledir):
    """Remove every site list alias whose target lies inside *peopledir*."""
    aliases = find_site(peopledir).list_aliases
    prefix = model_path(peopledir)
    # Snapshot matching keys first so deletion can't upset iteration.
    doomed = [alias for alias, target in aliases.items()
              if target.startswith(prefix)]
    for alias in doomed:
        del aliases[alias]
def __init__(self, context, request):
    """Capture the request's user plus catalog/tag utilities for *context*."""
    self.context = context
    self.request = request
    # Userid of the authenticated user (None when anonymous).
    self.username = authenticated_userid(request)
    self.path = model_path(context)
    self.catalog = find_catalog(context)
    self.tags = find_tags(context)
def get_wikitoc_data(context, request):
    """Collect table-of-contents data for every wiki page sharing this
    page's parent wiki.

    Returns ``{'items': [...]}`` where each item carries id/name/title,
    author info, tags, and ISO-formatted created/modified stamps.

    Fix: ``model_url`` was called on the profile *before* checking whether
    the author still has one, which blows up for pages whose creator has
    been deleted.  The URL is now only built for an existing profile
    (``profile_url`` is None otherwise).
    """
    wikiparent = context.__parent__
    search = getAdapter(context, ICatalogSearch)
    count, docids, resolver = search(
        path=model_path(wikiparent),
        interfaces=[IWikiPage,]
    )
    items = []
    profiles = find_profiles(context)
    for docid in docids:
        entry = resolver(docid)
        tags = getMultiAdapter((entry, request), ITagQuery).tagswithcounts
        author = entry.creator
        profile = profiles.get(author, None)
        if profile is not None:
            profile_url = model_url(profile, request)
            author_name = '%s %s' % (profile.firstname, profile.lastname)
        else:
            # Author's profile is gone; fall back to the raw userid.
            profile_url = None
            author_name = author
        items.append(dict(
            id="id_" + entry.__name__,
            name=entry.__name__,
            title=entry.title,
            author=author,
            author_name=author_name,
            profile_url=profile_url,
            tags=[tag['tag'] for tag in tags],
            created=entry.created.isoformat(),
            modified=entry.modified.isoformat(),
        ))
    result = dict(
        items=items,
    )
    return result
def _show_communities_view_helper(context, request, prefix='', **kw):
    """Shared helper for the communities listing views: build the catalog
    query (title sort, direct children of *context*, security-filtered),
    apply optional title-prefix and full-text qualifiers, and fetch the
    batch.

    NOTE(review): no return statement is visible here — this appears to be
    the first part of a longer function; the rest presumably assembles the
    response from batch_info/qualifiers/error.
    """
    # Grab the data for the two listings, main communities and portlet
    communities_path = model_path(context)
    query = dict(
        sort_index='title',
        interfaces=[ICommunity],
        # depth=1: only direct children (the communities themselves).
        path={'query': communities_path, 'depth': 1},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        **kw
    )
    # Human-readable descriptions of any filters applied, for the template.
    qualifiers = []
    titlestartswith = request.params.get('titlestartswith')
    if titlestartswith:
        # Range query: (start, start) matches titles beginning with the prefix.
        query['titlestartswith'] = (titlestartswith, titlestartswith)
        qualifiers.append("Communities that begin with '%s'" % titlestartswith)
    body = request.params.get('body')
    if body:
        query['texts'] = body
        qualifiers.append('Search for "%s"' % body)
    error = None
    try:
        batch_info = get_catalog_batch_grid(context, request, **query)
    except ParseError, e:
        # Bad full-text syntax: show an empty batch plus the error message.
        batch_info = {'entries': [], 'batching_required': False}
        error = 'Error: %s' % e
def __call__(self):
    """Return the offline-app URL for this model's path.

    Raises ValueError when the 'offline_app_url' setting is missing/empty.
    """
    base = get_setting(self.model, 'offline_app_url')
    if not base:
        raise ValueError('offline_app_url is not set')
    # Drop a single trailing slash so joining with the path stays clean.
    if base.endswith('/'):
        base = base[:-1]
    return base + model_path(self.model)
def get_recent_items_batch(community, request, size=10):
    """Return a batch of the most recently modified content in *community*,
    filtered by the current user's effective principals.
    """
    query = dict(
        interfaces=[ICommunityContent],
        sort_index="modified_date",
        reverse=True,
        batch_size=size,
        path={'query': model_path(community)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
    )
    return get_catalog_batch_grid(community, request, **query)
def reindex_content(obj, event):
    """Reindex a single piece of content (non-recursive); an
    IObjectModified event subscriber.
    """
    catalog = find_catalog(obj)
    if catalog is None:
        return
    address = model_path(obj)
    docid = catalog.document_map.docid_for_address(address)
    catalog.reindex_doc(docid, obj)
def directory_view(context, request):
    """directory view

    Show a list of packages or files in a directory.
    """
    listing = []
    for name, item in context.items():
        listing.append((name, model_path(item)))
    return render_template_to_response(
        'templates/page.pt',
        project='pkgindex',
        items=listing)
def content_to_inherits(ob, info):
    """Workflow callback: drop any local ACL so *ob* inherits security from
    its parent.  Returns a log message when an ACL was removed, else None.
    """
    added, removed = acl_diff(ob, {})
    if not hasattr(ob, '__acl__'):
        msg = None
    else:
        del ob.__acl__
        msg = ts('content-inherited', model_path(ob), added, removed)
    _reindex(ob)
    return msg
def number_of_comments(forum, request):
    """Count the comments under *forum* visible to the current user."""
    search = ICatalogSearch(forum)
    query = dict(
        interfaces=[IComment],
        path={'query': model_path(forum)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
    )
    total, docids, resolver = search(**query)
    return total
def batch_images(context, request,
                 get_image_info=get_image_info, # unittest
                 get_images_batch=get_images_batch): # unittest
    """JSON-ready batch of images for the imagedrawer dialog.

    The 'source' request param selects the scope (0=My Recent,
    1=This Community, other=All Karl); 'start'/'limit' control batching,
    'sort_on'/'reverse' the ordering.
    """
    # Find query parameters based on the 'source' param,
    # which signifies the selection index of the source button
    # in the imagedrawer dialog.
    source = int(request.params.get('source', '0'))
    if source == 0:
        # My Recent
        # NOTE(review): creator is hard-coded to 'admin' here, which does
        # not match "My Recent" — looks like it should be the authenticated
        # user; confirm against callers/tests before changing.
        creator = 'admin'
        community_path = None
    elif source == 1:
        # This Community
        creator = None
        community = find_community(context)
        # batching api requires the community path
        community_path = model_path(community)
    else:
        # All Karl
        creator = None
        community_path = None
    # batching
    # Decide start and size here, don't let the lower levels
    # apply their default. This allows us to enforce
    # a MINIMAL_BATCH size.
    batch_start = int(request.params.get('start', '0'))
    batch_size = max(int(request.params.get('limit', '0')), MINIMAL_BATCH)
    # there is a minimal batch size to enforce, if the client
    # does not ask for one
    # Just pass the values to lower levels where sensible
    # defaults will be applied.
    sort_index = request.params.get('sort_on', None)
    reverse = request.params.get('reverse', None)
    search_params = dict(
        creator=creator,
        community=community_path,
        batch_start=batch_start,
        batch_size=batch_size,
    )
    if sort_index:
        search_params['sort_index'] = sort_index
    if reverse:
        search_params['reverse'] = bool(int(reverse))
    batch_info = get_images_batch(
        context, request,
        **search_params
    )
    records = [get_image_info(image, request)
               for image in batch_info['entries']]
    return dict(
        records=records,
        start=batch_info['batch_start'],
        totalRecords=batch_info['total'],
    )
def jquery_livesearch_view(context, request):
    """JSON view backing the livesearch dropdown.

    Runs every registered IGroupSearchFactory over the (prefix-wildcarded)
    search term and returns grouped result rows, each group capped with a
    "Show All" / "No Result" link.

    Fix: removed the unused locals ``principals`` (effective_principals)
    and ``site_path`` (model_path) — neither was referenced anywhere in
    the view.
    """
    searchterm = request.params.get("val", None)
    if searchterm is None:
        # The request forgot to send the key we use to do a search, so
        # make a friendly error message. Important for the unit test.
        msg = "Client failed to send a 'val' parameter as the searchterm"
        return HTTPBadRequest(msg)
    # Prefix search is with a wildcard at the end
    searchterm = searchterm + "*"
    records = LivesearchResults()
    records.set_header("", pre='<div class="header"></div>')
    records.append_to(
        rowclass="showall",
        title="Show All",
        href=model_url(context, request, "searchresults.html",
                       query={"body": searchterm}),
    )
    for listitem in get_listitems(IGroupSearchFactory):
        utility = listitem["component"]
        factory = utility(context, request, searchterm)
        if factory is None:
            continue
        try:
            num, docids, resolver = factory()
        except ParseError:
            # Malformed search syntax for this group: skip it quietly.
            continue
        groupname = listitem["title"]
        records.set_header(groupname,
                           pre='<div class="header">%s</div>' % (groupname,))
        results = filter(None, map(resolver, docids))
        qs = {"body": searchterm, "kind": groupname}
        sr_href = model_url(context, request, "searchresults.html", query=qs)
        for result in results:
            records.append_to(
                rowclass="result",
                title=getattr(result, "title", "<No Title>"),
                href=model_url(result, request))
        if results:
            records.append_to(rowclass="showall", title="Show All",
                              href=sr_href)
        else:
            records.append_to(rowclass="noresult", title="No Result",
                              href=sr_href)
    result = JSONEncoder().encode(list(records))
    return Response(result, content_type="application/x-json")
def _reindex_peopledir(profile):
    """Reindex *profile* in the people-directory catalog."""
    catalog = find_peopledirectory_catalog(profile)
    # The docid attribute stored on the profile belongs to the main site
    # catalog; the peopledir catalog keeps its own docid space, so resolve
    # ours through its document map by address.
    address = model_path(profile)
    docid = catalog.document_map.docid_for_address(address)
    catalog.reindex_doc(docid, profile)
def reindex_profile(obj, event):
    """Reindex a single profile (non-recursive); an IObjectModified
    event subscriber.
    """
    catalog = find_peopledirectory_catalog(obj)
    if catalog is None:
        return
    address = model_path(obj)
    docid = catalog.document_map.docid_for_address(address)
    # Full unindex/index cycle so stale postings are removed first.
    catalog.unindex_doc(docid)
    catalog.index_doc(docid, obj)
def test_content_object(self):
    """A model with a catalog gets reindexed by the subscriber."""
    from repoze.bfg.traversal import model_path
    from opencore.testing import DummyCatalog
    model = testing.DummyModel()
    # Seed the dummy catalog so the model's path resolves to docid 1.
    catalog = DummyCatalog({1: model_path(model)})
    model.catalog = catalog
    self._callFUT(model, None)
    self.assertEqual(catalog.reindexed, [model])
def evolve(context): root = find_root(context) searcher = ICatalogSearch(root) total, docids, resolver = searcher(interfaces=[IForumTopic]) count = 0 workflow = get_workflow(IForumTopic, 'security') for docid in docids: topic = resolver(docid) if has_custom_acl(topic): continue # don't mess with objects customized via edit_acl try: state, msg = workflow.reset(topic) except: print "ERROR while resetting topic workflow: %s" % model_path(topic) else: print "Reset topic workflow: %s" % model_path(topic) count += 1 print "Updated %d forum topic workflows" % count
def getPath(self):
    """Return the traversal path of this model object."""
    return model_path(self)