def getGroups(me, entity, start='', count=PEOPLE_PER_PAGE):
    """Get the groups of entity (either the groups of an organization
    or the groups of a user).

    Keyword params:
    @me: currently authenticated user
    @entity: org/user
    @start: fetch groups from @start
    @count: no. of groups to fetch
    """
    toFetchCount = count + 1
    groups = {}
    groupIds = []
    myGroupsIds = []
    groupFollowers = {}
    pendingConnections = {}
    toFetchGroups = set()
    nextPageStart = ''
    prevPageStart = ''

    #TODO: list the groups in sorted order.
    cols = yield db.get_slice(entity.id, 'entityGroupsMap',
                              start=start, count=toFetchCount)
    groupIds = [x.column.name for x in cols]
    if len(groupIds) > count:
        nextPageStart = utils.encodeKey(groupIds[-1])
        groupIds = groupIds[0:count]
    toFetchGroups.update(set([y.split(':', 1)[1] for y in groupIds]))

    if entity.id == me.id:
        myGroupsIds = [x.split(':', 1)[1] for x in groupIds]
    elif groupIds:
        cols = yield db.get_slice(me.id, "entityGroupsMap", groupIds)
        myGroupsIds = [x.column.name.split(':', 1)[1] for x in cols]
    groupIds = [x.split(':', 1)[1] for x in groupIds]

    if start:
        cols = yield db.get_slice(entity.id, 'entityGroupsMap', start=start,
                                  count=toFetchCount, reverse=True)
        if len(cols) > 1:
            prevPageStart = utils.encodeKey(cols[-1].column.name)

    if toFetchGroups:
        groups = base.EntitySet(toFetchGroups)
        yield groups.fetchData()
        groupFollowers = yield db.multiget_slice(toFetchGroups, "followers",
                                                 names=[me.id])
        groupFollowers = utils.multiColumnsToDict(groupFollowers)
        columns = reduce(lambda x, y: x + y,
                         [["GO:%s" % (x), "GI:%s" % (x)]
                          for x in toFetchGroups])
        cols = yield db.get_slice(me.id, 'pendingConnections', columns)
        pendingConnections = utils.columnsToDict(cols)

    data = {"entities": groups, "groupIds": groupIds,
            "pendingConnections": pendingConnections,
            "myGroups": myGroupsIds, "groupFollowers": groupFollowers,
            "nextPageStart": nextPageStart, "prevPageStart": prevPageStart}
    defer.returnValue(data)

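# A minimal usage sketch, not part of this module: it shows how getGroups's
# paging contract is typically consumed. The helper name is hypothetical and
# it assumes getGroups is wrapped with defer.inlineCallbacks (as its use of
# defer.returnValue suggests) and that this module's defer import is in scope.
@defer.inlineCallbacks
def _printGroupPages(me, entity):
    start = ''
    while True:
        data = yield getGroups(me, entity, start=start)
        print data['groupIds']          # at most PEOPLE_PER_PAGE ids per page
        start = data['nextPageStart']   # opaque key for the following page
        if not start:
            break
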
def getManagedGroups(me, start, count=PEOPLE_PER_PAGE):
    """Get all groups managed by me.

    Keyword params:
    @me: currently authenticated user
    @start: fetch groups from @start
    @count: no. of groups to be fetched
    """
    groups = {}
    groupIds = []
    myGroupsIds = []
    nextPageStart = ''
    prevPageStart = ''
    toFetchCount = count + 1
    toFetchGroups = set()
    groupFollowers = {}
    pendingConnections = {}

    try:
        cols = yield db.get_slice(me.id, "entities",
                                  super_column='adminOfGroups',
                                  start=start, count=toFetchCount)
        groupIds = [x.column.name for x in cols]
        toFetchGroups.update(set(groupIds))
        myGroupsIds = groupIds
        if len(groupIds) > count:
            nextPageStart = utils.encodeKey(groupIds[-1])
            groupIds = groupIds[0:count]
    except ttypes.NotFoundException:
        pass

    if start:
        cols = yield db.get_slice(me.id, "entities",
                                  super_column='adminOfGroups',
                                  start=start, count=toFetchCount,
                                  reverse=True)
        if len(cols) > 1:
            prevPageStart = utils.encodeKey(cols[-1].column.name)

    if toFetchGroups:
        groups = base.EntitySet(toFetchGroups)
        yield groups.fetchData()
        groupFollowers = yield db.multiget_slice(toFetchGroups, "followers",
                                                 names=[me.id])
        groupFollowers = utils.multiColumnsToDict(groupFollowers)
        columns = reduce(lambda x, y: x + y,
                         [["GO:%s" % (x), "GI:%s" % (x)]
                          for x in toFetchGroups])
        cols = yield db.get_slice(me.id, 'pendingConnections', columns)
        pendingConnections = utils.columnsToDict(cols)

    data = {"entities": groups, "groupIds": groupIds,
            "pendingConnections": pendingConnections,
            "myGroups": myGroupsIds, "groupFollowers": groupFollowers,
            "nextPageStart": nextPageStart, "prevPageStart": prevPageStart}
    defer.returnValue(data)

def getPeople(myId, entityId, orgId, start='', count=PEOPLE_PER_PAGE,
              fn=None, fetchBlocked=True):
    blockedUsers = []
    toFetchCount = count + 1
    nextPageStart = None
    prevPageStart = None
    userIds = []

    if fetchBlocked:
        cols = yield db.get_slice(orgId, "blockedUsers")
        blockedUsers = utils.columnsToDict(cols).keys()

    if not fn:
        d1 = db.get_slice(entityId, "displayNameIndex",
                          start=start, count=toFetchCount)
        d2 = db.get_slice(entityId, "displayNameIndex", start=start,
                          count=toFetchCount, reverse=True) if start else None

        # Get the list of users (sorted by displayName)
        cols = yield d1
        userIds = [col.column.name.split(":")[1] for col in cols]
        if len(userIds) > count:
            nextPageStart = utils.encodeKey(cols[-1].column.name)
            userIds = userIds[0:count]
        toFetchUsers = userIds

        # Start of previous page
        if start and d2:
            prevCols = yield d2
            if len(prevCols) > 1:
                prevPageStart = utils.encodeKey(prevCols[-1].column.name)
    else:
        userIds, nextPageStart, prevPageStart = \
            yield fn(entityId, start, toFetchCount)
        toFetchUsers = userIds

    entities = base.EntitySet(toFetchUsers)
    usersDeferred = entities.fetchData()
    relation = Relation(myId, userIds)
    results = yield defer.DeferredList([usersDeferred,
                                        relation.initSubscriptionsList()])

    defer.returnValue((entities, relation, userIds,
                       blockedUsers, nextPageStart, prevPageStart))

def _getInvitationsSent(userId, start='', count=PEOPLE_PER_PAGE):
    toFetchCount = count + 1
    prevPageStart = None
    nextPageStart = None

    d1 = db.get_slice(userId, "invitationsSent",
                      start=start, count=toFetchCount)
    d2 = db.get_slice(userId, "invitationsSent", start=start,
                      count=toFetchCount, reverse=True) if start else None

    cols = yield d1
    emailIds = [col.column.name for col in cols]
    if len(cols) == toFetchCount:
        nextPageStart = utils.encodeKey(emailIds[-1])
        emailIds = emailIds[0:count]

    if start and d2:
        prevCols = yield d2
        if len(prevCols) > 1:
            prevPageStart = utils.encodeKey(prevCols[-1].column.name)

    defer.returnValue((emailIds, prevPageStart, nextPageStart))

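# The function above is the clearest instance of the paging idiom used
# throughout this section: ask the store for count + 1 columns and, if the
# extra column comes back, remember where the next page starts (as an opaque,
# URL-safe token) and drop it from the current page. Below is a standalone,
# hypothetical sketch of that idiom only -- base64 stands in for
# utils.encodeKey/utils.decodeKey and `rows` for a sorted column slice.
import base64


def paginate(rows, count, start_token=''):
    # `rows` is assumed sorted; `start` is inclusive, like a Cassandra slice.
    start = base64.urlsafe_b64decode(start_token) if start_token else ''
    window = [r for r in rows if r >= start][:count + 1]
    next_token = ''
    if len(window) > count:
        next_token = base64.urlsafe_b64encode(window[-1])
        window = window[:count]
    return window, next_token

# e.g. page1, token = paginate(sorted(names), 5)
#      page2, _ = paginate(sorted(names), 5, token)
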
def _deleteKeyword(self, request):
    orgId = request.getSession(IAuthInfo).organization
    keyword = utils.getRequestArg(request, 'keyword') or ''
    keyword = utils.decodeKey(keyword)
    if not keyword:
        return

    yield db.remove(orgId, "keywords", keyword)
    yield db.remove(orgId, "originalKeywords", keyword)
    yield db.remove(orgId + ':' + keyword, "keywordItems")
    request.write('$("#keyword-%s").remove()' % (utils.encodeKey(keyword)))

def _listBlockedUsers(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    orgId = args["orgId"]
    landing = not self._ajax

    start = utils.getRequestArg(request, 'start') or ''
    start = utils.decodeKey(start)
    count = PEOPLE_PER_PAGE
    toFetchCount = count + 1
    nextPageStart = ''
    prevPageStart = ''

    args["title"] = "Manage Users"
    args["menuId"] = "users"
    args["viewType"] = "blocked"

    if script and landing:
        t.render(request, "admin.mako", **args)
    if script and appchange:
        t.renderScriptBlock(request, "admin.mako", "layout",
                            landing, "#mainbar", "set", **args)

    args["heading"] = "Admin Console - Blocked Users"
    cols = yield db.get_slice(orgId, "blockedUsers",
                              start=start, count=toFetchCount)
    blockedUsers = [col.column.name for col in cols]
    if len(blockedUsers) > count:
        nextPageStart = utils.encodeKey(blockedUsers[-1])
        blockedUsers = blockedUsers[:count]

    if start:
        cols = yield db.get_slice(orgId, "blockedUsers", start=start,
                                  count=toFetchCount, reverse=True)
        if len(cols) > 1:
            prevPageStart = utils.encodeKey(cols[-1].column.name)

    entities = base.EntitySet(blockedUsers)
    yield entities.fetchData()
    args["entities"] = entities
    args['nextPageStart'] = nextPageStart
    args['prevPageStart'] = prevPageStart

    if script:
        t.renderScriptBlock(request, "admin.mako", "viewOptions",
                            landing, "#users-view", "set", **args)
        t.renderScriptBlock(request, "admin.mako", "list_users",
                            landing, "#content", "set", **args)

    if script and landing:
        request.write("</body></html>")
    if not script:
        t.render(request, "admin.mako", **args)

def getAllInvitations(me, start='', count=PEOPLE_PER_PAGE):
    """Get all group invitations sent to @me starting from @start.

    Keyword params:
    @me: currently authenticated user
    @start: fetch invitations starting from @start
    @count: no. of invitations to be fetched
    """
    if not start:
        start = 'GI'

    toFetchCount = count + 1
    nextPageStart = ''
    prevPageStart = ''
    toFetchEntities = set()

    cols = yield db.get_slice(me.id, "pendingConnections",
                              start=start, count=toFetchCount)
    groupIds = [x.column.name.split(':')[1] for x in cols
                if len(x.column.name.split(':')) == 2
                and x.column.name.split(':')[0] == 'GI']
    pendingConnections = utils.columnsToDict(cols)
    if len(groupIds) == toFetchCount:
        groupIds = groupIds[:count]
        nextPageStart = utils.encodeKey(cols[-1].column.name)
    toFetchEntities.update(groupIds)

    cols = yield db.get_slice(me.id, "pendingConnections", reverse=True,
                              start=start, count=toFetchCount)
    cols = [x for x in cols if len(x.column.name.split(':')) == 2
            and x.column.name.split(':')[0] == 'GI']
    if len(cols) > 1:
        prevPageStart = utils.encodeKey(cols[-1].column.name)

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()
    entities.update(me)

    defer.returnValue({"groupIds": groupIds,
                       "entities": entities,
                       "myGroups": [],
                       "prevPageStart": prevPageStart,
                       "nextPageStart": nextPageStart,
                       "pendingConnections": pendingConnections,
                       "groupFollowers": dict([(x, []) for x in groupIds])})

def getBlockedMembers(group, me, start='', count=PEOPLE_PER_PAGE):
    """Get users blocked from a group. Only group-admins can view
    blocked users.

    Keyword params:
    @me: currently authenticated user
    @group: group object
    @start: fetch users from @start
    @count: no. of users to be fetched
    """
    if me.id not in group.admins:
        raise errors.PermissionDenied(_("Access Denied"))

    nextPageStart = ''
    prevPageStart = ''
    toFetchCount = count + 1

    cols = yield db.get_slice(group.id, "blockedUsers",
                              start=start, count=toFetchCount)
    blockedUsers = [col.column.name for col in cols]

    if start:
        prevCols = yield db.get_slice(group.id, "blockedUsers", start=start,
                                      reverse=True, count=toFetchCount)
        if len(prevCols) > 1:
            prevPageStart = utils.encodeKey(prevCols[-1].column.name)

    if len(blockedUsers) == toFetchCount:
        blockedUsers = blockedUsers[:count]
        nextPageStart = utils.encodeKey(blockedUsers[-1])

    entities = base.EntitySet(blockedUsers)
    if blockedUsers:
        yield entities.fetchData()

    data = {"userIds": blockedUsers, "entities": entities,
            "prevPageStart": prevPageStart, "nextPageStart": nextPageStart}
    defer.returnValue(data)

def responses(myId, itemId, item, start=''):
    toFetchEntities = set()
    itemResponses = yield db.get_slice(itemId, "itemResponses",
                                       start=start, reverse=True,
                                       count=constants.COMMENTS_PER_PAGE + 1)

    nextPageStart = itemResponses[-1].column.name \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE \
        else None
    itemResponses = itemResponses[:-1] \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE \
        else itemResponses

    responseKeys = []
    for response in itemResponses:
        userKey, responseKey = response.column.value.split(":")
        responseKeys.append(responseKey)
        toFetchEntities.add(userKey)
    responseKeys.reverse()

    entities = base.EntitySet(toFetchEntities)
    d3 = db.multiget_slice(responseKeys + [itemId], "itemLikes", [myId])
    d2 = db.multiget_slice(responseKeys + [itemId], "items",
                           ["meta", "attachments"])
    d1 = entities.fetchData()

    fetchedItems = yield d2
    myLikes = yield d3
    yield d1

    fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
    fetchedLikes = utils.multiColumnsToDict(myLikes)

    # Do some error correction/consistency checking to ensure that the
    # response items actually exist. I don't know of any reason why these
    # items may not exist.
    missingIds = [x for x, y in fetchedItems.items() if not y]
    if missingIds:
        yield _cleanupMissingComments(itemId, missingIds, itemResponses)

    ret = {'items': fetchedItems, 'entities': entities,
           'myLikes': fetchedLikes, 'responses': {itemId: responseKeys}}
    if nextPageStart:
        ret['oldest'] = utils.encodeKey(nextPageStart)
    defer.returnValue(ret)

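# A hypothetical caller sketch, not part of this module: it fetches the newest
# page of comments on an item and keeps walking older pages via the returned
# 'oldest' key. It assumes responses() is wrapped with defer.inlineCallbacks
# and that this module's defer/utils imports are in scope.
@defer.inlineCallbacks
def _fetchAllCommentIds(myId, itemId, item):
    start = ''
    commentIds = []
    while True:
        ret = yield responses(myId, itemId, item, start=start)
        commentIds.extend(ret['responses'][itemId])
        if 'oldest' not in ret:
            break
        # 'oldest' is encoded for URLs; decode it before reusing as start.
        start = utils.decodeKey(ret['oldest'])
    defer.returnValue(commentIds)
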
def _getNotifications(self, request, count=15):
    authinfo = request.getSession(IAuthInfo)
    myId = authinfo.username
    myOrgId = authinfo.organization

    nextPageStart = None
    keysFromStore = []    # List of keys fetched
    notifyIds = []        # convIds for which we have notifications
    details_d = []        # Deferreds waiting on notification items

    toFetchTags = set()
    toFetchEntities = set()

    tags = {}
    entities = {}
    timestamps = {}
    notifyStrs = {}
    notifyClasses = {}
    notifyUsers = {}

    fetchStart = utils.getRequestArg(request, 'start') or ''
    if fetchStart:
        fetchStart = utils.decodeKey(fetchStart)
    fetchCount = count + 2

    while len(notifyIds) < count:
        fetchedNotifyIds = []
        results = yield db.get_slice(myId, "notifications", count=fetchCount,
                                     start=fetchStart, reverse=True)
        for col in results:
            value = col.column.value
            if value not in notifyIds:
                fetchedNotifyIds.append(value)
                keysFromStore.append(col.column.name)
                timestamps[value] = col.column.timestamp / 1e6
        if not keysFromStore:
            break
        fetchStart = keysFromStore[-1]
        notifyIds.extend(fetchedNotifyIds)
        if len(results) < fetchCount:
            break

    if len(keysFromStore) > count:
        nextPageStart = utils.encodeKey(keysFromStore[count])
        notifyIds = notifyIds[0:count]
    elif len(results) == fetchCount:
        nextPageStart = utils.encodeKey(keysFromStore[-1])
        notifyIds = notifyIds[0:-1]

    # We don't have notifications on any conversations
    if not notifyIds:
        defer.returnValue({})

    # We need the name of current user's organization
    toFetchEntities.add(myOrgId)

    # Fetch more data about the notifications
    notifyItems = yield db.get_slice(myId, "notificationItems",
                                     notifyIds, reverse=True)
    notifyValues = {}
    notifyParts = {}
    notifyPlugins = {}
    notifyPluginData = {}
    for notify in notifyItems:
        notifyId = notify.super_column.name
        updates = notify.super_column.columns
        updates.reverse()
        notifyValues[notifyId] = []

        parts = notifyId.split(':')
        notifyType = parts[3] if parts[0] else parts[1]
        plugin = _notificationPlugins.get(notifyType, None)
        if not plugin:
            continue

        values = [update.value for update in updates]
        userIds, entityIds, pluginData = \
            yield plugin.fetchAggregationData(myId, myOrgId, parts, values)

        notifyValues[notifyId] = utils.uniqify(values)
        notifyParts[notifyId] = parts
        notifyPlugins[notifyId] = plugin
        notifyPluginData[notifyId] = pluginData
        notifyUsers[notifyId] = utils.uniqify(userIds)
        toFetchEntities.update(entityIds)

    # Fetch the required entities
    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()
    myOrg = entities.get(myOrgId)

    # Build strings
    notifyStrs = {}
    data = {'entities': entities, 'myId': myId, 'orgId': myOrgId}
    for notifyId in notifyIds:
        parts = notifyParts.get(notifyId, None)
        if not parts:
            continue
        plugin = notifyPlugins[notifyId]
        notifyStrs[notifyId] = plugin.aggregation(parts,
                                                  notifyValues[notifyId],
                                                  data,
                                                  notifyPluginData[notifyId])

    args = {"notifications": notifyIds, "notifyStr": notifyStrs,
            "notifyClasses": notifyClasses, "notifyUsers": notifyUsers,
            "entities": entities, "timestamps": timestamps,
            "nextPageStart": nextPageStart}
    defer.returnValue(args)

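# A minimal, hypothetical notification-plugin sketch, inferred only from the
# two call sites above (fetchAggregationData and aggregation). The real
# plugins registered in _notificationPlugins may differ; this only illustrates
# the expected shapes of their return values. It assumes this module's
# defer import is in scope.
class ExampleNotificationPlugin(object):
    @defer.inlineCallbacks
    def fetchAggregationData(self, myId, orgId, parts, values):
        # `values` are the update strings stored against the notification;
        # here we pretend each value is simply the id of the acting user.
        userIds = values
        entityIds = set(values)      # entities the renderer will need
        pluginData = {}              # anything aggregation() needs later
        yield defer.succeed(None)    # placeholder for real DB lookups
        defer.returnValue((userIds, entityIds, pluginData))

    def aggregation(self, parts, values, data, pluginData):
        entities = data['entities']
        names = [entities[x].basic['name'] for x in values[:2]]
        return "%s commented on your post" % " and ".join(names)
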
def _renderChatArchives(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    orgId = args['orgId']
    landing = not self._ajax

    start = utils.getRequestArg(request, 'start') or ''
    start = utils.decodeKey(start)
    count = constants.CHATS_PER_PAGE

    if script and landing:
        t.render(request, "chat.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "chat.mako", "layout",
                            landing, "#mainbar", "set", **args)

    chats = {}
    chatParticipants = {}
    prevPageStart = ''
    nextPageStart = ''

    cols = yield db.get_slice(myId, "chatArchiveList", start=start,
                              count=count + 1, reverse=True)
    chatIds = [col.column.value for col in cols]
    if len(cols) == count + 1:
        chatIds = chatIds[:count]
        nextPageStart = utils.encodeKey(cols[-1].column.name)

    cols = yield db.get_slice(myId, "chatArchiveList",
                              start=start, count=count + 1)
    if len(cols) > 1 and start:
        prevPageStart = utils.encodeKey(cols[-1].column.name)

    if chatIds:
        cols = yield db.multiget_slice(chatIds, "chatLogs", reverse=False)
        chats = OrderedDict()
        for chatId in cols:
            chats[chatId] = []
            for col in cols[chatId]:
                entityId, comment = col.column.value.split(':', 1)
                chats[chatId] = (entityId, comment,
                                 col.column.timestamp / 1e6)

        participants = yield db.multiget_slice(chatIds, "chatParticipants")
        participants = utils.multiColumnsToDict(participants)

        entityIds = set([])
        for chatId in participants:
            entityIds.update(participants[chatId])
        entities = base.EntitySet(entityIds)
        yield entities.fetchData()
        entities.update(args['me'])

        args.update({'chatParticipants': participants,
                     'entities': entities,
                     'chats': chats,
                     'chatIds': chatIds,
                     'nextPageStart': nextPageStart,
                     'prevPageStart': prevPageStart,
                     'view': 'list',
                     'menuId': 'chats'})

    if script:
        onload = """
                 $$.menu.selectItem('chats');
                 """
        t.renderScriptBlock(request, "chat.mako", "chatList", landing,
                            '.center-contents', "set", True,
                            handlers={"onload": onload}, **args)
    else:
        t.render(request, "chat.mako", **args)

def _renderChat(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    orgId = args['orgId']
    landing = not self._ajax

    if script and landing:
        t.render(request, "chat.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "chat.mako", "layout",
                            landing, "#mainbar", "set", **args)

    chatId = utils.getRequestArg(request, 'id')
    start = utils.getRequestArg(request, 'start') or ''
    start = utils.decodeKey(start)
    count = 25

    if not chatId:
        raise errors.MissingParams(["Chat Id"])

    chatParticipants = yield db.get_slice(chatId, "chatParticipants")
    chatParticipants = utils.columnsToDict(chatParticipants).keys()
    if myId not in chatParticipants:
        raise errors.ChatAccessDenied(chatId)

    entityIds = set()
    chatLogs = []
    nextPageStart = ''

    cols = yield db.get_slice(chatId, "chatLogs",
                              start=start, count=count + 1)
    for col in cols:
        timestamp = col.column.timestamp / 1e6
        entityId, comment = col.column.value.split(':', 1)
        entityIds.add(entityId)
        chatLogs.append((entityId, comment, timestamp))

    if len(cols) == count + 1:
        nextPageStart = utils.encodeKey(cols[-1].column.name)
        chatLogs = chatLogs[:count]

    entities = base.EntitySet(chatParticipants)
    yield entities.fetchData()
    entities.update(args['me'])

    title = "Chat with " + ",".join([entities[x].basic['name']
                                     for x in chatParticipants if x != myId])
    args.update({"chatLogs": chatLogs, "chatId": chatId,
                 "entities": entities, "nextPageStart": nextPageStart,
                 "menuId": "chats", "view": "log"})

    if script:
        if not start:
            onload = """
                     $$.menu.selectItem('chats');
                     """
            t.renderScriptBlock(request, 'chat.mako', "chat_title",
                                landing, "#title", "set", True,
                                handlers={"onload": onload},
                                chatTitle=title)
            t.renderScriptBlock(request, "chat.mako", "chat", landing,
                                ".center-contents", "set", **args)
        else:
            t.renderScriptBlock(request, "chat.mako", "chat", landing,
                                "#next-page-loader", "replace", **args)
    else:
        t.render(request, "chat.mako", **args)

def _listTags(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    myOrgId = args["orgId"]

    start = utils.getRequestArg(request, 'start') or ''
    nextPageStart = ''
    prevPageStart = ''
    count = constants.PEOPLE_PER_PAGE
    toFetchCount = count + 1
    start = utils.decodeKey(start)

    args["menuId"] = "tags"
    if script and landing:
        t.render(request, "tags.mako", **args)
    if script and appchange:
        t.renderScriptBlock(request, "tags.mako", "layout",
                            landing, "#mainbar", "set", **args)
    if script:
        t.renderScriptBlock(request, "tags.mako", "header",
                            landing, "#tags-header", "set", **args)

    tagsByName = yield db.get_slice(myOrgId, "orgTagsByName",
                                    start=start, count=toFetchCount)
    tagIds = [x.column.value for x in tagsByName]
    if len(tagsByName) > count:
        nextPageStart = utils.encodeKey(tagsByName[-1].column.name)
        tagIds = tagIds[:-1]

    if start:
        prevCols = yield db.get_slice(myOrgId, "orgTagsByName", start=start,
                                      reverse=True, count=toFetchCount)
        if len(prevCols) > 1:
            prevPageStart = utils.encodeKey(prevCols[-1].column.name)

    tags = {}
    if tagIds:
        tags = yield db.get_slice(myOrgId, "orgTags", tagIds)
        tags = utils.supercolumnsToDict(tags)

    # TODO: We need an index of all tags that the user is following
    #       Probably convert the 'subscriptions' column family to 'Super'
    #       and have people and tags in the same column family.
    tagsFollowing = []
    if tagIds:
        cols = yield db.multiget(tagIds, "tagFollowers", myId)
        tagsFollowing = [x for x in cols.keys() if cols[x]]

    args['tags'] = tags
    args['tagIds'] = tagIds
    args['tagsFollowing'] = tagsFollowing
    args['nextPageStart'] = nextPageStart
    args['prevPageStart'] = prevPageStart

    if script:
        if appchange:
            t.renderScriptBlock(request, "tags.mako", "tagsListLayout",
                                landing, "#content", "set", **args)
        else:
            t.renderScriptBlock(request, "tags.mako", "listTags",
                                landing, "#tags-wrapper", "set", **args)
            t.renderScriptBlock(request, "tags.mako", "paging",
                                landing, "#tags-paging", "set", **args)
    if not script:
        t.render(request, "tags.mako", **args)

def get(auth, feedId=None, feedItemsId=None, convIds=None,
        getFn=None, cleanFn=None, start='', count=10, getReasons=True,
        forceCount=False, itemType=None):
    """Fetch data from the feed represented by feedId. Returns a dictionary
    that has the items from the feed, the start of the next page and the
    responses and likes that I know of.

    Keyword params:
    @auth - An instance of AuthInfo representing the authenticated user
    @feedId - Id of the feed from which the data is to be fetched
    @feedItemsId - Id of the feed from which feed items must be fetched
    @convIds - List of conversation ids to be used as the feed
    @getFn - Function that must be used to fetch items
    @cleanFn - Function that must be used to clean items that don't exist
    @start - Id of the item where the fetching must start
    @count - Number of items to fetch
    @getReasons - Add reason strings to the returned dictionary
    @forceCount - Try hard to get at least count items from the feed
    @itemType - Restrict the feed to items of this type (when indexed)
    """
    toFetchItems = set()        # Items and entities that need to be fetched
    toFetchEntities = set()     #
    toFetchTags = set()         #

    items = {}                      # Fetched items, entities and tags
    entities = base.EntitySet([])   #
    tags = {}                       #

    deleted = []        # List of items that were deleted
    deleteKeys = []     # List of keys that need to be deleted

    responses = {}      # Cached data of item responses and likes
    likes = {}          #
    myLikes = {}

    myId = auth.username
    orgId = auth.organization

    feedSource = "feed_%s" % itemType \
                 if itemType and itemType in plugins \
                    and plugins[itemType].hasIndex \
                 else "feed"
    feedItemsId = feedItemsId or myId
    feedItems_d = []    # List of deferreds used to fetch feedItems

    # Used for checking ACL
    relation = Relation(myId, [])
    yield relation.initGroupsList()

    # The updates that will be used to build reason strings
    convReasonUpdates = {}

    # Data that is sent to various plugins and returned by this function
    # XXX: myKey is deprecated - use myId
    data = {"myId": myId, "orgId": orgId, "responses": responses,
            "likes": likes, "myLikes": myLikes, "items": items,
            "entities": entities, "tags": tags, "myKey": myId,
            "relations": relation}

    @defer.inlineCallbacks
    def fetchFeedItems(ids):
        rawFeedItems = yield db.get_slice(feedItemsId, "feedItems", ids) \
                             if ids else defer.succeed([])
        for conv in rawFeedItems:
            convId = conv.super_column.name
            convUpdates = conv.super_column.columns
            responses[convId] = []
            likes[convId] = []
            latest = None

            updatesByType = {}
            for update in convUpdates:
                parts = update.value.split(':')
                updatesByType.setdefault(parts[0], []).append(parts)
                if parts[1] != myId:    # Ignore my own updates
                    latest = parts      # when displaying latest actors

            # Parse all notifications to make sure we fetch any required
            # items, entities. And cache generic stuff that we display
            for tipe in updatesByType.keys():
                updates = updatesByType[tipe]

                if tipe in _feedUpdatePlugins:
                    (i, e) = _feedUpdatePlugins[tipe].parse(convId, updates)
                    toFetchItems.update(i)
                    toFetchEntities.update(e)

                if tipe == "L":
                    for update in updates:
                        if update[2] == convId:
                            likes[convId].append(update[1])
                        # XXX: Adding this item may break the sorting
                        #      of responses on this conversation
                        #      Bug #493
                        #else:
                        #    responses[convId].append(update[2])
                elif tipe in ["C", "Q"]:
                    for update in updates:
                        responses[convId].append(update[2])

            # Store any information that can be used to render
            # the reason strings when we have the required data
            if getReasons and latest:
                convReasonUpdates[convId] = updatesByType[latest[0]]

    # Fetch the feed if required and at the same time make sure
    # we delete unwanted items from the feed (cleanup).
    # NOTE: We assume that there will be very few deletes.
    nextPageStart = None
    if not convIds:
        feedId = feedId or myId

        allFetchedConvIds = set()   # Complete set of convIds fetched
        itemsFromFeed = {}          # All the key-values retrieved from feed
        keysFromFeed = []           # Sorted list of keys (used for paging)
        convIds = []                # List of convIds that will be displayed

        fetchStart = utils.decodeKey(start)
        fetchCount = count + 1

        while len(convIds) < count:
            fetchedConvIds = []

            # Use the getFn function if given.
            # NOTE: Part of this code is duplicated just below this.
            if getFn:
                results = yield getFn(start=fetchStart, count=fetchCount)
                for name, value in results.items():
                    keysFromFeed.append(name)
                    if value not in allFetchedConvIds:
                        fetchedConvIds.append(value)
                        allFetchedConvIds.add(value)
                        itemsFromFeed[name] = value
                    else:
                        deleteKeys.append(name)

            # Fetch user's feed when getFn isn't given.
            # NOTE: Part of this code is from above
            else:
                results = yield db.get_slice(feedId, feedSource,
                                             count=fetchCount,
                                             start=fetchStart, reverse=True)
                for col in results:
                    value = col.column.value
                    keysFromFeed.append(col.column.name)
                    if value not in allFetchedConvIds:
                        fetchedConvIds.append(value)
                        allFetchedConvIds.add(value)
                        itemsFromFeed[col.column.name] = value
                    else:
                        deleteKeys.append(col.column.name)

            # Initiate fetching feed items for all the conversation Ids.
            # Meanwhile we check if the authenticated user has access to
            # all the fetched conversation ids.
            # NOTE: Conversations would rarely be filtered out. So, we
            #       just go ahead with fetching data for all convs.
            feedItems_d.append(fetchFeedItems(fetchedConvIds))
            (filteredConvIds, deletedIds) = yield utils.fetchAndFilterConvs\
                                (fetchedConvIds, relation, items, myId, orgId)
            convIds.extend(filteredConvIds)
            deleted.extend(deletedIds)

            # Unless we are forced to fetch count number of items, we only
            # iterate till we fetch at least half of them
            if (not forceCount and len(convIds) > (count / 2)) or \
               len(results) < fetchCount:
                break

            # If we need more items, we start fetching from where we
            # left in the previous iteration.
            fetchStart = keysFromFeed[-1]

        # If the DB fetch got as many items as we requested
        # there may be additional items present in the feed.
        # So, we cut one item from what we return and start the
        # next page from there.
        if len(results) == fetchCount:
            lastConvId = convIds[-1]
            for key in reversed(keysFromFeed):
                if key in itemsFromFeed and itemsFromFeed[key] == lastConvId:
                    nextPageStart = utils.encodeKey(key)
            convIds = convIds[:-1]
    else:
        (convIds, deletedIds) = yield utils.fetchAndFilterConvs(convIds,
                                            relation, items, myId, orgId)
        # NOTE: Unlike the above case where we fetch convIds from the
        #       database (where we set the nextPageStart to a key),
        #       here we set nextPageStart to the convId.
        if len(convIds) > count:
            nextPageStart = utils.encodeKey(convIds[count])
            convIds = convIds[0:count]

        # Since convIds were directly passed to us, we would also
        # return the list of convIds deleted back to the caller.
        if deletedIds:
            data["deleted"] = deletedIds

    # We don't have any conversations to display!
    if not convIds:
        defer.returnValue({"conversations": []})

    # Delete any convs that were deleted from the feeds and
    # any duplicates that were marked for deletion
    cleanup_d = []
    if deleted:
        for key, value in itemsFromFeed.items():
            if value in deleted:
                deleteKeys.append(key)
        if cleanFn:
            d1 = cleanFn(list(deleteKeys))
        else:
            d1 = db.batch_remove({feedSource: [feedId]}, names=deleteKeys)
        d2 = db.batch_remove({'feedItems': [feedId]}, names=list(deleted))
        cleanup_d = [d1, d2]

    # We now have a filtered list of conversations that can be displayed
    # Let's wait till all the feed items have been fetched and processed
    yield defer.DeferredList(feedItems_d)

    # Fetch the remaining items (comments on the actual conversations)
    items_d = db.multiget_slice(toFetchItems, "items",
                                ["meta", "attachments"])

    # Fetch tags on all the conversations that will be displayed
    for convId in convIds:
        conv = items[convId]
        toFetchEntities.add(conv["meta"]["owner"])
        if "target" in conv["meta"]:
            toFetchEntities.update(conv["meta"]["target"].split(','))
        toFetchTags.update(conv.get("tags", {}).keys())
    tags_d = db.get_slice(orgId, "orgTags", toFetchTags) \
             if toFetchTags else defer.succeed([])

    # Fetch the list of my likes.
    # XXX: Latency can be pretty high here because many nodes will have to
    #      be contacted for the information. Alternative could be to cache
    #      all likes by a user somewhere.
    myLikes_d = db.multiget(toFetchItems.union(convIds), "itemLikes", myId)

    # Fetch extra data that is required to render special items
    # We already fetched the conversation items, plugins merely
    # add more data to the already fetched items
    for convId in convIds[:]:
        itemType = items[convId]["meta"]["type"]
        if itemType in plugins:
            try:
                entityIds = yield plugins[itemType].fetchData(data, convId)
                toFetchEntities.update(entityIds)
            except Exception, e:
                log.err(e)
                convIds.remove(convId)

def getGroupRequests(me, start='', count=PEOPLE_PER_PAGE):
    """Get the list of users who want to join groups that @me administers.

    Keyword params:
    @me: currently authenticated user
    @start: start fetching from @start
    @count: no. of users/requests to fetch
    """
    userIds = []
    entities = {}
    nextPageStart = None
    prevPageStart = None

    cols = yield db.get_slice(me.id, "entities", super_column='adminOfGroups')
    managedGroupIds = [col.column.name for col in cols]

    if not managedGroupIds:
        data = {"userIds": [], "entities": {},
                "prevPageStart": None, "nextPageStart": None}
        defer.returnValue(data)

    startKey = ''
    startGroupId = managedGroupIds[0]
    if len(start.split(':')) == 2:
        userId, startGroupId = start.split(":")
        startKey = "GI:%s" % (userId)

    toFetchStart = startKey or "GI"
    toFetchGroup = startGroupId
    toFetchCount = count + 1
    toFetchEntities = set()

    index = 0
    try:
        index = managedGroupIds.index(toFetchGroup)
    except ValueError:
        pass

    while len(userIds) < toFetchCount:
        cols = yield db.get_slice(toFetchGroup, "pendingConnections",
                                  start=toFetchStart, count=toFetchCount)
        userIds.extend([(col.column.name.split(':')[1], toFetchGroup)
                        for col in cols
                        if len(col.column.name.split(':')) == 2
                        and col.column.name.split(':')[0] == "GI"])
        if len(userIds) >= toFetchCount:
            break
        if len(cols) < toFetchCount:
            if index + 1 < len(managedGroupIds):
                index = index + 1
                toFetchGroup = managedGroupIds[index]
                toFetchStart = 'GI'
            else:
                break

    if len(userIds) >= toFetchCount:
        nextPageStart = utils.encodeKey("%s:%s" % (userIds[count]))
        userIds = userIds[0:count]

    toFetchEntities.update([userId for userId, groupId in userIds])
    toFetchEntities.update([groupId for userId, groupId in userIds])
    entities = base.EntitySet(toFetchEntities)
    entities_d = entities.fetchData()

    try:
        toFetchGroup = startGroupId
        index = managedGroupIds.index(startGroupId)
        toFetchStart = startKey
    except ValueError:
        index = None

    if index is not None and start:
        tmpIds = []
        while len(tmpIds) < toFetchCount:
            cols = yield db.get_slice(toFetchGroup, "pendingConnections",
                                      start=toFetchStart, reverse=True,
                                      count=toFetchCount)
            tmpIds.extend([(col.column.name.split(':')[1], toFetchGroup)
                           for col in cols
                           if len(col.column.name.split(':')) == 2])
            if len(tmpIds) >= toFetchCount:
                tmpIds = tmpIds[0:toFetchCount]
                break
            if len(cols) < toFetchCount:
                if index - 1 >= 0:
                    index = index - 1
                    toFetchGroup = managedGroupIds[index]
                    toFetchStart = ''
                else:
                    break
        if len(tmpIds) > 1:
            prevPageStart = utils.encodeKey("%s:%s" % (tmpIds[-1]))

    yield entities_d
    entities.update(me)

    data = {"userIds": userIds, "entities": entities,
            "prevPageStart": prevPageStart, "nextPageStart": nextPageStart}
    defer.returnValue(data)

def _getKeywordMatches(self, request, keyword, start='', count=10):
    args = {}
    authinfo = request.getSession(IAuthInfo)
    myId = authinfo.username
    orgId = authinfo.organization

    items = {}
    itemIds = []
    itemIdKeyMap = {}
    allFetchedItems = set()
    deleted = set()

    fetchStart = utils.decodeKey(start)
    fetchCount = count + 2

    while len(itemIds) < count:
        fetchedItemIds = []
        toFetchItems = set()
        results = yield db.get_slice(orgId + ":" + keyword, "keywordItems",
                                     count=fetchCount, start=fetchStart,
                                     reverse=True)
        for col in results:
            fetchStart = col.column.name
            itemAndParentIds = col.column.value.split(':')
            itemIdKeyMap[itemAndParentIds[0]] = fetchStart
            fetchedItemIds.append(itemAndParentIds[0])
            for itemId in itemAndParentIds:
                if itemId not in allFetchedItems:
                    toFetchItems.add(itemId)
                    allFetchedItems.add(itemId)

        if toFetchItems:
            fetchedItems = yield db.multiget_slice(toFetchItems, "items",
                                                   ["meta", "attachments"])
            fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
            items.update(fetchedItems)

        for itemId in fetchedItemIds:
            item = items[itemId]
            if not 'meta' in item:
                continue

            state = item['meta'].get('state', 'published')
            if state == 'deleted':
                deleted.add(itemIdKeyMap[itemId])
            elif utils.checkAcl(myId, orgId, True, None, item['meta']):
                itemIds.append(itemId)

        if len(results) < fetchCount:
            break

    if len(itemIds) > count:
        nextPageStart = utils.encodeKey(itemIdKeyMap[itemIds[-1]])
        itemIds = itemIds[:-1]
    else:
        nextPageStart = None

    dd = db.batch_remove({'keywordItems': [orgId + ':' + keyword]},
                         names=deleted) if deleted else defer.succeed([])

    args.update({'items': items, 'myId': myId})

    toFetchEntities = set()
    extraDataDeferreds = []
    for itemId in itemIds:
        item = items[itemId]
        itemMeta = item['meta']

        toFetchEntities.add(itemMeta['owner'])
        if 'target' in itemMeta:
            toFetchEntities.update(itemMeta['target'].split(','))
        if 'parent' in itemMeta:
            parentId = itemMeta['parent']
            if parentId in items:
                toFetchEntities.add(items[parentId]['meta']['owner'])

        itemType = itemMeta.get('type', 'status')
        if itemType in plugins:
            d = plugins[itemType].fetchData(args, itemId)
            extraDataDeferreds.append(d)

    result = yield defer.DeferredList(extraDataDeferreds)
    for success, ret in result:
        if success:
            toFetchEntities.update(ret)

    fetchedEntities = {}
    if toFetchEntities:
        fetchedEntities = base.EntitySet(toFetchEntities)
        yield fetchedEntities.fetchData()

    yield dd
    args.update({'entities': fetchedEntities, 'matches': itemIds,
                 'nextPageStart': nextPageStart})
    defer.returnValue(args)

def _getUserItems(self, request, userId, start='', count=10):
    authinfo = request.getSession(IAuthInfo)
    myId = authinfo.username
    myOrgId = authinfo.organization

    toFetchItems = set()
    toFetchEntities = set()
    toFetchTags = set()
    toFetchResponses = set()
    toFetchCount = count + 1
    toFetchStart = utils.decodeKey(start) if start else ''
    fetchedUserItem = []
    responses = {}
    convs = []
    userItemsRaw = []
    userItems = []
    reasonStr = {}
    timestamps = {}
    items = {}
    nextPageStart = None
    args = {'myId': myId}

    relation = Relation(myId, [])
    yield relation.initGroupsList()

    toFetchEntities.add(userId)

    while len(convs) < toFetchCount:
        cols = yield db.get_slice(userId, "userItems", start=toFetchStart,
                                  reverse=True, count=toFetchCount)
        tmpIds = []
        for col in cols:
            convId = col.column.value.split(":")[2]
            if convId not in tmpIds and convId not in convs:
                tmpIds.append(convId)
        (filteredConvs, deletedConvs) = yield utils.fetchAndFilterConvs\
                                (tmpIds, relation, items, myId, myOrgId)
        for col in cols[0:count]:
            convId = col.column.value.split(":")[2]
            if len(convs) == count or len(fetchedUserItem) == count * 2:
                nextPageStart = col.column.name
                break
            if convId not in filteredConvs and convId not in convs:
                continue
            fetchedUserItem.append(col)
            if convId not in convs:
                convs.append(convId)
        if len(cols) < toFetchCount or nextPageStart:
            break
        if cols:
            toFetchStart = cols[-1].column.name

    if nextPageStart:
        nextPageStart = utils.encodeKey(nextPageStart)

    for col in fetchedUserItem:
        value = tuple(col.column.value.split(":"))
        timestamps[value] = col.column.timestamp / 1e6
        rtype, itemId, convId, convType, convOwnerId, commentSnippet = value
        commentSnippet = """<span class="snippet">"%s"</span>""" \
                         % (_(commentSnippet))
        toFetchEntities.add(convOwnerId)
        if rtype == 'I':
            toFetchItems.add(convId)
            toFetchResponses.add(convId)
            userItems.append(value)
        elif rtype == "L" and itemId == convId and convOwnerId != userId:
            reasonStr[value] = _("liked %s's %s")
            userItems.append(value)
        elif rtype == "L" and convOwnerId != userId:
            r = "answer" if convType == 'question' else 'comment'
            reasonStr[value] = _("liked") + " %s " % (commentSnippet) + \
                               _("%s " % r) + _("on %s's %s")
            userItems.append(value)
        elif rtype in ["C", 'Q'] and convOwnerId != userId:
            reasonStr[value] = "%s" % (commentSnippet) + _(" on %s's %s")
            userItems.append(value)

    itemResponses = yield db.multiget_slice(toFetchResponses, "itemResponses",
                                            count=2, reverse=True)
    for convId, comments in itemResponses.items():
        responses[convId] = []
        for comment in comments:
            userId_, itemKey = comment.column.value.split(':')
            if itemKey not in toFetchItems:
                responses[convId].insert(0, itemKey)
                toFetchItems.add(itemKey)
                toFetchEntities.add(userId_)

    items = yield db.multiget_slice(toFetchItems, "items",
                                    ["meta", "tags", "attachments"])
    items = utils.multiSuperColumnsToDict(items)
    args["items"] = items

    extraDataDeferreds = []
    for convId in convs:
        if convId not in items:
            continue
        meta = items[convId]["meta"]
        itemType = meta["type"]
        toFetchEntities.add(meta["owner"])
        if "target" in meta:
            toFetchEntities.update(meta["target"].split(','))
        toFetchTags.update(items[convId].get("tags", {}).keys())

        if itemType in plugins:
            d = plugins[itemType].fetchData(args, convId)
            extraDataDeferreds.append(d)

    result = yield defer.DeferredList(extraDataDeferreds)
    for success, ret in result:
        if success:
            toFetchEntities.update(ret)

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()

    tags = {}
    if toFetchTags:
        userOrgId = entities[userId].basic["org"]
        fetchedTags = yield db.get_slice(userOrgId, "orgTags", toFetchTags)
        tags = utils.supercolumnsToDict(fetchedTags)

    fetchedLikes = yield db.multiget(toFetchItems, "itemLikes", myId)
    myLikes = utils.multiColumnsToDict(fetchedLikes)

    data = {"entities": entities, "reasonStr": reasonStr, "tags": tags,
            "myLikes": myLikes, "userItems": userItems,
            "responses": responses, "nextPageStart": nextPageStart,
            "timestamps": timestamps}
    del args['myId']
    args.update(data)
    defer.returnValue(args)

def _listConversations(self, request):
    """Renders a time sorted list of conversations in a particular view.

    Keyword Arguments:
    filterType: The folder view which is to be rendered. One of
                ['unread', 'all', 'archive', 'trash'].
    start: The base64 encoded timeUUID of the starting conversation id
           of the page that needs to be rendered.
    """
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    filterType = utils.getRequestArg(request, 'type')
    folder = self._folders[filterType] if filterType in self._folders \
             else self._folders['inbox']
    start = utils.getRequestArg(request, "start") or ''
    start = utils.decodeKey(start)

    if script and landing:
        t.render(request, "message.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "message.mako", "layout",
                            landing, "#mainbar", "set", **args)

    unread = []
    convs = []
    users = set()
    count = 10
    fetchCount = count + 1
    nextPageStart = ''
    prevPageStart = ''

    cols = yield db.get_slice(myId, folder, reverse=True,
                              start=start, count=fetchCount)
    for col in cols:
        x, convId = col.column.value.split(':')
        convs.append(convId)
        if x == 'u':
            unread.append(convId)
    if len(cols) == fetchCount:
        nextPageStart = utils.encodeKey(col.column.name)
        convs = convs[:count]

    ###XXX: try to avoid extra fetch
    cols = yield db.get_slice(myId, folder, count=fetchCount, start=start)
    if cols and len(cols) > 1 and start:
        prevPageStart = utils.encodeKey(cols[-1].column.name)

    cols = yield db.multiget_slice(convs, 'mConversations')
    conversations = utils.multiSuperColumnsToDict(cols)
    m = {}
    for convId in conversations:
        if not conversations[convId]:
            continue
        participants = conversations[convId]['participants'].keys()
        users.update(participants)
        conversations[convId]['people'] = participants
        conversations[convId]['read'] = str(int(convId not in unread))
        messageCount = yield db.get_count(convId, "mConvMessages")
        conversations[convId]['count'] = messageCount
        m[convId] = conversations[convId]

    users = base.EntitySet(users)
    yield users.fetchData()

    args.update({"view": "messages"})
    args.update({"messages": m})
    args.update({"people": users})
    args.update({"mids": convs})
    args.update({"menuId": "messages"})
    args.update({"filterType": filterType or "all"})
    args['nextPageStart'] = nextPageStart
    args['prevPageStart'] = prevPageStart

    if script:
        onload = """
                 $$.menu.selectItem('%s');
                 $('#mainbar .contents').removeClass("has-right");
                 """ % args["menuId"]
        t.renderScriptBlock(request, "message.mako", "render_conversations",
                            landing, ".center-contents", "set", True,
                            handlers={"onload": onload}, **args)
        yield utils.render_LatestCounts(request, landing)
    else:
        t.render(request, "message.mako", **args)

def renderItem(self, request, toFeed=False):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    myOrgId = args["orgId"]

    convId, conv = yield utils.getValidItemId(request, "id",
                                              columns=['tags'])
    itemType = conv["meta"].get("type", None)

    if 'parent' in conv['meta']:
        raise errors.InvalidItem('conversation', convId)

    start = utils.getRequestArg(request, "start") or ''
    start = utils.decodeKey(start)

    args['convId'] = convId
    args['isItemView'] = True
    args['items'] = {convId: conv}
    meta = conv["meta"]
    owner = meta["owner"]

    relation = Relation(myId, [])
    yield defer.DeferredList([relation.initGroupsList(),
                              relation.initSubscriptionsList()])
    args["relations"] = relation

    if script and landing:
        t.render(request, "item.mako", **args)
    if script and appchange:
        t.renderScriptBlock(request, "item.mako", "layout",
                            landing, "#mainbar", "set", **args)

    args["entities"] = {}
    toFetchEntities = set()
    toFetchTags = set(conv.get("tags", {}).keys())

    plugin = plugins[itemType] if itemType in plugins else None
    if plugin:
        entityIds = yield plugin.fetchData(args)
        toFetchEntities.update(entityIds)

    toFetchEntities.add(conv['meta']['owner'])
    if "target" in conv["meta"]:
        toFetchEntities.update(conv['meta']['target'].split(','))
    if conv['meta']['owner'] not in toFetchEntities:
        toFetchEntities.add(conv['meta']['owner'])

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()
    args["entities"] = entities

    renderers = []

    if script:
        t.renderScriptBlock(request, "item.mako", "conv_root", landing,
                            "#conv-root-%s > .conv-summary" % (convId),
                            "set", **args)

    convOwner = args["items"][convId]["meta"]["owner"]
    args["ownerId"] = convOwner

    if script:
        if itemType != "feedback":
            t.renderScriptBlock(request, "item.mako", "conv_owner", landing,
                                "#conv-avatar-%s" % convId, "set", **args)
        else:
            feedbackType = conv['meta']['subType']
            t.renderScriptBlock(request, "item.mako", "feedback_icon",
                                landing, "#conv-avatar-%s" % convId,
                                "set", args=[feedbackType])

    # A copy of this code for fetching comments is present in _responses
    # Most changes here may need to be done there too.
    itemResponses = yield db.get_slice(convId, "itemResponses",
                                       start=start, reverse=True,
                                       count=constants.COMMENTS_PER_PAGE + 1)
    nextPageStart = itemResponses[-1].column.name \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE \
        else None
    itemResponses = itemResponses[:-1] \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE \
        else itemResponses

    responseKeys = []
    for response in itemResponses:
        userKey, responseKey = response.column.value.split(":")
        responseKeys.append(responseKey)
        toFetchEntities.add(userKey)
    responseKeys.reverse()

    subscriptions = list(relation.subscriptions)
    likes = yield db.get_slice(convId, "itemLikes", subscriptions) \
                  if subscriptions else defer.succeed([])
    toFetchEntities.update([x.column.name for x in likes])

    entities = base.EntitySet(toFetchEntities)
    d1 = entities.fetchData()
    d2 = db.multiget_slice(responseKeys, "items", ["meta", "attachments"])
    d3 = db.multiget_slice(responseKeys + [convId], "itemLikes", [myId])
    d4 = db.get_slice(myOrgId, "orgTags", toFetchTags) \
                  if toFetchTags else defer.succeed([])

    yield d1
    fetchedItems = yield d2
    myLikes = yield d3
    fetchedTags = yield d4

    fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
    myLikes = utils.multiColumnsToDict(myLikes)
    fetchedTags = utils.supercolumnsToDict(fetchedTags)

    # Do some error correction/consistency checking to ensure that the
    # response items actually exist. I don't know of any reason why these
    # items may not exist.
    missingIds = [x for x, y in fetchedItems.items() if not y]
    if missingIds:
        yield self._cleanupMissingComments(convId, missingIds, itemResponses)

    args["items"].update(fetchedItems)
    args["entities"].update(entities)
    args["myLikes"] = myLikes
    args["tags"] = fetchedTags
    args["responses"] = {convId: responseKeys}
    if nextPageStart:
        args["oldest"] = utils.encodeKey(nextPageStart)

    if script:
        t.renderScriptBlock(request, "item.mako", 'conv_footer', landing,
                            '#item-footer-%s' % convId, 'set', **args)
        t.renderScriptBlock(request, "item.mako", 'conv_tags', landing,
                            '#conv-tags-wrapper-%s' % convId, 'set',
                            handlers={"onload": "$('#conv-meta-wrapper-%s').removeClass('no-tags')" % convId} if toFetchTags else None,
                            **args)
        t.renderScriptBlock(request, "item.mako", 'conv_comments', landing,
                            '#conv-comments-wrapper-%s' % convId,
                            'set', **args)
        t.renderScriptBlock(request, "item.mako", 'conv_comment_form',
                            landing, '#comment-form-wrapper-%s' % convId,
                            'set', True,
                            handlers={"onload": "(function(obj){$$.convs.load(obj);})(this);"},
                            **args)

        numLikes = int(conv["meta"].get("likesCount", "0"))
        if numLikes:
            iLike = myId in args["myLikes"].get(convId, [])
            t.renderScriptBlock(request, "item.mako", 'conv_likes', landing,
                                '#conv-likes-wrapper-%s' % convId, 'set',
                                args=[convId, numLikes, iLike,
                                      [x.column.name for x in likes]],
                                entities=args['entities'])

    if plugin and hasattr(plugin, 'renderItemSideBlock'):
        plugin.renderItemSideBlock(request, landing, args)

    if script and landing:
        request.write("</body></html>")
    if not script:
        t.render(request, "item.mako", **args)