def _renderReportResponses(self, request, convId, convMeta, args):
    reportId = convMeta.get('reportId', None)
    args['convMeta'] = convMeta
    script = args["script"]
    myId = args["myId"]
    landing = not self._ajax

    if script:
        t.renderScriptBlock(request, "item-report.mako", "item_report",
                            landing, "#report-contents", "set", **args)

    if reportId:
        reportResponses = yield db.get_slice(reportId, "itemResponses")
        reportResponseKeys, toFetchEntities = [], []
        reportResponseActions = {}

        for response in reportResponses:
            userKey, responseKey, action = response.column.value.split(":")
            reportResponseKeys.append(responseKey)
            reportResponseActions[responseKey] = action

        fetchedResponses = yield db.multiget_slice(reportResponseKeys,
                                                   "items", ["meta"])
        fetchedResponses = utils.multiSuperColumnsToDict(fetchedResponses)

        args["reportId"] = reportId
        args["reportItems"] = fetchedResponses
        args["responseKeys"] = reportResponseKeys
        args["reportResponseActions"] = reportResponseActions

        # Show comments from the report only if I am the owner or the reporter
        if script and myId in [convMeta["owner"], convMeta["reportedBy"]]:
            t.renderScriptBlock(request, "item-report.mako", 'report_comments',
                                landing, '#report-comments', 'set', **args)
def fetchData(self, columns=None):
    if not columns:
        columns = ['basic']
    entities = yield db.multiget_slice(self.ids, "entities", columns)
    entities = utils.multiSuperColumnsToDict(entities)
    for entityId in entities:
        entity = Entity(entityId, entities[entityId])
        self.data[entityId] = entity
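# A minimal usage sketch of the method above, assuming it is the fetchData()
# of base.EntitySet used throughout this code and that it runs inside an
# @defer.inlineCallbacks generator.  The ids and field names are hypothetical,
# for illustration only:
#
#   entities = base.EntitySet(['user1', 'user2'])
#   yield entities.fetchData(columns=['basic'])
#   name = entities['user1'].basic['name']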
def sendInvitations(sender):
    cols = yield db.get_slice(sender, "userAuth")
    senderInfo = utils.columnsToDict(cols)
    senderOrgId = senderInfo["org"]
    senderId = senderInfo["user"]

    cols = yield db.multiget_slice([senderId, senderOrgId], "entities", ["basic"])
    entities = utils.multiSuperColumnsToDict(cols)

    emails = sys.stdin.readlines()
    emails = [x.strip() for x in emails]
    yield people._sendInvitations([], emails, entities[senderId],
                                  senderId, entities[senderOrgId])
def _renderDeleteUser(self, request, data=None):
    myId = request.getSession(IAuthInfo).username
    user = data['id']

    # An admin cannot delete himself.
    if user.id == myId:
        error_str = "You are the only administrator; you cannot delete yourself."
        request.write("$$.dialog.close('removeuser-dlg');"
                      "$$.alerts.error('%s')" % error_str)
    else:
        orgAdminNewGroups = []
        affectedGroups = []
        groups = yield db.get_slice(user.id, "entityGroupsMap")
        groupIds = [x.column.name.split(':')[1] for x in groups]
        groupAdmins = yield db.multiget_slice(groupIds, "entities", ['admins'])
        groupAdmins = utils.multiSuperColumnsToDict(groupAdmins)

        for group in groups:
            name, groupId = group.column.name.split(':')
            if len(groupAdmins[groupId].get('admins', {})) == 1 \
                    and user.id in groupAdmins[groupId]['admins']:
                orgAdminNewGroups.append((groupId, name))
            affectedGroups.append((groupId, name))

        apiKeys = yield db.get_slice(user.id, "entities", ['apikeys'])
        apiKeys = utils.supercolumnsToDict(apiKeys)
        if apiKeys.get('apikeys', {}).keys():
            apps = yield db.multiget_slice(apiKeys['apikeys'].keys(),
                                           "apps", ['meta'])
            apps = utils.multiSuperColumnsToDict(apps)
        else:
            apps = {}

        entities = base.EntitySet(user)
        args = {'affectedGroups': affectedGroups}
        args['orgAdminNewGroups'] = orgAdminNewGroups
        args['apps'] = apps
        args['userId'] = user.id
        args["entities"] = entities
        t.renderScriptBlock(request, 'admin.mako', "confirm_remove_user",
                            False, "#removeuser-dlg", "set", **args)
def _moveConversation(self, request, convIds, toFolder):
    """Move a conversation or conversations from one folder to another.

    Keyword Arguments:
    convIds: List of conversation ids which need to be moved.
    toFolder: The final destination of the above conversations.

    CF Changes:
    mConvFolders
    mUnreadConversations
    mAllConversations
    mDeletedConversations
    """
    myId = request.getSession(IAuthInfo).username

    convs = yield db.multiget_slice(convIds, "mConversations")
    convs = utils.multiSuperColumnsToDict(convs)
    for convId in convs:
        conv = convs.get(convId, {})
        if not conv:
            raise errors.InvalidMessage(convId)
        if myId not in conv.get('participants', {}):
            raise errors.MessageAccessDenied(convId)

        timeUUID = conv['meta']['uuid']
        val = "%s:%s" % ('u' if toFolder == 'unread' else 'r', convId)

        cols = yield db.get_slice(convId, 'mConvFolders', [myId])
        cols = utils.supercolumnsToDict(cols)
        for folder in cols[myId]:
            cf = self._folders[folder] if folder in self._folders else folder
            if toFolder != 'unread':
                if folder != 'mUnreadConversations':
                    col = yield db.get(myId, cf, timeUUID)
                    val = col.column.value
                    yield db.remove(myId, cf, timeUUID)
                    yield db.remove(convId, "mConvFolders", cf, myId)
            else:
                yield db.insert(myId, cf, "u:%s" % (convId), timeUUID)

        if toFolder == 'unread':
            val = "u:%s" % (convId)
            yield db.insert(convId, 'mConvFolders', '', 'mUnreadConversations', myId)
            yield db.insert(myId, 'mUnreadConversations', val, timeUUID)
        else:
            folder = self._folders[toFolder]
            yield db.insert(myId, folder, val, timeUUID)
            yield db.insert(convId, 'mConvFolders', '', folder, myId)
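# Sketch of the column-value convention the folder CFs above appear to use
# (inferred from this module, not an authoritative spec): each per-user folder
# row maps a conversation's timeUUID column to "<state>:<convId>", where the
# state flag is 'u' for unread and 'r' for read, e.g.:
#
#   timeUUID -> "u:<convId>"   # unread conversation
#   timeUUID -> "r:<convId>"   # read conversation
#
# _moveConversation() carries that flag along (or resets it to 'u') when a
# conversation changes folders.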
def getReason(self, convId, requesters, userId):
    conv = yield db.get_slice(convId, "items", ["meta"])
    conv = utils.supercolumnsToDict(conv)

    cols = yield db.multiget_slice(requesters, "entities", ["basic"])
    entities = utils.multiSuperColumnsToDict(cols)

    requesterId = requesters[0]
    if conv["meta"]["subType"] == "connection":
        reasonStr = '%s accepted your friend request' \
                    % (utils.userName(requesterId, entities[requesterId]))
    elif conv["meta"]["subType"] == "pendingConnection":
        reasonStr = "%s sent a friend request. " \
                    "Click <a href='/profile?id=%s'>here</a> to respond" \
                    % (utils.userName(requesterId, entities[requesterId]), requesterId)
    defer.returnValue(reasonStr)
def responses(myId, itemId, item, start=''):
    toFetchEntities = set()
    itemResponses = yield db.get_slice(itemId, "itemResponses",
                                       start=start, reverse=True,
                                       count=constants.COMMENTS_PER_PAGE + 1)
    nextPageStart = itemResponses[-1].column.name \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE else None
    itemResponses = itemResponses[:-1] \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE else itemResponses

    responseKeys = []
    for response in itemResponses:
        userKey, responseKey = response.column.value.split(":")
        responseKeys.append(responseKey)
        toFetchEntities.add(userKey)
    responseKeys.reverse()

    entities = base.EntitySet(toFetchEntities)
    d3 = db.multiget_slice(responseKeys + [itemId], "itemLikes", [myId])
    d2 = db.multiget_slice(responseKeys + [itemId], "items",
                           ["meta", "attachments"])
    d1 = entities.fetchData()

    fetchedItems = yield d2
    myLikes = yield d3
    yield d1

    fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
    fetchedLikes = utils.multiColumnsToDict(myLikes)

    # Do some error correction/consistency checking to ensure that the
    # response items actually exist.  I don't know of any reason why these
    # items may not exist.
    missingIds = [x for x, y in fetchedItems.items() if not y]
    if missingIds:
        yield _cleanupMissingComments(itemId, missingIds, itemResponses)

    ret = {'items': fetchedItems, 'entities': entities,
           'myLikes': fetchedLikes, 'responses': {itemId: responseKeys}}
    if nextPageStart:
        ret['oldest'] = utils.encodeKey(nextPageStart)
    defer.returnValue(ret)
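# The pagination idiom used above (and in several functions below) fetches one
# column more than a page can hold: if the extra column comes back, its name
# becomes the start key of the next page and the extra row is dropped.  A
# stripped-down sketch of the same idea, with hypothetical rowKey/columnFamily
# names:
#
#   PAGE = constants.COMMENTS_PER_PAGE
#   rows = yield db.get_slice(rowKey, columnFamily, count=PAGE + 1, reverse=True)
#   nextPageStart = rows[-1].column.name if len(rows) > PAGE else None
#   rows = rows[:PAGE]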
def _render(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax

    title = "Applications"
    args["title"] = title
    args["detail"] = "apps"

    if script and landing:
        t.render(request, "apps.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "apps.mako", "layout",
                            landing, "#mainbar", "set", **args)
    if script:
        self.setTitle(request, title)

    # XXX: Currently fetching all available apps under each category.
    #      In future implement pagination here.
    appIds = yield db.get_slice(myId, "entities", ["apikeys", "apps"], count=100)
    appIds = utils.supercolumnsToDict(appIds, timestamps=True)
    appsByMe = yield db.get_slice(myId, "appsByOwner", count=100)
    appIds["my"] = utils.columnsToDict(appsByMe)

    toFetchClients = set()
    for val in appIds.values():
        toFetchClients.update(val.keys())

    clients = yield db.multiget_slice(toFetchClients, "apps")
    clients = utils.multiSuperColumnsToDict(clients)

    toFetchEntities = set([client["meta"]["author"]
                           for client in clients.values()
                           if "author" in client.get("meta", {})])
    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()

    args.update({"entities": entities, "clients": clients, "apps": appIds})
    if script:
        t.renderScriptBlock(request, "apps.mako", "appListing", landing,
                            "#apps-contents", "set", **args)
    else:
        t.render(request, "apps.mako", **args)
def _gotConvs(results):
    docs = results.data.get('response', {}).get('docs', [])
    highlight.update(results.data.get('highlighting', {}))

    for index, item in enumerate(docs):
        itemId = item['id']
        parentId = item.get('parent', None)
        if parentId:
            toFetchItems.add(itemId)
            toFetchItems.add(parentId)
            matchedItemIds.append(itemId)
        else:
            toFetchItems.add(itemId)
            matchedItemIds.append(itemId)

    if toFetchItems and matchedItemIds:
        fetchedItems = yield db.multiget_slice(toFetchItems, "items",
                                               ["meta", "tags", "attachments"])
        fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
        for itemId, item in fetchedItems.items():
            toFetchEntities.add(item['meta']['owner'])
            if 'target' in item['meta']:
                toFetchEntities.update(item['meta']['target'].split(','))
        items.update(fetchedItems)

        extraDataDeferreds = []
        for itemId in toFetchItems:
            if itemId in matchedItemIds and itemId in items:
                meta = items[itemId]['meta']
                itemType = meta.get('type', 'status')
                if itemType in plugins:
                    d = plugins[itemType].fetchData(args, itemId)
                    extraDataDeferreds.append(d)

        result = yield defer.DeferredList(extraDataDeferreds)
        for success, ret in result:
            if success:
                toFetchEntities.update(ret)

    args['matchedItemCount'] = results.data.get('response', {}).get('numFound', 0)
def updateData():
    def _getAllFiles():
        SKey = config.get('CloudFiles', 'SecretKey')
        AKey = config.get('CloudFiles', 'AccessKey')
        bucket = config.get('CloudFiles', 'Bucket')
        conn = S3Connection(AKey, SKey)
        bucket = conn.get_bucket(bucket)
        files = []
        for key in bucket.list():
            if not key.name.endswith('/'):
                files.append(key.name)
        return files

    files = yield threads.deferToThread(_getAllFiles)
    S3fileIds = [x.split("/")[2] for x in files]
    log.msg("Fetched %d files" % (len(S3fileIds)))

    log.msg("Fetching info about all the files")
    d = db.multiget_slice(S3fileIds, "files")

    files_map = {}
    for f in files:
        org, owner, fileId = f.split("/")
        files_map[fileId] = (owner, org)

    res = yield d
    ids = utils.multiSuperColumnsToDict(res)
    fileIds = [x for x in ids.keys()
               if ids[x] and "owner" not in ids[x]["meta"]]
    log.msg("Out of %d S3 files, found %d files in 'files' in old format"
            % (len(S3fileIds), len(fileIds)))

    updated_file_meta = {}
    for fileId in fileIds:
        owner, org = files_map[fileId]
        updated_file_meta[fileId] = {"files": {"meta": {"owner": owner}}}

    log.msg(updated_file_meta)
    yield db.batch_mutate(updated_file_meta)
def _reply(self, request):
    """Commit a new message in reply to an existing conversation.

    Creates a new message, uploads any attachments, updates the conversation
    meta info and finally renders the message for the user.

    Keyword Arguments:
    convId: conversation id to which this user is replying.
    body: The content of the reply.

    CF Changes:
    mConversations
    mConvMessages
    attachmentVersions
    """
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    myOrgId = args['orgId']

    convId = utils.getRequestArg(request, 'id')
    recipients, body, subject, convId = self._parseComposerArgs(request)
    epoch = int(time.time())

    if not convId:
        raise errors.MissingParams([])

    cols = yield db.get_slice(convId, "mConversations", ['meta', 'participants'])
    cols = utils.supercolumnsToDict(cols)
    if not cols:
        raise errors.InvalidMessage(convId)

    subject = cols['meta'].get('subject', None)
    participants = cols.get('participants', {}).keys()
    if myId not in participants:
        raise errors.MessageAccessDenied(convId)

    timeUUID = uuid.uuid1().bytes
    snippet = self._fetchSnippet(body)
    meta = {'uuid': timeUUID, 'date_epoch': str(epoch), "snippet": snippet}

    attachments = yield self._handleAttachments(request)
    attach_meta = self._formatAttachMeta(attachments)

    messageId = yield self._newMessage(myId, timeUUID, body, epoch)
    yield self._deliverMessage(convId, participants, timeUUID, myId)
    yield db.insert(convId, "mConvMessages", messageId, timeUUID)
    yield db.batch_insert(convId, "mConversations",
                          {'meta': meta, 'attachments': attach_meta})

    # Currently, we don't support searching for private messages
    # self._indexMessage(convId, messageId, myOrgId, meta, attachments, body)

    # XXX: We currently only fetch the message we inserted.  Later we may fetch
    #      all messages delivered since we last rendered the conversation.
    cols = yield db.get_slice(convId, "mConversations")
    conv = utils.supercolumnsToDict(cols)
    participants = set(conv['participants'])

    mids = [messageId]
    messages = yield db.multiget_slice(mids, "messages", ["meta"])
    messages = utils.multiSuperColumnsToDict(messages)
    participants.update([messages[mid]['meta']['owner'] for mid in messages])

    people = base.EntitySet(participants)
    yield people.fetchData()

    value = myId
    data = {"entities": people}
    data["entities"].update({args['orgId']: args["org"]})
    data["orgId"] = args["orgId"]
    data["convId"] = convId
    data["message"] = body
    data["subject"] = subject
    data["_fromName"] = people[value].basic['name']

    users = participants - set([myId])
    if users:
        yield notifications.notify(users, ":MR", value, timeUUID, **data)

    args.update({"people": people})
    args.update({"messageIds": mids})
    args.update({'messages': messages})

    if script:
        onload = """
                 $('.conversation-reply').attr('value', '');
                 $('#msgreply-attach-uploaded').empty();
                 """
        t.renderScriptBlock(request, "message.mako",
                            "render_conversation_messages", landing,
                            ".conversation-messages-wrapper", "append", True,
                            handlers={"onload": onload}, **args)

    # Update the right side bar with any attachments the user uploaded
    args.update({"conv": conv})
    people = base.EntitySet(set(conv['participants']))
    yield people.fetchData()

    args.update({"people": people})
    args.update({"id": convId})
    args.update({"view": "message"})

    if script:
        onload = """
                 $('#conversation_add_member').autocomplete({
                        source: '/auto/users',
                        minLength: 2,
                        select: function( event, ui ) {
                            $('#conversation_recipients').attr('value', ui.item.uid)
                        }
                 });
                 """
        t.renderScriptBlock(request, "message.mako", "right", landing,
                            ".right-contents", "set", True,
                            handlers={"onload": onload}, **args)
    else:
        request.redirect('/messages')
        request.finish()
def _renderConversation(self, request):
    """Render a conversation.

    Keyword arguments:
    convId: The id of the conversation that needs to be rendered.
    """
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax

    convId = utils.getRequestArg(request, 'id', sanitize=False)
    if not convId:
        raise errors.MissingParams([])

    if script and landing:
        t.render(request, "message.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "message.mako", "layout",
                            landing, "#mainbar", "set", **args)

    cols = yield db.get_slice(convId, "mConversations")
    conv = utils.supercolumnsToDict(cols)
    participants = set(conv.get('participants', {}).keys())
    if not conv:
        raise errors.InvalidMessage(convId)
    if myId not in participants:
        raise errors.MessageAccessDenied(convId)

    timeUUID = conv['meta']['uuid']
    d1 = db.remove(myId, "mUnreadConversations", timeUUID)
    d2 = db.remove(convId, "mConvFolders", 'mUnreadConversations', myId)
    d3 = db.remove(myId, "latest", timeUUID, "messages")
    deferreds = [d1, d2, d3]
    yield defer.DeferredList(deferreds)
    deferreds = []

    cols = yield db.get_slice(convId, "mConvFolders", [myId])
    cols = utils.supercolumnsToDict(cols)
    for folder in cols[myId]:
        if folder in self._folders:
            folder = self._folders[folder]
        d = db.insert(myId, folder, "r:%s" % (convId), timeUUID)
        deferreds.append(d)

    inFolders = cols[myId].keys()
    # FIX: make sure that there will be an entry of convId in mConvFolders
    cols = yield db.get_slice(convId, "mConvMessages")
    mids = [col.column.value for col in cols]
    messages = yield db.multiget_slice(mids, "messages", ["meta"])
    messages = utils.multiSuperColumnsToDict(messages)
    s = yield defer.DeferredList(deferreds)

    participants.update([messages[mid]['meta']['owner'] for mid in messages])
    people = base.EntitySet(participants)
    yield people.fetchData()

    args.update({"people": people})
    args.update({"conv": conv})
    args.update({"messageIds": mids})
    args.update({'messages': messages})
    args.update({"id": convId})
    args.update({"flags": {}})
    args.update({"view": "message"})
    args.update({"menuId": "messages"})
    args.update({"inFolders": inFolders})

    if script:
        onload = """
                 $$.menu.selectItem("messages");
                 $('#mainbar .contents').addClass("has-right");
                 $('.conversation-reply').autogrow();
                 $('#message-reply-form').html5form({messages: 'en'});
                 """
        t.renderScriptBlock(request, "message.mako", "render_conversation",
                            landing, ".center-contents", "set", True,
                            handlers={"onload": onload}, **args)

        onload = """
                 $$.files.init('msgreply-attach');
                 $('#conversation_add_member').autocomplete({
                        source: '/auto/users',
                        minLength: 2,
                        select: function( event, ui ) {
                            $('#conversation_recipients').attr('value', ui.item.uid)
                        }
                 });
                 """
        t.renderScriptBlock(request, "message.mako", "right", landing,
                            ".right-contents", "set", True,
                            handlers={"onload": onload}, **args)

        yield utils.render_LatestCounts(request, landing)
    else:
        t.render(request, "message.mako", **args)
def _getUserItems(self, request, userId, start='', count=10):
    authinfo = request.getSession(IAuthInfo)
    myId = authinfo.username
    myOrgId = authinfo.organization

    toFetchItems = set()
    toFetchEntities = set()
    toFetchTags = set()
    toFetchResponses = set()
    toFetchCount = count + 1
    toFetchStart = utils.decodeKey(start) if start else ''
    fetchedUserItem = []
    responses = {}
    convs = []
    userItemsRaw = []
    userItems = []
    reasonStr = {}
    timestamps = {}
    items = {}
    nextPageStart = None
    args = {'myId': myId}

    relation = Relation(myId, [])
    yield relation.initGroupsList()

    toFetchEntities.add(userId)

    while len(convs) < toFetchCount:
        cols = yield db.get_slice(userId, "userItems", start=toFetchStart,
                                  reverse=True, count=toFetchCount)
        tmpIds = []
        for col in cols:
            convId = col.column.value.split(":")[2]
            if convId not in tmpIds and convId not in convs:
                tmpIds.append(convId)
        (filteredConvs, deletedConvs) = yield utils.fetchAndFilterConvs(
                                        tmpIds, relation, items, myId, myOrgId)

        for col in cols[0:count]:
            convId = col.column.value.split(":")[2]
            if len(convs) == count or len(fetchedUserItem) == count * 2:
                nextPageStart = col.column.name
                break
            if convId not in filteredConvs and convId not in convs:
                continue
            fetchedUserItem.append(col)
            if convId not in convs:
                convs.append(convId)

        if len(cols) < toFetchCount or nextPageStart:
            break
        if cols:
            toFetchStart = cols[-1].column.name

    if nextPageStart:
        nextPageStart = utils.encodeKey(nextPageStart)

    for col in fetchedUserItem:
        value = tuple(col.column.value.split(":"))
        timestamps[value] = col.column.timestamp / 1e6
        rtype, itemId, convId, convType, convOwnerId, commentSnippet = value
        commentSnippet = """<span class="snippet">"%s"</span>""" % (_(commentSnippet))
        toFetchEntities.add(convOwnerId)
        if rtype == 'I':
            toFetchItems.add(convId)
            toFetchResponses.add(convId)
            userItems.append(value)
        elif rtype == "L" and itemId == convId and convOwnerId != userId:
            reasonStr[value] = _("liked %s's %s")
            userItems.append(value)
        elif rtype == "L" and convOwnerId != userId:
            r = "answer" if convType == 'question' else 'comment'
            reasonStr[value] = _("liked") + " %s " % (commentSnippet) + \
                               _("%s " % r) + _("on %s's %s")
            userItems.append(value)
        elif rtype in ["C", 'Q'] and convOwnerId != userId:
            reasonStr[value] = "%s" % (commentSnippet) + _(" on %s's %s")
            userItems.append(value)

    itemResponses = yield db.multiget_slice(toFetchResponses, "itemResponses",
                                            count=2, reverse=True)
    for convId, comments in itemResponses.items():
        responses[convId] = []
        for comment in comments:
            userId_, itemKey = comment.column.value.split(':')
            if itemKey not in toFetchItems:
                responses[convId].insert(0, itemKey)
                toFetchItems.add(itemKey)
                toFetchEntities.add(userId_)

    items = yield db.multiget_slice(toFetchItems, "items",
                                    ["meta", "tags", "attachments"])
    items = utils.multiSuperColumnsToDict(items)
    args["items"] = items

    extraDataDeferreds = []
    for convId in convs:
        if convId not in items:
            continue
        meta = items[convId]["meta"]
        itemType = meta["type"]
        toFetchEntities.add(meta["owner"])
        if "target" in meta:
            toFetchEntities.update(meta["target"].split(','))
        toFetchTags.update(items[convId].get("tags", {}).keys())
        if itemType in plugins:
            d = plugins[itemType].fetchData(args, convId)
            extraDataDeferreds.append(d)

    result = yield defer.DeferredList(extraDataDeferreds)
    for success, ret in result:
        if success:
            toFetchEntities.update(ret)

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()

    tags = {}
    if toFetchTags:
        userOrgId = entities[userId].basic["org"]
        fetchedTags = yield db.get_slice(userOrgId, "orgTags", toFetchTags)
        tags = utils.supercolumnsToDict(fetchedTags)

    fetchedLikes = yield db.multiget(toFetchItems, "itemLikes", myId)
    myLikes = utils.multiColumnsToDict(fetchedLikes)

    data = {"entities": entities, "reasonStr": reasonStr, "tags": tags,
            "myLikes": myLikes, "userItems": userItems, "responses": responses,
            "nextPageStart": nextPageStart, "timestamps": timestamps}
    del args['myId']
    args.update(data)
    defer.returnValue(args)
def updateData():
    yield db.truncate('user_files')
    try:
        yield db.get('asdf', 'entityFeed_files', uuid.uuid1().bytes)
    except ttypes.InvalidRequestException as exception:
        log.msg(exception)
        raise Exception('entityFeed_files CF missing, create the CF')
    except ttypes.NotFoundException:
        pass

    entities = {}
    items = {}

    rows = yield db.get_range_slice('items', count=10000, reverse=True)
    for row in rows:
        itemId = row.key
        item = utils.supercolumnsToDict(row.columns)
        items[itemId] = item

    for itemId in items:
        item = items[itemId]
        log.msg(itemId)
        if 'meta' not in item:
            continue

        # Add org to all items
        try:
            owner = item['meta']['owner']
            col = yield db.get(owner, "entities", 'org', 'basic')
            ownerOrgId = col.column.value
            yield db.insert(itemId, 'items', ownerOrgId, 'org', 'meta')
        except Exception as e:
            if item['meta'].get('type', '') == 'feedback':
                yield db.insert(itemId, 'items', owner, 'org', 'meta')

        # Fix ACLs
        if 'parent' not in item['meta']:
            acl = item['meta']['acl']
            convOwner = item['meta']['owner']
            convId = itemId

            if acl == 'company':
                col = yield db.get(convOwner, "entities", "org", "basic")
                ownerOrgId = col.column.value
                acl = pickle.dumps({"accept": {"orgs": [ownerOrgId]}})
                yield db.insert(convId, 'items', acl, 'acl', 'meta')
            else:
                try:
                    acl = pickle.loads(acl)
                    if 'accept' in acl and 'friends' in acl['accept'] and \
                            isinstance(acl['accept']['friends'], bool):
                        del acl['accept']['friends']
                        acl = pickle.dumps(acl)
                        yield db.insert(convId, 'items', acl, 'acl', 'meta')
                except Exception:
                    log.msg('cannot unpack acl', acl)

        # Migrate files:
        # truncate user_files, then update user_files and entityFeed_files
        if 'owner' in item['meta'] and 'attachments' in item:
            ownerId = item['meta']['owner']
            if ownerId not in entities:
                cols = yield db.get_slice(ownerId, 'entities', ['basic'])
                entities.update({ownerId: utils.supercolumnsToDict(cols)})
            for attachmentId in item['attachments']:
                orgId = entities[ownerId]['basic']['org']
                timeuuid, name = item['attachments'][attachmentId].split(':')[:2]
                timeuuid = utils.decodeKey(timeuuid)
                val = '%s:%s:%s:%s' % (attachmentId, name, itemId, ownerId)
                yield db.insert(ownerId, "user_files", val, timeuuid)
                if 'parent' not in item['meta'] and item['meta'].get('acl', ''):
                    _entities = yield utils.expandAcl(ownerId, orgId,
                                                      item['meta']['acl'],
                                                      itemId, ownerId, True)
                    for entityId in _entities:
                        yield db.insert(entityId, "entityFeed_files", val, timeuuid)

        # Migrate items:
        # meta fields in "link", "event" and "poll"
        if item['meta'].get('type', None) in ['link', 'poll', 'event']:
            itemMeta = item['meta']
            itemType = itemMeta['type']
            updated = {}

            if itemType == "link":
                if 'url' in itemMeta:
                    updated['link_url'] = itemMeta['url']
                if 'title' in itemMeta:
                    updated['link_title'] = itemMeta['title']
                if 'summary' in itemMeta:
                    updated['link_summary'] = itemMeta['summary']
                if 'imgSrc' in itemMeta:
                    updated['link_imgSrc'] = itemMeta['imgSrc']
                if 'embedType' in itemMeta:
                    updated['link_embedType'] = itemMeta['embedType']
                if 'embedSrc' in itemMeta:
                    updated['link_embedSrc'] = itemMeta['embedSrc']
                if 'embedHeight' in itemMeta:
                    updated['link_embedHeight'] = itemMeta['embedHeight']
                if 'embedWidth' in itemMeta:
                    updated['link_embedWidth'] = itemMeta['embedWidth']
            elif itemType == 'poll':
                if 'question' in itemMeta:
                    updated['comment'] = itemMeta['question']
            else:
                print 'Found an event:', itemId

            if updated:
                yield db.batch_insert(itemId, 'items', {'meta': updated})

    #
    # Create poll indexes for feed and userItems
    #
    rows = yield db.get_range_slice('entities', count=10000, reverse=True)
    mutations = {}
    for row in rows:
        entityId = row.key
        entity = utils.supercolumnsToDict(row.columns)
        if entity['basic']['type'] != 'user':
            continue

        d1 = db.get_slice(entityId, 'feed', count=10000)
        d2 = db.get_slice(entityId, 'userItems', count=10000)

        results = yield d1
        for col in results:
            value = col.column.value
            if value in items:
                if items.get(value, {}).get('meta', {}).get('type', '') == 'poll':
                    mutations.setdefault(entityId, {})\
                             .setdefault('feed_poll', {})\
                             .update({col.column.name: value})

        results = yield d2
        for col in results:
            value = col.column.value
            responseType, itemId, convId, convType, others = value.split(':', 4)
            if convType == 'poll':
                mutations.setdefault(entityId, {})\
                         .setdefault('userItems_poll', {})\
                         .update({col.column.name: value})

    yield db.batch_mutate(mutations)

    # Group type changed from public-private to open-closed.
    rows = yield db.get_range_slice('entityGroupsMap', count=1000)
    groupIds = set()
    for row in rows:
        for col in row.columns:
            name_, groupId = col.column.name.split(':')
            groupIds.add(groupId)

    cols = yield db.multiget_slice(groupIds, "entities")
    groups = utils.multiSuperColumnsToDict(cols)
    for groupId in groups:
        access = groups[groupId]['basic']['access'].lower()
        if access == 'public':
            yield db.insert(groupId, 'entities', 'open', 'access', 'basic')
        elif access.lower() == 'private':
            yield db.insert(groupId, 'entities', 'closed', 'access', 'basic')

    # Fix entityGroupsMap
    rows = yield db.get_range_slice('entityGroupsMap', count=1000)
    for row in rows:
        entityId = row.key
        for col in row.columns:
            name_, groupId = col.column.name.split(':')
            if col.column.name != '%s:%s' % (groups[groupId]['basic']['name'].lower(), groupId):
                yield db.remove(entityId, 'entityGroupsMap', col.column.name)
                yield db.insert(entityId, 'entityGroupsMap', '',
                                '%s:%s' % (groups[groupId]['basic']['name'].lower(), groupId))
def _actions(self, request):
    """Perform an action on a conversation or a group of conversations.
    Update the UI based on the folder and the view that the user is in.

    Keyword arguments:
    convIds: A list of conversations upon which an action is to be taken.
    filterType: The folder view in which this action was taken.
    trash: The presence of this argument indicates that the action is to
        delete the selected conversations.
    archive: The presence of this argument indicates that the action is to
        archive the selected conversations.
    unread: The presence of this argument indicates that the action is to
        mark the selected conversations as unread.
    inbox: The presence of this argument indicates that the action is to
        move the selected conversations to the inbox.
    view: One of [message, messages]; indicates whether the user is performing
        the action on a single conversation or on multiple conversations.
    action: The actual action that the user wants to perform. One of
        ["inbox", "archive", "trash", "read", "unread"]. This is used if none
        of the above are mentioned.

    CF Changes:
    mConvFolders
    mUnreadConversations
    latest
    mAllConversations
    mDeletedConversations
    """
    (appchange, script, args, myId) = yield self._getBasicArgs(request)

    convIds = utils.getRequestArg(request, 'selected', multiValued=True) or []
    filterType = utils.getRequestArg(request, "filterType") or "all"
    trash = utils.getRequestArg(request, "trash") or None
    archive = utils.getRequestArg(request, "archive") or None
    unread = utils.getRequestArg(request, "unread") or None
    inbox = utils.getRequestArg(request, "inbox") or None
    view = utils.getRequestArg(request, "view") or "messages"

    if trash:
        action = "trash"
    elif archive:
        action = "archive"
    elif unread:
        action = "unread"
    elif inbox:
        action = "inbox"
    else:
        action = utils.getRequestArg(request, "action")

    if convIds:
        # Move the conversations based on action and update the CFs.
        if action in self._folders.keys():
            yield self._moveConversation(request, convIds, action)
        elif action == "read":
            # Remove it from the unread index and mark it as read in all
            # other folders.
            cols = yield db.multiget_slice(convIds, "mConversations")
            convs = utils.multiSuperColumnsToDict(cols)
            for convId in convs:
                conv = convs[convId]
                if not conv:
                    raise errors.InvalidMessage(convId)
                if myId not in conv.get('participants', {}):
                    raise errors.MessageAccessDenied(convId)

                timeUUID = conv['meta']['uuid']
                yield db.remove(myId, "mUnreadConversations", timeUUID)
                yield db.remove(convId, "mConvFolders", 'mUnreadConversations', myId)
                yield db.remove(myId, "latest", timeUUID, "messages")

                cols = yield db.get_slice(convId, "mConvFolders", [myId])
                cols = utils.supercolumnsToDict(cols)
                for folder in cols[myId]:
                    if folder in self._folders:
                        folder = self._folders[folder]
                    yield db.insert(myId, folder, "r:%s" % (convId), timeUUID)

    count = yield utils.render_LatestCounts(request)

    # XXX: Actions are not supported in non-js mode, so this check is unnecessary.
    if not self._ajax:
        # Not all actions on message(s) happen over ajax; for them do a redirect.
        request.redirect("/messages?type=%s" % filterType)
        request.finish()
    elif self._ajax and len(convIds) > 0:
        # Update the UI based on the actions and folder view.
        # For all actions other than read/unread, since the action won't be
        # available to the user in the same view (i.e., archive won't be on
        # the archive view), we can simply remove the conv.
        if action in ["inbox", "archive", "trash"]:
            if filterType != "unread":
                request.write("$('%s').remove();"
                              % ','.join(['#thread-%s' % convId for convId in convIds]))
            if view == "message":
                reason = _("Message moved to %s" % (action.capitalize()))
                if action == "archive":
                    reason = _("Message archived")
                request.write("""$$.fetchUri('/messages?type=%s');$$.alerts.info("%s");"""
                              % (filterType, _(reason)))
        elif action == "unread":
            query_template = """
                $('#thread-%s').removeClass('row-read').addClass('row-unread');
                $('#thread-%s .messaging-read-icon').removeClass('messaging-read-icon').addClass('messaging-unread-icon');
                $('#thread-%s .messaging-unread-icon').attr("title", "Mark this conversation as read");
                $('#thread-%s .messaging-unread-icon')[0].onclick = function(event) {
                    $.post('/ajax/messages/thread', 'action=read&selected=%s&filterType=%s', null, 'script')
                };
                """
            query = "".join([query_template % (convId, convId, convId, convId,
                                               convId, filterType)
                             for convId in convIds])
            if view == "message":
                request.write("""$$.fetchUri('/messages');$$.alerts.info("%s");"""
                              % ("Message marked as unread"))
            else:
                request.write(query)
        elif action == "read":
            # If we are in the unread view, remove the conv; else swap the styles.
            if filterType != "unread":
                query_template = """
                    $('#thread-%s').removeClass('row-unread').addClass('row-read');
                    $('#thread-%s .messaging-unread-icon').removeClass('messaging-unread-icon').addClass('messaging-read-icon');
                    $('#thread-%s .messaging-read-icon').attr("title", "Mark this conversation as unread")
                    $('#thread-%s .messaging-read-icon')[0].onclick = function(event) {
                        $.post('/ajax/messages/thread', 'action=unread&selected=%s&filterType=%s', null, 'script')
                    }
                    """
                query = "".join([query_template % (convId, convId, convId, convId,
                                                   convId, filterType)
                                 for convId in convIds])
                request.write(query)
            else:
                request.write("$('%s').remove()"
                              % ','.join(['#thread-%s' % convId for convId in convIds]))
def deleteItem(itemId, userId, orgId, item=None, conv=None):
    if not item:
        item = yield db.get_slice(itemId, "items", ["meta", "tags"])
        item = utils.supercolumnsToDict(item)

    meta = item["meta"]
    convId = meta.get("parent", itemId)
    itemUUID = meta["uuid"]
    itemOwnerId = meta["owner"]
    timestamp = str(int(time.time()))

    d = db.insert(itemId, "items", "deleted", "state", "meta")
    deferreds = [d]
    if not conv:
        conv = yield db.get_slice(convId, "items", ['meta', 'tags'])
        conv = utils.supercolumnsToDict(conv)
    plugin = plugins[conv['meta']['type']]

    if convId == itemId:
        # Delete from tagItems.
        for tagId in item.get("tags", {}):
            d = db.remove(tagId, "tagItems", itemUUID)
            deferreds.append(d)

        # Actually, delete the conversation.
        d1 = db.insert(itemOwnerId, 'deletedConvs', timestamp, itemId)
        d1.addCallback(lambda x: search.solr.delete(itemId))
        d1.addCallback(lambda x: files.deleteFileInfo(userId, orgId, itemId, item))
        d2 = plugin.delete(userId, convId, conv)
        deferreds.extend([d1, d2])

        itemResponses = yield db.get_slice(convId, "itemResponses")
        itemResponses = utils.columnsToDict(itemResponses)
        items = yield db.multiget_slice(itemResponses.values(), "items",
                                        ["meta", "attachments"])
        items = utils.multiSuperColumnsToDict(items)
        for responseId in items:
            deferreds.append(files.deleteFileInfo(userId, orgId, responseId,
                                                  items[responseId], conv))
    else:
        convType = conv["meta"]["type"]
        convOwnerId = conv["meta"]["owner"]
        convACL = conv["meta"]["acl"]

        d1 = db.insert(convId, 'deletedConvs', timestamp, itemId)
        d2 = db.remove(convId, 'itemResponses', itemUUID)
        d2.addCallback(lambda x: db.get_count(convId, "itemResponses"))
        d2.addCallback(lambda x: db.insert(convId, 'items',
                                           str(x), 'responseCount', 'meta'))
        d2.addCallback(lambda x: search.solr.delete(itemId))
        d3 = files.deleteFileInfo(userId, orgId, itemId, item, conv)
        deferreds.extend([d1, d2, d3])

        # Rollback changes to feeds caused by this comment
        d = db.get_slice(itemId, "itemLikes")

        def removeLikeFromFeeds(result):
            likes = utils.columnsToDict(result)
            removeLikeDeferreds = []
            for actorId, likeUUID in likes.items():
                likeUpdateVal = "L:%s:%s:%s" % (actorId, itemId, itemOwnerId)
                d1 = feed.deleteUserFeed(actorId, convType, likeUUID)
                d2 = Feed.unpush(actorId, orgId, convId, conv, likeUpdateVal)
                removeLikeDeferreds.extend([d1, d2])
            return defer.DeferredList(removeLikeDeferreds)

        d.addCallback(removeLikeFromFeeds)
        deferreds.append(d)

        # Delete comment from comment owner's userItems
        d = feed.deleteUserFeed(itemOwnerId, convType, itemUUID)
        deferreds.append(d)

        # Rollback updates done to comment owner's followers' feeds.
        responseUpdateVal = "Q:%s:%s" if convType == "question" else "C:%s:%s"
        responseUpdateVal = responseUpdateVal % (itemOwnerId, itemId)
        d = Feed.unpush(itemOwnerId, orgId, convId, conv, responseUpdateVal)
        deferreds.append(d)

    yield defer.DeferredList(deferreds)
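# The update values built above mirror the strings stored in the feed CFs
# elsewhere in this codebase; a short summary of the two formats used in
# deleteItem() (inferred from this function only, not an exhaustive list):
#
#   "L:<actorId>:<itemId>:<itemOwnerId>"   -> a like on the comment, undone via Feed.unpush
#   "C:<ownerId>:<itemId>" or "Q:<ownerId>:<itemId>"
#                                          -> the comment/answer push itself, undone via Feed.unpush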
def renderItem(self, request, toFeed=False):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    myOrgId = args["orgId"]

    convId, conv = yield utils.getValidItemId(request, "id", columns=['tags'])
    itemType = conv["meta"].get("type", None)

    if 'parent' in conv['meta']:
        raise errors.InvalidItem('conversation', convId)

    start = utils.getRequestArg(request, "start") or ''
    start = utils.decodeKey(start)

    args['convId'] = convId
    args['isItemView'] = True
    args['items'] = {convId: conv}
    meta = conv["meta"]
    owner = meta["owner"]

    relation = Relation(myId, [])
    yield defer.DeferredList([relation.initGroupsList(),
                              relation.initSubscriptionsList()])
    args["relations"] = relation

    if script and landing:
        t.render(request, "item.mako", **args)
    if script and appchange:
        t.renderScriptBlock(request, "item.mako", "layout",
                            landing, "#mainbar", "set", **args)

    args["entities"] = {}
    toFetchEntities = set()
    toFetchTags = set(conv.get("tags", {}).keys())

    plugin = plugins[itemType] if itemType in plugins else None
    if plugin:
        entityIds = yield plugin.fetchData(args)
        toFetchEntities.update(entityIds)

    toFetchEntities.add(conv['meta']['owner'])
    if "target" in conv["meta"]:
        toFetchEntities.update(conv['meta']['target'].split(','))

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()
    args["entities"] = entities

    renderers = []
    if script:
        t.renderScriptBlock(request, "item.mako", "conv_root", landing,
                            "#conv-root-%s > .conv-summary" % (convId),
                            "set", **args)

    convOwner = args["items"][convId]["meta"]["owner"]
    args["ownerId"] = convOwner

    if script:
        if itemType != "feedback":
            t.renderScriptBlock(request, "item.mako", "conv_owner", landing,
                                "#conv-avatar-%s" % convId, "set", **args)
        else:
            feedbackType = conv['meta']['subType']
            t.renderScriptBlock(request, "item.mako", "feedback_icon", landing,
                                "#conv-avatar-%s" % convId, "set",
                                args=[feedbackType])

    # A copy of this code for fetching comments is present in _responses.
    # Most changes here may need to be done there too.
    itemResponses = yield db.get_slice(convId, "itemResponses", start=start,
                                       reverse=True,
                                       count=constants.COMMENTS_PER_PAGE + 1)
    nextPageStart = itemResponses[-1].column.name \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE else None
    itemResponses = itemResponses[:-1] \
        if len(itemResponses) > constants.COMMENTS_PER_PAGE else itemResponses

    responseKeys = []
    for response in itemResponses:
        userKey, responseKey = response.column.value.split(":")
        responseKeys.append(responseKey)
        toFetchEntities.add(userKey)
    responseKeys.reverse()

    subscriptions = list(relation.subscriptions)
    likes = yield db.get_slice(convId, "itemLikes", subscriptions) \
        if subscriptions else defer.succeed([])
    toFetchEntities.update([x.column.name for x in likes])

    entities = base.EntitySet(toFetchEntities)
    d1 = entities.fetchData()
    d2 = db.multiget_slice(responseKeys, "items", ["meta", "attachments"])
    d3 = db.multiget_slice(responseKeys + [convId], "itemLikes", [myId])
    d4 = db.get_slice(myOrgId, "orgTags", toFetchTags) \
        if toFetchTags else defer.succeed([])

    yield d1
    fetchedItems = yield d2
    myLikes = yield d3
    fetchedTags = yield d4

    fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
    myLikes = utils.multiColumnsToDict(myLikes)
    fetchedTags = utils.supercolumnsToDict(fetchedTags)

    # Do some error correction/consistency checking to ensure that the
    # response items actually exist.  I don't know of any reason why these
    # items may not exist.
    missingIds = [x for x, y in fetchedItems.items() if not y]
    if missingIds:
        yield self._cleanupMissingComments(convId, missingIds, itemResponses)

    args["items"].update(fetchedItems)
    args["entities"].update(entities)
    args["myLikes"] = myLikes
    args["tags"] = fetchedTags
    args["responses"] = {convId: responseKeys}
    if nextPageStart:
        args["oldest"] = utils.encodeKey(nextPageStart)

    if script:
        t.renderScriptBlock(request, "item.mako", 'conv_footer', landing,
                            '#item-footer-%s' % convId, 'set', **args)

        t.renderScriptBlock(request, "item.mako", 'conv_tags', landing,
                            '#conv-tags-wrapper-%s' % convId, 'set',
                            handlers={"onload": "$('#conv-meta-wrapper-%s').removeClass('no-tags')" % convId} if toFetchTags else None,
                            **args)

        t.renderScriptBlock(request, "item.mako", 'conv_comments', landing,
                            '#conv-comments-wrapper-%s' % convId, 'set', **args)

        t.renderScriptBlock(request, "item.mako", 'conv_comment_form', landing,
                            '#comment-form-wrapper-%s' % convId, 'set', True,
                            handlers={"onload": "(function(obj){$$.convs.load(obj);})(this);"},
                            **args)

        numLikes = int(conv["meta"].get("likesCount", "0"))
        if numLikes:
            iLike = myId in args["myLikes"].get(convId, [])
            t.renderScriptBlock(request, "item.mako", 'conv_likes', landing,
                                '#conv-likes-wrapper-%s' % convId, 'set',
                                args=[convId, numLikes, iLike,
                                      [x.column.name for x in likes]],
                                entities=args['entities'])

        if plugin and hasattr(plugin, 'renderItemSideBlock'):
            plugin.renderItemSideBlock(request, landing, args)

    if script and landing:
        request.write("</body></html>")

    if not script:
        t.render(request, "item.mako", **args)
def _search(self, request):
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax
    myOrgId = args['orgId']

    filter_map = {'people': 'itemType'}
    term = utils.getRequestArg(request, "q")
    start = utils.getRequestArg(request, "start") or 0
    filters = utils.getRequestArg(request, 'filter', multiValued=True) or []
    filters = dict([(filter_map[x], x) for x in filters if x in filter_map])

    args["term"] = term
    nextPageStart = ''

    if not term:
        raise errors.MissingParams()

    try:
        start = int(start)
        if start < 0:
            raise ValueError
    except ValueError:
        raise errors.InvalidParamValue()

    if script and landing:
        t.render(request, "search.mako", **args)
    if script and appchange:
        t.renderScriptBlock(request, "search.mako", "layout",
                            landing, "#mainbar", "set", **args)

    count = SEARCH_RESULTS_PER_PAGE
    items = {}
    convs = set()
    highlighting = {}
    toFetchItems = []
    toFetchStart = start
    toFetchEntities = set()
    people = []

    relation = Relation(myId, [])
    yield defer.DeferredList([relation.initGroupsList(),
                              relation.initSubscriptionsList(),
                              relation.initFollowersList()])

    regex = re.compile("(.*?)([^\s]*\s*[^\s]*\s*[^\s]*\s*)(<em class='highlight'>.*<\/em>)(\s*[^\s]*\s*[^\s]*\s*[^\s]*)(.*)")

    res = yield solr.search(term, args['orgId'], count, toFetchStart,
                            filters={'itemType': 'people'})
    docs = res.data.get('response', {}).get('docs', [])
    for item in docs:
        entityId = item['id']
        people.append(entityId)
        toFetchEntities.add(entityId)

    while 1:
        res = yield solr.search(term, args['orgId'], count, toFetchStart)
        messages = []
        convItems = []
        numMatched = res.data.get('response', {}).get('numFound', 0)
        docs = res.data.get('response', {}).get('docs', [])
        highlighting.update(res.data.get('highlighting', {}))

        for index, item in enumerate(docs):
            itemId = item['id']
            parent = item.get('parent', None)
            position = toFetchStart + index
            if item.get('itemType', '') == "message":
                if (item.get('id'), parent) not in messages:
                    messages.append((item.get('id'), parent))
            elif item.get('itemType', '') == 'people':
                entityId = item.get('id')
                if entityId not in people:
                    people.append(entityId)
                    toFetchEntities.add(entityId)
            elif parent:
                convItems.append((itemId, parent, position))
                convs.add(parent)
            else:
                convItems.append((itemId, itemId, position))
                convs.add(item.get('id'))

        if convs:
            filteredConvs, deleted = yield utils.fetchAndFilterConvs(
                                        convs, relation, items, myId, myOrgId)
            for itemId, convId, position in convItems:
                if convId in filteredConvs and itemId not in toFetchItems:
                    toFetchItems.append(itemId)
                if len(toFetchItems) == count:
                    if position + 1 < numMatched:
                        nextPageStart = position + 1
                    break

        if len(toFetchItems) == count or len(docs) < count:
            break
        toFetchStart = toFetchStart + count

    _items = yield db.multiget_slice(toFetchItems, "items",
                                     ['meta', 'attachments', 'tags'])
    items.update(utils.multiSuperColumnsToDict(_items))

    for itemId, item in items.iteritems():
        toFetchEntities.add(item['meta']['owner'])
        if 'target' in item['meta']:
            toFetchEntities.update(item['meta']['target'].split(','))
        if itemId in highlighting and 'comment' in highlighting[itemId]:
            match = re.match(regex, unquote(highlighting[itemId]['comment'][0]))
            if match:
                comment = "".join(match.groups()[1:4])
                comment = comment + " …" if match.group(5) else comment
                items[itemId]['meta']['comment'] = comment

    entities = yield db.multiget_slice(toFetchEntities, "entities", ['basic'])
    entities = utils.multiSuperColumnsToDict(entities)

    for userId in people:
        if userId in highlighting and userId in entities:
            entities[userId]['basic']['reason'] = {}
            for key in highlighting[userId]:
                if key in entities[userId]['basic']:
                    entities[userId]['basic'][key] = " ".join(highlighting[userId][key])
                else:
                    entities[userId]['basic']['reason'][key] = highlighting[userId][key]

    fromFetchMore = True if start else False
    args['term'] = term
    args['items'] = items
    args['people'] = people
    args['entities'] = entities
    args['relations'] = relation
    args["conversations"] = toFetchItems
    args["nextPageStart"] = nextPageStart
    args['fromFetchMore'] = fromFetchMore
    args['fromSidebar'] = 'people' in filters.values()

    if script:
        onload = "(function(obj){$$.convs.load(obj);})(this);"
        if fromFetchMore:
            t.renderScriptBlock(request, "search.mako", "results", landing,
                                "#next-load-wrapper", "replace", True,
                                handlers={"onload": onload}, **args)
        else:
            t.renderScriptBlock(request, "search.mako", "results", landing,
                                "#search-results", "set", True,
                                handlers={"onload": onload}, **args)
        if 'people' not in filters.values() and people:
            t.renderScriptBlock(request, "search.mako", "_displayUsersMini",
                                landing, "#people-block", "set", True, **args)

    if script and landing:
        request.write("</body></html>")
    if not script:
        t.render(request, "search.mako", **args)
def _listConversations(self, request):
    """Render a time-sorted list of conversations in a particular view.

    Keyword Arguments:
    filterType: The folder view which is to be rendered. One of
        ['unread', 'all', 'archive', 'trash'].
    start: The base64 encoded timeUUID of the starting conversation id of
        the page that needs to be rendered.
    """
    (appchange, script, args, myId) = yield self._getBasicArgs(request)
    landing = not self._ajax

    filterType = utils.getRequestArg(request, 'type')
    folder = self._folders[filterType] if filterType in self._folders \
        else self._folders['inbox']
    start = utils.getRequestArg(request, "start") or ''
    start = utils.decodeKey(start)

    if script and landing:
        t.render(request, "message.mako", **args)
    if appchange and script:
        t.renderScriptBlock(request, "message.mako", "layout",
                            landing, "#mainbar", "set", **args)

    unread = []
    convs = []
    users = set()
    count = 10
    fetchCount = count + 1
    nextPageStart = ''
    prevPageStart = ''

    cols = yield db.get_slice(myId, folder, reverse=True,
                              start=start, count=fetchCount)
    for col in cols:
        x, convId = col.column.value.split(':')
        convs.append(convId)
        if x == 'u':
            unread.append(convId)
    if len(cols) == fetchCount:
        nextPageStart = utils.encodeKey(col.column.name)
        convs = convs[:count]

    ### XXX: try to avoid this extra fetch
    cols = yield db.get_slice(myId, folder, count=fetchCount, start=start)
    if cols and len(cols) > 1 and start:
        prevPageStart = utils.encodeKey(cols[-1].column.name)

    cols = yield db.multiget_slice(convs, 'mConversations')
    conversations = utils.multiSuperColumnsToDict(cols)
    m = {}
    for convId in conversations:
        if not conversations[convId]:
            continue
        participants = conversations[convId]['participants'].keys()
        users.update(participants)
        conversations[convId]['people'] = participants
        conversations[convId]['read'] = str(int(convId not in unread))
        messageCount = yield db.get_count(convId, "mConvMessages")
        conversations[convId]['count'] = messageCount
        m[convId] = conversations[convId]

    users = base.EntitySet(users)
    yield users.fetchData()

    args.update({"view": "messages"})
    args.update({"messages": m})
    args.update({"people": users})
    args.update({"mids": convs})
    args.update({"menuId": "messages"})
    args.update({"filterType": filterType or "all"})
    args['nextPageStart'] = nextPageStart
    args['prevPageStart'] = prevPageStart

    if script:
        onload = """
                 $$.menu.selectItem('%s');
                 $('#mainbar .contents').removeClass("has-right");
                 """ % args["menuId"]
        t.renderScriptBlock(request, "message.mako", "render_conversations",
                            landing, ".center-contents", "set", True,
                            handlers={"onload": onload}, **args)

        yield utils.render_LatestCounts(request, landing)
    else:
        t.render(request, "message.mako", **args)
def fetchMatchingEvents(self, request, args, entityId, count=5, start=None):
    """Find matching events for the user, org or group for a given time range.
    Events are sorted by their start time and then by their end time.
    """
    myId = args["myId"]
    convs = []
    invitations = []
    toFetchEntities = set()
    my_tz = timezone(args["me"].basic["timezone"])

    if not start:
        # Since we store times in UTC, find out the UTC time for the user's
        # 00:00 hours instead of UTC 00:00.
        utc_now = datetime.datetime.now(pytz.utc)
        mytz_now = utc_now.astimezone(my_tz)
        mytz_start = mytz_now
    else:
        mytz_start = start.replace(tzinfo=my_tz)

    args["start"] = mytz_start.strftime("%Y-%m-%d")

    timestamp = calendar.timegm(mytz_start.utctimetuple())
    timeUUID = utils.uuid1(timestamp=timestamp)
    start = timeUUID.bytes

    page = args.get("page", 1)
    cols = yield db.get_slice(entityId, "userAgenda", start=start,
                              count=(page * count) * 2)
    matched_events = [col.column.value for col in cols]
    res = yield db.multiget_slice(matched_events, "items", ["meta"])
    matched_events = utils.multiSuperColumnsToDict(res)

    to_sort_time_tuples = [(x, y["meta"]["event_startTime"],
                            y["meta"]["event_endTime"])
                           for x, y in matched_events.iteritems()]
    sorted_time_tuples = sorted(to_sort_time_tuples, key=itemgetter(1, 2))
    sorted_event_ids = [x[0] for x in sorted_time_tuples]
    events_in_this_page = sorted_event_ids[(page - 1) * count:page * count]

    if len(events_in_this_page) >= count:
        nextPage = page + 1
        args.update({'nextPage': nextPage})
    else:
        args.update({'nextPage': 0})
    args["prevPage"] = page - 1

    args["items"] = matched_events
    args["conversations"] = events_in_this_page

    # Now fetch all related entities: participants, owners, attendees,
    # invitees, groups etc.
    for convId in events_in_this_page:
        entityIds = yield self.fetchData(args, convId)
        toFetchEntities.update(entityIds)

    relation = Relation(myId, [])
    yield relation.initGroupsList()

    for event, event_meta in matched_events.iteritems():
        target = event_meta['meta'].get('target')
        if target:
            toFetchEntities.update(target.split(','))

    entities = base.EntitySet(toFetchEntities)
    yield entities.fetchData()

    args["entities"] = entities
    args["relations"] = relation
def _removeMembers(self, request):
    """Remove selected participants from this conversation.

    Keyword Arguments:
    members: A list of members who will be removed from this conversation.
    convId: The id of the conversation from which these members will be
        removed.

    CF Changes:
    mConversations
    latest
    """
    myId = request.getSession(IAuthInfo).username
    orgId = request.getSession(IAuthInfo).organization

    members, body, subject, convId = self._parseComposerArgs(request)
    if not (convId and members):
        raise errors.MissingParams([])

    conv = yield db.get_slice(convId, "mConversations")
    conv = utils.supercolumnsToDict(conv)
    if not conv:
        raise errors.InvalidMessage(convId)

    subject = conv['meta'].get('subject', None)
    participants = conv.get('participants', {}).keys()
    if myId not in participants:
        raise errors.MessageAccessDenied(convId)

    cols = yield db.multiget_slice(members, "entities", ['basic'])
    people = utils.multiSuperColumnsToDict(cols)

    members = set([userId for userId in people if people[userId] and
                   people[userId]["basic"]["org"] == orgId])
    members = members.intersection(participants)
    # If every participant was selected, keep the conversation owner.
    if len(members) == len(participants):
        members.remove(conv['meta']['owner'])

    deferreds = []
    if members:
        d = db.batch_remove({"mConversations": [convId]}, names=members,
                            supercolumn='participants')
        deferreds.append(d)

        cols = yield db.get_slice(convId, 'mConvFolders', members)
        cols = utils.supercolumnsToDict(cols)
        for recipient in cols:
            for folder in cols[recipient]:
                cf = self._folders[folder] if folder in self._folders else folder
                d = db.remove(recipient, cf, conv['meta']['uuid'])
                deferreds.append(d)

        # Update latest-messages count
        deferreds.append(db.batch_remove({"latest": members},
                                         names=[conv['meta']['uuid']],
                                         supercolumn='messages'))

    if deferreds:
        yield defer.DeferredList(deferreds)

    mailNotificants = set(participants) - members - set([myId])
    if mailNotificants and members:
        toFetchEntities = mailNotificants.union([myId, orgId]).union(members)
        entities = base.EntitySet(toFetchEntities)
        yield entities.fetchData()

        data = {"entities": entities}
        data["orgId"] = orgId
        data["convId"] = convId
        data["removedMembers"] = members
        data["subject"] = subject
        data["_fromName"] = entities[myId].basic['name']
        yield notifications.notify(mailNotificants, ":MA", myId, **data)
        itemType = items[convId]["meta"]["type"]
        if itemType in plugins:
            try:
                entityIds = yield plugins[itemType].fetchData(data, convId)
                toFetchEntities.update(entityIds)
            except Exception, e:
                log.err(e)
                convIds.remove(convId)

    # Fetch all required entities
    entities = base.EntitySet(toFetchEntities)
    entities_d = entities.fetchData()

    # Results of previously initiated fetches (items, tags, entities, likes)
    fetchedItems = yield items_d
    items.update(utils.multiSuperColumnsToDict(fetchedItems))

    # Filter out any deleted comments from the fetched items.
    for itemId, itemVal in items.items():
        if itemVal.get('meta', {}).get('state', None) == 'deleted':
            del items[itemId]

    fetchedTags = yield tags_d
    tags.update(utils.supercolumnsToDict(fetchedTags))

    fetchedMyLikes = yield myLikes_d
    myLikes.update(utils.multiColumnsToDict(fetchedMyLikes))

    #fetchedEntities = yield entities_d
    #entities.update(utils.multiSuperColumnsToDict(fetchedEntities))
    yield entities_d
def _getKeywordMatches(self, request, keyword, start='', count=10):
    args = {}
    authinfo = request.getSession(IAuthInfo)
    myId = authinfo.username
    orgId = authinfo.organization

    items = {}
    itemIds = []
    itemIdKeyMap = {}
    allFetchedItems = set()
    deleted = set()

    fetchStart = utils.decodeKey(start)
    fetchCount = count + 2

    while len(itemIds) < count:
        fetchedItemIds = []
        toFetchItems = set()
        results = yield db.get_slice(orgId + ":" + keyword, "keywordItems",
                                     count=fetchCount, start=fetchStart,
                                     reverse=True)
        for col in results:
            fetchStart = col.column.name
            itemAndParentIds = col.column.value.split(':')
            itemIdKeyMap[itemAndParentIds[0]] = fetchStart
            fetchedItemIds.append(itemAndParentIds[0])
            for itemId in itemAndParentIds:
                if itemId not in allFetchedItems:
                    toFetchItems.add(itemId)
                    allFetchedItems.add(itemId)

        if toFetchItems:
            fetchedItems = yield db.multiget_slice(toFetchItems, "items",
                                                   ["meta", "attachments"])
            fetchedItems = utils.multiSuperColumnsToDict(fetchedItems)
            items.update(fetchedItems)

        for itemId in fetchedItemIds:
            item = items[itemId]
            if not 'meta' in item:
                continue

            state = item['meta'].get('state', 'published')
            if state == 'deleted':
                deleted.add(itemIdKeyMap[itemId])
            elif utils.checkAcl(myId, orgId, True, None, item['meta']):
                itemIds.append(itemId)

        if len(results) < fetchCount:
            break

    if len(itemIds) > count:
        nextPageStart = utils.encodeKey(itemIdKeyMap[itemIds[-1]])
        itemIds = itemIds[:-1]
    else:
        nextPageStart = None

    dd = db.batch_remove({'keywordItems': [orgId + ':' + keyword]},
                         names=deleted) if deleted else defer.succeed([])

    args.update({'items': items, 'myId': myId})

    toFetchEntities = set()
    extraDataDeferreds = []
    for itemId in itemIds:
        item = items[itemId]
        itemMeta = item['meta']
        toFetchEntities.add(itemMeta['owner'])
        if 'target' in itemMeta:
            toFetchEntities.update(itemMeta['target'].split(','))
        if 'parent' in itemMeta:
            parentId = itemMeta['parent']
            if parentId in items:
                toFetchEntities.add(items[parentId]['meta']['owner'])

        itemType = itemMeta.get('type', 'status')
        if itemType in plugins:
            d = plugins[itemType].fetchData(args, itemId)
            extraDataDeferreds.append(d)

    result = yield defer.DeferredList(extraDataDeferreds)
    for success, ret in result:
        if success:
            toFetchEntities.update(ret)

    fetchedEntities = {}
    if toFetchEntities:
        fetchedEntities = base.EntitySet(toFetchEntities)
        yield fetchedEntities.fetchData()

    yield dd
    args.update({'entities': fetchedEntities, 'matches': itemIds,
                 'nextPageStart': nextPageStart})
    defer.returnValue(args)
def userFiles(myId, entityId, myOrgId, start='', end='', fromFeed=True):
    allItems = {}
    hasPrevPage = False   # Do we have another page before the current one?
    nextPageStart = ''    # Start item for the next page
    accessibleFiles = []
    accessibleItems = []
    toFetchEntities = set()
    count = constants.FILES_PER_PAGE
    toFetchCount = count + 1

    relation = Relation(myId, [])
    yield relation.initGroupsList()

    # Fetch files owned by entityId or files that were part of entityId's feed.
    cf = 'entityFeed_files' if fromFeed else 'user_files'

    # @end is actually the start item of the next page.
    # If @end is set, we have to display @count items before @end.  For that
    # we fetch @count + 2 items before (and including) @end.  Of the extra items
    # fetched, one item helps us determine if there is another page before this
    # and the other one is the start of the next page.
    if end:
        start = end
        reverse = False
        toFetchCount += 1
    else:
        reverse = True

    while 1:
        files = yield db.get_slice(entityId, cf, count=toFetchCount,
                                   start=start, reverse=reverse)
        files = utils.columnsToDict(files, True)

        toFetchItems = []
        for tuuid in files:
            if len(files[tuuid].split(':')) == 4:
                fid, name, itemId, attachmentId = files[tuuid].split(':')
                toFetchItems.append(itemId)
        toFetchItems = [itemId for itemId in toFetchItems
                        if itemId not in accessibleItems]

        if toFetchItems:
            items = yield db.multiget_slice(toFetchItems, "items", ["meta"])
            items = utils.multiSuperColumnsToDict(items)
            toFetchConvIds = [items[itemId]['meta']['parent'] for itemId in items
                              if 'parent' in items[itemId]['meta']
                              and items[itemId]['meta']['parent'] not in allItems]
            if toFetchConvIds:
                convs = yield db.multiget_slice(toFetchConvIds, "items", ["meta"])
                convs = utils.multiSuperColumnsToDict(convs)
                allItems.update(convs)
            allItems.update(items)

            for itemId in items:
                if 'parent' in items[itemId]['meta']:
                    convId = items[itemId]['meta']['parent']
                    acl = allItems[convId]['meta']['acl']
                else:
                    acl = items[itemId]['meta']['acl']
                    convId = itemId
                if utils.checkAcl(myId, myOrgId, False, relation,
                                  allItems[convId]['meta']):
                    accessibleItems.append(itemId)

        for tuuid in files:
            if len(files[tuuid].split(':')) == 4:
                fid, name, itemId, ownerId = files[tuuid].split(':')
                if itemId in accessibleItems:
                    accessibleFiles.append((tuuid,
                                            (fid, urlsafe_b64decode(name),
                                             itemId, ownerId, allItems[itemId])))
                    toFetchEntities.add(ownerId)

        if len(files) < toFetchCount or len(accessibleFiles) > count:
            break
        else:
            start = files.keys()[-1]

    if end:
        # We have enough items to have another page before this.
        if len(accessibleFiles) > count + 1:
            hasPrevPage = True
            accessibleFiles = accessibleFiles[:count + 1]

        # Revert the list to get most recent items first.
        accessibleFiles.reverse()

        # The last item is actually the first item of the next page.
        nextPageStart = accessibleFiles[-1][0]
        accessibleFiles = accessibleFiles[:-1]
    elif start:
        hasPrevPage = True   # XXX: may not always be true, but the edge case is OK

    if len(accessibleFiles) > count:
        nextPageStart = accessibleFiles[count][0]
        accessibleFiles = accessibleFiles[:count]

    defer.returnValue((accessibleFiles, hasPrevPage, nextPageStart,
                       toFetchEntities))
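# A rough sketch of how a caller might consume the values returned above.
# The surrounding code is hypothetical; only the return shape comes from
# userFiles() itself:
#
#   accessibleFiles, hasPrevPage, nextPageStart, ownerIds = \
#       yield userFiles(myId, entityId, myOrgId, start=start, fromFeed=True)
#   # hasPrevPage   -> whether to render a "newer files" link
#   # nextPageStart -> key to pass back as @start (or @end) for the adjacent page
#   # ownerIds      -> entity ids to resolve via base.EntitySet before rendering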