def deleteFileInfo(myId, orgId, itemId, item, conv=None):
    """Remove references to an item's attachments from the actor's
    user_files index and from entityFeed_files of every entity that
    could see the item.

    Keyword params:
    @myId - Id of the actor
    @orgId - OrgId of the actor
    @itemId - Id of the item whose attachment references are removed
    @item - The item itself (dict of supercolumns)
    @conv - The parent conversation; fetched from 'items' when not
            given and the item is a response (has 'parent' in meta)
    """
    if 'parent' in item['meta']:
        # Responses inherit visibility from the parent conversation's ACL.
        if not conv:
            conv = yield db.get_slice(item['meta']['parent'], 'items', ['meta'])
            conv = utils.supercolumnsToDict(conv)
        convId = item['meta']['parent']
    else:
        conv = item
        convId = itemId

    # NOTE(review): ACL is pickled by our own application before storage;
    # pickle.loads on untrusted data would be unsafe — confirm no external
    # writer can populate this column.
    acl = pickle.loads(conv['meta']['acl'])
    allowedGroups = acl.get('accept', {}).get('groups', [])
    deniedGroups = acl.get('deny', {}).get('groups', [])
    groups = [x for x in allowedGroups if x not in deniedGroups]
    allowedOrgs = acl.get('accept', {}).get('orgs', [])

    ownerId = conv['meta']['owner']
    entityIds = [myId]
    entityIds.extend(groups)
    entityIds.extend(allowedOrgs)
    entityIds_ = yield utils.expandAcl(myId, orgId, conv['meta']['acl'],
                                       convId, ownerId, True)
    entityIds.extend(entityIds_)

    deferreds = []
    for attachmentId in item.get('attachments', {}):
        col = yield db.get_slice(attachmentId, 'attachmentVersions', count=1)
        if not col:
            # Fix: the original indexed col[0] unconditionally and raised
            # IndexError for attachments with no stored versions.
            continue
        tuuid = col[0].column.name
        deferreds.append(db.remove(myId, "user_files", tuuid))
        #TODO: use batch remove/batch mutate
        for entityId in entityIds:
            deferreds.append(db.remove(entityId, "entityFeed_files", tuuid))
    if deferreds:
        yield defer.DeferredList(deferreds)
def pushfileinfo(myId, orgId, itemId, item, conv=None):
    """Index an item's attachments: into user_files for the actor and
    into entityFeed_files for every entity allowed to see the item.

    Keyword params:
    @myId - Id of the actor
    @orgId - OrgId of the actor
    @itemId - Id of the item carrying the attachments
    @item - The item itself (dict of supercolumns)
    @conv - The parent conversation; fetched from 'items' when not
            given and the item is a response (has 'parent' in meta)
    """
    if 'parent' in item['meta']:
        # Responses inherit visibility from the parent conversation's ACL.
        if not conv:
            conv = yield db.get_slice(item['meta']['parent'], "items", ["meta"])
            conv = utils.supercolumnsToDict(conv)
        convId = item['meta']['parent']
    else:
        convId = itemId
        conv = item

    # NOTE(review): ACL is pickled by our own application before storage.
    acl = pickle.loads(conv['meta']['acl'])
    allowedGroups = acl.get('accept', {}).get('groups', [])
    deniedGroups = acl.get('deny', {}).get('groups', [])
    groups = [x for x in allowedGroups if x not in deniedGroups]
    allowedOrgs = acl.get('accept', {}).get('orgs', [])

    ownerId = conv['meta']['owner']
    entityIds = [myId]
    entityIds.extend(groups)
    entityIds.extend(allowedOrgs)
    entityIds_ = yield utils.expandAcl(myId, orgId, conv['meta']['acl'],
                                       convId, ownerId, True)
    entityIds.extend(entityIds_)

    for attachmentId in item.get('attachments', {}):
        # Attachment meta is stored as "name:size:ftype".
        name, size, ftype = item['attachments'][attachmentId].split(':')
        cols = yield db.get_slice(attachmentId, "attachmentVersions", count=1)
        if not cols:
            # Fix: the original indexed cols[0] unconditionally and raised
            # IndexError for attachments with no stored versions.
            continue
        tuuid = cols[0].column.name
        value = '%s:%s:%s:%s' % (attachmentId, name, itemId, ownerId)
        #TODO: use batch remove/batch mutate
        yield db.insert(myId, "user_files", value, tuuid)
        for entityId in entityIds:
            yield db.insert(entityId, "entityFeed_files", value, tuuid)
def pushToOthersFeed(userKey, orgId, timeuuid, itemKey, parentKey, acl,
                     responseType, itemType, convOwner, others=None,
                     tagId='', entities=None, promoteActor=True):
    """Fan an update out to the feeds of everyone covered by the ACL.

    When @others is not given, the recipient list is computed by
    expanding @acl.  Each recipient's feed is promoted, except the
    actor's own feed, which is promoted only when @promoteActor is set.
    """
    recipients = others
    if not recipients:
        recipients = yield utils.expandAcl(userKey, orgId, acl,
                                           parentKey, convOwner)
    for recipient in recipients:
        # Promote for everyone else; for the actor only when requested.
        shouldPromote = (recipient != userKey) or promoteActor
        yield pushToFeed(recipient, timeuuid, itemKey, parentKey,
                         responseType, itemType, convOwner, userKey,
                         tagId, entities, promote=shouldPromote)
def updateData():
    """One-off data migration.

    Stages, in order: rebuild the user_files / entityFeed_files indexes,
    stamp an 'org' column onto every item, convert legacy ACLs to the
    pickled format, migrate link/poll/event meta fields, build the poll
    indexes for feed and userItems, rename group access levels from
    public/private to open/closed, and repair entityGroupsMap keys.
    """
    # user_files is rebuilt from scratch below, so drop the old data first.
    yield db.truncate('user_files')
    # Sanity probe: fail fast if the entityFeed_files CF does not exist.
    # NotFoundException just means the probe row is absent, which is fine.
    try:
        yield db.get('asdf', 'entityFeed_files', uuid.uuid1().bytes)
    except ttypes.InvalidRequestException as exception:
        log.msg(exception)
        raise Exception('entityFeed_files CF missing, create the CF')
    except ttypes.NotFoundException:
        pass

    entities = {}   # cache: ownerId -> entity 'basic' columns
    items = {}      # itemId -> item, loaded once for all passes below
    # NOTE(review): count=10000 caps the migration at 10k rows per CF —
    # confirm the keyspace is smaller than that before running.
    rows = yield db.get_range_slice('items', count=10000, reverse=True)
    for row in rows:
        itemId = row.key
        item = utils.supercolumnsToDict(row.columns)
        items[itemId] = item

    for itemId in items:
        item = items[itemId]
        log.msg(itemId)
        if 'meta' not in item:
            continue

        # Add org to all items
        try:
            owner = item['meta']['owner']
            col = yield db.get(owner, "entities", 'org', 'basic')
            ownerOrgId = col.column.value
            yield db.insert(itemId, 'items', ownerOrgId, 'org', 'meta')
        except Exception as e:
            # Feedback items appear to be owned by the org itself, so the
            # owner id doubles as the org id — TODO confirm.
            if item['meta'].get('type', '') == 'feedback':
                yield db.insert(itemId, 'items', owner, 'org', 'meta')

        # Fix ACLs (conversations only — responses have a 'parent')
        if 'parent' not in item['meta']:
            acl = item['meta']['acl']
            convOwner = item['meta']['owner']
            convId = itemId
            if acl == 'company':
                # Legacy plain-string ACL; rewrite as the pickled format.
                col = yield db.get(convOwner, "entities", "org", "basic")
                ownerOrgId = col.column.value
                acl = pickle.dumps({"accept":{"orgs":[ownerOrgId]}})
                yield db.insert(convId, 'items', acl, 'acl', 'meta')
            else:
                try:
                    acl = pickle.loads(acl)
                    # Drop the obsolete boolean 'friends' flag and
                    # rewrite only when it was actually present.
                    if 'accept' in acl and 'friends' in acl['accept'] and isinstance(acl['accept']['friends'], bool):
                        del acl['accept']['friends']
                        acl = pickle.dumps(acl)
                        yield db.insert(convId, 'items', acl, 'acl', 'meta')
                except :
                    log.msg('cannot unpack acl', acl)

        # Migrate files
        # truncate user_files
        # update user_files and entityFeed_files
        if 'owner' in item['meta'] and 'attachments' in item:
            ownerId = item['meta']['owner']
            if ownerId not in entities:
                cols = yield db.get_slice(ownerId, 'entities', ['basic'])
                entities.update({ownerId: utils.supercolumnsToDict(cols)})
            for attachmentId in item['attachments']:
                orgId = entities[ownerId]['basic']['org']
                # Attachment meta value starts with "timeuuid:name:...".
                timeuuid, name = item['attachments'][attachmentId].split(':')[:2]
                timeuuid = utils.decodeKey(timeuuid)
                val = '%s:%s:%s:%s' % (attachmentId, name, itemId, ownerId)
                yield db.insert(ownerId, "user_files", val, timeuuid)
                # Only conversations with an ACL are fanned out to
                # follower feeds.
                if 'parent' not in item['meta'] and item['meta'].get('acl', ''):
                    _entities = yield utils.expandAcl(ownerId, orgId, item['meta']['acl'], itemId, ownerId, True)
                    for entityId in _entities:
                        yield db.insert(entityId, "entityFeed_files", val, timeuuid)

        # Migrate items
        # Meta fields in "link", "event" and "poll"
        if item['meta'].get('type', None) in ['link', 'poll', 'event']:
            itemMeta = item['meta']
            itemType = itemMeta['type']
            updated = {}
            if itemType == "link":
                # Old un-prefixed link fields become link_* columns.
                if 'url' in itemMeta:
                    updated['link_url'] = itemMeta['url']
                if 'title' in itemMeta:
                    updated['link_title'] = itemMeta['title']
                if 'summary' in itemMeta:
                    updated['link_summary'] = itemMeta['summary']
                if 'imgSrc' in itemMeta:
                    updated['link_imgSrc'] = itemMeta['imgSrc']
                if 'embedType' in itemMeta:
                    updated['link_embedType'] = itemMeta['embedType']
                if 'embedSrc' in itemMeta:
                    updated['link_embedSrc'] = itemMeta['embedSrc']
                if 'embedHeight' in itemMeta:
                    updated['link_embedHeight'] = itemMeta['embedHeight']
                if 'embedWidth' in itemMeta:
                    updated['link_embedWidth'] = itemMeta['embedWidth']
            elif itemType == 'poll':
                if 'question' in itemMeta:
                    updated['comment'] = itemMeta['question']
            else:
                # Events are not migrated automatically; just report them.
                print 'Found an event:', itemId
            if updated:
                yield db.batch_insert(itemId, 'items', {'meta': updated})

    #
    # Create poll indexes for feed and userItems
    #
    rows = yield db.get_range_slice('entities', count=10000, reverse=True)
    mutations = {}
    for row in rows:
        entityId = row.key
        entity = utils.supercolumnsToDict(row.columns)
        if entity['basic']['type'] != 'user':
            continue
        d1 = db.get_slice(entityId, 'feed', count=10000)
        d2 = db.get_slice(entityId, 'userItems', count=10000)
        results = yield d1
        for col in results:
            value = col.column.value
            if value in items:
                if items.get(value, {}).get('meta', {}).get('type', '') == 'poll':
                    mutations.setdefault(entityId, {}).setdefault('feed_poll', {}).update({col.column.name: value})
        results = yield d2
        for col in results:
            value = col.column.value
            # userItems value format: responseType:itemId:convId:convType:rest
            responseType, itemId, convId, convType, others = value.split(':', 4)
            if convType == 'poll':
                mutations.setdefault(entityId, {}).setdefault('userItems_poll', {}).update({col.column.name: value})
    yield db.batch_mutate(mutations)

    #Group type changed from public-private to open-closed.
    rows = yield db.get_range_slice('entityGroupsMap', count=1000)
    groupIds = set()
    for row in rows:
        # Column name format: "<groupName>:<groupId>".
        # NOTE(review): split(':') breaks if a group name contains ':'
        # — verify names are colon-free.
        for col in row.columns:
            name_, groupId = col.column.name.split(':')
            groupIds.add(groupId)
    cols = yield db.multiget_slice(groupIds, "entities")
    groups = utils.multiSuperColumnsToDict(cols)
    for groupId in groups:
        access = groups[groupId]['basic']['access'].lower()
        if access == 'public':
            yield db.insert(groupId, 'entities', 'open', 'access', 'basic')
        elif access.lower() == 'private':
            yield db.insert(groupId, 'entities', 'closed', 'access', 'basic')

    #Fix entityGroupsMap
    rows = yield db.get_range_slice('entityGroupsMap', count=1000)
    for row in rows:
        entityId = row.key
        for col in row.columns:
            name_, groupId = col.column.name.split(':')
            # Re-key entries whose name part no longer matches the
            # group's current lower-cased name.
            if col.column.name != '%s:%s'%(groups[groupId]['basic']['name'].lower(), groupId):
                yield db.remove(entityId, 'entityGroupsMap', col.column.name)
                yield db.insert(entityId, 'entityGroupsMap', '', '%s:%s' %(groups[groupId]['basic']['name'].lower(), groupId))
def push(userId, orgId, convId, conv, timeUUID, updateVal, feeds=None, promote=True, promoteActor=False):
    """Push an item update to feeds.  This function adds an update to
    feedItems and if necessary promotes the conversation to the top of
    the feed.

    Keyword params:
    @userId - Id of the actor
    @orgId - orgId of the actor
    @convId - Id of the conversation which got updated
    @conv - The conversation that got updated
    @timeUUID - UUID1 that represents this update
    @updateVal - Value that is stored in feedItems
    @feeds - List of feeds that must be updated
    @promote - Promote the item up the conversation?
    @promoteActor - When promote is True, should this update promote
                    the conversation even on the actor's feed.
    """
    meta = conv['meta']
    convType = meta['type']
    if not feeds:
        feeds = yield utils.expandAcl(userId, orgId, meta['acl'], convId, meta['owner'])
        feeds.add(userId)

    # Per-feed existing updates for this conversation, newest first.
    userFeedItems = yield db.multiget_slice(feeds, "feedItems", super_column=convId, reverse=True)

    # XXX: Assumes that updateVal is of the following format
    #      <updateType>:<actorId>:<plugin specifics>

    def _updateFeedItems(userId, promoteToUser):
        # Scan one feed's existing updates and decide what to drop so
        # that the per-type and overall feedItems caps are respected.
        userUpdatesOfType = []   # Items in feed of type that promote
        userUpdateIds = []       # TimeUUIDs of all updates that promote
        allUpdatesOfType = []    # All items in feed by type
        oldest = None            # Most relevant item for removal
        oldFeedKeys = []
        updateType = updateVal.split(':', 1)[0]
        cur = userFeedItems.get(userId, [])
        for x in cur:
            uid = x.column.name
            val = x.column.value.split(':')
            if uid == timeUUID:
                # We already know this update!!!
                # NOTE(review): defer.returnValue raises; it unwinds out
                # of this plain helper AND out of the enclosing push
                # generator, making the whole push a no-op.
                defer.returnValue(None)
            rtype = val[0]
            # '!' is the dummy-conversation marker, 'I' the item itself;
            # neither counts towards the caps.
            if rtype not in ('!', 'I'):
                if rtype == updateType:
                    allUpdatesOfType.append(uid)
                    if val[1] == userId:
                        userUpdateIds.append(uid)
                        userUpdatesOfType.append(uid)
                # cur is newest-first, so this ends on the oldest update.
                oldest = uid
            oldFeedKeys.append(uid)
        curUpdateCount = len(cur)
        curUpdateCountForType = len(allUpdatesOfType)
        if curUpdateCountForType == MAXFEEDITEMSBYTYPE:
            # If this update isn't promoting the conversation up the feed,
            # we ought to make sure that we don't remove an item that is the
            # reason for the current position of conversation in the feed.
            if not promoteToUser and \
               (len(userUpdatesOfType) == MAXFEEDITEMSBYTYPE or \
                (allUpdatesOfType[-1] not in userUpdatesOfType and \
                 len(userUpdatesOfType) == MAXFEEDITEMSBYTYPE - 1)):
                oldest = userUpdatesOfType[-2]
            else:
                oldest = allUpdatesOfType[-1]
        if not promoteToUser and \
           (len(userUpdateIds) == MAXFEEDITEMS - 1 or \
            (oldest not in userUpdateIds and \
             len(userUpdateIds) == MAXFEEDITEMS - 2)):
            oldest = userUpdateIds[-2]
        # Only remove something when a cap is actually hit.
        feedItemToRemove = oldest if curUpdateCountForType == MAXFEEDITEMSBYTYPE \
                                     or curUpdateCount == MAXFEEDITEMS \
                                     else None
        # First non-'I' update on a feed needs a dummy conv entry.
        insertConv = True if curUpdateCount == 0 and updateType != 'I' else False
        return oldFeedKeys, feedItemToRemove, insertConv

    # Fetch list of changes to feedItems
    removeFeedKeys = {}
    removeFeedItems = {}
    insertDummyConvs = []
    for feedId in feeds:
        promoteToUser = promote and (feedId != userId or promoteActor)
        (oldFeedKeys, feedItemToRemove, insertConv) = \
                        _updateFeedItems(feedId, promoteToUser)
        if oldFeedKeys and promoteToUser:
            removeFeedKeys[feedId] = oldFeedKeys
        if feedItemToRemove:
            removeFeedItems[feedId] = [feedItemToRemove]
        if insertConv:
            insertDummyConvs.append(feedId)

    # Update feedItems
    feedItemsMutations = {}
    feedItemsRemovalMutations = {}
    timestamp = int(time.time() * 1e6)
    for feedId in feeds:
        mutations = {}
        keys = removeFeedKeys.get(feedId, None)
        if keys:
            # Drop the conversation's old positions from the feed.
            predicate = ttypes.SlicePredicate(column_names=keys)
            deletion = ttypes.Deletion(timestamp, predicate=predicate)
            mutations['feed'] = [deletion]
        if feedId in insertDummyConvs:
            # '!' marks a dummy entry holding the conversation itself.
            dummyValue = ':'.join(['!', meta['owner'], convId])
            mutations['feedItems'] = {convId: {meta['uuid']: dummyValue,
                                               timeUUID: updateVal}}
        else:
            mutations['feedItems'] = {convId: {timeUUID: updateVal}}
        feedItemsMutations[feedId] = mutations
        feedItemKey = removeFeedItems.get(feedId, None)
        if feedItemKey:
            predicate = ttypes.SlicePredicate(column_names=feedItemKey)
            deletion = ttypes.Deletion(timestamp, convId, predicate=predicate)
            feedItemsRemovalMutations[feedId] = {'feedItems': [deletion]}

    # Promote items in all feeds
    feedMutations = {}
    for feedId in feeds:
        promoteToUser = promote and (feedId != userId or promoteActor)
        if promoteToUser:
            mutations = {'feed': {timeUUID: convId}}
            # Plugins with an index get a parallel feed_<type> CF entry.
            if convType in plugins and plugins[convType].hasIndex:
                mutations['feed_%s' % convType] = {timeUUID: convId}
            feedMutations[feedId] = mutations

    yield db.batch_mutate(feedItemsMutations)
    yield db.batch_mutate(feedItemsRemovalMutations)
    yield db.batch_mutate(feedMutations)
def unpush(userId, orgId, convId, conv, updateVal, feeds=None):
    """Remove an update (identified by @updateVal) from feedItems in
    every affected feed, demote or remove the conversation from the
    feed (and its per-type index), and reposition it on the next most
    recent remaining update when there is one.
    """
    meta = conv['meta']
    convType = meta['type']
    if not feeds:
        feeds = yield utils.expandAcl(userId, orgId, meta['acl'], convId, meta['owner'])
        feeds.add(userId)

    # Per-feed existing updates for this conversation, newest first.
    userFeedItems = yield db.multiget_slice(feeds, "feedItems", super_column=convId, reverse=True)

    updateType = updateVal.split(':', 1)[0]
    if updateType == 'T':
        # Tag updates are matched field-wise below, not by full value.
        updateValParts = updateVal.split(':')

    hasIndex = False
    if convType in plugins and plugins[convType].hasIndex:
        hasIndex = True
        indexColFamily = 'feed_' + convType

    timestamp = int(time.time() * 1e6)
    removeMutations = {}
    insertMutations = {}
    for feedId in feeds:
        cols = userFeedItems[feedId]
        if not cols:
            continue
        updatesCount = len(cols)
        # latest/second: two newest real updates; pseudoFeedTime: the '!'
        # dummy entry when it is the only other column.
        latest, second, pseudoFeedTime = None, None, None
        for col in cols:
            timeUUID = col.column.name
            val = col.column.value
            valUpdateType = val.split(":", 1)[0]
            if valUpdateType != '!':
                if latest and not second:
                    second = timeUUID
                if not latest:
                    latest = timeUUID
            elif updatesCount == 2 and valUpdateType == "!":
                pseudoFeedTime = timeUUID
        for col in cols:
            timeUUID = col.column.name
            val = col.column.value
            valParts = val.split(':')
            # Tag updates ('T') match on tagId and tagged-by fields;
            # everything else matches on the exact stored value.
            if (updateType == 'T' and val.startswith('T:') and len(valParts) == 5 and updateValParts[2] == valParts[2] and updateValParts[4] == valParts[4]) or (val == updateVal):
                removals = {}
                # Remove the update from feedItems.  If this is the only
                # update then remove the entire super column
                if not pseudoFeedTime:
                    predicate = ttypes.SlicePredicate(column_names=[timeUUID])
                    superCol = convId
                else:
                    predicate = ttypes.SlicePredicate(column_names=[convId])
                    superCol = None
                feedItemsDeletion = ttypes.Deletion(timestamp, superCol, predicate)
                removals['feedItems'] = [feedItemsDeletion]
                # If this is the latest update, remove conv from it's
                # current position in feed and feed indexes.
                feedRemoveKeys = []
                if latest == timeUUID:
                    feedRemoveKeys.append(timeUUID)
                if pseudoFeedTime:
                    feedRemoveKeys.append(pseudoFeedTime)
                if feedRemoveKeys:
                    feedPredicate = ttypes.SlicePredicate(column_names=feedRemoveKeys)
                    feedDeletion = ttypes.Deletion(timestamp, predicate=feedPredicate)
                    removals['feed'] = [feedDeletion]
                    if hasIndex:
                        removals[indexColFamily] = [feedDeletion]
                # Reposition feed to the next most recent update
                if latest == timeUUID and second:
                    insertMutations[feedId] = {'feed': {second: convId}}
                removeMutations[feedId] = removals
                # At most one matching update per feed.
                break
    yield db.batch_mutate(insertMutations)
    yield db.batch_mutate(removeMutations)