def recompute_unread(min_date=None):
    from v1.models import Inbox, Account, Comment, Message
    from v1.lib.db import queries
    from v1.lib.db.operators import desc
    from v1.lib.utils import fetch_things2

    def load_accounts(inbox_rel):
        accounts = set()
        q = inbox_rel._query(eager_load=False, data=False,
                             sort=desc("_date"))
        if min_date:
            q._filter(inbox_rel.c._date > min_date)
        for i in fetch_things2(q):
            accounts.add(i._thing1_id)
        return accounts

    accounts_m = load_accounts(Inbox.rel(Account, Message))
    for i, a in enumerate(accounts_m):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts_m), a)
        queries.get_unread_messages(a).update()
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()

    accounts = load_accounts(Inbox.rel(Account, Comment)) - accounts_m
    for i, a in enumerate(accounts):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts), a)
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()
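
# Usage sketch (assumes this is run from a maintenance shell such as
# `paster shell`; the one-week cutoff below is illustrative, not taken
# from the original code):
#
#     from datetime import datetime, timedelta
#     recompute_unread(min_date=datetime.utcnow() - timedelta(days=7))
#
# Passing no min_date walks every inbox relation ever created, which can
# take a very long time on a large installation.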
def setUp(self):
    name = "unit_tester_%s" % uuid.uuid4().hex
    self._password = uuid.uuid4().hex
    self._account = Account(name=name,
                            password=bcrypt_password(self._password))
    self._account._id = 1337

    c.cookies = Cookies()
    c.secure = True
    c.user_is_loggedin = True
    c.user = self._account
    c.oauth_user = None
    request.method = "POST"
@classmethod
def get_reports(cls, wrapped, max_user_reasons=20):
    """Get two lists of mod and user reports on the item."""
    if (wrapped.reported > 0 and
            (wrapped.can_ban or
             getattr(wrapped, "promoted", None) and c.user_is_sponsor)):
        from v1.models import SRMember

        reports = cls.for_thing(wrapped.lookups[0])

        query = SRMember._query(SRMember.c._thing1_id == wrapped.sr_id,
                                SRMember.c._name == "moderator")
        mod_dates = {rel._thing2_id: rel._date for rel in query}

        if g.automoderator_account:
            automoderator = Account._by_name(g.automoderator_account)
        else:
            automoderator = None

        mod_reports = []
        user_reports = []

        for report in reports:
            # always include AutoModerator reports
            if automoderator and report._thing1_id == automoderator._id:
                mod_reports.append(report)
            # include in mod reports if made after the user became a mod
            elif (report._thing1_id in mod_dates and
                    report._date >= mod_dates[report._thing1_id]):
                mod_reports.append(report)
            else:
                user_reports.append(report)

        # mod reports are returned as (reason, mod name) tuples
        mods = Account._byID([report._thing1_id
                              for report in mod_reports],
                             data=True, return_dict=True)
        mod_reports = [(getattr(report, "reason", None),
                        mods[report._thing1_id].name)
                       for report in mod_reports]

        # user reports are returned as (reason, count) tuples
        user_reports = Counter([getattr(report, "reason", None)
                                for report in user_reports])
        user_reports = user_reports.most_common(max_user_reasons)

        return mod_reports, user_reports
    else:
        return [], []
def _gift_using_cverbifys(self, recipient, months=1, thing_fullname=None,
                          proxying_for=None):
    with cverbifys_lock(c.user):
        if not c.user.employee and c.user.sodium_cverbifys < months:
            err = VerbifyError("INSUFFICIENT_CVERBIFYS")
            self.on_validation_error(err)

        note = None
        buyer = c.user
        if c.user.name.lower() in g.live_config["proxy_silding_accounts"]:
            note = "proxy-%s" % c.user.name
            if proxying_for:
                try:
                    buyer = Account._by_name(proxying_for)
                except NotFound:
                    pass

        send_gift(
            buyer=buyer,
            recipient=recipient,
            months=months,
            days=months * 31,
            signed=False,
            giftmessage=None,
            thing_fullname=thing_fullname,
            note=note,
        )

        if not c.user.employee:
            c.user.sodium_cverbifys -= months
            c.user._commit()
def batch_lookups(self):
    super(LinkUploader, self).batch_lookups()

    author_ids = [thing.author_id for thing in self.things
                  if hasattr(thing, 'author_id')]
    try:
        self.accounts = Account._byID(author_ids, data=True,
                                      return_dict=True)
    except NotFound:
        if self.use_safe_get:
            self.accounts = safe_get(Account._byID, author_ids,
                                     data=True, return_dict=True)
        else:
            raise

    sr_ids = [thing.sr_id for thing in self.things
              if hasattr(thing, 'sr_id')]
    try:
        self.srs = Subverbify._byID(sr_ids, data=True, return_dict=True)
    except NotFound:
        if self.use_safe_get:
            self.srs = safe_get(Subverbify._byID, sr_ids, data=True,
                                return_dict=True)
        else:
            raise
def get_authenticated_account(self):
    from v1.models import Account, NotFound, register

    try:
        authorization = request.environ.get("HTTP_AUTHORIZATION")
        username, password = parse_http_basic(authorization)
    except RequirementException:
        return None

    try:
        account = Account._by_name(username)
    except NotFound:
        if g.auth_trust_http_authorization:
            # note: we're explicitly allowing automatic re-registration of
            # _deleted accounts and login of _banned accounts here because
            # we're trusting you know what you're doing in an SSO situation
            account = register(username, password, request.ip)
        else:
            return None

    # if we're to trust the authorization headers, don't check passwords
    if g.auth_trust_http_authorization:
        return account

    # not all systems support bcrypt in the standard crypt
    if account.password.startswith("$2a$"):
        expected_hash = bcrypt.hashpw(password, account.password)
    else:
        expected_hash = crypt.crypt(password, account.password)

    if not constant_time_compare(expected_hash, account.password):
        return None

    return account
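
# A hedged illustration of the credential flow above. The header is the
# standard HTTP Basic form, "Basic " + base64("user:password"); the
# username and password here are made up:
#
#     import base64
#     creds = base64.b64encode("some_user:hunter2")
#     # request.environ["HTTP_AUTHORIZATION"] == "Basic " + creds
#
# With g.auth_trust_http_authorization enabled (an SSO-style deployment),
# the password is never checked and unknown usernames are auto-registered.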
def update_num_sildings(update_trophy=True, user_id=None):
    """Recount each account's paid sildings and store the total."""
    query = (select([sodium_table.c.paying_id,
                     sa_count(sodium_table.c.trans_id)])
             .where(sodium_table.c.trans_id.like('X%'))
             .group_by(sodium_table.c.paying_id)
             .order_by(sa_count(sodium_table.c.trans_id).desc()))

    if user_id:
        query = query.where(sodium_table.c.paying_id == str(user_id))

    rows = ENGINE.execute(query)
    total_updated = 0
    for paying_id, count in rows:
        try:
            a = Account._byID(int(paying_id), data=True)
            a.num_sildings = count
            a._commit()
            total_updated += 1
            # if 'server seconds paid for' are public, update silding trophies
            if update_trophy and a.pref_public_server_seconds:
                add_to_trophy_queue(a, "silding")
        except:
            # paying_id may not parse as an int or match an account
            g.log.debug("update_num_sildings: paying_id %s is invalid" %
                        paying_id)

    g.log.debug("update_num_sildings: updated %s accounts" % total_updated)
def send_system_message(user, subject, body, system_user=None,
                        distinguished='admin', repliable=False,
                        add_to_sent=True, author=None, signed=False):
    from v1.lib.db import queries

    if system_user is None:
        system_user = Account.system_user()
    if not system_user:
        g.log.warning("Can't send system message "
                      "- invalid system_user or g.system_user setting")
        return

    if not author:
        author = system_user

    item, inbox_rel = Message._new(author, user, subject, body,
                                   ip='0.0.0.0')
    item.distinguished = distinguished
    item.repliable = repliable
    item.display_author = system_user._id
    item.signed = signed
    item._commit()

    try:
        queries.new_message(item, inbox_rel, add_to_sent=add_to_sent)
    except MemcachedError:
        raise MessageError('verbify_inbox')
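
# Usage sketch (subject and body are illustrative; send_system_message
# falls back to Account.system_user() when system_user is None):
#
#     user = Account._by_name("some_user")
#     send_system_message(
#         user,
#         subject="Your account has been updated",
#         body="Details about the change go here.",
#     )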
def process_message(msgs, chan):
    """Update get_submitted(), the Links by author precomputed query.

    get_submitted() is a CachedResult which is stored in permacache. To
    update these objects we need to do a read-modify-write which requires
    obtaining a lock. Sharding these updates by author allows us to run
    multiple consumers (but ideally just one per shard) to avoid lock
    contention.

    """
    from v1.lib.db.queries import add_queries, get_submitted

    link_names = {msg.body for msg in msgs}
    links = Link._by_fullname(link_names, return_dict=False)

    print 'Processing %r' % (links,)

    links_by_author_id = defaultdict(list)
    for link in links:
        links_by_author_id[link.author_id].append(link)

    authors_by_id = Account._byID(links_by_author_id.keys())

    for author_id, links in links_by_author_id.iteritems():
        with g.stats.get_timer("link_vote_processor.author_queries"):
            author = authors_by_id[author_id]
            add_queries(
                queries=[
                    get_submitted(author, sort, 'all') for sort in SORTS],
                insert_items=links,
            )
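
# Each queue message body is a Link fullname (e.g. "t3_abc123"), so a
# producer only needs to enqueue the fullname and this consumer batches
# them up to refresh the per-author cached queries. A producer sketch,
# assuming a queue name like the one below (the exact binding, possibly
# sharded by author id per the docstring, is an assumption):
#
#     amqp.add_item('author_query_q', link._fullname)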
def set_up_comment_embed(sr, thing, showedits):
    try:
        author = Account._byID(thing.author_id) if thing.author_id else None
    except NotFound:
        author = None

    iso_timestamp = request.GET.get("created", "")

    c.embed_config = {
        "eventtracker_url": g.eventtracker_url or "",
        "anon_eventtracker_url": g.anon_eventtracker_url or "",
        "event_clicktracker_url": g.event_clicktracker_url or "",
        "created": iso_timestamp,
        "showedits": showedits,
        "thing": {
            "id": thing._id,
            "sr_id": sr._id,
            "sr_name": sr.name,
            "edited": edited_after(thing, iso_timestamp, showedits),
            # author is None when the comment has no author_id or the
            # account can't be found, so guard the attribute access
            "deleted": thing.deleted or (author is not None and
                                         author._deleted),
        },
        "comment_max_height": 200,
    }

    c.render_style = "iframe"
    c.user = UnloggedUser([c.lang])
    c.user_is_loggedin = False
    c.forced_loggedout = True
def charge_campaign(link, campaign):
    if charged_or_not_needed(campaign):
        return

    user = Account._byID(link.author_id)
    success, reason = authorize.charge_transaction(user, campaign.trans_id,
                                                   campaign._id)

    if not success:
        if reason == authorize.TRANSACTION_NOT_FOUND:
            # authorization hold has expired
            original_trans_id = campaign.trans_id
            campaign.trans_id = NO_TRANSACTION
            campaign._commit()
            text = ('voided expired transaction for %s: (trans_id: %d)'
                    % (campaign, original_trans_id))
            PromotionLog.add(link, text)
        return

    hooks.get_hook('promote.edit_campaign').call(link=link,
                                                 campaign=campaign)

    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.pending)

    emailer.queue_promo(link, campaign.total_budget_dollars,
                        campaign.trans_id)
    text = ('auth charge for campaign %s, trans_id: %d'
            % (campaign._id, campaign.trans_id))
    PromotionLog.add(link, text)
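
# A minimal sketch of how this might be driven from a billing cron. The
# accepted_campaigns() helper and its return shape are hypothetical; the
# point is that charged_or_not_needed() makes the call idempotent, so it
# is safe to sweep every live campaign:
#
#     for link, campaign in accepted_campaigns():
#         charge_campaign(link, campaign)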
def refund_campaign(link, camp, refund_amount, billable_amount,
                    billable_impressions):
    owner = Account._byID(camp.owner_id, data=True)
    success, reason = authorize.refund_transaction(
        owner, camp.trans_id, camp._id, refund_amount)
    if not success:
        text = ('%s $%s refund failed' % (camp, refund_amount))
        PromotionLog.add(link, text)
        g.log.debug(text + ' (reason: %s)' % reason)
        return False

    if billable_impressions:
        text = ('%s completed with $%s billable (%s impressions @ $%s).'
                ' %s refunded.' % (camp, billable_amount,
                                   billable_impressions,
                                   camp.bid_pennies / 100.,
                                   refund_amount))
    else:
        text = ('%s completed with $%s billable. %s refunded'
                % (camp, billable_amount, refund_amount))

    PromotionLog.add(link, text)
    camp.refund_amount = refund_amount
    camp._commit()
    queries.unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)

    return True
def _restrict_sr(sr):
    '''Return a cloudsearch-appropriate query string that restricts
    results to only contain results from sr

    '''
    if isinstance(sr, MultiVerbify):
        if not sr.sr_ids:
            raise InvalidQuery
        srs = ["sr_id:%s" % sr_id for sr_id in sr.sr_ids]
        return "(or %s)" % ' '.join(srs)
    elif isinstance(sr, DomainSR):
        return "site:'\"%s\"'" % sr.domain
    elif isinstance(sr, FriendsSR):
        if not c.user_is_loggedin or not c.user.friends:
            raise InvalidQuery
        # The query limit is roughly 8k bytes. Limit to 200 friends to
        # avoid getting too close to that limit
        friend_ids = c.user.friends[:200]
        friends = ["author_fullname:'%s'" %
                   Account._fullname_from_id36(v1utils.to36(id_))
                   for id_ in friend_ids]
        return "(or %s)" % ' '.join(friends)
    elif isinstance(sr, AllMinus):
        if not sr.exclude_sr_ids:
            raise InvalidQuery
        exclude_srs = ["sr_id:%s" % sr_id for sr_id in sr.exclude_sr_ids]
        return "(not (or %s))" % ' '.join(exclude_srs)
    elif not isinstance(sr, FakeSubverbify):
        return "sr_id:%s" % sr._id

    return None
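
# Example outputs, derived from the branches above (ids illustrative):
#
#     _restrict_sr(multi)      # -> "(or sr_id:10 sr_id:42)"
#     _restrict_sr(domain_sr)  # -> site:'"example.com"'
#     _restrict_sr(all_minus)  # -> "(not (or sr_id:10 sr_id:42))"
#     _restrict_sr(normal_sr)  # -> "sr_id:7"
#
# FakeSubverbify instances other than the handled subclasses fall through
# to None, i.e. no restriction clause is added to the query.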
def store_keys(key, maxes):
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subverbifys.

    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    userrel_fns = dict(liked=queries.get_liked,
                       disliked=queries.get_disliked,
                       saved=queries.get_saved,
                       hidden=queries.get_hidden)

    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = (queries.get_submitted if keytype == 'submitted'
              else queries.get_comments)
        q = fn(Account._byID(account_id), 'new', 'all')
        q._insert_tuples([(fname, float(timestamp))
                          for (timestamp, fname) in maxes])

    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)

        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix it
            sort = 'controversial'

        q = queries.get_links(Subverbify._byID(sr_id), sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])

    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])

    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
def make_sodium_message(thing, user_silded):
    from v1.models import Comment

    if thing.sildings == 0 or thing._spam or thing._deleted:
        return None

    author = Account._byID(thing.author_id, data=True)
    if not author._deleted:
        author_name = author.name
    else:
        author_name = _("[deleted]")

    if c.user_is_loggedin and thing.author_id == c.user._id:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "a verbifyor gifted you a month of verbify sodium for this "
                "comment.",
                "verbifyors have gifted you %(months)d months of verbify sodium "
                "for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "a verbifyor gifted you a month of verbify sodium for this "
                "submission.",
                "verbifyors have gifted you %(months)d months of verbify sodium "
                "for this submission.",
                thing.sildings)
    elif user_silded:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "you have gifted verbify sodium to %(recipient)s for this "
                "comment.",
                "you and other verbifyors have gifted %(months)d months of "
                "verbify sodium to %(recipient)s for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "you have gifted verbify sodium to %(recipient)s for this "
                "submission.",
                "you and other verbifyors have gifted %(months)d months of "
                "verbify sodium to %(recipient)s for this submission.",
                thing.sildings)
    else:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "a verbifyor has gifted verbify sodium to %(recipient)s for this "
                "comment.",
                "verbifyors have gifted %(months)d months of verbify sodium to "
                "%(recipient)s for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "a verbifyor has gifted verbify sodium to %(recipient)s for this "
                "submission.",
                "verbifyors have gifted %(months)d months of verbify sodium to "
                "%(recipient)s for this submission.",
                thing.sildings)

    return silded_message % dict(
        recipient=author_name,
        months=thing.sildings,
    )
def ensure_account(name):
    """Look up or register an account and return it."""
    try:
        account = Account._by_name(name)
        print ">> found /u/{}".format(name)
        return account
    except NotFound:
        print ">> registering /u/{}".format(name)
        return register(name, "password", "127.0.0.1")
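
# Usage sketch for a local seed script (names are illustrative; calling
# register() with a fixed password like this is only appropriate against
# a development database):
#
#     moderator = ensure_account("test_mod")
#     commenters = [ensure_account("commenter_%d" % i) for i in range(5)]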
@classmethod
def setUpClass(cls):
    # Create a dummy account for testing with; won't touch the database
    # as long as we don't `._commit()`
    name = "unit_tester_%s" % uuid.uuid4().hex
    cls._password = uuid.uuid4().hex
    cls._account = Account(
        name=name,
        password=bcrypt_password(cls._password),
    )
def monitor_mentions(comment):
    if comment._spam or comment._deleted:
        return

    sender = comment.author_slow
    if getattr(sender, "butler_ignore", False):
        # this is an account that generates false notifications, e.g.
        # LinkFixer
        return

    if sender.in_timeout:
        return

    subverbify = comment.subverbify_slow
    usernames = extract_user_mentions(comment.body)
    inbox_class = Inbox.rel(Account, Comment)

    # If more than our allowed number of mentions were passed, don't
    # highlight any of them.
    if len(usernames) > g.butler_max_mentions:
        return

    # Subverbify.can_view stupidly requires this.
    c.user_is_loggedin = True

    for username in usernames:
        try:
            account = Account._by_name(username)
        except NotFound:
            continue

        # most people are aware of when they mention themselves.
        if account == sender:
            continue

        # bail out if that user has the feature turned off
        if not account.pref_monitor_mentions:
            continue

        # don't notify users of things they can't see
        if not subverbify.can_view(account):
            continue

        # don't notify users when a person they've blocked mentions them
        if account.is_enemy(sender):
            continue

        # ensure this comment isn't already in the user's inbox
        rels = inbox_class._fast_query(
            account,
            comment,
            ("inbox", "selfreply", "mention"),
        )
        if filter(None, rels.values()):
            continue

        notify_mention(account, comment)
def cancel_subscription(subscr_id):
    q = Account._query(Account.c.sodium_subscr_id == subscr_id, data=True)
    accounts = list(q)

    if len(accounts) != 1:
        g.log.warning("Found %d matches for canceled subscription %s"
                      % (len(accounts), subscr_id))

    for account in accounts:
        account.sodium_subscr_id = None
        account._commit()
        g.log.info("%s canceled their recurring subscription %s"
                   % (account.name, subscr_id))
def setUp(self):
    super(TestVSubmitParent, self).setUp()

    # Reset the validator state and errors before every test.
    self.validator = VSubmitParent(None)
    c.errors = ErrorSet()

    c.user_is_loggedin = True
    c.user_is_admin = False
    c.user = Account(id=100)

    self.autopatch(Account, "enemy_ids", return_value=[])
    self.autopatch(Subverbify, "_byID", return_value=None)
def process_message(msg):
    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subverbify_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()
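
# The queue payload this consumer expects, reconstructed from the pops
# and gets above (field values are illustrative):
#
#     {
#         "user_id": 1337,
#         "thing_fullname": "t3_abc123",
#         "direction": 1,
#         "date": 1467822600,          # unix timestamp, UTC
#         "data": {},                  # extra vote attributes
#         "event_data": {}             # optional analytics context
#     }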
def accountid_from_subscription(subscr_id):
    if subscr_id is None:
        return None

    q = Account._query(Account.c.sodium_subscr_id == subscr_id,
                       Account.c._spam == (True, False),
                       Account.c._deleted == (True, False),
                       data=False)
    accounts = list(q)
    if accounts:
        return accounts[0]._id
    else:
        return None
def _promo_email(thing, kind, body="", **kw):
    from v1.lib.pages import Promo_Email

    a = Account._byID(thing.author_id, data=True)
    if not a.email:
        return

    body = Promo_Email(link=thing, kind=kind, body=body,
                       **kw).render(style="email")
    return _system_email(a.email, body, kind, thing=thing,
                         reply_to=g.selfserve_support_email,
                         suppress_username=True)
def message_notification_email(data):
    """Queues a system email for a new message notification."""
    from v1.lib.pages import MessageNotificationEmail

    MAX_EMAILS_PER_DAY = 1000
    MESSAGE_THROTTLE_KEY = 'message_notification_emails'

    # If our counter's expired, initialize it again.
    g.cache.add(MESSAGE_THROTTLE_KEY, 0, time=24 * 60 * 60)

    for datum in data.itervalues():
        datum = json.loads(datum)
        user = Account._byID36(datum['to'], data=True)
        comment = Comment._by_fullname(datum['comment'], data=True)

        # Handle the case where the user turned the preference on while
        # the feature was enabled for them, but we've since turned it off.
        # We have to pass the user explicitly because we're not in the
        # context of an HTTP request from them.
        if not feature.is_enabled('orangereds_as_emails', user=user):
            continue

        if g.cache.get(MESSAGE_THROTTLE_KEY) > MAX_EMAILS_PER_DAY:
            raise Exception(
                'Message notification emails: safety limit exceeded!')

        mac = generate_notification_email_unsubscribe_token(
            datum['to'], user_email=user.email,
            user_password_hash=user.password)
        base = g.https_endpoint or g.origin
        unsubscribe_link = base + '/mail/unsubscribe/%s/%s' % (datum['to'],
                                                               mac)

        templateData = {
            'sender_username': datum.get('from', ''),
            'comment': comment,
            'permalink': datum['permalink'],
            'unsubscribe_link': unsubscribe_link,
        }
        _system_email(user.email,
                      MessageNotificationEmail(**templateData).render(
                          style='email'),
                      Email.Kind.MESSAGE_NOTIFICATION,
                      from_address=g.notification_email)

        g.stats.simple_event('email.message_notification.queued')
        g.cache.incr(MESSAGE_THROTTLE_KEY)
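
# Shape of each queued datum, reconstructed from the lookups above
# (values illustrative): 'to' is the recipient's id36 and 'comment' is
# the fullname of the triggering comment.
#
#     {
#         "to": "1q2w3",
#         "from": "some_sender",
#         "comment": "t1_abc123",
#         "permalink": "/v/example/comments/xyz/_/abc123"
#     }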
def new_campaign(link, dates, target, frequency_cap, priority, location,
                 platform, mobile_os, ios_devices, ios_version_range,
                 android_devices, android_version_range,
                 total_budget_pennies, cost_basis, bid_pennies):
    campaign = PromoCampaign.create(link, target, dates[0], dates[1],
                                    frequency_cap, priority, location,
                                    platform, mobile_os, ios_devices,
                                    ios_version_range, android_devices,
                                    android_version_range,
                                    total_budget_pennies, cost_basis,
                                    bid_pennies)

    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if not campaign.is_house:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link,
                                                campaign=campaign)

    return campaign
def backfill_deleted_accounts(resume_id=None):
    del_accts = Account._query(Account.c._deleted == True,
                               sort=desc('_date'))
    if resume_id:
        del_accts._filter(Account.c._id < resume_id)

    for i, account in enumerate(progress(fetch_things2(del_accts))):
        # Don't kill the rabbit! Wait for the relevant queues to calm down.
        if i % 1000 == 0:
            del_len = get_queue_length('del_account_q')
            cs_len = get_queue_length('cloudsearch_changes')

            while del_len > 1000 or cs_len > 10000:
                sys.stderr.write("CS: %d, DEL: %d\n" % (cs_len, del_len))
                sys.stderr.flush()
                time.sleep(1)
                del_len = get_queue_length('del_account_q')
                cs_len = get_queue_length('cloudsearch_changes')

        amqp.add_item('account_deleted', account._fullname)
@classmethod
def new(cls, user, thing, reason=None):
    from v1.lib.db import queries

    # check if this report exists already!
    rel = cls.rel(user, thing)
    q = rel._fast_query(user, thing, ['-1', '0', '1'])
    q = [report for (tupl, report) in q.iteritems() if report]
    if q:
        # stop if we've seen this before, so that we never get the
        # same report from the same user twice
        oldreport = q[0]
        g.log.debug("Ignoring duplicate report %s" % oldreport)
        return oldreport

    kw = {}
    if reason:
        kw['reason'] = reason

    r = Report(user, thing, '0', **kw)

    # mark item as reported
    try:
        thing._incr(cls._field)
    except (ValueError, TypeError):
        g.log.error("%r has bad field %r = %r" %
                    (thing, cls._field,
                     getattr(thing, cls._field, "(nonexistent)")))
        raise

    r._commit()

    if hasattr(thing, 'author_id'):
        author = Account._byID(thing.author_id, data=True)
        author._incr('reported')

    if not getattr(thing, "ignore_reports", False):
        # update the reports queue if it exists
        queries.new_report(thing, r)

        # if the thing is already marked as spam, accept the report
        if thing._spam:
            cls.accept(thing)

    return r
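
# Usage sketch (assuming Report is the relation class this classmethod is
# defined on; the reason string is illustrative):
#
#     report = Report.new(c.user, comment, reason="spam")
#
# A duplicate report by the same user on the same thing returns the
# original report rather than creating a new one.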
def author_spammer(self, things, spam):
    """incr/decr the 'spammer' field for the author of every passed thing"""
    by_aid = {}
    for thing in things:
        if (hasattr(thing, 'author_id') and
                not getattr(thing, 'ban_info', {}).get('auto', True)):
            # only adjust 'spammer' for items that were not autobanned
            by_aid.setdefault(thing.author_id, []).append(thing)

    if by_aid:
        authors = Account._byID(by_aid.keys(), data=True, return_dict=True)

        for aid, author_things in by_aid.iteritems():
            author = authors[aid]
            author._incr('spammer',
                         len(author_things) if spam else -len(author_things))
def from_queue(self, max_date, batch_limit=50, kind=None):
    from v1.models import Account, Thing

    keep_trying = True
    min_id = None
    s = self.queue_table

    while keep_trying:
        where = [s.c.date < max_date]
        if min_id:
            where.append(s.c.uid > min_id)
        if kind:
            where.append(s.c.kind == kind)

        res = sa.select([s.c.to_addr, s.c.account_id,
                         s.c.from_name, s.c.fullname, s.c.body,
                         s.c.kind, s.c.ip, s.c.date, s.c.uid,
                         s.c.msg_hash, s.c.fr_addr, s.c.reply_to],
                        sa.and_(*where),
                        order_by=s.c.uid, limit=batch_limit).execute()
        res = res.fetchall()

        if not res:
            break

        # batch load user accounts
        aids = [x[1] for x in res if x[1] > 0]
        accts = (Account._byID(aids, data=True, return_dict=True)
                 if aids else {})

        # batch load things
        tids = [x[3] for x in res if x[3]]
        things = (Thing._by_fullname(tids, data=True, return_dict=True)
                  if tids else {})

        # track the highest uid fetched so the next iteration starts
        # after it
        min_id = max(x[8] for x in res)

        # did we not fetch them all?
        keep_trying = (len(res) == batch_limit)

        for (addr, acct, fname, fulln, body, kind, ip, date, uid,
             msg_hash, fr_addr, reply_to) in res:
            yield (accts.get(acct), things.get(fulln), addr, fname, date,
                   ip, kind, msg_hash, body, fr_addr, reply_to)
@classmethod
def get_details(cls, thing, voters=None):
    from v1.models import Comment, Link

    if isinstance(thing, Link):
        details_cls = VoteDetailsByLink
    elif isinstance(thing, Comment):
        details_cls = VoteDetailsByComment
    else:
        raise ValueError

    voter_id36s = None
    if voters:
        voter_id36s = [voter._id36 for voter in voters]

    try:
        row = details_cls._byID(thing._id36, properties=voter_id36s)
        raw_details = row._values()
    except tdb_cassandra.NotFound:
        return []

    try:
        row = VoterIPByThing._byID(thing._fullname,
                                   properties=voter_id36s)
        ips = row._values()
    except tdb_cassandra.NotFound:
        ips = {}

    details = []
    for voter_id36, json_data in raw_details.iteritems():
        data = json.loads(json_data)
        data = cls.convert_old_details(data)

        user = Account._byID36(voter_id36, data=True)
        direction = Vote.deserialize_direction(data.pop("direction"))
        date = datetime.utcfromtimestamp(data.pop("date"))
        effects = data.pop("effects")
        data["ip"] = ips.get(voter_id36)

        vote = Vote(user, thing, direction, date, data, effects,
                    get_previous_vote=False)
        details.append(vote)

    details.sort(key=lambda d: d.date)

    return details
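
# Usage sketch (assuming this classmethod lives on the base
# VoteDetailsByThing model shared by the two subclasses above; attribute
# names on the returned Vote objects follow the constructor arguments):
#
#     link = Link._by_fullname("t3_abc123")
#     votes = VoteDetailsByThing.get_details(link)
#     for vote in votes:
#         print vote.user, vote.direction, vote.date
#
# Passing voters=[account, ...] restricts the column fetch to just those
# voters' id36s instead of loading every vote on the thing.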