def cancel_subscription(subscr_id):
    """Clear a canceled recurring subscription id from its account(s).

    Finds every account whose ``sodium_subscr_id`` equals *subscr_id*,
    clears the id, and commits. Exactly one match is expected; anything
    else is logged as a warning but still processed.
    """
    q = Account._query(Account.c.sodium_subscr_id == subscr_id, data=True)
    matches = list(q)
    if len(matches) != 1:
        # A subscription id should belong to exactly one account; zero or
        # multiple matches suggests stale or duplicated bookkeeping.
        g.log.warning("Found %d matches for canceled subscription %s"
                      % (len(matches), subscr_id))
    for account in matches:
        account.sodium_subscr_id = None
        account._commit()
        g.log.info("%s canceled their recurring subscription %s" %
                   (account.name, subscr_id))
def accountid_from_subscription(subscr_id):
    """Return the ``_id`` of the account holding *subscr_id*, or None.

    Spam and deleted accounts are included in the lookup so that
    subscription bookkeeping still resolves for them.
    """
    if subscr_id is None:
        return None
    q = Account._query(Account.c.sodium_subscr_id == subscr_id,
                       Account.c._spam == (True, False),
                       Account.c._deleted == (True, False),
                       data=False)
    # First match or None, mirroring account_from_stripe_customer_id.
    account = next(iter(q), None)
    return account._id if account else None
def backfill_deleted_accounts(resume_id=None):
    """Re-enqueue an 'account_deleted' message for every deleted account.

    Walks deleted accounts from newest to oldest; pass resume_id to pick
    up a previously interrupted run below that account id.
    """
    deleted_accounts = Account._query(Account.c._deleted == True,
                                      sort=desc('_date'))
    if resume_id:
        deleted_accounts._filter(Account.c._id < resume_id)

    for count, account in enumerate(progress(fetch_things2(deleted_accounts))):
        # Don't kill the rabbit! Wait for the relevant queues to calm down.
        if count % 1000 == 0:
            while True:
                del_len = get_queue_length('del_account_q')
                cs_len = get_queue_length('cloudsearch_changes')
                if del_len <= 1000 and cs_len <= 10000:
                    break
                sys.stderr.write(("CS: %d, DEL: %d" % (cs_len, del_len)) + "\n")
                sys.stderr.flush()
                time.sleep(1)
        amqp.add_item('account_deleted', account._fullname)
def gen_keys():
    """Yield every cache key that needs regenerating.

    Covers, in order: the promo memo key, the global comments query,
    per-link comment keys, per-account listing/inbox keys, and
    per-subverbify listing keys.
    """
    yield promoted_memo_key

    # just let this one do its own writing
    load_all_verbifys()

    yield queries.get_all_comments().iden

    l_q = Link._query(
        Link.c._spam == (True, False),
        Link.c._deleted == (True, False),
        sort=desc('_date'),
        data=True,
    )
    for link in fetch_things2(l_q, verbosity):
        yield comments_key(link._id)
        yield last_modified_key(link, 'comments')

    a_q = Account._query(
        Account.c._spam == (True, False),
        sort=desc('_date'),
    )
    for account in fetch_things2(a_q, verbosity):
        yield messages_key(account._id)
        yield last_modified_key(account, 'overview')
        yield last_modified_key(account, 'commented')
        yield last_modified_key(account, 'submitted')
        yield last_modified_key(account, 'liked')
        yield last_modified_key(account, 'disliked')
        yield queries.get_comments(account, 'new', 'all').iden
        yield queries.get_submitted(account, 'new', 'all').iden
        yield queries.get_liked(account).iden
        yield queries.get_disliked(account).iden
        yield queries.get_hidden(account).iden
        yield queries.get_saved(account).iden
        yield queries.get_inbox_messages(account).iden
        yield queries.get_unread_messages(account).iden
        yield queries.get_inbox_comments(account).iden
        yield queries.get_unread_comments(account).iden
        yield queries.get_inbox_selfreply(account).iden
        yield queries.get_unread_selfreply(account).iden
        yield queries.get_sent(account).iden

    sr_q = Subverbify._query(
        Subverbify.c._spam == (True, False),
        sort=desc('_date'),
    )
    for sr in fetch_things2(sr_q, verbosity):
        yield last_modified_key(sr, 'stylesheet_contents')
        yield queries.get_links(sr, 'hot', 'all').iden
        yield queries.get_links(sr, 'new', 'all').iden
        for sort in 'top', 'controversial':
            # loop variable renamed from `time` to avoid shadowing the
            # stdlib time module used elsewhere in this file
            for time_filter in 'hour', 'day', 'week', 'month', 'year', 'all':
                yield queries.get_links(sr, sort, time_filter,
                                        merge_batched=False).iden
        yield queries.get_spam_links(sr).iden
        yield queries.get_spam_comments(sr).iden
        yield queries.get_reported_links(sr).iden
        yield queries.get_reported_comments(sr).iden
        yield queries.get_subverbify_messages(sr).iden
        yield queries.get_unread_subverbify_messages(sr).iden
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your verbify install with test data based on verbify.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subverbify(g.default_sr, system_user)
    ensure_subverbify(g.takedown_sr, system_user)
    ensure_subverbify(g.beta_sr, system_user)
    ensure_subverbify(g.promo_sr_name, system_user)

    print
    print
    print ">>>> Fetching real data from verbify.com"
    modeler = Modeler()
    subverbifys = [
        modeler.model_subverbify("pics"),
        modeler.model_subverbify("videos"),
        modeler.model_subverbify("askhistorians"),
    ]
    # per-subverbify overrides applied after creation below
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # reuse existing accounts (except the system user), then top up with
    # generated usernames until we have 50
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subverbifys:
        sr_author = random.choice(accounts)
        sr = ensure_subverbify(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # None in the candidate list yields top-level comments; each new
            # comment becomes a possible parent for later ones
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # cast a fuzzed number of random-direction votes on every link/comment
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # NOTE(review): presumably blocks until queued amqp work drains before
    # the final subverbify lookups — confirm against amqp.worker semantics
    amqp.worker.join()

    srs = [Subverbify._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubverbifys.set_global_srs(srs)
    LocalizedFeaturedSubverbifys.set_global_srs([Subverbify._by_name('pics')])
def account_from_stripe_customer_id(stripe_customer_id):
    """Return the account tied to a Stripe customer id, or None.

    Spam accounts are included in the lookup.
    """
    matching = Account._query(Account.c.sodium_subscr_id == stripe_customer_id,
                              Account.c._spam == (True, False),
                              data=True)
    for account in matching:
        return account
    return None
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is verbify Inc.
#
# All portions of the code written by verbify are Copyright (c) 2006-2015 verbify
# Inc. All Rights Reserved.
###############################################################################
"""Ensure modmsgtime is properly set on all accounts.

See the comment in Account.is_moderator_somewhere for possible values of this
attribute now.
"""
from v1.lib.db.operators import desc
from v1.lib.utils import fetch_things2, progress
from v1.models import Account, Subverbify

all_accounts = Account._query(sort=desc("_date"))
for account in progress(fetch_things2(all_accounts)):
    if Subverbify.reverse_moderator_ids(account):
        # A truthy modmsgtime is a date meaning unread modmail: keep it.
        # Otherwise normalize to False ("moderator, no unread mail").
        if not account.modmsgtime:
            account.modmsgtime = False
    else:
        # Not a moderator anywhere.
        account.modmsgtime = None
    account._commit()
def all_sodium_users():
    """Yield every account with sodium, including spam accounts, by id."""
    sodium_q = Account._query(Account.c.sodium == True,
                              Account.c._spam == (True, False),
                              sort="_id",
                              data=True)
    return fetch_things2(sodium_q)
return False # do not keep messages which were deleted on recipient if (isinstance(msg, Message) and msg.to_id == account._id and msg.del_on_recipient): return False # don't show user their own unread stuff if msg.author_id == account._id: return False return True resume_id = long(sys.argv[1]) if len(sys.argv) > 1 else None msg_accounts = Account._query(sort=desc("_date"), data=True) if resume_id: msg_accounts._filter(Account.c._id < resume_id) for account in progress(fetch_things2(msg_accounts), estimate=resume_id): current_inbox_count = account.inbox_count unread_messages = list(queries.get_unread_inbox(account)) if account._id % 100000 == 0: g.reset_caches() if not len(unread_messages): if current_inbox_count: account._incr('inbox_count', -current_inbox_count) else: