def make_daily_promotions():
    # charge campaigns so they can go live
    charge_pending(offset=0)
    charge_pending(offset=1)

    # promote links and record ids of promoted links
    link_ids = set()
    for campaign, link in get_scheduled_promos(offset=0):
        link_ids.add(link._id)
        promote_link(link, campaign)

    # expire finished links
    q = Link._query(Link.c.promote_status == PROMOTE_STATUS.promoted,
                    data=True)
    q = q._filter(not_(Link.c._id.in_(link_ids)))
    for link in q:
        update_promote_status(link, PROMOTE_STATUS.finished)
        emailer.finished_promo(link)

    # update subverbifys with promos
    all_live_promo_srnames(_update=True)

    _mark_promos_updated()
    finalize_completed_campaigns(daysago=1)
    hooks.get_hook('promote.make_daily_promotions').call(offset=0)
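# A read-only sketch (not part of the original module): the same query used
# above to expire promos can be run without side effects to preview which
# links the next make_daily_promotions() pass would mark finished. Assumes
# Link, PROMOTE_STATUS, not_, and get_scheduled_promos are importable as in
# this module; the function name is hypothetical.
def preview_expiring_promos():
    live_ids = {link._id for _, link in get_scheduled_promos(offset=0)}
    q = Link._query(Link.c.promote_status == PROMOTE_STATUS.promoted,
                    data=True)
    q = q._filter(not_(Link.c._id.in_(live_ids)))
    return list(q)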
def port_deleted_links(after_id=None):
    from v1.models import Link
    from v1.lib.db.operators import desc
    from v1.models.query_cache import CachedQueryMutator
    from v1.lib.db.queries import get_deleted_links
    from v1.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._deleted == True,
                    Link.c._spam == (True, False),
                    sort=desc('_date'),
                    data=True)
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, verbosity=1000)

    for chunk in in_chunks(q):
        with CachedQueryMutator() as m:
            for link in chunk:
                query = get_deleted_links(link.author_id)
                m.insert(query, [link])
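# A minimal generalization sketch of the backfill pattern above, assuming
# fetch_things2/in_chunks/progress behave as used in port_deleted_links.
# `things_query` and `get_query_for` are hypothetical parameters: the Thing
# query to walk and a callable mapping each thing to its cached query.
def port_to_cached_query(things_query, get_query_for, chunk_size=500):
    q = fetch_things2(things_query, chunk_size=chunk_size)
    q = progress(q, verbosity=1000)
    for chunk in in_chunks(q):
        # one mutator per chunk bounds how much work is lost if the run dies
        with CachedQueryMutator() as m:
            for thing in chunk:
                m.insert(get_query_for(thing), [thing])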
def port_cassaurls(after_id=None, estimate=15231317):
    from v1.models import Link, LinksByUrlAndSubverbify
    from v1.lib.db import tdb_cassandra
    from v1.lib.db.operators import desc
    from v1.lib.db.tdb_cassandra import CL
    from v1.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._spam == (True, False),
                    sort=desc('_date'),
                    data=True)
    if after_id:
        q._after(Link._byID(after_id, data=True))
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, estimate=estimate)
    q = (l for l in q
         if getattr(l, 'url', 'self') != 'self' and
            not getattr(l, 'is_self', False))

    chunks = in_chunks(q, 500)
    for chunk in chunks:
        for l in chunk:
            LinksByUrlAndSubverbify.add_link(l)
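# Sketch: the generator expression above skips self posts two ways because
# older rows may lack the is_self attribute and store 'self' as the url.
# An equivalent standalone predicate, for clarity only (name hypothetical):
def is_external_link(link):
    return (getattr(link, 'url', 'self') != 'self' and
            not getattr(link, 'is_self', False))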
def gen_keys():
    yield promoted_memo_key

    # just let this one do its own writing
    load_all_verbifys()

    yield queries.get_all_comments().iden

    l_q = Link._query(
        Link.c._spam == (True, False),
        Link.c._deleted == (True, False),
        sort=desc('_date'),
        data=True,
    )
    for link in fetch_things2(l_q, verbosity):
        yield comments_key(link._id)
        yield last_modified_key(link, 'comments')

    a_q = Account._query(
        Account.c._spam == (True, False),
        sort=desc('_date'),
    )
    for account in fetch_things2(a_q, verbosity):
        yield messages_key(account._id)
        yield last_modified_key(account, 'overview')
        yield last_modified_key(account, 'commented')
        yield last_modified_key(account, 'submitted')
        yield last_modified_key(account, 'liked')
        yield last_modified_key(account, 'disliked')
        yield queries.get_comments(account, 'new', 'all').iden
        yield queries.get_submitted(account, 'new', 'all').iden
        yield queries.get_liked(account).iden
        yield queries.get_disliked(account).iden
        yield queries.get_hidden(account).iden
        yield queries.get_saved(account).iden
        yield queries.get_inbox_messages(account).iden
        yield queries.get_unread_messages(account).iden
        yield queries.get_inbox_comments(account).iden
        yield queries.get_unread_comments(account).iden
        yield queries.get_inbox_selfreply(account).iden
        yield queries.get_unread_selfreply(account).iden
        yield queries.get_sent(account).iden

    sr_q = Subverbify._query(
        Subverbify.c._spam == (True, False),
        sort=desc('_date'),
    )
    for sr in fetch_things2(sr_q, verbosity):
        yield last_modified_key(sr, 'stylesheet_contents')
        yield queries.get_links(sr, 'hot', 'all').iden
        yield queries.get_links(sr, 'new', 'all').iden

        for sort in 'top', 'controversial':
            for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                yield queries.get_links(sr, sort, time,
                                        merge_batched=False).iden
        yield queries.get_spam_links(sr).iden
        yield queries.get_spam_comments(sr).iden
        yield queries.get_reported_links(sr).iden
        yield queries.get_reported_comments(sr).iden
        yield queries.get_subverbify_messages(sr).iden
        yield queries.get_unread_subverbify_messages(sr).iden
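# Consumption sketch (hypothetical driver, not in the original): a key
# generator like gen_keys() is normally drained in batches so the cache
# client can pipeline operations. The `cache` handle and its delete_multi
# method are assumptions modeled on memcached-style clients; in_chunks is
# used as in the migration helpers above.
def purge_keys(cache, chunk_size=1000):
    for keys in in_chunks(gen_keys(), chunk_size):
        cache.delete_multi(keys)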
from collections import defaultdict
from datetime import datetime

from pylons import g  # app globals; exact import path assumed

from v1.lib.db.operators import desc
from v1.lib.utils import fetch_things2
from v1.models import (
    calculate_server_seconds,
    Comment,
    Link,
    Subverbify,
)

LINK_SILDING_START = datetime(2014, 2, 1, 0, 0, tzinfo=g.tz)
COMMENT_SILDING_START = datetime(2012, 10, 1, 0, 0, tzinfo=g.tz)

queries = [
    Link._query(
        Link.c.sildings != 0,
        Link.c._date > LINK_SILDING_START,
        data=True,
        sort=desc('_date'),
    ),
    Comment._query(
        Comment.c.sildings != 0,
        Comment.c._date > COMMENT_SILDING_START,
        data=True,
        sort=desc('_date'),
    ),
]

seconds_by_srid = defaultdict(int)
silding_price = g.sodium_month_price.pennies

for q in queries:
    for things in fetch_things2(q, chunks=True, chunk_size=100):
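# The chunk-processing body is truncated above. A hedged sketch of the
# aggregation the surrounding names imply, with the signature of
# calculate_server_seconds assumed (pennies spent plus purchase date):
#
#     for thing in things:
#         pennies = thing.sildings * silding_price
#         seconds_by_srid[thing.sr_id] += calculate_server_seconds(
#             pennies, thing._date)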
def get_link_counts(period=count_period):
    links = Link._query(Link.c._date >= utils.timeago(period),
                        limit=50,
                        data=True)
    return dict((l._fullname, (0, l.sr_id)) for l in links)
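# Usage sketch: seeds a counter table keyed by fullname; the zero in each
# (count, sr_id) tuple is a starting count that callers increment, and at
# most 50 links from the period are returned. Hypothetical call, assuming
# count_period is a value utils.timeago accepts:
#   link_counts = get_link_counts()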