def submit_rss_links(srname,rss,user,titlefield='title',linkfield='link'): #F**k the API, let's just do it the way we would if we were really doing it. This avoids screwing around with cookies and so forth... feed=fetch_feed(rss) if feed is None: return ac=Account._byID(user) sr=Subsciteit._by_name(srname) ip='0.0.0.0' niceify=False if domain(rss)=="arxiv.org": niceify=dict(find="\(arXiv:.*?\)",replace="") #Let's randomize why not... random.shuffle(feed.entries) for article in feed.entries: #This can take all night if it has to, we don't want to hammer the server into oblivios sleep(1) kw = fetch_article(article,titlefield=titlefield,linkfield=linkfield,niceify=niceify) if kw is None: continue l = Link._submit(kw['title'],kw['link'],ac,sr,ip,spam=False) l._commit() l.set_url_cache() #We don't really need auto-submitted links to be vote on... queries.queue_vote(ac,l,True,ip,cheater=False) queries.new_link(l) changed(l) print "Submitted %s" % article[titlefield] sleep(.1) return
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit( is_self=False, title=title, content=url, account=account, sr=subreddit, ip=ip, spam=False, ) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, UPVOTE, ip) queries.new_link(link) link.update_search_index() # wait for the amqp worker to finish up worker.join() print link.make_permalink_slow()
def populate(num_srs=10, num_users=1000, num_links=100, num_comments=20,
             num_votes=50):
    """Fill the database with randomly generated subreddits, accounts,
    links, comments, and votes for testing."""
    # Make sure the system user exists.
    try:
        system = Account._by_name(g.system_user)
    except NotFound:
        system = register(g.system_user, "password", "127.0.0.1")

    # Create (or fetch) the test subreddits, authored by the system user.
    srs = []
    for n in range(num_srs):
        sr_name = "reddit_test%d" % n
        try:
            sr = Subreddit._new(name=sr_name,
                                title="everything about #%d" % n,
                                ip='0.0.0.0',
                                author_id=system._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            sr = Subreddit._by_name(sr_name)
        srs.append(sr)

    # Register randomly named test accounts.
    accounts = []
    for n in range(num_users):
        name = 'test_' + ''.join([
            random.choice(string.letters)
            for _ in range(int(random.uniform(1, 10)))
        ])
        try:
            account = register(name, name, "127.0.0.1")
        except AccountExists:
            account = Account._by_name(name)
        accounts.append(account)

    for n in range(num_links):
        # Random query string makes each URL (and title) unique-ish.
        query_id = random.uniform(1, 100)
        title = url = 'http://google.com/?q=' + str(query_id)
        submitter = random.choice(accounts)
        target_sr = random.choice(srs)
        l = Link._submit(title, url, submitter, target_sr, '127.0.0.1')
        queries.new_link(l)

        # Comment tree: parents drawn from earlier comments (None means
        # a top-level comment).
        comments = [None]
        for _ in range(int(random.betavariate(2, 8) * 5 * num_comments)):
            commenter = random.choice(accounts)
            body = ' '.join([
                random_word(1, 10)
                for _ in range(int(200 * random.betavariate(2, 6)))
            ])
            parent = random.choice(comments)
            (c, inbox_rel) = Comment._new(commenter, l, parent, body,
                                          '127.0.0.1')
            queries.new_comment(c, inbox_rel)
            comments.append(c)
            # Random upvotes on the fresh comment.
            for _ in range(int(random.betavariate(2, 8) * 10)):
                another_user = random.choice(accounts)
                queries.queue_vote(another_user, c, True, '127.0.0.1')

        # Votes on the link itself, biased upward by `like` percent.
        like = random.randint(50, 100)
        for _ in range(int(random.betavariate(2, 8) * 5 * num_votes)):
            voter = random.choice(accounts)
            queries.queue_vote(voter, l,
                               random.randint(0, 100) <= like, '127.0.0.1')

    queries.worker.join()
def post_if_goal_reached(date):
    """Submit the daily server-naming thread if the gold revenue goal for
    `date` was reached, then notify that day's gold buyers.

    Bails out silently if the day was already posted, no goal is
    configured, or the goal wasn't reached.
    """
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    if goal <= 0:
        # No (or nonsensical) goal configured for this day; bail out
        # rather than raising ZeroDivisionError below.
        return
    percent = revenue / float(goal)
    # bucket N means revenue reached N times the goal; 0 => goal not met.
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))

    # Selftext is a random wiki template plus shared boilerplate.
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link = Link._submit(
        is_self=True,
        title=date.strftime("%a %Y-%m-%d"),
        content=selftext_template % {
            "percent": int(percent * 100),
            "buyers": buyer_count,
        },
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )
    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link._commit()

    UPVOTE = True
    queries.queue_vote(SYSTEM_ACCOUNT, link, UPVOTE, "127.0.0.1")
    queries.new_link(link)
    link.update_search_index()

    # Message each gold buyer from that day using the notification template
    # (subject is the first CRLF-delimited line, body is the rest).
    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for buyer_id in gold_buyers_on(date):
        recipient = Account._byID(buyer_id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
def perform_actions(self, item, data):
    """Execute all the rule's actions against the item."""
    # Apply each configured target's own actions first.
    for key, target in self.targets.iteritems():
        target_item = self.get_target_item(item, data, key)
        target.perform_actions(target_item, data)
    if self.comment:
        # Post a distinguished bot comment in reply to the item.
        comment = self.build_message(self.comment, item, data,
                                     disclaimer=True)
        # TODO: shouldn't have to do all this manually
        # Replying to a comment parents the bot reply to it; replying to
        # a link posts a top-level comment.
        if isinstance(item, Comment):
            link = data["link"]
            parent_comment = item
        else:
            link = item
            parent_comment = None
        new_comment, inbox_rel = Comment._new(
            ACCOUNT, link, parent_comment, comment, None)
        new_comment.distinguished = "yes"
        new_comment._commit()
        queries.queue_vote(ACCOUNT, new_comment, True, None)
        queries.new_comment(new_comment, inbox_rel)
        g.stats.simple_event("automoderator.comment")
    if self.modmail:
        # Send a distinguished modmail message to the subreddit's mods.
        message = self.build_message(self.modmail, item, data,
                                     permalink=True)
        subject = replace_placeholders(
            self.modmail_subject, data, self.matches)
        # Truncate to the 100-character subject limit.
        subject = subject[:100]
        new_message, inbox_rel = Message._new(ACCOUNT, data["subreddit"],
                                              subject, message, None)
        new_message.distinguished = "yes"
        new_message._commit()
        queries.new_message(new_message, inbox_rel)
        g.stats.simple_event("automoderator.modmail")
    if self.message and not data["author"]._deleted:
        # PM the item's author (skipped for deleted accounts).
        message = self.build_message(self.message, item, data,
                                     disclaimer=True, permalink=True)
        subject = replace_placeholders(
            self.message_subject, data, self.matches)
        # Truncate to the 100-character subject limit.
        subject = subject[:100]
        new_message, inbox_rel = Message._new(ACCOUNT, data["author"],
                                              subject, message, None)
        queries.new_message(new_message, inbox_rel)
        g.stats.simple_event("automoderator.message")
    # Record that this rule already ran on this thing so it isn't re-applied.
    PerformedRulesByThing.mark_performed(item, self)
def activate_names(link, names):
    """Assign each (comment, name) pair to a server-name slot in zookeeper,
    reply to the winning comment, and update the link's selftext.

    Python 2 only: the sort key below uses a tuple-unpacking lambda.
    """
    for comment, name in names:
        # find a slot to assign a name to. we'll prefer nodes that are
        # currently empty, and failing that find the least-recently-modified
        # node.
        ROOT = "/gold/server-names"
        slot_names = g.zookeeper.get_children(ROOT)
        slots = [(slot_name, g.zookeeper.get(os.path.join(ROOT, slot_name)))
                 for slot_name in slot_names]
        # zookeeper.get returns (data, stat); empty data sorts first,
        # then oldest mtime.
        slots.sort(key=lambda (path, (data, stat)): (bool(data), stat.mtime))
        slot_path = os.path.join(ROOT, slots[0][0])

        # Store the new name (plus a permalink back to the comment) in
        # the chosen slot.
        comment_data = {'name': str(name),
                        'permalink': comment.make_permalink_slow()}
        g.zookeeper.set(slot_path, json.dumps(comment_data))

        # NOTE(review): the first contender on the slot's lock appears to
        # be the server currently holding this slot's name -- confirm.
        lock = g.zookeeper.Lock(slot_path)
        lock_contenders = lock.contenders()
        old_name = lock_contenders[0] if lock_contenders else ""
        old_name = old_name or "one of our servers"

        # reply to the user
        wp = WikiPage.get(SERVERNAME_SR, "templates/success-reply")
        template = random.choice(wp._get("content").split("\r\n---\r\n"))
        comment, inbox_rel = Comment._new(
            author=SYSTEM_ACCOUNT,
            link=link,
            parent=comment,
            body=template % {
                "old-name": old_name,
                "new-name": name,
            },
            ip="127.0.0.1",
        )
        queries.queue_vote(SYSTEM_ACCOUNT, comment, dir=True, ip="127.0.0.1")
        queries.new_comment(comment, inbox_rel)

        # update the link's text
        wp = WikiPage.get(SERVERNAME_SR, "templates/goldisms")
        goldism = random.choice(wp._get("content").split("\r\n---\r\n"))
        wp = WikiPage.get(SERVERNAME_SR, "templates/selftext-success")
        template = wp._get("content")
        link.selftext = template % {
            "old-name": old_name,
            "new-name": name,
            "goldism": goldism,
        }
        link._commit()
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit(title, url, account, subreddit, ip, spam=False) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, True, ip) queries.new_link(link) queries.changed(link) print link.make_permalink_slow()
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit(title, url, account, subreddit, ip, spam=False) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, UPVOTE, ip) queries.new_link(link) link.update_search_index() # wait for the amqp worker to finish up worker.join() print link.make_permalink_slow()
def post_if_goal_reached(date):
    """Post the daily server-naming thread if the gold revenue goal for
    `date` was reached, then message that day's gold buyers.

    NOTE(review): raises ZeroDivisionError if gold_goal_on(date) returns
    0 -- confirm goals are always positive.
    """
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return
    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    percent = revenue / float(goal)
    # bucket N means revenue reached N times the goal; 0 => goal not met.
    bucket = int(percent)
    if bucket == 0:
        return
    buyer_count = len(gold_buyers_on(date))
    # Submitted with a placeholder "self" url; converted to a proper
    # self-post below.
    link = Link._submit(
        title=date.strftime("%a %Y-%m-%d"),
        url="self",
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )
    # Selftext is a random wiki template plus shared boilerplate.
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate
    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    # Turn the placeholder into a self-post pointing at its own permalink.
    link.url = link.make_permalink(SERVERNAME_SR)
    link.selftext = selftext_template % {
        "percent": int(percent * 100),
        "buyers": buyer_count,
    }
    link.is_self = True
    link._commit()
    UPVOTE = True
    queries.queue_vote(SYSTEM_ACCOUNT, link, UPVOTE, "127.0.0.1")
    queries.new_link(link)
    queries.changed(link)
    # Message each buyer from that day; subject is the first CRLF-delimited
    # line of the template, body is the rest.
    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    # Build models of real subreddits so generated titles/urls/comments
    # look realistic.
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Per-subreddit config overrides applied after creation.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse existing non-system accounts, then top up to 50 with
    # generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            # A generated "self" url marks a self-post; convert it into one.
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
                link._commit()
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            # Comment tree: parents drawn from earlier comments (None
            # means a top-level comment).
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.queue_vote(comment_author, comment,
                                   dir=True, ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Random up/none/down votes across everything created above.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    amqp.worker.join()
def populate(num_srs=10, num_users=1000, num_links=100, num_comments=20,
             num_votes=50):
    """Fill the database with random test subreddits, accounts, links,
    comments, and votes."""
    # Make sure the system user exists.
    try:
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password", "127.0.0.1")

    # Create (or fetch) the test subreddits, authored by the system user.
    srs = []
    for i in range(num_srs):
        name = "reddit_test%d" % i
        try:
            sr = Subreddit._new(name=name, title="everything about #%d" % i,
                                ip='0.0.0.0', author_id=a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            sr = Subreddit._by_name(name)
        srs.append(sr)

    # Register randomly named test accounts.
    # NOTE(review): `a` is reused here, clobbering the system-user binding
    # above (harmless, since it is no longer needed).
    accounts = []
    for i in range(num_users):
        name_ext = ''.join([
            random.choice(string.letters)
            for x in range(int(random.uniform(1, 10)))
        ])
        name = 'test_' + name_ext
        try:
            a = register(name, name, "127.0.0.1")
        except AccountExists:
            a = Account._by_name(name)
        accounts.append(a)

    for i in range(num_links):
        # NOTE(review): `id` shadows the builtin of the same name.
        id = random.uniform(1, 100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(srs)
        l = Link._submit(title, url, user, sr, '127.0.0.1')
        queries.new_link(l)

        # Comment tree: parents drawn from earlier comments (None means a
        # top-level comment).
        comments = [None]
        for i in range(int(random.betavariate(2, 8) * 5 * num_comments)):
            user = random.choice(accounts)
            body = ' '.join([
                random_word(1, 10)
                for x in range(int(200 * random.betavariate(2, 6)))
            ])
            parent = random.choice(comments)
            (c, inbox_rel) = Comment._new(user, l, parent, body, '127.0.0.1')
            queries.new_comment(c, inbox_rel)
            comments.append(c)
            # Random upvotes on the fresh comment.
            for i in range(int(random.betavariate(2, 8) * 10)):
                another_user = random.choice(accounts)
                queries.queue_vote(another_user, c, True, '127.0.0.1')

        # Votes on the link itself, biased upward by `like` percent.
        like = random.randint(50, 100)
        for i in range(int(random.betavariate(2, 8) * 5 * num_votes)):
            user = random.choice(accounts)
            queries.queue_vote(user, l,
                               random.randint(0, 100) <= like, '127.0.0.1')

    queries.worker.join()
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    # Build models of real subreddits so generated titles/urls/comments
    # look realistic.
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Per-subreddit config overrides applied after creation.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse existing non-system accounts, then top up to 50 with
    # generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            # A generated "self" url marks a self-post; convert it into one.
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
                link._commit()
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            # Comment tree: parents drawn from earlier comments (None
            # means a top-level comment).
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.queue_vote(
                    comment_author, comment, dir=True, ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Random up/none/down votes across everything created above.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    amqp.worker.join()
def insert(title, sr_name, url, description, date, author='ArxivBot', cross_srs=[]): a = Account._by_name(author) sr = subreddit_or_create(sr_name, a) srs = [subreddit_or_create(sr_name, a) for sr_name in cross_srs] ups = 0 if author=='AnnalsBot': ups = 1 downs = 0 if False: try: ls = Link._by_url(url, None) print 'Found %d links' % len(ls) for l in ls: if l.author_id == a._id and l.sr_id != sr._id: ups = ups + l._ups - 1 downs = downs + l._downs l._deleted=True l._commit() changed(l) x = l.subreddit_slow queries.delete_links(l) print 'Deleting ' + str(l) else: print 'Not deleting ' + str(l) print 'Seed votes %s %s' % (ups, downs) except NotFound: pass try: l = Link._by_url(url, sr) print "!! Link already exists" return l except NotFound: print "Submitting link" user = a l = Link(_ups = ups, _downs = downs, title = title, url = url, _spam = False, author_id = user._id, sr_id = sr._id, lang = sr.lang, ip = '127.0.0.1', multi_sr_id = [sr._id]+[sr._id for sr in srs], selftext = description) l.verdict = 'admin-approved' l.approval_checkmark = _("auto-approved") l._date = datetime(date.year,date.month,date.day,tzinfo=g.tz) l.selftext = description l._commit() #for cross_sr in cross_srs: # LinkSR(l, subreddit_or_create(cross_sr, a), 'crosspost')._commit() l.set_url_cache() vote = None if author == 'AnnalsBot': vote = True queries.queue_vote(user, l, vote, '127.0.0.1') queries.new_savehide(l._save(user)) queries.new_link(l) changed(l) queries.worker.join() end_trial(l, "admin-approved") admintools.unspam(l, user.name) ModAction.create(sr, user, 'approvelink', target=l)