def submit_rss_links(srname,rss,user,titlefield='title',linkfield='link'): #F**k the API, let's just do it the way we would if we were really doing it. This avoids screwing around with cookies and so forth... feed=fetch_feed(rss) if feed is None: return ac=Account._byID(user) sr=Subsciteit._by_name(srname) ip='0.0.0.0' niceify=False if domain(rss)=="arxiv.org": niceify=dict(find="\(arXiv:.*?\)",replace="") #Let's randomize why not... random.shuffle(feed.entries) for article in feed.entries: #This can take all night if it has to, we don't want to hammer the server into oblivios sleep(1) kw = fetch_article(article,titlefield=titlefield,linkfield=linkfield,niceify=niceify) if kw is None: continue l = Link._submit(kw['title'],kw['link'],ac,sr,ip,spam=False) l._commit() l.set_url_cache() #We don't really need auto-submitted links to be vote on... queries.queue_vote(ac,l,True,ip,cheater=False) queries.new_link(l) changed(l) print "Submitted %s" % article[titlefield] sleep(.1) return
def submit_all():
    """Resubmit every URL in test_urls to the 'testmedia' subreddit.

    Any existing link for a URL is soft-deleted first, so media scraping
    (set_media) is always exercised against a fresh submission.
    """
    from r2.models import Subdigg, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries
    # NOTE(review): test_urls is presumably a module-level list of test
    # URLs -- not visible in this chunk; confirm it is defined.
    sr = Subdigg._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            l._deleted = True
            l._commit()
        except NotFound:
            pass
        l = Link._submit(url, url, author, sr, '0.0.0.0')
        try:
            set_media(l)
        except Exception, e:
            # media scraping is best-effort: report and keep going
            print e
        if g.write_query_queue:
            queries.new_link(l)
        links.append(l)
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit( is_self=False, title=title, content=url, author=account, sr=subreddit, ip=ip, spam=False, ) try: # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) except: pass # various backend processing things queries.new_link(link) link.update_search_index() # wait for the amqp worker to finish up worker.join() print link.make_permalink_slow()
def populate(num_srs = 10, num_users = 1000, num_links = 100, num_comments = 20, num_votes = 50):
    """Seed the database with test subreddits, accounts, links, comments,
    and votes.

    Counts for comments/votes per link are fuzzed with beta distributions
    so the data is unevenly distributed, like real traffic.
    """
    # ensure the system account exists
    try:
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password", "127.0.0.1")
    srs = []
    for i in range(num_srs):
        name = "reddit_test%d" % i
        try:
            sr = Subreddit._new(name = name, title = "everything about #%d"%i,
                                ip = '0.0.0.0', author_id = a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            # already created on a previous run
            sr = Subreddit._by_name(name)
        srs.append(sr)
    accounts = []
    for i in range(num_users):
        # random 1-9 character username suffix
        name_ext = ''.join([ random.choice(string.letters)
                             for x in range(int(random.uniform(1, 10))) ])
        name = 'test_' + name_ext
        try:
            a = register(name, name, "127.0.0.1")
        except AccountExists:
            a = Account._by_name(name)
        accounts.append(a)
    for i in range(num_links):
        id = random.uniform(1,100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(srs)
        l = Link._submit(title, url, user, sr, '127.0.0.1')
        queries.new_link(l)
        # build a fuzzed comment tree under this link
        comments = [ None ]
        for i in range(int(random.betavariate(2, 8) * 5 * num_comments)):
            user = random.choice(accounts)
            body = ' '.join([ random_word(1, 10)
                              for x in range(int(200 * random.betavariate(2, 6))) ])
            # None parent means a top-level comment
            parent = random.choice(comments)
            (c, inbox_rel) = Comment._new(user, l, parent, body, '127.0.0.1')
            queries.new_comment(c, inbox_rel)
            comments.append(c)
            # a few upvotes per comment
            for i in range(int(random.betavariate(2, 8) * 10)):
                another_user = random.choice(accounts)
                queries.queue_vote(another_user, c, True, '127.0.0.1')
        # vote on the link itself; `like` sets this link's upvote bias
        like = random.randint(50,100)
        for i in range(int(random.betavariate(2, 8) * 5 * num_votes)):
            user = random.choice(accounts)
            queries.queue_vote(user, l, random.randint(0, 100) <= like,
                               '127.0.0.1')
    # drain the query queue before returning
    queries.worker.join()
def post_if_goal_reached(date):
    """Submit the server-naming thread for *date* if the gold revenue goal
    was met, then notify that day's gold buyers by system message.

    No-op if the thread already exists or the goal wasn't reached.
    """
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    if goal <= 0:
        # no (or nonsensical) goal configured for this date; previously
        # this raised ZeroDivisionError
        return
    percent = revenue / float(goal)
    # bucket N means the goal was reached N times over
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))

    # pick a random selftext template and append the boilerplate
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link = Link._submit(
        is_self=True,
        title=date.strftime("%a %Y-%m-%d"),
        content=selftext_template % {
            "percent": int(percent * 100),
            "buyers": buyer_count,
        },
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )
    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link._commit()

    queries.new_link(link)
    link.update_search_index()

    # notify the buyers; template is "subject\r\nbody"
    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
def post_if_goal_reached(date):
    """Submit the server-naming thread for *date* if the gold revenue goal
    was met, seed it with a system upvote, then notify that day's gold
    buyers by system message.

    No-op if the thread already exists or the goal wasn't reached.
    """
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    if goal <= 0:
        # no (or nonsensical) goal configured for this date; previously
        # this raised ZeroDivisionError
        return
    percent = revenue / float(goal)
    # bucket N means the goal was reached N times over
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))

    # pick a random selftext template and append the boilerplate
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link = Link._submit(
        is_self=True,
        title=date.strftime("%a %Y-%m-%d"),
        content=selftext_template % {
            "percent": int(percent * 100),
            "buyers": buyer_count,
        },
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )
    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link._commit()

    # seed the thread with the system account's upvote
    UPVOTE = True
    queries.queue_vote(SYSTEM_ACCOUNT, link, UPVOTE, "127.0.0.1")
    queries.new_link(link)
    link.update_search_index()

    # notify the buyers; template is "subject\r\nbody"
    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
def test_get_links(self):
    """A freshly submitted link should come back first in the subreddit's
    'new' listing."""
    from r2.lib.db import queries
    from r2.models import Subreddit, Account, Link, Thing

    author = Account._byID(1, data=True)
    test_sr = Subreddit._by_name("reddit_test0")
    unique_url = self.make_unique_url()

    submitted = Link._submit("test_get_links", unique_url, author, test_sr,
                             "127.0.0.1", kind="link")
    queries.new_link(submitted, foreground=True)

    listing = Thing._by_fullname(queries.get_links(test_sr, "new", "all"),
                                 return_dict=False)
    self.assert_true(len(listing) > 0, "no links returned")
    self.assert_equal(submitted._id, listing[0]._id)
def test_sending_an_email(self):
    """Subscribing a user and submitting a link should produce exactly one
    activity-summary email addressed to the user."""
    test_sr = Subreddit._by_name('reddit_test0')
    recipient = self.get_test_user()
    test_sr.add_subscriber(recipient)
    self.assertIn(test_sr._id, recipient.spaces)

    # reset the throttle so the account is eligible for an email
    summary_email.reset_last_email_sent_at_for_all_accounts()
    assert summary_email.should_send_activity_summary_email(recipient)

    unique_url = self.make_unique_url()
    submitted = Link._submit("test_get_links", unique_url, recipient,
                             test_sr, '127.0.0.1', kind='link')
    queries.new_link(submitted, foreground=True)

    send_email = Mock()
    summary_email.send_account_summary_email(1, send_email=send_email)
    self.assert_equal(1, send_email.call_count)
    self.assert_equal('*****@*****.**', send_email.call_args[0][0])
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit(title, url, account, subreddit, ip, spam=False) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, True, ip) queries.new_link(link) queries.changed(link) print link.make_permalink_slow()
def populate(num_srs=10, num_users=10, num_links=100):
    """Seed the database with test subreddits, accounts, and links."""
    # ensure the system account exists
    try:
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password")
    srs = []
    for i in range(num_srs):
        name = "reddit_test%d" % i
        try:
            sr = Subreddit._new(name=name,
                                title="everything about #%d" % i,
                                ip='0.0.0.0', author_id=a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            # already created on a previous run
            sr = Subreddit._by_name(name)
        srs.append(sr)
    accounts = []
    for i in range(num_users):
        # random 1-9 character username suffix
        name_ext = ''.join([ random.choice(string.letters)
                             for x in range(int(random.uniform(1, 10))) ])
        name = 'test_' + name_ext
        try:
            a = register(name, name)
        except AccountExists:
            a = Account._by_name(name)
        accounts.append(a)
    for i in range(num_links):
        id = random.uniform(1, 100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(srs)
        l = Link._submit(title, url, user, sr, '127.0.0.1')
        queries.new_link(l)
    # drain the query queue before returning
    queries.worker.join()
def test_get_files(self):
    """A link submitted with kind='file' should show up both in the normal
    'new' listing and in the kind-filtered get_files query."""
    from r2.lib.db import queries
    from r2.models import Subreddit, Account, Link, Thing

    author = Account._byID(1, data=True)
    test_sr = Subreddit._by_name("reddit_test0")
    unique_url = self.make_unique_url()

    submitted = Link._submit("test_get_files", unique_url, author, test_sr,
                             "127.0.0.1", kind="file")
    queries.new_link(submitted, foreground=True)

    # make sure it returns like a normal link
    listing = Thing._by_fullname(queries.get_links(test_sr, "new", "all"),
                                 return_dict=False)
    self.assert_true(len(listing) > 0, "no links returned")
    self.assert_equal(submitted._id, listing[0]._id)

    # should return with a kind = 'file' filter
    file_listing = list(queries.get_files(test_sr))
    self.assert_true(len(file_listing) > 0, "no links returned")
    self.assert_equal(submitted._id, file_listing[0]._id)
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit(title, url, account, subreddit, ip, spam=False) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, UPVOTE, ip) queries.new_link(link) link.update_search_index() # wait for the amqp worker to finish up worker.join() print link.make_permalink_slow()
def populate(num_srs = 10, num_users = 10, num_links = 100):
    """Seed the database with test subreddits, accounts, and links."""
    # the system account is created on first run
    try:
        system_account = Account._by_name(g.system_user)
    except NotFound:
        system_account = register(g.system_user, "password")

    subreddits = []
    for idx in range(num_srs):
        sr_name = "reddit_test%d" % idx
        try:
            subreddit = Subreddit._new(name = sr_name,
                                       title = "everything about #%d" % idx,
                                       ip = '0.0.0.0',
                                       author_id = system_account._id)
            subreddit._downs = 10
            subreddit.lang = "en"
            subreddit._commit()
        except SubredditExists:
            subreddit = Subreddit._by_name(sr_name)
        subreddits.append(subreddit)

    accounts = []
    for idx in range(num_users):
        # random 1-9 character username suffix
        suffix = ''.join([ random.choice(string.letters)
                           for _ in range(int(random.uniform(1, 10))) ])
        username = 'test_' + suffix
        try:
            account = register(username, username)
        except AccountExists:
            account = Account._by_name(username)
        accounts.append(account)

    for idx in range(num_links):
        query_id = random.uniform(1,100)
        title = url = 'http://google.com/?q=' + str(query_id)
        submitter = random.choice(accounts)
        target_sr = random.choice(subreddits)
        new_link = Link._submit(title, url, submitter, target_sr, '127.0.0.1')
        queries.new_link(new_link)

    # drain the query queue before returning
    queries.worker.join()
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    # model real subreddits so generated titles/urls/comments look real
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("worldnews"),
        modeler.model_subreddit("gaming"),
    ]
    extra_settings = {
        "worldnews": {
            "show_media": True,
        },
        "pics": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # reuse existing accounts (excluding the system user), topping up to 50
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            # "self" is the modeler's marker for a selfpost
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # fuzzed comment tree under the link; None parent = top level
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # cast a fuzzed number of random votes on everything we created
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # wait for the amqp worker to process everything
    amqp.worker.join()

    # make the generated subreddits the global defaults/featured set
    srs = [Subreddit._by_name(n) for n in ("worldnews", "pics")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs(
        [Subreddit._by_name('worldnews')])
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    # model real subreddits so generated titles/urls/comments look real
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # reuse existing accounts (excluding the system user), topping up to 50
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            # "self" is the modeler's marker for a selfpost: convert the
            # link in place and re-commit
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
                link._commit()
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            # fuzzed comment tree under the link; None parent = top level
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                # author self-upvotes the new comment
                queries.queue_vote(comment_author, comment, dir=True,
                                   ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # queue a fuzzed number of random votes on everything we created;
    # dir=None is "no vote"/unvote
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    # wait for the amqp worker to process everything
    amqp.worker.join()
def populate(num_srs=10, num_users=1000, num_links=100, num_comments=20,
             num_votes=50):
    """Seed the database with test subreddits, accounts, links, comments,
    and votes.

    Counts for comments/votes per link are fuzzed with beta distributions
    so the data is unevenly distributed, like real traffic.
    """
    # ensure the system account exists
    try:
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password", "127.0.0.1")
    srs = []
    for i in range(num_srs):
        name = "reddit_test%d" % i
        try:
            sr = Subreddit._new(name=name,
                                title="everything about #%d" % i,
                                ip='0.0.0.0', author_id=a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            # already created on a previous run
            sr = Subreddit._by_name(name)
        srs.append(sr)
    accounts = []
    for i in range(num_users):
        # random 1-9 character username suffix
        name_ext = ''.join([ random.choice(string.letters)
                             for x in range(int(random.uniform(1, 10))) ])
        name = 'test_' + name_ext
        try:
            a = register(name, name, "127.0.0.1")
        except AccountExists:
            a = Account._by_name(name)
        accounts.append(a)
    for i in range(num_links):
        id = random.uniform(1, 100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(srs)
        l = Link._submit(title, url, user, sr, '127.0.0.1')
        queries.new_link(l)
        # build a fuzzed comment tree under this link
        comments = [None]
        for i in range(int(random.betavariate(2, 8) * 5 * num_comments)):
            user = random.choice(accounts)
            body = ' '.join([ random_word(1, 10)
                              for x in range(int(200 * random.betavariate(2, 6))) ])
            # None parent means a top-level comment
            parent = random.choice(comments)
            (c, inbox_rel) = Comment._new(user, l, parent, body, '127.0.0.1')
            queries.new_comment(c, inbox_rel)
            comments.append(c)
            # a few upvotes per comment
            for i in range(int(random.betavariate(2, 8) * 10)):
                another_user = random.choice(accounts)
                queries.queue_vote(another_user, c, True, '127.0.0.1')
        # vote on the link itself; `like` sets this link's upvote bias
        like = random.randint(50, 100)
        for i in range(int(random.betavariate(2, 8) * 5 * num_votes)):
            user = random.choice(accounts)
            queries.queue_vote(user, l, random.randint(0, 100) <= like,
                               '127.0.0.1')
    # drain the query queue before returning
    queries.worker.join()
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    # model real subreddits so generated titles/urls/comments look real
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # reuse existing accounts (excluding the system user), topping up to 50
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            # "self" is the modeler's marker for a selfpost
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # fuzzed comment tree under the link; None parent = top level
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # cast a fuzzed number of random votes on everything we created
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # wait for the amqp worker to process everything
    amqp.worker.join()

    # make the generated subreddits the global default set
    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
def populate(num_srs=10, num_users=1000, num_links=100, num_comments=20,
             num_votes=50):
    """Seed the database with NYC-neighborhood subreddits and test accounts.

    NOTE(review): link/comment/vote generation below is disabled behind
    `if False:` guards -- only subreddits and accounts are actually
    created.  num_links/num_comments/num_votes currently have no effect.
    """
    # ensure the system account exists
    try:
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password", "127.0.0.1")
    srs = []
    # one subreddit per Manhattan/Brooklyn/Queens neighborhood
    names = [
        'ask_ns', 'battery_park_city', 'harlem', 'chelsea', 'chinatown',
        'east_harlem', 'east_village', 'financial_district', 'flatiron',
        'fashion_district', 'gramercy', 'greenwich_village', 'hells_kitchen',
        'little_italy', 'lower_east_side', 'meatpacking_district',
        'midtown_east', 'morningside_heights', 'murray_hill', 'noho',
        'nolita', 'nomad', 'soho', 'theater_district', 'tribeca',
        'upper_east_side', 'upper_west_side', 'west_village', 'gowanus',
        'park_slope', 'carroll_gardens', 'cobble_hill', 'boerum_hill',
        'brooklyn_heights', 'downtown_brooklyn', 'prospect_heights',
        'fort_greene', 'clinton_hill', 'vinegar_hill', 'dumbo',
        'williamsburg', 'greenpoint', 'long_island_city',
    ]
    for name in names:
        try:
            sr = Subreddit._new(name=name,
                                title="everything about %s" % name,
                                ip='0.0.0.0', author_id=a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            # already created on a previous run
            sr = Subreddit._by_name(name)
        srs.append(sr)
    accounts = []
    for i in range(num_users):
        # random 1-9 character username suffix
        name_ext = ''.join([ random.choice(string.letters)
                             for x in range(int(random.uniform(1, 10))) ])
        name = 'test_' + name_ext
        try:
            a = register(name, name, "127.0.0.1")
        except AccountExists:
            a = Account._by_name(name)
        accounts.append(a)
    for i in range(num_links):
        id = random.uniform(1, 100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(srs)
        # DEAD CODE: link + comment generation intentionally disabled.
        # NOTE(review): nesting inside the `if False:` blocks is ambiguous
        # in the original source; this layout keeps all uses of `l` inside
        # the disabled branch so re-enabling it stays self-consistent.
        if False:
            l = Link._submit(title, url, user, sr, '127.0.0.1')
            queries.new_link(l)
            # build a fuzzed comment tree under this link
            comments = [None]
            for i in range(int(random.betavariate(2, 8) * 5 * num_comments)):
                user = random.choice(accounts)
                body = ' '.join([ random_word(1, 10)
                                  for x in range(int(200 * random.betavariate(2, 6))) ])
                # None parent means a top-level comment
                parent = random.choice(comments)
                (c, inbox_rel) = Comment._new(user, l, parent, body,
                                              '127.0.0.1')
                queries.new_comment(c, inbox_rel)
                comments.append(c)
                # a few upvotes per comment
                for i in range(int(random.betavariate(2, 8) * 10)):
                    another_user = random.choice(accounts)
                    v = Vote.vote(another_user, c, True, '127.0.0.1')
                    queries.new_vote(v)
        # DEAD CODE: link voting intentionally disabled
        if False:
            like = random.randint(50, 100)
            for i in range(int(random.betavariate(2, 8) * 5 * num_votes)):
                user = random.choice(accounts)
                v = Vote.vote(user, l, random.randint(0, 100) <= like,
                              '127.0.0.1')
                queries.new_vote(v)
    # drain the query queue before returning
    queries.worker.join()
def insert(title, sr_name, url, description, date, author='ArxivBot', cross_srs=[]): a = Account._by_name(author) sr = subreddit_or_create(sr_name, a) srs = [subreddit_or_create(sr_name, a) for sr_name in cross_srs] ups = 0 if author=='AnnalsBot': ups = 1 downs = 0 if False: try: ls = Link._by_url(url, None) print 'Found %d links' % len(ls) for l in ls: if l.author_id == a._id and l.sr_id != sr._id: ups = ups + l._ups - 1 downs = downs + l._downs l._deleted=True l._commit() changed(l) x = l.subreddit_slow queries.delete_links(l) print 'Deleting ' + str(l) else: print 'Not deleting ' + str(l) print 'Seed votes %s %s' % (ups, downs) except NotFound: pass try: l = Link._by_url(url, sr) print "!! Link already exists" return l except NotFound: print "Submitting link" user = a l = Link(_ups = ups, _downs = downs, title = title, url = url, _spam = False, author_id = user._id, sr_id = sr._id, lang = sr.lang, ip = '127.0.0.1', multi_sr_id = [sr._id]+[sr._id for sr in srs], selftext = description) l.verdict = 'admin-approved' l.approval_checkmark = _("auto-approved") l._date = datetime(date.year,date.month,date.day,tzinfo=g.tz) l.selftext = description l._commit() #for cross_sr in cross_srs: # LinkSR(l, subreddit_or_create(cross_sr, a), 'crosspost')._commit() l.set_url_cache() vote = None if author == 'AnnalsBot': vote = True queries.queue_vote(user, l, vote, '127.0.0.1') queries.new_savehide(l._save(user)) queries.new_link(l) changed(l) queries.worker.join() end_trial(l, "admin-approved") admintools.unspam(l, user.name) ModAction.create(sr, user, 'approvelink', target=l)
def load_fixtures(num_votes = 10):
    """Load accounts, subreddits, and links (with comment trees) from
    fixture files, adding randomized votes as it goes.

    num_votes -- scales the fuzzed number of votes cast per link
    """
    accounts = []
    for name, account_data in load_fixture('accounts').items():
        print "creating account %r" % (name,)
        try:
            a = Account._by_name(name)
        except NotFound:
            a = Account(name=name,
                        password=bcrypt_password(account_data['password']))
        # new accounts keep the profanity filter settings until opting out
        for key, val in account_data.items():
            if key in ('password',):
                continue
            setattr(a, key, val)
        a._commit()
        # clear the caches
        Account._by_name(name, _update = True)
        Account._by_name(name, allow_deleted = True, _update = True)
        accounts.append(a)

    for name, subreddit_data in load_fixture('subreddits').items():
        print "creating subreddit %r" % (name,)
        try:
            sr = Subreddit._by_name(name)
        except NotFound:
            author = Account._by_name(subreddit_data['author'])
            sr = Subreddit._new(name = name,
                                title = subreddit_data['title'],
                                author_id = author._id,
                                ip = subreddit_data['ip'])
        # apply remaining fixture attributes directly
        for key, val in subreddit_data.items():
            if key in ('author', 'ip', 'title', 'subscribers'):
                continue
            if val is None or val == '':
                continue
            setattr(sr, key, val)
        sr._downs = 10
        sr._commit()
        for sub_name in subreddit_data.get('subscribers', []):
            subscriber = Account._by_name(sub_name)
            Subreddit.subscribe_defaults(subscriber)
            if sr.add_subscriber(subscriber):
                sr._incr('_ups', 1)
            queries.changed(sr, True)
        for mod_name in subreddit_data.get('moderators', []):
            moderator = Account._by_name(mod_name)
            sr.add_moderator(moderator)

    # defined here so it has access to the 'accounts' var
    def load_comments(link, comments, parent_comment=None):
        # recursively create the fixture's comment tree under `link`
        for comment_data in comments:
            comment_author = Account._by_name(comment_data['author'])
            (c, inbox_rel) = Comment._new(comment_author, link,
                                          parent_comment,
                                          comment_data['body'],
                                          comment_data['ip'])
            queries.new_comment(c, inbox_rel)
            # a few fuzzed upvotes per comment
            for i in range(int(random.betavariate(2, 8) * 10)):
                another_user = random.choice(accounts)
                v = Vote.vote(another_user, c, True, '127.0.0.1')
                queries.new_vote(v)
            if comment_data.has_key('comments'):
                load_comments(link, comment_data['comments'], c)

    for link_label, link_data in load_fixture('links').items():
        print "creating link for %r" % (link_data['title'],)
        author = Account._by_name(link_data['author'])
        sr = Subreddit._by_name(link_data['sr'])
        link = Link._submit(link_data['title'], link_data['url'],
                            author, sr, link_data['ip'])
        # apply remaining fixture attributes directly
        for key, val in link_data.items():
            if key in ('title', 'url', 'author', 'sr', 'comments'):
                continue
            if val is None or val == '':
                continue
            setattr(link, key, val)
        link._commit()
        queries.new_link(link)
        # vote on the link; `like` sets this link's upvote bias
        like = random.randint(50,100)
        for i in range(int(random.betavariate(2, 8) * 5 * num_votes)):
            user = random.choice(accounts)
            v = Vote.vote(user, link, random.randint(0, 100) <= like,
                          '127.0.0.1')
            queries.new_vote(v)
        if link_data.has_key('comments'):
            load_comments(link, link_data['comments'])

    # drain the query queue before returning
    queries.worker.join()