def process_comment(self, comment_data, comment, post): # Prepare data for import ip = '127.0.0.1' if comment_data: naive_date = datetime.datetime.strptime(comment_data['dateCreated'], DATE_FORMAT) local_date = INPUT_TIMEZONE.localize(naive_date, is_dst=False) # Pick the non daylight savings time utc_date = local_date.astimezone(pytz.utc) # Determine account to use for this comment account = self._get_or_create_account(comment_data['author'], comment_data['authorEmail']) if comment_data and not comment: # Create new comment comment, inbox_rel = Comment._new(account, post, None, comment_data['body'], ip, date=utc_date) comment.is_html = True comment.ob_imported = True comment._commit() elif comment_data and comment: # Overwrite existing comment comment.author_id = account._id comment.body = comment_data['body'] comment.ip = ip comment._date = utc_date comment.is_html = True comment.ob_imported = True comment._commit() elif not comment_data and comment: # Not enough comment data being imported to overwrite all comments print 'WARNING: More comments in lesswrong than we are importing, ignoring additional comment in lesswrong'
def process_comment(self, comment_data, comment, post, comment_dictionary):
    """Import one comment onto ``post``, threading replies.

    ``comment_dictionary`` maps source commentId strings to the _ids of
    already-imported comments so that replies can be re-parented.
    Comments syndicated from the Effective Altruism blog are skipped.
    """
    # Imported comments all carry a placeholder IP.
    ip = '127.0.0.1'
    if comment_data:
        # Normalise the source timestamp to UTC; is_dst=False resolves
        # ambiguous local times to the non-DST interpretation.
        parsed = datetime.datetime.strptime(comment_data['dateCreated'],
                                            DATE_FORMAT)
        localized = INPUT_TIMEZONE.localize(parsed, is_dst=False)
        utc_date = localized.astimezone(pytz.utc)
        # Account that will own the imported comment.
        account = self._get_or_create_account(comment_data['author'],
                                              comment_data['authorEmail'])
    if comment_data and not comment_data['author'].endswith(
            "| The Effective Altruism Blog"):
        parent_key = str(comment_data['commentParent'])
        if not comment:
            # No existing comment: create a brand new one on the post.
            comment, inbox_rel = Comment._new(account, post, None,
                                              comment_data['body'], ip,
                                              date=utc_date)
            # Re-parent under the previously imported parent, if known.
            if parent_key in comment_dictionary:
                comment.parent_id = comment_dictionary[parent_key]
        else:
            # Overwrite the existing comment in place.
            if parent_key in comment_dictionary:
                comment.parent_id = comment_dictionary[parent_key]
            comment.author_id = account._id
            comment.body = comment_data['body']
            comment.ip = ip
            comment._date = utc_date
        comment.is_html = True
        comment.ob_imported = True
        comment._commit()
        # Record the imported _id so children can find their parent.
        comment_dictionary[str(comment_data['commentId'])] = comment._id
def process_comments_on_post(post, comments): for comment in comments: if comment_exists(post, comment): continue # Prepare data for import ip = '127.0.0.1' naive_date = datetime.datetime.strptime(comment['dateCreated'], DATE_FORMAT) local_date = INPUT_TIMEZONE.localize(naive_date, is_dst=False) # Pick the non daylight savings time utc_date = local_date.astimezone(pytz.utc) # Determine account to use for this comment account = get_or_create_account(comment['author']) if not dryrun: # Create new comment new_comment, inbox_rel = Comment._new(account, post, None, comment['body'], ip, date=utc_date) new_comment.is_html = True new_comment.ob_imported = True new_comment._commit() try: print " Imported as '%s' %s" % (account.name.decode('utf-8').encode('utf-8'), comment_excerpt(comment).decode('utf-8').encode('utf-8')) except UnicodeError: print " Imported comment"
def perform_actions(self, item, data):
    """Execute all the rule's actions against the item.

    Applies targeted sub-rules first, then optionally leaves a
    distinguished reply comment, sends modmail, and/or PMs the author,
    finally recording that this rule has run against the item.
    """
    # Apply each targeted sub-rule against its resolved target item
    # (e.g. actions aimed at the parent link or submission).
    for key, target in self.targets.iteritems():
        target_item = self.get_target_item(item, data, key)
        target.perform_actions(target_item, data)

    if self.comment:
        # Leave a distinguished bot comment in reply to the item.
        comment = self.build_message(self.comment, item, data,
            disclaimer=True)

        # TODO: shouldn't have to do all this manually
        if isinstance(item, Comment):
            # Replying to a comment: thread under it, on its link.
            link = data["link"]
            parent_comment = item
        else:
            # Replying to a link: top-level comment.
            link = item
            parent_comment = None
        new_comment, inbox_rel = Comment._new(
            ACCOUNT, link, parent_comment, comment, None)
        new_comment.distinguished = "yes"
        new_comment._commit()
        # Self-upvote the bot's comment, then fan out notifications.
        queries.queue_vote(ACCOUNT, new_comment, True, None)
        queries.new_comment(new_comment, inbox_rel)
        g.stats.simple_event("automoderator.comment")

    if self.modmail:
        # Send a distinguished modmail message to the subreddit.
        message = self.build_message(self.modmail, item, data,
            permalink=True)
        subject = replace_placeholders(
            self.modmail_subject, data, self.matches)
        subject = subject[:100]  # cap subject length

        new_message, inbox_rel = Message._new(ACCOUNT, data["subreddit"],
            subject, message, None)
        new_message.distinguished = "yes"
        new_message._commit()
        queries.new_message(new_message, inbox_rel)
        g.stats.simple_event("automoderator.modmail")

    if self.message and not data["author"]._deleted:
        # PM the item's author (skipped for deleted accounts).
        message = self.build_message(self.message, item, data,
            disclaimer=True, permalink=True)
        subject = replace_placeholders(
            self.message_subject, data, self.matches)
        subject = subject[:100]  # cap subject length

        new_message, inbox_rel = Message._new(ACCOUNT, data["author"],
            subject, message, None)
        queries.new_message(new_message, inbox_rel)
        g.stats.simple_event("automoderator.message")

    # Remember that this rule already acted on the item so it isn't
    # re-applied.
    PerformedRulesByThing.mark_performed(item, self)
def activate_names(link, names):
    """Assign each requested server name to a zookeeper slot and reply."""
    for comment, name in names:
        # find a slot to assign a name to. we'll prefer nodes that are
        # currently empty, and failing that find the least-recently-modified
        # node.
        ROOT = "/gold/server-names"
        candidates = []
        for child in g.zookeeper.get_children(ROOT):
            candidates.append(
                (child, g.zookeeper.get(os.path.join(ROOT, child))))

        def _empty_then_oldest(entry):
            # sort key: empty slots first, then by modification time
            _, (node_data, node_stat) = entry
            return (bool(node_data), node_stat.mtime)

        candidates.sort(key=_empty_then_oldest)
        slot_path = os.path.join(ROOT, candidates[0][0])

        # Store the winning name plus a permalink to the naming comment.
        payload = {
            'name': str(name),
            'permalink': comment.make_permalink_slow(),
        }
        g.zookeeper.set(slot_path, json.dumps(payload))

        # Whoever currently holds the slot's lock is the server being
        # renamed; fall back to a generic phrase when nobody does.
        lock = g.zookeeper.Lock(slot_path)
        contenders = lock.contenders()
        if contenders and contenders[0]:
            old_name = contenders[0]
        else:
            old_name = "one of our servers"

        # reply to the user
        wp = WikiPage.get(SERVERNAME_SR, "templates/success-reply")
        reply_template = random.choice(
            wp._get("content").split("\r\n---\r\n"))
        comment, inbox_rel = Comment._new(
            author=SYSTEM_ACCOUNT,
            link=link,
            parent=comment,
            body=reply_template % {
                "old-name": old_name,
                "new-name": name,
            },
            ip="127.0.0.1",
        )
        queries.new_comment(comment, inbox_rel)

        # update the link's text
        wp = WikiPage.get(SERVERNAME_SR, "templates/goldisms")
        goldism = random.choice(wp._get("content").split("\r\n---\r\n"))
        wp = WikiPage.get(SERVERNAME_SR, "templates/selftext-success")
        link.selftext = wp._get("content") % {
            "old-name": old_name,
            "new-name": name,
            "goldism": goldism,
        }
        link._commit()
def activate_names(link, names):
    """Assign each requested server name to a zookeeper slot and reply."""
    for comment, name in names:
        # find a slot to assign a name to. we'll prefer nodes that are
        # currently empty, and failing that find the least-recently-modified
        # node.
        ROOT = "/gold/server-names"
        slots = [(child, g.zookeeper.get(os.path.join(ROOT, child)))
                 for child in g.zookeeper.get_children(ROOT)]

        def _slot_order(entry):
            # sort key: empty slots first, then least recently modified
            _, (node_data, node_stat) = entry
            return (bool(node_data), node_stat.mtime)

        slots.sort(key=_slot_order)
        slot_path = os.path.join(ROOT, slots[0][0])

        # Record the winning name plus a permalink to the naming comment.
        g.zookeeper.set(slot_path, json.dumps({
            'name': str(name),
            'permalink': comment.make_permalink_slow(),
        }))

        # Whoever currently holds the slot's lock is the server being
        # renamed; fall back to a generic phrase when nobody does.
        lock = g.zookeeper.Lock(slot_path)
        contenders = lock.contenders()
        old_name = contenders[0] if contenders else ""
        if not old_name:
            old_name = "one of our servers"

        # reply to the user
        wp = WikiPage.get(SERVERNAME_SR, "templates/success-reply")
        reply_template = random.choice(
            wp._get("content").split("\r\n---\r\n"))
        comment, inbox_rel = Comment._new(
            author=SYSTEM_ACCOUNT,
            link=link,
            parent=comment,
            body=reply_template % {
                "old-name": old_name,
                "new-name": name,
            },
            ip="127.0.0.1",
        )
        # Self-upvote the reply, then fan out notifications.
        queries.queue_vote(SYSTEM_ACCOUNT, comment, dir=True, ip="127.0.0.1")
        queries.new_comment(comment, inbox_rel)

        # update the link's text
        wp = WikiPage.get(SERVERNAME_SR, "templates/goldisms")
        goldism = random.choice(wp._get("content").split("\r\n---\r\n"))
        wp = WikiPage.get(SERVERNAME_SR, "templates/selftext-success")
        link.selftext = wp._get("content") % {
            "old-name": old_name,
            "new-name": name,
            "goldism": goldism,
        }
        link._commit()
def process_comment(self, comment_data, comment, post, comment_dictionary):
    """Import a single comment onto ``post``.

    ``comment_dictionary`` maps source commentId strings to the _ids of
    already-imported comments so that replies can be re-parented.
    Comments whose author name ends with the Effective Altruism blog
    suffix are skipped.
    """
    # Prepare data for import
    ip = '127.0.0.1'  # placeholder IP for imported comments
    if comment_data:
        naive_date = datetime.datetime.strptime(
            comment_data['dateCreated'], DATE_FORMAT)
        local_date = INPUT_TIMEZONE.localize(
            naive_date, is_dst=False)  # Pick the non daylight savings time
        utc_date = local_date.astimezone(pytz.utc)

        # Determine account to use for this comment
        account = self._get_or_create_account(comment_data['author'],
                                              comment_data['authorEmail'])

    if comment_data and not comment_data['author'].endswith(
            "| The Effective Altruism Blog"):
        if not comment:
            # Create new comment
            comment, inbox_rel = Comment._new(account, post, None,
                                              comment_data['body'], ip,
                                              date=utc_date)
            # Re-parent under the previously imported parent, if known.
            if str(comment_data['commentParent']) in comment_dictionary:
                comment.parent_id = comment_dictionary[str(
                    comment_data['commentParent'])]
            comment.is_html = True
            comment.ob_imported = True
            comment._commit()
            # Record the new _id so children can find their parent.
            comment_dictionary[str(
                comment_data['commentId'])] = comment._id
        else:
            # Overwrite existing comment
            if str(comment_data['commentParent']) in comment_dictionary:
                comment.parent_id = comment_dictionary[str(
                    comment_data['commentParent'])]
            comment.author_id = account._id
            comment.body = comment_data['body']
            comment.ip = ip
            comment._date = utc_date
            comment.is_html = True
            comment.ob_imported = True
            comment._commit()
            # Record the overwritten comment's _id as well.
            comment_dictionary[str(
                comment_data['commentId'])] = comment._id
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    num_links: links generated per modeled subreddit.
    num_comments / num_votes: approximate (fuzzed) counts of comments
    per link and votes per thing.
    """
    # Make sure the accounts/subreddits the app expects always exist.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    # Build statistical models of live subreddits to generate
    # realistic-looking titles/urls/comments from.
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Per-subreddit settings applied after creation.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse existing accounts (minus the system user), topping up to 50
    # with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            # "self" is the modeler's marker for a selfpost; convert the
            # placeholder url into a real self-post.
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
            link._commit()

            # Author auto-upvotes their own submission.
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            # Build a random comment tree; None means a top-level comment.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.queue_vote(comment_author, comment, dir=True,
                                   ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Scatter random up/none/down votes across everything created.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    # Wait for the queued amqp work to drain before returning.
    amqp.worker.join()
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    num_links: links generated per modeled subreddit.
    num_comments / num_votes: approximate (fuzzed) counts of comments
    per link and votes per thing.
    """
    # Make sure the accounts/subreddits the app expects always exist.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    # Build statistical models of live subreddits to generate
    # realistic-looking titles/urls/comments from.
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Per-subreddit settings applied after creation.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse existing accounts (minus the system user), topping up to 50
    # with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            # "self" is the modeler's marker for a selfpost; choose the
            # submitted content accordingly.
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # Build a random comment tree; None means a top-level comment.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Scatter random up/unvote/down votes across everything created.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # Wait for the queued amqp work to drain before returning.
    amqp.worker.join()

    # Make the generated subreddits the global defaults.
    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    num_links: links generated per modeled subreddit.
    num_comments / num_votes: approximate (fuzzed) counts of comments
    per link and votes per thing.
    """
    # Make sure the accounts/subreddits the app expects always exist.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    # Build statistical models of live subreddits to generate
    # realistic-looking titles/urls/comments from.
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("worldnews"),
        modeler.model_subreddit("gaming"),
    ]
    # Per-subreddit settings applied after creation.
    extra_settings = {
        "worldnews": {
            "show_media": True,
        },
        "pics": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse existing accounts (minus the system user), topping up to 50
    # with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            # "self" is the modeler's marker for a selfpost; choose the
            # submitted content accordingly.
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # Build a random comment tree; None means a top-level comment.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Scatter random up/unvote/down votes across everything created.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # Wait for the queued amqp work to drain before returning.
    amqp.worker.join()

    # Make the generated subreddits the global defaults, and feature
    # worldnews.
    srs = [Subreddit._by_name(n) for n in ("worldnews", "pics")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs(
        [Subreddit._by_name('worldnews')])