def test__update_wiki():
    """update_wiki.main pushes every local wiki markdown file to the subreddit.

    Blanks each tracked wiki page first so the subsequent comparison proves
    the content really was rewritten by ``update_wiki.main``.
    """
    from datascience_bot import wiki

    wiki_dir = pathlib.Path(wiki.__file__).parent
    expected = defaultdict(
        str,
        {
            page.stem: page.read_text()
            for page in wiki_dir.iterdir()
            if not page.is_dir() and page.suffix == ".md"
        },
    )

    reddit = get_datascience_bot()

    # Blank out every page we track locally so the rewrite is detectable.
    for wiki_page in reddit.subreddit(SUBREDDIT_NAME).wiki:
        if wiki_page.name in expected:
            wiki_page.edit(content="", reason=f"Testing {__version__} at {TEST_TIME}")
            assert wiki_page.content_md == ""

    update_wiki.main()

    for wiki_page in reddit.subreddit(SUBREDDIT_NAME).wiki:
        if wiki_page.name in expected:
            # wiki content doesn't keep newlines at end of file, though we write them
            assert wiki_page.content_md == expected[wiki_page.name].strip()
def main(validate: bool = True):
    """Refresh the weekly thread.

    Unstickies the current weekly thread, posts a fresh one, then redirects
    unanswered comments from the old thread to the new one.

    :param validate: when True, run ``validate_task`` first (raises if the
        task should not run).
    """
    logger.info("Enter post_weekly_thread.main.py")

    # either datascience_bot_dev for testing, or datascience for production
    subreddit_name = os.getenv("SUBREDDIT_NAME")
    reddit = get_datascience_bot()
    subreddit = reddit.subreddit(display_name=subreddit_name)
    logger.info(f"Acting on subreddit: {subreddit.display_name}")

    if validate:
        logger.debug("Validating task")
        validate_task(reddit)  # raises error if not valid

    logger.debug("Unsticky the last weekly thread")
    stale_thread = get_weekly_thread(reddit)
    stale_thread.mod.sticky(state=False)

    logger.debug("Post the new weekly thread")
    fresh_thread = post_weekly_thread(reddit)

    # PRAW will not request pages more than once every 30 seconds. If you try,
    # it returns the cached version. Our cached version of page (created in
    # conf.py) doesn't have the new weekly thread we're creating for these
    # tests.
    # https://praw.readthedocs.io/en/v3.6.0/pages/faq.html#i-made-a-change-but-it-doesn-t-seem-to-have-an-effect
    time.sleep(30)

    logger.debug("Directed unanswered comments to the new weekly thread")
    direct_unanswered_comments_to_weekly_thread(
        reddit, old_thread_id=stale_thread.id, new_thread_id=fresh_thread.id
    )
def make_existing_thread() -> praw.models.Submission:
    """Create a stickied weekly thread seeded with test comments.

    Posts a bottom-stickied weekly thread, then adds one comment that stays
    unanswered and one comment that receives a reply.

    :returns: the newly created weekly-thread submission.
    """
    datascience_bot = get_datascience_bot()
    SubstantialStrain6 = get_SubstantialStrain6()
    b3405920 = get_b3405920()

    title = (
        "Weekly Entering & Transitioning Thread | "
        f"{(datetime.utcnow() - timedelta(days=7)).strftime('%d %b %Y')} - "
        f"{datetime.utcnow().strftime('%d %b %Y')}"
    ).strip()
    thread = datascience_bot.subreddit(SUBREDDIT_NAME).submit(
        title=title,
        selftext="Testing",
        send_replies=False,
    )
    thread.mod.approve()
    thread.mod.distinguish()
    thread.mod.sticky(state=True, bottom=True)

    # make a comment that will go unanswered
    SubstantialStrain6.submission(id=thread.id).reply(
        "I have a question that will go unanswered"
    )

    # make a comment and answer it
    question = b3405920.submission(id=thread.id).reply(
        "I have a question that will be answered by SubstantialStrain6"
    )
    SubstantialStrain6.comment(id=question.id).reply("I'm answering your question")

    return thread
def test__moderate_low_karma(low_karma):
    """A low-karma submission is removed by ``moderate_submissions.main``.

    :param low_karma: fixture submission authored by a low-karma account.
    """
    reddit = get_datascience_bot()
    mod_view = update(low_karma, reddit)

    # Sanity check: not removed before the moderation pass runs.
    # PEP 8 / flake8 E712: use truthiness, not `== False` / `== True`.
    assert not submission_is_removed(mod_view, reddit)

    moderate_submissions.main()

    assert submission_is_removed(mod_view, reddit)
def test__moderate_spam(spam_video):
    """A spam-link submission is removed and flagged as spam by the moderator.

    :param spam_video: fixture submission linking to known spam content.
    """
    reddit = get_datascience_bot()
    submission = update(spam_video, reddit)

    # Sanity check: not removed before the moderation pass runs.
    # PEP 8 / flake8 E712: use truthiness, not `== False` / `== True`.
    assert not submission_is_removed(submission, reddit)

    moderate_submissions.main()

    assert submission_is_removed(submission, reddit)
    assert submission.spam
def test__refresh_weekly_thread():
    """After the refresh, every top-level comment on the old thread has a reply."""
    reddit = get_datascience_bot()
    stale_thread = refresh_weekly_thread.get_weekly_thread(reddit)

    refresh_weekly_thread.main(validate=False)
    time.sleep(30)

    # refresh cached version
    stale_thread = reddit.submission(id=stale_thread.id)
    for comment in stale_thread.comments:
        assert comment is not None
        assert len(comment.replies) > 0
def main():
    """Deploy every local wiki markdown file to the subreddit's wiki."""
    reddit = get_datascience_bot()
    subreddit = reddit.subreddit(SUBREDDIT_NAME)

    wiki_dir = (pathlib.Path(__file__).parent / ".." / "wiki").resolve()
    for path in wiki_dir.iterdir():
        # only flat *.md files map to wiki pages
        if path.is_dir() or path.suffix != ".md":
            continue
        subreddit.wiki[path.stem].edit(
            content=path.read_text(),
            reason=f"Deploy version {__version__}",
        )
def remove_all_datascience_bot_dev_submissions():
    """Remove all submissions in r/datascience_bot_dev before all tests

    https://stackoverflow.com/a/17844938
    """
    reddit = get_datascience_bot()
    for submission in reddit.subreddit("datascience_bot_dev").new(limit=1000):
        # Leave a stickied mod note explaining why the post disappeared.
        note = submission.reply(
            f"This submission was removed at {TEST_TIME} to make way for testing."
        )
        note.mod.distinguish(how="yes", sticky=True)
        submission.mod.remove(spam=False)
def remove_all_user_submissions_to_datascience_bot_dev():
    """Delete each test account's own posts and comments on r/datascience_bot_dev."""
    accounts = (get_datascience_bot(), get_SubstantialStrain6(), get_b3405920())
    for reddit in accounts:
        me = reddit.user.me().name

        # remove all submissions to /r/datascience_bot_dev
        for submission in reddit.redditor(me).submissions.new(limit=100):
            if submission.subreddit.display_name == "datascience_bot_dev":
                submission.delete()

        # remove all comments on /r/datascience_bot_dev
        for comment in reddit.redditor(me).comments.new(limit=100):
            if comment.subreddit.display_name == "datascience_bot_dev":
                comment.delete()
def main() -> None:
    """Remove submissions that link to spam or trolling from the newest posts.

    Scans the five newest submissions and removes any flagged by
    ``remove_spam_submission`` or ``remove_troll_submission``.
    """
    logger.info("Collect spam submissions")

    # either datascience_bot_dev for testing, or datascience for production
    SUBREDDIT_NAME = os.getenv("SUBREDDIT_NAME")
    reddit = get_datascience_bot()
    subreddit = reddit.subreddit(display_name=SUBREDDIT_NAME)

    # NOTE(review): this counts every submission scanned, not only the ones
    # removed, so the log line below reports the scan size — confirm that is
    # the intended meaning of "spam submissions".
    count_spam_submissions = 0
    for submission in subreddit.new(limit=5):
        count_spam_submissions += 1
        # remove_*_submission functions return true if submission is removed
        if remove_spam_submission(submission):
            continue
        if remove_troll_submission(submission):
            continue
    logger.info(
        f"Successfully collected all ({count_spam_submissions}) spam submissions"
    )
def test__submission_is_removed():
    """``submission_is_removed`` tracks a moderator removal of a user post."""
    ## Create a post with u/SubstantialStrain6
    user_submission = (
        get_SubstantialStrain6()
        .subreddit(display_name="datascience_bot_dev")
        .submit(
            title=f"Test datascience_bot:submission_is_removed | {TEST_TIME}",
            selftext="This is a test.",
            send_replies=False,
        )
    )

    # View u/SubstantialStrain6's post with u/datascience-bot
    mod = get_datascience_bot()
    submission = update(user_submission, mod)

    # PEP 8 / flake8 E712: use truthiness, not `== False` / `== True`.
    assert not submission_is_removed(submission, mod)
    submission.mod.remove(spam=False)
    assert submission_is_removed(submission, mod)
# NOTE(review): everything above the __main__ guard runs at import time and
# duplicates the body of main() — it performs live Reddit API calls on import.
# Presumably this was meant to live inside main() or under the guard; confirm.

# either datascience_bot_dev for testing, or datascience for production
SUBREDDIT_NAME = os.getenv("SUBREDDIT_NAME")
reddit = get_datascience_bot()
subreddit = reddit.subreddit(display_name=SUBREDDIT_NAME)

count_spam_submissions = 0
for submission in subreddit.new(limit=5):
    count_spam_submissions += 1
    # remove_*_submission functions return true if submission is removed
    if remove_spam_submission(submission):
        continue
    if remove_troll_submission(submission):
        continue
logger.info(
    f"Successfully collected all ({count_spam_submissions}) spam submissions"
)

if __name__ == "__main__":
    from datascience_bot import get_datascience_bot

    # Safety rail: never run this script against the production subreddit.
    SUBREDDIT_NAME = os.getenv("SUBREDDIT_NAME")
    if SUBREDDIT_NAME != "datascience_bot_dev":
        raise Exception("Test only against r/datascience_bot_dev!")

    reddit = get_datascience_bot()
    subreddit = reddit.subreddit(display_name=SUBREDDIT_NAME)
    main()
def test__get_datascience_bot():
    """The bot factory hands back a PRAW Reddit client."""
    client = get_datascience_bot()
    assert isinstance(client, praw.reddit.Reddit)