# Exemplo n.º 1 (score: 0)
def test_parse_submission_2zxglv():
    """Live-fetch submission 2zxglv and verify the scraper's parsed output."""
    sid = "2zxglv"
    post = reddit.get_submission(submission_id=sid)

    # info = parsed submission fields; comments = parsed comment tree
    info, comments = scraper.parse_submission(post, Submission)

    # selftext
    assert info["selftext"].startswith("E.g. Download all comments in a subreddit from the last 2 months.")

    # fields with exact expected values
    exact = {
        "id": sid,
        "fullname": "t3_{}".format(sid),
        "created_utc": 1427051022.0,
        "subreddit_id": "t5_2qizd",
        "permalink": "https://www.reddit.com/r/redditdev/comments/2zxglv/best_way_to_download_comments_from_a_subreddit/",
        "author": "teh_shit",
        "title": "Best way to download comments from a subreddit, given a time interval?",
        "archived": True,
    }
    for field, expected in exact.items():
        assert info[field] == expected

    # score-like counters can only grow over time, so assert lower bounds
    minimums = {"ups": 3, "downs": 0, "score": 3, "num_comments": 7}
    for field, floor in minimums.items():
        assert info[field] >= floor

    # comment tree
    _comments_for_2zxglv(comments, submission_id=sid)
# Exemplo n.º 2 (score: 0)
def get_and_parse_reddit_submission(submission_id):
    """Fetch the reddit submission with *submission_id* and return its parsed
    representation as ``{"info": ..., "comments": ...}``."""
    submission = reddit.get_submission(submission_id=submission_id)
    parsed_info, parsed_comments = scraper.parse_submission(submission, Submission)
    return {"info": parsed_info, "comments": parsed_comments}
# Exemplo n.º 3 (score: 0)
def test_parse_comments_2zxglv():
    """Live-fetch submission 2zxglv and verify parse_comments yields the
    known comment tree."""
    sid = "2zxglv"
    post = reddit.get_submission(submission_id=sid)

    parsed = scraper.parse_comments(post)
    _comments_for_2zxglv(parsed, submission_id=sid)