# Example 1
def test_doubling_issue(session, test_user):
    """Test to check that a row doubling issue between tags and discussions has not been regressed.

    This is for bug #65.
    """
    epoch_start = datetime(2018, 1, 3)
    # Two tags on one bookmark: combined with two discussions below, the buggy
    # join produced a cartesian product (tags x discussions) of rows.
    bm = make_bookmark(
        tag_triples={("a", epoch_start, False), ("b", epoch_start, False)}
    )
    set_bookmark(session, test_user.user_uuid, bm)

    # Two discussions, from different sources, for the same url.
    discussions = [
        Discussion(
            external_id=str(random_numeric_id()),
            source=DiscussionSource.REDDIT,
            url=bm.url,
            comment_count=1,
            created_at=epoch_start,
            title="example",
        ),
        Discussion(
            external_id=str(random_numeric_id()),
            source=DiscussionSource.HN,
            url=bm.url,
            comment_count=0,
            created_at=epoch_start,
            title="example",
        ),
    ]

    upsert_discussions(session, discussions)
    # 2 tags x 2 discussions must still collapse to exactly one bookmark row.
    bookmarks = list(BookmarkViewQueryBuilder(session, test_user).execute())
    assert len(bookmarks) == 1
def test_reddit_client(http_client, requests_mock):
    """RedditDiscussionClient should parse a mocked search response into a Discussion."""
    url = random_url()

    reddit_client = RedditDiscussionClient(http_client, client_id="", client_secret="")
    # Manually set these so that no real token request is attempted
    reddit_client.token_client.expiry = datetime.utcnow() + timedelta(minutes=30)
    reddit_client.token_client._token = "abc123"

    expected_id = "def654"

    requests_mock.add(
        responses.GET,
        # Plain raw string: the original had a redundant f-prefix with no
        # placeholders, which would break if the pattern ever contained braces.
        re.compile(r"^https://api\.reddit\.com/search.*"),
        json=make_reddit_search_response(
            children=[
                make_reddit_link(id=expected_id, num_comments=10, url=url.to_string())
            ]
        ),
    )
    (discussion,) = list(reddit_client.discussions_for_url(url))
    assert discussion == Discussion(
        external_id=expected_id,
        source=DiscussionSource.REDDIT,
        title="r/test: An example",
        comment_count=10,
        created_at=datetime(2018, 1, 3),
        url=url,
    )
# Example 3
def test_discussion_digests(session, test_user):
    """The discussion digest should aggregate comment totals, counts and sources."""
    bm = make_bookmark()
    set_bookmark(session, test_user.user_uuid, bm)

    # Three HN discussions (1 + 0 + 100 comments) and one Reddit discussion
    # (1 comment) for the same url: total 102 comments across 4 discussions.
    source_counts = [
        (DiscussionSource.HN, 1),
        (DiscussionSource.HN, 0),
        (DiscussionSource.HN, 100),
        (DiscussionSource.REDDIT, 1),
    ]
    discussions = [
        Discussion(
            external_id=str(random_numeric_id()),
            source=source,
            url=bm.url,
            comment_count=comment_count,
            created_at=datetime(2018, 1, 3),
            title="example",
        )
        for source, comment_count in source_counts
    ]

    upsert_discussions(session, discussions)
    (bm1_view,) = BookmarkViewQueryBuilder(session, test_user).execute()
    assert bm1_view.discussion_digest.comment_count == 102
    assert bm1_view.discussion_digest.discussion_count == 4
    assert bm1_view.discussion_digest.sources == {
        DiscussionSource.HN,
        DiscussionSource.REDDIT,
    }
 def _discussion_from_child_data(self, child_data: Mapping) -> Discussion:
     """Translate one "child" object from a reddit listing into a Discussion."""
     created = datetime.utcfromtimestamp(child_data["created_utc"])
     link_url = URL.from_string(child_data["url"], coerce_canonicalisation=True)
     return Discussion(
         external_id=child_data["id"],
         source=DiscussionSource.REDDIT,
         url=link_url,
         comment_count=child_data["num_comments"],
         created_at=created,
         title=f'{child_data["subreddit_name_prefixed"]}: {child_data["title"]}',
     )
def sql_discussion_to_discussion(url: URL,
                                 sql_discussion: SQLDiscussion) -> Discussion:
    """Map an ORM-level SQLDiscussion row onto the domain Discussion type.

    The url is supplied by the caller rather than read off the row.
    """
    source = DiscussionSource(sql_discussion.discussion_source_id)
    return Discussion(
        external_id=sql_discussion.external_discussion_id,
        source=source,
        url=url,
        title=sql_discussion.title,
        created_at=sql_discussion.created_at,
        comment_count=sql_discussion.comment_count,
    )
def extract_hn_discussions(response_body: Mapping) -> Iterator[Discussion]:
    """Yield a Discussion for each hit in an HN search API response body."""
    log.debug("hn search api returned: %s", response_body)
    for hit in response_body["hits"]:
        # num_comments may be absent or null; treat both as zero
        comment_count = hit.get("num_comments", 0) or 0
        yield Discussion(
            external_id=hit["objectID"],
            source=DiscussionSource.HN,
            url=URL.from_string(hit["url"], coerce_canonicalisation=True),
            title=hit.get("title", ""),
            created_at=datetime.utcfromtimestamp(hit["created_at_i"]),
            comment_count=comment_count,
        )
# Example 7
def test_discussions(signed_in_client, test_user, session):
    """The discussions page for a bookmarked url should respond with 200."""
    bm = make_bookmark()
    set_bookmark(session, test_user.user_uuid, bm)

    discussion = Discussion(
        external_id=str(random_numeric_id()),
        source=DiscussionSource.HN,
        url=bm.url,
        comment_count=1,
        created_at=datetime(2018, 1, 3),
        title="HN discussion 1",
    )
    upsert_discussions(session, [discussion])

    page_url = flask.url_for(
        "quarchive.discussions",
        username=test_user.username,
        url_uuid=bm.url.url_uuid,
    )
    response = signed_in_client.get(page_url)
    assert response.status_code == 200