def test_unlimited(args: Tuple[TweetId, int, Optional[int]]) -> None:
    tweet_id, min_expected, min_tombstones = args
    # Using batch_size=100 to speed up these larger requests.
    tweets = list(Thread(tweet_id, max_tweets=None, batch_size=100).request())
    assert min_expected <= len(tweets)
    # TODO: assert min_tombstones
    assert len(tweets) == len({tweet.id for tweet in tweets})
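# Minimal usage sketch (hypothetical helper, not part of the test suite): the API
# exercised above, Thread(...).request(), yields tweet objects whose .id values are
# unique within a thread. Assumes List is imported from typing alongside the
# Tuple/Optional used above.
def _collect_thread_ids_sketch(tweet_id: TweetId) -> List[TweetId]:
    # max_tweets=None lifts the download limit, as in test_unlimited above.
    return [tweet.id for tweet in Thread(tweet_id, max_tweets=None).request()]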
def _make_batch_results(
    *, idify_dir: Optional[Path] = None, unidify_dir: Optional[Path] = None
) -> BatchResults:
    batch = Batch()
    batch.append(Thread("1115689254271819777"))
    results = batch.execute()
    assert results is not None
    if idify_dir is not None:
        results = results.idify(idify_dir)
        assert results is not None
    if unidify_dir is not None:
        results = results.unidify(unidify_dir)
        assert results is not None
    return results
def _make_batch_results(
    settings: NastySettings,
    *,
    idify_dir: Optional[Path] = None,
    unidify_dir: Optional[Path] = None,
) -> BatchResults:
    # Shared helper: executes a one-request batch and optionally round-trips the
    # results through idify/unidify. Unlike the variant above, unidify here needs
    # the Twitter API settings.
    batch = Batch()
    batch.append(Thread("1115689254271819777"))
    results = batch.execute()
    assert results is not None
    if idify_dir is not None:
        results = results.idify(idify_dir)
        assert results is not None
    if unidify_dir is not None:
        results = results.unidify(settings.twitter_api, unidify_dir)
        assert results is not None
    return results
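# Minimal usage sketch (hypothetical test, not from the repo): exercising the helper
# above with pytest's built-in tmp_path fixture. A `settings` fixture providing a
# NastySettings instance is assumed to be defined elsewhere in the test suite.
def test_batch_results_round_trip_sketch(
    settings: NastySettings, tmp_path: Path
) -> None:
    results = _make_batch_results(
        settings, idify_dir=tmp_path / "idify", unidify_dir=tmp_path / "unidify"
    )
    assert results is not None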
Search("donald trump"), Search("trump", since=date(2019, 3, 21), until=date(2019, 3, 22)), Search("trump", filter_=SearchFilter.LATEST), Search("trump", lang="de"), Search("trump", max_tweets=17, batch_size=71), Search("trump", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE), ], Replies: [ Replies("332308211321425920"), Replies("332308211321425920", max_tweets=17, batch_size=71), Replies("332308211321425920", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE), ], Thread: [ Thread("332308211321425920"), Thread("332308211321425920", max_tweets=17, batch_size=71), Thread("332308211321425920", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE), ], } ALL_REQUESTS: Final[Sequence[Request]] = [ request for requests_for_type in REQUESTS.values() for request in requests_for_type ] def _make_args( # noqa: C901 request: Request, to_batch: Optional[Path] = None,
def test_exact(args: Tuple[TweetId, Sequence[TweetId]]) -> None:
    tweet_id, thread = args
    assert thread == [tweet.id for tweet in Thread(tweet_id).request()]
def test_no_thread(tweet_id: TweetId) -> None:
    assert not list(Thread(tweet_id).request())
def test_max_tweets(max_tweets: int) -> None:
    tweets = list(
        Thread(TweetId("1183715553057239040"), max_tweets=max_tweets).request()
    )
    assert max_tweets == len(tweets)
    assert len(tweets) == len({tweet.id for tweet in tweets})
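# Hypothetical wiring sketch (the limit values are placeholders, not taken from the
# repo; assumes pytest is imported in this module): test_max_tweets above is meant
# to be driven by a parametrization over several limits, along these lines.
@pytest.mark.parametrize("max_tweets", [1, 10, 100], ids=repr)
def test_max_tweets_sketch(max_tweets: int) -> None:
    tweets = list(
        Thread(TweetId("1183715553057239040"), max_tweets=max_tweets).request()
    )
    assert max_tweets == len(tweets)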
    def _build_request(self) -> Request:
        return Thread(
            self.tweet_id, max_tweets=self.max_tweets, batch_size=self.batch_size
        )
    ],
    ids=lambda args: args[0].__name__ + ": " + repr(args[1]),
)
def test_illegal_args(args: Tuple[Type[Request], Mapping[str, object]]) -> None:
    type_, kwargs = args
    with pytest.raises(ValueError):
        type_(**kwargs)


@pytest.mark.parametrize(
    "request_",
    [
        Search("q"),
        Replies("332308211321425920", max_tweets=None),
        Thread("332308211321425920", max_tweets=123, batch_size=456),
    ],
    ids=repr,
)
def test_json_conversion(request_: Request) -> None:
    assert request_ == request_.from_json(request_.to_json())


@pytest.mark.parametrize(
    "search",
    [Search("q", since=date(2010, 1, 1), until=date(2010, 2, 1))],
    ids=repr,
)
def test_search_to_daily_requests(search: Search) -> None:
    # assert is not None necessary for mypy type checking
    daily_requests = search.to_daily_requests()
from nasty._util.typing_ import checked_cast
from nasty.batch.batch import Batch
from nasty.batch.batch_entry import BatchEntry
from nasty.batch.batch_results import BatchResults
from nasty.request.replies import Replies
from nasty.request.request import Request
from nasty.request.search import Search, SearchFilter
from nasty.request.thread import Thread

REQUESTS: Sequence[Request] = [
    Search("q"),
    Search("q", filter_=SearchFilter.PHOTOS, lang="de"),
    Search("q", since=date(2009, 1, 20), until=date(2017, 1, 20)),
    Replies("332308211321425920"),
    Replies("332308211321425920", max_tweets=50),
    Thread("332308211321425920"),
    Thread("332308211321425920", batch_size=100),
]


def _make_json_serialized_exception() -> JsonSerializedException:
    # Collect exception with trace.
    try:
        raise ValueError("Test Error.")
    except ValueError as e:
        return JsonSerializedException.from_exception(e)


# -- test_json_conversion_* ------------------------------------------------------------
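# Hypothetical sketch (not copied from the repo; assumes pytest is imported as in the
# other test modules shown here): given the to_json()/from_json() round trip exercised
# by test_json_conversion earlier in this section, the REQUESTS list defined above
# could drive an equivalent check for every request type in one go.
@pytest.mark.parametrize("request_", REQUESTS, ids=repr)
def test_request_json_round_trip_sketch(request_: Request) -> None:
    assert request_ == request_.from_json(request_.to_json())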