def test_unlimited(args: Tuple[TweetId, int, int]) -> None:
    """Verify an uncapped Replies request yields enough distinct Tweets."""
    tweet_id, min_expected, min_tombstones = args

    # batch_size=100 speeds up these larger requests.
    all_tweets = list(Replies(tweet_id, max_tweets=None, batch_size=100).request())

    assert len(all_tweets) >= min_expected
    # TODO: assert min_tombstones
    unique_ids = {tweet.id for tweet in all_tweets}
    assert len(all_tweets) == len(unique_ids)
from .mock_context import MockRequestContext

logger = getLogger(__name__)

# Representative requests of every request type, keyed by that type.  Covers the
# defaults as well as the since/until, filter_, lang, max_tweets, and batch_size
# options so parametrized tests exercise each code path.
REQUESTS: Final[Mapping[Type[Request], Sequence[Request]]] = {
    Search: [
        Search("trump"),
        Search("donald trump"),
        Search("trump", since=date(2019, 3, 21), until=date(2019, 3, 22)),
        Search("trump", filter_=SearchFilter.LATEST),
        Search("trump", lang="de"),
        Search("trump", max_tweets=17, batch_size=71),
        Search("trump", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE),
    ],
    Replies: [
        Replies("332308211321425920"),
        Replies("332308211321425920", max_tweets=17, batch_size=71),
        Replies("332308211321425920", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE),
    ],
    Thread: [
        Thread("332308211321425920"),
        Thread("332308211321425920", max_tweets=17, batch_size=71),
        Thread("332308211321425920", max_tweets=None, batch_size=DEFAULT_BATCH_SIZE),
    ],
}

# Flat sequence of all requests defined above.
ALL_REQUESTS: Final[Sequence[Request]] = [
    request
    for requests_for_type in REQUESTS.values()
tweets: Mapping[TweetId, Tweet]
) -> Callable[[Iterable[TweetId], TwitterApiSettings], Iterable[Optional[Tweet]]]:
    # Factory: returns a statuses_lookup replacement that serves Tweets from the
    # given in-memory mapping instead of hitting the Twitter API.
    def statuses_lookup(
        tweet_ids: Iterable[TweetId], twitter_api_settings: TwitterApiSettings
    ) -> Iterable[Tweet]:
        # twitter_api_settings is accepted for signature compatibility but
        # unused; every lookup is answered from `tweets`.
        return (tweets[tweet_id] for tweet_id in tweet_ids)

    return statuses_lookup


# Run the test for every ordering of the three requests, so a failure/restart
# at any position of the batch is covered.
@pytest.mark.parametrize(
    "requests",
    list(
        permutations([
            Replies(TweetId("1115690002233556993")),
            Replies(TweetId("1115690615612825601")),
            Replies(TweetId("1115691710657499137")),
        ])),
    ids=repr,
)
def test_unidify_fail_and_restart(
    requests: Iterable[Request],
    settings: NastySettings,
    monkeypatch: MonkeyPatch,
    tmp_path: Path,
) -> None:
    # Separate working directories for the idify and unidify phases.
    idify_dir = tmp_path / "idify"
    unidify_dir = tmp_path / "unidify"
    batch = Batch()
def _build_request(self) -> Request:
    """Assemble the Replies request described by this object's configuration."""
    request = Replies(
        self.tweet_id,
        max_tweets=self.max_tweets,
        batch_size=self.batch_size,
    )
    return request
}),
],
    ids=lambda args: args[0].__name__ + ": " + repr(args[1]),
)
def test_illegal_args(
    args: Tuple[Type[Request], Mapping[str, object]]) -> None:
    # Each parametrized case pairs a request type with kwargs it must reject.
    type_, kwargs = args
    with pytest.raises(ValueError):
        type_(**kwargs)


@pytest.mark.parametrize(
    "request_",
    [
        Search("q"),
        Replies("332308211321425920", max_tweets=None),
        Thread("332308211321425920", max_tweets=123, batch_size=456),
    ],
    ids=repr,
)
def test_json_conversion(request_: Request) -> None:
    # Round trip: serializing and deserializing must yield an equal request.
    assert request_ == request_.from_json(request_.to_json())


@pytest.mark.parametrize(
    "search",
    [Search("q", since=date(2010, 1, 1), until=date(2010, 2, 1))],
    ids=repr,
)
def test_search_to_daily_requests(search: Search) -> None:
    # assert is not None necessary for mypy type checking
def test_exact(args: Tuple[TweetId, Set[TweetId]]) -> None:
    """Check that a Replies request returns exactly the expected reply IDs."""
    tweet_id, expected_ids = args
    actual_ids = {tweet.id for tweet in Replies(tweet_id).request()}
    assert actual_ids == expected_ids
def test_no_replies(tweet_id: TweetId) -> None:
    """Ensure a Tweet without replies yields an empty result."""
    results = list(Replies(tweet_id).request())
    assert not results
def test_max_tweets(max_tweets: int) -> None:
    """Check that max_tweets caps the result count and all results are unique."""
    request = Replies(TweetId("1096092704709070851"), max_tweets=max_tweets)
    results = list(request.request())

    assert len(results) == max_tweets
    assert len({tweet.id for tweet in results}) == len(results)
from nasty._util.io_ import read_file, read_lines_file, write_file
from nasty._util.json_ import JsonSerializedException, read_json, write_json
from nasty._util.typing_ import checked_cast
from nasty.batch.batch import Batch
from nasty.batch.batch_entry import BatchEntry
from nasty.batch.batch_results import BatchResults
from nasty.request.replies import Replies
from nasty.request.request import Request
from nasty.request.search import Search, SearchFilter
from nasty.request.thread import Thread

# Assorted requests covering every request type plus several option
# combinations, used as shared fixtures by the tests below.
REQUESTS: Sequence[Request] = [
    Search("q"),
    Search("q", filter_=SearchFilter.PHOTOS, lang="de"),
    Search("q", since=date(2009, 1, 20), until=date(2017, 1, 20)),
    Replies("332308211321425920"),
    Replies("332308211321425920", max_tweets=50),
    Thread("332308211321425920"),
    Thread("332308211321425920", batch_size=100),
]


def _make_json_serialized_exception() -> JsonSerializedException:
    """Build a JsonSerializedException that carries a real traceback."""
    # Collect exception with trace.
    try:
        raise ValueError("Test Error.")
    except ValueError as e:
        return JsonSerializedException.from_exception(e)


# -- test_json_conversion_* ------------------------------------------------------------