def fetch_insert_tweets(username, pages, last_tweet_id=None):
    """Scrape a user's profile tweets and persist them to SQLite.

    Args:
        username: Twitter handle to scrape (format expected by TweetScrapperProfile).
        pages: Number of pages of tweets to fetch.
        last_tweet_id: Optional tweet id to resume scraping from (default None).
    """
    tweet_scrapper = TweetScrapperProfile(username, pages, last_tweet_id)
    tweets = tweet_scrapper.get_profile_tweets()
    # Only open the database when something was actually fetched.
    if tweets:
        # Idiomatic negative index instead of tweets[len(tweets) - 1].
        last_fetched_tweet = tweets[-1].get_tweet_id()
        print("Last tweet:", last_fetched_tweet)
        sqlt = SQLiteHelper()
        sqlt.insert_tweet(tweets)
# Example #2 (0 votes)
def test_user_tweets(test_user, test_page):
    """Check that profile scraping yields well-formed tweet objects."""
    scrapper = TweetScrapperProfile(test_user, test_page)
    extracted_tweets = scrapper.get_profile_tweets(False)
    if test_page > 0:
        assert len(extracted_tweets) > 0
    # The scraper handles start with '@'; the tweet fields do not.
    expected_handle = test_user[1:]
    for tweet in extracted_tweets:
        assert tweet.get_tweet_id() is not None
        assert tweet.get_tweet_text() is not None
        if tweet.get_is_retweeter():
            assert tweet.get_retweeter() == expected_handle
        else:
            assert tweet.get_tweet_author() == expected_handle
def main(args):
    """Main entry point allowing external calls

    Args:
      args ([str]): command line parameter list

    Returns:
      str: human-readable summary of how many tweets were extracted and where.
    """
    args = parse_args(args)
    setup_logging(args.loglevel)
    _logger.info("Scrapping tweets")

    if args.username is not None:
        # Profile mode: scrape a single user's timeline.
        ts = TweetScrapperProfile(username=args.username,
                                  num_tweets=args.pages,
                                  tweet_dump_path=args.tweet_dump_path,
                                  tweet_dump_format=args.tweet_dump_format,
                                  request_proxies=args.request_proxies)
        l_tweet_count, l_tweet_id, l_tweet_time, l_dump_path = ts.get_profile_tweets()
    else:
        # Search mode: build a query from the search-related arguments.
        ts = TweetScrapperSearch(
            search_all=args.search_all,
            search_exact=args.search_exact,
            search_any=args.search_any,
            search_excludes=args.search_excludes,
            search_hashtags=args.search_hashtags,
            search_from_accounts=args.search_from_accounts,
            search_to_accounts=args.search_to_accounts,
            search_mentions=args.search_mentions,
            search_near_place=args.search_near_place,
            search_till_date=args.search_till_date,
            search_since_date=args.search_since_date,
            num_tweets=args.pages,
            language=args.language,
            tweet_dump_path=args.tweet_dump_path,
            tweet_dump_format=args.tweet_dump_format,
            request_proxies=args.request_proxies)
        l_tweet_count, l_tweet_id, l_tweet_time, l_dump_path = ts.get_search_tweets()

    # Build the summary once instead of duplicating the format string
    # four times across print/return in both branches.
    summary = "Extracted {0} tweets till {1} at {2}".format(
        l_tweet_count, l_tweet_time, l_dump_path)
    print(summary)
    return summary
def main(args):
    """Main entry point allowing external calls

    Args:
      args ([str]): command line parameter list
    """
    args = parse_args(args)
    setup_logging(args.loglevel)
    _logger.info("Scrapping tweets for {0}".format(args.username))

    if args.username is not None and args.username.startswith("@"):
        # Profile mode; fall back to the scraper's default page count
        # when none was given on the command line.
        scrapper = (TweetScrapperProfile(args.username, args.pages)
                    if args.pages is not None
                    else TweetScrapperProfile(args.username))
        tweets = scrapper.get_profile_tweets(False)
    elif args.search_term is not None:
        scrapper = (TweetScrapperSearch(args.search_term, args.pages)
                    if args.pages is not None
                    else TweetScrapperSearch(args.search_term))
        tweets = scrapper.get_search_tweets(False)
    else:
        raise ValueError(
            "No matching argument. Provide a twitter username eg. -u @5hirish or"
            " a twitter hashtag eg. -s #Python or any search term.")

    for tweet in tweets:
        print(str(tweet))
    return tweets
def test_user_tweets(test_user, test_page):
    """Check CSV dumping of profile tweets: file exists, rows are complete."""
    scrapper = TweetScrapperProfile(test_user, test_page, 'twitter.csv', 'csv')
    tweet_count, tweet_id, tweet_time, dump_path = scrapper.get_profile_tweets(False)

    assert os.path.exists(dump_path)

    if test_page > 0:
        # Page count maps only roughly to tweet count; allow some slack.
        assert tweet_count == pytest.approx(test_page, abs=5)

    with open(dump_path, 'r') as dump_file:
        for row in csv.DictReader(dump_file):
            assert row.get('id') is not None
            assert row.get('text') is not None
            assert row.get('time') is not None

    os.remove(dump_path)
# Example #6 (0 votes)
    def getTweetsbyAccount(self, username, pagination=1):
        """Scrape a user's profile tweets and return them as camel-cased dicts.

        Args:
            username: Twitter handle (coerced to str before scraping).
            pagination: Number of pages to fetch (default 1).

        Returns:
            A list of dicts, one per tweet, with attribute names stripped of
            the '__tweet_' fragment and converted via self.to_camel_case.
        """
        scrapper = TweetScrapperProfile(str(username), pages=pagination)
        response = []
        for tweet in scrapper.get_profile_tweets():
            attrs = tweet.__dict__
            # Rename every key in place; snapshot the keys first because
            # the dict is mutated while we walk it.
            for old_key in list(attrs.keys()):
                new_key = self.to_camel_case(old_key.replace('__tweet_', ''))
                attrs[new_key] = attrs[old_key]
                del attrs[old_key]
            response.append(attrs)
        return response



#if __name__ == "__main__":
#    x= TweetBody()
#    print(x.getTweetsbyAccount('elonmusk'))

        
# Example #7 (0 votes)
from tweetscrape.profile_tweets import TweetScrapperProfile

# Dump up to 40 of a profile's tweets into twitter.csv and report the result.
# NOTE(review): the username is an empty string here — presumably a placeholder
# to be filled in before running; confirm against the original example.
tweet_scrapper = TweetScrapperProfile("", 40, 'twitter.csv', 'csv')
tweet_count, tweet_id, tweet_time, dump_path = tweet_scrapper.get_profile_tweets()
summary = "Extracted {0} tweets till {1} at {2}".format(tweet_count, tweet_time,
                                                        dump_path)
print(summary)
# Example #8 (0 votes)
 def fetch_tweets(self, profile, npage):
     """Return the scraped tweets for the given profile handle."""
     scrapper = TweetScrapperProfile(profile, npage)
     return scrapper.get_profile_tweets()
# Example #9 (0 votes)
def fetch_tweets(profile, npages=1):
    """Fetch a profile's tweets.

    Args:
        profile: Twitter handle to scrape.
        npages: Number of pages to fetch (default 1).

    Returns:
        Whatever TweetScrapperProfile.get_profile_tweets() produces
        (a collection of tweet objects in this library version).
    """
    # PEP 8: snake_case local instead of 'TS'; no redundant parens on return.
    scrapper = TweetScrapperProfile(profile, npages)
    return scrapper.get_profile_tweets()