from lw import get_logger, get_header, get_root_logger
import argparse

from src import InsultExtractor


def main(path):
    """Extract all of Trump's insults and export them as CSV to *path*."""
    insults = InsultExtractor()
    insults.extract()
    insults.export_to_csv(path)


if __name__ == '__main__':
    root_logger = get_root_logger()
    get_header(root_logger,
               "Extracting all of Trump's insults. This may take a while...")
    parser = argparse.ArgumentParser()
    # Fix: help text previously read "where would you like to
    # extract_insult_tweets to?" — copy-pasted from the tweet-extractor
    # script. This script exports insults, not tweets.
    parser.add_argument(
        '--path', required=True,
        help='where would you like to export the insults CSV to?')
    args = parser.parse_args()
    main(args.path)
insult_tweets = pd.merge( self.insults, self.insult_tweets, left_on='tweet_id', right_on='id', how='inner' ).loc[:, ['name', 'text_x', 'id_x', 'trump', 'campaign', 'created_at' ]].drop_duplicates() # index by time insult_tweets['created_at'] = pd.to_datetime( insult_tweets['created_at']) insult_tweets.set_index('created_at', inplace=True) # resample to daily + take rolling 7-day avg insult_tweets = insult_tweets.resample('D').sum() insult_tweets_avg = insult_tweets.rolling(window=7, min_periods=1).mean() print('break') if __name__ == '__main__': logger = get_root_logger() logger = get_header(logger, 'Running Tweet Analysis') ta = TweetAnalysis() ta.load_data() ta.transform()
from lw import get_logger, get_header, get_root_logger
import argparse

from src import InsultExtractor


def main(path):
    """Extract all of Trump's insults and export them as CSV to *path*."""
    insults = InsultExtractor()
    insults.extract()
    insults.export_to_csv(path)


if __name__ == '__main__':
    root_logger = get_root_logger()
    get_header(root_logger,
               "Extracting all of Trump's insults. This may take a while...")
    parser = argparse.ArgumentParser()
    # Fix: help text previously read "where would you like to
    # extract_insult_tweets to?" — copy-pasted from the tweet-extractor
    # script. This script exports insults, not tweets.
    parser.add_argument(
        '--path', required=True,
        help='where would you like to export the insults CSV to?')
    args = parser.parse_args()
    main(args.path)
# NOTE(review): fragment — this span opens mid pd.merge(...) argument
# list; the `insult_tweets = pd.merge(` opener lies outside this view.
self.insult_tweets, left_on='tweet_id', right_on='id', how='inner'
# Keep only the merged columns of interest and drop exact duplicates.
).loc[:, ['name', 'text_x', 'id_x', 'trump', 'campaign',
          'created_at']].drop_duplicates()

# index by time
insult_tweets['created_at'] = pd.to_datetime(insult_tweets['created_at'])
insult_tweets.set_index('created_at', inplace=True)

# resample to daily + take rolling 7-day avg
insult_tweets = insult_tweets.resample('D').sum()
insult_tweets_avg = insult_tweets.rolling(window=7, min_periods=1).mean()

# NOTE(review): leftover debug marker — candidate for removal.
print('break')

if __name__ == '__main__':
    logger = get_root_logger()
    logger = get_header(logger, 'Running Tweet Analysis')
    ta = TweetAnalysis()
    ta.load_data()
    ta.transform()
from lw import get_logger, get_header, get_root_logger
import argparse

from src import TweetExtractor


def main(path):
    """Pull the insult tweets and the full tweet history, then write CSVs to *path*."""
    extractor = TweetExtractor()
    extractor.extract_insult_tweets()
    extractor.extract_all_tweets()
    extractor.export_to_csv(path)


def _parse_args():
    # CLI surface: a single required output path.
    parser = argparse.ArgumentParser()
    parser.add_argument("--path", required=True,
                        help="where would you like to extract_insult_tweets to?")
    return parser.parse_args()


if __name__ == "__main__":
    root_logger = get_root_logger()
    get_header(root_logger,
               "Extracting all of Trump's insulting Tweets. Lucky Me...")
    main(_parse_args().path)
from lw import get_logger, get_header, get_root_logger
import argparse

from src import TweetExtractor


def main(path):
    """Run the full tweet-extraction pipeline and export the results to *path*."""
    tweet_extractor = TweetExtractor()
    tweet_extractor.extract_insult_tweets()
    tweet_extractor.extract_all_tweets()
    tweet_extractor.export_to_csv(path)


if __name__ == '__main__':
    # Banner the run before doing any work.
    root_logger = get_root_logger()
    get_header(root_logger,
               "Extracting all of Trump's insulting Tweets. Lucky Me...")
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--path', required=True,
        help='where would you like to extract_insult_tweets to?')
    cli_args = arg_parser.parse_args()
    main(cli_args.path)