Example #1
0
 def enrich_tweet(self, scrap):
     """Populate this tweet's fields from a scraped element.

     Runs every private fetcher against ``scrap`` (counters, text,
     hashtags, date, username, tweet id) and finally asks a
     ``TwitterClient`` for the sentiment of the fetched text.

     :param scrap: scraped tweet element the ``_fetch_*`` helpers parse
     """
     self._fetch_counters(scrap)
     # NOTE(review): called before _fetch_text, yet it takes no argument --
     # presumably it cleans state set by _fetch_counters or a prior field;
     # confirm the intended ordering against the helper implementations.
     self._remove_punctuations()
     self._fetch_text(scrap)
     self._fetch_hashtags(scrap)
     # False presumably disables some date-parsing option -- confirm in _fetch_date.
     self._fetch_date(scrap, False)
     self._fetch_username(scrap)
     self._fetch_tweet_id(scrap)
     # Sentiment is computed from the text extracted above.
     self._sentiment = TwitterClient().get_sentiment(self.text)
Example #2
0
    # Obtain a model: retrain on demand, otherwise load the cached one and
    # fall back to training if the cache is missing or unreadable.
    if args.train:
        print("Forcing the model to retrain")
        model = train(args.eval)
    else:
        try:
            model = joblib.load(model_filename)
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the broadest safe net here since
        # joblib can raise several error types on a missing/corrupt file.
        except Exception:
            model = train(args.eval)

    cv = model['vec']   # fitted vectorizer
    clf = model['clf']  # fitted classifier

    # Tweets either come from a file (one per line) or live from Twitter.
    if args.tweets_file is not None:
        with open(args.tweets_file, 'r') as f:
            # FIX: splitlines() instead of split('\n') -- the latter produced
            # an empty trailing "tweet" when the file ended with a newline.
            tweets = f.read().splitlines()
            df = pd.DataFrame(tweets, columns=['text'])
    else:
        client = TwitterClient()
        tweets = client.get_tweets(query=args.query, count=200)
        df = pd.DataFrame(tweets)

    # Clean, vectorize, and predict a sentiment label per tweet.
    tc = TextCleaner()
    cleaned_text = tc.fit_transform(df.text)

    counts = cv.transform(cleaned_text)
    preds = clf.predict(counts)

    for text, pred in zip(df.text, preds):
        print("\nSentiment: %s. Tweet: %s" % (pred, text))
Example #3
0
	def __init__(self, cfg):
		"""Initialise the Twitter client and open the planet database.

		:param cfg: ConfigParser-like object; the DSN is read from the
		            ``[planet] db`` option.
		"""
		TwitterClient.__init__(self, cfg)

		# Make psycopg2 return unicode strings for text columns.
		psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
		# FIX: original read `c.get('planet','db')` but no `c` is in scope
		# here -- the config object passed in is `cfg`.  Confirm no module
		# global `c` was intended.
		self.db = psycopg2.connect(cfg.get('planet', 'db'))
Example #4
0
from testserver import Server # see server.py

import logging
LOG_FILE = 'kwitter.log'
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG)

if __name__ == '__main__':
    # Load tweets to test from file
    with open("test_tweets.txt", "r") as source_tweets:
        tweets = source_tweets.readlines()
    # And the error message
    with open("test_errormsg.txt", "r") as error_source:
        error_msg = error_source.readlines()
  
    title = "Kwitter"  # Window title
    client = TwitterClient() # The test tool
    
    try:
       # Try to start Firefox + load the page
       server = Server() 
       client.get_url(server.port)
       client.assert_connection(title)
       
       try:
           msg = "Startar testning"
           logging.info(msg)
           print(msg)        
           # Run the UI test suite; error_msg[0] is the expected error text.
           client.test_tweets(tweets, error_msg[0])
           client.test_checkboxes()
           client.test_refresh()
           
Example #5
0
        logging.basicConfig(filename=LOG_FILENAME,
                            level=logging.INFO,
                            format='%(asctime)s:%(levelname)s:%(message)s')

    
    logging.info("Starting tweetStream %s" % __version__)

    # A username is required as the first CLI argument.
    # NOTE: this chunk is Python 2 (print statements below).
    if len(sys.argv) < 2:
        logging.fatal("No username given! Stopping...")
        print "Usage: %s [username] [-c]" % sys.argv[0]
        sys.exit(1)
        
    uname = sys.argv[1]
    logging.info("Given username %s" % uname)

    client = TwitterClient(uname)
    # '-c' = console mode: poll storage every 3 seconds and print each
    # tweet exactly once (tracked by guid) until Ctrl-C.
    if len(sys.argv) == 3 and sys.argv[2] == '-c':
        client.start()
        shown = []  # guids already printed
        while True:
            try:
                time.sleep(3)
                c = client.storage.get_all_sorted()
                for t in c:
                    if t.guid not in shown:
                        print t
                        shown.append(t.guid)
            except KeyboardInterrupt:
                # Ctrl-C: stop the client cleanly before exiting.
                client.stop()
                sys.exit(0)
    else:
Example #6
0
    help=
    "Number of tweets to consider - *Warning* This may trigger multiple search requests. Each twitter search requests has a limitation 100 tweets. Default will be best effort",
    default=1000)
parser.add_argument("-st",
                    "--search_type",
                    help="recent(default)/popular/mixed",
                    default="recent")
parser.add_argument("-lang",
                    "--language",
                    help="language tweets",
                    default="en")
### End of arguments ###

args = parser.parse_args()

tClient = TwitterClient()
# NOTE(review): no `type=int` is visible on the --tweet_count definition
# (it is cut off above this chunk), so a value supplied on the command line
# arrives as str and this comparison would raise TypeError on Python 3;
# it only works with the integer default.  Confirm the add_argument call.
if args.tweet_count <= 100:
    # One search request suffices: a single Twitter search is capped at
    # 100 tweets (per the --tweet_count help text).
    search_results = tClient.search_until(k=args.keyword,
                                          tc=args.tweet_count,
                                          st=args.search_type,
                                          ud=args.since_date,
                                          lan=args.language)
    opin_dict = tClient.opinion_mining(search_results['statuses'])
    # Commented-out experiment: Telugu search + translation (Python 2 syntax).
    '''
    te_search_results = twitter.search(q=keyword, count=tweet_count, result_type='mixed', since_id=since, lang="te")

    trans = Translator()
    for result in te_search_results['statuses']:
        print result['text']
        trans.translate(result['text'])
    '''
Example #7
0
from twitterclient import TwitterClient
import argparse
from collections import OrderedDict
import pprint


### Start of arguments ###
parser = argparse.ArgumentParser()
parser.add_argument("Movie", help="keyword to search for")
parser.add_argument("since_date", help="The date from which tweets should be analyzed")
# FIX: `type=bool` is broken for CLI parsing -- bool("False") is True, so ANY
# value passed to -v enabled verbose mode.  Parse common truthy spellings
# instead; the flag still takes one value, so the CLI shape is unchanged.
parser.add_argument("-v", "--verbose",
                    help="Print traces for debugging",
                    type=lambda s: s.strip().lower() in ("1", "true", "yes", "y"),
                    default=False)
### End of arguments ###

####main####
args = parser.parse_args()
tClient = TwitterClient()

# (label, threshold) pairs, best rating first -- presumably the minimum
# positive-sentiment percentage per label; confirm in review_mining_multi.
ReviewRules = [('VeryGood' , 90),
               ('Good'     , 80),
               ('Watchable', 70),
               ('Average'  , 60),
               ('Bad'      , 30),
               ('VeryBad'  ,  0)]

multi_search_results = tClient.multi_search_until(k=args.Movie, ud=args.since_date, verbose=args.verbose)
total_tweets, opinion_dictionary = tClient.review_mining_multi(multi_search_results, ReviewRules)

'''
opinion dictionary is a dictionary of dictionaries has the following structure

{Sentiment : {tweet_id: tweet_text}}
	def __init__(self, cparser, user, listname, token, secret, members):
		"""Wrap the config and initialise the Twitter client for one list.

		:param cparser:  config parser handed to TwitterConfWrapper
		:param user:     account owning the list
		:param listname: name of the Twitter list
		:param token:    OAuth token
		:param secret:   OAuth token secret
		:param members:  rows whose first column is a member handle
		"""
		self.confwrap = TwitterConfWrapper(cparser, user, listname,
										   token, secret)
		# Idiom fix: set comprehension instead of set([...]) -- same result
		# without building a throwaway list first.
		self.members = {self._normalize_handle(r[0]) for r in members}

		TwitterClient.__init__(self, self.confwrap)