Example #1
import json
import random

import tweepy
from flask import redirect, render_template, request, url_for
from tweepy import OAuthHandler

import stream                   # project module that provides gather_tweets()
from analyzer import Analyzer   # assumed import path for the project's Analyzer class

# CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN and ACCESS_TOKEN_SECRET are
# assumed to be provided by the project's configuration.


def home():
    if request.method == 'GET':
        # render homepage template
        return render_template('boot.html')
    else:
        # grab the POST form data; flat=False keeps every field as a list of
        # values, which is what the [0] indexing below expects
        data = request.form.to_dict(flat=False)

        # serialize it to a compact JSON string
        jsondata = json.dumps(data, separators=(',', ':'))
        if 'topic' in jsondata:
            # load the data back into a plain dictionary (each value is a list)
            new_data = json.loads(jsondata)

            # create random number for this graph
            new_data['rand'] = str(int(random.random() * 999999999))

            # connect to twitter
            auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
            auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
            api = tweepy.API(auth)

            # get the tweets
            tweets = stream.gather_tweets(
                api, auth, keyword=new_data['topic'][0],
                limit=int(new_data['limit'][0]))

            # Create analyzer
            analyzer = Analyzer(tweets, new_data['topic'][0])
            analyzer.save_sentiment_data(int(new_data['rand']))

            # render results page
            return redirect(url_for('log', data=json.dumps(new_data),
                                    mode='debug'))
        elif 'username' in jsondata:
            # load the data back into a plain dictionary (each value is a list)
            new_data = json.loads(jsondata)

            # create random number for this graph
            new_data['rand'] = str(int(random.random() * 999999999))

            # connect to twitter
            auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
            auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
            api = tweepy.API(auth)

            # get the tweets
            tweets = stream.gather_tweets(
                api, auth, username=new_data['username'][0], limit=50)

            # Create analyzer
            analyzer = Analyzer(tweets, new_data['username'][0])

            analyzer.save_sentiment_data(int(new_data['rand']))
            return redirect(url_for('log', data=json.dumps(new_data),
                                    mode='debug'))
        else:
            # neither 'topic' nor 'username' was submitted; fall back to the homepage
            return render_template('boot.html')
Example #2
                print(tweet.text.encode('utf-8'))
                tweets.append(tweet._json)
                count += 1
                if count > limit:
                    break
        return tweets
    elif keyword:
        # stream live tweets that match the keyword
        l = StdOutListener()
        stream = Stream(auth, l)
        # expose the requested limit at module level so the listener can see it
        global lim
        lim = limit
        # blocks until the listener disconnects the stream
        stream.filter(track=[keyword])
        # return the tweets that were written to tweet_stream.pickle while streaming
        with open('tweet_stream.pickle', 'rb') as f:
            return pickle.load(f)
    else:
        raise ValueError("Invalid arguments: username and keyword "
                         "can't both be None")

if __name__ == '__main__':
    s = 'baltimore'  # keyword to stream and analyze
    auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
    api = tweepy.API(auth)
    # tweets = gather_tweets(username=s) # last 30 tweets
    tweets = gather_tweets(keyword=s, limit=30)
    # Create analyzer
    analyzer = Analyzer(tweets, s)
    avg = analyzer.calc_sentiment()
    # keywords = analyzer.get_keywords()
    analyzer.save_sentiment_data()
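
The StdOutListener used in the keyword branch of Example #2 is not part of the excerpt; the code only shows that the listener is driven by the module-level lim and that the collected tweets end up in tweet_stream.pickle. A minimal sketch of what such a listener could look like with tweepy 3.x (the class name and the pickle file name come from the excerpt, everything else is an assumption):

import pickle

import tweepy

# module-level limit; gather_tweets() overwrites this before streaming starts
lim = 10


class StdOutListener(tweepy.StreamListener):
    """Collects streamed tweets and pickles them once lim is reached."""

    def __init__(self):
        super().__init__()
        self.collected = []

    def on_status(self, status):
        # keep the raw JSON payload, matching what the username branch stores
        self.collected.append(status._json)
        if len(self.collected) >= lim:
            # write the gathered tweets where gather_tweets() expects them
            with open('tweet_stream.pickle', 'wb') as f:
                pickle.dump(self.collected, f)
            return False  # returning False disconnects the stream
        return True

    def on_error(self, status_code):
        # stop streaming on errors such as rate limiting (HTTP 420)
        return False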