def get_sentiment(company_id, text):
    """Return the average keyword sentiment score for *text*.

    Queries AlchemyAPI's keyword extraction (with sentiment enabled),
    rotating through the available API keys until one responds.

    Args:
        company_id: kept for interface compatibility; not used here.
        text: the document to analyse.

    Returns:
        The mean sentiment score rounded to two decimals, 0 when there
        are no keywords / no usable scores or the text language is
        unsupported, or None when every API key errored out.
    """
    alchemyapi = AlchemyAPI()
    key_phrases = []
    for apikey in engine.get_random_alchemy_credentials():
        alchemyapi.apikey = apikey
        response = alchemyapi.keywords('text', text, {'sentiment': 1})
        if response['status'] == 'OK':
            if len(response['keywords']) == 0:
                return 0
            for keyword in response["keywords"]:
                if 'sentiment' in keyword:
                    # 'score' is absent on neutral entries, which count as 0.
                    # ('in' replaces the deprecated dict.has_key, removed in Py3.)
                    if 'score' in keyword['sentiment']:
                        key_phrases.append(float(keyword['sentiment']['score']))
                    elif keyword['sentiment']['type'] == 'neutral':
                        key_phrases.append(0)
            if len(key_phrases) == 0:
                return 0
            else:
                return float("{0:.2f}".format(sum(key_phrases) / len(key_phrases)))
        elif response['status'] == 'ERROR' and response['statusInfo'] != 'unsupported-text-language':
            # print() form is valid under both Python 2 and Python 3.
            print("ERROR: getting sentiment " + response['statusInfo'])
            # Skip onto the next api key
            continue
        else:
            print("None of the above " + response['statusInfo'])
            return 0
    # Return none when all api keys are exhausted
    return None
def get_sentiment(text):
    """Fetch the document-level sentiment score for *text*.

    Tries each available API key in turn; the first response that
    carries a 'docSentiment' section wins.  Returns its score (the
    string '0' when the score field is missing), or None implicitly
    once every key has been tried without a usable response.
    """
    client = AlchemyAPI()
    for credential in utils.get_random_alchemy_credentials():
        client.apikey = credential
        result = client.sentiment("text", text)
        if 'docSentiment' in result:
            return result['docSentiment'].get('score', '0')
def store_concepts(tweets):
    """Extract AlchemyAPI concepts from *tweets* and append each
    concept's text to the module-level `concepts` list.

    Side-effect only; returns nothing.  On any non-'OK' response the
    list is left untouched.
    """
    corpus = ' '.join(tweets)
    client = AlchemyAPI()
    # NOTE(review): sibling functions iterate get_random_alchemy_credentials();
    # here its return value is assigned directly as the key — confirm this
    # module's helper yields a single key, not an iterable.
    client.apikey = get_random_alchemy_credentials()
    response = client.concepts('text', corpus)
    if response['status'] == 'OK':
        concepts.extend(entry['text'] for entry in response['concepts'])
def get_sentiment(text):
    """Average the sentiment scores of keywords relevant to the company.

    A keyword's score is counted once per company concept word that
    appears in *text* (so a keyword may contribute multiple times);
    neutral keywords count as 0.5.

    Returns:
        None when the response has no keywords, 0.5 when nothing
        relevant was scored, otherwise the mean score rounded to two
        decimals.
    """
    client = AlchemyAPI()
    client.apikey = get_random_alchemy_credentials()
    response = client.keywords('text', text, {'sentiment': 1})
    if not response.get('keywords'):
        return None

    scores = []
    lowered = text.lower()
    for entry in response["keywords"]:
        for term in concepts:
            if term.lower() in lowered and 'sentiment' in entry:
                sentiment = entry['sentiment']
                if 'score' in sentiment:
                    scores.append(float(sentiment['score']))
                elif sentiment['type'] == 'neutral':
                    scores.append(0.5)

    if scores:
        return float("{0:.2f}".format(sum(scores) / len(scores)))
    return 0.5
def generate_concepts_for_company(company_id, tweets):
    """Fetch AlchemyAPI concepts for a company's tweets.

    Joins *tweets* into one document and queries each available API key
    in turn.

    Args:
        company_id: kept for interface compatibility; not used here.
        tweets: list of tweet strings.

    Returns:
        The list of concept texts from the first successful response,
        or None when every API key has been exhausted without success.
    """
    document = ' '.join(tweets)
    alchemyapi = AlchemyAPI()
    for apikey in engine.get_random_alchemy_credentials():
        alchemyapi.apikey = apikey
        response = alchemyapi.concepts('text', document)
        if response['status'] == 'OK':
            # Stop at the first success: the original fell through and
            # re-queried (and reset its results) with every remaining key.
            return [concept['text'] for concept in response['concepts']]
        if response['status'] == 'ERROR' and tweets != []:
            # print() form is valid under both Python 2 and Python 3.
            print("ERROR getting concepts" + response['statusInfo'])
            # Move onto the next api key
            continue
    # Return None when all api keys are exhausted
    return None
urllib3.contrib.pyopenssl.inject_into_urllib3() # Import the necessary methods from tweepy library from tweepy.streaming import StreamListener from tweepy import OAuthHandler from tweepy import Stream import json import re, string #API Keys import keys # AlchemyAPI from alchemyapi import AlchemyAPI alchemyapi = AlchemyAPI() alchemyapi.apikey = keys.alchemy_apikey #TwitterAPI keys consumer_key = keys.twitter_consumer_key consumer_secret = keys.twitter_consumer_secret access_token = keys.twitter_access_token access_token_secret = keys.twitter_access_token_secret # Tweet object class Tweet: def __init__(self, author, text, location): self.author = author self.text = text self.location = location