def SA_text(text_content, service_account):
    """Run Google Cloud Natural Language sentiment analysis on a text string.

    Args:
        text_content: Plain-text string to analyse.
        service_account: Path to the service-account JSON credentials file.

    Returns:
        A ``Sentiment.Sentiment`` built from the document-level score and
        magnitude, or ``None`` if the API rejects the request.
    """
    client = language_v1.LanguageServiceClient.from_service_account_json(
        service_account)
    document = {
        "content": text_content,
        "type_": language_v1.Document.Type.PLAIN_TEXT,
    }
    try:
        response = client.analyze_sentiment(request={
            'document': document,
            'encoding_type': language_v1.EncodingType.UTF8,
        })
    except exceptions.InvalidArgument as e:
        # The API rejected the document (e.g. unsupported content/language).
        print('ERROR ANALYSING TEXT USING GOOGLE ANALYSIS:')
        print(e.message)
        return None
    # Score is the overall emotional leaning of the text;
    # magnitude indicates the overall strength of the emotion.
    score = response.document_sentiment.score
    magnitude = response.document_sentiment.magnitude
    sentiment = Sentiment.Sentiment(score, magnitude)  # was misspelled 'sentimemt'
    print('SA RESULTS TEXT: {}'.format(text_content))
    print(sentiment)
    return sentiment
def analyse_sentiment(data, tweets):
    """Score *tweets* with the VADER analyser and attach results to *data*.

    Returns the data frame with 'Nltk_Sentiment_Score' and 'Nltk_Sentiment'
    columns appended.
    """
    sentiments, scores = st.Sentiment(tweets).check_sentiment_vader()
    enriched = append_to_data_frame(data, 'Nltk_Sentiment_Score', scores)
    return append_to_data_frame(enriched, 'Nltk_Sentiment', sentiments)
def main():
    """Populate the sentiment, review_stats and common_phrases tables.

    Pipeline: fetch 50 reviews, classify each with TextBlob's Naive Bayes
    analyser, persist per-review sentiment rows, write aggregate review
    stats, then build per-business word data for common_phrases.
    """
    # Empty-string args satisfy the constructor; only get_reviews is used here.
    review = Review("", "", "", "", "", "", "", "", "", "")
    reviews = review.get_reviews("50")  # get 50 results from the database

    # --- sentiment table ---
    for a_review in reviews:
        blob = TextBlob(a_review.text, analyzer=NaiveBayesAnalyzer())
        # text_sentiment is 'pos' (positive) or 'neg' (negative)
        text_sentiment = blob.sentiment[SENTIMENT_TYPE]
        sentiment = Sentiment(a_review.review_id, a_review.business_id,
                              text_sentiment)
        sentiment.insert()  # insert information into the sentiment table

    # --- review_stats table ---
    review_stats = Review_stats("", "", "", "", "")
    # insert positive and negative reviews' information to review_stats table
    review_stats.insert()

    # --- common_phrases table ---
    business = Business("", "", "", "", "", "", "", "", "", "", "")
    # acquire all business_ids from sentiment table
    business_ids = business.get_all_business_ids()
    word_dictionary = dict()
    for business_id in business_ids:
        review = Review("", "", "", "", "", "", business_id.business_id,
                        "", "", "")
        # get all reviews by business_id
        business_reviews = review.get_reviews_by_business_id()
        # insert data into common_phrases table
        insert_words(business_id.business_id, business_reviews,
                     word_dictionary)
def __init__(self, subreddit, sentimentInterval, dataSet):
    """Bind a subreddit handle, an empty result frame, and a sentiment engine."""
    self.subreddit = subreddit
    self.sentimentInterval = sentimentInterval
    # Authenticate against Reddit and resolve the requested subreddit.
    reddit = praw.Reddit(client_id=client_id,
                         client_secret=client_secret,
                         user_agent="testscript by u/spaceballcookie")
    self.subInstance = reddit.subreddit(subreddit)
    # One row per analysed post will be appended to this frame.
    columns = ['postID', 'ticker', 'sentiment', 'magnitude',
               'subreddit', 'permalink', 'date', 'body']
    self.df = pd.DataFrame(columns=columns)
    self.sentiment = Sentiment()
    self.dataSet = dataSet
from Sentiment import *

import sys

# Guard the CLI entry point so importing this module has no side effects
# and a missing argument gives a usage message instead of an IndexError.
if __name__ == "__main__":
    if len(sys.argv) < 2:
        sys.exit("usage: python {} <input-file>".format(sys.argv[0]))
    f = sys.argv[1]
    S = Sentiment(f)