def tweet_analysis(content):
    """Return the AlchemyAPI document-level sentiment score for *content*.

    Returns:
        -2 when the sentiment API call fails (error sentinel kept for
        existing callers), 0 when the response has no 'score' field
        (AlchemyAPI omits it for neutral documents), otherwise the
        score coerced to float.
    """
    al = acm.AlchemyAPI()
    try:
        doc_sentiment = al.sentiment('text', content)['docSentiment']
    # FIX: was a bare `except:` — that also swallows SystemExit and
    # KeyboardInterrupt. Keep the -2 sentinel but catch only Exception.
    except Exception:
        print("AlchemyAPI exception")
        return -2
    try:
        result = float(doc_sentiment['score'])
    except KeyError:
        # Neutral documents come back without a 'score' key.
        result = 0
    return result
def sentiment_analysis(text):
    """Return the sentiment 'score' reported by AlchemyAPI for *text*.

    Returns:
        The 'score' value from the docSentiment payload on success,
        or 0 when the call fails or the response carries no score
        (AlchemyAPI omits 'score' for neutral documents).
    """
    api = alchemyapi.AlchemyAPI()
    response = api.sentiment('text', text)
    if response['status'] == 'OK':
        print('type: ', response['docSentiment']['type'])
        if 'score' in response['docSentiment']:
            return response['docSentiment']['score']
        # BUG FIX: previously this path fell through and implicitly
        # returned None; neutral documents now yield 0 like errors do.
        return 0
    else:
        print('Error in sentiment call: ', response['statusInfo'])
        return 0
def getEntitiesKeywords(text):
    """Extract entity and keyword strings from *text* via AlchemyAPI.

    Returns:
        (entities, keywords): two lists of UTF-8 encoded byte strings
        (the original `.encode('utf-8')` behavior is preserved).
    Raises:
        Exception: when either API call reports a non-OK status.
    """
    api = alchemyapi.AlchemyAPI()

    response = api.entities('text', text, {'sentiment': 1})
    _raise_on_alchemy_error(text, response)
    entities = [entity['text'].encode('utf-8') for entity in response['entities']]

    # Crude rate limiting between the two successive API calls.
    time.sleep(10)

    response = api.keywords('text', text, {'sentiment': 1})
    _raise_on_alchemy_error(text, response)
    keywords = [keyword['text'].encode('utf-8') for keyword in response['keywords']]

    return entities, keywords


def _raise_on_alchemy_error(text, response):
    """Raise when an AlchemyAPI response reports a non-OK status."""
    if response['status'] != 'OK':
        raise Exception(
            'Error from alchemy.\nText:%s\nStatus is:%s\nResponse:%s'
            % (text, response['status'], response))
def __init__(self):
    """Open the 'newslist' S3 bucket and create an AlchemyAPI client.

    SECURITY FIX: the AWS access key and secret were hardcoded in this
    file. They are now read from the standard AWS_ACCESS_KEY_ID /
    AWS_SECRET_ACCESS_KEY environment variables (raises KeyError when
    unset, which is preferable to shipping credentials in source).
    The previously committed key pair must be revoked.
    """
    import os  # local import: only this method needs it
    s3 = S3Connection(os.environ['AWS_ACCESS_KEY_ID'],
                      os.environ['AWS_SECRET_ACCESS_KEY'])
    self.bucket = s3.get_bucket('newslist')
    self.alchemy = alchemyapi.AlchemyAPI()
#!/usr/bin/python # -*- coding: utf-8 -*- # import sqlite3 as lite #./manage.py shell < articile_into_database.py import sys import alchemyapi # from blog.keyword_handler import KeywordHandler from blog.models import NewsList inst = alchemyapi.AlchemyAPI() ### NewsList.objects.all().delete() f=open('cnn_news.txt','r') temp = f.read().splitlines() num=0 while (num<1): url = temp[num] num = num +1 print url # url = "http://cnn.com/2015/04/06/opinions/wang-china-women-detained/index.html" # url = 'http://edition.cnn.com/2015/04/18/africa/south-africa-xenophobia-explainer/index.html' keywords = inst.keywords("url", url) text = inst.text("url",url) title = inst.title("url",url) author = inst.author("url",url) print keywords
import alchemyapi
# NOTE(review): the module name is immediately shadowed by a client
# instance; all later code must use this object, not the module.
alchemyapi = alchemyapi.AlchemyAPI()
from bs4 import BeautifulSoup
import urllib3
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# Accumulators for the final report (lists, filled elsewhere in the file).
report_headlines = []
report_keywords = []

def send_email_report():
    """Assemble the HTML 'Newsflash Report' email from report_headlines.

    NOTE(review): this definition appears truncated in the visible
    source — the HTML body is being built but no SMTP send or keyword
    section is visible here; confirm against the full file.
    """
    #Set up Email Config
    # NOTE(review): myaddr is a placeholder and is not used in the
    # visible portion of this function.
    myaddr = 'YOUR-GMAIL-ADDRESS'
    msg = MIMEMultipart('alternative')
    msg['Subject'] = "Newsflash Report"
    msg['From'] = 'me'
    msg['To'] = 'me'

    # Render each collected headline dict as an HTML <li> link.
    # NOTE(review): d['href'].encode('utf-8') yields bytes on Python 3,
    # which would make this concatenation raise TypeError — presumably
    # this script targets Python 2; verify.
    prepared_headlines = []
    for d in report_headlines:
        prepared_headlines.append('<li>From ' + d['source'] + ' <a href="' + d['href'].encode('utf-8') + '">' + d['title'] + '</a><br>\n')

    html = '<h2>Newsflash: Headlines</h2>'
    html += '<ul style="max-height:300px;overflow-y:scroll;border:1px solid grey;">'
    #Insert headline links
    for link in prepared_headlines:
        html += link
def __init__(self):
    """Create the AlchemyAPI client used for keyword extraction."""
    self.keyword_extracter = alchemyapi.AlchemyAPI()