Example #1
def test(request):
	context = {}
	client = IODClient(APIURL, APIKEY)

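	# Analyze the sentiment of the sample text and read the overall polarity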
	r=client.post('analyzesentiment',{'text':'I like cats'})
	analyzesentiment=r.json()
	sentiment = analyzesentiment['aggregate']['sentiment']
	context['sentiment']=analyzesentiment
	highlight_sentiment = ''

	# Collect the topic terms reported for the overall sentiment
	for word in analyzesentiment[sentiment]:
		highlight_sentiment += '{},'.format(word['topic'])
		print highlight_sentiment + " here"

	r=client.post('highlighttext',{'text':'I like cats', 'highlight_expression':'{}'.format(highlight_sentiment), 'start_tag':'<b>', 'end_tag':'</b>', })
	context['highlight']=r.json()['text']

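	# Push a few sample documents into the 'mailsift' index and commit them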
	index = client.getIndex('mailsift')

	doc1={'reference':'doc1','title':'title1','content':'this is my content'}
	doc2={'reference':'doc2','title':'title2','content':'this is another content'}
	doc3={'reference':'doc3','title':'title2','content':'this is another content'}
	doc4={'reference':'doc2','title':'titleNew','content':'this is another content alksdjflkjasdfkljaslkdf'}
	docs = [doc1, doc2, doc3, doc4]
	index.addDocs([doc1, doc2, doc3, doc4])
	for doc in docs:
		index.pushDoc(doc)
	print index.size()
	index.commit()
	print index.size()


	return render(request, 'parse/test.html', context)
Example #2
def main(input, delete, resume, config):

    # Merge the base config with the per-run input config
    config = json.loads(config.read())
    config.update(json.loads(input.read()))
    client = IODClient("http://api.idolondemand.com/",
                       config["iodkey"])
    if delete:
        deleteIndex(config["iodindex"])

    category = False
    description = False
    flavor = "standard"
    if config["type"] == "categories":
        category = True
        flavor = "categorization"

    if config.get("description", False):
        description = True

    # Create the index with the chosen flavor; fall back to the existing index
    try:
        index = client.createIndex(config["iodindex"], flavor=flavor,
                                   index_fields=config.get("indexfields", []),
                                   parametric_fields=config.get("parametricfields", []))
    except:
        index = client.getIndex(config["iodindex"])

    # Resume from a previously saved Freebase cursor if requested
    cursor = ""
    if resume:
        cursor = open('cursor', 'rb').read()

    query = config["freebasequery"]
    freebaseUtil = FreebaseUtil(config["freebasekey"])
    freebaseUtil.aliases = config["aliasfields"]
    freebaseUtil.runQuery(index, query, category=category, description=description, cursor=cursor)
Example #3
def generate_tags(request, feedback_pk):
	context = {}
	client = IODClient(APIURL, APIKEY)
	feedback_email = Email.objects.get(pk=feedback_pk)

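	# Find the most similar indexed email and copy its tags onto this one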
	index = client.getIndex('mailsift')
	r = client.post('findsimilar', {'text':feedback_email.message, 'indexes':'mailsift'})
	similar_feedback = r.json()
	if len(similar_feedback['documents']) > 0:
		similar_email = Email.objects.get(pk=similar_feedback['documents'][0]['reference'])
		feedback_email.tags = similar_email.tags
	else:
		feedback_email.tags = ['']
	feedback_email.save()

	doc1={'reference':feedback_email.pk,'title':feedback_email.subject, 'content':feedback_email.message}
	docs = [doc1]
	index.addDocs([doc1])
	for doc in docs:
		index.pushDoc(doc)
	print index.size()
	index.commit()
	print index.size()
	context['test'] = feedback_email
	return render(request, 'parse/test.html', context)
Example #4
def generate_highlight(request, feedback_pk):
    context = {}
    client = IODClient(APIURL, APIKEY)

    feedback_email = Email.objects.get(pk=feedback_pk)

    r = client.post('analyzesentiment', {'text': feedback_email.message})
    analyzesentiment = r.json()
    sentiment = analyzesentiment['aggregate']['sentiment']
    context['sentiment'] = analyzesentiment
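    # Scale the aggregate score (roughly -1..1) onto a 0-100 sentiment value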
    feedback_email.sentiment = 50 + 50 * analyzesentiment['aggregate']['score']
    highlight_sentiment = ''

    for word in analyzesentiment[sentiment]:
        highlight_sentiment += '{},'.format(word['topic'])

    r = client.post(
        'highlighttext', {
            'text': feedback_email.message,
            'highlight_expression': '{}'.format(highlight_sentiment),
            'start_tag': '<b>',
            'end_tag': '</b>',
        })
    feedback_email.content = r.json()['text']
    feedback_email.priority = feedback_email.sentiment + 40
    feedback_email.save()
    context['test'] = feedback_email

    return render(request, 'parse/test.html', context)
Example #5
def generate_highlight(request, feedback_pk):
	context = {}
	client = IODClient(APIURL, APIKEY)

	feedback_email = Email.objects.get(pk=feedback_pk)

	r=client.post('analyzesentiment',{'text':feedback_email.message})
	analyzesentiment=r.json()
	sentiment = analyzesentiment['aggregate']['sentiment']
	context['sentiment']=analyzesentiment
	feedback_email.sentiment = 50 + 50 * analyzesentiment['aggregate']['score']
	highlight_sentiment = ''

	for word in analyzesentiment[sentiment]:
		highlight_sentiment += '{},'.format(word['topic'])

	r=client.post('highlighttext',{'text':feedback_email.message, 'highlight_expression':'{}'.format(highlight_sentiment), 'start_tag':'<b>', 'end_tag':'</b>', })
	feedback_email.content=r.json()['text']
	feedback_email.priority = feedback_email.sentiment + 40
	feedback_email.save()
	context['test'] = feedback_email

	return render(request, 'parse/test.html', context)
Example #6
class SentimentAnalyser:
    def __init__(self):
        self.client = IODClient("http://api.idolondemand.com/","07a106d0-ff07-496b-a1b5-288b752da744")
        self.hashtags = {"#jeb": "Jeb Bush", "#clinton": "Hillary Clinton", "#trump": "Donald Trump", "#hilary": "Hillary Clinton", "#berniesanders": "Bernie Sanders"}
    def generateSentiment(self, text):
        r = self.client.post('analyzesentiment',{'text':text})
        docs = r.json()
        if docs['aggregate']['score'] != 0:
            # Collect per-topic scores from both the positive and negative term lists
            dictJson = {"candidate": self.getCandidate(text)}
            if docs['positive'] != []:
                for doc in docs['positive']:
                    dictJson[doc['topic']] = doc['score']
            if docs['negative'] != []:
                for doc in docs['negative']:
                    dictJson[doc['topic']] = doc['score']
            return dictJson
    def getCandidate(self, tweet):
        try:
            x = next(value for key, value in self.hashtags.iteritems() if key in tweet)
        except StopIteration:
            x = "Jeb Bush"               
        return x
Example #7
import collections
from alchemyapi import AlchemyAPI
from iodpython.iodindex import IODClient
import os
import sys


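# Python 2 workaround: re-expose setdefaultencoding and force UTF-8 as the default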
reload(sys)
sys.setdefaultencoding('utf-8')


ALCHEMYAPI_KEY = os.environ["DOSSIER_ALCHEMY_KEY"]
ALCHEMY_RELEVANCE_THRESHOLD = 0.7

alchemyapi = AlchemyAPI()
client = IODClient("http://api.idolondemand.com/", os.environ["DOSSIER_IDOL_KEY"])
index = client.getIndex("conversations")
cardIndex = client.getIndex("cards")



#index a conversation

def dossierConversation(transcript):
	information = extractInformation(transcript)

	if "name" in information:
		title = "Conversation with " + information["name"]
		addCardToIndex(information)
	else:
		information["name"] = ""
Example #8
def call(answers,count2):
	# Create the IOD client and pass in the API key
	client = IODClient('http://api.idolondemand.com', '#*APIKEY*#')
	text = answers
	# Put the text into the request data dictionary
	data = {'text':text}
	#tell the client what API to call
	r = client.post('analyzesentiment', data)
	#Return information from IOD
	sentiment = r.json()['aggregate']['sentiment']
	score = r.json()['aggregate']['score']
	print "********************************",'\n'
	data = r.json()
	count1 = 0
	
	#Set lists to hold topics
	global n_topics
	global p_topics
	n_topics = []
	p_topics = []
	
	for i in (data['negative']):
		# If statement to check whether there is a negative topic
		if 'negative' not in data:
			print('negative key is missing')
		elif len(data['negative']) == 0:
			print('no items in the negative list')
		elif 'topic' not in data['negative'][count2]:
			print('topic is missing')
		elif data['negative'][count2]['topic'] == '':
			print('topic is empty')
		else:
			# What to print for the negative topic
			print "Text:", "'"+text+"'"
			print "Sentiment: negative "
			print "Topic:" , (data['negative'][count2]['topic'])
			print "Score:", (data['negative'][count2]['score'])
			n_topics.append(str(data['negative'][count2]['topic']))
			if count2 == 1:
				print "negative topic"
			break
		logging.warning('Watch out!')
		
	print '\n',"********************************"
	print '\n',"Negative topics:",n_topics
	print '\n',"********************************", '\n'
	# If statement to check whether there is a positive topic
	for i in (data['positive']):
		if 'positive' not in data:
			print('positive key is missing')
		elif len(data['positive']) == 0:
			print('no items in the positive list')
		elif 'topic' not in data['positive'][count1]:
			print('topic is missing')
		elif data['positive'][count1]['topic'] == '':
			print('topic is empty')
		else:
			# What to print for the positive topic
			print "Text:", "'"+text+"'"
			print "Sentiment: positive "
			print "Topic:" , (data['positive'][count1]['topic'])
			print "Score:", (data['positive'][count1]['score'])
			p_topics.append(str(data['positive'][count1]['topic']))
			count1 += 1
		print '\n',"********************************",'\n'
	print "Positive topics:",p_topics,'\n'
	
	# Print statements for the aggregate result of the data set
	#print "Aggregate Result"
	#print "sentiment: ", sentiment + '\n',"score: ", score , '\n'
	print "********************************"
Example #9
    "continued_by":[{'name':None,'mid':None,'optional':True}],
    "issues":[{'characters_on_cover':[{'name':None,'mid':None,'optional':True}],"optional":True}],
    #"issues":[{'characters_on_cover':[{}]}],
    "featured_characters":[{'name':None,'mid':None,'optional':True}],
    #'starring':[{"actor":None}],

}]






freebaseUtil = FreebaseUtil(freebase_key,query,description=True)

client = IODClient("http://api.idolondemand.com/",
                        iod_key)


#   client.deleteIndex('quotesdb')

# try: 
#   index=client.deleteIndex('comic_series')
# except:
#   pass

try:
  fields=["created_by_name","publisher_name","source_type_name","genre_name","continues_name","continued_by_name"]
  
  index=client.createIndex('comic_series',index_fields=fields, parametric_fields=fields)
  # index=client.createIndex('quotesdb',index_fields=["spoken_by_character_*","author_*","source_*","incorrectly_attributed_to_*","addressee_*"],parametric_fields=["spoken_by_character_*","author_*","source_*","incorrectly_attributed_to_*","addressee_*"])
  print "sleeping"
Example #10
            result= flattenlists(result)
        except:
            pass
        index.pushDoc(result)
        #print json.dumps(flatten(result),indent=4)
        #print result["continues"]
    print index.name
    try:
        print "trying to index"
        print index.commit(async=True).jobID
    except:
        print "indexing failed"
    return response.get("cursor")


client = IODClient("http://api.idolondemand.com/",
                        iod_key)
#try:
client.deleteIndex('comic_series')
index=client.createIndex('comic_series')

#except:
##    print "getting instead"

#index= client.getIndex('comic_book_series')
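# Page through the Freebase results until do_query returns no cursor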
cursor = do_query(index)
while(cursor):
    cursor = do_query(index,cursor)



Example #11
import time
import requests
import collections
from alchemyapi import AlchemyAPI
from iodpython.iodindex import IODClient
import os
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

ALCHEMYAPI_KEY = os.environ["DOSSIER_ALCHEMY_KEY"]
ALCHEMY_RELEVANCE_THRESHOLD = 0.7

alchemyapi = AlchemyAPI()
client = IODClient("http://api.idolondemand.com/",
                   os.environ["DOSSIER_IDOL_KEY"])
index = client.getIndex("conversations")
cardIndex = client.getIndex("cards")

#index a conversation


def dossierConversation(transcript):
    information = extractInformation(transcript)

    if "name" in information:
        title = "Conversation with " + information["name"]
        addCardToIndex(information)
    else:
        information["name"] = ""
        title = "Conversation"
Example #12
import time
import picamera
from iodpython.iodindex import IODClient
from twilio.rest import TwilioRestClient

import boto
from boto.s3.key import Key

conn = boto.connect_s3("AWS_API_KEY", "AWS_SECRET_KEY")
b = conn.get_bucket("hackathon-bell")

twilio_client = TwilioRestClient("TWILIO_API_KEY", "TWILIO_SECRET_KEY")
to_number = "TO_NUMBER"
from_number = "FROM_NUMBER"

client = IODClient("http://api.idolondemand.com/", "IOD_API_KEY")

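# Initialise the Raspberry Pi camera and apply baseline capture settings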
camera = picamera.PiCamera()
#
camera.sharpness = 0
camera.contrast = 0
camera.brightness = 50
camera.saturation = 0
camera.ISO = 0
camera.video_stabilization = False
camera.exposure_compensation = 0
camera.exposure_mode = "auto"
camera.meter_mode = "average"
camera.awb_mode = "auto"
camera.image_effect = "none"
camera.color_effects = None
Example #13
    def __init__(self):
        self.client = IODClient("http://api.idolondemand.com/","07a106d0-ff07-496b-a1b5-288b752da744")
        self.hashtags = {"#jeb": "Jeb Bush", "#clinton": "Hillary Clinton", "#trump": "Donald Trump", "#hilary": "Hillary Clinton", "#berniesanders": "Bernie Sanders"}
Example #14
    "name": None,
    "mid": None,
    "optional": True
  }],
  "subjects": []
}]







freebaseUtil = FreebaseUtil(freebase_key,query)

client = IODClient("http://api.idolondemand.com/",
                        iod_key)


#   client.deleteIndex('quotesdb')

index=client.getIndex('newquotesdb')

#try:
 #   fields=["spoken_by_character_name","author_name","author_profession","source_type_name","source_name","incorrectly_attributed_to_name","addressee_name"]
  # index=client.createIndex('quotesdb',index_fields=fields, parametric_fields=fields)
   # index=client.createIndex('quotesdb',index_fields=["spoken_by_character_*","author_*","source_*","incorrectly_attributed_to_*","addressee_*"],parametric_fields=["spoken_by_character_*","author_*","source_*","incorrectly_attributed_to_*","addressee_*"])

#except:
#    index=client.getIndex('quotesdb')

#except:
Example #15
        #result.pop('issues')
        result= flatten(result)
        result= flattenlists(result)
        index.pushDoc(result)
        #print json.dumps(flatten(result),indent=4)
        #print result["continues"]
    print index.name
    try:
        print "trying to index"
        print index.commit().text
    except:
        print "indexing failed"
    return response.get("cursor")


client = IODClient("http://api.idolondemand.com/",
                        "1642237f-8d30-4263-b2f9-12efab36c779")
#try:
index=client.getIndex('comic_series')

#except:
##    print "getting instead"
#index= client.getIndex('comic_book_series')
cursor = do_query(index)
while(cursor):
    cursor = do_query(index,cursor)





Example #16
# Load Mappings from json
config=json.loads(open('config.json').read())


syncdb=shelve.open("sync.db")

# Load the list of sources
sourcelist= json.loads(open('sources.json').read())
schedulesecs=config["schedulesecs"]
database=config["database"]

keyconf = shelve.open('../config.db')
iod_key=keyconf['iod-key']


client = IODClient("http://api.idolondemand.com/",
                        iod_key)
#try:
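# Create the index, or fall back to the existing one if creation fails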
try:
	index=client.createIndex(database)
	print index
	time.sleep(3)
except:
	print "getting instead"
	index=client.getIndex(database)


def main():
	start()

if __name__ == "__main__":
	main()