from paralleldots import (set_api_key, similarity, sentiment, ner, keywords,
                          emotion, intent, abuse, batch_intent, batch_abuse,
                          batch_ner, batch_sentiment, batch_phrase_extractor)


def test():
    set_api_key("write your api key here")
    similarity("Sachin is the greatest batsman",
               "Tendulkar is the finest cricketer")
    sentiment("Come on, lets play together")
    ner("Narendra Modi is the prime minister of India")
    keywords(
        "Prime Minister Narendra Modi tweeted a link to the speech Human Resource Development Minister Smriti Irani made in the Lok Sabha during the debate on the ongoing JNU row and the suicide of Dalit scholar Rohith Vemula at the Hyderabad Central University."
    )
    emotion("Did you hear the latest Porcupine Tree song ? It's rocking !")
    intent(
        "Finance ministry calls banks to discuss new facility to drain cash")
    abuse("you f**king a$$hole")
    batch_intent([
        "drugs are fun", "don\'t do drugs, stay in school",
        "lol you a f*g son", "I have a throat infection"
    ])
    batch_abuse([
        "drugs are fun", "don\'t do drugs, stay in school",
        "lol you a f*g son", "I have a throat infection"
    ])
    batch_ner([
        "drugs are fun", "don\'t do drugs, stay in school",
        "lol you a f*g son", "I have a throat infection"
    ])
    batch_sentiment([
        "drugs are fun", "don\'t do drugs, stay in school",
        "lol you a f*g son", "I have a throat infection"
    ])
    batch_phrase_extractor([
        "drugs are fun", "don\'t do drugs, stay in school",
        "lol you a f*g son", "I have a throat infection"
    ])
Example #2
def test():
    similarity("Sachin is the greatest batsman",
               "Tendulkar is the finest cricketer")
    sentiment("Come on, lets play together")
    taxonomy("Narendra Modi is the prime minister of India")
    ner("Narendra Modi is the prime minister of India")
    keywords(
        "Prime Minister Narendra Modi tweeted a link to the speech Human Resource Development Minister Smriti Irani made in the Lok Sabha during the debate on the ongoing JNU row and the suicide of Dalit scholar Rohith Vemula at the Hyderabad Central University."
    )
    emotion("Did you hear the latest Porcupine Tree song ? It's rocking !")
    intent(
        "Finance ministry calls banks to discuss new facility to drain cash")
    abuse("you f**king a$$hole")
Example #3
    def go(self, text, language):
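        # Route the text to the language-specific ParallelDots endpoints and
        # return (keywords, key phrases, emotion) for the requested language.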
        if language == 'english':
            key_words = paralleldots.keywords(text)
            key_phrase = paralleldots.phrase_extractor(text)
            emotion = paralleldots.emotion(text)
        elif language == 'schinese':
            key_words = [{
                'Error':
                'The lang_code is not among the supported languages, supported languages: en, pt, zh, es, de, fr, nl, it, ja, th, da, fi, el, ru, ar.',
                'code': 400
            }]  # chinese API not yet available
            key_phrase = paralleldots.multilang_keywords(text, 'zh')
            emotion = paralleldots.emotion(text, 'zh')
        elif language == 'french':
            key_words = paralleldots.multilang_keywords(text, 'fr')
            key_phrase = paralleldots.multilang_keywords(text, 'fr')
            emotion = paralleldots.emotion(text, 'fr')
        elif language == 'japanese':
            key_words = paralleldots.multilang_keywords(text, 'ja')
            key_phrase = paralleldots.multilang_keywords(text, 'ja')
            emotion = paralleldots.emotion(text, 'ja')
        else:
            key_words, key_phrase, emotion = [], [], []

        return key_words, key_phrase, emotion
Example #4
def get_vects(text):
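    # Build keyword, (emotion, probability) and sentiment vectors for a text.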
    keywords_vect = [k['keyword'] for k in keywords(text)['keywords']]
    emotion_vect = [(key, value)
                    for key, value in emotion(text)['probabilities'].items()]
    sentiment_vect = sentiment(text)
    del sentiment_vect['usage']
    return keywords_vect, emotion_vect, sentiment_vect
Example #5
def keywords(request):
    data = []
    if request.method == 'POST':
        sentence = request.POST.get('sent')
        # call the ParallelDots helper via the module; this Django view is
        # itself named keywords, so an unqualified call would recurse
        data = paralleldots.keywords(sentence)['keywords']
        print(data)
    return render(request, 'trial.html', {'data': data})
Example #6
def short_questions_3():
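    # Record the "goals" answer: score its emotion and keywords with
    # ParallelDots, pair it with the most common face emotion collected so
    # far, write everything to the HTML report and advance to the next page.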
    global page_idx
    global challenge
    global coping
    global goals
    global challenge_emotion
    global coping_emotion
    global goals_emotion
    global keyword_challenge
    global keyword_coping
    global keyword_goals
    global emotion_arr
    current_emotion = most_common(emotion_arr)
    print('Submitted emotion:', current_emotion)
    emotion_arr = []

    page_idx += 1
    goals = '{}'.format(request.form['goals'])

    goals_emotion = paralleldots.emotion( goals )
    goals_emotion = goals_emotion['emotion']
    goals_emotion = max(goals_emotion.items(), key=operator.itemgetter(1))[0]
    keyword_goals = paralleldots.keywords( goals )

    html_output_writer.goal_text = goals
    html_output_writer.goal_kw = keyword_goals['keywords']
    html_output_writer.goal_face = get_face_emotion_emoji(current_emotion)
    html_output_writer.goal_text_emotion = get_text_emotion_emoji(goals_emotion)


    render_template(page_ID[page_idx])
    return redirect('/')
Example #7
def submit():
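    # Validate the submitted report, look up similar reports already stored in
    # SQLite, save the new report, then show keywords taken from the similar ones.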

    who = request.form['who']
    location = request.form['location']
    what_raw = request.form['what']

    if what_raw == "" or not keywords_exist(what_raw):
        return render_template('main_form.html', invalid=True)

    con = sqlite3.connect('database.db')
    cur = con.cursor()

    cur.execute('''SELECT who, location, what FROM reports''')
    rows = cur.fetchall()
    similar_reports = find_similar_reports(who, location, what_raw, rows)

    similar_words = []
    for report in similar_reports:
        for words in keywords(report[2]):
            similar_words.append(words)

    cur.execute('''INSERT INTO reports(who, location, what) VALUES(?,?,?)''',
                (who, location, what_raw))

    con.commit()
    con.close()

    return render_template('results.html',
                           similar_words=str(similar_words)[1:-1],
                           num_of_similar=len(similar_reports))
Example #8
def confidenceScores(text):
    dicto = keywords(text)
    list = dicto['keywords']
    out = sortlist(list)
    return out


#print(confidenceScores('''Mr Putin stood up to greet Mr Trump\nPresident Vladimir Putin feels insulted by allegations of Russian interference in the 2016 US election, Donald Trump has said after meeting him briefly at an Asia-Pacific summit in Vietnam.\n"He said he absolutely did not meddle in our election," the US leader said.\nMr Trump, who defeated Democratic rival Hillary Clinton, said the allegations were a "Democratic hit job".\nThe US intelligence community concluded earlier that Russia had indeed tried to sway the poll in favour of Mr Trump.\nThe US justice department has appointed special investigator Robert Mueller to examine any possible collusion involving Mr Trump's team, and legal action has already been taken against several former aides.\nWhat are the allegations against Russia?\nPresident Trump has refused to acknowledge a reported assessment by the CIA and other intelligence agencies that Russia was behind the hacking of the Democratic National Committee (DNC) in the run-up to last year's presidential election.\nThe contents of the emails, passed to Wikileaks and posted online, were embarrassing to the Democrats and shook up the presidential campaign, which ended in defeat for Hillary Clinton.\nIn addition to the Mueller inquiry, congressional committees have been set up to carry out their own investigations.\nRelations between the US and Russia have been strained for years, with the Kremlin long accusing Washington of seeking to sway elections in Russia and other ex-Soviet states including Ukraine and Georgia.\nWhile Russian hackers are widely suspected of involvement, there has been no conclusive link to the Kremlin.\nDenying that Russia had tried to interfere last year by fostering contacts with Mr Trump's campaign, Mr Putin told reporters in Vietnam: "Everything about the so-called Russian dossier in the US is a manifestation of a continuing domestic political struggle."\nWhat does Mr Trump say to the allegations?\nHe said he believed Mr Putin had been "very insulted by" the allegations and that was "not a good thing" for America.\n"He [Putin] said he didn't meddle," he added. "I asked him again."\nAsked if he believed Mr Putin, he replied, "He is very, very strong in the fact that he didn't do it. You have President Putin very strongly, vehemently says he has nothing to do with that. Now, you are not going to get into an argument, you are going to start talking about Syria and the Ukraine."\nTrump out on a limb again\nAleem Maqbool, BBC News, Da Nang\nDonald Trump once again goes against the findings of his own intelligence agencies.\nBecause although the US justice department is investigating the scale and nature of Russian interference in the election of 2016 (and any links to the Trump campaign), the American intelligence community has already long determined that Russia did, indeed, interfere.\nYet Mr Trump suggested this story was not only entirely fabricated by his political opponents, it might even be costing lives in Syria, because it is getting in the way of his relationship with the Russian president and hampering their ability to help solve the conflict together.\n"People will die because of it, and it's a pure hit job, and it's artificially induced and that's a shame," he said.\nIt is hard to know what the president hopes to achieve with this type of rhetoric. The investigation goes on.\nHow did the two presidents get on in Vietnam?\nMr Trump and Mr Putin met for the first time in July at a G20 summit in the German city of Hamburg. 
# In Da Nang they were seen chatting briefly on three occasions within 24 hours during the Asia-Pacific Economic Co-operation (Apec) summit.\nHowever, they had no formal bilateral meeting, with Mr Putin blaming it on scheduling and protocol.\nThey had warm words for each other, with the US president talking of their mutual "very good feeling" and the Russian leader describing his counterpart as "well-mannered... and comfortable to deal with".\nThey did manage to sign off a statement vowing to continue the battle against so-called Islamic State in Syria until the militants are defeated and calling for a political solution to the conflict.\nHow far has US justice department investigation progressed?\nLast month, former Trump campaign adviser George Papadopoulos pleaded guilty to having lied to the Federal Bureau of Investigation (FBI) about the timing of meetings with alleged go-betweens for Russia.\nHe testified that Russian nationals had contacted him in an attempt to gain influence with the Trump campaign, offering "dirt" in the form of "thousands of emails" on Mrs Clinton in April 2016 - two months before the DNC emails were leaked.\nMr Trump has played down the importance of Mr Papadopoulos, calling him a "low-level volunteer" and "liar".\nOn Saturday, Mr Putin brushed aside US media reports that a woman wrongly identified by Mr Papadopoulos as the Russian president's niece had offered to help broker meetings with Kremlin officials.\n"I do not know anything about it and I think it is just some fantasies," Mr Putin said.\nMr Trump's former campaign manager, Paul Manafort, and an associate were also placed under house arrest on charges of money laundering as a result of the Mueller inquiry, but the charges do not relate to the election.'''))
def retKeywords(text):
    keyword_dict = keywords(str(text))['keywords']
    tempDict = {}
    for x in keyword_dict:
        tempDict[x['keyword']] = x['confidence_score']
    selectedWords = dict(
        sorted(tempDict.items(), key=operator.itemgetter(1),
               reverse=True)[0:10])
    print("IN RET KEYWORDS")
    return random.sample(list(selectedWords.keys()), 5)
Example #10
def keywords(raw):
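    # Return only the keyword strings from the ParallelDots keywords() response.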
    w = []
    a = []
    w.append(pd.keywords(raw))
    del (w[0])['usage']
    my_list = []
    for d in w[0]['keywords']:
        my_list.append(d['keyword'])

    tags = my_list
    return tags
Example #11
def get_kw(string):
    """Return all the keywords in a string."""
    try:
        keys = keywords(string)
        keys = keys['keywords']
        key_list = []
        for i in range(len(keys)):
            key_list.append(keys[i]['keyword'])
        return key_list

    except:
        print("Error in ", string)
Example #12
def keywords_exist(text):
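    # Return False when ParallelDots reports "No Keywords." for the text,
    # True otherwise.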
    w = []
    a = []
    w.append(pd.keywords(text))
    del (w[0])['usage']

    if ((w[0]['keywords']) == {
            'keywords': 'No Keywords.',
            'confidence_score': '0'
    }):
        return False
    else:
        return True
Example #13
def get_keywords(text):
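    # Keep only the ParallelDots keywords that also appear in words.csv.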
    response = paralleldots.keywords(text)
    print('request sent')
    keywords = []
    for item in response['keywords']:
        with open('words.csv') as csvfile:
            readCSV = csv.reader(csvfile, delimiter=',')
            for row in readCSV:
                word = ''.join(row)
                if word.lower() == item['keyword'].lower(
                ) and item['keyword'] not in keywords:
                    keywords.append(item['keyword'])
    return keywords
Example #14
def getKeywords(callText):
    l = []
    data = keywords(callText)
    ans = data['keywords']
    for i in ans:
        try:
            # collect every keyword returned by the API
            l.append(i.get("keyword"))
        except AttributeError:
            l.append(['no keyword'])
            return (l)
    print(l)
    print(l)
    return (l)
Example #15
def test():
    set_api_key("Put your Api key here")
    category = {
        "finance": ["markets", "economy", "shares"],
        "world politics": ["diplomacy", "UN", "war"],
        "india": ["congress", "india", "bjp"]
    }
    print(
        similarity("Sachin is the greatest batsman",
                   "Tendulkar is the finest cricketer"))
    print(sentiment("Come on, lets play together"))
    print(ner("Narendra Modi is the prime minister of India", "en"))
    print(
        taxonomy(
            "Michael Jordan of the Chicago Bulls is getting a 10-hour Netflix documentary in 2019"
        ))
    print(
        keywords(
            "Prime Minister Narendra Modi tweeted a link to the speech Human Resource Development Minister Smriti Irani made in the Lok Sabha during the debate on the ongoing JNU row and the suicide of Dalit scholar Rohith Vemula at the Hyderabad Central University."
        ))
    print(
        phrase_extractor(
            "Prime Minister Narendra Modi tweeted a link to the speech Human Resource Development Minister Smriti Irani made in the Lok Sabha during the debate on the ongoing JNU row and the suicide of Dalit scholar Rohith Vemula at the Hyderabad Central University."
        ))
    print(
        emotion(
            "Did you hear the latest Porcupine Tree song ? It's rocking !"))
    print(
        intent(
            "Finance ministry calls banks to discuss new facility to drain cash"
        ))
    print(abuse("you f**king a$$hole"))
    print(
        custom_classifier("Narendra Modi is the prime minister of India",
                          category))
    print(
        batch_intent([
            "drugs are fun", "don\'t do drugs, stay in school",
            "lol you a f*g son", "I have a throat infection"
        ]))
    print(batch_abuse(["drugs are fun", "dont do drugs, stay in school"]))
    print(
        batch_sentiment([
            "drugs are fun", "don\'t do drugs, stay in school",
            "lol you a f*g son", "I have a throat infection"
        ]))
    print(
        batch_phrase_extractor([
            "drugs are fun", "don\'t do drugs, stay in school",
            "lol you a f*g son", "I have a throat infection"
        ]))
    print(
        batch_taxonomy([
            "Michael Jordan of the Chicago Bulls is getting a 10-hour Netflix documentary in 2019",
            "Michael Jordan of the Chicago Bulls is getting a 10-hour Netflix documentary in 2019"
        ]))
    print(
        batch_ner([
            "Michael Jordan of the Chicago Bulls is getting a 10-hour Netflix documentary in 2019",
            "Michael Jordan of the Chicago Bulls is getting a 10-hour Netflix documentary in 2019"
        ]))
    print(
        batch_emotion([
            "drugs are fun", "don\'t do drugs, stay in school",
            "lol you a f*g son", "I have a throat infection"
        ]))
    print(facial_emotion_url("https://i.imgur.com/klb812s.jpg"))
    print(object_recognizer_url("https://i.imgur.com/klb812s.jpg"))
    print(
        sarcasm(
            "The movie that i watched last night is so funny that i get rolled out with laughter"
        ))
    print(
        batch_sarcasm([
            "The movie that i watched last night is so funny that i get rolled out with laughter",
            "I want to spend my life alone"
        ]))
Example #16
import requests
url = ('https://newsapi.org/v2/top-headlines?'
       'country=us&'
       'apiKey=72fb4129711844f58401b72dae9df2dc')
response = requests.get(url)
##print(response.json())

import paralleldots
from newspaper import Article

#commented out attempt at article extraction
#url = "https://www.economist.com/united-states/2019/05/30/how-should-america-fight-the-next-downturn" #set url from user's current article
#article = Article(url, language="en")
#article.download()
#text = article.text #extract article text
#print(text)
text = "In a statement issued with France and UN chief António Guterres on Saturday, China committed to “update” its climate target “in a manner representing a progression beyond the current one”.  It also vowed to publish a long term decarbonisation strategy by next year."

# Setting your API key
paralleldots.set_api_key("83JQ4TLL6boJsr6xzHu590WePYyKAWE4KrBTn3pDbGI")

### Viewing your API key
##paralleldots.get_api_key()

# Extract keywords from the article text
response = paralleldots.keywords(text)
print(response)
for pair in response["keywords"]:
    print("keyword: " + pair["keyword"])
    print("confidence: " + str(pair["confidence_score"]))
Example #17
def get_keywords(text):
    keywords = paralleldots.keywords(text)
    print(keywords)
Example #18
    def parse(self, response):
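        # For each article in the NewsAPI payload: score sentiment with Google
        # Cloud Natural Language, tag it via ParallelDots keywords, and yield
        # a populated NewsApiItem.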
        body = json.loads(response.body)
        googleClient = language.LanguageServiceClient(credentials=credentials)


        for value in body['articles']:
            
            if value['content'] is None: 
                content = ''
            else: 
                content = value['content']
                
            description = value['description']
            
            document = types.Document(
                content=content,
                type=enums.Document.Type.PLAIN_TEXT)
            sentiment = googleClient.analyze_sentiment(document=document).document_sentiment
            response = paralleldots.keywords(description)
            
            tags_dict = [{
                "keyword": "elon musk",
                "tag": "elon"
            },
            {
                "keyword": "model 3 model Y model X model S car electric vehicles ev's evs vehicle auto industry",
                "tag": "auto"
            },
            {
                "keyword": "home battery battery batteries solar panel solar panels home energy",
                "tag": "solar"
            }]

            tags = []
    
            try:
                for keyword in response['keywords']:  
                    for tag in tags_dict:
                        if (keyword['keyword'].lower() in tag['keyword'].lower()):
                            tags.append(tag['tag'])
            except KeyError:
                print ("No key found")


            print(response)

            # pdb.set_trace()

            readTime = readtime.of_text(content)

            class Person:
                "This is a person class"
                age = 10

                def greet(self):
                    print('Hello')

            # utc 
            # local_time = pytz.timezone("America/New_York")
            # naive_datetime = datetime.strptime (value['publishedAt'], "%Y-%m-%dT%H:%M:%SZ")
            # local_datetime = local_time.localize(naive_datetime, is_dst=None)
            # utc_datetime = local_datetime.astimezone(pytz.utc)
            # utc_timestamp = datetime.replace(tzinfo=timezone.utc).timestamp()
                        
            # Getting the current date  
            # and time 
            dt = datetime.strptime (value['publishedAt'], "%Y-%m-%dT%H:%M:%SZ")
  
            
            # pdb.set_trace()

            # utc_time = dt.replace(tzinfo = timezone.utc) 
            # utc_timestamp = utc_time.timestamp() 
            
            # print(utc_timestamp)
            

            newsItem = NewsApiItem()
            
            # publishedAt is ISO 8601 (e.g. "2019-05-30T14:03:00Z"); reuse the
            # datetime parsed above instead of re-splitting the string by hand
            dt = dt.timestamp()
            # pdb.set_trace()

            newsItem['publishDate'] = value['publishedAt']
            newsItem['publisher'] = value['source']['name']
            newsItem['author'] = value['author']
            newsItem['description'] = value['description']
            newsItem['articleLink'] = value['url']
            newsItem['sentiment'] = sentiment.score
            # newsItem['magnitude'] = sentiment.magnitude
            newsItem['title'] = value['title']
            newsItem['tags'] = tags
            newsItem['topic'] = 'tesla'
            newsItem['readTime'] = readTime.seconds
            newsItem['utc'] = dt
            print( dt )
        
            # newsItem['author_sentiment'] = updateAuthorSentiment
            # newsItem['publisher_sentiment'] = updatePublisherSentiment

            # get the news story
            # run the sentiment analysis on that story 
            # attribute sentiment to the author and store that data independently 
            # attribute sentiment to the publisher and store that data independently 
            # attribute sentiment to the news story as well and finish the news agg process and store data

            # print('news item', newsItem)
            yield newsItem
    def keywords(self):
        keywords = paralleldots.keywords(self.text)['keywords']
        if type(keywords) is list:
            return [item['keyword'] for item in keywords]
        return self.text.clean_text()
Example #20
from paralleldots import keywords
from paralleldots import set_api_key
from paralleldots import taxonomy

set_api_key("l0gClHlNjmAovHKVzHsxfSRMmPw6FcAljMCm6QZFoOI")
keyw = keywords(" Hyderabad ")
max = 0
keyword1 = []
#ir=keyw['keywords']
#for keydic in ir:
#for eachk in keydic:
#if(eachk!="confidence_score"):
#keyword1.append(keydic[eachk])

place = ""
keyword1 = (" is a place").split()
#print(keyword1)
for k in keyword1:
    clas = taxonomy(k)
    d = clas["taxonomy"]
    #print(clas)
    for e in d:
        if (e['tag'] == 'places' and e['confidence_score'] > max):
            place = k
            max = e['confidence_score']
print(place)
Example #21
		if didEnd ==1:
			print("Ending")
			transactions = db.child("Classes/67445/Transcript/").get(user['idToken'])

			sentenceCount = 0
			completeParagraph = ""
			for transaction in transactions.each():
				completeParagraph += transaction.val()['Text']
				completeParagraph += " " 
				sentenceCount += 1
				
			toPut = summarize(completeParagraph, ratio = 0.4)
			
			pSentiment = sentiment(completeParagraph)
			pEmotion = emotion(completeParagraph)
			pKeywords = keywords(completeParagraph)
			
			links = []
			for thing in pKeywords['keywords']:
				if thing['confidence_score'] > 0.90:
					db.child("Classes/67445/Links").push(
							{"Link": "https://en.wikipedia.org/wiki/" + thing['keyword']}, user['idToken'])
			
			
			pSentimentProbabilities = pSentiment['probabilities']
			pEmotionProbabilities = pEmotion['probabilities']
			
			#upload sentiment info
			db.child("Classes/67445/Sentiment").update(
						{"Negative": pSentimentProbabilities['positive'],
						 "Positive": pSentimentProbabilities['negative'],