Beispiel #1
0
def getres(file_loc):
    """Read a file (first line = heading, rest = body), run Watson Tone
    Analyzer and NLU over an HTML rendering of it, and return the signals.

    Returns a tuple ``(sentiment, concepts, keywords, tones, text)`` where
    ``sentiment`` is the document-level score, ``concepts``/``keywords`` are
    lists of (text, relevance) pairs and ``tones`` is a list of
    (tone_id, score) pairs; ``text`` is the raw file contents.
    """
    # The file is only read, so open read-only ('r+' needlessly required
    # write permission on the file).
    with open(file_loc, 'r') as f:
        head = f.readline()
        content = f.read()
        req = '<html><body><h2>{0}</h2>{1}</body></html>'.format(head, content)
        text = head + content
    tone_res = tone_analyzer.tone(req, content_type='text/html').get_result()

    res = natural_language_understanding.analyze(
        html=req,
        features=Features(
            categories=CategoriesOptions(limit=1),
            concepts=ConceptsOptions(limit=5),
            keywords=KeywordsOptions(limit=5, sentiment=True, emotion=True),
            sentiment=SentimentOptions(),
            # entities=EntitiesOptions(limit=5, mentions=True, sentiment=True, emotion=True),
        ),
    ).get_result()
    sentiment = res["sentiment"]["document"]["score"]
    # Distinct loop variable names — the original shadowed the result lists
    # with the comprehension variables. The unused `categories` local (never
    # returned, could IndexError on an empty category list) was dropped.
    concepts = [(concept["text"], concept["relevance"])
                for concept in res["concepts"]]
    keywords = [(keyword["text"], keyword["relevance"])
                for keyword in res["keywords"]]
    tones = [(tone["tone_id"], tone["score"])
             for tone in tone_res["document_tone"]["tones"]]
    return (sentiment, concepts, keywords, tones, text)
Beispiel #2
0
def get_keywords_from_url(url):
    """Return a de-duplicated keyword list for *url* via Watson NLU.

    Reddit self-posts are analyzed by their body text; reddit link posts
    (which have no body text) yield an empty list. Category labels are split
    into words and merged with concept names, then stopwords are removed.
    """
    text = None
    if 'reddit' in url:
        submission = _reddit.submission(url=url)
        # praw returns '' (not None) for link posts, so test truthiness:
        # the original `is not None` check was effectively always true and
        # sent an empty text (with url=None) to the NLU service.
        if submission.selftext:
            text = submission.selftext
            url = None
        else:
            return []
    res = _watson_nlu.analyze(
        url=url,
        text=text,
        features=Features(
            categories=CategoriesOptions(),
            concepts=ConceptsOptions(limit=5)
        )).get_result()
    kwds = set()
    if 'categories' in res:
        categories = res['categories']
        categories = sorted(categories, key=extract_relevancy, reverse=True)[:10]
        for category in categories:
            # Category labels look like "/art/music genres"; split on
            # commas, spaces and slashes to get individual words.
            kwds.update(re.split(',| |/', category['label']))
    if 'concepts' in res:
        kwds.update(concept['text'] for concept in res['concepts'])
    # Set-level removal replaces the per-stopword membership loop.
    kwds.difference_update(_stopwords)
    return list(kwds)
Beispiel #3
0
def watson_nlp_analysis(text):
    """Run a full Watson NLU feature sweep over *text*.

    Returns the parsed analysis dict, or the empty string unchanged when
    there is nothing to analyze. The API key is read from the WATSON
    environment variable.
    """
    # Empty input: nothing to analyze, hand the empty string straight back.
    if text == '':
        return text

    doc_limit = 10   # concepts / categories
    span_limit = 30  # entities / keywords

    nlu_client = NaturalLanguageUnderstandingV1(
        version='2018-11-16',
        iam_apikey=os.environ['WATSON'],
        url=
        'https://gateway.watsonplatform.net/natural-language-understanding/api'
    )

    feature_set = Features(
        concepts=ConceptsOptions(limit=doc_limit),
        categories=CategoriesOptions(limit=doc_limit),
        sentiment=SentimentOptions(document=True),
        emotion=EmotionOptions(document=True),
        entities=EntitiesOptions(emotion=True,
                                 sentiment=True,
                                 limit=span_limit),
        keywords=KeywordsOptions(emotion=True,
                                 sentiment=True,
                                 limit=span_limit))

    return nlu_client.analyze(text=text, features=feature_set).get_result()
    def makeWikiSection(self, sectiontitle):
        """Build a "Section" Node for *sectiontitle* of the current wiki page.

        Runs Watson NLU over the section text; if the title is itself a
        searchable Wikipedia topic (and not the "See also" section), the Node
        additionally carries the main article URL.
        """
        print("Accessing IBM Watson for NLP understanding on " + sectiontitle +
              " (subtopic of " + self._topic + ")")

        response = self.watsonobj.analyze(
            text=self._page.section(sectiontitle),
            features=Features(concepts=ConceptsOptions(limit=3),
                              entities=EntitiesOptions(limit=3),
                              keywords=KeywordsOptions(limit=5),
                              relations=RelationsOptions(),
                              semantic_roles=SemanticRolesOptions(limit=3)))

        # Properties shared by both Node variants (previously duplicated).
        node_kwargs = dict(
            title=sectiontitle,
            content=self._page.section(sectiontitle),
            concepts=json.dumps(response["concepts"]),
            entities=json.dumps(response["entities"]),
            keywords=json.dumps(response["keywords"]),
            relations=json.dumps(response["relations"]),
            semantic_roles=json.dumps(response["semantic_roles"]))

        # BUG FIX: the original used `is not "See also"`, which compares
        # object identity, not string equality (and is a SyntaxWarning on
        # modern CPython).
        if (sectiontitle in wikipedia.search(sectiontitle)
                and sectiontitle != "See also"):
            return Node("Section",
                        mainarticleurl=wikipedia.page(self._topic).url,
                        **node_kwargs)

        return Node("Section", **node_kwargs)
Beispiel #5
0
def extractPopularityText(pytrends, natural_language_understanding, text, numberConcepts=4):
    """Extract concepts from *text* via Watson NLU and measure their Google
    Trends popularity over the last month.

    :param pytrends: a pytrends TrendReq client
    :param natural_language_understanding: Watson NLU client
    :param text: the text to extract concepts from
    :param numberConcepts: maximum number of concepts to request
    :return: JSON string mapping each concept to its average popularity
             (previously only printed, never returned)
    """
    # Concepts of the text, keeping only confidently-relevant ones.
    response = natural_language_understanding.analyze(
        text=text,
        features=Features(concepts=ConceptsOptions(limit=numberConcepts)))

    concepts = [k for k in response["concepts"] if k['relevance'] > 0.75]
    kw_list = [k['text'] for k in concepts]

    # Guard: pytrends rejects an empty keyword list — the original crashed
    # when no concept cleared the relevance threshold.
    if not kw_list:
        return json.dumps({})

    # 'today 1-m' = popularity over the last month.
    pytrends.build_payload(kw_list, cat=0, timeframe='today 1-m', geo='', gprop='')
    data = pytrends.interest_over_time()
    data = data.drop('isPartial', axis=1)

    # Average popularity during the last month, serialized to JSON.
    result = json.dumps(data.mean().to_dict())
    print(result, '\n')
    return result
    def understanding(self):
        """Run Watson NLU over the first transcript alternative.

        Transcribes first if no transcription is cached; stores the analysis
        on ``self.analysis`` and returns it.
        """
        # Make sure a transcription exists before analyzing it.
        if not self.transcription:
            self.transcript()

        nlu_client = NaturalLanguageUnderstandingV1(
            version='2017-02-27',
            username=os.environ['UNDERSTANDING_USERNAME'],
            password=os.environ['UNDERSTANDING_PASSWORD'])

        # Best (first) alternative of the first result segment.
        transcript_text = (
            self.transcription['results'][0]['alternatives'][0]['transcript'])

        feature_set = Features(
            categories=CategoriesOptions(),
            concepts=ConceptsOptions(),
            emotion=EmotionOptions(),
            entities=EntitiesOptions(emotion=True,
                                     sentiment=True,
                                     mentions=True),
            keywords=KeywordsOptions(emotion=True, sentiment=True),
            relations=RelationsOptions(),
            sentiment=SentimentOptions())

        self.analysis = nlu_client.analyze(text=transcript_text,
                                           features=feature_set)

        logger.info('Completed analysis of recorded file')
        return self.analysis
Beispiel #7
0
 def summarizewithkeywords(self, keywords, text, words=None):
     """Print a summary of *text* seeded by Watson-derived concepts.

     Falls back to joining the raw *keywords* when the NLU call (or the
     concept-based summary) fails for any reason.
     """
     try:
         analysis = self.watsonobj.analyze(
             text=keywords,
             features=Features(concepts=ConceptsOptions(limit=3)))
         seed_terms = " ".join(entry['text'] for entry in analysis['concepts'])
         print(Summarize(seed_terms, text, words))
     except Exception:
         # Best-effort fallback: summarize with the caller's own keywords.
         print(Summarize(" ".join(keywords), text, words))
Beispiel #8
0
def get_concepts(text):
    """Return up to three Watson NLU concept names found in *text*.

    Returns an empty list when the analysis fails (e.g. text too short
    for the service to process).
    """
    try:
        response = natural_language_understanding.analyze(
            text=text, features=Features(concepts=ConceptsOptions(limit=3)))
        return [concept["text"] for concept in response["concepts"]]
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # KeyboardInterrupt and SystemExit.
        return []
Beispiel #9
0
 def _concept_finder(self, sentence):
     """Return the text of the top Watson concept for *sentence*, or None.

     Uses IBM BlueMix; returns None on API errors or when no concept is
     detected.
     """
     try:
         response = self.natural_language_understanding.analyze(
             text=sentence,
             features=Features(concepts=ConceptsOptions(limit=3)))
         concepts = response.get("concepts")
         # Truthiness check also covers a missing key (None): the original
         # `== []` test let None through to a TypeError on indexing.
         if not concepts:
             return None
         return concepts[0].get("text")
     except watson_developer_cloud.watson_service.WatsonApiException:
         return None
Beispiel #10
0
 def _concept_finder(self, sentence):
     """Return the text of the top Watson concept for *sentence*, or None.

     Returns None on API errors or when no concept is detected.
     """
     try:
         response = self.nlp_engine.analyze(
             text=sentence,
             features=Features(concepts=ConceptsOptions(limit=5)))
         concepts = response.get("concepts")
         # Truthiness check also covers a missing key (None): the original
         # `== []` test let None through to a TypeError on indexing.
         if not concepts:
             return None
         return concepts[0].get("text")
     except watson_developer_cloud.watson_service.WatsonApiException:
         return None
Beispiel #11
0
 def call_api(self, content):
     """Analyze *content* with Watson NLU and return the result as pretty
     JSON.

     On a Watson error the returned JSON instead carries the exception text
     under the "ibm_exception" key.
     """
     feature_set = Features(entities=EntitiesOptions(),
                            keywords=KeywordsOptions(),
                            categories=CategoriesOptions(),
                            concepts=ConceptsOptions())
     try:
         response = self.client.analyze(
             text=content, features=feature_set).get_result()
     except WatsonException as exception:
         print(exception)
         response = {"ibm_exception": str(exception)}
     return json.dumps(response, sort_keys=True, indent=4)
Beispiel #12
0
def get_news_sentiment(request):
    """Analyze the article at the request's `url` query parameter with
    Watson NLU and return the analysis as a Response.

    Returns an error Response when the URL cannot be retrieved/analyzed.
    """
    try:
        response = natural_language_understanding.analyze(
            url=request.GET.get('url'),
            features=Features(sentiment=SentimentOptions(),
                              emotion=EmotionOptions(),
                              concepts=ConceptsOptions(limit=5),
                              categories=CategoriesOptions()))

        return Response(response)
    except Exception:
        # Narrowed from a bare `except:`, which would also have swallowed
        # KeyboardInterrupt and SystemExit.
        return Response({"error": 'problem retrieving'})
 def __getConcepts(self,
                   numberConcepts=4,
                   min_relevance=0.75,
                   text=None,
                   url=None):
     """Return Watson NLU concepts for *text* or *url* whose relevance
     exceeds *min_relevance*.

     :param numberConcepts: maximum number of concepts to request
     :param min_relevance: relevance threshold for keeping a concept
     :param text: raw text to analyze (mutually exclusive with url)
     :param url: page URL to analyze
     """
     response = self.natural_language_understanding.analyze(
         text=text,
         url=url,
         features=Features(concepts=ConceptsOptions(limit=numberConcepts)))
     # BUG FIX: the filter previously hard-coded 0.75, silently ignoring
     # the min_relevance parameter.
     return [k for k in response["concepts"]
             if k['relevance'] > min_relevance]
Beispiel #14
0
def queryWatson(headline, story):
    """Run Watson NLU over "headline; story" and return the raw analysis.

    Requests entities, keywords, categories and concepts (no per-mention
    emotion/sentiment) with generous limits.
    """
    document = headline + "; " + story
    feature_set = Features(
        entities=EntitiesOptions(emotion=False, sentiment=False, limit=60),
        keywords=KeywordsOptions(emotion=False, sentiment=False, limit=60),
        categories=CategoriesOptions(limit=60),
        concepts=ConceptsOptions(limit=50))
    return nlu.analyze(text=document, language="en", features=feature_set)
    def get_concepts(self, text=None, url=None):
        """Return a list of concept dicts (text, relevance, reference url)
        for either raw *text* or a page *url*.

        Returns the string "Error" when neither input is given (kept for
        backward compatibility with existing callers).
        """
        # Identity comparison for None (PEP 8) instead of `== None`.
        if text is None and url is None:
            return "Error"

        response = naturalLanguageUnderstanding.analyze(
            text=text, url=url,
            features=Features(concepts=ConceptsOptions())).get_result()

        return response['concepts']
def analyze_using_NLU(analysistext):
    """Run the full Watson NLU feature set over *analysistext* and return
    the analysis wrapped in {'results': ...} as a JSON string."""
    feature_set = Features(sentiment=SentimentOptions(),
                           entities=EntitiesOptions(),
                           keywords=KeywordsOptions(),
                           emotion=EmotionOptions(),
                           concepts=ConceptsOptions(),
                           categories=CategoriesOptions(),
                           semantic_roles=SemanticRolesOptions())
    analysis = natural_language_understanding.analyze(text=analysistext,
                                                      features=feature_set)
    return json.dumps({'results': analysis})
Beispiel #17
0
 def get_keywords(self, sentence):
     """Return (text, type) pairs for every entity Watson NLU finds in
     *sentence*.

     Returns a list (the original returned a one-shot `map` iterator that
     was silently empty on a second traversal).
     """
     # NOTE(review): return_analyzed_text takes a bool; the string 'True'
     # is truthy but probably unintended — confirm against the SDK before
     # changing, since it alters the serialized request.
     response = self.natural_language_understanding.analyze(
         text=sentence,
         return_analyzed_text='True',
         features=Features(concepts=ConceptsOptions(),
                           categories=CategoriesOptions(),
                           relations=RelationsOptions(),
                           semantic_roles=SemanticRolesOptions(),
                           sentiment=SentimentOptions(),
                           entities=EntitiesOptions(),
                           keywords=KeywordsOptions())).get_result()
     return [(entity['text'], entity['type'])
             for entity in response['entities']]
Beispiel #18
0
def analyze(links):
    """Run Watson NLU over each URL in *links* and return the responses.

    The original overwrote (and discarded) the response on every iteration;
    collecting them is backward compatible — callers that ignored the
    previous implicit None return are unaffected.
    """
    responses = []
    for link in links:
        responses.append(natural_language_understanding.analyze(
            url=link,
            features=Features(
                entities=EntitiesOptions(emotion=True,
                                         sentiment=True,
                                         limit=15),
                emotion=EmotionOptions(targets=['keyword1', 'keyword2']),
                keywords=KeywordsOptions(emotion=True, sentiment=True,
                                         limit=2),
                concepts=ConceptsOptions(limit=5),
                sentiment=SentimentOptions(targets=['stocks']),
                categories=CategoriesOptions())))
    return responses
 def getTopics(self,
               numberConcepts=4,
               min_relevance=0.75,
               text=None,
               url=None):
     """Return Watson NLU concepts for *text* or *url* as
     {'label': ..., 'confidence': ...} dicts.

     :param numberConcepts: maximum number of concepts to request
     :param min_relevance: NOTE(review) — accepted but never applied; no
         relevance filtering happens below. Confirm whether a
         ``k['relevance'] > min_relevance`` filter was intended (a sibling
         helper in this file filters at 0.75).
     :param text: raw text to analyze (mutually exclusive with url)
     :param url: page URL to analyze
     """
     response = self.natural_language_understanding.analyze(
         text=text,
         url=url,
         features=Features(concepts=ConceptsOptions(limit=numberConcepts)))
     concepts = response["concepts"]
     # Re-shape each concept into a label/confidence pair.
     concepts = [{
         'label': k['text'],
         'confidence': k['relevance']
     } for k in concepts]
     return concepts
 def analyze_using_NLU(analysistext):
     """Extract Watson Natural Language Understanding results for one news
     item, returned as {'results': <analysis>}.
     """
     feature_set = Features(
         sentiment=SentimentOptions(),
         entities=EntitiesOptions(),
         keywords=KeywordsOptions(),
         emotion=EmotionOptions(),
         concepts=ConceptsOptions(),
         categories=CategoriesOptions(),
     )
     analysis = natural_language_understanding.analyze(text=analysistext,
                                                       features=feature_set)
     return {'results': analysis}
Beispiel #21
0
def interface(Purl):
    """Analyze the page at *Purl* with Watson NLU and return a flat tuple:

    (sentiment label, score as a percent, category label, concept text,
    concept relevance, concept dbpedia source, entity text, entity type,
    metadata title, metadata publication date).

    Any failure during retrieval or parsing yields "ERROR" in the affected
    fields.
    """
    # Pre-seed every output so empty result lists cannot leave a name
    # unbound — the original raised NameError at the return statement when
    # e.g. no categories came back (the loops were skipped and the bare
    # except never fired).
    vLabel = vScore = categoryLabel = "ERROR"
    conceptText = conceptRelevance = conceptSource = "ERROR"
    entityText = entityType = mdTitle = mdDate = "ERROR"

    try:
        response = naturalLanguageUnderstanding.analyze(
            url=Purl,
            features=Features(sentiment=SentimentOptions(),
                              categories=CategoriesOptions(limit=1),
                              concepts=ConceptsOptions(limit=1),
                              entities=EntitiesOptions(limit=1),
                              metadata=MetadataOptions())).get_result()

        # Sentiment
        vLabel = response['sentiment']['document']['label']
        score = response['sentiment']['document']['score']
        vScore = math.ceil(float(score) * 100)

        # Metadata
        mdTitle = response['metadata']['title']
        mdDate = response['metadata']['publication_date']

        # limit=1 was requested above, so each loop keeps the single item.
        for category in response['categories']:
            categoryLabel = category['label']
        for concept in response['concepts']:
            conceptText = concept['text']
            conceptRelevance = str(concept['relevance'])
            conceptSource = concept['dbpedia_resource']
        for entity in response['entities']:
            entityText = entity['text']
            entityType = entity['type']

        print(response)
    except Exception:
        # Narrowed from a bare `except:`; all fields keep their "ERROR"
        # defaults on any failure.
        pass

    return vLabel, vScore, categoryLabel, conceptText, conceptRelevance, conceptSource, entityText, entityType, mdTitle, mdDate
Beispiel #22
0
def resultjson():
    """View handler: analyze the JSON body of a POST request with Watson NLU
    and return the analysis as a JSON response.

    Non-POST requests fall through and return None.
    """
    # Guard clause: only POST carries a body to analyze.
    if request.method != 'POST':
        return None

    payload = request.get_json(force=True)
    analysis = natural_language_understanding.analyze(
        text=str(payload),
        features=Features(entities=EntitiesOptions(emotion=True,
                                                   sentiment=True,
                                                   limit=2),
                          relations=RelationsOptions(),
                          categories=CategoriesOptions(),
                          semantic_roles=SemanticRolesOptions(),
                          concepts=ConceptsOptions(limit=3),
                          keywords=KeywordsOptions(emotion=True,
                                                   sentiment=True,
                                                   limit=2)))
    return jsonify(analysis)
Beispiel #23
0
def understand_transcript(transcription):
    """Feed the top transcript alternative into Watson NLU with every
    document-level feature enabled and return the analysis.

    Credentials come from the WATSON_UNDERSTANDING_* environment variables.
    """
    nlp_client = NaturalLanguageUnderstandingV1(
        version="2017-02-27",
        username=os.environ["WATSON_UNDERSTANDING_USERNAME"],
        password=os.environ["WATSON_UNDERSTANDING_PASSWORD"],
    )

    # Best (first) alternative of the first result segment.
    best_transcript = (
        transcription["results"][0]["alternatives"][0]["transcript"])

    feature_set = Features(
        categories=CategoriesOptions(),
        concepts=ConceptsOptions(),
        emotion=EmotionOptions(),
        entities=EntitiesOptions(),
        keywords=KeywordsOptions(),
        sentiment=SentimentOptions(),
    )

    return nlp_client.analyze(text=best_transcript, features=feature_set)
Beispiel #24
0
def conceptExtraction(sentence):
  """Extract up to 100 concepts and 100 keywords from *sentence* with
  Watson NLU, print the pretty-printed response, and return it.

  The original printed the response but never returned it; returning the
  parsed dict is backward compatible. (Dead commented-out JSON-loading code
  was removed.)
  """
  naturalLanguageUnderstanding = NaturalLanguageUnderstandingV1(
    version='2018-09-21',
    iam_apikey=apikey,
    url=url)

  response = naturalLanguageUnderstanding.analyze(
    text=sentence,
    features=Features(
      concepts=ConceptsOptions(
        limit=100),
      keywords=KeywordsOptions(
        limit=100))).get_result()
  print(json.dumps(response, indent=2))
  return response
Beispiel #25
0
def extractPopularityUrl(pytrends, natural_language_understanding, url, numberConcepts=4):
    """Extract concepts from the page at *url* via Watson NLU and measure
    their Google Trends popularity over the last month.

    :param pytrends: a pytrends TrendReq client
    :param natural_language_understanding: Watson NLU client
    :param url: page URL to extract concepts from
    :param numberConcepts: maximum number of concepts to request
    :return: JSON string mapping each concept to its average popularity
             (previously only printed, never returned)
    """
    response = natural_language_understanding.analyze(
        url=url,
        features=Features(concepts=ConceptsOptions(limit=numberConcepts)))

    # Keep only confidently-relevant concepts.
    concepts = [k for k in response["concepts"] if k['relevance'] > 0.75]
    kw_list = [k['text'] for k in concepts]

    # Guard: pytrends rejects an empty keyword list — the original crashed
    # when no concept cleared the relevance threshold.
    if not kw_list:
        return json.dumps({})

    # 'today 1-m' = popularity over the last month.
    pytrends.build_payload(kw_list, cat=0, timeframe='today 1-m', geo='', gprop='')
    data = pytrends.interest_over_time()
    data = data.drop('isPartial', axis=1)

    # Average popularity during the last month, serialized to JSON.
    result = json.dumps(data.mean().to_dict())
    print(result, '\n')
    return result
Beispiel #26
0
    def get_concepts(self, user_input, max_number_of_concepts=3):
        '''
        Extract concepts from a given string
        :param str user_input:
        :param int max_number_of_concepts:
        :return: a list of concepts(str)
        '''
        response = self.nlu.analyze(text=user_input,
                                    features=Features(concepts=ConceptsOptions(
                                        limit=max_number_of_concepts)),
                                    language='en')
        if self.debug_mode:
            print(json.dumps(response, indent=2))

        concepts = response.get('concepts')
        # Truthiness check also tolerates a missing 'concepts' key, where
        # the original `len(None)` raised TypeError.
        if not concepts:
            return []
        return [concept.get('text') for concept in concepts]
    def _parallel_NlU(self, text):
        """Call Watson Natural Language Understanding for one text chunk and
        merge the extracted concept/keyword texts into the shared
        ``self.concepts`` / ``self.keywords`` lists under ``self.lock``.

        Intended to run on a worker thread: any exception is printed and
        swallowed so one failing chunk does not kill the batch.
        """
        # Build per-feature options only when enabled in self.config; a
        # None feature value means that feature is not requested.
        if self.config['keywords']:
            keyword_option = KeywordsOptions(limit=self.config['keyword_limit'])
        else:
            keyword_option = None

        if self.config['concepts']:
            concepts_option = ConceptsOptions(
                limit=self.config['concept_limit'])
        else:
            concepts_option = None

        try:
            results = self.model.analyze(
                text=text,
                features=Features(
                    concepts=concepts_option,
                    keywords=keyword_option),
                language='en'
            )

            json_results = results.get_result()

            # Collect just the surface texts from each result list.
            our_concepts = []
            for concept in json_results['concepts']:
                our_concepts.append(concept['text'])
            
            our_keywords = []
            for keyword in json_results['keywords']:
                our_keywords.append(keyword['text'])
            
            # Extend the shared lists atomically; rebinding (rather than
            # .extend) happens entirely inside the critical section.
            self.lock.acquire()
            self.concepts = self.concepts + our_concepts
            self.keywords = self.keywords + our_keywords
            self.lock.release()
        
        except Exception as e:
            # Best-effort: log and continue so sibling workers finish.
            print(str(e))
Beispiel #28
0
def nluRun(txt):
    """Run Watson NLU (entities, keywords, concepts; limit 3 each) over
    *txt* and print each concept with its relevance score.

    *txt* is expected to be all tweets pre-joined into one string, so this
    analyzes the batch in a single API call.
    """
    response = natural_language_understanding.analyze(
        text=txt,
        features=Features(entities=EntitiesOptions(emotion=True,
                                                   sentiment=True,
                                                   limit=3),
                          keywords=KeywordsOptions(emotion=True,
                                                   sentiment=True,
                                                   limit=3),
                          concepts=ConceptsOptions(limit=3)))

    # Iterate the concepts directly instead of indexing via
    # range(len(...)); same output, idiomatic form.
    for concept in response["concepts"]:
        sendArray = {'Concept': concept["text"],
                     'Relevance': concept["relevance"]}
        print(sendArray)
Beispiel #29
0
def get_data(request,   ):
    """Django-style view: run Watson NLU over a hard-coded dump of scraped
    YouTube page text and return the analysis as a JsonResponse.

    NOTE(review): the `text` literal below spans multiple physical lines
    without continuation/triple quotes as shown here — likely a
    paste/extraction artifact; confirm it is one logical line in the real
    source. The iam_apikey is blank and must be supplied for this to run.
    """
    # response = watsonResponse
    # return JsonResponse(response)
    text = "A Beginner’s Guide to Sous Vide Cooking- Kitchen Conundrums with Thomas JosephA Recap of Bill Burr's Best Interview Moments of 2018 Sources:- Sara Silverman:- https://www.youtube.com/watch?v=i-VU25UfHWM Comic Con:- https://www.youtube.com/watch?v=JOVWum1vv4Y&t=8s The Herd...ABC NewsAcaiadam22Adam22 GOES IN on Corny Rapper who paid to be on WorldstarAden FilmsAmerican Football - TopicAndrea Savage on Husband's Tank Top, Daughter & Her Mom's Bizarre Holiday TraditionsApple Killed the Mac Mini.April Fools' Day Pranks with Mark RoberAwkward Bill Burr vs Sarah Silverman InterviewBaked Lemon Chicken with Garlic Lemon Cream Sauce | Oven Baked Chicken RecipeBaked Potato Puffs - Food WishesBaking - TopicBash: How can that come out of Sarah Sanders' mouth?Beastie Boys, Nas - Too Many RappersBeastieBoysBernie Sanders Assesses The 2020 Presidential FieldBest Ever Food Review ShowBest web features of 2018: Part 2/4 - HTTP203Bill Burr | Best of 2018 | A Year In ReviewBinging with BabishBinging with BabishBinging with BabishBINGING WITH BABISH  S1 • E78BINGING WITH BABISH  S2 • E30Binging with Babish: Chateaubriand Steak from The MatrixBinging with Babish: Roast Beast from How The Grinch Stole ChristmasBinging with Babish: Seinfeld Volume IIBlockchain Takes ManhattanBloombergBon AppétitBoy's emotional Christmas surprise goes viralBrexit (2019) | Official Trailer | HBOBrexit Update - UK Version: Last Week Tonight with John Oliver (HBO)BroadlyBroncos vs. 
49ers Week 14 Highlights | NFL 2018BuzzFeedVideoBuzzFeedVideo viewers also watch...Cal McKinley - Go LocalCan a Millennial Troll Survive NATO's Biggest War Games?Cardi B Carpool KaraokeCarFactionCars - TopicCFP RankingsCHAMPAIGN ILL  S1 • E1Champaign ILL - Ep 1 “A Gangster Way To Start Your Day”Check out original movies and series from YouTube Creators and moreChicken Noodle Soup - How to Make Classic Chicken Noodle SoupChinese Girl Visits Amish Country - She Was Shocked!CHRISTMAS RECIPE: Honey Glazed Ham With Pear & Saffron ChutneyCNNCNNCNNCNNCNNCNNCNNCNNCobra KaiCOBRA KAI  S1 • E1Cobra Kai Ep 1 - “Ace Degenerate” - The Karate Kid Saga ContinuesComedians 'R' GoComedy - TopicComedy CentralComedy Central Stand-UpComedy UniversityComedy UniversityComplexComputerphileCONAN On TBS Returns January 22ndContinue watchingCooking - TopicCS50CS50 Lecture by Steve BallmerDALLAS & ROBO  S1 • E1Daniel Solves Your Local Twissues - Tosh.0Day In The Life Of A Software Engineer | Weekend EditionDMX Ends 6ix9ine With Insane FreestyleDoes Mick Mulvaney Like Donald Trump? 'No'Doug DeMuroDoug DeMuroEaterEaterEMOJOIE CUISINEEngineering ExplainedEp 1 - Dallas & Robo Aces WildEpicuriousESPNEveryday FoodFOOD INSIDERFOOD INSIDERFood Truck Serves 3,000 Grilled Cheese Sandwiches A DayFood WishesFood WishesFood WishesFood WishesFood WishesFood WishesFood Wishes viewers also watch...Free episodeFree episodeFree episodeFree episodeFreethinkFrench Cooking AcademyFrom your subscriptionsGeorge W. BushGetting High over Tea with Natasha Leggero and Moshe KasherGochujang MamáGoogle Chrome DevelopersGordon RamsayGrilled Greek Chicken - Garlic, Lemon & Herb Grilled Chicken RecipeHBOHere's Why the Bugatti Veyron Is the Coolest Car of the 2000sHey Laowinners! My Chinese wife has heard of Amish people before, but she never knew she would have a chance to visit them at some point. 
She was fascinated by their way of life, and how they don'...Hip Hop Music - TopicHomemade Meatloaf Recipe - Laura Vitale - Laura in the Kitchen Episode 552HOW - TO  S1 • E2How Newark Got Lead In Its Water, And What It Means For The Rest Of America (HBO)How to cook a ALL AMERICAN THANKSGIVINGHow to cook a CHRISTMAS FEASTHow to cook a HANGOVER CURE FEASTHow to cook a SUPER SAIYAN FEASTHow To Cook the Perfect Prime Rib RoastHow to Make Danish Christmas Rice PuddingHow To Make Pot Au Feu: the mother recipe of French soups ( Tutorial for beginners)How To Make Scones | Jamie Oliver | ADI Design A Website In Less Than 1 Hour! | Web Design Challenge | Web Design Guide | mmtutsInstant Pot Roast (Best Ever - Literally)Is the Instant Pot Worth It? — The Kitchen Gadget Test ShowJamie OliverJason Momoa Hasn't Seen Aquaman Yet! | The Graham Norton ShowJeff Ross & David Attell Roast Kimmel AudienceJeff Ross Talks to Mexican Immigrants Deported from America - Jeff Ross Roasts the BorderJerry Seinfeld: Kevin Hart Is ‘Going To Be Fine’ After Oscars Fallout | TODAYJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy Kimmel LiveJimmy O. Yang's Crazy Tinder DateJoe Rogan - Anthony Cumia on Artie LangeJoe Rogan - Ted Nugent is a Good Guy!Joe Rogan | Can You Get Salmonella From Eating Eggs?Joe Rogan Experience #1216 - Sir Roger PenroseJoe Rogan Shares Crazy Baboon StoriesJoe Rogan: Weasels are Badass!Joe Rogan's Hilarious Jennifer Lopez RantJoe Wong: Building A Wall Didn't Work For ChinaJoin me on my day in a life on a weekend during Halloween and Pumpkin season! 
❤ Luba Music by Chillhop: http://chillhop.com/listen Birocratic - Tony's Belated Breakfast: https://soundcloud.com/bi...JRE ClipsJRE ClipsJRE ClipsJRE ClipsJRE ClipsJRE ClipsJRE ClipsJudge asks prosecutors: Could Flynn have been charged with treason?Judge delays Michael Flynn sentencing after blistering rebukeJWoww Gets Estranged Husband Booted From Home | TMZ LiveKansas City, MO Blizzard Impacts Region - 11/25/2018Kanye West - Glastonbury 2015 (Full Show HD)Kodak Black - TestimonyKyle Shanahan 'Nick Mullens Has Shown He Can Play QB in this League' | San Francisco 49ersKyle Shanahan 'Yesterday was Nick Mullens' Best Game' | San Francisco 49erslaowhy86laowhy86LastWeekTonightLate Night with Seth MeyersLate Night with Seth MeyersLaura in the KitchenLife of LubaLife of LubaLinus Tech TipsLinus Tech TipsLinus Tech TipsLinus Tech TipsLIVE NOWLive! 49ers vs Broncos NFL 2018 Week 14 PredictionsLOBSTER BEACH BBQ! And Unique Kenyan Street Food in Malindi, Kenya!Mark WiensMark WiensMark WiensMashedMistakes Everyone Makes Using The Slow CookermmtutsMOVING UPSTREAM  S2 • E1MunchiesNancy And Chuck Are: Democrats On The OffensiveNew York Cheesecake RecipeNFLNFL 2018-19 Week 14 Denver Broncos -- San Francisco 49ersNFL Full Games 2018 / 2019No CloutNot a Very Merry Christmas for Donald TrumpOFFICIAL TRAILER | Ryan Hansen Solves Crimes on Television* Season 2Penny Marshall dead at 75Pilot - (Ep 1)Popular uploadsPopulist Revolution - Will It Go Left Or Right? - Candace Owens & Russell BrandPowerfulJREPremiumPremiumPremiumPremiumPressure LuckPRIME TIME  S1 • E16Priya Makes Pav Bhaji | From the Test Kitchen | Bon AppétitQuang TranQuang TranQuang TranQuang TranRARE Noodles of Saigon, Vietnam! 
All the Best Hidden Noodles You've Never Seen!Recently uploadedRecommendedRecommended channel for youRecommended channel for youRecommended channel for youRecommended channel for youRecommended channel for youRecommended channel for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRecommended videos for youRED CARDS  S1 • E6reflectivehatemoshRiFF RAFF Teal Tone Lobster (WSHH Exclusive - Official Music Video)Rivian R1T (2020) - World's First Electric PickUp TruckRoadieRon Burgundy Interviews Peyton Manning | SportsCenter | ESPN ArchivesRonbo SportsRussell BrandRYAN HANSEN SOLVES CRIMES ON TELEVISION  S1 • E1Ryan Hansen Solves Crimes on Television*Ryan Hansen Solves Crimes on Television*Ryan Hansen Solves Crimes on Television*Ryan Hansen Solves Crimes on Television* - OFFICIAL TRAILERRyan Reynolds Has Had Enough of 'Frozen'San Francisco 49ersSan Francisco 49ersSarah Sanders asked why Michael Flynn isn't a 'rat'Saturday Night Live viewers also watch...Seth MacFarlane on His Childhood Cartoons & Family GuySeth MacFarlane Smoked Weed with His ParentsShannon FRUSTRATED Packers LOSS TO Bears 17-24; Aaron Rodgers 35-42, 274 Yds, Int✦ NFL Gameday PrimeSicilian Christmas Pizza (Sfincione) - Food WishesSICKO MODE but I don't think I got the right versionSlow Cooker Beef Pot Roast Recipe - How to Make Beef Pot Roast in a Slow CookerSNL star Pete Davidson appears on camera hours after disturbing postSpanish Garlic Soup - Sopa de Ajo Recipe - Bread and Garlic SoupSteak - TopicStephen Miller and Rudy Giuliani Try to Defend Trump: A Closer LookStephen Miller Has A Bad Hair DayStormChasingVideoStreamed 1 week agoStreet FoodStreet food - TopicStreet Food in Gilgit + PAKISTANI VILLAGE FOOD | Ultra Happiness in 
Gilgit-Baltistan, Pakistan!SZECHUAN Seafood EXTREME - INSANE Chinese Seafood TOUR in Chengdu, China - SPICY CHINESE SEAFOOD!!!T-ROY COOKSTalk Shows - TopicTasting the World’s First Test-Tube SteakTeam CocoThai Street Food - Street Food Thailand - Street Food BangkokThe 2019 Bentley Continental GT Is a $250,000 Ultra-Luxury CoupeThe Best Cheesesteak In Philadelphia | Best Of The BestThe Daily Show with Trevor NoahThe Food RangerThe Garage Converting Classic Cars to Electric Vehicles | Freethink DIY ScienceThe Graham Norton ShowTHE KITCHEN GADGET TEST SHOW  S1 • E7The Late Late Show with James CordenThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Late Show with Stephen ColbertThe Mueller Russia investigation's key players: Michael Cohen, Michael Flynn and Paul ManafortThe President Is Facing 17 InvestigationsThe Roast of Donald Trump (2011) FullThe Secrets Behind New York's Most Famous Spicy Noodle Dish — Prime TimeThe Troubling Death of an NBA HopefulTheEllenShowThis 3D Printed Rotary Engine Is Genius - Mazda RX-7This guy should get FIRED!! - $1500 Gaming PC Secret Shopper pt3TigerBellyClipsTMZLiveTODAYTrump's Boarder Tweet, the White House Christmas Reception - MonologueTrump’s New Chief of Staff & Stephen Miller’s New Hairline | The Daily ShowUnique Food in Baltistan - 14 TRADITIONAL DISHES in Skardu | Pakistani Food in Gilgit-Baltistan!Urban Stealth Truck Camping 2.0Vanilla custard cream filled doughnut | Honeykki 꿀키VICEVICE NewsVICE SportsWall Street JournalWe Stole Tampons from the Cashier-less Amazon Go StoreWhat's your Favourite Programming Language? (sound check Q) - ComputerphileWORLDSTARHIPHOPWORTH IT  S5 • E8YouTube OriginalsYouTube OriginalsYouTube Originals"

    # NLU client — iam_apikey is empty and must be filled in to run.
    naturalLanguageUnderstanding = NaturalLanguageUnderstandingV1(
    version='2018-11-16',
    iam_apikey='',
    url='https://gateway.watsonplatform.net/natural-language-understanding/api')

    # Full-feature analysis: document-level sentiment/emotion plus
    # per-mention emotion/sentiment on entities and keywords.
    response = naturalLanguageUnderstanding.analyze(
    text= text,
    features=Features(
        concepts=ConceptsOptions(limit=10),
        categories=CategoriesOptions(limit=10),
        sentiment=SentimentOptions(document=True),
        emotion=EmotionOptions(document=True),
        entities=EntitiesOptions(emotion=True, sentiment=True, limit=30),
        keywords=KeywordsOptions(emotion=True, sentiment=True, limit=30))).get_result()


    print(json.dumps(response, indent=2))
    return JsonResponse(response)
Beispiel #30
0
def nlp_ibm(text):
    """Analyze *text* with Watson NLU.

    Returns [character count, sentiment label, sentiment score] on success,
    or an empty list when *text* is empty/falsy or the analysis fails.
    """
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username='******', password='******', version='2018-03-16')

    # Nothing to analyze.
    if not text:
        return []

    try:
        analysis = natural_language_understanding.analyze(
            text=text,
            return_analyzed_text=False,
            features=Features(categories=CategoriesOptions(),
                              concepts=ConceptsOptions(limit=1),
                              emotion=EmotionOptions(),
                              sentiment=SentimentOptions()))
        document_sentiment = analysis["sentiment"]["document"]
        return [
            analysis["usage"]["text_characters"],
            document_sentiment["label"],
            document_sentiment["score"],
        ]
    except Exception as e:
        # Best-effort: log the failure and report "no result".
        print(e)
        return []